pax_global_header00006660000000000000000000000064141325051400014505gustar00rootroot0000000000000052 comment=51e1d12cc573f9d79707037cca7f565c926716ce buildbot-3.4.0/000077500000000000000000000000001413250514000133155ustar00rootroot00000000000000buildbot-3.4.0/.bbtravis.yml000066400000000000000000000177001413250514000157370ustar00rootroot00000000000000# BBTravis CI configuration file language: python # Available Python versions: python: - "3.8" label_mapping: TWISTED: tw SQLALCHEMY: sqla SQLALCHEMY_MIGRATE: sqlam latest: l python: py TESTS: t DB_TYPE: db WORKER_PYTHON: wp env: global: - BUILDBOT_TEST_DB_URL=sqlite:// - NUM_CPU=700m - MEMORY_SIZE=1G - CHROME_BIN=/usr/bin/google-chrome matrix: # include "ci" string into the name of the status that is eventually submitted to Github, so # that the codecov.io service would wait until this build is finished before creating report. - TWISTED=latest SQLALCHEMY=latest TESTS=ci/coverage # add js tests in separate job. Start it early because it is quite long - TWISTED=latest SQLALCHEMY=latest TESTS=js_build NUM_CPU=2 MEMORY_SIZE=2G - TWISTED=latest SQLALCHEMY=latest TESTS=js_unit NUM_CPU=2 MEMORY_SIZE=2G - TWISTED=latest SQLALCHEMY=latest TESTS=smokes NUM_CPU=4 MEMORY_SIZE=4G - TWISTED=17.9.0 SQLALCHEMY=latest TESTS=trial - TWISTED=latest SQLALCHEMY=latest TESTS=interop WORKER_PYTHON=3.8 - TWISTED=latest SQLALCHEMY=latest TESTS=interop WORKER_PYTHON=3.7 - TWISTED=latest SQLALCHEMY=latest TESTS=interop WORKER_PYTHON=3.6 # Configuration when SQLite database is persistent between running tests # (by default in other tests in-memory SQLite database is used which is # recreated for each test). # Helps to detect issues with incorrect database setup/cleanup in tests. 
- TWISTED=latest SQLALCHEMY=latest TESTS=trial BUILDBOT_TEST_DB_URL=sqlite:////tmp/test_db.sqlite DB_TYPE=sqlite # Configuration that runs tests with real MySQL database (TODO does not work yet with our docker image) - TWISTED=latest SQLALCHEMY=latest TESTS=trial BUILDBOT_TEST_DB_URL=mysql+mysqldb://travis@127.0.0.1/bbtest DB_TYPE=mysql # innodb tests takes 20min probably because of docker aufs. # travis images provides much faster innodb so we keep these test there until we implement ramfs based # mysql installation # - TWISTED=latest SQLALCHEMY=latest TESTS=trial BUILDBOT_TEST_DB_URL=mysql+mysqldb://travis@127.0.0.1/bbtest?storage_engine=InnoDB # Configuration that runs tests with real PostgreSQL database with pg8000 and psycopg2 drivers # psycopg2 uses Peer Authentication which is configured in the dockerfile, while pg8000 use md5 auth with dummy password #- TWISTED=latest SQLALCHEMY=latest TESTS=trial BUILDBOT_TEST_DB_URL=postgresql+psycopg2:///bbtest #- TWISTED=latest SQLALCHEMY=latest TESTS=trial 'BUILDBOT_TEST_DB_URL=postgresql+pg8000:///bbtest?user=buildbot&password=x' # Test different versions of SQLAlchemy - TWISTED=17.9.0 SQLALCHEMY=1.3.0 TESTS=trial - TWISTED=17.9.0 SQLALCHEMY=latest TESTS=trial # Tests for the worker on old versions of twisted. 
- TWISTED=17.9.0 SQLALCHEMY=latest TESTS=trial_worker matrix: fast_finish: true include: # flake8, isort, pylint, docs first as they're more likely to find issues - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=flake8 - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=isort - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=pylint NUM_CPU=2 MEMORY_SIZE=4G - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=docs - python: "3.6" env: TWISTED=latest SQLALCHEMY=latest TESTS=ci/coverage - python: "3.7" env: TWISTED=latest SQLALCHEMY=latest TESTS=trial - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=trial - python: "3.9" env: TWISTED=latest SQLALCHEMY=latest TESTS=trial - python: "3.9" env: TWISTED=latest SQLALCHEMY=latest TESTS=minimal_install - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=interop WORKER_PYTHON=2.7 # keep worker supported on py2.7. # Twisted 20.3.0 is last version which supports py2.7. - python: "2.7" env: TWISTED=20.3.0 SQLALCHEMY=latest TESTS=trial_worker # Dependencies installation commands install: - pip install -U pip - condition: TESTS not in ("minimal_install", "smokes", "trial_worker") cmd: pip install -r requirements-ci.txt - condition: TESTS == "minimal_install" cmd: pip install -r requirements-minimal.txt - condition: TESTS == "trial_worker" cmd: pip install -r requirements-ciworker.txt - condition: TESTS == "docs" cmd: pip install -r requirements-cidocs.txt - condition: '"sqlite" not in BUILDBOT_TEST_DB_URL' cmd: pip install -r requirements-cidb.txt - condition: TESTS == "interop" cmd: | virtualenv -p python$WORKER_PYTHON /tmp/workerenv /tmp/workerenv/bin/pip install -e worker - | # pip installs for backward compat set -e if [ $TWISTED = trunk ]; then pip install git+https://github.com/twisted/twisted fi if [ $TWISTED != latest -a $TWISTED != trunk ]; then pip install Twisted==$TWISTED ; fi if [ $SQLALCHEMY != latest ]; then pip install sqlalchemy==$SQLALCHEMY; fi - step: 
!ShellCommand command: "/buildbot/buildbot-job/build/sandbox/bin/pip check" warnOnFailure: True flunkOnFailure: False haltOnFailure: False name: "pip check" title: "pip check" before_script: # create real database for tests - condition: '"mysql" in BUILDBOT_TEST_DB_URL' cmd: sudo /prepare_mysql - condition: '"postgresql" in BUILDBOT_TEST_DB_URL' cmd: | sudo /prepare_postgres # for pg8000 driver we can't use peer authentication or empty password, so set a dummy password # This also serves as a way to wait that the database is ready while ! psql -d bbtest -c 'ALTER USER "buildbot" WITH PASSWORD '"'x'"';' ; do sleep 1 ; done # Tests running commands script: # make frontend_install_tests takes 17 min, so we only do it post submit - title: frontend build tests condition: TESTS == "js_build" and TRAVIS_PULL_REQUEST cmd: make frontend - title: full frontend tests condition: TESTS == "js_build" and not TRAVIS_PULL_REQUEST cmd: make frontend_install_tests - title: frontend unit tests condition: TESTS == "js_unit" cmd: make frontend_tests_headless - title: master and worker tests condition: TESTS in ("minimal_install", "trial") cmd: trial --reporter=text --rterrors buildbot.test buildbot_worker.test - title: interop tests condition: TESTS == "interop" cmd: SANDBOXED_WORKER_PATH=/tmp/workerenv/bin/buildbot-worker coverage run --rcfile=.coveragerc $(which trial) --reporter=text --rterrors buildbot.test.integration.interop - title: worker tests condition: TESTS == "trial_worker" cmd: trial --reporter=text --rterrors buildbot_worker.test # run tests under coverage for latest only (it's slower..) 
- title: coverage tests condition: TESTS == "ci/coverage" cmd: coverage run --rcfile=.coveragerc $(which trial) --reporter=text --rterrors buildbot.test buildbot_worker.test # Run additional tests in their separate job - title: pylint condition: TESTS == "pylint" cmd: make pylint - title: flake8 condition: TESTS == "flake8" cmd: make flake8 - title: isort condition: TESTS == "isort" cmd: isort --check -df `git ls-files |grep '.py$'` # Build documentation - title: docs condition: TESTS == "docs" cmd: make docs-release # Run spell checker on documentation - title: spelling condition: TESTS == "docs" cmd: make docs-release-spelling - title: maketarballs condition: TESTS == "smokes" cmd: make tarballs - title: protractor tests condition: TESTS == "smokes" cmd: ./common/smokedist.sh whl - title: tarballs protractor tests condition: TESTS == "smokes" and not TRAVIS_PULL_REQUEST cmd: ./common/smokedist.sh tar.gz notifications: email: false after_script: - | # codecov if [ $TESTS = ci/coverage ]; then CODECOV_TOKEN="b80c80d7-689d-46d7-b1aa-59168bb4c9a9" codecov; fi # List installed packages along with their versions. - "pip list" sudo: false branches: # Only build main-line branches. 
only: - master - eight git: depth: 300 buildbot-3.4.0/.circleci/000077500000000000000000000000001413250514000151505ustar00rootroot00000000000000buildbot-3.4.0/.circleci/config.yml000066400000000000000000000054251413250514000171460ustar00rootroot00000000000000# Python CircleCI 2.0 configuration file # # Check https://circleci.com/docs/2.0/language-python/ for more details # version: 2 jobs: build: docker: # use the same build image as we use for metabuildbot - image: buildbot/metabbotcfg working_directory: ~/repo steps: - checkout # Download and cache dependencies - restore_cache: keys: - 37-dependencies-{{ checksum "requirements-ci.txt" }}-{{ checksum "requirements-cidocs.txt" }} # fallback to using the latest cache if no exact match is found - 37-dependencies- - run: name: install dependencies command: | env python3.7 -m venv .venv . .venv/bin/activate pip install -U pip 'setuptools<45.0.0' pip install -r requirements-ci.txt pip install -r requirements-cidocs.txt pip install pyinstaller - save_cache: paths: - .venv key: 3-dependencies-{{ checksum "requirements-ci.txt" }}-{{ checksum "requirements-cidocs.txt" }} - run: name: run tests command: | . .venv/bin/activate make docs-release make tarballs # Note that installing www/base depends on frontend_deps target being built, which is # a dependency of the tarballs target. pip install -e www/base pyinstaller -F pyinstaller/buildbot-worker.spec # we test the new generated binary with the global virtualenv SANDBOXED_WORKER_PATH=`pwd`/dist/buildbot-worker trial --reporter=text --rterrors buildbot.test.integration.interop - persist_to_workspace: root: dist paths: . 
- store_artifacts: path: master/docs/_build/html/ destination: docs - store_artifacts: path: dist destination: dist # publish pipeline that is run on tags publish: docker: # image that can push to github - image: cibuilds/github:0.10 steps: - attach_workspace: at: dist - run: name: upload binaries to github release command: | env # rename the buildbot-worker pyinstaller binary mv dist/buildbot-worker dist/buildbot-worker-linux-amd64-$CIRCLE_TAG.bin # upload the github release binary ghr -t $GITHUB_TOKEN -u $CIRCLE_PROJECT_USERNAME -r $CIRCLE_PROJECT_REPONAME --replace $CIRCLE_TAG dist/ workflows: version: 2 build-deploy: jobs: - build: filters: tags: only: /v.*/ - publish: requires: [build] filters: tags: only: /v.*/ branches: ignore: /.*/ buildbot-3.4.0/.coveragerc000077700000000000000000000000001413250514000207642common/coveragercustar00rootroot00000000000000buildbot-3.4.0/.dockerignore000066400000000000000000000004541413250514000157740ustar00rootroot00000000000000**/*.pyc **/dist **/*.egg-info **/build **/node_modules # Note: not ignoring .git so that build scripts can figure out which version of Buildbot we're # building. .dockerignore .venv* Dockerfile.master master/_build master/docs/manual/mydashboard.html master/docs/manual/mydashboard.py _trial_temp buildbot-3.4.0/.github/000077500000000000000000000000001413250514000146555ustar00rootroot00000000000000buildbot-3.4.0/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000012411413250514000204540ustar00rootroot00000000000000## Remove this paragraph If you don't remove this paragraph from the pull request description, this means you didn't read our contributor documentation, and your patch will need more back and forth before it can be accepted! Please have a look at our developer documentation before submitting your Pull Request. 
http://docs.buildbot.net/latest/developer/quickstart.html And especially: http://docs.buildbot.net/latest/developer/pull-request.html ## Contributor Checklist: * [ ] I have updated the unit tests * [ ] I have created a file in the `newsfragments` directory (and read the `README.txt` in that directory) * [ ] I have updated the appropriate documentation buildbot-3.4.0/.github/stale.yml000066400000000000000000000017451413250514000165170ustar00rootroot00000000000000# Configuration for probot-stale - https://github.com/probot/stale # Number of days of inactivity before an Issue or Pull Request becomes stale daysUntilStale: 60 # Number of days of inactivity before a stale Issue or Pull Request is closed daysUntilClose: 67 # Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable exemptLabels: [] # Label to use when marking as stale staleLabel: stalled # Comment to post when marking as stale. Set to `false` to disable markComment: > This pull request has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions. # Comment to post when removing the stale label. Set to `false` to disable unmarkComment: false # Comment to post when closing a stale Issue or Pull Request. 
Set to `false` to disable closeComment: "closing due to our stalled pull request policy" # Limit to only `issues` or `pulls` only: pulls buildbot-3.4.0/.github/workflows/000077500000000000000000000000001413250514000167125ustar00rootroot00000000000000buildbot-3.4.0/.github/workflows/ci.yml000066400000000000000000000054621413250514000200370ustar00rootroot00000000000000name: CI on: push: branches: - master pull_request: branches: - master jobs: db: name: DB / ${{ matrix.name }} runs-on: ubuntu-latest strategy: fail-fast: false matrix: include: - name: MySQL 5 database: mysql:5 connection: 'mysql+mysqldb://buildbot:buildbot@127.0.0.1:3306/bbtest?storage_engine=InnoDB' check: mysqladmin ping - name: MySQL latest database: mysql:latest connection: 'mysql+mysqldb://buildbot:buildbot@127.0.0.1:3306/bbtest?storage_engine=InnoDB' check: mysqladmin ping - name: PostgreSQL 9 / psycopg2 database: postgres:9 connection: 'postgresql+psycopg2://buildbot:buildbot@127.0.0.1:5432/bbtest' check: pg_isready - name: PostgreSQL 9 / pg8000 database: postgres:9 connection: 'postgresql+pg8000://buildbot:buildbot@127.0.0.1:5432/bbtest' check: pg_isready - name: PostgreSQL latest / psycopg2 database: postgres:latest connection: 'postgresql+psycopg2://buildbot:buildbot@127.0.0.1:5432/bbtest' check: pg_isready - name: PostgreSQL latest / pg8000 database: postgres:latest connection: 'postgresql+pg8000://buildbot:buildbot@127.0.0.1:5432/bbtest' check: pg_isready env: BUILDBOT_TEST_DB_URL: ${{ matrix.connection }} services: database: image: ${{ matrix.database }} env: MYSQL_USER: buildbot MYSQL_PASSWORD: buildbot MYSQL_DATABASE: bbtest MYSQL_ALLOW_EMPTY_PASSWORD: yes POSTGRES_USER: buildbot POSTGRES_PASSWORD: buildbot POSTGRES_DB: bbtest ports: - '3306:3306' - '5432:5432' options: --health-cmd "${{ matrix.check }}" --health-interval 10s --health-timeout 5s --health-retries 10 steps: - uses: actions/checkout@v2 - run: sudo apt-get install aspell aspell-en enchant iamerican ispell - name: Set up 
Python uses: actions/setup-python@v2 with: python-version: 3.8 - name: Cache pip uses: actions/cache@v2 with: path: ~/.cache/pip key: ${{ runner.os }}-pip-${{ hashFiles('requirements-ci.txt', 'requirements-cidb.txt') }} restore-keys: ${{ runner.os }}-pip- - run: pip install -U pip - run: pip install -r requirements-ci.txt -r requirements-cidb.txt # run real db tests under coverage to have several merging coverage report # https://github.com/codecov/support/wiki/Merging-Reports - run: coverage run --rcfile=.coveragerc $(which trial) --reporter=text --rterrors buildbot.test buildbot_worker.test - run: codecov buildbot-3.4.0/.isort.cfg000066400000000000000000000006761413250514000152250ustar00rootroot00000000000000[settings] line_length=110 known_future_library=__future__,future known_standard_library=pkg_resources,html known_twisted=twisted,zope,autobahn,klein,txaio known_mock=mock known_third_party=migrate,sqlalchemy,ldap3,txrequests,requests,MySQLdb,coverage,jinja2,dateutil,sphinx,setuptools,jwt,flask,docutils,aiohttp known_first_party=buildbot,buildbot_worker force_single_line=1 sections=FUTURE,STDLIB,THIRDPARTY,MOCK,TWISTED,FIRSTPARTY,LOCALFOLDER buildbot-3.4.0/.mailmap000066400000000000000000000077071413250514000147510ustar00rootroot00000000000000Abdelrahman Hussein A. T. Hofkamp Amber Yust Andrew Melo Aurélien Bompard Ben Hearsum Ben Hearsum Ben Hearsum Benoît Allard Benoît Allard Benoît Allard Brian Warner Brian Warner Brian Warner Brian Warner Chad S Metcalf Charles Lepple Chris Soyars Dan Scott Daniel Dunbar Daniel Dunbar Douglas Hubler Dustin J. Mitchell Dustin J. Mitchell Dustin J. Mitchell Gary Poster Gary Poster Georges Racinet Geraud Boyer Greg Ward Ian Zimmerman Ian Zimmerman John Carr John Ford John O'Duinn Jon Olsson Jonathan S. 
Romero Joshua Kugler Justin Wood Justin Wood Justin Wood Justin Wood Kristian Nielsen Lital Natan Louis Opter Louis Opter Marc-Antoine Ruel Marc-Antoine Ruel Marcus Lindblom Mark Lakewood Mark Lakewood Matisse Enzer Michael MacDonald Michael MacDonald Nate Bragg Neil Hemingway Neil Hemingway Nicolas Sylvain Pierre Tardy Quentin Raynaud Randall Bosetti Randall Bosetti Rene Müller Rene Müller Scott Garman Stefan Seefeld Stefan Seefeld Stefan Zager Steve "Ashcrow" Milner William Deegan Zooko Wilcox-O'Hearn adam Harry Borkhuis Andy Howell buildbot-3.4.0/.mention-bot000066400000000000000000000001041413250514000155440ustar00rootroot00000000000000{ "userBlacklist" : [ "tomprince", "djmitche" ] } buildbot-3.4.0/.pyup.yml000066400000000000000000000003621413250514000151140ustar00rootroot00000000000000# update schedule, default is not set # the bot will visit the repo once and bundle all updates in a single PR for the given # day/week/month schedule: "every two weeks" # allowed ["every day", "every week", "every two weeks", "every month"] buildbot-3.4.0/CONTRIBUTING.rst000066400000000000000000000033401413250514000157560ustar00rootroot00000000000000Contributing to Buildbot ======================== .. contents:: :local: We value your contribution to Buildbot and thank you for it! If it happens that your contribution is not reviewed within two days, please do not hesitate to remind us about it by leaving a comment "Please review this PR". What appears below is just a quick summary. See http://trac.buildbot.net/wiki/Development for the full story. Issues, Bugs, Tickets --------------------- Please file tickets for any bugs you discover at https://github.com/buildbot/buildbot/issues. It is not necessary to file a bug if you are preparing a patch. Submitting Patches ------------------ See http://trac.buildbot.net/wiki/SubmittingPatches for the details. Your contribution must be licensed under the GPLv2, and copyright assignment is not expected. 
See http://trac.buildbot.net/wiki/LicensingYourContribution for details. You should run ``common/validate.sh`` before sending your patches. Also you can install our git hook for validating and fixing most common coding style issues :: cp common/hooks/post-commit .git/hooks Review ------ Buildbot's code-review process is described at http://trac.buildbot.net/wiki/PatchReview. The important point to know is that Buildbot requires a positive review (adding the "merge-me" label) before a change is eligible to be merged. While we try to perform reviews in a timely fashion, if your review has lagged for a week or more please do feel free to nag us in whatever way is easiest for you. Development Tips ---------------- The easiest way to hack on Buildbot is in a ``virtualenv``. See http://docs.buildbot.net/latest/developer/tests.html#quick-start for a description of how to set up such a thing, and how to run the test suite. buildbot-3.4.0/Dockerfile.master000066400000000000000000000067731413250514000166160ustar00rootroot00000000000000# buildbot/buildbot-master # please follow docker best practices # https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/ # Use a multi-stage build: # https://docs.docker.com/develop/develop-images/multistage-build/ # Provides a base Debian (10) image with latest buildbot mater installed # the master image is not optimized for size, but rather uses Debian for wider package availability # Provide an intermediate Docker image named "buildbot-build". # This intermediate image builds binary wheels # which get installed in the final image. # This allows us to avoid installing build tools like gcc in the final image. 
FROM debian:10 AS buildbot-build MAINTAINER Buildbot maintainers # Last build date - this can be updated whenever there are security updates so # that everything is rebuilt ENV security_updates_as_of 2021-04-28 RUN \ apt-get update && \ apt-get -y upgrade && \ apt-get -y install -q \ curl \ git \ libcairo-gobject2 \ libcairo2-dev \ libgirepository1.0-dev \ libglib2.0-dev \ libffi-dev \ libpq-dev \ libssl-dev \ pkg-config \ python3 \ python3-dev \ python3-pip \ yarnpkg \ tar \ tzdata \ virtualenv \ && \ rm -rf /var/lib/apt/lists/* COPY . /usr/src/buildbot RUN cd /usr/src/buildbot && make tarballs RUN virtualenv --python=python3 /buildbot_venv && \ /buildbot_venv/bin/pip3 install -r /usr/src/buildbot/requirements-master-docker-extras.txt && \ env CRYPTOGRAPHY_DONT_BUILD_RUST=1 /buildbot_venv/bin/pip3 install /usr/src/buildbot/dist/*.whl RUN mkdir -p /wheels && \ /buildbot_venv/bin/pip3 list --format freeze | grep -v '^buildbot' | grep -v '^pkg_resources' > /wheels/wheels.txt && \ cat /wheels/wheels.txt && \ cd /wheels && \ /buildbot_venv/bin/pip3 wheel -r wheels.txt && \ rm /wheels/wheels.txt && \ cp /usr/src/buildbot/dist/*.whl /wheels #============================================================================================== # Build the final image here. Use build artifacts from the buildbot-build # container. # Note that the UI and worker packages are the latest version published on pypi # This is to avoid pulling node inside this container FROM debian:10-slim MAINTAINER Buildbot maintainers # Last build date - this can be updated whenever there are security updates so # that everything is rebuilt ENV security_updates_as_of 2021-04-28 RUN \ apt-get update && \ apt-get -y upgrade && \ apt-get -y install -q \ curl \ dumb-init \ git \ libpq5 \ libcairo2 \ openssh-client \ python3 \ python3-pip \ tar \ tzdata \ virtualenv \ && \ rm -rf /var/lib/apt/lists # Build wheels in other container using the Dockerfile.build # and copy them into this container. 
# We do this to avoid having to pull gcc for building native extensions. COPY --from=buildbot-build /wheels /wheels # install pip dependencies RUN virtualenv --python=python3 /buildbot_venv && \ /buildbot_venv/bin/pip3 install --upgrade pip setuptools && \ cd /wheels && /buildbot_venv/bin/pip3 install $(ls -1 | grep -v 'buildbot-worker') && \ rm -r /root/.cache /wheels COPY master/docker/buildbot.tac /usr/src/buildbot/buildbot.tac COPY master/docker/start_buildbot.sh /usr/src/buildbot/start_buildbot.sh WORKDIR /buildbot CMD ["dumb-init", "/usr/src/buildbot/start_buildbot.sh"] buildbot-3.4.0/LICENSE000066400000000000000000000354221413250514000143300ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. 
To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. 
The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. 
b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. 
You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. 
If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. 
If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. 
The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS buildbot-3.4.0/Makefile000066400000000000000000000134561413250514000147660ustar00rootroot00000000000000# developer utilities DOCKERBUILD := docker build --build-arg http_proxy=$$http_proxy --build-arg https_proxy=$$https_proxy ROOT_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) .PHONY: docs pylint flake8 virtualenv VENV_NAME := .venv$(VENV_PY_VERSION) PIP ?= $(ROOT_DIR)/$(VENV_NAME)/bin/pip PYTHON ?= $(ROOT_DIR)/$(VENV_NAME)/bin/python VENV_PY_VERSION ?= python3 YARN := $(shell which yarnpkg || which yarn) WWW_PKGS := www/base www/console_view www/grid_view www/waterfall_view www/wsgi_dashboards www/badges WWW_EX_PKGS := www/nestedexample www/codeparameter WWW_DEP_PKGS := www/guanlecoja-ui www/data_module ALL_PKGS := master worker pkg $(WWW_PKGS) WWW_PKGS_FOR_UNIT_TESTS := $(filter-out www/badges, $(WWW_DEP_PKGS) $(WWW_PKGS)) ALL_PKGS_TARGETS := $(addsuffix _pkg,$(ALL_PKGS)) .PHONY: $(ALL_PKGS_TARGETS) # build rst documentation docs: $(MAKE) -C master/docs dev @echo "You can now open master/docs/_build/html/index.html" docs-towncrier: if command -v towncrier >/dev/null 2>&1 ;\ then \ towncrier --draft | grep 'No significant changes.' 
|| yes n | towncrier ;\ fi docs-spelling: $(MAKE) -C master/docs SPHINXOPTS=-W spelling docs-linkcheck: $(MAKE) -C master/docs SPHINXOPTS=-q linkcheck docs-release: docs-towncrier $(MAKE) -C master/docs docs-release-spelling: docs-towncrier $(MAKE) -C master/docs SPHINXOPTS=-W spelling # pylint the whole sourcecode (validate.sh will do that as well, but only process the modified files) pylint: $(MAKE) -C master pylint; master_res=$$?; \ $(MAKE) -C worker pylint; worker_res=$$?; \ if [ $$master_res != 0 ] || [ $$worker_res != 0 ]; then exit 1; fi # flake8 the whole sourcecode (validate.sh will do that as well, but only process the modified files) flake8: $(MAKE) -C master flake8 $(MAKE) -C worker flake8 flake8 --config=common/flake8rc www/*/buildbot_*/ flake8 --config=common/flake8rc www/*/setup.py flake8 --config=common/flake8rc common/*.py frontend_deps: $(VENV_NAME) $(PIP) install -e pkg $(PIP) install mock wheel buildbot cd www/build_common; $(YARN) install --pure-lockfile for i in $(WWW_DEP_PKGS); \ do (cd $$i; $(YARN) install --pure-lockfile; $(YARN) run build); done frontend_tests: frontend_deps for i in $(WWW_PKGS); \ do (cd $$i; $(YARN) install --pure-lockfile); done for i in $(WWW_PKGS_FOR_UNIT_TESTS); \ do (cd $$i; $(YARN) run build-dev || exit 1; $(YARN) run test || exit 1) || exit 1; done frontend_tests_headless: frontend_deps for i in $(WWW_PKGS); \ do (cd $$i; $(YARN) install --pure-lockfile); done for i in $(WWW_PKGS_FOR_UNIT_TESTS); \ do (cd $$i; $(YARN) run build-dev || exit 1; $(YARN) run test --browsers BBChromeHeadless || exit 1) || exit 1; done # rebuild front-end from source frontend: frontend_deps for i in pkg $(WWW_PKGS); do $(PIP) install -e $$i || exit 1; done # build frontend wheels for installation elsewhere frontend_wheels: frontend_deps for i in pkg $(WWW_PKGS); \ do (cd $$i; $(PYTHON) setup.py bdist_wheel || exit 1) || exit 1; done # do installation tests. 
Test front-end can build and install for all install methods frontend_install_tests: frontend_deps trial pkg/test_buildbot_pkg.py # upgrade FE dependencies frontend_yarn_upgrade: for i in $(WWW_PKGS) $(WWW_EX_PKGS) $(WWW_DEP_PKGS); \ do (cd $$i; echo $$i; rm -rf yarn.lock; $(YARN) install || echo $$i failed); done # install git hooks for validating patches at commit time hooks: cp common/hooks/* `git rev-parse --git-dir`/hooks rmpyc: find master worker \( -name '*.pyc' -o -name '*.pyo' \) -exec rm -v {} \; isort: isort -rc worker master git diff --name-only --stat "HEAD" | grep '.py$$' | xargs autopep8 -i git add -u docker: docker-buildbot-worker docker-buildbot-master echo done docker-buildbot-worker: $(DOCKERBUILD) -t buildbot/buildbot-worker:master worker docker-buildbot-master: $(DOCKERBUILD) -t buildbot/buildbot-master:master master $(VENV_NAME): virtualenv -p $(VENV_PY_VERSION) $(VENV_NAME) $(PIP) install -U pip setuptools # helper for virtualenv creation virtualenv: $(VENV_NAME) # usage: make virtualenv VENV_PY_VERSION=python3.4 $(PIP) install -r requirements-minimal.txt \ packaging towncrier @echo now you can type following command to activate your virtualenv @echo . $(VENV_NAME)/bin/activate TRIALOPTS?=buildbot .PHONY: trial trial: virtualenv . $(VENV_NAME)/bin/activate && trial $(TRIALOPTS) release_notes: $(VENV_NAME) test ! -z "$(VERSION)" # usage: make release_notes VERSION=0.9.2 yes | towncrier --version $(VERSION) --date `date -u +%F` git commit -m "Release notes for $(VERSION)" $(ALL_PKGS_TARGETS): cleanup_for_tarballs frontend_deps . $(VENV_NAME)/bin/activate && ./common/maketarball.sh $(patsubst %_pkg,%,$@) cleanup_for_tarballs: find master pkg worker www -name VERSION -exec rm {} \; rm -rf dist mkdir dist .PHONY: cleanup_for_tarballs tarballs: $(ALL_PKGS_TARGETS) .PHONY: tarballs # helper for release creation release: virtualenv test ! 
-z "$(VERSION)" # usage: make release VERSION=0.9.2 test -d "../bbdocs/.git" # make release should be done with bbdocs populated at the same level as buildbot dir GPG_TTY=`tty` git tag -a -sf v$(VERSION) -m "TAG $(VERSION)" git push buildbot "v$(VERSION)" # tarballs are made by circleci.yml, and create a github release export VERSION=$(VERSION) ; . .venv/bin/activate && make docs-release rm -rf ../bbdocs/docs/$(VERSION) # in case of re-run cp -r master/docs/_build/html ../bbdocs/docs/$(VERSION) cd ../bbdocs && git pull . .venv/bin/activate && cd ../bbdocs && make && git add docs && git commit -m $(VERSION) && git push @echo When tarballs have been generated by circleci: @echo make finishrelease finishrelease: rm -rf dist python3 ./common/download_release.py rm -rf ./dist/v* twine upload --sign dist/* pyinstaller: virtualenv $(PIP) install pyinstaller $(VENV_NAME)/bin/pyinstaller -F pyinstaller/buildbot-worker.spec buildbot-3.4.0/README.rst000066400000000000000000000023601413250514000150050ustar00rootroot00000000000000========== Buildbot ========== -------------------------------------- The Continuous Integration Framework -------------------------------------- Buildbot is based on original work from `Brian Warner `_, and currently maintained by `the Botherders `_. Visit us on http://buildbot.net ! |travis-badge|_ |codecov-badge|_ |readthedocs-badge|_ Buildbot consists of several components: * master * worker * www/base * www/console_view * www/waterfall_view and so on See the README in each subdirectory for more information Related repositories: * https://github.com/buildbot/buildbot-media - Buildbot-related media * https://github.com/buildbot/buildbot-website - Source for http://buildbot.net .. |travis-badge| image:: https://travis-ci.org/buildbot/buildbot.svg?branch=master .. _travis-badge: https://travis-ci.org/buildbot/buildbot .. |codecov-badge| image:: http://codecov.io/github/buildbot/buildbot/coverage.svg?branch=master .. 
_codecov-badge: http://codecov.io/github/buildbot/buildbot?branch=master .. |readthedocs-badge| image:: https://readthedocs.org/projects/buildbot/badge/?version=latest .. _readthedocs-badge: https://readthedocs.org/projects/buildbot/builds/ buildbot-3.4.0/RELEASING.rst000066400000000000000000000105051413250514000153610ustar00rootroot00000000000000Creating a release ================== This document is documentation intended for Buildbot maintainers. It documents the release process of Buildbot. Step 1: Verify that external dependants can be built ---------------------------------------------------- Verify that the following resources can be built from the latest master. This can be checked by looking into the dashboards (maintainer access may be needed). - Docker Hub (buildbot-master) (https://hub.docker.com/repository/docker/buildbot/buildbot-master/general) - Docker Hub (buildbot-worker) (https://hub.docker.com/repository/docker/buildbot/buildbot-worker/general) - Read the Docs (https://readthedocs.org/projects/buildbot/builds/) These external dependencies build only by git tag. Thus if a release is done with broken build process, it would be impossible to fix without releasing a new version. Step 2: Release notes PR ------------------------ Open a new branch (e.g. `release`) and run the following: . .venv/bin/activate && make release_notes VERSION=x.y.z This collects the release notes using the `towncrier` tool and then commits the result. This step is done as a PR so that CI can check for spelling errors and similar issues. Local checks are insufficient as spelling check in particular depends on what dictionaries are installed. It's best to run `make docs-release` afterwards and check `master/docs/_build/html/relnotes/index.html` file for obvious rendering errors. This will have much faster turnaround compared to if the error is noticed after the CI runs. If any errors are found, just amend the commit created by `make release_notes`. 
Certain file names are not properly supported by the `towncrier` tool and it ignores them. Check `newsfragments` directory for any forgotten release notes Step 3: Merge the release notes PR ---------------------------------- Step 4: Perform actual release ------------------------------ This step requires the Buildbot git repository to contain `buildbot` remote that points to https://github.com/buildbot/buildbot and can be pushed to. Additionally, the Buildbot docs repository (https://github.com/buildbot/bbdocs) must be checked out at `../bbdocs` path. Pull the merge commit created on the `master` branch during the step 2. Then run: make release VERSION=x.y.z This will create the required tags, make documentation, copy it to bbdocs repo and push everything. Step 5: Draft a new release and wait for CircleCi to create release tarballs ---------------------------------------------------------------------------- The push of tags created during step 3 will activate CircleCi configuration that generates tarballs and uploads them to GitHub. CircleCi will automatically publish a new release when uploading assets. The release notes must be added manually by drafting a release on the GitHub UI at https://github.com/buildbot/buildbot/releases. If you draft the release and publish it before CircleCi, make sure the release name matches the git tag. This is a requirement for subsequent release scripts to work. Manual publishing is preferred, because the releases created by CircleCi don't contain release notes, thus GitHub notifications are not informative. Step 6: Upload release to pypi ------------------------------ This step requires GitHub Hub tool to be installed and authorized to GitHub (https://github.com/github/hub). Additionally you have to have access to GPG key that is used to sign the releases. Finally, you have to be added as a maintainer to all Buildbot PyPi projects. 
To complete the release just run the following: make finishrelease The above will download the releases from GitHub and upload them using twine. If you get bytes-related error after entering Pypi password, you'll need to upgrade Twine. Step 7: Announce the release ---------------------------- This step involves announcing the release of the new Buildbot version on several channels. Write an email to the BuildBot mailing lists: announce@buildbot.net, devel@buildbot.net, users@buildbot.net. Write a blog post on the Buildbot Medium account: https://medium.com/buildbot. The blog post should include the highlights of the release in less monotonous style than the release notes. Any extra important remarks can be added there. Lastly, include the output of `git shortlog --no-merges -ns v...v` to recognize the contributors. buildbot-3.4.0/appveyor.yml000066400000000000000000000045541413250514000157150ustar00rootroot00000000000000# AppVeyor CI # https://www.appveyor.com/docs environment: matrix: # For Python versions available on AppVeyor, see # http://www.appveyor.com/docs/installed-software#python - PYTHON: "C:\\Python36" - PYTHON: "C:\\Python37" - PYTHON: "C:\\Python38" install: - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" - "python -c \"import sys; print(sys.prefix)\"" - "python -c \"import sys; print(sys.exec_prefix)\"" - "python -c \"import sys; print(sys.executable)\"" - "python -V -V" - "python -m pip install -U pip setuptools" - "python -m pip install -r requirements-ci.txt" - "python -m pip list" # Check that pywin32 is properly installed - "python -c \"import win32api\"" build: false test_script: - "coverage run --rcfile=common/coveragerc -m twisted.trial --reporter=text --rterrors buildbot.test buildbot_worker.test" - ps: | echo $ENV:PYTHON if ($env:PYTHON -imatch 'C:\\Python27') { iex 'pyinstaller -F pyinstaller/buildbot-worker.spec' iex 'appveyor PushArtifact dist\\buildbot-worker.exe' } on_success: - "coverage xml --rcfile=common/coveragerc -o coverage.xml 
-i" - "codecov" on_failure: # Store _trial_temp directory as artifact on build failure. # See - ps: | $root = Resolve-Path _trial_temp; [IO.Directory]::GetFiles($root.Path, '*.*', 'AllDirectories') | % { Push-AppveyorArtifact $_ -FileName $_.Substring($root.Path.Length + 1) -DeploymentName trial-log } # Uncomment this part if you want to interactively debug tests on AppVeyor. # This will pause build at the end and setup RDP server. # Connection details will be printed in the build log. # For detail see: https://www.appveyor.com/docs/how-to/rdp-to-build-worker #on_finish: #- ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1')) deploy: release: $(APPVEYOR_REPO_TAG_NAME) description: 'windows binary for buildbot-worker' provider: GitHub auth_token: secure: HQNlcAyaY9Jznbl77rfNatZG62Gg+qFY7emzj5n3Wu16fkr8dLlFNTKOJlfXO5uK artifact: "buildbot-worker.exe" draft: false prerelease: false on: appveyor_repo_tag: true # deploy on tag push only PYTHON: "C:\\Python35" buildbot-3.4.0/common/000077500000000000000000000000001413250514000146055ustar00rootroot00000000000000buildbot-3.4.0/common/code_spelling_ignore_words.txt000066400000000000000000000410001413250514000227310ustar00rootroot00000000000000abspath accesskey accumulateclasslist ack acknowledgement activation activations actuateat actuateattimer actuateok adaptor adbapi addbuild addbuildset addbuildsetcalls addbuildsetforchanges addbuildsetforsourcestamp addbuildsetforsourcestampswithdefaults addbuildsetforxxx addchange addedbuilder addedchanges addlog addservice addsourcestamp addsourcestampset addstep addsuppression admin admin's admins agenced aiohttp allard allen allfiles allowanonymousaccess allowforce allowshutdown allura alwaysrun alwaysuselatest amazonaws andrew andy antoine anysentinel apache api apimaster app appdata applicative approle apps araujo arg args argv armstrong ascii asdict ashcrow aslist assertable 
assertargspecmatches assertbuildset assertconsumingchanges assertequal assertisinstance assertproduceswarning assertraisesregex assertraisesregexp assertregex assertregexpmatches assertresults async asynclrucache asyncio atlassian atm atomicity attachscheduler attr attrs attributeerror aug auth authenticator authz autocommit autoconf autocreatetables autodoc autogenerated autorelease avatarmethods awaitables awk aws axx backend backends backlinks backoff backporting backslashing basedir basename basepath baserev basetgz baseurl basicauth baz bb bbot bbindex bbreftargetdirective bc bdict bdictlist bear's behaviour ben benchmarking benjamin bennetts berlin bindparam bitbucket blahblah blamelist bldr blocksiag blocksize bobrik bool boolean boston botmaster botmaster's botname boto br brainerd branchfile branchtype brd brdict brdicts breq breqs brian brid brid's brids brs bruheim bsd bsid bsids buid buidlrequestcompletions buidlsets builbot buildable buildargs buildbot buildbotnetusagedata buildbotNetUsageData buildbotoptions buildbot's buildbots buildbotting buildboturl buildclass builddir builderadded builderchangedstate builderid buildermasterids buildername buildernames builderremoved buildetaupdate buildfactory buildfinished buildid buildmaster buildmaster's buildmasters buildnum buildot buildreq buildrequest buildrequestcompletions buildrequestdistributor buildrequestresults buildrequests buildrequestsconnectorcomponent buildresult buildroot build's buildset buildsetcomplete buildset's buildsets buildsetsubmitted buildslave buildslaves buildstarted buildstatus buildstep buildstepmixin buildsteps buildstepstatus buildworker builtin builtins bulid bulider buliderid butbucket bwaitall bwverbose bytestring bytestrings bzip bzr caas calderone callbacked callconsumer calllater calllater's callremote callwhenrunning cancelcleanshutdown cancelled cancelling canonicalize canstartbuild canstartwithworkerforbuilder cb cbc cbd cd ce cfg cgi chainedprotocolfactory changeadded 
changedict changefilter changehookresource changehorizon changeid changeids changemaster change's changesconnectorcomponent changeset changesource changesourceid changesources charset charsets chdict chdir chdir'ed checkconfig checkfirst checkin checkip checkoutdelay checkworkerhascommand childs chmod choosenextbuild christopher chroot chunkify ci clientconnectionlost clientid clientids clientsecret clobberonbranchchange clobberonfailure closestdin cls cmake cmd cmdclass cmdline cmdref cmopared cmp codebase codebasegenerator codebases codec collapserequests comitters commandcomplete commandinterrupted commandname commitish committer committers committer's compability comparablemixin comparaison comparator compat compatiblity completers compresslog computesourcerevision comspec concat conchc config configfile configfunc configs configurator configurators configjson contextvars conn connectedness connectionmade connectionpool contrib copyablefailure coroutine coroutines cors cowbuilder cppcheck cpu cpython cray createabsolutesourcestamps createmaster createsummary createworker cred credc creds cribed croniter css ctime ctrl currentbuilds currentstep cvar cvs cvsroot daemonize dan darcs datafields datareceived datetime dateutil's dave david dbapi DBConnector dbpool dbs ddl de debian debounce debounced debouncer debouncing decoderc deepcopy defaultbranch defaultdeny defaultwhenfalse deferredlist deferred's deferreds defertothread delayedcall denormalized dependabot deprecatedmoduleattribute descriptionsuffix desynchronization dev DEVNULL dict dicts dicttype diff diffinfo diffs dir directoryenterpattern directoryupload dirname dirs dirwatcher disownserviceparent distutils distros dn dnotify doclobber docname doconfig docopy docstring doctest doctests documentclass dom donovan dostepif downloadfile downloadstring dradez dss du dup durations durchmesser dustin ee eg egypt encoding encodings enforcechosenworker enginestrategy enosuch ensurehasssl entitytype env environ eof 
epydoc eqconnectionpool eric errback errback'd errbacked errbacks err'ed errno erroring errormessages errortoo et evaluatecommand exe execfile executables exe's expanduser expcalls expectcomplete expectedbuildset expectedfailures expectedmessages expectedreturn expectmessage expectsuccess extractall extrapackages eyal failedtogetperspective fakebotmaster fakeconnector fakedb fakemaster fakemethod fakeopen fakeserviceparent fakeworkerworker fallback fallbacks favicon fbi fd fdescfs fdopen fds featureful feb fifo fileloc filename FILENAME filenames filepath filesystem filewriter findbuilderid findchangesourceid findsomethingid finduserbyattr firewaall firstname fixme fixup fk fks flunkonfailure flunkonwarnings flushloggederrors flushwarnings fn fnmatch foo foobar foogit foreignkey formatargspec formatinterval fp fqdn freebsd freshcvs fri frontend fromchdict fromdir fs fullname func funcname gardiner gatherresults gayton gc gcc gc'd gerrit getaddress getargspec getattr getbuild getbuilderid getbuildrequest getbuildrequests getbuildsetproperties getchange getchangeclassifications getchangefromssid getchanges getchangesource getchangesources getcodebase getcommand getconfigfilefromtac getdescription getevent getexitcodeprocess getfromkwargs getlastfinishedbuild getlog getloginresource getloglines getname getnextbuildtime getnumber getpage getperspective getpid getppid getprocessoutput getprocessoutputandvalue getprocessoutputandvaluewithinput getpwnam getrenderingfor getresponsibleusersforbuild getresults getrlimit getrootobject getschedulers getservice getslaveinfo getsourcestamp getspec getstate getsteps getter gettestresults gettext gettimernameforchange geturlforbuild geturlforthing getworker getworkerinfo gf gib giger gitattributes github gitlab gitorious gitpoller globals gmail gmt google googlegroups googlesource gopts gotchange gotperspective gotrootobject gpo gql gracefulshutdown graphql gravatar groupmemberpattern gtk guanlecoja gz gzip haltonfailure hammond 
handlejobfile hardcoded hartwell hasattr hashable hashcolumns hashedcolumns haskey hasproperty hermann herve hg hinputwritetemp hmac hoc hostname hostnames houtreadtemp howto hprocess hthread html http httpclientservice https hvac ibuildrequestcontrol ibuildrequeststatus ibuildsetstatus ibuildstatus ichange ico icredentialschecker ics i'd ident idn idsdeferred ie iff i'm impl ina incrementing indextemplates indices influxdb infos init initializers initialstdin inline inlinecallbacks innodb inrepo inrepos inserttestdata installdir instanceuri instantiation instantiations insubstantiate insubstantiated insubstantiating insubstantiation insubstantiations integrations internet interoperability interruptable interruptsignal intialization intranet invariants io ip ippolito iproperties iproperty iprops iprotocol irc ircclient irealm irenderable isavailable isbusy isdir ish isinstance ismessageneeded isort isotoma istatus istatusreceiver isworkerdir itamar iter iterable ivar ivars jã jacobs james jason java javascript jellyable jessica jinja jmason jobdir jobfiles jobid joe jonathan js json jsonable jsonapi json'd jsonrc jsonrpc jwt keepalive keepaliveinterval keepalives keepalivetimeout keepstamp keepstderr keepstdinopen keepstdout kevin keyerror keyfile keypair keypairs keyring klass knielsen kube kubectl kubernetes kv kvm kwarg kwargs lange largebinary lastactuation lastlog lastname lastresort lasttrigger latentworkerforbuilder latin lazylogfiles ldap ldapuserinfo lefkowitz legator len libvirt lifecycle lintian linux listdir listentcp liveness loadconfig localhost localname localtime lockaccess lockclass lockfiles lockid logchunk logchunks logentries logentry logenviron logfile logfiles logfinished logid login logmsg logname logout logrotate logstarted longaccess lookahead lookup lookups loopback LoopingCall loseconnection lostremote lotem lp lru mailchangesource maildir maildirs maintainance makedirs makefile makerbase makeremoteshellcommand maketelnetprotocol manba 
mangold manifestbranch manifestfile manifestoverrideurl manifesturl maraujop marius markh marshalled maruel mary massachusetts masterdest masterfqdn masterid masterlock mastersideworker masterstatus matcher matchers matthew maxcount maxcountforworker maxdelay maxint maxlength maxsize maxthreads maxtime maybeaddwarning maybebuildsetcomplete maybedeferred maybestartbuild maybestartbuilds maybestartbuildsforbuilder maybestartbuildsforworker maybestartbuildson maybestartbuildsonbuilder mb md meijer melo merchantability mesos messagereceived metadata meth methodtemplate methodundertest microsoft middleware miguel milner mimic mingw minidom minidom's minikube minimalistic mintime mispelling misr mit mixedcase mixin mkdir mkstemp mobarak moduleauthor mon monkeypatch monkeypatches moshe moto mq mr msc msdn msg msgbody msys mtime mtn mtr mtrlogobserver multi multiline multimaster multiservice multiservice's multithreading mumbo munge mydashboard mysql mysqlclient mysqld mysql's namedservice namedtuple namespace namespaces nat nestedlist netstring netstrings newcred newmsg nextbuild nextworker nfs ngrok niklaus nobody's nodaemon nonexisting noninfringement noop noqa noreconfig noreply nosuchmethod notabranch notifyondisconnect nov novaclient noworkererror nstderr nt nullable num oauth oauthconf objectid offline oid ok oldcred online onlinepubs onlyifchanged onlyimportant openfile opengroup oposite optfile optflags optparameters ordermatters ored org orgs orm os osaf osx otherstatusreceivers overridable overridebuildsetmethods pam paramiko params parms parseable parsegotrevision parsejob passwd patchable patchid patchlevel patchset patchup pathnames pathpattern pathpatterns pathsep paul pauseproducing pavel pb pbamanger pbconnectionlost pbmanager pbmanager's pbuilder pendingbuilds pergamenshchik periodicbuildtimer peticolas pfactory pid pidfile pidfiles pids pitrou pluggableauthenticationmoduleschecker plugin plugins pn png pollatlaunch pollatreconfigure poller pollers 
pollingchangesource pollinterval popnextbuild portstr portstrs posix postdata postgres postoptions postpath ppl pragma pre precompute preferlastchangedrev prefilter preflight prepend prepended prepends preston printstatus prioritizebuilders privmsgs proc processdone processended processterminated procgroupprocess produceevent programmatically progressmetrics proj projectname proto prs pserver pty ptys pullrequests pushjet pwd py py's pycrypto pyd pyflakes pygments pyjade pylint pylint's pyparsing pypugjs pythonpath qa queriable qmail quickmode quickstart quiesce radez raiseexpectationfailure ralph raml rc reactorname readlines readsourcedata readthedocs realdatabasemixin realmasterlock realworkerlock reasonstring recoded recompress reconf reconfig reconfig'd reconfigresource reconfigs reconfigservice reconfigservicebuilders reconfigservicewithconstructorargs reconfigservicewithsibling reconfigurability reconfigurable reconfiguring reconnection recurse redhat reentrant refactor refcount refetch regex regen regexes regexp regexps regexs registeradapter reid rejectedworkers releaselocks releasers remoteaddresses remotecommand remotecomplete remotegetworkerinfo remote's remoteshellcommand remotestep remotetransfer removeservice renderable renderables renderer renderers repl repo repodownloads repoll reponame reponds repourl repr req requestavatar requestavatarid requestcancelled requestjson requestsubmitted requeue requeued requiredargs reschedulenextbuild resetmocks resourceneedsreconfigs resourcetype restrmatcher restructuredtext resultdir resultsdeferred resultsmixin resultspec resumeproducing retreive retryable retryfetch reviewcb revisionfor revlink revno revnum revset revsets rewrap rewrapped rewraps rf rgen rhead rieder riley rmdir rmdirrecursive rmfile rmtree robocopy routingkey rp rpc rpmdir rpmlint rpms rsa rsh rst rstrip rtd rtype rtypes ruamel runcommand runinteraction runprocess runquery runstep's runtest runtime rv sa saas safecat saturday sb sched 
schedulerid schedulernames scheduler's schwarzian scm sean secpol secretkey sectionauthor seealso sendbuilderlist sendchange sendcompletionmessage sendmail sendstatus sendstderr sendstdout sendupdate seqdiag servicemanager setbuilderlist setchangesourcemaster setcommand setdefault setid setpgrp setproperty setserviceparent setstate settext setupbasedir setupbuildresults setupmailnotifier setupsite setupsourcestep setupstep setuptools sg sha shellcommand shlex showblamelist shtull shutil sig sighup sigkill signalled signalling signame sigterm sigtermtime sigtermtimeout simms simplifiable singlebranchscheduler slavedest slavesrc slowdb smallinteger smilner somecommand somedays sourcedata sourcedir sourceforge sourcestamp sourcestampid sourcestamps sourcestampset sourcetamp spam spamassassin spawnprocess specdir specfile specfiles spellcheck split splitter spulec sql sqlalchemy sqlite sqlites src srcdir srcrpmdir srpm ssdict sse sshd sshpublickeydatabase ssid ssids ssl stackexchange stacklevel stackoverflow stacktrace startbuild startcommand startgettingperspective startlogin startmissingtimer startservice startup startvc startworker stat'd statm statusdict stderr stdin stdlib stdout stepetaupdate stepfinished stepid stepstarted steve stickysidebar stopbuild stop's stopservice stopworker str streamlog strerror strftime stringreceived stringsmatcher striplevel strports strptime sts subarg subclassability subclassed subclassers subclasses subclassing subcommand subcommands subdir subdirectories subdirectory subfields sublicense submittedat submitters submodule submodules suboption suboptions subprocess subprocesses subprotocol subquery substring subunit summarycb suppressionfile suppressions suppresstags svc svciddeferred svn svnpoller svn's svnuricanonicalize svnversion sw swartz symlink syncallbranches syncmail syncQuietly syntaxerror sys tac taichino tarball targetname tbl tcp tcpdump teardown teardownbasedir tempfile terminateprocess testcase testcases testchanges 
testnamelimit testpath testslave testsuite texinfo textbox textlimit tf th that'll thijs thingie thingy thomas threadpool ths thurs thursday tid timedelta timestamp timezone timezones tld tls tmp tmpbb tmpl tochanges toctree todir todo tokenization tomprince topdir topfile toplevel tport trac traceback trauring travis treestabletimer treestabletimers treq trialargs trialmode triemstra triggerable ttl tue tues tuesday tuple Tuple tuples twistd twistedmatrix twisted's txgithub txrequest txrequests txt tz ubuntu ufffd ui uid umask un unabbreviated unambiguity unclaim unclaimedbrdicts unclosed unconfigure unconfigured underpowered unencrypted unexpectedsuccesses ungrouped unhandled unhighlighted unicast unicode unicoded unicodified unicodify unique'd unittest unix unixpassworddatabase unlink unlinked unparsable unparseable unpatch unpause unpaused unpausing unregister unregisters unserialized unsubscribe unsubscribed unsubscriptable unsubscription unsubsribed unsubstantiated untarring untracked upcall upcloud updatable updatebuilderlist updatebuildsummarypolicy updatefromkwargs updateinterval updatenum updateof updatesession updatesummary updatesummary's updatetarballage updateuser uploaddirectory uploadfile uppercased uri url urldecoded urllib urlopen urlparse urls urltext usa usageerror usechange uselog useprocgroup usepty usererrors username usetestcasenames usetls usr utc utf util utils uuid validator valuefromstring vc vcs vcsrevision vcx vda verifymessage verifymessages versioned versioning viewspec viff virtualenv visualstudio vm vms waitforbuilderlist waitforfinish waituntilavailable wal wamp wannabuild wantdata wantdb wantmq warner warningextractor warningpattern warnonfailure warnonwarnings wb weakref webclient webdriver webhook webhooks webserver websocket wehn werzeug wfb wget whatever's whereclauses whitespace wiki wikipedia wildcard winerror wireshark wkdir workdir workdirs workerbuilddir workerdest workerdir workerenvironment workerforbuilder 
workerforbuilders workerlock workermanager workername workernames workersrc workersrcs workerversion workerworker workspaces worststatus writesourcedata writestdin ws www xbsd xda xhtml xml xxab xxx yaml yieldmetricsvalue za zadka zope buildbot-3.4.0/common/coveragerc000066400000000000000000000014331413250514000166510ustar00rootroot00000000000000[report] # Regexes for lines to exclude from consideration exclude_lines = # Have to re-enable the standard pragma pragma: no cover # Don't complain about missing debug-only code: def __repr__ if self\.debug # Don't complain if tests don't hit defensive assertion code: raise AssertionError raise NotImplementedError # Don't complain if non-runnable code isn't run: if 0: if __name__ == .__main__.: # 'pass' generally means 'this won't be called' ^ *pass *$ # conditionals on twisted versions aren't coverable if twisted.version include = master/* worker/* omit = # omit all of our tests common/* */test/* # templates cause coverage errors */templates/* master/buildbot/scripts/buildbot_tac.tmpl buildbot-3.4.0/common/download_release.py000066400000000000000000000050161413250514000204700ustar00rootroot00000000000000#!/usr/bin/env python3 import os import requests import yaml def download(session, url, fn): if os.path.exists(fn): print('Removing old file {}'.format(fn)) os.unlink(fn) print('Downloading {} from {}'.format(fn, url)) with open(fn, 'wb') as f: r = session.get(url, stream=True) r.raise_for_status() for c in r.iter_content(1024): f.write(c) def main(): with open(os.path.expanduser("~/.config/hub")) as f: conf = yaml.safe_load(f) token = conf['github.com'][0]['oauth_token'] s = requests.Session() s.headers.update({'Authorization': 'token ' + token}) r = s.get("https://api.github.com/repos/buildbot/buildbot/releases/latest") r.raise_for_status() r = r.json() tag = r['name'] upload_url = r['upload_url'].split('{')[0] assets = s.get(("https://api.github.com/repos/buildbot/buildbot/releases/{id}/assets" ).format(id=r['id'])) 
assets.raise_for_status() assets = assets.json() os.makedirs('dist', exist_ok=True) for url in (a['browser_download_url'] for a in assets): if 'gitarchive' in url: raise Exception('The git archive has already been uploaded. Are you trying to fix ' 'broken upload? If this is the case, delete the asset in the GitHub ' 'UI and retry this command') if url.endswith(".whl") or url.endswith(".tar.gz"): fn = os.path.join('dist', url.split('/')[-1]) download(s, url, fn) # download tag archive url = "https://github.com/buildbot/buildbot/archive/{tag}.tar.gz".format(tag=tag) fn = os.path.join('dist', "buildbot-{tag}.gitarchive.tar.gz".format(tag=tag)) download(s, url, fn) sigfn = fn + ".asc" if os.path.exists(sigfn): os.unlink(sigfn) # sign the tag archive for debian os.system("gpg --armor --detach-sign --output {} {}".format(sigfn, fn)) sigfnbase = os.path.basename(sigfn) r = s.post(upload_url, headers={'Content-Type': "application/pgp-signature"}, params={"name": sigfnbase}, data=open(sigfn, 'rb')) print(r.content) fnbase = os.path.basename(fn) r = s.post(upload_url, headers={'Content-Type': "application/gzip"}, params={"name": fnbase}, data=open(fn, 'rb')) print(r.content) # remove files so that twine upload do not upload them os.unlink(sigfn) os.unlink(fn) if __name__ == '__main__': main() buildbot-3.4.0/common/flake8rc000066400000000000000000000020721413250514000162300ustar00rootroot00000000000000[flake8] show-source = yes statistics = yes count = yes max-line-length = 100 # List of currently ignored PEP8 issues. Some of them definetely should be # enabled in future. 
# # E122 continuation line missing indentation or outdented # E123 closing bracket does not match indentation of opening bracket's line # (pep8 seems to misdiagnose this) # E126 continuation line over-indented for hanging indent # E128 continuation line under-indented for visual indent # E211 whitespace before '(' # E711 comparison to None should be 'if cond is None:' # E712 comparison to False should be 'if cond is False:' or 'if not cond:' # E721 do not compare types, use 'isinstance()' # E741 ambiguous variable name # W503 line break before binary operator # W504 line break after binary operator # E731 do not assign a lambda expression, use a def # H302 only import modules (we also import classes) # H301 one import per line (we rely on isort for that) # H306 sorted import (we rely on isort for that) ignore = E122,E123,E126,E128,E211,E711,E712,E721,E731,E741,W503,W504,H302,H306,H301, buildbot-3.4.0/common/gather_dependabot.py000077500000000000000000000031531413250514000206230ustar00rootroot00000000000000#!/usr/bin/env python3 # this script takes all the PR created by dependabot and gather them into one import argparse import os import requests import yaml def main(): parser = argparse.ArgumentParser() parser.add_argument('--remote', type=str, default=None, help='The name of the remote to use for pull request. 
' + 'Uses hub default if not specified') args = parser.parse_args() with open(os.path.expanduser("~/.config/hub")) as f: conf = yaml.safe_load(f) token = conf['github.com'][0]['oauth_token'] os.system("git fetch https://github.com/buildbot/buildbot master") os.system("git checkout FETCH_HEAD -B gather_dependabot") s = requests.Session() s.headers.update({'Authorization': 'token ' + token}) r = s.get("https://api.github.com/repos/buildbot/buildbot/pulls") r.raise_for_status() prs = r.json() with open("/tmp/hub_pr_message", 'w') as f: f.write("gather dependabot PRs\n\n") for pr in prs: if 'dependabot' in pr['user']['login']: print(pr['number'], pr['title']) f.write(f"#{pr['number']}: {pr['title']}\n") os.system( "git fetch https://github.com/buildbot/buildbot " f"refs/pull/{pr['number']}/head") os.system("git cherry-pick FETCH_HEAD") if args.remote is not None: os.system(f'git push {args.remote} gather_dependabot') os.system("hub pull-request -b buildbot:master -p -F /tmp/hub_pr_message -l dependencies") if __name__ == '__main__': main() buildbot-3.4.0/common/hooks/000077500000000000000000000000001413250514000157305ustar00rootroot00000000000000buildbot-3.4.0/common/hooks/post-commit000077500000000000000000000000411413250514000201240ustar00rootroot00000000000000common/validate.sh HEAD~ --quick buildbot-3.4.0/common/maketarball.sh000077500000000000000000000007101413250514000174210ustar00rootroot00000000000000#!/bin/bash set -e pkg=$1 ( cd ${pkg} rm -rf MANIFEST dist if [ ${pkg} == "master" ] || [ ${pkg} == "worker" ]; then python setup.py sdist # wheels must be build separately in order to properly omit tests python setup.py bdist_wheel else # retry once to workaround instabilities python setup.py sdist bdist_wheel || (git clean -xdf; python setup.py sdist bdist_wheel) fi ) cp ${pkg}/dist/* dist/ buildbot-3.4.0/common/merge_and_pep8.sh000066400000000000000000000050371413250514000200230ustar00rootroot00000000000000#!/bin/bash function status() { _ESC=$'\e' 
LTCYAN="$_ESC[1;36m" NORM="$_ESC[0;0m" echo "" echo "${LTCYAN}-- ${*} --${NORM}" } function newshell() { echo "I will launch a new shell. When you are done, just exit the shell" echo "and I will continue the process" bash echo "ok lets continue" } function unittests() { status run the whole test suite as a double check find . -name \*.pyc -exec rm {} \; trial --reporter=text buildbot_worker buildbot if [[ $? != 0 ]] then echo "Oups.. the tests are failing, better resolve them now before the big autopep8 work" newshell fi } if [ $# -eq 0 ]; then echo "USAGE: common/merge_and_pep8.sh " echo " This script will merge your branch to master" echo " and apply pep8" echo "Run this if you want to contribute a branch based on pre-autopep8 rework" exit 1 fi MASTER=$1 PREPEP8=`git log $MASTER --grep "PRE_PEP8_COMMIT" --pretty="format:%H"` POSTPEP8=`git log $MASTER --grep "POST_PEP8_COMMIT" --pretty="format:%H"` status "merging against last commit before autopep8" git merge $PREPEP8 if [[ $? != 0 ]] then echo "Please fix the merge conflicts between your branch, and last commit before autopep8!" newshell fi status "merging against first commit after autopep8 and take our version when there are conflicts" git merge $POSTPEP8 # autopep8 takes 1h30 to run on the whole codebase, so let git resolve the obvious merge conflicts. # using -s recursive -x ours works at chunk level, which proved not to work for nine -> master merge if [[ $? 
!= 0 ]] then status "resolve conflicts by checking out ours file" git status --porcelain |egrep "^DU" | awk '{print $2}' | xargs git rm git status --porcelain |egrep "^UU" | awk '{print $2}' | xargs git checkout --ours git status --porcelain |egrep "^UU" | awk '{print $2}' | xargs git add git commit --no-edit fi unittests status "re-apply autopep8 on the files modified by our branch" git diff --name-only $POSTPEP8 | ( # there is no real use of displaying output of autopep8 # so we just display a simple progress status FILES=() while read filename; do FILES+=($filename) done n=0 for filename in ${FILES[@]}; do n=$(($n + 1)) echo -n $(($n * 100 / ${#FILES[@]}))% echo " processing $filename" echo "$filename" | bash common/style_check_and_fix.sh >&/dev/null done ) git commit -s -a -m "re-auto-pep8" unittests status "finally merge to latest version of master" git merge $MASTER buildbot-3.4.0/common/porttostable.py000066400000000000000000000026071413250514000177060ustar00rootroot00000000000000from __future__ import absolute_import from __future__ import division from __future__ import print_function import os from subprocess import CalledProcessError from subprocess import check_output import requests import yaml s = requests.Session() with open(os.path.expanduser('~/.config/hub')) as f: config = yaml.load(f)['github.com'][0] s.auth = config['user'], config['oauth_token'] os.system("git fetch --all") r = s.get("https://api.github.com/search/issues?q=label:\"port%20to%20stable\"+repo:buildbot/buildbot") # noqa pylint: disable=line-too-long to_port = r.json() summary = "" for pr in to_port['items']: r = s.get("https://api.github.com/repos/buildbot/buildbot/pulls/{number}/commits".format(**pr)) commits = r.json() for c in commits: title = c['commit']['message'].split("\n")[0] try: check_output("git cherry-pick {sha} 2>&1".format(**c), shell=True) except CalledProcessError as e: os.system("git diff") os.system("git reset --hard HEAD 2>&1 >/dev/null") if '--allow-empty' in 
e.output: continue if 'fatal: bad object' in e.output: continue print("cannot automatically cherry-pick", pr['number'], c['sha'], title, e.output) else: summary += "\n#{number}: {title}".format(number=pr['number'], title=title, **c) print(summary) buildbot-3.4.0/common/pylintrc000066400000000000000000000256151413250514000164050ustar00rootroot00000000000000[MASTER] # Specify a configuration file. #rcfile= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Profiled execution. profile=no # Add files or directories to the blacklist. They should be base names, not # paths. ignore= # Pickle collected data for later comparisons. persistent=no # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. load-plugins= [MESSAGES CONTROL] # For now disable bunch of checks that does not pass. Some of them should be # re-enabled and reported issues fixed, while most are bugs in pylint and could # be re-enabled when those are fixed. # Following are the checks we don't care about, and thus should remain disabled # # blacklisted-name # missing-docstring # too-many-lines # no-self-use # duplicate-code # too-many-ancestors # too-many-instance-attributes # too-few-public-methods # too-many-public-methods # too-many-return-statements # too-many-branches # too-many-arguments # too-many-locals # too-many-statements # abstract-class-not-used # abstract-class-little-used # exec-used # star-args # deprecated-module # fixme # global-variable-undefined # unused-argument # unpacking-non-sequence # maybe-no-member # "bad-continuation" disabled due to conflict with flake8 (and PEP8) # See # flake8 wants: # func("..." # ) # pylint wants: # func("..." 
# ) disable= blacklisted-name, invalid-name, missing-docstring, too-many-lines, no-self-use, duplicate-code, too-many-ancestors, too-many-instance-attributes, too-few-public-methods, too-many-public-methods, too-many-return-statements, too-many-branches, too-many-arguments, too-many-locals, too-many-statements, abstract-class-not-used, abstract-class-little-used, deprecated-module, fixme, global-variable-undefined, unused-argument, maybe-no-member, locally-disabled, bad-classmethod-argument, method-hidden, no-name-in-module, no-member, not-callable, too-many-function-args, unexpected-keyword-arg, redundant-keyword-arg, import-error, import-outside-toplevel, exec-used, star-args, unreachable, dangerous-default-value, pointless-statement, pointless-string-statement, expression-not-assigned, useless-else-on-loop, bad-builtin, attribute-defined-outside-init, protected-access, arguments-differ, signature-differs, abstract-method, super-init-not-called, no-init, non-parent-init-called, bad-indentation, global-statement, unused-variable, redefined-outer-name, redefined-builtin, unidiomatic-typecheck, undefined-loop-variable, unbalanced-tuple-unpacking, broad-except, bad-open-mode, superfluous-parens, no-self-argument, no-value-for-parameter, interface-not-implemented, bad-continuation, keyword-arg-before-vararg, unsubscriptable-object, useless-object-inheritance, deprecated-method, useless-return, no-else-return, assignment-from-none, comparison-with-callable, comparison-with-itself, assignment-from-no-return, stop-iteration-return, old-style-class, redefined-variable-type, deprecated-lambda, bad-string-format-type, # https://github.com/PyCQA/pylint/issues/2631 duplicate-string-formatting-argument, cyclic-import, # only happens when pylint is ran in non-parallel mode wrong-import-order, # we use isort for import orders raise-missing-from, # TODO: remove when we no longer have Python 2 code super-with-arguments, # TODO: remove when we no longer have Python 2 code [REPORTS] 
# Set the output format. Available formats are text, parseable, colorized, msvs # (visual studio) and html. You can also give a reporter class, eg # mypackage.mymodule.MyReporterClass. output-format=text # Change the default error message template. msg-template={path}:{line} {msg} [{symbol}] # Include message's id in output include-ids=yes # Include symbolic ids of messages in output symbols=no # Put messages in a separate file for each module / package specified on the # command line instead of printing them on stdout. Reports (if any) will be # written in a file name "pylint_global.[txt|html]". files-output=no # Tells whether to display a full report or only the messages reports=no # Python expression which should return a note less than 10 (10 is the highest # note). You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. This is used by the global evaluation report # (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Add a comment according to your evaluation note. This is used by the global # evaluation report (RP0004). comment=no [SPELLING] # Spelling dictionary name. # If this value will be non-empty (e.g. 'en_US') and pyenchant will not be # installed, pylint will fail. # If this will be left empty pylint will ignore all spelling errors. spelling-dict=en_US # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains private dictionary; one word per line. # Path relative to current working directory. spelling-private-dict-file=../common/code_spelling_ignore_words.txt # Tells whether to store unknown words to indicated private dictionary in # --spelling-private-dict-file option instead of raising a message. spelling-store-unknown-words=no [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. 
notes=FIXME,XXX,TODO [SIMILARITIES] # Minimum lines number of a similarity. min-similarity-lines=4 # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=no [FORMAT] # Maximum number of characters on a single line. max-line-length=100 # Maximum number of lines in a module max-module-lines=1000 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' [TYPECHECK] # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # List of classes names for which member attributes should not be checked # (useful for classes with attributes dynamically set). ignored-classes=SQLObject # When zope mode is activated, add a predefined set of Zope acquired attributes # to generated-members. zope=no # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E0201 when accessed. Python regular # expressions are accepted. 
generated-members=REQUEST,acl_users,aq_parent [BASIC] # Will be removed in PyLint 2.0 # Required attributes for module, separated by a comma #required-attributes= # List of builtins function names that should not be used, separated by a comma bad-functions=map,filter,apply,input # Regular expression which should only match correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Regular expression which should only match correct module level names const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Regular expression which should only match correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ # Regular expression which should only match correct function names function-rgx=[a-z_][a-zA-Z0-9]{2,30}$ # Regular expression which should only match correct method names method-rgx=[_]{0,2}[a-z][a-zA-Z0-9]{2,30}[_]{0,2}$ # Regular expression which should only match correct instance attribute names attr-rgx=[a-z_][a-zA-Z0-9]{2,30}$ # Regular expression which should only match correct argument names argument-rgx=[a-z_][a-z0-9_]{2,30}$ # Regular expression which should only match correct variable names variable-rgx=[a-z_][a-z0-9_]{2,30}$ # Regular expression which should only match correct list comprehension / # generator expression variable names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Good variable names which should always be accepted, separated by a comma good-names=i,j,k,ex,Run,_ # Bad variable names which should always be refused, separated by a comma bad-names=foo,bar,baz,toto,tutu,tata # Regular expression which should only match functions or classes name which do # not require a docstring no-docstring-rgx=__.*__ [VARIABLES] # Tells whether we should check for unused import in __init__ files. init-import=no # A regular expression matching the beginning of the name of dummy variables # (i.e. not used). dummy-variables-rgx=_|dummy # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. 
additional-builtins= [CLASSES] # This option will be removed in PyLint 2.0. # List of interface methods to ignore, separated by a comma. This is used for # instance to not check methods defines in Zope's Interface base class. # ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__,__new__,setUp # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=mcs [IMPORTS] # Deprecated modules which should not be used, separated by a comma deprecated-modules=regsub,string,TERMIOS,Bastion,rexec # Create a graph of every (i.e. internal and external) dependencies in the # given file (report RP0402 must not be disabled) import-graph= # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled) ext-import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled) int-import-graph= [DESIGN] # Maximum number of arguments for function / method max-args=5 # Argument names that match this expression will be ignored. Default to name # with leading underscore ignored-argument-names=_.* # Maximum number of locals for function / method body max-locals=15 # Maximum number of return / yield for function / method body max-returns=6 # Maximum number of branch for function / method body max-branchs=12 # Maximum number of statements in function / method body max-statements=50 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of attributes for a class (see R0902). 
max-attributes=7 # Minimum number of public methods for a class (see R0903). min-public-methods=2 # Maximum number of public methods for a class (see R0904). max-public-methods=20 [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "Exception" overgeneral-exceptions=Exception buildbot-3.4.0/common/smokedist-download-compatible-chromedriver.py000077500000000000000000000060161413250514000256000ustar00rootroot00000000000000#!/usr/bin/env python3 import argparse import re from subprocess import DEVNULL from subprocess import check_call from subprocess import check_output def parse_chrome_major_version(output): for line in output.splitlines(): # e.g.: # Chromium 69.0.3497.81 Built on Ubuntu , running on Ubuntu 18.04 # Google Chrome 70.0.3538.77 m = re.match(r'.*[cC]hrom.*\s(\d+)\.(\d+)\.(\d+)(?:\.\d+|).*', line) if m is not None: return int(m.group(1)), int(m.group(2)), int(m.group(3)) return None def get_chrome_version(browsers): for browser in browsers: try: print([browser, ' --version']) output = check_output([browser, ' --version'], stderr=DEVNULL) output = output.decode('utf-8', errors='ignore') version = parse_chrome_major_version(output) if version is not None: return (browser, version) except Exception: pass return (None, None) def main(): parser = argparse.ArgumentParser( prog='smokedist-download-compatible-chromedriver') parser.add_argument('manager', type=str, help="Path to the webdriver-manager") parser.add_argument('browsers', type=str, nargs='+', help="The browsers to get version info from. 
The first " "existing browser from the list will be used") args = parser.parse_args() try: browser, version = get_chrome_version(args.browsers) if browser is None: raise Exception('Could no get browser version') print('Using {0} release {1}'.format(browser, version)) chrome_major, chrome_minor, chrome_patch = version if chrome_major >= 73: # webdriver manager requires us to provide the 4th version component, however does not # use it when picking the version to download chromedriver_version = '{}.{}.{}.0'.format(chrome_major, chrome_minor, chrome_patch) else: chrome_major_to_chromedriver = { 73: '2.46', 72: '2.46', 71: '2.46', 70: '2.45', 69: '2.44', } if chrome_major not in chrome_major_to_chromedriver: raise Exception('Unknown Chrome version {}.{}.{}'.format( chrome_major, chrome_minor, chrome_patch)) chromedriver_version = chrome_major_to_chromedriver[chrome_major] print('Using chromedriver release {0}'.format(chromedriver_version)) cmd = [args.manager, 'update', '--versions.chrome', chromedriver_version, '--versions.standalone', '3.141.59'] print('Calling: ' + ' '.join(cmd)) check_call(cmd) return except Exception as e: print(str(e)) print('Failed to get compatible chromedriver version, using latest') check_call([args.manager + ' update'], shell=True) if __name__ == '__main__': main() buildbot-3.4.0/common/smokedist-www-backwards-compat.py000077500000000000000000000064701413250514000232350ustar00rootroot00000000000000#!/usr/bin/env python3 import argparse import json import os import shutil import subprocess def checkout_buildbot_at_revision(curr_buildbot_root, test_buildbot_root, revision): if os.path.isdir(test_buildbot_root): print('Removing {}'.format(test_buildbot_root)) shutil.rmtree(test_buildbot_root) os.makedirs(test_buildbot_root) subprocess.check_call(['git', 'clone', curr_buildbot_root, test_buildbot_root]) subprocess.check_call(['git', 'reset', '--hard', revision], cwd=test_buildbot_root) def install_local_dependencies(curr_buildbot_root, 
test_buildbot_root): packages = [ # data_module must be first, then guanlecoja-ui, as other packages depend on them 'www/data_module', 'www/guanlecoja-ui', 'www/base', 'www/codeparameter', 'www/console_view', 'www/grid_view', 'www/waterfall_view', 'www/wsgi_dashboards', ] for package in packages: package_root = os.path.join(test_buildbot_root, package) package_json_path = os.path.join(package_root, 'package.json') with open(package_json_path) as in_f: contents = json.load(in_f) replacements = [ ('guanlecoja-ui', 'link:' + os.path.join(curr_buildbot_root, 'www/data_module')), ('buildbot-data-js', 'link:' + os.path.join(curr_buildbot_root, 'www/guanlecoja-ui')), ('buildbot-build-common', 'link:' + os.path.join(curr_buildbot_root, 'www/build_common')), ] for dep_key in ['dependencies', 'devDependencies']: if dep_key not in contents: continue deps = contents[dep_key] for package, target in replacements: if package in deps: deps[package] = target with open(package_json_path, 'w') as out_f: json.dump(contents, out_f, indent=4, sort_keys=True) def run_test(test_buildbot_root): subprocess.check_call(['make', 'tarballs'], cwd=test_buildbot_root) subprocess.check_call(['common/smokedist.sh', 'whl'], cwd=test_buildbot_root) def main(): parser = argparse.ArgumentParser(prog='smokedist-www-backwards-compat') parser.add_argument('revision', type=str, help="A commit or tag that is accepted by git to test against") parser.add_argument('--tmp-path', type=str, default=None, help="The path to checkout old Buildbot version to") parser.add_argument('--dont-clean', action='store_true', default=False, help="If set, the temporary buildbot checkout will not be deleted") args = parser.parse_args() curr_buildbot_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) if args.tmp_path is not None: test_buildbot_root = args.tmp_path else: test_buildbot_root = os.path.join(curr_buildbot_root, 'tmp-buildbot-smokedist') print('Using {} as temporary path for buildbot 
checkout'.format(test_buildbot_root)) checkout_buildbot_at_revision(curr_buildbot_root, test_buildbot_root, args.revision) install_local_dependencies(curr_buildbot_root, test_buildbot_root) run_test(test_buildbot_root) if not args.dont_clean: shutil.rmtree(test_buildbot_root) if __name__ == '__main__': main() buildbot-3.4.0/common/smokedist.sh000077500000000000000000000007771413250514000171610ustar00rootroot00000000000000#!/bin/bash if [ -z $1 ]; then suffixes="whl tar.gz" else suffixes=$1 fi set -e for suffix in $suffixes do VE=sandbox.$suffix rm -rf $VE if [ -z "$python" ]; then virtualenv --python python3 $VE else virtualenv --python python$python $VE fi . $VE/bin/activate pip install -U pip pip install mock requests flask pip install dist/buildbot-[0-9]*.$suffix pip install dist/buildbot?pkg*.$suffix pip install dist/*.$suffix smokes/run.sh done buildbot-3.4.0/common/validate.sh000077500000000000000000000200511413250514000167330ustar00rootroot00000000000000#! /bin/bash TEST='buildbot.test buildbot_worker.test' # if stdout is a terminal define some colors # validate.sh can be run as hook from GUI git clients, such as git-gui if test -t 1; then # plain _ESC=$'\e' GREEN="$_ESC[0;32m" MAGENTA="$_ESC[0;35m" RED="$_ESC[0;31m" LTCYAN="$_ESC[1;36m" YELLOW="$_ESC[1;33m" NORM="$_ESC[0;0m" fi ## parse options quick=false no_js=false help=false while [ $# -gt 0 ]; do case $1 in --quick) quick=true ;; --no-js) no_js=true ;; --help) help=true ;; -*) echo "$0: error - unrecognized option $1" 1>&2; help=true ;; *) REVRANGE="$1..HEAD" ;; esac shift done if $help; then echo "USAGE: common/validate.sh [oldrev] [--quick] [--no-js] [--help]" echo " This script will test a set of patches (oldrev..HEAD) for basic acceptability as a patch" echo " Run it in an activated virtualenv with the current Buildbot installed, as well as" echo " sphinx, flake8, mock, and so on" echo "To use a different directory for tests, pass TRIALTMP=/path as an env variable" echo "if --quick is passed 
validate will skip unit tests and concentrate on coding style" echo "if --no-js is passed validate will skip tests that require Node and NPM" echo "if --help is passed validate will output this message and exit" echo "if no oldrev is passed validate will assume master...HEAD" exit 1 fi [ -z "$REVRANGE" ] && REVRANGE="master..HEAD" status() { echo "${LTCYAN}-- ${*} --${NORM}" } ok=true problem_summary="" not_ok() { ok=false echo "${RED}** ${*} **${NORM}" problem_summary="$problem_summary"$'\n'"${RED}**${NORM} ${*}" } warning() { echo "${YELLOW}** ${*} **${NORM}" problem_summary="$problem_summary"$'\n'"${YELLOW}**${NORM} ${*} (warning)" } check_tabs() { git diff "$REVRANGE" | grep -q $'+.*\t' } check_long_lines() { # only check python files local long_lines=false for f in $(git diff --name-only --stat "$REVRANGE" | grep '.py$'); do # don't try to check removed files [ ! -f "$f" ] && continue if [ $(git diff "$REVRANGE" $f | grep -E -c '^\+.{80}') != 0 ]; then echo " $f" long_lines=true fi done $long_lines } check_yield_defer_returnValue() { local yields=false if git diff "$REVRANGE" | grep '+.*yield defer.returnValue'; then yields=true fi $yields } check_relnotes() { if git diff --exit-code "$REVRANGE" master/docs/relnotes/index.rst >/dev/null 2>&1; then return 1 else return 0 fi } check_sa_Table() { local bad_files=$(git grep -l 'sa\.Table(' | grep '\.py$' | grep -v '^master/buildbot/util/sautils\.py$') if [ -n "${bad_files}" ]; then echo "${YELLOW}Source files found containing 'sa.Table':${NORM}" for f in $bad_files; do echo " ${YELLOW}*${NORM} $f" done echo "${YELLOW}import buildbot.util.sautils and use sautils.Table instead.${NORM}" return 1 fi return 0 } run_tests() { if [ -n "${TRIALTMP}" ]; then TEMP_DIRECTORY_OPT="--temp-directory ${TRIALTMP}" else warning "please provide a TRIALTMP env variable pointing to a ramfs for 30x speed up of the integration tests" fi find . -name \*.pyc -exec rm {} \; trial --reporter text ${TEMP_DIRECTORY_OPT} ${TEST} } if ! 
git diff --no-ext-diff --quiet --exit-code; then not_ok "changed files in working copy" if ! $quick; then exit 1 fi fi # get a list of changed files, used below; this uses a tempfile to work around # shell behavior when piping to 'while' tempfile=$(mktemp -t tmp.XXXXXX) trap "rm -f ${tempfile}; exit 1" 1 2 3 15 git diff --name-only $REVRANGE | grep '\.py$' | grep -v '\(^master/docs\|/setup\.py\)' > ${tempfile} py_files=() while read line; do if test -f "${line}"; then py_files+=($line) fi done < ${tempfile} echo "${MAGENTA}Validating the following commits:${NORM}" git log "$REVRANGE" --pretty=oneline || exit 1 if ! $quick && ! $no_js; then for module in www/base www/console_view www/grid_view www/waterfall_view www/codeparameter www/wsgi_dashboards; do status "running 'setup.py develop' for $module" if ! (cd $module; python setup.py develop >/dev/null ); then warning "$module/setup.py failed; retrying with cleared libs/" rm -rf "$module/libs" (cd $module; python setup.py develop >/dev/null ) || not_ok "$module/setup.py failed" fi done else warning "Skipping JavaScript Tests" fi if ! $quick; then status "running Python tests" run_tests || not_ok "Python tests failed" elif [ -z `command -v cctrial` ]; then warning "Skipping Python Tests ('pip install cctrial' for quick tests)" else cctrial -H buildbot buildbot_worker || not_ok "Python tests failed" fi status "checking formatting" check_tabs && not_ok "$REVRANGE adds tabs" check_long_lines && warning "$REVRANGE adds long lines" check_yield_defer_returnValue && not_ok "$REVRANGE yields defer.returnValue" status "checking for use of sa.Table" check_sa_Table || warning "use (buildbot.util.)sautils.Table instead of sa.Table" status "checking for release notes" check_relnotes || warning "$REVRANGE does not add release notes" if [ ${#py_files[@]} -ne 0 ]; then status "checking import module convention in modified files" if [[ -z `command -v isort` ]]; then warning "isort is not installed" else if ! 
isort ${py_files[@]}; then warning "unable to run isort on modified files" else if ! git diff --quiet --exit-code ${py_files[@]}; then not_ok "isort made changes" fi fi fi fi status "running autopep8" if [[ -z `command -v autopep8` ]]; then warning "autopep8 is not installed" elif [[ ! -f common/flake8rc ]]; then warning "common/flake8rc not found" else changes_made=false for filename in ${py_files[@]}; do LINEWIDTH=$(grep -E "max-line-length" common/flake8rc | sed 's/ //g' | cut -d'=' -f 2) # even if we don't enforce errors, if they can be fixed automatically, that's better.. IGNORES=E123,E501,W6 # ignore is not None for SQLAlchemy code.. if [[ "$filename" =~ "/db/" ]]; then IGNORES=$IGNORES,E711,E712 fi autopep8 --in-place --max-line-length=$LINEWIDTH --ignore=$IGNORES "$filename" if ! git diff --quiet --exit-code "$filename"; then changes_made=true fi done if ${changes_made}; then not_ok "autopep8 made changes" fi fi status "running flake8" if [[ -z `command -v flake8` ]]; then warning "flake8 is not installed" else flake8_ok=true for filename in ${py_files[@]}; do if ! flake8 --config=common/flake8rc "$filename"; then flake8_ok=false fi done $flake8_ok || not_ok "flake8 failed" fi status "running pylint" if [[ -z `command -v pylint` ]]; then warning "pylint is not installed" elif [[ ! -f common/pylintrc ]]; then warning "common/pylintrc not found" else pylint_ok=true for filename in ${py_files[@]}; do if ! pylint --rcfile=common/pylintrc --disable=R,line-too-long \ --enable=W0611 --output-format=text --reports=no \ --spelling-private-dict-file=common/code_spelling_ignore_words.txt \ "$filename"; then pylint_ok=false fi done $pylint_ok || not_ok "pylint failed" fi if git diff --name-only $REVRANGE | grep ^master/docs/ ; then status "building docs" # Don't clean builddir if built in quick mode if ! 
$quick ; then make -C master/docs clean || not_ok "docs cleanup failed" fi make -C master/docs VERSION=latest html || not_ok "docs failed" else status "not building docs, because it was not changed" fi echo "" if $ok; then if [ -z "${problem_summary}" ]; then echo "${GREEN}GOOD!${NORM}" else echo "${YELLOW}WARNINGS${NORM}${problem_summary}" fi exit 0 else echo "${RED}NO GOOD!${NORM}${problem_summary}" exit 1 fi buildbot-3.4.0/master/000077500000000000000000000000001413250514000146105ustar00rootroot00000000000000buildbot-3.4.0/master/COPYING000066400000000000000000000354221413250514000156510ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. 
To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. 
The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. 
b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. 
You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. 
If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. 
If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. 
The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS buildbot-3.4.0/master/CREDITS000066400000000000000000000075261413250514000156420ustar00rootroot00000000000000This is a list of everybody who has contributed to Buildbot in some way, in no particular order. Thanks everybody! A. T. Hofkamp Aaron Hsieh Abdelrahman Hussein Adam Collard Adam MacBeth Adam Sjøgren Adam Slater Adam Vandenberg Alexander Lorenz Alexander Staubo Aloisio Almeida Jr Amar Takhar Amber Yust Andi Albrecht Andreas Lawitzky Andrew Bennetts Andrew Bortz Andrew Melo Andrew Straw Andriy Senkovych Andy Howell Anthony Baxter Arkadiusz Miskiewicz Augie Fackler Aurélien Bompard Aviv Ben-Yosef Axel Hecht Baptiste Lepilleur Ben Bangert Ben Hearsum Benjamin Smedberg Benoit Sigoure Benoît Allard Bobby Impollonia Brad Hards Brandon Ehle Brandon Philips Brandon R. Stoner Brett Neely Brian Campbell Brian Warner Chad S Metcalf Charles Davis Charles Hardin Charles Lepple Chase Phillips Chris AtLee Chris Peyer Chris Rivera Chris Soyars Chris Templin Christian Lins Christian Unger Claude Vittoria Clement Stenac ClusterHQ Inc. Dan Kegel Dan Locks Dan Savilonis Dan Scott Daniel Dunbar Daniel Svensson Darragh Bailey Dave Abrahams Dave Liebreich Dave Peticolas David Adam (zanchey) Derek Hurley Dmitry Gladkov Dmitry Nezhevenko Dobes Vandermeer Doug Goldstein Doug Latornell Douglas Hubler Douglas Leeder Duncan Ferguson Dustin J. 
Mitchell Dustin Sallings Elliot Murphy Fabrice Crestois Federico G. Schwindt Filip Hautekeete François Poirotte Gabriele Giacone Gareth Armstrong Gary Granger Gary Poster Gavin McDonald Georges Racinet Georgi Valkov Gerald Combs Gerard Escalante Geraud Boyer Greg McNew Greg Ward Grig Gheorghiu Haavard Skinnemoen Harry Borkhuis Ian Zimmerman Igor Slepchin Iustin Pop Jakub Gustak James Knight James Porter James Tomson Jared Grubb Jared Morrow Jason Hoos Jay Soffian Jean-Paul Calderone Jeff Bailey Jeff Olson Jeremy Gill Jerome Davann Jochen Eisinger Johan Bergström John Backstrand John Carr John F Leach John Ford John O'Duinn John Pye John Saxton Johnnie Pittman Jon Olsson Jonathan Romero Jonathan S. Romero Jorge Gonzalez Jose Dapena Paz Joshua Kugler Joshua Olson Joshua Root Julien Boeuf Justin Wood KATO Kazuyoshi Karl Norby Kevin Turner Kirill Lapshin Kovarththanan Rajaratnam Kristian Nielsen Lital Natan Louis Opter Love Hörnquist Åstrand Loïc Minier Lukas Blakk Łukasz Jernaś Marc Abramowitz Marc Mengel Marc-Antoine Ruel Marcus Lindblom Marius Gedminas Mark A. Grondona Mark Dillavou Mark Hammond Mark Lakewood Mark Pauley Mark Rowe Mark Wielaard Martin Nordholts Mateusz Loskot Matisse Enzer Matt Heitzenroder Matt Whiteley Matthew Scott Matthew Jacobi Mattias Brändström Michael Haggerty Michael Lyle Michael MacDonald Michael Stapelberg Michał Šrajer Mihai Parparita Mikael Lind Mike "Bear" Taylor Mikhail Gusarov Mirko Boehm Monty Taylor Nathaniel Smith Nate Bragg Neal Norwitz Neil Hemingway Nick Mathewson Nick Mills Nick Trout Nicolas Sylvain Nicolás Alvarez Niklaus Giger Olivier Bonnet Olly Betts P. 
Christeas Pam Selle Patrick Gansterer Paul Warren Paul Winkler Phil Thompson Philipp Frauenfelder Philippe McLean Pierre Tardy Piotr Sikora Pradeepkumar Gayam Quentin Raynaud Rafaël Carré Randall Bosetti Renato Alves Rene Müller Rene Rivera Riccardo Magliocchetti Richard Holden Richard Levitte Rob Helmer Robert Collins Robert Iannucci Robin Eckert Saurabh Kumar Satya Graha Scott Garman Scott Lamb Scott Lawrence Seo Sanghyeon Sergey Lipnevich Shawn Chin Shimizukawa Sidnei da Silva Simon Kennedy Stanislav Kupryakhin Stefan Marr Stefan Seefeld Stefan Zager Stephen Davis Steve "Ashcrow" Milner Steven Walter Stuart Auchterlonie Ted Mielczarek Terence Haddock Thijs Triemstra Thomas Moschny Thomas Vander Stichele Tim Hatch Timothy Fitz Tobi Vollebregt Tobias Oberstein Tom Fogal Tom Prince Tom Wardill Tomaz Muraus Umesh Patel Unknown tagger Wade Brainerd Wanner Markus William Deegan William Siegrist Yoz Grahame Zandr Milewski Zellyn Hunter Zooko Wilcox-O'Hearn Konstantinos Koukopoulos Name Unknown: adam chops code gollum gv lurker99 strank buildbot-3.4.0/master/MANIFEST.in000066400000000000000000000022061413250514000163460ustar00rootroot00000000000000include MANIFEST.in README.rst CREDITS COPYING UPGRADING include docs/examples/*.cfg include docs/conf.py include docs/Makefile include docs/buildbot.1 include docs/*.rst include docs/_images/* include docs/_static/* include docs/_templates/* include docs/tutorial/*.rst include docs/tutorial/_images/*.png include docs/manual/*.rst include docs/manual/_images/*.svg include docs/manual/_images/*.png include docs/manual/_images/*.txt include docs/manual/_images/icon.blend include docs/manual/_images/Makefile include docs/manual/installation/*.rst include docs/bbdocs/*.py include docs/developer/* include docs/developer/_images/* include docs/relnotes/* include buildbot/scripts/sample.cfg include buildbot/scripts/buildbot_tac.tmpl include buildbot/reporters/templates/*.txt include buildbot/spec/api.raml include 
buildbot/spec/types/*.raml include buildbot/db/migrations/README include buildbot/db/migrations/alembic.ini include contrib/* contrib/windows/* contrib/os-x/* contrib/css/* contrib/libvirt/* include contrib/trac/* contrib/trac/bbwatcher/* contrib/trac/bbwatcher/templates/* include contrib/init-scripts/* contrib/bash/* contrib/zsh/* buildbot-3.4.0/master/Makefile000066400000000000000000000003771413250514000162570ustar00rootroot00000000000000# developer utilities pylint: pylint -j4 --rcfile=../common/pylintrc buildbot docs/*.py setup.py @test ! -f fail tutorial: cd docs/tutorial; $(MAKE) html flake8: flake8 --config=../common/flake8rc buildbot docs/*.py setup.py rmpyc: make -C .. rmpyc buildbot-3.4.0/master/README.rst000066400000000000000000000071671413250514000163120ustar00rootroot00000000000000Buildbot: The Continuous Integration Framework ============================================== :Site: https://buildbot.net :Original author: Brian Warner :Current maintainer: `The Botherders `_. .. contents:: :local: Buildbot is an open-source continuous integration framework for automating software build, test, and release processes. * Buildbot is easy to set up, but very extensible and customizable. It supports arbitrary build processes, and is not limited to common build processes for particular languages (e.g., autotools or ant) * Buildbot supports building and testing on a variety of platforms. Developers, who do not have the facilities to test their changes everywhere before committing, will know shortly afterwards whether they have broken the build or not. * Buildbot allows to track various metrics (warning counts, lint checks, image size, compile time, etc) over time. * Buildbot has minimal requirements for workers: using virtualenv, only a Python installation is required. * Workers can be run behind a NAT firewall and communicate with the master. 
* Buildbot has a variety of status-reporting tools to get information about builds in front of developers in a timely manner. Documentation ------------- See https://docs.buildbot.net/current/ for documentation of the current version of Buildbot. Docker container ---------------- Buildbot comes with a ready to use docker container available at buildbot/buildbot-master Following environment variables are supported for configuration: * ``BUILDBOT_CONFIG_URL``: http url to a config tarball. The tarball must be in the .tar.gz format. The tarball must contain a directory, which will contain a master.cfg file in it. The tarball may contain a twisted.tac file in it, which can be used to configure the twisted logging system (e.g to log in logstash instead of the default stdout). The tarball will be extracted in a directory named ``$BUILDBOT_CONFIG_DIR`` in the master directory, and can contain additional python module that the master.cfg can load. If ``BUILDBOT_CONFIG_URL`` does not end with .tar.gz, it is considered to be an URL to the direct ``master.cfg`` * ``BUILDBOT_CONFIG_DIR`` directory where to extract the config tarball within the master directory. It is important so that you can do relative imports in your master.cfg like it is done in the metabbotcfg (https://github.com/buildbot/metabbotcfg) Requirements ------------ See https://docs.buildbot.net/current/manual/installation/index.html Briefly: python, Twisted, Jinja2, simplejson, and SQLite. Simplejson and SQLite are included with recent versions of Python. Contributing ------------- Please send your patches to https://github.com/buildbot/buildbot/ Support ------- Please send questions, file bugs, etc, on the Buildbot Github project https://github.com/buildbot/buildbot/issues. Alternatively, write to the buildbot-devel mailing list reachable through https://buildbot.net/. 
Copying ------- Buildbot is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. For full details, please see the file named COPYING in the top directory of the source tree. You should have received a copy of the GNU General Public License along with this program. If not, see . buildbot-3.4.0/master/UPGRADING000066400000000000000000000002411413250514000160500ustar00rootroot00000000000000For information on ugprading Buildbot, see the section "Upgrading" in the buildbot documentation. This may be found locally in docs/manual/upgrading/index.rst. buildbot-3.4.0/master/buildbot/000077500000000000000000000000001413250514000164145ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/__init__.py000066400000000000000000000107671413250514000205400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # # We can't put this method in utility modules, because they import dependency packages import datetime import os import re from subprocess import PIPE from subprocess import STDOUT from subprocess import Popen def gitDescribeToPep440(version): # git describe produce version in the form: v0.9.8-20-gf0f45ca # where 20 is the number of commit since last release, and gf0f45ca is the short commit id # preceded by 'g' we parse this a transform into a pep440 release version 0.9.9.dev20 # (increment last digit and add dev before 20) VERSION_MATCH = re.compile(r'(?P\d+)\.(?P\d+)\.(?P\d+)(\.post(?P\d+))?(-(?P\d+))?(-g(?P.+))?') # noqa pylint: disable=line-too-long v = VERSION_MATCH.search(version) if v: major = int(v.group('major')) minor = int(v.group('minor')) patch = int(v.group('patch')) if v.group('dev'): patch += 1 dev = int(v.group('dev')) return "{}.{}.{}-dev{}".format(major, minor, patch, dev) if v.group('post'): return "{}.{}.{}.post{}".format(major, minor, patch, v.group('post')) return "{}.{}.{}".format(major, minor, patch) return v def mTimeVersion(init_file): cwd = os.path.dirname(os.path.abspath(init_file)) m = 0 for root, dirs, files in os.walk(cwd): for f in files: m = max(os.path.getmtime(os.path.join(root, f)), m) d = datetime.datetime.utcfromtimestamp(m) return d.strftime("%Y.%m.%d") def getVersionFromArchiveId(git_archive_id='1634372192 (HEAD -> master, tag: v3.4.0)'): """ Extract the tag if a source is from git archive. When source is exported via `git archive`, the git_archive_id init value is modified and placeholders are expanded to the "archived" revision: %ct: committer date, UNIX timestamp %d: ref names, like the --decorate option of git-log See man gitattributes(5) and git-log(1) (PRETTY FORMATS) for more details. 
""" # mangle the magic string to make sure it is not replaced by git archive if not git_archive_id.startswith('$For''mat:'): # source was modified by git archive, try to parse the version from # the value of git_archive_id match = re.search(r'tag:\s*v([^,)]+)', git_archive_id) if match: # archived revision is tagged, use the tag return gitDescribeToPep440(match.group(1)) # archived revision is not tagged, use the commit date tstamp = git_archive_id.strip().split()[0] d = datetime.datetime.utcfromtimestamp(int(tstamp)) return d.strftime('%Y.%m.%d') return None def getVersion(init_file): """ Return BUILDBOT_VERSION environment variable, content of VERSION file, git tag or 'latest' """ try: return os.environ['BUILDBOT_VERSION'] except KeyError: pass try: cwd = os.path.dirname(os.path.abspath(init_file)) fn = os.path.join(cwd, 'VERSION') with open(fn) as f: return f.read().strip() except IOError: pass version = getVersionFromArchiveId() if version is not None: return version try: p = Popen(['git', 'describe', '--tags', '--always'], stdout=PIPE, stderr=STDOUT, cwd=cwd) out = p.communicate()[0] if (not p.returncode) and out: v = gitDescribeToPep440(str(out)) if v: return v except OSError: pass try: # if we really can't find the version, we use the date of modification of the most recent # file docker hub builds cannot use git describe return mTimeVersion(init_file) except Exception: # bummer. lets report something return "latest" version = getVersion(__file__) __version__ = version buildbot-3.4.0/master/buildbot/asyncio.py000066400000000000000000000070401413250514000204340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Alternatively, you can use and copy this module under the MIT License # Copyright Buildbot Team Members import asyncio import inspect import sys from asyncio import base_events from asyncio import events from twisted.internet import defer def deferred_await(self): # if a deferred is awaited from a asyncio loop context, we must return # the future wrapper, but if it is awaited from normal twisted loop # we must return self. if isinstance(asyncio.get_event_loop(), AsyncIOLoopWithTwisted): return self.asFuture(asyncio.get_event_loop()) return self defer.Deferred.__await__ = deferred_await def as_deferred(f): return asyncio.get_event_loop().as_deferred(f) def as_future(d): return d.asFuture(asyncio.get_event_loop()) if sys.version_info[:2] >= (3, 7): def make_handle(callback, args, loop, context): return events.Handle(callback, args, loop, context) else: def make_handle(callback, args, loop, context): # python 3.6 does not support async contextvars return events.Handle(callback, args, loop) class AsyncIOLoopWithTwisted(base_events.BaseEventLoop): """ Minimal asyncio loop for Buildbot asyncio only dependencies as of now, only graphql is needing asyncio loop As of now, it can only run basic coroutines, no network operation is supported But this could be implemented as needed """ def __init__(self, reactor): self._running = False self._reactor = reactor super().__init__() self._running = True def start(self): self._running = True events._set_running_loop(self) def stop(self): self._running = False 
events._set_running_loop(None) def is_running(self): return self._running def call_soon(self, callback, *args, context=None): handle = make_handle(callback, args, self, context) self._reactor.callLater(0, handle._run) return handle def call_soon_threadsafe(self, callback, *args, context=None): handle = make_handle(callback, args, self, context) self._reactor.callFromThread(handle._run) return handle def time(self): # we delegate timekeeping to the reactor so that it can be faked return self._reactor.seconds() def call_at(self, when, callback, *args, context=None): handle = make_handle(callback, args, self, context) # Twisted timers are relatives, contrary to asyncio. delay = when - self.time() if delay < 0: delay = 0 self._reactor.callLater(delay, handle._run) return handle def as_deferred(self, thing): if isinstance(thing, defer.Deferred): return thing # check for coroutine objects if inspect.isawaitable(thing): return defer.Deferred.fromFuture(asyncio.ensure_future(thing)) return defer.succeed(thing) buildbot-3.4.0/master/buildbot/buildbot_net_usage_data.py000066400000000000000000000170431413250514000236220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ This files implement buildbotNetUsageData options It uses urllib instead of requests in order to avoid requiring another dependency for statistics feature. urllib supports http_proxy already. urllib is blocking and thus everything is done from a thread. """ import hashlib import inspect import json import os import platform import socket from urllib import error as urllib_error from urllib import request as urllib_request from twisted.internet import threads from twisted.python import log from buildbot.process.buildstep import _BuildStepFactory from buildbot.util import unicode2bytes from buildbot.www.config import IndexResource # This can't change! or we will need to make sure we are compatible with all # released version of buildbot >=0.9.0 PHONE_HOME_URL = "https://events.buildbot.net/events/phone_home" def linux_distribution(): os_release = "/etc/os-release" meta_data = {'ID': "unknown_linux", 'VERSION_ID': "unknown_version"} if os.path.exists(os_release): with open("/etc/os-release") as f: for line in f: try: k, v = line.strip().split("=") meta_data[k] = v.strip('""') except Exception: pass return meta_data['ID'], meta_data['VERSION_ID'] def get_distro(): system = platform.system() if system == "Linux": dist = linux_distribution() return "{}:{}".format(dist[0], dist[1]) elif system == "Windows": dist = platform.win32_ver() return "{}:{}".format(dist[0], dist[1]) elif system == "Java": dist = platform.java_ver() return "{}:{}".format(dist[0], dist[1]) elif system == "Darwin": dist = platform.mac_ver() return "{}".format(dist[0]) # else: return ":".join(platform.uname()[0:1]) def getName(obj): """This method finds the first parent class which is within the buildbot namespace it prepends the name with as many ">" as the class is subclassed """ # elastic search does not like '.' 
in dict keys, so we replace by / def sanitize(name): return name.replace(".", "/") if isinstance(obj, _BuildStepFactory): klass = obj.factory else: klass = type(obj) name = "" klasses = (klass, ) + inspect.getmro(klass) for klass in klasses: if hasattr(klass, "__module__") and klass.__module__.startswith("buildbot."): return sanitize(name + klass.__module__ + "." + klass.__name__) else: name += ">" return sanitize(type(obj).__name__) def countPlugins(plugins_uses, lst): if isinstance(lst, dict): lst = lst.values() for i in lst: name = getName(i) plugins_uses.setdefault(name, 0) plugins_uses[name] += 1 def basicData(master): plugins_uses = {} countPlugins(plugins_uses, master.config.workers) countPlugins(plugins_uses, master.config.builders) countPlugins(plugins_uses, master.config.schedulers) countPlugins(plugins_uses, master.config.services) countPlugins(plugins_uses, master.config.change_sources) for b in master.config.builders: countPlugins(plugins_uses, b.factory.steps) # we hash the master's name + various other master dependent variables # to get as much as possible an unique id # we hash it to not leak private information about the installation such as hostnames and domain # names hashInput = ( master.name + # master name contains hostname + master basepath socket.getfqdn() # we add the fqdn to account for people # call their buildbot host 'buildbot' # and install it in /var/lib/buildbot ) hashInput = unicode2bytes(hashInput) installid = hashlib.sha1(hashInput).hexdigest() return { 'installid': installid, 'versions': dict(IndexResource.getEnvironmentVersions()), 'platform': { 'platform': platform.platform(), 'system': platform.system(), 'machine': platform.machine(), 'processor': platform.processor(), 'python_implementation': platform.python_implementation(), # xBSD including osx will disclose too much information after [4] like where it # was built 'version': " ".join(platform.version().split(' ')[:4]), 'distro': get_distro() }, 'plugins': plugins_uses, 
'db': master.config.db['db_url'].split("://")[0], 'mq': master.config.mq['type'], 'www_plugins': list(master.config.www['plugins'].keys()) } def fullData(master): """ Send the actual configuration of the builders, how the steps are agenced. Note that full data will never send actual detail of what command is run, name of servers, etc. """ builders = [] for b in master.config.builders: steps = [] for step in b.factory.steps: steps.append(getName(step)) builders.append(steps) return {'builders': builders} def computeUsageData(master): if master.config.buildbotNetUsageData is None: return None data = basicData(master) if master.config.buildbotNetUsageData != "basic": data.update(fullData(master)) if callable(master.config.buildbotNetUsageData): data = master.config.buildbotNetUsageData(data) return data def _sendWithUrlib(url, data): data = json.dumps(data).encode() clen = len(data) req = urllib_request.Request(url, data, { 'Content-Type': 'application/json', 'Content-Length': clen }) try: f = urllib_request.urlopen(req) except urllib_error.URLError: return None res = f.read() f.close() return res def _sendWithRequests(url, data): try: import requests # pylint: disable=import-outside-toplevel except ImportError: return None r = requests.post(url, json=data) return r.text def _sendBuildbotNetUsageData(data): log.msg("buildbotNetUsageData: sending {}".format(data)) # first try with requests, as this is the most stable http library res = _sendWithRequests(PHONE_HOME_URL, data) # then we try with stdlib, which not always work with https if res is None: res = _sendWithUrlib(PHONE_HOME_URL, data) # at last stage if res is None: log.msg("buildbotNetUsageData: Could not send using https, " "please `pip install 'requests[security]'` for proper SSL implementation`") data['buggySSL'] = True res = _sendWithUrlib(PHONE_HOME_URL.replace("https://", "http://"), data) log.msg("buildbotNetUsageData: buildbot.net said:", res) def sendBuildbotNetUsageData(master): if 
master.config.buildbotNetUsageData is None: return data = computeUsageData(master) if data is None: return threads.deferToThread(_sendBuildbotNetUsageData, data) buildbot-3.4.0/master/buildbot/buildrequest.py000066400000000000000000000014321413250514000214760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.process.buildrequest import BuildRequest _hush_pyflakes = [BuildRequest] buildbot-3.4.0/master/buildbot/changes/000077500000000000000000000000001413250514000200245ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/changes/__init__.py000066400000000000000000000000001413250514000221230ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/changes/base.py000066400000000000000000000131401413250514000213070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import config from buildbot.interfaces import IChangeSource from buildbot.util import service from buildbot.util.poll import method as poll_method from buildbot.warnings import warn_deprecated @implementer(IChangeSource) class ChangeSource(service.ClusteredBuildbotService): def describe(self): pass # activity handling def activate(self): return defer.succeed(None) def deactivate(self): return defer.succeed(None) # service handling def _getServiceId(self): return self.master.data.updates.findChangeSourceId(self.name) def _claimService(self): return self.master.data.updates.trySetChangeSourceMaster(self.serviceid, self.master.masterid) def _unclaimService(self): return self.master.data.updates.trySetChangeSourceMaster(self.serviceid, None) class ReconfigurablePollingChangeSource(ChangeSource): pollInterval = None pollAtLaunch = None pollRandomDelayMin = None pollRandomDelayMax = None def checkConfig(self, name=None, pollInterval=60 * 10, pollAtLaunch=False, pollRandomDelayMin=0, pollRandomDelayMax=0): super().checkConfig(name=name) if pollInterval < 0: config.error("interval must be >= 0: {}".format(pollInterval)) if pollRandomDelayMin < 0: config.error("min random delay must be >= 0: {}".format(pollRandomDelayMin)) if pollRandomDelayMax < 0: config.error("max random delay must be >= 0: {}".format(pollRandomDelayMax)) if pollRandomDelayMin > pollRandomDelayMax: config.error("min random delay must be <= {}: {}".format(pollRandomDelayMax, pollRandomDelayMin)) if pollRandomDelayMax >= pollInterval: config.error("max random delay must be < {}: {}".format(pollInterval, pollRandomDelayMax)) @defer.inlineCallbacks 
def reconfigService(self, name=None, pollInterval=60 * 10, pollAtLaunch=False, pollRandomDelayMin=0, pollRandomDelayMax=0): self.pollInterval, prevPollInterval = pollInterval, self.pollInterval self.pollAtLaunch = pollAtLaunch self.pollRandomDelayMin = pollRandomDelayMin self.pollRandomDelayMax = pollRandomDelayMax yield super().reconfigService(name=name) # pollInterval change is the only value which makes sense to reconfigure check. if prevPollInterval != pollInterval and self.doPoll.running: yield self.doPoll.stop() # As a implementation detail, poller will 'pollAtReconfigure' if poll interval changes # and pollAtLaunch=True yield self.doPoll.start(interval=self.pollInterval, now=self.pollAtLaunch, random_delay_min=self.pollRandomDelayMin, random_delay_max=self.pollRandomDelayMax) def poll(self): pass @poll_method def doPoll(self): d = defer.maybeDeferred(self.poll) d.addErrback(log.err, '{}: while polling for changes'.format(self)) return d def force(self): self.doPoll() def activate(self): self.doPoll.start(interval=self.pollInterval, now=self.pollAtLaunch, random_delay_min=self.pollRandomDelayMin, random_delay_max=self.pollRandomDelayMax) def deactivate(self): return self.doPoll.stop() class PollingChangeSource(ReconfigurablePollingChangeSource): # Legacy code will be very painful to port to BuildbotService life cycle # because the unit tests keep doing shortcuts for the Service life cycle (i.e by no calling # startService) instead of porting everything at once, we make a class to support legacy def checkConfig(self, name=None, pollInterval=60 * 10, pollAtLaunch=False, pollRandomDelayMin=0, pollRandomDelayMax=0, **kwargs): super().checkConfig(name=name, pollInterval=60 * 10, pollAtLaunch=False, pollRandomDelayMin=0, pollRandomDelayMax=0) warn_deprecated('3.3.0', 'PollingChangeSource has been deprecated: ' + 'please use ReconfigurablePollingChangeSource') self.pollInterval = pollInterval self.pollAtLaunch = pollAtLaunch self.pollRandomDelayMin = 
pollRandomDelayMin self.pollRandomDelayMax = pollRandomDelayMax def reconfigService(self, *args, **kwargs): # BuildbotServiceManager will detect such exception and swap old service with new service, # instead of just reconfiguring raise NotImplementedError() buildbot-3.4.0/master/buildbot/changes/bitbucket.py000066400000000000000000000217241413250514000223600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import base64 import json import time from datetime import datetime from twisted.internet import defer from twisted.python import log from twisted.web import client from buildbot.changes import base from buildbot.util import bytes2unicode from buildbot.util import datetime2epoch from buildbot.util import deferredLocked from buildbot.util import epoch2datetime from buildbot.util.pullrequest import PullRequestMixin class BitbucketPullrequestPoller(base.ReconfigurablePollingChangeSource, PullRequestMixin): compare_attrs = ("owner", "slug", "branch", "pollInterval", "useTimestamps", "category", "project", "pollAtLaunch") db_class_name = 'BitbucketPullrequestPoller' property_basename = "bitbucket" def __init__(self, owner, slug, **kwargs): kwargs['name'] = self.build_name(owner, slug) self.initLock = defer.DeferredLock() super().__init__(owner, slug, **kwargs) def checkConfig(self, owner, slug, branch=None, pollInterval=10 * 60, useTimestamps=True, category=None, project='', pullrequest_filter=True, pollAtLaunch=False, auth=None, bitbucket_property_whitelist=None): super().checkConfig(name=self.build_name(owner, slug), pollInterval=pollInterval, pollAtLaunch=pollAtLaunch) @defer.inlineCallbacks def reconfigService(self, owner, slug, branch=None, pollInterval=10 * 60, useTimestamps=True, category=None, project='', pullrequest_filter=True, pollAtLaunch=False, auth=None, bitbucket_property_whitelist=None): self.owner = owner self.slug = slug self.branch = branch if bitbucket_property_whitelist is None: bitbucket_property_whitelist = [] if hasattr(pullrequest_filter, '__call__'): self.pullrequest_filter = pullrequest_filter else: self.pullrequest_filter = (lambda _: pullrequest_filter) self.lastChange = time.time() self.lastPoll = time.time() self.useTimestamps = useTimestamps self.category = category if callable( category) else bytes2unicode(category) self.project = bytes2unicode(project) self.external_property_whitelist = 
bitbucket_property_whitelist if auth is not None: encoded_credentials = base64.b64encode(":".join(auth).encode()) self.headers = {b"Authorization": b"Basic " + encoded_credentials} else: self.headers = None yield super().reconfigService(self.build_name(owner, slug), pollInterval=pollInterval, pollAtLaunch=pollAtLaunch) def build_name(self, owner, slug): return '/'.join([owner, slug]) def describe(self): return "BitbucketPullrequestPoller watching the "\ "Bitbucket repository {}/{}, branch: {}".format(self.owner, self.slug, self.branch) @deferredLocked('initLock') @defer.inlineCallbacks def poll(self): page = yield self._getChanges() yield self._processChanges(page) def _getChanges(self): self.lastPoll = time.time() log.msg("BitbucketPullrequestPoller: polling " "Bitbucket repository {}/{}, branch: {}".format(self.owner, self.slug, self.branch)) url = "https://bitbucket.org/api/2.0/repositories/{}/{}/pullrequests".format(self.owner, self.slug) return client.getPage(url, timeout=self.pollInterval, headers=self.headers) @defer.inlineCallbacks def _processChanges(self, page): result = json.loads(page) for pr in result['values']: branch = pr['source']['branch']['name'] nr = int(pr['id']) # Note that this is a short hash. The full length hash can be accessed via the # commit api resource but we want to avoid requesting multiple pages as long as # we are not sure that the pull request is new or updated. 
revision = pr['source']['commit']['hash'] # check branch if not self.branch or branch in self.branch: current = yield self._getCurrentRev(nr) # compare _short_ hashes to check if the PR has been updated if not current or current[0:12] != revision[0:12]: # parse pull request api page (required for the filter) page = yield client.getPage(str(pr['links']['self']['href']), headers=self.headers) pr_json = json.loads(page) # filter pull requests by user function if not self.pullrequest_filter(pr_json): log.msg('pull request does not match filter') continue # access additional information author = pr['author']['display_name'] prlink = pr['links']['html']['href'] # Get time updated time. Note that the timezone offset is # ignored. if self.useTimestamps: updated = datetime.strptime( pr['updated_on'].split('.')[0], '%Y-%m-%dT%H:%M:%S') else: updated = epoch2datetime(self.master.reactor.seconds()) title = pr['title'] # parse commit api page page = yield client.getPage( str(pr['source']['commit']['links']['self']['href']), headers=self.headers, ) commit_json = json.loads(page) # use the full-length hash from now on revision = commit_json['hash'] revlink = commit_json['links']['html']['href'] # parse repo api page page = yield client.getPage( str(pr['source']['repository']['links']['self']['href']), headers=self.headers, ) repo_json = json.loads(page) repo = repo_json['links']['html']['href'] # update database yield self._setCurrentRev(nr, revision) # emit the change yield self.master.data.updates.addChange( author=bytes2unicode(author), committer=None, revision=bytes2unicode(revision), revlink=bytes2unicode(revlink), comments='pull-request #{}: {}\n{}'.format(nr, title, prlink), when_timestamp=datetime2epoch(updated), branch=bytes2unicode(branch), category=self.category, project=self.project, repository=bytes2unicode(repo), properties={'pullrequesturl': prlink, **self.extractProperties(pr), }, src='bitbucket', ) def _getCurrentRev(self, pr_id): # Return a deferred datetime 
object for the given pull request number # or None. d = self._getStateObjectId() @d.addCallback def oid_callback(oid): current = self.master.db.state.getState( oid, 'pull_request%d' % pr_id, None) @current.addCallback def result_callback(result): return result return current return d def _setCurrentRev(self, pr_id, rev): # Set the datetime entry for a specified pull request. d = self._getStateObjectId() @d.addCallback def oid_callback(oid): return self.master.db.state.setState(oid, 'pull_request%d' % pr_id, rev) return d def _getStateObjectId(self): # Return a deferred for object id in state db. return self.master.db.state.getObjectId( '{}/{}#{}'.format(self.owner, self.slug, self.branch), self.db_class_name) buildbot-3.4.0/master/buildbot/changes/changes.py000066400000000000000000000164051413250514000220140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import html # py2: via future import time from twisted.internet import defer from twisted.python import log from buildbot import util from buildbot.process.properties import Properties from buildbot.util import datetime2epoch class Change: """I represent a single change to the source tree. 
This may involve several files, but they are all changed by the same person, and there is a change comment for the group as a whole.""" number = None branch = None category = None revision = None # used to create a source-stamp links = [] # links are gone, but upgrade code expects this attribute @classmethod def fromChdict(cls, master, chdict): """ Class method to create a L{Change} from a dictionary as returned by L{ChangesConnectorComponent.getChange}. @param master: build master instance @param ssdict: change dictionary @returns: L{Change} via Deferred """ cache = master.caches.get_cache("Changes", cls._make_ch) return cache.get(chdict['changeid'], chdict=chdict, master=master) @classmethod def _make_ch(cls, changeid, master, chdict): change = cls(None, None, None, _fromChdict=True) change.who = chdict['author'] change.committer = chdict['committer'] change.comments = chdict['comments'] change.revision = chdict['revision'] change.branch = chdict['branch'] change.category = chdict['category'] change.revlink = chdict['revlink'] change.repository = chdict['repository'] change.codebase = chdict['codebase'] change.project = chdict['project'] change.number = chdict['changeid'] when = chdict['when_timestamp'] if when: when = datetime2epoch(when) change.when = when change.files = sorted(chdict['files']) change.properties = Properties() for n, (v, s) in chdict['properties'].items(): change.properties.setProperty(n, v, s) return defer.succeed(change) def __init__(self, who, files, comments, committer=None, revision=None, when=None, branch=None, category=None, revlink='', properties=None, repository='', codebase='', project='', _fromChdict=False): if properties is None: properties = {} # skip all this madness if we're being built from the database if _fromChdict: return self.who = who self.committer = committer self.comments = comments def none_or_unicode(x): if x is None: return x return str(x) self.revision = none_or_unicode(revision) now = util.now() if when is None: 
self.when = now elif when > now: # this happens when the committing system has an incorrect clock, for example. # handle it gracefully log.msg( "received a Change with when > now; assuming the change happened now") self.when = now else: self.when = when self.branch = none_or_unicode(branch) self.category = none_or_unicode(category) self.revlink = revlink self.properties = Properties() self.properties.update(properties, "Change") self.repository = repository self.codebase = codebase self.project = project # keep a sorted list of the files, for easier display self.files = sorted(files or []) def __setstate__(self, dict): self.__dict__ = dict # Older Changes won't have a 'properties' attribute in them if not hasattr(self, 'properties'): self.properties = Properties() if not hasattr(self, 'revlink'): self.revlink = "" def __str__(self): return ("Change(revision=%r, who=%r, committer=%r, branch=%r, comments=%r, " + "when=%r, category=%r, project=%r, repository=%r, " + "codebase=%r)") % ( self.revision, self.who, self.committer, self.branch, self.comments, self.when, self.category, self.project, self.repository, self.codebase) def __eq__(self, other): return self.number == other.number def __ne__(self, other): return self.number != other.number def __lt__(self, other): return self.number < other.number def __le__(self, other): return self.number <= other.number def __gt__(self, other): return self.number > other.number def __ge__(self, other): return self.number >= other.number def asText(self): data = "" data += "Files:\n" for f in self.files: data += " {}\n".format(f) if self.repository: data += "On: {}\n".format(self.repository) if self.project: data += "For: {}\n".format(self.project) data += "At: {}\n".format(self.getTime()) data += "Changed By: {}\n".format(self.who) data += "Committed By: {}\n".format(self.committer) data += "Comments: {}".format(self.comments) data += "Properties: \n" for prop in self.properties.asList(): data += " {}: {}".format(prop[0], 
prop[1]) data += '\n\n' return data def asDict(self): '''returns a dictionary with suitable info for html/mail rendering''' files = [dict(name=f) for f in self.files] files.sort(key=lambda a: a['name']) result = { # Constant 'number': self.number, 'branch': self.branch, 'category': self.category, 'who': self.getShortAuthor(), 'committer': self.committer, 'comments': self.comments, 'revision': self.revision, 'rev': self.revision, 'when': self.when, 'at': self.getTime(), 'files': files, 'revlink': getattr(self, 'revlink', None), 'properties': self.properties.asList(), 'repository': getattr(self, 'repository', None), 'codebase': getattr(self, 'codebase', ''), 'project': getattr(self, 'project', None) } return result def getShortAuthor(self): return self.who def getTime(self): if not self.when: return "?" return time.strftime("%a %d %b %Y %H:%M:%S", time.localtime(self.when)) def getTimes(self): return (self.when, None) def getText(self): return [html.escape(self.who)] def getLogs(self): return {} buildbot-3.4.0/master/buildbot/changes/filter.py000066400000000000000000000135251413250514000216710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# Copyright Buildbot Team Members

import re

from buildbot.util import ComparableMixin
from buildbot.util import NotABranch


class ChangeFilter(ComparableMixin):
    """Filter Change objects by attribute.

    Each of project/repository/branch/category/codebase can be constrained by
    an exact value (or list of values), an anchored regular expression, or a
    callable; an optional ``filter_fn`` receives the whole Change object.
    """

    # NOTE: If users use a filter_fn, we have no way to determine whether it has
    # changed at reconfig, so the scheduler will always be restarted. That's as
    # good as Python can do.
    compare_attrs = ('filter_fn', 'checks')

    def __init__(self,
                 # gets a Change object, returns boolean
                 filter_fn=None,
                 # change attribute comparisons: exact match to PROJECT, member of
                 # list PROJECTS, regular expression match to PROJECT_RE, or
                 # PROJECT_FN returns True when called with the project; repository,
                 # branch, and so on are similar. Note that the regular expressions
                 # are anchored to the first character of the string. For convenience,
                 # a list can also be specified to the singular option (e.g, PROJECTS).
                 project=None, project_re=None, project_fn=None,
                 repository=None, repository_re=None, repository_fn=None,
                 branch=NotABranch, branch_re=None, branch_fn=None,
                 category=None, category_re=None, category_fn=None,
                 codebase=None, codebase_re=None, codebase_fn=None):
        self.filter_fn = filter_fn
        self.checks = self.createChecks(
            (project, project_re, project_fn, "project"),
            (repository, repository_re, repository_fn, "repository"),
            (branch, branch_re, branch_fn, "branch"),
            (category, category_re, category_fn, "category"),
            (codebase, codebase_re, codebase_fn, "codebase"),
        )

    def createChecks(self, *checks):
        """Normalize (value, regex, fn, attr) tuples into the checks dict."""
        def as_list(value):
            # wrap a bare value in a list; leave None and lists untouched
            if value is not None and not isinstance(value, list):
                return [value]
            return value

        def as_branch_list(value):
            # branch needs to be handled specially: NotABranch marks "unset",
            # while None is a legitimate branch value to match
            if value is NotABranch:
                return None
            if isinstance(value, list):
                return value
            return [value]

        def as_regex(value):
            # accept either a pattern string or a pre-compiled regex object
            if value is None or hasattr(value, 'match'):
                return value
            return re.compile(value)

        result = {}
        for filt_list, filt_re, filt_fn, chg_attr in checks:
            normalize = as_branch_list if "branch" in chg_attr else as_list
            result[chg_attr] = (normalize(filt_list), as_regex(filt_re), filt_fn)
        return result

    def filter_change(self, change):
        """Return True iff ``change`` passes filter_fn and every configured check."""
        if self.filter_fn is not None and not self.filter_fn(change):
            return False
        for chg_attr, (filt_list, filt_re, filt_fn) in self.checks.items():
            # "prop:NAME" checks look up a change property; others read the
            # attribute of the same name directly off the Change
            if chg_attr.startswith("prop:"):
                prop_name = chg_attr.split(":", 1)[1]
                chg_val = change.properties.getProperty(prop_name, '')
            else:
                chg_val = getattr(change, chg_attr, '')
            if filt_list is not None and chg_val not in filt_list:
                return False
            if filt_re is not None:
                if chg_val is None or not filt_re.match(chg_val):
                    return False
            if filt_fn is not None and not filt_fn(chg_val):
                return False
        return True

    def __repr__(self):
        clauses = []
        for chg_attr, (filt_list, filt_re, filt_fn) in sorted(self.checks.items()):
            if filt_list is not None:
                if len(filt_list) == 1:
                    clauses.append('{} == {}'.format(chg_attr, filt_list[0]))
                else:
                    clauses.append('{} in {}'.format(chg_attr, repr(filt_list)))
            if filt_re is not None:
                clauses.append('{} ~/{}/'.format(chg_attr, filt_re))
            if filt_fn is not None:
                clauses.append('{}({})'.format(filt_fn.__name__, chg_attr))
        return "<{} on {}>".format(self.__class__.__name__, ' and '.join(clauses))

    @staticmethod
    def fromSchedulerConstructorArgs(change_filter=None,
                                     branch=NotABranch, categories=None):
        """
        Static method to create a filter based on constructor args
        change_filter, branch, and categories; use default values @code{None},
        @code{NotABranch}, and @code{None}, respectively.  These arguments are
        interpreted as documented for the
        L{buildbot.schedulers.basic.Scheduler} class.

        @returns: L{ChangeFilter} instance or None for not filtering
        """
        # an explicit change_filter wins, but may not be combined with the
        # deprecated branch/categories arguments
        if change_filter:
            if branch is not NotABranch or categories is not None:
                raise RuntimeError("cannot specify both change_filter and "
                                   "branch or categories")
            return change_filter
        # build a change filter from the deprecated category and branch args
        if branch is not NotABranch or categories:
            cfargs = {}
            if branch is not NotABranch:
                cfargs['branch'] = branch
            if categories:
                cfargs['category'] = categories
            return ChangeFilter(**cfargs)
        return None
# Copyright Buildbot Team Members

import copy
import datetime
import json

from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log

from buildbot import config
from buildbot import util
from buildbot.changes import base
from buildbot.changes.filter import ChangeFilter
from buildbot.util import bytes2unicode
from buildbot.util import httpclientservice
from buildbot.util import runprocess
from buildbot.util.protocol import LineProcessProtocol
from buildbot.util.pullrequest import PullRequestMixin


def _canonicalize_event(event):
    """
    Return an event dictionary which is consistent between the gerrit event
    stream and the gerrit event log formats.
    """
    # For "patchset-created" the events-log JSON looks like:
    #   "project": {"name": "buildbot"}
    # while the stream-events JSON looks like:
    #   "project": "buildbot"
    # so we canonicalize them to the latter
    if "change" not in event:
        return event

    change = event["change"]
    if "project" not in change:
        return event

    project = change["project"]
    if not isinstance(project, dict):
        return event

    if "name" not in project:
        return event

    event = copy.deepcopy(event)
    event["change"]["project"] = project["name"]
    return event


class GerritChangeFilter(ChangeFilter):

    """This gerrit specific change filter helps creating pre-commit and
    post-commit builders"""

    def __init__(self, eventtype=None, eventtype_re=None, eventtype_fn=None, **kw):
        super().__init__(**kw)

        # filter on the gerrit event type, stored as a change property
        self.checks.update(
            self.createChecks(
                (eventtype, eventtype_re, eventtype_fn, "prop:event.type"),
            ))
        # for branch change filter, we take the real gerrit branch
        # instead of the change's branch, which is also used as a grouping key
        if "branch" in self.checks:
            self.checks["prop:event.change.branch"] = self.checks["branch"]
            del self.checks["branch"]


def _gerrit_user_to_author(props, username="unknown"):
    """
    Convert Gerrit account properties to Buildbot format

    Take into account missing values
    """
    # prefer "name" over "username" when both are present
    username = props.get("username", username)
    username = props.get("name", username)
    if "email" in props:
        username += " <%(email)s>" % props
    return username


class GerritChangeSourceBase(base.ChangeSource, PullRequestMixin):

    """This source will maintain a connection to gerrit ssh server
    that will provide us gerrit events in json format."""

    compare_attrs = ("gerritserver", "gerritport")
    name = None
    # list of properties that are no of no use to be put in the event dict
    external_property_denylist = ["event.eventCreatedOn"]
    external_property_whitelist = ['*']
    property_basename = 'event'

    def checkConfig(self,
                    gitBaseURL=None,
                    handled_events=("patchset-created", "ref-updated"),
                    debug=False,
                    get_files=False):
        if gitBaseURL is None:
            config.error("gitBaseURL must be specified")

    def reconfigService(self,
                        gitBaseURL=None,
                        handled_events=("patchset-created", "ref-updated"),
                        debug=False,
                        get_files=False):
        self.gitBaseURL = gitBaseURL
        self.handled_events = list(handled_events)
        self._get_files = get_files
        self.debug = debug

    def lineReceived(self, line):
        """Parse one JSON line from gerrit and dispatch it as an event."""
        try:
            event = json.loads(bytes2unicode(line))
        except ValueError:
            log.msg("bad json line: {}".format(line))
            return defer.succeed(None)

        if not (isinstance(event, dict) and "type" in event):
            if self.debug:
                log.msg("no type in event {}".format(line))
            return defer.succeed(None)

        return self.eventReceived(event)

    def build_properties(self, event):
        """Extract change properties from an event dict."""
        properties = self.extractProperties(event)
        properties["event.source"] = self.__class__.__name__
        if event['type'] in ('patchset-created', 'comment-added') and 'change' in event:
            properties['target_branch'] = event["change"]["branch"]
        return properties

    def eventReceived(self, event):
        """Dispatch a handled event to eventReceived_<type> or the default handler."""
        if not (event['type'] in self.handled_events):
            if self.debug:
                log.msg("the event type '{}' is not setup to handle".format(event['type']))
            return defer.succeed(None)

        properties = self.build_properties(event)
        func_name = "eventReceived_{}".format(event["type"].replace("-", "_"))
        func = getattr(self, func_name, None)
        if func is None:
            return self.addChangeFromEvent(properties, event)

        return func(properties, event)

    @defer.inlineCallbacks
    def addChange(self, event_type, chdict):
        """Record a change in the database, de-duplicating on the sourcestamp."""
        stampdict = {
            "branch": chdict["branch"],
            "revision": chdict["revision"],
            "patch_author": chdict["author"],
            "patch_comment": chdict["comments"],
            "repository": chdict["repository"],
            "project": chdict["project"],
            "codebase": '',
        }

        stampid, found_existing = yield(
            self.master.db.sourcestamps.findOrCreateId(**stampdict))

        # an existing sourcestamp for these event types means this event is a
        # duplicate (e.g. received via both stream-events and events-log)
        if found_existing and event_type in ("patchset-created", "ref-updated"):
            if self.debug:
                eventstr = "{}/{} -- {}:{}".format(
                    self.gitBaseURL, chdict["project"], chdict["branch"],
                    chdict["revision"])
                message = (
                    "gerrit: duplicate change event {} by {}"
                    .format(eventstr, self.__class__.__name__))
                log.msg(message.encode("utf-8"))
            return

        if self.debug:
            eventstr = "{} -- {}:{}".format(
                chdict["repository"], chdict["branch"], chdict["revision"])
            message = (
                "gerrit: adding change from {} in {}"
                .format(eventstr, self.__class__.__name__))
            log.msg(message.encode("utf-8"))

        try:
            yield self.master.data.updates.addChange(**chdict)
        except Exception:
            # eat failures..
            log.err('error adding change from GerritChangeSource')

    def get_branch_from_event(self, event):
        # for patchset events the ref of the patchset is the effective branch
        if event['type'] in ('patchset-created', 'comment-added'):
            return event["patchSet"]["ref"]
        return event["change"]["branch"]

    @defer.inlineCallbacks
    def addChangeFromEvent(self, properties, event):
        """Default handler: turn a change/patchSet event into a buildbot change."""
        if "change" not in event:
            if self.debug:
                log.msg("unsupported event {}".format(event["type"]))
            return None

        if "patchSet" not in event:
            if self.debug:
                log.msg("unsupported event {}".format(event["type"]))
            return None

        event = _canonicalize_event(event)
        event_change = event["change"]

        files = ["unknown"]
        if self._get_files:
            files = yield self.getFiles(
                change=event_change["number"],
                patchset=event["patchSet"]["number"]
            )

        yield self.addChange(event['type'], {
            'author': _gerrit_user_to_author(event_change["owner"]),
            'project': util.bytes2unicode(event_change["project"]),
            'repository': "{}/{}".format(
                self.gitBaseURL, event_change["project"]),
            'branch': self.get_branch_from_event(event),
            'revision': event["patchSet"]["revision"],
            'revlink': event_change["url"],
            'comments': event_change["subject"],
            'files': files,
            'category': event["type"],
            'properties': properties})
        return None

    def eventReceived_ref_updated(self, properties, event):
        ref = event["refUpdate"]
        author = "gerrit"

        if "submitter" in event:
            author = _gerrit_user_to_author(event["submitter"], author)

        # Ignore ref-updated events if patchset-created events are expected for this push.
        # ref-updated events may arrive before patchset-created events and cause problems, as
        # builds would be using properties from ref-updated event and not from patchset-created.
        # As a result it may appear that the change was not related to a Gerrit change and cause
        # reporters to not submit reviews for example.
        if 'patchset-created' in self.handled_events and ref['refName'].startswith('refs/changes/'):
            return None

        return self.addChange(event['type'], dict(
            author=author,
            project=ref["project"],
            repository="{}/{}".format(self.gitBaseURL, ref["project"]),
            branch=ref["refName"],
            revision=ref["newRev"],
            comments="Gerrit: commit(s) pushed.",
            files=["unknown"],
            category=event["type"],
            properties=properties))


class GerritChangeSource(GerritChangeSourceBase):

    """This source will maintain a connection to gerrit ssh server
    that will provide us gerrit events in json format."""

    compare_attrs = ("gerritserver", "gerritport")

    STREAM_GOOD_CONNECTION_TIME = 120
    "(seconds) connections longer than this are considered good, and reset the backoff timer"

    STREAM_BACKOFF_MIN = 0.5
    "(seconds) minimum, but nonzero, time to wait before retrying a failed connection"

    STREAM_BACKOFF_EXPONENT = 1.5
    "multiplier used to increase the backoff from MIN to MAX on repeated failures"

    STREAM_BACKOFF_MAX = 60
    "(seconds) maximum time to wait before retrying a failed connection"

    name = None

    def checkConfig(self,
                    gerritserver,
                    username,
                    gerritport=29418,
                    identity_file=None,
                    **kwargs):
        if self.name is None:
            self.name = "GerritChangeSource:{}@{}:{}".format(username, gerritserver, gerritport)
        if 'gitBaseURL' not in kwargs:
            kwargs['gitBaseURL'] = "automatic at reconfigure"
        super().checkConfig(**kwargs)

    def reconfigService(self,
                        gerritserver,
                        username,
                        gerritport=29418,
                        identity_file=None,
                        name=None,
                        **kwargs):
        if 'gitBaseURL' not in kwargs:
            kwargs['gitBaseURL'] = "ssh://{}@{}:{}".format(username, gerritserver, gerritport)
        self.gerritserver = gerritserver
        self.gerritport = gerritport
        self.username = username
        self.identity_file = identity_file
        self.process = None
        self.wantProcess = False
        self.streamProcessTimeout = self.STREAM_BACKOFF_MIN
        return super().reconfigService(**kwargs)

    class LocalPP(LineProcessProtocol):
        """Process protocol forwarding stream-events output to the change source."""

        def __init__(self, change_source):
            super().__init__()
            self.change_source = change_source

        @defer.inlineCallbacks
        def outLineReceived(self, line):
            if self.change_source.debug:
                log.msg(b"gerrit: " + line)
            yield self.change_source.lineReceived(line)

        def errLineReceived(self, line):
            if self.change_source.debug:
                log.msg(b"gerrit stderr: " + line)

        def processEnded(self, status):
            super().processEnded(status)
            self.change_source.streamProcessStopped()

    def streamProcessStopped(self):
        """Restart the ssh stream with exponential backoff on early failures."""
        self.process = None

        # if the service is stopped, don't try to restart the process
        if not self.wantProcess or not self.running:
            return

        now = util.now()
        if now - self.lastStreamProcessStart < \
           self.STREAM_GOOD_CONNECTION_TIME:
            # bad startup; start the stream process again after a timeout,
            # and then increase the timeout
            log.msg(
                "'gerrit stream-events' failed; restarting after %ds"
                % round(self.streamProcessTimeout))
            self.master.reactor.callLater(
                self.streamProcessTimeout, self.startStreamProcess)
            self.streamProcessTimeout *= self.STREAM_BACKOFF_EXPONENT
            if self.streamProcessTimeout > self.STREAM_BACKOFF_MAX:
                self.streamProcessTimeout = self.STREAM_BACKOFF_MAX
        else:
            # good startup, but lost connection; restart immediately,
            # and set the timeout to its minimum

            # make sure we log the reconnection, so that it might be detected
            # and network connectivity fixed
            log.msg("gerrit stream-events lost connection. Reconnecting...")
            self.startStreamProcess()
            self.streamProcessTimeout = self.STREAM_BACKOFF_MIN

    def _buildGerritCommand(self, *gerrit_args):
        '''Get an ssh command list which invokes gerrit with the given args on the
        remote host'''

        cmd = [
            "ssh",
            "-o", "BatchMode=yes",
            "{}@{}".format(self.username, self.gerritserver),
            "-p", str(self.gerritport)
        ]

        if self.identity_file is not None:
            cmd.extend(["-i", self.identity_file])

        cmd.append("gerrit")
        cmd.extend(gerrit_args)
        return cmd

    def startStreamProcess(self):
        if self.debug:
            log.msg("starting 'gerrit stream-events'")

        cmd = self._buildGerritCommand("stream-events")
        self.lastStreamProcessStart = util.now()
        # FIX: spawn via the master's reactor rather than the global reactor,
        # for consistency with callLater/run_process elsewhere in this class
        # (and so a substitute reactor in tests captures the process too)
        self.process = self.master.reactor.spawnProcess(
            self.LocalPP(self), "ssh", cmd, env=None)

    @defer.inlineCallbacks
    def getFiles(self, change, patchset):
        """Query gerrit over ssh for the files touched by a change/patchset."""
        cmd = self._buildGerritCommand("query", str(change), "--format", "JSON",
                                       "--files", "--patch-sets")

        if self.debug:
            log.msg("querying gerrit for changed files in change {}/{}: {}".format(
                change, patchset, cmd))

        rc, out = yield runprocess.run_process(self.master.reactor, cmd, env=None,
                                               collect_stderr=False)
        if rc != 0:
            return ["unknown"]

        out = out.splitlines()[0]
        res = json.loads(bytes2unicode(out))

        if res.get("rowCount") == 0:
            return ["unknown"]

        patchsets = {i["number"]: i["files"] for i in res["patchSets"]}
        return [i["file"] for i in patchsets[int(patchset)]]

    def activate(self):
        self.wantProcess = True
        self.startStreamProcess()

    def deactivate(self):
        self.wantProcess = False
        if self.process:
            self.process.signalProcess("KILL")
        # TODO: if this occurs while the process is restarting, some exceptions
        # may be logged, although things will settle down normally

    def describe(self):
        status = ""
        if not self.process:
            status = "[NOT CONNECTED - check log]"
        return (("GerritChangeSource watching the remote "
                 "Gerrit repository {}@{} {}").format(
                     self.username, self.gerritserver, status))


class GerritEventLogPoller(GerritChangeSourceBase):
    """Polls the gerrit events-log plugin over HTTP instead of ssh streaming."""

    POLL_INTERVAL_SEC = 30
    FIRST_FETCH_LOOKBACK_DAYS = 30

    def checkConfig(self,
                    baseURL,
                    auth,
                    pollInterval=POLL_INTERVAL_SEC,
                    pollAtLaunch=True,
                    firstFetchLookback=FIRST_FETCH_LOOKBACK_DAYS,
                    **kwargs):
        if self.name is None:
            self.name = "GerritEventLogPoller:{}".format(baseURL)
        super().checkConfig(**kwargs)

    @defer.inlineCallbacks
    def reconfigService(self,
                        baseURL,
                        auth,
                        pollInterval=POLL_INTERVAL_SEC,
                        pollAtLaunch=True,
                        firstFetchLookback=FIRST_FETCH_LOOKBACK_DAYS,
                        **kwargs):
        yield super().reconfigService(**kwargs)
        if baseURL.endswith('/'):
            baseURL = baseURL[:-1]

        self._pollInterval = pollInterval
        self._pollAtLaunch = pollAtLaunch
        self._oid = yield self.master.db.state.getObjectId(self.name, self.__class__.__name__)
        self._http = yield httpclientservice.HTTPClientService.getService(
            self.master, baseURL, auth=auth)

        self._first_fetch_lookback = firstFetchLookback
        self._last_event_time = None

    @staticmethod
    def now():
        """patchable now (datetime is not patchable as builtin)"""
        return datetime.datetime.utcnow()

    @defer.inlineCallbacks
    def poll(self):
        """Fetch events since the last stored timestamp and feed them through."""
        last_event_ts = yield self.master.db.state.getState(self._oid, 'last_event_ts', None)
        if last_event_ts is None:
            # If there is not last event time stored in the database, then set
            # the last event time to some historical look-back
            last_event = self.now() - datetime.timedelta(days=self._first_fetch_lookback)
        else:
            last_event = datetime.datetime.utcfromtimestamp(last_event_ts)
        last_event_formatted = last_event.strftime("%Y-%m-%d %H:%M:%S")

        if self.debug:
            log.msg("Polling gerrit: {}".format(last_event_formatted).encode("utf-8"))

        res = yield self._http.get("/plugins/events-log/events/",
                                   params=dict(t1=last_event_formatted))
        lines = yield res.content()
        for line in lines.splitlines():
            yield self.lineReceived(line)

    @defer.inlineCallbacks
    def eventReceived(self, event):
        res = yield super().eventReceived(event)
        # remember the newest event timestamp so the next poll resumes there
        if 'eventCreatedOn' in event:
            yield self.master.db.state.setState(self._oid, 'last_event_ts',
                                                event['eventCreatedOn'])
        return res

    @defer.inlineCallbacks
    def getFiles(self, change, patchset):
        res = yield self._http.get("/changes/{}/revisions/{}/files/".format(change, patchset))
        res = yield res.content()

        res = res.splitlines()[1].decode('utf8')  # the first line of every response is `)]}'`
        return list(json.loads(res))

    # FIXME this copy the code from PollingChangeSource
    # but as PollingChangeSource and its subclasses need to be ported to reconfigurability
    # we can't use it right now
    @base.poll_method
    def doPoll(self):
        d = defer.maybeDeferred(self.poll)
        d.addErrback(log.err, 'while polling for changes')
        return d

    def force(self):
        self.doPoll()

    def activate(self):
        self.doPoll.start(interval=self._pollInterval, now=self._pollAtLaunch)

    def deactivate(self):
        return self.doPoll.stop()

    def describe(self):
        msg = ("GerritEventLogPoller watching the remote "
               "Gerrit repository {}")
        return msg.format(self.name)
# Copyright Buildbot Team Members

from datetime import datetime

from twisted.internet import defer

from buildbot import config
from buildbot.changes import base
from buildbot.util import bytes2unicode
from buildbot.util import datetime2epoch
from buildbot.util import httpclientservice
from buildbot.util.logger import Logger
from buildbot.util.pullrequest import PullRequestMixin
from buildbot.util.state import StateMixin

log = Logger()

HOSTED_BASE_URL = "https://api.github.com"
link_urls = {
    "https": "clone_url",
    "svn": "svn_url",
    "git": "git_url",
    "ssh": "ssh_url"
}


class GitHubPullrequestPoller(base.ReconfigurablePollingChangeSource,
                              StateMixin, PullRequestMixin):
    """Polls the GitHub REST API for new/updated pull requests and emits
    one buildbot change per updated PR head revision."""

    compare_attrs = ("owner", "repo", "token", "branches", "pollInterval",
                     "category", "pollAtLaunch", "name")
    db_class_name = 'GitHubPullrequestPoller'
    property_basename = "github"

    def __init__(self, owner, repo, **kwargs):
        name = kwargs.get("name")
        if not name:
            kwargs["name"] = "GitHubPullrequestPoller:" + owner + "/" + repo
        super().__init__(owner, repo, **kwargs)

    def checkConfig(self,
                    owner,
                    repo,
                    branches=None,
                    category='pull',
                    baseURL=None,
                    pullrequest_filter=True,
                    token=None,
                    magic_link=False,
                    repository_type="https",
                    github_property_whitelist=None,
                    **kwargs):
        if repository_type not in ["https", "svn", "git", "ssh"]:
            config.error(
                "repository_type must be one of {https, svn, git, ssh}")
        super().checkConfig(name=self.name, **kwargs)

    @defer.inlineCallbacks
    def reconfigService(self,
                        owner,
                        repo,
                        branches=None,
                        pollInterval=10 * 60,
                        category=None,
                        baseURL=None,
                        pullrequest_filter=True,
                        token=None,
                        pollAtLaunch=False,
                        magic_link=False,
                        repository_type="https",
                        github_property_whitelist=None,
                        **kwargs):
        yield super().reconfigService(name=self.name, **kwargs)

        if baseURL is None:
            baseURL = HOSTED_BASE_URL
        if baseURL.endswith('/'):
            baseURL = baseURL[:-1]

        http_headers = {'User-Agent': 'Buildbot'}
        if token is not None:
            token = yield self.renderSecrets(token)
            http_headers.update({'Authorization': 'token ' + token})

        if github_property_whitelist is None:
            github_property_whitelist = []

        self._http = yield httpclientservice.HTTPClientService.getService(
            self.master, baseURL, headers=http_headers)

        self.token = token
        self.owner = owner
        self.repo = repo
        self.branches = branches
        self.pollInterval = pollInterval
        self.pollAtLaunch = pollAtLaunch
        self.repository_type = link_urls[repository_type]
        self.magic_link = magic_link
        self.external_property_whitelist = github_property_whitelist

        # a non-callable pullrequest_filter acts as a constant accept/reject
        if callable(pullrequest_filter):
            self.pullrequest_filter = pullrequest_filter
        else:
            self.pullrequest_filter = (lambda _: pullrequest_filter)

        self.category = category if callable(category) else bytes2unicode(
            category)

    def describe(self):
        return ("GitHubPullrequestPoller watching the "
                "GitHub repository {}/{}").format(self.owner, self.repo)

    @defer.inlineCallbacks
    def _getPullInformation(self, pull_number):
        result = yield self._http.get('/'.join(
            ['/repos', self.owner, self.repo, 'pulls', str(pull_number)]))
        my_json = yield result.json()
        return my_json

    @defer.inlineCallbacks
    def _getPulls(self):
        log.debug(("GitHubPullrequestPoller: polling "
                   "GitHub repository {}/{}, branches: {}").format(
                       self.owner, self.repo, self.branches))
        result = yield self._http.get('/'.join(
            ['/repos', self.owner, self.repo, 'pulls']))
        my_json = yield result.json()
        if result.code != 200:
            message = my_json.get('message', 'unknown')
            log.error("GitHubPullrequestPoller error {0.code} '{1}' "
                      "while loading {0.url}".format(result, message))
            return []
        return my_json

    @defer.inlineCallbacks
    def _getFiles(self, prnumber):
        result = yield self._http.get("/".join([
            '/repos', self.owner, self.repo, 'pulls', str(prnumber), 'files'
        ]))
        my_json = yield result.json()

        return [f["filename"] for f in my_json]

    @defer.inlineCallbacks
    def _getCommitters(self, prnumber):
        result = yield self._http.get("/".join([
            '/repos', self.owner, self.repo, 'pulls', str(prnumber), 'commits'
        ]))
        my_json = yield result.json()

        return [[c["commit"]["committer"]["name"],
                 c["commit"]["committer"]["email"]] for c in my_json]

    @defer.inlineCallbacks
    def _getAuthors(self, prnumber):
        result = yield self._http.get("/".join([
            '/repos', self.owner, self.repo, 'pulls', str(prnumber), 'commits'
        ]))
        my_json = yield result.json()

        return [[a["commit"]["author"]["name"],
                 a["commit"]["author"]["email"]] for a in my_json]

    @defer.inlineCallbacks
    def _getCurrentRev(self, prnumber):
        # Get currently assigned revision of PR number
        result = yield self._getStateObjectId()
        rev = yield self.master.db.state.getState(result,
                                                  'pull_request%d' % prnumber,
                                                  None)
        return rev

    @defer.inlineCallbacks
    def _setCurrentRev(self, prnumber, rev):
        # Set the updated revision for PR number.
        result = yield self._getStateObjectId()
        yield self.master.db.state.setState(result,
                                            'pull_request%d' % prnumber, rev)

    @defer.inlineCallbacks
    def _getStateObjectId(self):
        # Return a deferred for object id in state db.
        result = yield self.master.db.state.getObjectId(
            '{}/{}'.format(self.owner, self.repo), self.db_class_name)
        return result

    @defer.inlineCallbacks
    def _processChanges(self, github_result):
        for pr in github_result:
            # Track PRs for specified branches
            base_branch = pr['base']['ref']
            prnumber = pr['number']
            revision = pr['head']['sha']

            # Check to see if the branch is set or matches
            if self.branches is not None and base_branch not in self.branches:
                continue
            if (self.pullrequest_filter is not None and
                    not self.pullrequest_filter(pr)):
                continue
            current = yield self._getCurrentRev(prnumber)
            # a 12-char prefix comparison suffices to detect a new head SHA
            if not current or current[0:12] != revision[0:12]:
                # Access title, repo, html link, and comments
                pr = yield self._getPullInformation(prnumber)
                title = pr['title']
                if self.magic_link:
                    branch = 'refs/pull/{:d}/merge'.format(prnumber)
                    repo = pr['base']['repo'][self.repository_type]
                else:
                    branch = pr['head']['ref']
                    repo = pr['head']['repo'][self.repository_type]
                revlink = pr['html_url']
                comments = pr['body']
                updated = datetime.strptime(pr['updated_at'],
                                            '%Y-%m-%dT%H:%M:%SZ')
                # update database
                yield self._setCurrentRev(prnumber, revision)

                project = pr['base']['repo']['full_name']
                commits = pr['commits']

                dl = defer.DeferredList(
                    [self._getAuthors(prnumber),
                     self._getCommitters(prnumber),
                     self._getFiles(prnumber)],
                    consumeErrors=True)

                results = yield dl
                failures = [r[1] for r in results if not r[0]]
                if failures:
                    for failure in failures:
                        log.error("while processing changes for "
                                  "Pullrequest {} revision {}".format(
                                      prnumber, revision))
                    # Fail on the first error!
                    failures[0].raiseException()
                [authors, committers, files] = [r[1] for r in results]

                author = authors[0][0] + " <" + authors[0][1] + ">"
                committer = committers[0][0] + " <" + committers[0][1] + ">"

                # emit the change
                # FIX: pluralize on commits != 1 (previously "> 0", which
                # produced "1 commits" and "0 commit")
                yield self.master.data.updates.addChange(
                    author=author,
                    committer=committer,
                    revision=bytes2unicode(revision),
                    revlink=bytes2unicode(revlink),
                    comments='GitHub Pull Request #{0} ({1} commit{2})\n{3}\n{4}'.
                    format(prnumber, commits, 's' if commits != 1 else '',
                           title, comments),
                    when_timestamp=datetime2epoch(updated),
                    branch=bytes2unicode(branch),
                    category=self.category,
                    project=project,
                    repository=bytes2unicode(repo),
                    files=files,
                    properties={
                        'pullrequesturl': revlink,
                        **self.extractProperties(pr),
                    },
                    src='git')

    @defer.inlineCallbacks
    def poll(self):
        result = yield self._getPulls()
        yield self._processChanges(result)
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import re import stat from urllib.parse import quote as urlquote from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.changes import base from buildbot.util import bytes2unicode from buildbot.util import private_tempdir from buildbot.util import runprocess from buildbot.util.git import GitMixin from buildbot.util.git import getSshKnownHostsContents from buildbot.util.misc import writeLocalFile from buildbot.util.state import StateMixin class GitError(Exception): """Raised when git exits with code 128.""" class GitPoller(base.ReconfigurablePollingChangeSource, StateMixin, GitMixin): """This source will poll a remote git repo for changes and submit them to the change master.""" compare_attrs = ("repourl", "branches", "workdir", "pollInterval", "gitbin", "usetimestamps", "category", "project", "pollAtLaunch", "buildPushesWithNoCommits", "sshPrivateKey", "sshHostKey", "sshKnownHosts", "pollRandomDelayMin", "pollRandomDelayMax") secrets = ("sshPrivateKey", "sshHostKey", "sshKnownHosts") def __init__(self, repourl, **kwargs): name = kwargs.get("name", None) if name is None: kwargs["name"] = repourl super().__init__(repourl, **kwargs) def checkConfig(self, repourl, branches=None, branch=None, workdir=None, pollInterval=10 * 60, gitbin="git", usetimestamps=True, category=None, project=None, pollinterval=-2, fetch_refspec=None, encoding="utf-8", name=None, pollAtLaunch=False, buildPushesWithNoCommits=False, only_tags=False, sshPrivateKey=None, sshHostKey=None, sshKnownHosts=None, pollRandomDelayMin=0, pollRandomDelayMax=0): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval if only_tags and 
(branch or branches): config.error("GitPoller: can't specify only_tags and branch/branches") if branch and branches: config.error("GitPoller: can't specify both branch and branches") self.sshPrivateKey = sshPrivateKey self.sshHostKey = sshHostKey self.sshKnownHosts = sshKnownHosts self.setupGit(logname='GitPoller') # check the configuration if fetch_refspec is not None: config.error("GitPoller: fetch_refspec is no longer supported. " "Instead, only the given branches are downloaded.") if name is None: name = repourl super().checkConfig(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch, pollRandomDelayMin=pollRandomDelayMin, pollRandomDelayMax=pollRandomDelayMax) @defer.inlineCallbacks def reconfigService(self, repourl, branches=None, branch=None, workdir=None, pollInterval=10 * 60, gitbin="git", usetimestamps=True, category=None, project=None, pollinterval=-2, fetch_refspec=None, encoding="utf-8", name=None, pollAtLaunch=False, buildPushesWithNoCommits=False, only_tags=False, sshPrivateKey=None, sshHostKey=None, sshKnownHosts=None, pollRandomDelayMin=0, pollRandomDelayMax=0): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval if name is None: name = repourl if project is None: project = '' if branch: branches = [branch] elif not branches: if only_tags: branches = lambda ref: ref.startswith('refs/tags/') # noqa: E731 else: branches = ['master'] self.repourl = repourl self.branches = branches self.encoding = encoding self.buildPushesWithNoCommits = buildPushesWithNoCommits self.gitbin = gitbin self.workdir = workdir self.usetimestamps = usetimestamps self.category = category if callable( category) else bytes2unicode(category, encoding=self.encoding) self.project = bytes2unicode(project, encoding=self.encoding) self.changeCount = 0 self.lastRev = {} self.sshPrivateKey = sshPrivateKey self.sshHostKey = sshHostKey self.sshKnownHosts = sshKnownHosts self.setupGit(logname='GitPoller') if 
self.workdir is None: self.workdir = 'gitpoller-work' # make our workdir absolute, relative to the master's basedir if not os.path.isabs(self.workdir): self.workdir = os.path.join(self.master.basedir, self.workdir) log.msg("gitpoller: using workdir '{}'".format(self.workdir)) yield super().reconfigService(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch, pollRandomDelayMin=pollRandomDelayMin, pollRandomDelayMax=pollRandomDelayMax) @defer.inlineCallbacks def _checkGitFeatures(self): stdout = yield self._dovccmd('--version', []) self.parseGitFeatures(stdout) if not self.gitInstalled: raise EnvironmentError('Git is not installed') if (self.sshPrivateKey is not None and not self.supportsSshPrivateKeyAsEnvOption): raise EnvironmentError('SSH private keys require Git 2.3.0 or newer') @defer.inlineCallbacks def activate(self): try: self.lastRev = yield self.getState('lastRev', {}) super().activate() except Exception as e: log.err(e, 'while initializing GitPoller repository') def describe(self): str = ('GitPoller watching the remote git repository ' + bytes2unicode(self.repourl, self.encoding)) if self.branches: if self.branches is True: str += ', branches: ALL' elif not callable(self.branches): str += ', branches: ' + ', '.join(self.branches) if not self.master: str += " [STOPPED - check log]" return str def _getBranches(self): d = self._dovccmd('ls-remote', ['--refs', self.repourl]) @d.addCallback def parseRemote(rows): branches = [] for row in rows.splitlines(): if '\t' not in row: # Not a useful line continue sha, ref = row.split("\t") branches.append(ref) return branches return d def _headsFilter(self, branch): """Filter out remote references that don't begin with 'refs/heads'.""" return branch.startswith("refs/heads/") def _removeHeads(self, branch): """Remove 'refs/heads/' prefix from remote references.""" if branch.startswith("refs/heads/"): branch = branch[11:] return branch def _trackerBranch(self, branch): # manually quote tilde for Python 3.7 url 
= urlquote(self.repourl, '').replace('~', '%7E') return "refs/buildbot/{}/{}".format(url, self._removeHeads(branch)) def poll_should_exit(self): # A single gitpoller loop may take a while on a loaded master, which would block # reconfiguration, so we try to exit early. return not self.doPoll.running @defer.inlineCallbacks def poll(self): yield self._checkGitFeatures() try: yield self._dovccmd('init', ['--bare', self.workdir]) except GitError as e: log.msg(e.args[0]) return branches = self.branches if self.branches else [] remote_refs = yield self._getBranches() if self.poll_should_exit(): return if branches is True or callable(branches): if callable(self.branches): branches = [b for b in remote_refs if self.branches(b)] else: branches = [b for b in remote_refs if self._headsFilter(b)] elif branches and remote_refs: remote_branches = [self._removeHeads(b) for b in remote_refs] branches = sorted(list(set(branches) & set(remote_branches))) refspecs = [ '+{}:{}'.format(self._removeHeads(branch), self._trackerBranch(branch)) for branch in branches ] try: yield self._dovccmd('fetch', ['--progress', self.repourl] + refspecs, path=self.workdir) except GitError as e: log.msg(e.args[0]) return revs = {} log.msg('gitpoller: processing changes from "{}"'.format(self.repourl)) for branch in branches: try: if self.poll_should_exit(): # pragma: no cover # Note that we still want to update the last known revisions for the branches # we did process break rev = yield self._dovccmd( 'rev-parse', [self._trackerBranch(branch)], path=self.workdir) revs[branch] = bytes2unicode(rev, self.encoding) yield self._process_changes(revs[branch], branch) except Exception: log.err(_why="trying to poll branch {} of {}".format( branch, self.repourl)) self.lastRev = revs yield self.setState('lastRev', self.lastRev) def _get_commit_comments(self, rev): args = ['--no-walk', r'--format=%s%n%b', rev, '--'] d = self._dovccmd('log', args, path=self.workdir) return d def _get_commit_timestamp(self, rev): # 
unix timestamp args = ['--no-walk', r'--format=%ct', rev, '--'] d = self._dovccmd('log', args, path=self.workdir) @d.addCallback def process(git_output): if self.usetimestamps: try: stamp = int(git_output) except Exception as e: log.msg(('gitpoller: caught exception converting output \'{}\' to timestamp' ).format(git_output)) raise e return stamp return None return d def _get_commit_files(self, rev): args = ['--name-only', '--no-walk', r'--format=%n', rev, '--'] d = self._dovccmd('log', args, path=self.workdir) def decode_file(file): # git use octal char sequences in quotes when non ASCII match = re.match('^"(.*)"$', file) if match: file = bytes2unicode(match.groups()[0], encoding=self.encoding, errors='unicode_escape') return bytes2unicode(file, encoding=self.encoding) @d.addCallback def process(git_output): fileList = [decode_file(file) for file in [s for s in git_output.splitlines() if len(s)]] return fileList return d def _get_commit_author(self, rev): args = ['--no-walk', r'--format=%aN <%aE>', rev, '--'] d = self._dovccmd('log', args, path=self.workdir) @d.addCallback def process(git_output): if not git_output: raise EnvironmentError('could not get commit author for rev') return git_output return d @defer.inlineCallbacks def _get_commit_committer(self, rev): args = ['--no-walk', r'--format=%cN <%cE>', rev, '--'] res = yield self._dovccmd('log', args, path=self.workdir) if not res: raise EnvironmentError('could not get commit committer for rev') return res @defer.inlineCallbacks def _process_changes(self, newRev, branch): """ Read changes since last change. - Read list of commit hashes. - Extract details from each commit. - Add changes to database. 
""" # initial run, don't parse all history if not self.lastRev: return # get the change list revListArgs = (['--ignore-missing'] + ['--format=%H', '{}'.format(newRev)] + ['^' + rev for rev in sorted(self.lastRev.values())] + ['--']) self.changeCount = 0 results = yield self._dovccmd('log', revListArgs, path=self.workdir) # process oldest change first revList = results.split() revList.reverse() if self.buildPushesWithNoCommits and not revList: existingRev = self.lastRev.get(branch) if existingRev != newRev: revList = [newRev] if existingRev is None: # This branch was completely unknown, rebuild log.msg('gitpoller: rebuilding {} for new branch "{}"'.format( newRev, branch)) else: # This branch is known, but it now points to a different # commit than last time we saw it, rebuild. log.msg('gitpoller: rebuilding {} for updated branch "{}"'.format( newRev, branch)) self.changeCount = len(revList) self.lastRev[branch] = newRev if self.changeCount: log.msg('gitpoller: processing {} changes: {} from "{}" branch "{}"'.format( self.changeCount, revList, self.repourl, branch)) for rev in revList: dl = defer.DeferredList([ self._get_commit_timestamp(rev), self._get_commit_author(rev), self._get_commit_committer(rev), self._get_commit_files(rev), self._get_commit_comments(rev), ], consumeErrors=True) results = yield dl # check for failures failures = [r[1] for r in results if not r[0]] if failures: for failure in failures: log.err( failure, "while processing changes for {} {}".format(newRev, branch)) # just fail on the first error; they're probably all related! 
failures[0].raiseException() timestamp, author, committer, files, comments = [r[1] for r in results] yield self.master.data.updates.addChange( author=author, committer=committer, revision=bytes2unicode(rev, encoding=self.encoding), files=files, comments=comments, when_timestamp=timestamp, branch=bytes2unicode(self._removeHeads(branch)), project=self.project, repository=bytes2unicode(self.repourl, encoding=self.encoding), category=self.category, src='git') def _isSshPrivateKeyNeededForCommand(self, command): commandsThatNeedKey = [ 'fetch', 'ls-remote', ] if self.sshPrivateKey is not None and command in commandsThatNeedKey: return True return False def _downloadSshPrivateKey(self, keyPath): # We change the permissions of the key file to be user-readable only so # that ssh does not complain. This is not used for security because the # parent directory will have proper permissions. writeLocalFile(keyPath, self.sshPrivateKey, mode=stat.S_IRUSR) def _downloadSshKnownHosts(self, path): if self.sshKnownHosts is not None: contents = self.sshKnownHosts else: contents = getSshKnownHostsContents(self.sshHostKey) writeLocalFile(path, contents) def _getSshPrivateKeyPath(self, ssh_data_path): return os.path.join(ssh_data_path, 'ssh-key') def _getSshKnownHostsPath(self, ssh_data_path): return os.path.join(ssh_data_path, 'ssh-known-hosts') @defer.inlineCallbacks def _dovccmd(self, command, args, path=None): if self._isSshPrivateKeyNeededForCommand(command): with private_tempdir.PrivateTemporaryDirectory( dir=self.workdir, prefix='.buildbot-ssh') as tmp_path: stdout = yield self._dovccmdImpl(command, args, path, tmp_path) else: stdout = yield self._dovccmdImpl(command, args, path, None) return stdout @defer.inlineCallbacks def _dovccmdImpl(self, command, args, path, ssh_workdir): full_args = [] full_env = os.environ.copy() if self._isSshPrivateKeyNeededForCommand(command): key_path = self._getSshPrivateKeyPath(ssh_workdir) self._downloadSshPrivateKey(key_path) known_hosts_path = 
None if self.sshHostKey is not None or self.sshKnownHosts is not None: known_hosts_path = self._getSshKnownHostsPath(ssh_workdir) self._downloadSshKnownHosts(known_hosts_path) self.adjustCommandParamsForSshPrivateKey(full_args, full_env, key_path, None, known_hosts_path) full_args += [command] + args res = yield runprocess.run_process(self.master.reactor, [self.gitbin] + full_args, path, env=full_env) (code, stdout, stderr) = res stdout = bytes2unicode(stdout, self.encoding) stderr = bytes2unicode(stderr, self.encoding) if code != 0: if code == 128: raise GitError('command {} in {} on repourl {} failed with exit code {}: {}'.format( full_args, path, self.repourl, code, stderr)) raise EnvironmentError(('command {} in {} on repourl {} failed with exit code {}: {}' ).format(full_args, path, self.repourl, code, stderr)) return stdout.strip() buildbot-3.4.0/master/buildbot/changes/hgpoller.py000066400000000000000000000354621413250514000222240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import time from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.changes import base from buildbot.util import bytes2unicode from buildbot.util import deferredLocked from buildbot.util import runprocess from buildbot.util.state import StateMixin class HgPoller(base.ReconfigurablePollingChangeSource, StateMixin): """This source will poll a remote hg repo for changes and submit them to the change master.""" compare_attrs = ("repourl", "branch", "branches", "bookmarks", "workdir", "pollInterval", "hgpoller", "usetimestamps", "category", "project", "pollAtLaunch", "pollRandomDelayMin", "pollRandomDelayMax") db_class_name = 'HgPoller' def __init__(self, repourl, **kwargs): name = kwargs.get("name", None) if not name: branches = self.build_branches(kwargs.get('branch', None), kwargs.get('branches', None)) kwargs["name"] = self.build_name(None, repourl, kwargs.get('bookmarks', None), branches) self.initLock = defer.DeferredLock() super().__init__(repourl, **kwargs) def checkConfig(self, repourl, branch=None, branches=None, bookmarks=None, workdir=None, pollInterval=10 * 60, hgbin="hg", usetimestamps=True, category=None, project="", pollinterval=-2, encoding="utf-8", name=None, pollAtLaunch=False, revlink=lambda branch, revision: (""), pollRandomDelayMin=0, pollRandomDelayMax=0): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval if branch and branches: config.error("HgPoller: can't specify both branch and branches") if not callable(revlink): config.error("You need to provide a valid callable for revlink") if workdir is None: config.error("workdir is mandatory for now in HgPoller") name = self.build_name(name, repourl, bookmarks, self.build_branches(branch, branches)) super().checkConfig(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch, pollRandomDelayMin=pollRandomDelayMin, 
pollRandomDelayMax=pollRandomDelayMax) @defer.inlineCallbacks def reconfigService(self, repourl, branch=None, branches=None, bookmarks=None, workdir=None, pollInterval=10 * 60, hgbin="hg", usetimestamps=True, category=None, project="", pollinterval=-2, encoding="utf-8", name=None, pollAtLaunch=False, revlink=lambda branch, revision: (""), pollRandomDelayMin=0, pollRandomDelayMax=0): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval self.repourl = repourl self.branches = self.build_branches(branch, branches) self.bookmarks = bookmarks or [] name = self.build_name(name, repourl, bookmarks, self.branches) if not self.branches and not self.bookmarks: self.branches = ['default'] self.encoding = encoding self.lastChange = time.time() self.lastPoll = time.time() self.hgbin = hgbin self.workdir = workdir self.usetimestamps = usetimestamps self.category = category if callable( category) else bytes2unicode(category) self.project = project self.lastRev = {} self.revlink_callable = revlink yield super().reconfigService(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch, pollRandomDelayMin=pollRandomDelayMin, pollRandomDelayMax=pollRandomDelayMax) def build_name(self, name, repourl, bookmarks, branches): if name is not None: return name name = repourl if bookmarks: name += "_" + "_".join(bookmarks) if branches: name += "_" + "_".join(branches) return name def build_branches(self, branch, branches): if branch: return [branch] return branches or [] @defer.inlineCallbacks def activate(self): self.lastRev = yield self.getState('lastRev', {}) super().activate() def describe(self): status = "" if not self.master: status = "[STOPPED - check log]" return (("HgPoller watching the remote Mercurial repository '{}', " "branches: {}, in workdir '{}' {}").format(self.repourl, ', '.join(self.branches), self.workdir, status)) @deferredLocked('initLock') @defer.inlineCallbacks def poll(self): yield 
self._getChanges() yield self._processChanges() def _absWorkdir(self): workdir = self.workdir if os.path.isabs(workdir): return workdir return os.path.join(self.master.basedir, workdir) @defer.inlineCallbacks def _getRevDetails(self, rev): """Return a deferred for (date, author, files, comments) of given rev. Deferred will be in error if rev is unknown. """ command = [ self.hgbin, 'log', '-r', rev, os.linesep.join(( '--template={date|hgdate}', '{author}', "{files % '{file}" + os.pathsep + "'}", '{desc|strip}'))] # Mercurial fails with status 255 if rev is unknown rc, output = yield runprocess.run_process(self.master.reactor, command, workdir=self._absWorkdir(), env=os.environ, collect_stderr=False, stderr_is_error=True) if rc != 0: msg = '{}: got error {} when getting details for revision {}'.format(self, rc, rev) raise Exception(msg) # all file names are on one line output = output.decode(self.encoding, "replace") date, author, files, comments = output.split(os.linesep, 3) if not self.usetimestamps: stamp = None else: try: stamp = float(date.split()[0]) except Exception: log.msg('hgpoller: caught exception converting output %r ' 'to timestamp' % date) raise return stamp, author.strip(), files.split(os.pathsep)[:-1], comments.strip() def _isRepositoryReady(self): """Easy to patch in tests.""" return os.path.exists(os.path.join(self._absWorkdir(), '.hg')) @defer.inlineCallbacks def _initRepository(self): """Have mercurial init the workdir as a repository (hg init) if needed. hg init will also create all needed intermediate directories. 
""" if self._isRepositoryReady(): return log.msg('hgpoller: initializing working dir from {}'.format(self.repourl)) rc = yield runprocess.run_process(self.master.reactor, [self.hgbin, 'init', self._absWorkdir()], env=os.environ, collect_stdout=False, collect_stderr=False) if rc != 0: self._stopOnFailure() raise EnvironmentError('{}: repository init failed with exit code {}'.format(self, rc)) log.msg("hgpoller: finished initializing working dir {}".format(self.workdir)) @defer.inlineCallbacks def _getChanges(self): self.lastPoll = time.time() yield self._initRepository() log.msg("{}: polling hg repo at {}".format(self, self.repourl)) command = [self.hgbin, 'pull'] for name in self.branches: command += ['-b', name] for name in self.bookmarks: command += ['-B', name] command += [self.repourl] yield runprocess.run_process(self.master.reactor, command, workdir=self._absWorkdir(), env=os.environ, collect_stdout=False, collect_stderr=False) def _getCurrentRev(self, branch='default'): """Return a deferred for current numeric rev in state db. If never has been set, current rev is None. """ return self.lastRev.get(branch, None) def _setCurrentRev(self, rev, branch='default'): """Return a deferred to set current revision in persistent state.""" self.lastRev[branch] = str(rev) return self.setState('lastRev', self.lastRev) @defer.inlineCallbacks def _getHead(self, branch): """Return a deferred for branch head revision or None. 
We'll get an error if there is no head for this branch, which is probably a good thing, since it's probably a misspelling (if really buildbotting a branch that does not have any changeset yet, one shouldn't be surprised to get errors) """ rc, stdout = yield runprocess.run_process(self.master.reactor, [self.hgbin, 'heads', branch, '--template={rev}' + os.linesep], workdir=self._absWorkdir(), env=os.environ, collect_stderr=False, stderr_is_error=True) if rc != 0: log.err("{}: could not find revision {} in repository {}".format(self, branch, self.repourl)) return None if not stdout: return None if len(stdout.split()) > 1: log.err(("{}: caught several heads in branch {} " "from repository {}. Staying at previous revision" "You should wait until the situation is normal again " "due to a merge or directly strip if remote repo " "gets stripped later.").format(self, branch, self.repourl)) return None # in case of whole reconstruction, are we sure that we'll get the # same node -> rev assignations ? return stdout.strip().decode(self.encoding) @defer.inlineCallbacks def _processChanges(self): """Send info about pulled changes to the master and record current. HgPoller does the recording by moving the working dir to the head of the branch. We don't update the tree (unnecessary treatment and waste of space) instead, we simply store the current rev number in a file. Recall that hg rev numbers are local and incremental. """ for branch in self.branches + self.bookmarks: rev = yield self._getHead(branch) if rev is None: # Nothing pulled? 
continue yield self._processBranchChanges(rev, branch) @defer.inlineCallbacks def _getRevNodeList(self, revset): rc, stdout = yield runprocess.run_process(self.master.reactor, [self.hgbin, 'log', '-r', revset, r'--template={rev}:{node}\n'], workdir=self._absWorkdir(), env=os.environ, collect_stdout=True, collect_stderr=False, stderr_is_error=True) if rc != 0: raise EnvironmentError('{}: could not get rev node list: {}'.format(self, rc)) results = stdout.decode(self.encoding) revNodeList = [rn.split(':', 1) for rn in results.strip().split()] return revNodeList @defer.inlineCallbacks def _processBranchChanges(self, new_rev, branch): prev_rev = yield self._getCurrentRev(branch) if new_rev == prev_rev: # Nothing new. return if prev_rev is None: # First time monitoring; start at the top. yield self._setCurrentRev(new_rev, branch) return # two passes for hg log makes parsing simpler (comments is multi-lines) revNodeList = yield self._getRevNodeList('{}::{}'.format(prev_rev, new_rev)) # revsets are inclusive. Strip the already-known "current" changeset. if not revNodeList: # empty revNodeList probably means the branch has changed head (strip of force push?) 
# in that case, we should still produce a change for that new rev (but we can't know # how many parents were pushed) revNodeList = yield self._getRevNodeList(new_rev) else: del revNodeList[0] log.msg('hgpoller: processing %d changes in branch %r: %r in %r' % (len(revNodeList), branch, revNodeList, self._absWorkdir())) for rev, node in revNodeList: timestamp, author, files, comments = yield self._getRevDetails( node) yield self.master.data.updates.addChange( author=author, committer=None, revision=str(node), revlink=self.revlink_callable(branch, str(node)), files=files, comments=comments, when_timestamp=int(timestamp) if timestamp else None, branch=bytes2unicode(branch), category=bytes2unicode(self.category), project=bytes2unicode(self.project), repository=bytes2unicode(self.repourl), src='hg') # writing after addChange so that a rev is never missed, # but at once to avoid impact from later errors yield self._setCurrentRev(new_rev, branch) def _stopOnFailure(self, f): "utility method to stop the service when a failure occurs" if self.running: d = defer.maybeDeferred(self.stopService) d.addErrback(log.err, 'while stopping broken HgPoller service') return f buildbot-3.4.0/master/buildbot/changes/mail.py000066400000000000000000000454121413250514000213260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ Parse various kinds of 'CVS notify' email. """ import calendar import datetime import re import time from email import message_from_file from email.iterators import body_line_iterator from email.utils import mktime_tz from email.utils import parseaddr from email.utils import parsedate_tz from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import util from buildbot.interfaces import IChangeSource from buildbot.util.maildir import MaildirService @implementer(IChangeSource) class MaildirSource(MaildirService, util.ComparableMixin): """Generic base class for Maildir-based change sources""" compare_attrs = ("basedir", "pollinterval", "prefix") name = 'MaildirSource' def __init__(self, maildir, prefix=None, category='', repository=''): super().__init__(maildir) self.prefix = prefix self.category = category self.repository = repository if prefix and not prefix.endswith("/"): log.msg(("MaildirSource: you probably want your prefix=('{}') to end with a slash" ).format(prefix)) def describe(self): return "{} watching maildir '{}'".format(self.__class__.__name__, self.basedir) @defer.inlineCallbacks def messageReceived(self, filename): with self.moveToCurDir(filename) as f: chtuple = self.parse_file(f, self.prefix) src, chdict = None, None if chtuple: src, chdict = chtuple if chdict: yield self.master.data.updates.addChange(src=str(src), **chdict) else: log.msg("no change found in maildir file '{}'".format(filename)) def parse_file(self, fd, prefix=None): m = message_from_file(fd) return self.parse(m, prefix) class CVSMaildirSource(MaildirSource): name = "CVSMaildirSource" def __init__(self, maildir, prefix=None, category='', repository='', properties=None): super().__init__(maildir, prefix, category, repository) if properties is None: properties = {} self.properties = properties def parse(self, m, prefix=None): """Parse messages sent by the 'buildbot-cvs-mail' program. 
""" # The mail is sent from the person doing the checkin. Assume that the # local username is enough to identify them (this assumes a one-server # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS # model) name, addr = parseaddr(m["from"]) if not addr: # no From means this message isn't from buildbot-cvs-mail return None at = addr.find("@") if at == -1: author = addr # might still be useful else: author = addr[:at] author = util.bytes2unicode(author, encoding="ascii") # CVS accepts RFC822 dates. buildbot-cvs-mail adds the date as # part of the mail header, so use that. # This assumes cvs is being access via ssh or pserver, so the time # will be the CVS server's time. # calculate a "revision" based on that timestamp, or the current time # if we're unable to parse the date. log.msg('Processing CVS mail') dateTuple = parsedate_tz(m["date"]) if dateTuple is None: when = util.now() else: when = mktime_tz(dateTuple) theTime = datetime.datetime.utcfromtimestamp(float(when)) rev = theTime.strftime('%Y-%m-%d %H:%M:%S') catRE = re.compile(r'^Category:\s*(\S.*)') cvsRE = re.compile(r'^CVSROOT:\s*(\S.*)') cvsmodeRE = re.compile(r'^Cvsmode:\s*(\S.*)') filesRE = re.compile(r'^Files:\s*(\S.*)') modRE = re.compile(r'^Module:\s*(\S.*)') pathRE = re.compile(r'^Path:\s*(\S.*)') projRE = re.compile(r'^Project:\s*(\S.*)') singleFileRE = re.compile(r'(.*) (NONE|\d(\.|\d)+) (NONE|\d(\.|\d)+)') tagRE = re.compile(r'^\s+Tag:\s*(\S.*)') updateRE = re.compile(r'^Update of:\s*(\S.*)') comments = "" branch = None cvsroot = None fileList = None files = [] isdir = 0 path = None project = None lines = list(body_line_iterator(m)) while lines: line = lines.pop(0) m = catRE.match(line) if m: category = m.group(1) continue m = cvsRE.match(line) if m: cvsroot = m.group(1) continue m = cvsmodeRE.match(line) if m: cvsmode = m.group(1) continue m = filesRE.match(line) if m: fileList = m.group(1) continue m = modRE.match(line) if m: # We don't actually use this # module = m.group(1) 
continue m = pathRE.match(line) if m: path = m.group(1) continue m = projRE.match(line) if m: project = m.group(1) continue m = tagRE.match(line) if m: branch = m.group(1) continue m = updateRE.match(line) if m: # We don't actually use this # updateof = m.group(1) continue if line == "Log Message:\n": break # CVS 1.11 lists files as: # repo/path file,old-version,new-version file2,old-version,new-version # Version 1.12 lists files as: # file1 old-version new-version file2 old-version new-version # # files consists of tuples of 'file-name old-version new-version' # The versions are either dotted-decimal version numbers, ie 1.1 # or NONE. New files are of the form 'NONE NUMBER', while removed # files are 'NUMBER NONE'. 'NONE' is a literal string # Parsing this instead of files list in 'Added File:' etc # makes it possible to handle files with embedded spaces, though # it could fail if the filename was 'bad 1.1 1.2' # For cvs version 1.11, we expect # my_module new_file.c,NONE,1.1 # my_module removed.txt,1.2,NONE # my_module modified_file.c,1.1,1.2 # While cvs version 1.12 gives us # new_file.c NONE 1.1 # removed.txt 1.2 NONE # modified_file.c 1.1,1.2 if fileList is None: log.msg('CVSMaildirSource Mail with no files. Ignoring') return None # We don't have any files. Email not from CVS if cvsmode == '1.11': # Please, no repo paths with spaces! m = re.search('([^ ]*) ', fileList) if m: path = m.group(1) else: log.msg( 'CVSMaildirSource can\'t get path from file list. Ignoring mail') return None fileList = fileList[len(path):].strip() singleFileRE = re.compile( r'(.+?),(NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+)),(NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+))(?: |$)') # noqa pylint: disable=line-too-long elif cvsmode == '1.12': singleFileRE = re.compile( r'(.+?) (NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+)) (NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+))(?: |$)') # noqa pylint: disable=line-too-long if path is None: raise ValueError( 'CVSMaildirSource cvs 1.12 require path. 
Check cvs loginfo config') else: raise ValueError('Expected cvsmode 1.11 or 1.12. got: {}'.format(cvsmode)) log.msg("CVSMaildirSource processing filelist: {}".format(fileList)) while(fileList): m = singleFileRE.match(fileList) if m: curFile = path + '/' + m.group(1) files.append(curFile) fileList = fileList[m.end():] else: log.msg('CVSMaildirSource no files matched regex. Ignoring') return None # bail - we couldn't parse the files that changed # Now get comments while lines: line = lines.pop(0) comments += line comments = comments.rstrip() + "\n" if comments == '\n': comments = None return ('cvs', dict(author=author, committer=None, files=files, comments=comments, isdir=isdir, when=when, branch=branch, revision=rev, category=category, repository=cvsroot, project=project, properties=self.properties)) # svn "commit-email.pl" handler. The format is very similar to freshcvs mail; # here's a sample: # From: username [at] apache.org [slightly obfuscated to avoid spam here] # To: commits [at] spamassassin.apache.org # Subject: svn commit: r105955 - in spamassassin/trunk: . lib/Mail # ... # # Author: username # Date: Sat Nov 20 00:17:49 2004 [note: TZ = local tz on server!] # New Revision: 105955 # # Modified: [also Removed: and Added:] # [filename] # ... # Log: # [log message] # ... # # # Modified: spamassassin/trunk/lib/Mail/SpamAssassin.pm # [unified diff] # # [end of mail] class SVNCommitEmailMaildirSource(MaildirSource): name = "SVN commit-email.pl" def parse(self, m, prefix=None): """Parse messages sent by the svn 'commit-email.pl' trigger. """ # The mail is sent from the person doing the checkin. 
Assume that the # local username is enough to identify them (this assumes a one-server # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS # model) name, addr = parseaddr(m["from"]) if not addr: return None # no From means this message isn't from svn at = addr.find("@") if at == -1: author = addr # might still be useful else: author = addr[:at] # we take the time of receipt as the time of checkin. Not correct (it # depends upon the email latency), but it avoids the # out-of-order-changes issue. Also syncmail doesn't give us anything # better to work with, unless you count pulling the v1-vs-v2 # timestamp out of the diffs, which would be ugly. TODO: Pulling the # 'Date:' header from the mail is a possibility, and # email.utils.parsedate_tz may be useful. It should be configurable, # however, because there are a lot of broken clocks out there. when = util.now() files = [] comments = "" lines = list(body_line_iterator(m)) rev = None while lines: line = lines.pop(0) # "Author: jmason" match = re.search(r"^Author: (\S+)", line) if match: author = match.group(1) # "New Revision: 105955" match = re.search(r"^New Revision: (\d+)", line) if match: rev = match.group(1) # possible TODO: use "Date: ..." data here instead of time of # commit message receipt, above. however, this timestamp is # specified *without* a timezone, in the server's local TZ, so to # be accurate buildbot would need a config setting to specify the # source server's expected TZ setting! messy. 
# this stanza ends with the "Log:" if (line == "Log:\n"): break # commit message is terminated by the file-listing section while lines: line = lines.pop(0) if line in ("Modified:\n", "Added:\n", "Removed:\n"): break comments += line comments = comments.rstrip() + "\n" while lines: line = lines.pop(0) if line == "\n": break if line.find("Modified:\n") == 0: continue # ignore this line if line.find("Added:\n") == 0: continue # ignore this line if line.find("Removed:\n") == 0: continue # ignore this line line = line.strip() thesefiles = line.split(" ") for f in thesefiles: if prefix: # insist that the file start with the prefix: we may get # changes we don't care about too if f.startswith(prefix): f = f[len(prefix):] else: log.msg(("ignored file from svn commit: prefix '{}' " "does not match filename '{}'").format(prefix, f)) continue # TODO: figure out how new directories are described, set # .isdir files.append(f) if not files: log.msg("no matching files found, ignoring commit") return None return ('svn', dict(author=author, committer=None, files=files, comments=comments, when=when, revision=rev)) # bzr Launchpad branch subscription mails. Sample mail: # # From: noreply@launchpad.net # Subject: [Branch ~knielsen/maria/tmp-buildbot-test] Rev 2701: test add file # To: Joe # ... # # ------------------------------------------------------------ # revno: 2701 # committer: Joe # branch nick: tmpbb # timestamp: Fri 2009-05-15 10:35:43 +0200 # message: # test add file # added: # test-add-file # # # -- # # https://code.launchpad.net/~knielsen/maria/tmp-buildbot-test # # You are subscribed to branch lp:~knielsen/maria/tmp-buildbot-test. # To unsubscribe from this branch go to # https://code.launchpad.net/~knielsen/maria/tmp-buildbot-test/+edit-subscription. 
# # [end of mail] class BzrLaunchpadEmailMaildirSource(MaildirSource): name = "Launchpad" compare_attrs = ("branchMap", "defaultBranch") def __init__(self, maildir, prefix=None, branchMap=None, defaultBranch=None, **kwargs): self.branchMap = branchMap self.defaultBranch = defaultBranch super().__init__(maildir, prefix, **kwargs) def parse(self, m, prefix=None): """Parse branch notification messages sent by Launchpad. """ subject = m["subject"] match = re.search(r"^\s*\[Branch\s+([^]]+)\]", subject) if match: repository = match.group(1) else: repository = None # Put these into a dictionary, otherwise we cannot assign them # from nested function definitions. d = {'files': [], 'comments': ""} gobbler = None rev = None author = None when = util.now() def gobble_comment(s): d['comments'] += s + "\n" def gobble_removed(s): d['files'].append('{} REMOVED'.format(s)) def gobble_added(s): d['files'].append('{} ADDED'.format(s)) def gobble_modified(s): d['files'].append('{} MODIFIED'.format(s)) def gobble_renamed(s): match = re.search(r"^(.+) => (.+)$", s) if match: d['files'].append('{} RENAMED {}'.format(match.group(1), match.group(2))) else: d['files'].append('{} RENAMED'.format(s)) lines = list(body_line_iterator(m, True)) rev = None while lines: line = str(lines.pop(0), "utf-8", errors="ignore") # revno: 101 match = re.search(r"^revno: ([0-9.]+)", line) if match: rev = match.group(1) # committer: Joe match = re.search(r"^committer: (.*)$", line) if match: author = match.group(1) # timestamp: Fri 2009-05-15 10:35:43 +0200 # datetime.strptime() is supposed to support %z for time zone, but # it does not seem to work. So handle the time zone manually. 
match = re.search( r"^timestamp: [a-zA-Z]{3} (\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) ([-+])(\d{2})(\d{2})$", line) # noqa pylint: disable=line-too-long if match: datestr = match.group(1) tz_sign = match.group(2) tz_hours = match.group(3) tz_minutes = match.group(4) when = parseLaunchpadDate( datestr, tz_sign, tz_hours, tz_minutes) if re.search(r"^message:\s*$", line): gobbler = gobble_comment elif re.search(r"^removed:\s*$", line): gobbler = gobble_removed elif re.search(r"^added:\s*$", line): gobbler = gobble_added elif re.search(r"^renamed:\s*$", line): gobbler = gobble_renamed elif re.search(r"^modified:\s*$", line): gobbler = gobble_modified elif re.search(r"^ ", line) and gobbler: gobbler(line[2:-1]) # Use :-1 to gobble trailing newline # Determine the name of the branch. branch = None if self.branchMap and repository: if repository in self.branchMap: branch = self.branchMap[repository] elif ("lp:" + repository) in self.branchMap: branch = self.branchMap['lp:' + repository] if not branch: if self.defaultBranch: branch = self.defaultBranch else: if repository: branch = 'lp:' + repository else: branch = None if rev and author: return ('bzr', dict(author=author, committer=None, files=d['files'], comments=d['comments'], when=when, revision=rev, branch=branch, repository=repository or '')) return None def parseLaunchpadDate(datestr, tz_sign, tz_hours, tz_minutes): time_no_tz = calendar.timegm(time.strptime(datestr, "%Y-%m-%d %H:%M:%S")) tz_delta = 60 * 60 * int(tz_sign + tz_hours) + 60 * int(tz_minutes) return time_no_tz - tz_delta buildbot-3.4.0/master/buildbot/changes/manager.py000066400000000000000000000016611413250514000220140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.process.measured_service import MeasuredBuildbotServiceManager class ChangeManager(MeasuredBuildbotServiceManager): name = "ChangeManager" managed_services_name = "changesources" config_attr = "change_sources" buildbot-3.4.0/master/buildbot/changes/p4poller.py000066400000000000000000000342201413250514000221400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members # Portions Copyright 2011 National Instruments # Many thanks to Dave Peticolas for contributing this module import datetime import os import re import dateutil.tz from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from twisted.python import log from buildbot import config from buildbot import util from buildbot.changes import base from buildbot.util import bytes2unicode from buildbot.util import runprocess debug_logging = False class P4PollerError(Exception): """Something went wrong with the poll. This is used as a distinctive exception type so that unit tests can detect and ignore it.""" class TicketLoginProtocol(protocol.ProcessProtocol): """ Twisted process protocol to run `p4 login` and enter our password in the stdin.""" def __init__(self, stdin, p4base): self.deferred = defer.Deferred() self.stdin = stdin.encode('ascii') self.stdout = b'' self.stderr = b'' self.p4base = p4base def connectionMade(self): if self.stdin: if debug_logging: log.msg("P4Poller: entering password for {}: {}".format(self.p4base, self.stdin)) self.transport.write(self.stdin) self.transport.closeStdin() def processEnded(self, reason): if debug_logging: log.msg("P4Poller: login process finished for {}: {}".format(self.p4base, reason.value.exitCode)) self.deferred.callback(reason.value.exitCode) def outReceived(self, data): if debug_logging: log.msg("P4Poller: login stdout for {}: {}".format(self.p4base, data)) self.stdout += data def errReceived(self, data): if debug_logging: log.msg("P4Poller: login stderr for {}: {}".format(self.p4base, data)) self.stderr += data def get_simple_split(branchfile): """Splits the branchfile argument and assuming branch is the first path component in branchfile, will return branch and file else None.""" index = branchfile.find('/') if index == -1: return None, None branch, file = branchfile.split('/', 1) return branch, file class 
P4Source(base.ReconfigurablePollingChangeSource, util.ComparableMixin): """This source will poll a perforce repository for changes and submit them to the change master.""" compare_attrs = ("p4port", "p4user", "p4passwd", "p4base", "p4bin", "pollInterval", "pollAtLaunch", "server_tz", "pollRandomDelayMin", "pollRandomDelayMax") env_vars = ["P4CLIENT", "P4PORT", "P4PASSWD", "P4USER", "P4CHARSET", "P4CONFIG", "P4TICKETS", "PATH", "HOME"] changes_line_re = re.compile( r"Change (?P\d+) on \S+ by \S+@\S+ '.*'$") describe_header_re = re.compile( r"Change \d+ by (?P\S+)@\S+ on (?P.+)$") file_re = re.compile(r"^\.\.\. (?P[^#]+)#\d+ [/\w]+$") datefmt = '%Y/%m/%d %H:%M:%S' parent = None # filled in when we're added last_change = None loop = None def __init__(self, **kwargs): name = kwargs.get("name", None) if name is None: kwargs['name'] = self.build_name(name, kwargs.get('p4port', None), kwargs.get('p4base', '//')) super().__init__(**kwargs) def checkConfig(self, p4port=None, p4user=None, p4passwd=None, p4base="//", p4bin="p4", split_file=lambda branchfile: (None, branchfile), pollInterval=60 * 10, histmax=None, pollinterval=-2, encoding="utf8", project=None, name=None, use_tickets=False, ticket_login_interval=60 * 60 * 24, server_tz=None, pollAtLaunch=False, revlink=lambda branch, revision: (""), resolvewho=lambda who: (who), pollRandomDelayMin=0, pollRandomDelayMax=0): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval name = self.build_name(name, p4port, p4base) if use_tickets and not p4passwd: config.error("You need to provide a P4 password to use ticket authentication") if not callable(revlink): config.error("You need to provide a valid callable for revlink") if not callable(resolvewho): config.error("You need to provide a valid callable for resolvewho") if server_tz is not None and dateutil.tz.gettz(server_tz) is None: raise P4PollerError(("Failed to get timezone from server_tz string '{}'" 
).format(server_tz)) super().checkConfig(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch, pollRandomDelayMin=pollRandomDelayMin, pollRandomDelayMax=pollRandomDelayMax) @defer.inlineCallbacks def reconfigService(self, p4port=None, p4user=None, p4passwd=None, p4base="//", p4bin="p4", split_file=lambda branchfile: (None, branchfile), pollInterval=60 * 10, histmax=None, pollinterval=-2, encoding="utf8", project=None, name=None, use_tickets=False, ticket_login_interval=60 * 60 * 24, server_tz=None, pollAtLaunch=False, revlink=lambda branch, revision: (""), resolvewho=lambda who: (who), pollRandomDelayMin=0, pollRandomDelayMax=0): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval name = self.build_name(name, p4port, p4base) if project is None: project = '' self.p4port = p4port self.p4user = p4user self.p4passwd = p4passwd self.p4base = p4base self.p4bin = p4bin self.split_file = split_file self.encoding = encoding self.project = util.bytes2unicode(project) self.use_tickets = use_tickets self.ticket_login_interval = ticket_login_interval self.revlink_callable = revlink self.resolvewho_callable = resolvewho self.server_tz = dateutil.tz.gettz(server_tz) if server_tz else None self._ticket_login_counter = 0 yield super().reconfigService(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch, pollRandomDelayMin=pollRandomDelayMin, pollRandomDelayMax=pollRandomDelayMax) def build_name(self, name, p4port, p4base): if name is not None: return name return "P4Source:{}:{}".format(p4port, p4base) def describe(self): return "p4source {} {}".format(self.p4port, self.p4base) def poll(self): d = self._poll() d.addErrback(log.err, 'P4 poll failed on {}, {}'.format(self.p4port, self.p4base)) return d @defer.inlineCallbacks def _get_process_output(self, args): env = {e: os.environ.get(e) for e in self.env_vars if os.environ.get(e)} res, out = yield runprocess.run_process(self.master.reactor, 
[self.p4bin] + args, env=env, collect_stderr=False, stderr_is_error=True) if res != 0: raise P4PollerError('Failed to run {}'.format(self.p4bin)) return out def _acquireTicket(self, protocol): command = [self.p4bin, ] if self.p4port: command.extend(['-p', self.p4port]) if self.p4user: command.extend(['-u', self.p4user]) command.append('login') command = [c.encode('utf-8') for c in command] reactor.spawnProcess(protocol, self.p4bin, command, env=os.environ) @defer.inlineCallbacks def _poll(self): if self.use_tickets: self._ticket_login_counter -= 1 if self._ticket_login_counter <= 0: # Re-acquire the ticket and reset the counter. log.msg("P4Poller: (re)acquiring P4 ticket for {}...".format(self.p4base)) protocol = TicketLoginProtocol( self.p4passwd + "\n", self.p4base) self._acquireTicket(protocol) yield protocol.deferred args = [] if self.p4port: args.extend(['-p', self.p4port]) if not self.use_tickets: if self.p4user: args.extend(['-u', self.p4user]) if self.p4passwd: args.extend(['-P', self.p4passwd]) args.extend(['changes']) if self.last_change is not None: args.extend(['{}...@{},#head'.format(self.p4base, self.last_change + 1)]) else: args.extend(['-m', '1', '{}...'.format(self.p4base,)]) result = yield self._get_process_output(args) # decode the result from its designated encoding try: result = bytes2unicode(result, self.encoding) except UnicodeError as ex: log.msg("{}: cannot fully decode {} in {}".format( ex, repr(result), self.encoding)) result = bytes2unicode(result, encoding=self.encoding, errors="replace") last_change = self.last_change changelists = [] for line in result.split('\n'): line = line.strip() if not line: continue m = self.changes_line_re.match(line) if not m: raise P4PollerError( "Unexpected 'p4 changes' output: %r" % result) num = int(m.group('num')) if last_change is None: # first time through, the poller just gets a "baseline" for where to # start on the next poll log.msg('P4Poller: starting at change %d' % num) self.last_change = num 
return changelists.append(num) changelists.reverse() # oldest first # Retrieve each sequentially. for num in changelists: args = [] if self.p4port: args.extend(['-p', self.p4port]) if not self.use_tickets: if self.p4user: args.extend(['-u', self.p4user]) if self.p4passwd: args.extend(['-P', self.p4passwd]) args.extend(['describe', '-s', str(num)]) result = yield self._get_process_output(args) # decode the result from its designated encoding try: result = bytes2unicode(result, self.encoding) except UnicodeError as ex: log.msg("P4Poller: couldn't decode changelist description: {}".format(ex.encoding)) log.msg("P4Poller: in object: {}".format(ex.object)) log.err("P4Poller: poll failed on {}, {}".format(self.p4port, self.p4base)) raise lines = result.split('\n') # SF#1555985: Wade Brainerd reports a stray ^M at the end of the date # field. The rstrip() is intended to remove that. lines[0] = lines[0].rstrip() m = self.describe_header_re.match(lines[0]) if not m: raise P4PollerError( "Unexpected 'p4 describe -s' result: %r" % result) who = self.resolvewho_callable(m.group('who')) when = datetime.datetime.strptime(m.group('when'), self.datefmt) if self.server_tz: # Convert from the server's timezone to the local timezone. 
when = when.replace(tzinfo=self.server_tz) when = util.datetime2epoch(when) comment_lines = [] lines.pop(0) # describe header lines.pop(0) # blank line while not lines[0].startswith('Affected files'): if lines[0].startswith('\t'): # comment is indented by one tab comment_lines.append(lines.pop(0)[1:]) else: lines.pop(0) # discard non comment line comments = '\n'.join(comment_lines) lines.pop(0) # affected files branch_files = {} # dict for branch mapped to file(s) while lines: line = lines.pop(0).strip() if not line: continue m = self.file_re.match(line) if not m: raise P4PollerError("Invalid file line: %r" % line) path = m.group('path') if path.startswith(self.p4base): branch, file = self.split_file(path[len(self.p4base):]) if (branch is None and file is None): continue if branch in branch_files: branch_files[branch].append(file) else: branch_files[branch] = [file] for branch in branch_files: yield self.master.data.updates.addChange( author=who, committer=None, files=branch_files[branch], comments=comments, revision=str(num), when_timestamp=when, branch=branch, project=self.project, revlink=self.revlink_callable(branch, str(num))) self.last_change = num buildbot-3.4.0/master/buildbot/changes/pb.py000066400000000000000000000143311413250514000210010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.changes import base from buildbot.pbutil import NewCredPerspective class ChangePerspective(NewCredPerspective): def __init__(self, master, prefix): self.master = master self.prefix = prefix def attached(self, mind): return self def detached(self, mind): pass def perspective_addChange(self, changedict): log.msg("perspective_addChange called") if 'revlink' in changedict and not changedict['revlink']: changedict['revlink'] = '' if 'repository' in changedict and not changedict['repository']: changedict['repository'] = '' if 'project' in changedict and not changedict['project']: changedict['project'] = '' if 'files' not in changedict or not changedict['files']: changedict['files'] = [] if 'committer' in changedict and not changedict['committer']: changedict['committer'] = None # rename arguments to new names. Note that the client still uses the # "old" names (who, when, and isdir), as they are not deprecated yet, # although the master will accept the new names (author, # when_timestamp). After a few revisions have passed, we # can switch the client to use the new names. if 'who' in changedict: changedict['author'] = changedict['who'] del changedict['who'] if 'when' in changedict: changedict['when_timestamp'] = changedict['when'] del changedict['when'] # turn any bytestring keys into unicode, assuming utf8 but just # replacing unknown characters. Ideally client would send us unicode # in the first place, but older clients do not, so this fallback is # useful. 
for key in changedict: if isinstance(changedict[key], bytes): changedict[key] = changedict[key].decode('utf8', 'replace') changedict['files'] = list(changedict['files']) for i, file in enumerate(changedict.get('files', [])): if isinstance(file, bytes): changedict['files'][i] = file.decode('utf8', 'replace') files = [] for path in changedict['files']: if self.prefix: if not path.startswith(self.prefix): # this file does not start with the prefix, so ignore it continue path = path[len(self.prefix):] files.append(path) changedict['files'] = files if not files: log.msg("No files listed in change... bit strange, but not fatal.") if "links" in changedict: log.msg("Found links: " + repr(changedict['links'])) del changedict['links'] d = self.master.data.updates.addChange(**changedict) # set the return value to None, so we don't get users depending on # getting a changeid d.addCallback(lambda _: None) return d class PBChangeSource(base.ChangeSource): compare_attrs = ("user", "passwd", "port", "prefix", "port") def __init__(self, user="change", passwd="changepw", port=None, prefix=None, name=None): if name is None: if prefix: name = "PBChangeSource:{}:{}".format(prefix, port) else: name = "PBChangeSource:{}".format(port) super().__init__(name=name) self.user = user self.passwd = passwd self.port = port self.prefix = prefix self.registration = None self.registered_port = None def describe(self): portname = self.registered_port d = "PBChangeSource listener on " + str(portname) if self.prefix is not None: d += " (prefix '{}')".format(self.prefix) return d def _calculatePort(self, cfg): # calculate the new port, defaulting to the worker's PB port if # none was specified port = self.port if port is None: port = cfg.protocols.get('pb', {}).get('port') return port @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): port = self._calculatePort(new_config) if not port: config.error("No port specified for PBChangeSource, and no " "worker port configured") # 
and, if it's changed, re-register if port != self.registered_port and self.isActive(): yield self._unregister() yield self._register(port) yield super().reconfigServiceWithBuildbotConfig(new_config) @defer.inlineCallbacks def activate(self): port = self._calculatePort(self.master.config) yield self._register(port) def deactivate(self): return self._unregister() @defer.inlineCallbacks def _register(self, port): if not port: return self.registered_port = port self.registration = yield self.master.pbmanager.register(port, self.user, self.passwd, self.getPerspective) def _unregister(self): self.registered_port = None if self.registration: reg = self.registration self.registration = None return reg.unregister() return defer.succeed(None) def getPerspective(self, mind, username): assert username == self.user return ChangePerspective(self.master, self.prefix) buildbot-3.4.0/master/buildbot/changes/svnpoller.py000066400000000000000000000454661413250514000224410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # Based on the work of Dave Peticolas for the P4poll # Changed to svn (using xml.dom.minidom) by Niklaus Giger # Hacked beyond recognition by Brian Warner import os import xml.dom.minidom from urllib.parse import quote_plus as urlquote_plus from twisted.internet import defer from twisted.python import log from buildbot import util from buildbot.changes import base from buildbot.util import bytes2unicode from buildbot.util import runprocess # these split_file_* functions are available for use as values to the # split_file= argument. def split_file_alwaystrunk(path): return dict(path=path) def split_file_branches(path): # turn "trunk/subdir/file.c" into (None, "subdir/file.c") # and "trunk/subdir/" into (None, "subdir/") # and "trunk/" into (None, "") # and "branches/1.5.x/subdir/file.c" into ("branches/1.5.x", "subdir/file.c") # and "branches/1.5.x/subdir/" into ("branches/1.5.x", "subdir/") # and "branches/1.5.x/" into ("branches/1.5.x", "") pieces = path.split('/') if len(pieces) > 1 and pieces[0] == 'trunk': return (None, '/'.join(pieces[1:])) elif len(pieces) > 2 and pieces[0] == 'branches': return ('/'.join(pieces[0:2]), '/'.join(pieces[2:])) return None def split_file_projects_branches(path): # turn projectname/trunk/subdir/file.c into dict(project=projectname, # branch=trunk, path=subdir/file.c) if "/" not in path: return None project, path = path.split("/", 1) f = split_file_branches(path) if f: info = dict(project=project, path=f[1]) if f[0]: info['branch'] = f[0] return info return f class SVNPoller(base.ReconfigurablePollingChangeSource, util.ComparableMixin): """ Poll a Subversion repository for changes and submit them to the change master. 
""" compare_attrs = ("repourl", "split_file", "svnuser", "svnpasswd", "project", "pollInterval", "histmax", "svnbin", "category", "cachepath", "pollAtLaunch", "pollRandomDelayMin", "pollRandomDelayMax") secrets = ("svnuser", "svnpasswd") parent = None # filled in when we're added last_change = None loop = None def __init__(self, repourl, **kwargs): name = kwargs.get('name', None) if name is None: kwargs['name'] = repourl super().__init__(repourl, **kwargs) def checkConfig(self, repourl, split_file=None, svnuser=None, svnpasswd=None, pollInterval=10 * 60, histmax=100, svnbin="svn", revlinktmpl="", category=None, project="", cachepath=None, pollinterval=-2, extra_args=None, name=None, pollAtLaunch=False, pollRandomDelayMin=0, pollRandomDelayMax=0): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval if name is None: name = repourl super().checkConfig(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch, pollRandomDelayMin=pollRandomDelayMin, pollRandomDelayMax=pollRandomDelayMax) @defer.inlineCallbacks def reconfigService(self, repourl, split_file=None, svnuser=None, svnpasswd=None, pollInterval=10 * 60, histmax=100, svnbin="svn", revlinktmpl="", category=None, project="", cachepath=None, pollinterval=-2, extra_args=None, name=None, pollAtLaunch=False, pollRandomDelayMin=0, pollRandomDelayMax=0): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval if name is None: name = repourl if repourl.endswith("/"): repourl = repourl[:-1] # strip the trailing slash self.repourl = repourl self.extra_args = extra_args self.split_file = split_file or split_file_alwaystrunk self.svnuser = svnuser self.svnpasswd = svnpasswd self.revlinktmpl = revlinktmpl # include environment variables required for ssh-agent auth self.environ = os.environ.copy() self.svnbin = svnbin self.histmax = histmax self._prefix = None self.category = category 
if callable( category) else util.bytes2unicode(category) self.project = util.bytes2unicode(project) self.cachepath = cachepath if self.cachepath and os.path.exists(self.cachepath): try: with open(self.cachepath, "r") as f: self.last_change = int(f.read().strip()) log.msg(("SVNPoller: SVNPoller({}) setting last_change to {}" ).format(self.repourl, self.last_change)) # try writing it, too with open(self.cachepath, "w") as f: f.write(str(self.last_change)) except Exception: self.cachepath = None log.msg(("SVNPoller: SVNPoller({}) cache file corrupt or unwriteable; " + "skipping and not using").format(self.repourl)) log.err() yield super().reconfigService(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch, pollRandomDelayMin=pollRandomDelayMin, pollRandomDelayMax=pollRandomDelayMax) def describe(self): return "SVNPoller: watching {}".format(self.repourl) def poll(self): # Our return value is only used for unit testing. # we need to figure out the repository root, so we can figure out # repository-relative pathnames later. Each REPOURL is in the form # (ROOT)/(PROJECT)/(BRANCH)/(FILEPATH), where (ROOT) is something # like svn://svn.twistedmatrix.com/svn/Twisted (i.e. there is a # physical repository at /svn/Twisted on that host), (PROJECT) is # something like Projects/Twisted (i.e. within the repository's # internal namespace, everything under Projects/Twisted/ has # something to do with Twisted, but these directory names do not # actually appear on the repository host), (BRANCH) is something like # "trunk" or "branches/2.0.x", and (FILEPATH) is a tree-relative # filename like "twisted/internet/defer.py". # our self.repourl attribute contains (ROOT)/(PROJECT) combined # together in a way that we can't separate without svn's help. If the # user is not using the split_file= argument, then self.repourl might # be (ROOT)/(PROJECT)/(BRANCH) . 
In any case, the filenames we will # get back from 'svn log' will be of the form # (PROJECT)/(BRANCH)/(FILEPATH), but we want to be able to remove # that (PROJECT) prefix from them. To do this without requiring the # user to tell us how repourl is split into ROOT and PROJECT, we do an # 'svn info --xml' command at startup. This command will include a # element that tells us ROOT. We then strip this prefix from # self.repourl to determine PROJECT, and then later we strip the # PROJECT prefix from the filenames reported by 'svn log --xml' to # get a (BRANCH)/(FILEPATH) that can be passed to split_file() to # turn into separate BRANCH and FILEPATH values. # whew. if self.project: log.msg("SVNPoller: polling " + self.project) else: log.msg("SVNPoller: polling") d = defer.succeed(None) if not self._prefix: d.addCallback(lambda _: self.get_prefix()) @d.addCallback def set_prefix(prefix): self._prefix = prefix d.addCallback(self.get_logs) d.addCallback(self.parse_logs) d.addCallback(self.get_new_logentries) d.addCallback(self.create_changes) d.addCallback(self.submit_changes) d.addCallback(self.finished_ok) # eat errors d.addErrback(log.err, 'SVNPoller: Error in while polling') return d @defer.inlineCallbacks def get_prefix(self): command = [self.svnbin, "info", "--xml", "--non-interactive", self.repourl] if self.svnuser: command.append("--username={}".format(self.svnuser)) if self.svnpasswd is not None: command.append("--password={}".format(self.svnpasswd)) if self.extra_args: command.extend(self.extra_args) rc, output = yield runprocess.run_process(self.master.reactor, command, env=self.environ, collect_stderr=False, stderr_is_error=True) if rc != 0: raise EnvironmentError('{}: Got error when retrieving svn prefix'.format(self)) try: doc = xml.dom.minidom.parseString(output) except xml.parsers.expat.ExpatError: log.msg("SVNPoller: SVNPoller.get_prefix: ExpatError in '{}'".format(output)) raise rootnodes = doc.getElementsByTagName("root") if not rootnodes: # this happens 
if the URL we gave was already the root. In this # case, our prefix is empty. self._prefix = "" return self._prefix rootnode = rootnodes[0] root = "".join([c.data for c in rootnode.childNodes]) # root will be a unicode string if not self.repourl.startswith(root): log.msg(format="Got root %(root)r from `svn info`, but it is " "not a prefix of the configured repourl", repourl=self.repourl, root=root) raise RuntimeError("Configured repourl doesn't match svn root") prefix = self.repourl[len(root):] if prefix.startswith("/"): prefix = prefix[1:] log.msg("SVNPoller: repourl={}, root={}, so prefix={}".format(self.repourl, root, prefix)) return prefix @defer.inlineCallbacks def get_logs(self, _): command = [self.svnbin, "log", "--xml", "--verbose", "--non-interactive"] if self.svnuser: command.extend(["--username={}".format(self.svnuser)]) if self.svnpasswd is not None: command.extend(["--password={}".format(self.svnpasswd)]) if self.extra_args: command.extend(self.extra_args) command.extend(["--limit=%d" % (self.histmax), self.repourl]) rc, output = yield runprocess.run_process(self.master.reactor, command, env=self.environ, collect_stderr=False, stderr_is_error=True) if rc != 0: raise EnvironmentError('{}: Got error when retrieving svn logs'.format(self)) return output def parse_logs(self, output): # parse the XML output, return a list of nodes try: doc = xml.dom.minidom.parseString(output) except xml.parsers.expat.ExpatError: log.msg("SVNPoller: SVNPoller.parse_logs: ExpatError in '{}'".format(output)) raise logentries = doc.getElementsByTagName("logentry") return logentries def get_new_logentries(self, logentries): last_change = old_last_change = self.last_change # given a list of logentries, calculate new_last_change, and # new_logentries, where new_logentries contains only the ones after # last_change new_last_change = None new_logentries = [] if logentries: new_last_change = int(logentries[0].getAttribute("revision")) if last_change is None: # if this is the first 
time we've been run, ignore any changes # that occurred before now. This prevents a build at every # startup. log.msg('SVNPoller: starting at change {}'.format(new_last_change)) elif last_change == new_last_change: # an unmodified repository will hit this case log.msg('SVNPoller: no changes') else: for el in logentries: if last_change == int(el.getAttribute("revision")): break new_logentries.append(el) new_logentries.reverse() # return oldest first self.last_change = new_last_change log.msg('SVNPoller: _process_changes {} .. {}'.format(old_last_change, new_last_change)) return new_logentries def _get_text(self, element, tag_name): try: child_nodes = element.getElementsByTagName(tag_name)[0].childNodes text = "".join([t.data for t in child_nodes]) except IndexError: text = "unknown" return text def _transform_path(self, path): if not path.startswith(self._prefix): log.msg(format="SVNPoller: ignoring path '%(path)s' which doesn't" "start with prefix '%(prefix)s'", path=path, prefix=self._prefix) return None relative_path = path[len(self._prefix):] if relative_path.startswith("/"): relative_path = relative_path[1:] where = self.split_file(relative_path) # 'where' is either None, (branch, final_path) or a dict if not where: return None if isinstance(where, tuple): where = dict(branch=where[0], path=where[1]) return where def create_changes(self, new_logentries): changes = [] for el in new_logentries: revision = str(el.getAttribute("revision")) revlink = '' if self.revlinktmpl and revision: revlink = self.revlinktmpl % urlquote_plus(revision) revlink = str(revlink) log.msg("Adding change revision {}".format(revision)) author = self._get_text(el, "author") comments = self._get_text(el, "msg") # there is a "date" field, but it provides localtime in the # repository's timezone, whereas we care about buildmaster's # localtime (since this will get used to position the boxes on # the Waterfall display, etc). 
So ignore the date field, and # addChange will fill in with the current time branches = {} try: pathlist = el.getElementsByTagName("paths")[0] except IndexError: # weird, we got an empty revision log.msg("ignoring commit with no paths") continue for p in pathlist.getElementsByTagName("path"): kind = p.getAttribute("kind") action = p.getAttribute("action") path = "".join([t.data for t in p.childNodes]) if path.startswith("/"): path = path[1:] if kind == "dir" and not path.endswith("/"): path += "/" where = self._transform_path(path) # if 'where' is None, the file was outside any project that # we care about and we should ignore it if where: branch = where.get("branch", None) filename = where["path"] if branch not in branches: branches[branch] = { 'files': [], 'number_of_directories': 0} if filename == "": # root directory of branch branches[branch]['files'].append(filename) branches[branch]['number_of_directories'] += 1 elif filename.endswith("/"): # subdirectory of branch branches[branch]['files'].append(filename[:-1]) branches[branch]['number_of_directories'] += 1 else: branches[branch]['files'].append(filename) if "action" not in branches[branch]: branches[branch]['action'] = action for key in ("repository", "project", "codebase"): if key in where: branches[branch][key] = where[key] for branch in branches: action = branches[branch]['action'] files = branches[branch]['files'] number_of_directories_changed = branches[ branch]['number_of_directories'] number_of_files_changed = len(files) if (action == 'D' and number_of_directories_changed == 1 and number_of_files_changed == 1 and files[0] == ''): log.msg("Ignoring deletion of branch '{}'".format(branch)) else: chdict = dict( author=author, committer=None, # weakly assume filenames are utf-8 files=[bytes2unicode(f, 'utf-8', 'replace') for f in files], comments=comments, revision=revision, branch=util.bytes2unicode(branch), revlink=revlink, category=self.category, repository=util.bytes2unicode( 
branches[branch].get('repository', self.repourl)), project=util.bytes2unicode( branches[branch].get('project', self.project)), codebase=util.bytes2unicode( branches[branch].get('codebase', None))) changes.append(chdict) return changes @defer.inlineCallbacks def submit_changes(self, changes): for chdict in changes: yield self.master.data.updates.addChange(src='svn', **chdict) def finished_ok(self, res): if self.cachepath: with open(self.cachepath, "w") as f: f.write(str(self.last_change)) log.msg("SVNPoller: finished polling {}".format(res)) return res buildbot-3.4.0/master/buildbot/clients/000077500000000000000000000000001413250514000200555ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/clients/__init__.py000066400000000000000000000000001413250514000221540ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/clients/sendchange.py000066400000000000000000000051301413250514000225250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.cred import credentials from twisted.internet import reactor from twisted.spread import pb from buildbot.util import unicode2bytes class Sender: def __init__(self, master, auth=('change', 'changepw'), encoding='utf8'): self.username = unicode2bytes(auth[0]) self.password = unicode2bytes(auth[1]) self.host, self.port = master.split(":") self.port = int(self.port) self.encoding = encoding def send(self, branch, revision, comments, files, who=None, category=None, when=None, properties=None, repository='', vc=None, project='', revlink='', codebase=None): if properties is None: properties = {} change = {'project': project, 'repository': repository, 'who': who, 'files': files, 'comments': comments, 'branch': branch, 'revision': revision, 'category': category, 'when': when, 'properties': properties, 'revlink': revlink, 'src': vc} # codebase is only sent if set; this won't work with masters older than # 0.8.7 if codebase: change['codebase'] = codebase for key in change: if isinstance(change[key], bytes): change[key] = change[key].decode(self.encoding, 'replace') change['files'] = list(change['files']) for i, file in enumerate(change.get('files', [])): if isinstance(file, bytes): change['files'][i] = file.decode(self.encoding, 'replace') f = pb.PBClientFactory() d = f.login(credentials.UsernamePassword(self.username, self.password)) reactor.connectTCP(self.host, self.port, f) @d.addCallback def call_addChange(remote): d = remote.callRemote('addChange', change) d.addCallback(lambda res: remote.broker.transport.loseConnection()) return d return d buildbot-3.4.0/master/buildbot/clients/tryclient.py000066400000000000000000000760331413250514000224550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import base64 import json import os import random import re import shlex import string import sys import time from twisted.cred import credentials from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from twisted.internet import task from twisted.internet import utils from twisted.python import log from twisted.python import runtime from twisted.python.procutils import which from twisted.spread import pb from buildbot.process.results import SUCCESS from buildbot.process.results import Results from buildbot.util import bytes2unicode from buildbot.util import now from buildbot.util import unicode2bytes from buildbot.util.eventual import fireEventually class SourceStamp: def __init__(self, branch, revision, patch, repository=''): self.branch = branch self.revision = revision self.patch = patch self.repository = repository def output(*msg): print(' '.join([str(m)for m in msg])) class SourceStampExtractor: def __init__(self, treetop, branch, repository): self.treetop = treetop self.repository = repository self.branch = branch exes = which(self.vcexe) if not exes: output("Could not find executable '{}'.".format(self.vcexe)) sys.exit(1) self.exe = exes[0] def dovc(self, cmd): """This accepts the arguments of a command, without the actual command itself.""" env = os.environ.copy() env['LC_ALL'] = "C" d = utils.getProcessOutputAndValue(self.exe, cmd, env=env, path=self.treetop) d.addCallback(self._didvc, cmd) return d def _didvc(self, 
res, cmd): (stdout, stderr, code) = res # 'bzr diff' sets rc=1 if there were any differences. # cvs does something similar, so don't bother requiring rc=0. return stdout def get(self): """Return a Deferred that fires with a SourceStamp instance.""" d = self.getBaseRevision() d.addCallback(self.getPatch) d.addCallback(self.done) return d def readPatch(self, diff, patchlevel): if not diff: diff = None self.patch = (patchlevel, diff) def done(self, res): if not self.repository: self.repository = self.treetop # TODO: figure out the branch and project too ss = SourceStamp(bytes2unicode(self.branch), self.baserev, self.patch, repository=self.repository) return ss class CVSExtractor(SourceStampExtractor): patchlevel = 0 vcexe = "cvs" def getBaseRevision(self): # this depends upon our local clock and the repository's clock being # reasonably synchronized with each other. We express everything in # UTC because the '%z' format specifier for strftime doesn't always # work. self.baserev = time.strftime("%Y-%m-%d %H:%M:%S +0000", time.gmtime(now())) return defer.succeed(None) def getPatch(self, res): # the -q tells CVS to not announce each directory as it works if self.branch is not None: # 'cvs diff' won't take both -r and -D at the same time (it # ignores the -r). As best I can tell, there is no way to make # cvs give you a diff relative to a timestamp on the non-trunk # branch. A bare 'cvs diff' will tell you about the changes # relative to your checked-out versions, but I know of no way to # find out what those checked-out versions are. 
output("Sorry, CVS 'try' builds don't work with branches") sys.exit(1) args = ['-q', 'diff', '-u', '-D', self.baserev] d = self.dovc(args) d.addCallback(self.readPatch, self.patchlevel) return d class SVNExtractor(SourceStampExtractor): patchlevel = 0 vcexe = "svn" def getBaseRevision(self): d = self.dovc(["status", "-u"]) d.addCallback(self.parseStatus) return d def parseStatus(self, res): # svn shows the base revision for each file that has been modified or # which needs an update. You can update each file to a different # version, so each file is displayed with its individual base # revision. It also shows the repository-wide latest revision number # on the last line ("Status against revision: \d+"). # for our purposes, we use the latest revision number as the "base" # revision, and get a diff against that. This means we will get # reverse-diffs for local files that need updating, but the resulting # tree will still be correct. The only weirdness is that the baserev # that we emit may be different than the version of the tree that we # first checked out. # to do this differently would probably involve scanning the revision # numbers to find the max (or perhaps the min) revision, and then # using that as a base. 
for line in res.split(b"\n"): m = re.search(br'^Status against revision:\s+(\d+)', line) if m: self.baserev = m.group(1) return output( b"Could not find 'Status against revision' in SVN output: " + res) sys.exit(1) def getPatch(self, res): d = self.dovc(["diff", "-r{}".format(self.baserev)]) d.addCallback(self.readPatch, self.patchlevel) return d class BzrExtractor(SourceStampExtractor): patchlevel = 0 vcexe = "bzr" def getBaseRevision(self): d = self.dovc(["revision-info", "-rsubmit:"]) d.addCallback(self.get_revision_number) return d def get_revision_number(self, out): revno, revid = out.split() self.baserev = 'revid:' + revid return def getPatch(self, res): d = self.dovc(["diff", "-r{}..".format(self.baserev)]) d.addCallback(self.readPatch, self.patchlevel) return d class MercurialExtractor(SourceStampExtractor): patchlevel = 1 vcexe = "hg" def _didvc(self, res, cmd): (stdout, stderr, code) = res if code: cs = ' '.join(['hg'] + cmd) if stderr: stderr = '\n' + stderr.rstrip() raise RuntimeError("{} returned {} {}".format(cs, code, stderr)) return stdout @defer.inlineCallbacks def getBaseRevision(self): upstream = "" if self.repository: upstream = "r'{}'".format(self.repository) output = '' try: output = yield self.dovc(["log", "--template", "{node}\\n", "-r", "max(::. 
- outgoing({}))".format(upstream)]) except RuntimeError: # outgoing() will abort if no default-push/default path is # configured if upstream: raise # fall back to current working directory parent output = yield self.dovc(["log", "--template", "{node}\\n", "-r", "p1()"]) m = re.search(br'^(\w+)', output) if not m: raise RuntimeError( "Revision {!r} is not in the right format".format(output)) self.baserev = m.group(0) def getPatch(self, res): d = self.dovc(["diff", "-r", self.baserev]) d.addCallback(self.readPatch, self.patchlevel) return d class PerforceExtractor(SourceStampExtractor): patchlevel = 0 vcexe = "p4" def getBaseRevision(self): d = self.dovc(["changes", "-m1", "..."]) d.addCallback(self.parseStatus) return d def parseStatus(self, res): # # extract the base change number # m = re.search(br'Change (\d+)', res) if m: self.baserev = m.group(1) return output(b"Could not find change number in output: " + res) sys.exit(1) def readPatch(self, res, patchlevel): # # extract the actual patch from "res" # if not self.branch: output("you must specify a branch") sys.exit(1) mpatch = "" found = False for line in res.split("\n"): m = re.search('==== //depot/' + self.branch + r'/([\w/\.\d\-_]+)#(\d+) -', line) if m: mpatch += "--- {}#{}\n".format(m.group(1), m.group(2)) mpatch += "+++ {}\n".format(m.group(1)) found = True else: mpatch += line mpatch += "\n" if not found: output(b"could not parse patch file") sys.exit(1) self.patch = (patchlevel, unicode2bytes(mpatch)) def getPatch(self, res): d = self.dovc(["diff"]) d.addCallback(self.readPatch, self.patchlevel) return d class DarcsExtractor(SourceStampExtractor): patchlevel = 1 vcexe = "darcs" def getBaseRevision(self): d = self.dovc(["changes", "--context"]) d.addCallback(self.parseStatus) return d def parseStatus(self, res): self.baserev = res # the whole context file def getPatch(self, res): d = self.dovc(["diff", "-u"]) d.addCallback(self.readPatch, self.patchlevel) return d class GitExtractor(SourceStampExtractor): 
patchlevel = 1 vcexe = "git" config = None def getBaseRevision(self): # If a branch is specified, parse out the rev it points to # and extract the local name. if self.branch: d = self.dovc(["rev-parse", self.branch]) d.addCallback(self.override_baserev) d.addCallback(self.extractLocalBranch) return d d = self.dovc(["branch", "--no-color", "-v", "--no-abbrev"]) d.addCallback(self.parseStatus) return d # remove remote-prefix from self.branch (assumes format /) # this uses "git remote" to retrieve all configured remote names def extractLocalBranch(self, res): if '/' in self.branch: d = self.dovc(["remote"]) d.addCallback(self.fixBranch) return d return None # strip remote prefix from self.branch def fixBranch(self, remotes): for l in bytes2unicode(remotes).split("\n"): r = l.strip() if r and self.branch.startswith(r + "/"): self.branch = self.branch[len(r) + 1:] break def readConfig(self): if self.config: return defer.succeed(self.config) d = self.dovc(["config", "-l"]) d.addCallback(self.parseConfig) return d def parseConfig(self, res): self.config = {} for l in res.split(b"\n"): if l.strip(): parts = l.strip().split(b"=", 2) if len(parts) < 2: parts.append('true') self.config[parts[0]] = parts[1] return self.config def parseTrackingBranch(self, res): # If we're tracking a remote, consider that the base. remote = self.config.get(b"branch." + self.branch + b".remote") ref = self.config.get(b"branch." + self.branch + b".merge") if remote and ref: remote_branch = ref.split(b"/", 2)[-1] baserev = remote + b"/" + remote_branch else: baserev = b"master" d = self.dovc(["rev-parse", baserev]) d.addCallback(self.override_baserev) return d def override_baserev(self, res): self.baserev = bytes2unicode(res).strip() def parseStatus(self, res): # The current branch is marked by '*' at the start of the # line, followed by the branch name and the SHA1. # # Branch names may contain pretty much anything but whitespace. 
m = re.search(br'^\* (\S+)\s+([0-9a-f]{40})', res, re.MULTILINE) if m: self.baserev = m.group(2) self.branch = m.group(1) d = self.readConfig() d.addCallback(self.parseTrackingBranch) return d output(b"Could not find current GIT branch: " + res) sys.exit(1) def getPatch(self, res): d = self.dovc(["diff", "--src-prefix=a/", "--dst-prefix=b/", "--no-textconv", "--no-ext-diff", self.baserev]) d.addCallback(self.readPatch, self.patchlevel) return d class MonotoneExtractor(SourceStampExtractor): patchlevel = 0 vcexe = "mtn" def getBaseRevision(self): d = self.dovc(["automate", "get_base_revision_id"]) d.addCallback(self.parseStatus) return d def parseStatus(self, output): hash = output.strip() if len(hash) != 40: self.baserev = None self.baserev = hash def getPatch(self, res): d = self.dovc(["diff"]) d.addCallback(self.readPatch, self.patchlevel) return d def getSourceStamp(vctype, treetop, branch=None, repository=None): if vctype == "cvs": cls = CVSExtractor elif vctype == "svn": cls = SVNExtractor elif vctype == "bzr": cls = BzrExtractor elif vctype == "hg": cls = MercurialExtractor elif vctype == "p4": cls = PerforceExtractor elif vctype == "darcs": cls = DarcsExtractor elif vctype == "git": cls = GitExtractor elif vctype == "mtn": cls = MonotoneExtractor elif vctype == "none": return defer.succeed(SourceStamp("", "", (1, ""), "")) else: output("unknown vctype '{}'".format(vctype)) sys.exit(1) return cls(treetop, branch, repository).get() def ns(s): return "{}:{},".format(len(s), s) def createJobfile(jobid, branch, baserev, patch_level, patch_body, repository, project, who, comment, builderNames, properties): # Determine job file version from provided arguments try: bytes2unicode(patch_body) version = 5 except UnicodeDecodeError: version = 6 job = "" job += ns(str(version)) job_dict = { 'jobid': jobid, 'branch': branch, 'baserev': str(baserev), 'patch_level': patch_level, 'repository': repository, 'project': project, 'who': who, 'comment': comment, 'builderNames': 
builderNames, 'properties': properties, } if version > 5: job_dict['patch_body_base64'] = bytes2unicode(base64.b64encode(patch_body)) else: job_dict['patch_body'] = bytes2unicode(patch_body) job += ns(json.dumps(job_dict)) return job def getTopdir(topfile, start=None): """walk upwards from the current directory until we find this topfile""" if not start: start = os.getcwd() here = start toomany = 20 while toomany > 0: if os.path.exists(os.path.join(here, topfile)): return here next = os.path.dirname(here) if next == here: break # we've hit the root here = next toomany -= 1 output("Unable to find topfile '{}' anywhere " "from {} upwards".format(topfile, start)) sys.exit(1) class RemoteTryPP(protocol.ProcessProtocol): def __init__(self, job): self.job = job self.d = defer.Deferred() def connectionMade(self): self.transport.write(unicode2bytes(self.job)) self.transport.closeStdin() def outReceived(self, data): sys.stdout.write(bytes2unicode(data)) def errReceived(self, data): sys.stderr.write(bytes2unicode(data)) def processEnded(self, status_object): sig = status_object.value.signal rc = status_object.value.exitCode if sig is not None or rc != 0: self.d.errback(RuntimeError("remote 'buildbot tryserver' failed" ": sig={}, rc={}".format(sig, rc))) return self.d.callback((sig, rc)) class FakeBuildSetStatus: def callRemote(self, name): if name == "getBuildRequests": return defer.succeed([]) raise NotImplementedError() class Try(pb.Referenceable): buildsetStatus = None quiet = False printloop = False def __init__(self, config): self.config = config self.connect = self.getopt('connect') if self.connect not in ['ssh', 'pb']: output("you must specify a connect style: ssh or pb") sys.exit(1) self.builderNames = self.getopt('builders') self.project = self.getopt('project', '') self.who = self.getopt('who') self.comment = self.getopt('comment') def getopt(self, config_name, default=None): value = self.config.get(config_name) if value is None or value == []: value = default 
return value def createJob(self): # returns a Deferred which fires when the job parameters have been # created # generate a random (unique) string. It would make sense to add a # hostname and process ID here, but a) I suspect that would cause # windows portability problems, and b) really this is good enough self.bsid = "{}-{}".format(time.time(), random.randint(0, 1000000)) # common options branch = self.getopt("branch") difffile = self.config.get("diff") if difffile: baserev = self.config.get("baserev") if difffile == "-": diff = sys.stdin.read() else: with open(difffile, "rb") as f: diff = f.read() if not diff: diff = None patch = (self.config['patchlevel'], diff) ss = SourceStamp( branch, baserev, patch, repository=self.getopt("repository")) d = defer.succeed(ss) else: vc = self.getopt("vc") if vc in ("cvs", "svn"): # we need to find the tree-top topdir = self.getopt("topdir") if topdir: treedir = os.path.expanduser(topdir) else: topfile = self.getopt("topfile") if topfile: treedir = getTopdir(topfile) else: output("Must specify topdir or topfile.") sys.exit(1) else: treedir = os.getcwd() d = getSourceStamp(vc, treedir, branch, self.getopt("repository")) d.addCallback(self._createJob_1) return d def _createJob_1(self, ss): self.sourcestamp = ss patchlevel, diff = ss.patch if diff is None: raise RuntimeError("There is no patch to try, diff is empty.") if self.connect == "ssh": revspec = ss.revision if revspec is None: revspec = "" self.jobfile = createJobfile( self.bsid, ss.branch or "", revspec, patchlevel, diff, ss.repository, self.project, self.who, self.comment, self.builderNames, self.config.get('properties', {})) def fakeDeliverJob(self): # Display the job to be delivered, but don't perform delivery. 
ss = self.sourcestamp output("Job:\n\tRepository: {}\n\tProject: {}\n\tBranch: {}\n\t" "Revision: {}\n\tBuilders: {}\n{}".format( ss.repository, self.project, ss.branch, ss.revision, self.builderNames, ss.patch[1])) self.buildsetStatus = FakeBuildSetStatus() d = defer.Deferred() d.callback(True) return d def deliver_job_ssh(self): tryhost = self.getopt("host") tryport = self.getopt("port") tryuser = self.getopt("username") trydir = self.getopt("jobdir") buildbotbin = self.getopt("buildbotbin") ssh_command = self.getopt("ssh") if not ssh_command: ssh_commands = which("ssh") if not ssh_commands: raise RuntimeError("couldn't find ssh executable, make sure " "it is available in the PATH") argv = [ssh_commands[0]] else: # Split the string on whitespace to allow passing options in # ssh command too, but preserving whitespace inside quotes to # allow using paths with spaces in them which is common under # Windows. And because Windows uses backslashes in paths, we # can't just use shlex.split there as it would interpret them # specially, so do it by hand. if runtime.platformType == 'win32': # Note that regex here matches the arguments, not the # separators, as it's simpler to do it like this. And then we # just need to get all of them together using the slice and # also remove the quotes from those that were quoted. argv = [string.strip(a, '"') for a in re.split(r'''([^" ]+|"[^"]+")''', ssh_command)[1::2]] else: # Do use standard tokenization logic under POSIX. 
argv = shlex.split(ssh_command) if tryuser: argv += ["-l", tryuser] if tryport: argv += ["-p", tryport] argv += [tryhost, buildbotbin, "tryserver", "--jobdir", trydir] pp = RemoteTryPP(self.jobfile) reactor.spawnProcess(pp, argv[0], argv, os.environ) d = pp.d return d @defer.inlineCallbacks def deliver_job_pb(self): user = self.getopt("username") passwd = self.getopt("passwd") master = self.getopt("master") tryhost, tryport = master.split(":") tryport = int(tryport) f = pb.PBClientFactory() d = f.login(credentials.UsernamePassword(unicode2bytes(user), unicode2bytes(passwd))) reactor.connectTCP(tryhost, tryport, f) remote = yield d ss = self.sourcestamp output("Delivering job; comment=", self.comment) self.buildsetStatus = \ yield remote.callRemote("try", ss.branch, ss.revision, ss.patch, ss.repository, self.project, self.builderNames, self.who, self.comment, self.config.get('properties', {})) def deliverJob(self): # returns a Deferred that fires when the job has been delivered if self.connect == "ssh": return self.deliver_job_ssh() if self.connect == "pb": return self.deliver_job_pb() raise RuntimeError("unknown connecttype '{}', " "should be 'ssh' or 'pb'".format(self.connect)) def getStatus(self): # returns a Deferred that fires when the builds have finished, and # may emit status messages while we wait wait = bool(self.getopt("wait")) if not wait: output("not waiting for builds to finish") elif self.connect == "ssh": output("waiting for builds with ssh is not supported") else: self.running = defer.Deferred() if not self.buildsetStatus: output("try scheduler on the master does not have the builder configured") return None self._getStatus_1() # note that we don't wait for the returned Deferred if bool(self.config.get("dryrun")): self.statusDone() return self.running return None @defer.inlineCallbacks def _getStatus_1(self): # gather the set of BuildRequests brs = yield self.buildsetStatus.callRemote("getBuildRequests") self.builderNames = [] self.buildRequests = 
{} # self.builds holds the current BuildStatus object for each one self.builds = {} # self.outstanding holds the list of builderNames which haven't # finished yet self.outstanding = [] # self.results holds the list of build results. It holds a tuple of # (result, text) self.results = {} # self.currentStep holds the name of the Step that each build is # currently running self.currentStep = {} # self.ETA holds the expected finishing time (absolute time since # epoch) self.ETA = {} for n, br in brs: self.builderNames.append(n) self.buildRequests[n] = br self.builds[n] = None self.outstanding.append(n) self.results[n] = [None, None] self.currentStep[n] = None self.ETA[n] = None # get new Builds for this buildrequest. We follow each one until # it finishes or is interrupted. br.callRemote("subscribe", self) # now that those queries are in transit, we can start the # display-status-every-30-seconds loop if not self.getopt("quiet"): self.printloop = task.LoopingCall(self.printStatus) self.printloop.start(3, now=False) # these methods are invoked by the status objects we've subscribed to def remote_newbuild(self, bs, builderName): if self.builds[builderName]: self.builds[builderName].callRemote("unsubscribe", self) self.builds[builderName] = bs bs.callRemote("subscribe", self, 20) d = bs.callRemote("waitUntilFinished") d.addCallback(self._build_finished, builderName) def remote_stepStarted(self, buildername, build, stepname, step): self.currentStep[buildername] = stepname def remote_stepFinished(self, buildername, build, stepname, step, results): pass def remote_buildETAUpdate(self, buildername, build, eta): self.ETA[buildername] = now() + eta @defer.inlineCallbacks def _build_finished(self, bs, builderName): # we need to collect status from the newly-finished build. We don't # remove the build from self.outstanding until we've collected # everything we want. 
self.builds[builderName] = None self.ETA[builderName] = None self.currentStep[builderName] = "finished" self.results[builderName][0] = yield bs.callRemote("getResults") self.results[builderName][1] = yield bs.callRemote("getText") self.outstanding.remove(builderName) if not self.outstanding: self.statusDone() def printStatus(self): try: names = sorted(self.buildRequests.keys()) for n in names: if n not in self.outstanding: # the build is finished, and we have results code, text = self.results[n] t = Results[code] if text: t += " ({})".format(" ".join(text)) elif self.builds[n]: t = self.currentStep[n] or "building" if self.ETA[n]: t += " [ETA {}s]".format(self.ETA[n] - now()) else: t = "no build" self.announce("{}: {}".format(n, t)) self.announce("") except Exception: log.err(None, "printing status") def statusDone(self): if self.printloop: self.printloop.stop() self.printloop = None output("All Builds Complete") # TODO: include a URL for all failing builds names = sorted(self.buildRequests.keys()) happy = True for n in names: code, text = self.results[n] t = "{}: {}".format(n, Results[code]) if text: t += " ({})".format(" ".join(text)) output(t) if code != SUCCESS: happy = False if happy: self.exitcode = 0 else: self.exitcode = 1 self.running.callback(self.exitcode) @defer.inlineCallbacks def getAvailableBuilderNames(self): # This logs into the master using the PB protocol to # get the names of the configured builders that can # be used for the --builder argument if self.connect == "pb": user = self.getopt("username") passwd = self.getopt("passwd") master = self.getopt("master") tryhost, tryport = master.split(":") tryport = int(tryport) f = pb.PBClientFactory() d = f.login(credentials.UsernamePassword(unicode2bytes(user), unicode2bytes(passwd))) reactor.connectTCP(tryhost, tryport, f) remote = yield d buildernames = yield remote.callRemote("getAvailableBuilderNames") output("The following builders are available for the try scheduler: ") for buildername in 
buildernames: output(buildername) yield remote.broker.transport.loseConnection() return if self.connect == "ssh": output("Cannot get available builders over ssh.") sys.exit(1) raise RuntimeError( "unknown connecttype '{}', should be 'pb'".format(self.connect)) def announce(self, message): if not self.quiet: output(message) @defer.inlineCallbacks def run_impl(self): output("using '{}' connect method".format(self.connect)) self.exitcode = 0 # we can't do spawnProcess until we're inside reactor.run(), so force asynchronous execution yield fireEventually(None) try: if bool(self.config.get("get-builder-names")): yield self.getAvailableBuilderNames() else: yield self.createJob() yield self.announce("job created") if bool(self.config.get("dryrun")): yield self.fakeDeliverJob() else: yield self.deliverJob() yield self.announce("job has been delivered") yield self.getStatus() if not bool(self.config.get("dryrun")): yield self.cleanup() except SystemExit as e: self.exitcode = e.code except Exception as e: log.err(e) raise def run(self): d = self.run_impl() d.addCallback(lambda res: reactor.stop()) reactor.run() sys.exit(self.exitcode) def trapSystemExit(self, why): why.trap(SystemExit) self.exitcode = why.value.code def cleanup(self, res=None): if self.buildsetStatus: self.buildsetStatus.broker.transport.loseConnection() buildbot-3.4.0/master/buildbot/clients/usersclient.py000066400000000000000000000036261413250514000227760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # this class is known to contain cruft and will be looked at later, so # no current implementation utilizes it aside from scripts.runner. from twisted.cred import credentials from twisted.internet import reactor from twisted.spread import pb class UsersClient: """ Client set up in buildbot.scripts.runner to send `buildbot user` args over a PB connection to perspective_commandline that will execute the args on the database. """ def __init__(self, master, username, password, port): self.host = master self.username = username self.password = password self.port = int(port) def send(self, op, bb_username, bb_password, ids, info): f = pb.PBClientFactory() d = f.login(credentials.UsernamePassword(self.username, self.password)) reactor.connectTCP(self.host, self.port, f) @d.addCallback def call_commandline(remote): d = remote.callRemote("commandline", op, bb_username, bb_password, ids, info) @d.addCallback def returnAndLose(res): remote.broker.transport.loseConnection() return res return d return d buildbot-3.4.0/master/buildbot/config.py000077500000000000000000001102211413250514000202330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import os import re import sys import traceback import warnings from twisted.python import failure from twisted.python import log from twisted.python.compat import execfile from zope.interface import implementer from buildbot import interfaces from buildbot import locks from buildbot import util from buildbot.interfaces import IRenderable from buildbot.revlinks import default_revlink_matcher from buildbot.util import ComparableMixin from buildbot.util import bytes2unicode from buildbot.util import config as util_config from buildbot.util import identifiers as util_identifiers from buildbot.util import safeTranslate from buildbot.util import service as util_service from buildbot.warnings import ConfigWarning from buildbot.www import auth from buildbot.www import avatar from buildbot.www.authz import authz class ConfigErrors(Exception): def __init__(self, errors=None): if errors is None: errors = [] self.errors = errors[:] def __str__(self): return "\n".join(self.errors) def addError(self, msg): self.errors.append(msg) def merge(self, errors): self.errors.extend(errors.errors) def __bool__(self): return bool(len(self.errors)) _errors = None DEFAULT_DB_URL = 'sqlite:///state.sqlite' RESERVED_UNDERSCORE_NAMES = ["__Janitor"] def error(error, always_raise=False): if _errors is not None and not always_raise: _errors.addError(error) else: raise ConfigErrors([error]) _in_unit_tests = False def loadConfigDict(basedir, configFileName): if not os.path.isdir(basedir): raise ConfigErrors([ "basedir '{}' does not exist".format(basedir), ]) filename = os.path.join(basedir, configFileName) if not os.path.exists(filename): raise ConfigErrors([ "configuration file '{}' does not exist".format(filename), ]) try: with 
open(filename, "r"): pass except IOError as e: raise ConfigErrors([ "unable to open configuration file {}: {}".format(repr(filename), e), ]) from e log.msg("Loading configuration from %r" % (filename,)) # execute the config file localDict = { 'basedir': os.path.expanduser(basedir), '__file__': os.path.abspath(filename), } old_sys_path = sys.path[:] sys.path.append(basedir) try: try: execfile(filename, localDict) except ConfigErrors: raise except SyntaxError: error(("encountered a SyntaxError while parsing config file:\n{} " ).format(traceback.format_exc()), always_raise=True) except Exception: log.err(failure.Failure(), 'error while parsing config file:') error(("error while parsing config file: {} (traceback in logfile)" ).format(sys.exc_info()[1]), always_raise=True) finally: sys.path[:] = old_sys_path if 'BuildmasterConfig' not in localDict: error("Configuration file %r does not define 'BuildmasterConfig'" % (filename,), always_raise=True, ) return filename, localDict['BuildmasterConfig'] @implementer(interfaces.IConfigLoader) class FileLoader(ComparableMixin): compare_attrs = ['basedir', 'configFileName'] def __init__(self, basedir, configFileName): self.basedir = basedir self.configFileName = configFileName def loadConfig(self): # from here on out we can batch errors together for the user's # convenience global _errors _errors = errors = ConfigErrors() try: filename, config_dict = loadConfigDict( self.basedir, self.configFileName) config = MasterConfig.loadFromDict(config_dict, filename) except ConfigErrors as e: errors.merge(e) finally: _errors = None if errors: raise errors return config class MasterConfig(util.ComparableMixin): def __init__(self): # local import to avoid circular imports from buildbot.process import properties # default values for all attributes # global self.title = 'Buildbot' self.titleURL = 'http://buildbot.net' self.buildbotURL = 'http://localhost:8080/' self.changeHorizon = None self.logCompressionLimit = 4 * 1024 
self.logCompressionMethod = 'gz' self.logEncoding = 'utf-8' self.logMaxSize = None self.logMaxTailSize = None self.properties = properties.Properties() self.collapseRequests = None self.codebaseGenerator = None self.prioritizeBuilders = None self.multiMaster = False self.manhole = None self.protocols = {} self.buildbotNetUsageData = "basic" self.validation = dict( branch=re.compile(r'^[\w.+/~-]*$'), revision=re.compile(r'^[ \w\.\-/]*$'), property_name=re.compile(r'^[\w\.\-/~:]*$'), property_value=re.compile(r'^[\w\.\-/~:]*$'), ) self.db = dict( db_url=DEFAULT_DB_URL, ) self.mq = dict( type='simple', ) self.metrics = None self.caches = dict( Builds=15, Changes=10, ) self.schedulers = {} self.secretsProviders = [] self.builders = [] self.workers = [] self.change_sources = [] self.machines = [] self.status = [] self.user_managers = [] self.revlink = default_revlink_matcher self.www = dict( port=None, plugins=dict(), auth=auth.NoAuth(), authz=authz.Authz(), avatar_methods=avatar.AvatarGravatar(), logfileName='http.log', ) self.services = {} _known_config_keys = set([ "buildbotNetUsageData", "buildbotURL", "buildCacheSize", "builders", "caches", "change_source", "codebaseGenerator", "configurators", "changeCacheSize", "changeHorizon", 'db', "db_url", "logCompressionLimit", "logCompressionMethod", "logEncoding", "logMaxSize", "logMaxTailSize", "manhole", "machines", "collapseRequests", "metrics", "mq", "multiMaster", "prioritizeBuilders", "projectName", "projectURL", "properties", "protocols", "revlink", "schedulers", "secretsProviders", "services", "title", "titleURL", "user_managers", "validation", "www", "workers", ]) compare_attrs = list(_known_config_keys) def preChangeGenerator(self, **kwargs): return { 'author': kwargs.get('author', None), 'files': kwargs.get('files', None), 'comments': kwargs.get('comments', None), 'revision': kwargs.get('revision', None), 'when_timestamp': kwargs.get('when_timestamp', None), 'branch': kwargs.get('branch', None), 'category': 
kwargs.get('category', None), 'revlink': kwargs.get('revlink', ''), 'properties': kwargs.get('properties', {}), 'repository': kwargs.get('repository', ''), 'project': kwargs.get('project', ''), 'codebase': kwargs.get('codebase', None) } @classmethod def loadFromDict(cls, config_dict, filename): # warning, all of this is loaded from a thread global _errors _errors = errors = ConfigErrors() # check for unknown keys unknown_keys = set(config_dict.keys()) - cls._known_config_keys if unknown_keys: if len(unknown_keys) == 1: error('Unknown BuildmasterConfig key {}'.format(unknown_keys.pop())) else: error('Unknown BuildmasterConfig keys {}'.format(', '.join(sorted(unknown_keys)))) # instantiate a new config object, which will apply defaults # automatically config = cls() # and defer the rest to sub-functions, for code clarity try: config.run_configurators(filename, config_dict) config.load_global(filename, config_dict) config.load_validation(filename, config_dict) config.load_db(filename, config_dict) config.load_mq(filename, config_dict) config.load_metrics(filename, config_dict) config.load_secrets(filename, config_dict) config.load_caches(filename, config_dict) config.load_schedulers(filename, config_dict) config.load_builders(filename, config_dict) config.load_workers(filename, config_dict) config.load_change_sources(filename, config_dict) config.load_machines(filename, config_dict) config.load_user_managers(filename, config_dict) config.load_www(filename, config_dict) config.load_services(filename, config_dict) # run some sanity checks config.check_single_master() config.check_schedulers() config.check_locks() config.check_builders() config.check_ports() config.check_machines() finally: _errors = None if errors: raise errors return config def run_configurators(self, filename, config_dict): for configurator in config_dict.get('configurators', []): interfaces.IConfigurator(configurator).configure(config_dict) def load_global(self, filename, config_dict): def 
copy_param(name, alt_key=None, check_type=None, check_type_name=None, can_be_callable=False): if name in config_dict: v = config_dict[name] elif alt_key and alt_key in config_dict: v = config_dict[alt_key] else: return if v is not None and check_type and not ( isinstance(v, check_type) or (can_be_callable and callable(v))): error("c['{}'] must be {}".format(name, check_type_name)) else: setattr(self, name, v) def copy_int_param(name, alt_key=None): copy_param(name, alt_key=alt_key, check_type=int, check_type_name='an int') def copy_str_param(name, alt_key=None): copy_param(name, alt_key=alt_key, check_type=(str,), check_type_name='a string') copy_str_param('title', alt_key='projectName') max_title_len = 18 if len(self.title) > max_title_len: # Warn if the title length limiting logic in www/base/src/app/app.route.js # would hide the title. warnings.warn('WARNING: Title is too long to be displayed. ' + '"Buildbot" will be used instead.', category=ConfigWarning) copy_str_param('titleURL', alt_key='projectURL') copy_str_param('buildbotURL') def copy_str_or_callable_param(name, alt_key=None): copy_param(name, alt_key=alt_key, check_type=(str,), check_type_name='a string or callable', can_be_callable=True) if "buildbotNetUsageData" not in config_dict: if _in_unit_tests: self.buildbotNetUsageData = None else: warnings.warn( '`buildbotNetUsageData` is not configured and defaults to basic.\n' 'This parameter helps the buildbot development team to understand' ' the installation base.\n' 'No personal information is collected.\n' 'Only installation software version info and plugin usage is sent.\n' 'You can `opt-out` by setting this variable to None.\n' 'Or `opt-in` for more information by setting it to "full".\n', category=ConfigWarning) copy_str_or_callable_param('buildbotNetUsageData') copy_int_param('changeHorizon') copy_int_param('logCompressionLimit') self.logCompressionMethod = config_dict.get( 'logCompressionMethod', 'gz') if self.logCompressionMethod not in ('raw', 
'bz2', 'gz', 'lz4'): error( "c['logCompressionMethod'] must be 'raw', 'bz2', 'gz' or 'lz4'") if self.logCompressionMethod == "lz4": try: import lz4 # pylint: disable=import-outside-toplevel [lz4] except ImportError: error("To set c['logCompressionMethod'] to 'lz4' " "you must install the lz4 library ('pip install lz4')") copy_int_param('logMaxSize') copy_int_param('logMaxTailSize') copy_param('logEncoding') properties = config_dict.get('properties', {}) if not isinstance(properties, dict): error("c['properties'] must be a dictionary") else: self.properties.update(properties, filename) collapseRequests = config_dict.get('collapseRequests') if (collapseRequests not in (None, True, False) and not callable(collapseRequests)): error("collapseRequests must be a callable, True, or False") else: self.collapseRequests = collapseRequests codebaseGenerator = config_dict.get('codebaseGenerator') if (codebaseGenerator is not None and not callable(codebaseGenerator)): error( "codebaseGenerator must be a callable accepting a dict and returning a str") else: self.codebaseGenerator = codebaseGenerator prioritizeBuilders = config_dict.get('prioritizeBuilders') if prioritizeBuilders is not None and not callable(prioritizeBuilders): error("prioritizeBuilders must be a callable") else: self.prioritizeBuilders = prioritizeBuilders protocols = config_dict.get('protocols', {}) if isinstance(protocols, dict): for proto, options in protocols.items(): if not isinstance(proto, str): error("c['protocols'] keys must be strings") if not isinstance(options, dict): error("c['protocols']['{}'] must be a dict".format(proto)) return if proto == "wamp": self.check_wamp_proto(options) else: error("c['protocols'] must be dict") return self.protocols = protocols if 'multiMaster' in config_dict: self.multiMaster = config_dict["multiMaster"] if 'debugPassword' in config_dict: log.msg("the 'debugPassword' parameter is unused and " "can be removed from the configuration file") if 'manhole' in config_dict: # 
we don't check that this is a manhole instance, since that # requires importing buildbot.manhole for every user, and currently # that will fail if cryptography isn't installed self.manhole = config_dict['manhole'] if 'revlink' in config_dict: revlink = config_dict['revlink'] if not callable(revlink): error("revlink must be a callable") else: self.revlink = revlink def load_validation(self, filename, config_dict): validation = config_dict.get("validation", {}) if not isinstance(validation, dict): error("c['validation'] must be a dictionary") else: unknown_keys = ( set(validation.keys()) - set(self.validation.keys())) if unknown_keys: error("unrecognized validation key(s): {}".format(", ".join(unknown_keys))) else: self.validation.update(validation) @staticmethod def getDbUrlFromConfig(config_dict, throwErrors=True): if 'db' in config_dict: db = config_dict['db'] if set(db.keys()) - set(['db_url']) and throwErrors: error("unrecognized keys in c['db']") config_dict = db # we don't attempt to parse db URLs here - the engine strategy will do # so. 
if 'db_url' in config_dict: return config_dict['db_url'] return DEFAULT_DB_URL def load_db(self, filename, config_dict): self.db = dict(db_url=self.getDbUrlFromConfig(config_dict)) def load_mq(self, filename, config_dict): from buildbot.mq import connector # avoid circular imports if 'mq' in config_dict: self.mq.update(config_dict['mq']) classes = connector.MQConnector.classes typ = self.mq.get('type', 'simple') if typ not in classes: error("mq type '{}' is not known".format(typ)) return known_keys = classes[typ]['keys'] unk = set(self.mq.keys()) - known_keys - set(['type']) if unk: error("unrecognized keys in c['mq']: {}".format(', '.join(unk))) def load_metrics(self, filename, config_dict): # we don't try to validate metrics keys if 'metrics' in config_dict: metrics = config_dict["metrics"] if not isinstance(metrics, dict): error("c['metrics'] must be a dictionary") else: self.metrics = metrics def load_secrets(self, filename, config_dict): if 'secretsProviders' in config_dict: secretsProviders = config_dict["secretsProviders"] if not isinstance(secretsProviders, list): error("c['secretsProviders'] must be a list") else: self.secretsProviders = secretsProviders def load_caches(self, filename, config_dict): explicit = False if 'caches' in config_dict: explicit = True caches = config_dict['caches'] if not isinstance(caches, dict): error("c['caches'] must be a dictionary") else: for (name, value) in caches.items(): if not isinstance(value, int): error("value for cache size '{}' must be an integer".format(name)) return if value < 1: error("'{}' cache size must be at least 1, got '{}'".format(name, value)) self.caches.update(caches) if 'buildCacheSize' in config_dict: if explicit: msg = "cannot specify c['caches'] and c['buildCacheSize']" error(msg) self.caches['Builds'] = config_dict['buildCacheSize'] if 'changeCacheSize' in config_dict: if explicit: msg = "cannot specify c['caches'] and c['changeCacheSize']" error(msg) self.caches['Changes'] = 
config_dict['changeCacheSize'] def load_schedulers(self, filename, config_dict): if 'schedulers' not in config_dict: return schedulers = config_dict['schedulers'] ok = True if not isinstance(schedulers, (list, tuple)): ok = False else: for s in schedulers: if not interfaces.IScheduler.providedBy(s): ok = False if not ok: msg = "c['schedulers'] must be a list of Scheduler instances" error(msg) # convert from list to dict, first looking for duplicates seen_names = set() for s in schedulers: if s.name in seen_names: error("scheduler name '{}' used multiple times".format(s.name)) seen_names.add(s.name) self.schedulers = dict((s.name, s) for s in schedulers) def load_builders(self, filename, config_dict): if 'builders' not in config_dict: return builders = config_dict['builders'] if not isinstance(builders, (list, tuple)): error("c['builders'] must be a list") return # convert all builder configs to BuilderConfig instances def mapper(b): if isinstance(b, BuilderConfig): return b elif isinstance(b, dict): return BuilderConfig(**b) else: error("%r is not a builder config (in c['builders']" % (b,)) return None builders = [mapper(b) for b in builders] for builder in builders: if builder and os.path.isabs(builder.builddir): warnings.warn( ("Absolute path '{}' for builder may cause mayhem. 
Perhaps you meant to " "specify workerbuilddir instead.").format(builder.builddir), category=ConfigWarning, ) self.builders = builders @staticmethod def _check_workers(workers, conf_key): if not isinstance(workers, (list, tuple)): error("{0} must be a list".format(conf_key)) return False for worker in workers: if not interfaces.IWorker.providedBy(worker): msg = "{} must be a list of Worker instances but there is {!r}".format( conf_key, worker) error(msg) return False def validate(workername): if workername in ("debug", "change", "status"): yield "worker name %r is reserved" % workername if not util_identifiers.ident_re.match(workername): yield "worker name %r is not an identifier" % workername if not workername: yield "worker name %r cannot be an empty string" % workername if len(workername) > 50: yield "worker name %r is longer than %d characters" % (workername, 50) errors = list(validate(worker.workername)) for msg in errors: error(msg) if errors: return False return True def load_workers(self, filename, config_dict): workers = config_dict.get('workers') if workers is None: return if not self._check_workers(workers, "c['workers']"): return self.workers = workers[:] def load_change_sources(self, filename, config_dict): change_source = config_dict.get('change_source', []) if isinstance(change_source, (list, tuple)): change_sources = change_source else: change_sources = [change_source] for s in change_sources: if not interfaces.IChangeSource.providedBy(s): msg = "c['change_source'] must be a list of change sources" error(msg) return self.change_sources = change_sources def load_machines(self, filename, config_dict): if 'machines' not in config_dict: return machines = config_dict['machines'] msg = "c['machines'] must be a list of machines" if not isinstance(machines, (list, tuple)): error(msg) return for m in machines: if not interfaces.IMachine.providedBy(m): error(msg) return self.machines = machines def load_user_managers(self, filename, config_dict): if 
'user_managers' not in config_dict: return user_managers = config_dict['user_managers'] msg = "c['user_managers'] must be a list of user managers" if not isinstance(user_managers, (list, tuple)): error(msg) return self.user_managers = user_managers def load_www(self, filename, config_dict): if 'www' not in config_dict: return www_cfg = config_dict['www'] allowed = { 'allowed_origins', 'auth', 'authz', 'avatar_methods', 'change_hook_auth', 'change_hook_dialects', 'cookie_expiration_time', 'custom_templates_dir', 'debug', 'default_page', 'json_cache_seconds', 'jsonp', 'logRotateLength', 'logfileName', 'maxRotatedFiles', 'plugins', 'port', 'rest_minimum_version', 'ui_default_config', 'versions', 'ws_ping_interval', 'graphql', } unknown = set(list(www_cfg)) - allowed if unknown: error("unknown www configuration parameter(s) {}".format(', '.join(unknown))) versions = www_cfg.get('versions') if versions is not None: cleaned_versions = [] if not isinstance(versions, list): error('Invalid www configuration value of versions') else: for i, v in enumerate(versions): if not isinstance(v, tuple) or len(v) < 2: error('Invalid www configuration value of versions') break cleaned_versions.append(v) www_cfg['versions'] = cleaned_versions cookie_expiration_time = www_cfg.get('cookie_expiration_time') if cookie_expiration_time is not None: if not isinstance(cookie_expiration_time, datetime.timedelta): error('Invalid www["cookie_expiration_time"] configuration should ' 'be a datetime.timedelta') self.www.update(www_cfg) def load_services(self, filename, config_dict): if 'services' not in config_dict: return self.services = {} for _service in config_dict['services']: if not isinstance(_service, util_service.BuildbotService): error(("{} object should be an instance of " "buildbot.util.service.BuildbotService").format(type(_service))) continue if _service.name in self.services: error('Duplicate service name %r' % _service.name) continue self.services[_service.name] = _service def 
check_single_master(self): # check additional problems that are only valid in a single-master # installation if self.multiMaster: return if not self.workers: error("no workers are configured") if not self.builders: error("no builders are configured") # check that all builders are implemented on this master unscheduled_buildernames = {b.name for b in self.builders} for s in self.schedulers.values(): builderNames = s.listBuilderNames() if interfaces.IRenderable.providedBy(builderNames): unscheduled_buildernames.clear() else: for n in builderNames: if interfaces.IRenderable.providedBy(n): unscheduled_buildernames.clear() elif n in unscheduled_buildernames: unscheduled_buildernames.remove(n) if unscheduled_buildernames: names_str = ', '.join(unscheduled_buildernames) error("builder(s) {} have no schedulers to drive them".format(names_str)) def check_schedulers(self): # don't perform this check in multiMaster mode if self.multiMaster: return all_buildernames = {b.name for b in self.builders} for s in self.schedulers.values(): builderNames = s.listBuilderNames() if interfaces.IRenderable.providedBy(builderNames): continue for n in builderNames: if interfaces.IRenderable.providedBy(n): continue if n not in all_buildernames: error("Unknown builder '{}' in scheduler '{}'".format(n, s.name)) def check_locks(self): # assert that all locks used by the Builds and their Steps are # uniquely named. 
lock_dict = {} def check_lock(lock): if isinstance(lock, locks.LockAccess): lock = lock.lockid if lock.name in lock_dict: if lock_dict[lock.name] is not lock: msg = "Two locks share the same name, '{}'".format(lock.name) error(msg) else: lock_dict[lock.name] = lock for b in self.builders: if b.locks and not IRenderable.providedBy(b.locks): for lock in b.locks: check_lock(lock) def check_builders(self): # look both for duplicate builder names, and for builders pointing # to unknown workers workernames = {w.workername for w in self.workers} seen_names = set() seen_builddirs = set() for b in self.builders: unknowns = set(b.workernames) - workernames if unknowns: error("builder '{}' uses unknown workers {}".format(b.name, ", ".join(repr(u) for u in unknowns))) if b.name in seen_names: error("duplicate builder name '{}'".format(b.name)) seen_names.add(b.name) if b.builddir in seen_builddirs: error("duplicate builder builddir '{}'".format(b.builddir)) seen_builddirs.add(b.builddir) def check_ports(self): ports = set() if self.protocols: for proto, options in self.protocols.items(): if proto == 'null': port = -1 else: port = options.get("port") if not port: continue if isinstance(port, int): # Conversion needed to compare listenTCP and strports ports port = "tcp:%d" % port if port != -1 and port in ports: error("Some of ports in c['protocols'] duplicated") ports.add(port) if ports: return if self.workers: error("workers are configured, but c['protocols'] not") def check_machines(self): seen_names = set() for mm in self.machines: if mm.name in seen_names: error("duplicate machine name '{}'".format(mm.name)) seen_names.add(mm.name) for w in self.workers: if w.machine_name is not None and w.machine_name not in seen_names: error("worker '{}' uses unknown machine '{}'".format( w.name, w.machine_name)) class BuilderConfig(util_config.ConfiguredMixin): def __init__(self, name=None, workername=None, workernames=None, builddir=None, workerbuilddir=None, factory=None, tags=None, 
nextWorker=None, nextBuild=None, locks=None, env=None, properties=None, collapseRequests=None, description=None, canStartBuild=None, defaultProperties=None ): # name is required, and can't start with '_' if not name or type(name) not in (bytes, str): error("builder's name is required") name = '' elif name[0] == '_' and name not in RESERVED_UNDERSCORE_NAMES: error("builder names must not start with an underscore: '{}'".format(name)) try: self.name = util.bytes2unicode(name, encoding="ascii") except UnicodeDecodeError: error("builder names must be unicode or ASCII") # factory is required if factory is None: error("builder '{}' has no factory".format(name)) from buildbot.process.factory import BuildFactory if factory is not None and not isinstance(factory, BuildFactory): error("builder '{}'s factory is not a BuildFactory instance".format(name)) self.factory = factory # workernames can be a single worker name or a list, and should also # include workername, if given if isinstance(workernames, str): workernames = [workernames] if workernames: if not isinstance(workernames, list): error("builder '{}': workernames must be a list or a string".format(name)) else: workernames = [] if workername: if not isinstance(workername, str): error(("builder '{}': workername must be a string but it is {}" ).format(name, repr(workername))) workernames = workernames + [workername] if not workernames: error("builder '{}': at least one workername is required".format(name)) self.workernames = workernames # builddir defaults to name if builddir is None: builddir = safeTranslate(name) builddir = bytes2unicode(builddir) self.builddir = builddir # workerbuilddir defaults to builddir if workerbuilddir is None: workerbuilddir = builddir self.workerbuilddir = workerbuilddir # remainder are optional if tags: if not isinstance(tags, list): error("builder '{}': tags must be a list".format(name)) bad_tags = any((tag for tag in tags if not isinstance(tag, str))) if bad_tags: error( "builder '{}': tags 
list contains something that is not a string".format(name)) if len(tags) != len(set(tags)): dupes = " ".join({x for x in tags if tags.count(x) > 1}) error( "builder '{}': tags list contains duplicate tags: {}".format(name, dupes)) else: tags = [] self.tags = tags self.nextWorker = nextWorker if nextWorker and not callable(nextWorker): error('nextWorker must be a callable') self.nextBuild = nextBuild if nextBuild and not callable(nextBuild): error('nextBuild must be a callable') self.canStartBuild = canStartBuild if canStartBuild and not callable(canStartBuild): error('canStartBuild must be a callable') self.locks = locks or [] self.env = env or {} if not isinstance(self.env, dict): error("builder's env must be a dictionary") self.properties = properties or {} self.defaultProperties = defaultProperties or {} self.collapseRequests = collapseRequests self.description = description def getConfigDict(self): # note: this method will disappear eventually - put your smarts in the # constructor! rv = { 'name': self.name, 'workernames': self.workernames, 'factory': self.factory, 'builddir': self.builddir, 'workerbuilddir': self.workerbuilddir, } if self.tags: rv['tags'] = self.tags if self.nextWorker: rv['nextWorker'] = self.nextWorker if self.nextBuild: rv['nextBuild'] = self.nextBuild if self.locks: rv['locks'] = self.locks if self.env: rv['env'] = self.env if self.properties: rv['properties'] = self.properties if self.defaultProperties: rv['defaultProperties'] = self.defaultProperties if self.collapseRequests is not None: rv['collapseRequests'] = self.collapseRequests if self.description: rv['description'] = self.description return rv buildbot-3.4.0/master/buildbot/configurators/000077500000000000000000000000001413250514000213015ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/configurators/__init__.py000066400000000000000000000031501413250514000234110ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # from zope.interface import implementer from buildbot.interfaces import IConfigurator """ This module holds configurators, which helps setup schedulers, builders, steps, for a very specific purpose. Higher level interfaces to buildbot configurations components. """ @implementer(IConfigurator) class ConfiguratorBase: """ I provide base helper methods for configurators """ def __init__(self): pass def configure(self, config_dict): self.config_dict = c = config_dict if 'schedulers' not in c: c['schedulers'] = [] self.schedulers = c['schedulers'] if 'protocols' not in c: c['protocols'] = {} self.protocols = c['protocols'] if 'builders' not in c: c['builders'] = [] self.builders = c['builders'] if 'workers' not in c: c['workers'] = [] self.workers = c['workers'] buildbot-3.4.0/master/buildbot/configurators/janitor.py000066400000000000000000000076771413250514000233420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # import datetime from twisted.internet import defer from buildbot.config import BuilderConfig from buildbot.configurators import ConfiguratorBase from buildbot.process.buildstep import BuildStep from buildbot.process.factory import BuildFactory from buildbot.process.results import SUCCESS from buildbot.schedulers.forcesched import ForceScheduler from buildbot.schedulers.timed import Nightly from buildbot.util import datetime2epoch from buildbot.worker.local import LocalWorker """ Janitor is a configurator which create a Janitor Builder with all needed Janitor steps """ JANITOR_NAME = "__Janitor" # If you read this code, you may want to patch this name. def now(): """patchable now (datetime is not patchable as builtin)""" return datetime.datetime.utcnow() class LogChunksJanitor(BuildStep): name = 'LogChunksJanitor' renderables = ["logHorizon"] def __init__(self, logHorizon): super().__init__() self.logHorizon = logHorizon @defer.inlineCallbacks def run(self): older_than_timestamp = datetime2epoch(now() - self.logHorizon) deleted = yield self.master.db.logs.deleteOldLogChunks(older_than_timestamp) self.descriptionDone = ["deleted", str(deleted), "logchunks"] return SUCCESS class BuildDataJanitor(BuildStep): name = 'BuildDataJanitor' renderables = ["build_data_horizon"] def __init__(self, build_data_horizon): super().__init__() self.build_data_horizon = build_data_horizon @defer.inlineCallbacks def run(self): older_than_timestamp = datetime2epoch(now() - self.build_data_horizon) deleted = yield self.master.db.build_data.deleteOldBuildData(older_than_timestamp) self.descriptionDone = ["deleted", str(deleted), "build data key-value pairs"] return SUCCESS class 
JanitorConfigurator(ConfiguratorBase): def __init__(self, logHorizon=None, hour=0, build_data_horizon=None, **kwargs): super().__init__() self.logHorizon = logHorizon self.build_data_horizon = build_data_horizon self.hour = hour self.kwargs = kwargs def configure(self, config_dict): steps = [] if self.logHorizon is not None: steps.append(LogChunksJanitor(logHorizon=self.logHorizon)) if self.build_data_horizon is not None: steps.append(BuildDataJanitor(build_data_horizon=self.build_data_horizon)) if not steps: return hour = self.hour kwargs = self.kwargs super().configure(config_dict) nightly_kwargs = {} # we take the defaults of Nightly, except for hour for arg in ('minute', 'dayOfMonth', 'month', 'dayOfWeek'): if arg in kwargs: nightly_kwargs[arg] = kwargs[arg] self.schedulers.append(Nightly( name=JANITOR_NAME, builderNames=[JANITOR_NAME], hour=hour, **nightly_kwargs)) self.schedulers.append(ForceScheduler( name=JANITOR_NAME + "_force", builderNames=[JANITOR_NAME])) self.builders.append(BuilderConfig( name=JANITOR_NAME, workername=JANITOR_NAME, factory=BuildFactory(steps=steps) )) self.protocols.setdefault('null', {}) self.workers.append(LocalWorker(JANITOR_NAME)) buildbot-3.4.0/master/buildbot/data/000077500000000000000000000000001413250514000173255ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/data/__init__.py000066400000000000000000000000001413250514000214240ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/data/base.py000066400000000000000000000174041413250514000206170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy import functools import re from collections import UserList from twisted.internet import defer from buildbot.data import exceptions class ResourceType: name = None plural = None endpoints = [] keyField = None eventPathPatterns = "" entityType = None subresources = [] def __init__(self, master): self.master = master self.compileEventPathPatterns() def compileEventPathPatterns(self): # We'll run a single format, and then split the string # to get the final event path tuple pathPatterns = self.eventPathPatterns pathPatterns = pathPatterns.split() identifiers = re.compile(r':([^/]*)') for i, pp in enumerate(pathPatterns): pp = identifiers.sub(r'{\1}', pp) if pp.startswith("/"): pp = pp[1:] pathPatterns[i] = pp self.eventPaths = pathPatterns @functools.lru_cache(1) def getEndpoints(self): endpoints = self.endpoints[:] for i, ep in enumerate(endpoints): if not issubclass(ep, Endpoint): raise TypeError("Not an Endpoint subclass") endpoints[i] = ep(self, self.master) return endpoints @functools.lru_cache(1) def getDefaultEndpoint(self): for ep in self.getEndpoints(): if not ep.isCollection: return ep return None @functools.lru_cache(1) def getCollectionEndpoint(self): for ep in self.getEndpoints(): if ep.isCollection or ep.isPseudoCollection: return ep return None @staticmethod def sanitizeMessage(msg): msg = copy.deepcopy(msg) return msg def produceEvent(self, msg, event): if msg is not None: msg = self.sanitizeMessage(msg) for path in self.eventPaths: path = path.format(**msg) routingKey = tuple(path.split("/")) + (event,) self.master.mq.produce(routingKey, msg) class SubResource: def __init__(self, rtype): self.rtype = rtype self.endpoints = {} for endpoint in 
rtype.endpoints: if endpoint.isCollection: self.endpoints[rtype.plural] = endpoint else: self.endpoints[rtype.name] = endpoint class Endpoint: pathPatterns = "" rootLinkName = None isCollection = False isPseudoCollection = False isRaw = False parentMapping = {} def __init__(self, rtype, master): self.rtype = rtype self.master = master def get(self, resultSpec, kwargs): raise NotImplementedError def control(self, action, args, kwargs): # we convert the action into a mixedCase method name action_method = getattr(self, "action" + action.capitalize(), None) if action_method is None: raise exceptions.InvalidControlException("action: {} is not supported".format(action)) return action_method(args, kwargs) def get_kwargs_from_graphql_parent(self, parent, parent_type): if parent_type not in self.parentMapping: rtype = self.master.data.getResourceTypeForGraphQlType(parent_type) if rtype.keyField in parent: parentid = rtype.keyField else: raise NotImplementedError( "Collection endpoint should implement " "get_kwargs_from_graphql or parentMapping" ) else: parentid = self.parentMapping[parent_type] ret = {'graphql': True} ret[parentid] = parent[parentid] return ret def get_kwargs_from_graphql(self, parent, resolve_info, args): if self.isCollection or self.isPseudoCollection: if parent is not None: return self.get_kwargs_from_graphql_parent( parent, resolve_info.parent_type.name ) return {'graphql': True} ret = {'graphql': True} k = self.rtype.keyField v = args.pop(k) if v is not None: ret[k] = v return ret def __repr__(self): return "endpoint for " + ",".join(self.pathPatterns.split()) class BuildNestingMixin: """ A mixin for methods to decipher the many ways a build, step, or log can be specified. 
""" @defer.inlineCallbacks def getBuildid(self, kwargs): # need to look in the context of a step, specified by build or # builder or whatever if 'buildid' in kwargs: return kwargs['buildid'] else: builderid = yield self.getBuilderId(kwargs) if builderid is None: return None build = yield self.master.db.builds.getBuildByNumber( builderid=builderid, number=kwargs['build_number']) if not build: return None return build['id'] @defer.inlineCallbacks def getStepid(self, kwargs): if 'stepid' in kwargs: return kwargs['stepid'] else: buildid = yield self.getBuildid(kwargs) if buildid is None: return None dbdict = yield self.master.db.steps.getStep(buildid=buildid, number=kwargs.get( 'step_number'), name=kwargs.get('step_name')) if not dbdict: return None return dbdict['id'] def getBuilderId(self, kwargs): if 'buildername' in kwargs: return self.master.db.builders.findBuilderId(kwargs['buildername'], autoCreate=False) return defer.succeed(kwargs['builderid']) class ListResult(UserList): __slots__ = ['offset', 'total', 'limit'] def __init__(self, values, offset=None, total=None, limit=None): super().__init__(values) # if set, this is the index in the overall results of the first element of # this list self.offset = offset # if set, this is the total number of results self.total = total # if set, this is the limit, either from the user or the implementation self.limit = limit def __repr__(self): return "ListResult(%r, offset=%r, total=%r, limit=%r)" % \ (self.data, self.offset, self.total, self.limit) def __eq__(self, other): if isinstance(other, ListResult): return self.data == other.data \ and self.offset == other.offset \ and self.total == other.total \ and self.limit == other.limit return self.data == other \ and self.offset == self.limit is None \ and (self.total is None or self.total == len(other)) def __ne__(self, other): return not (self == other) def updateMethod(func): """Decorate this resourceType instance as an update method, made available at 
master.data.updates.$funcname""" func.isUpdateMethod = True return func buildbot-3.4.0/master/buildbot/data/build_data.py000066400000000000000000000073161413250514000217760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class Db2DataMixin: def db2data(self, dbdict): data = { 'buildid': dbdict['buildid'], 'name': dbdict['name'], 'value': dbdict['value'], 'length': dbdict['length'], 'source': dbdict['source'], } return defer.succeed(data) class BuildDatasNoValueEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = True pathPatterns = """ /builders/n:builderid/builds/n:build_number/data /builders/i:buildername/builds/n:build_number/data /builds/n:buildid/data """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): buildid = yield self.getBuildid(kwargs) build_datadicts = yield self.master.db.build_data.getAllBuildDataNoValues(buildid) results = [] for dbdict in build_datadicts: results.append((yield self.db2data(dbdict))) return results class BuildDataNoValueEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = False pathPatterns = """ /builders/n:builderid/builds/n:build_number/data/i:name /builders/i:buildername/builds/n:build_number/data/i:name 
/builds/n:buildid/data/i:name """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): buildid = yield self.getBuildid(kwargs) name = kwargs['name'] build_datadict = yield self.master.db.build_data.getBuildDataNoValue(buildid, name) return (yield self.db2data(build_datadict)) if build_datadict else None class BuildDataEndpoint(base.BuildNestingMixin, base.Endpoint): isCollection = False isRaw = True pathPatterns = """ /builders/n:builderid/builds/n:build_number/data/i:name/value /builders/i:buildername/builds/n:build_number/data/i:name/value /builds/n:buildid/data/i:name/value """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): buildid = yield self.getBuildid(kwargs) name = kwargs['name'] dbdict = yield self.master.db.build_data.getBuildData(buildid, name) if not dbdict: return None return {'raw': dbdict['value'], 'mime-type': 'application/octet-stream', 'filename': dbdict['name']} class BuildData(base.ResourceType): name = "build_data" plural = "build_data" endpoints = [BuildDatasNoValueEndpoint, BuildDataNoValueEndpoint, BuildDataEndpoint] keyField = "name" class EntityType(types.Entity): buildid = types.Integer() name = types.String() length = types.Integer() value = types.NoneOk(types.Binary()) source = types.String() entityType = EntityType(name, 'BuildData') @base.updateMethod def setBuildData(self, buildid, name, value, source): # forward deferred directly return self.master.db.build_data.setBuildData(buildid, name, value, source) buildbot-3.4.0/master/buildbot/data/builders.py000066400000000000000000000124051413250514000215120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class BuilderEndpoint(base.BuildNestingMixin, base.Endpoint): isCollection = False pathPatterns = """ /builders/n:builderid /builders/i:buildername /masters/n:masterid/builders/n:builderid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): builderid = yield self.getBuilderId(kwargs) if builderid is None: return None bdict = yield self.master.db.builders.getBuilder(builderid) if not bdict: return None if 'masterid' in kwargs: if kwargs['masterid'] not in bdict['masterids']: return None return dict(builderid=builderid, name=bdict['name'], masterids=bdict['masterids'], description=bdict['description'], tags=bdict['tags']) class BuildersEndpoint(base.Endpoint): isCollection = True rootLinkName = 'builders' pathPatterns = """ /builders /masters/n:masterid/builders """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): bdicts = yield self.master.db.builders.getBuilders( masterid=kwargs.get('masterid', None)) return [dict(builderid=bd['id'], name=bd['name'], masterids=bd['masterids'], description=bd['description'], tags=bd['tags']) for bd in bdicts] def get_kwargs_from_graphql(self, parent, resolve_info, args): if parent is not None: return {'masterid': parent['masterid']} return {} class Builder(base.ResourceType): name = "builder" plural = "builders" endpoints = [BuilderEndpoint, BuildersEndpoint] keyField = 'builderid' eventPathPatterns = """ /builders/:builderid """ 
subresources = ["Build", "Forcescheduler", "Scheduler", "Buildrequest"] class EntityType(types.Entity): builderid = types.Integer() name = types.Identifier(70) masterids = types.List(of=types.Integer()) description = types.NoneOk(types.String()) tags = types.List(of=types.String()) entityType = EntityType(name, 'Builder') @defer.inlineCallbacks def generateEvent(self, _id, event): builder = yield self.master.data.get(('builders', str(_id))) self.produceEvent(builder, event) @base.updateMethod def findBuilderId(self, name): return self.master.db.builders.findBuilderId(name) @base.updateMethod @defer.inlineCallbacks def updateBuilderInfo(self, builderid, description, tags): ret = yield self.master.db.builders.updateBuilderInfo(builderid, description, tags) yield self.generateEvent(builderid, "update") return ret @base.updateMethod @defer.inlineCallbacks def updateBuilderList(self, masterid, builderNames): # get the "current" list of builders for this master, so we know what # changes to make. Race conditions here aren't a great worry, as this # is the only master inserting or deleting these records. 
builders = yield self.master.db.builders.getBuilders(masterid=masterid) # figure out what to remove and remove it builderNames_set = set(builderNames) for bldr in builders: if bldr['name'] not in builderNames_set: builderid = bldr['id'] yield self.master.db.builders.removeBuilderMaster( masterid=masterid, builderid=builderid) self.master.mq.produce(('builders', str(builderid), 'stopped'), dict(builderid=builderid, masterid=masterid, name=bldr['name'])) else: builderNames_set.remove(bldr['name']) # now whatever's left in builderNames_set is new for name in builderNames_set: builderid = yield self.master.db.builders.findBuilderId(name) yield self.master.db.builders.addBuilderMaster( masterid=masterid, builderid=builderid) self.master.mq.produce(('builders', str(builderid), 'started'), dict(builderid=builderid, masterid=masterid, name=name)) # returns a Deferred that returns None def _masterDeactivated(self, masterid): # called from the masters rtype to indicate that the given master is # deactivated return self.updateBuilderList(masterid, []) buildbot-3.4.0/master/buildbot/data/buildrequests.py000066400000000000000000000272461413250514000226050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types from buildbot.db.buildrequests import AlreadyClaimedError from buildbot.db.buildrequests import NotClaimedError from buildbot.process import results from buildbot.process.results import RETRY class Db2DataMixin: def _generate_filtered_properties(self, props, filters): """ This method returns Build's properties according to property filters. :param props: Properties as a dict (from db) :param filters: Desired properties keys as a list (from API URI) """ # by default no properties are returned if props and filters: return (props if '*' in filters else dict(((k, v) for k, v in props.items() if k in filters))) return None @defer.inlineCallbacks def addPropertiesToBuildRequest(self, buildrequest, filters): if not filters: return None props = yield self.master.db.buildsets.getBuildsetProperties(buildrequest['buildsetid']) filtered_properties = self._generate_filtered_properties(props, filters) if filtered_properties: buildrequest['properties'] = filtered_properties return None def db2data(self, dbdict): data = { 'buildrequestid': dbdict['buildrequestid'], 'buildsetid': dbdict['buildsetid'], 'builderid': dbdict['builderid'], 'priority': dbdict['priority'], 'claimed': dbdict['claimed'], 'claimed_at': dbdict['claimed_at'], 'claimed_by_masterid': dbdict['claimed_by_masterid'], 'complete': dbdict['complete'], 'results': dbdict['results'], 'submitted_at': dbdict['submitted_at'], 'complete_at': dbdict['complete_at'], 'waited_for': dbdict['waited_for'], 'properties': dbdict.get('properties'), } return defer.succeed(data) fieldMapping = { 'buildrequestid': 'buildrequests.id', 'buildsetid': 'buildrequests.buildsetid', 'builderid': 'buildrequests.builderid', 'priority': 'buildrequests.priority', 'complete': 'buildrequests.complete', 'results': 'buildrequests.results', 'submitted_at': 'buildrequests.submitted_at', 'complete_at': 
'buildrequests.complete_at', 'waited_for': 'buildrequests.waited_for', # br claim 'claimed_at': 'buildrequest_claims.claimed_at', 'claimed_by_masterid': 'buildrequest_claims.masterid', } class BuildRequestEndpoint(Db2DataMixin, base.Endpoint): isCollection = False pathPatterns = """ /buildrequests/n:buildrequestid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): buildrequest = yield self.master.db.buildrequests.getBuildRequest(kwargs['buildrequestid']) if buildrequest: filters = resultSpec.popProperties() if hasattr(resultSpec, 'popProperties') else [] yield self.addPropertiesToBuildRequest(buildrequest, filters) return (yield self.db2data(buildrequest)) return None @defer.inlineCallbacks def control(self, action, args, kwargs): if action != "cancel": raise ValueError("action: {} is not supported".format(action)) brid = kwargs['buildrequestid'] # first, try to claim the request; if this fails, then it's too late to # cancel the build anyway try: b = yield self.master.db.buildrequests.claimBuildRequests(brids=[brid]) except AlreadyClaimedError: # XXX race condition # - After a buildrequest was claimed, and # - Before creating a build, # the claiming master still # needs to do some processing, (send a message to the message queue, # call maybeStartBuild on the related builder). # In that case we won't have the related builds here. We don't have # an alternative to letting them run without stopping them for now. builds = yield self.master.data.get(("buildrequests", brid, "builds")) # Don't call the data API here, as the buildrequests might have been # taken by another master. We just send the stop message and forget # about those. mqArgs = {'reason': args.get('reason', 'no reason')} for b in builds: self.master.mq.produce(("control", "builds", str(b['buildid']), "stop"), mqArgs) return None # then complete it with 'CANCELLED'; this is the closest we can get to # cancelling a request without running into trouble with dangling # references. 
yield self.master.data.updates.completeBuildRequests([brid], results.CANCELLED) return None class BuildRequestsEndpoint(Db2DataMixin, base.Endpoint): isCollection = True pathPatterns = """ /buildrequests /builders/n:builderid/buildrequests """ rootLinkName = 'buildrequests' @defer.inlineCallbacks def get(self, resultSpec, kwargs): builderid = kwargs.get("builderid", None) complete = resultSpec.popBooleanFilter('complete') claimed_by_masterid = resultSpec.popBooleanFilter( 'claimed_by_masterid') if claimed_by_masterid: # claimed_by_masterid takes precedence over 'claimed' filter # (no need to check consistency with 'claimed' filter even if # 'claimed'=False with 'claimed_by_masterid' set, doesn't make sense) claimed = claimed_by_masterid else: claimed = resultSpec.popBooleanFilter('claimed') bsid = resultSpec.popOneFilter('buildsetid', 'eq') resultSpec.fieldMapping = self.fieldMapping buildrequests = yield self.master.db.buildrequests.getBuildRequests( builderid=builderid, complete=complete, claimed=claimed, bsid=bsid, resultSpec=resultSpec) results = [] filters = resultSpec.popProperties() if hasattr(resultSpec, 'popProperties') else [] for br in buildrequests: yield self.addPropertiesToBuildRequest(br, filters) results.append((yield self.db2data(br))) return results class BuildRequest(base.ResourceType): name = "buildrequest" plural = "buildrequests" endpoints = [BuildRequestEndpoint, BuildRequestsEndpoint] keyField = 'buildrequestid' eventPathPatterns = """ /buildsets/:buildsetid/builders/:builderid/buildrequests/:buildrequestid /buildrequests/:buildrequestid /builders/:builderid/buildrequests/:buildrequestid """ subresources = ["Build"] class EntityType(types.Entity): buildrequestid = types.Integer() buildsetid = types.Integer() builderid = types.Integer() priority = types.Integer() claimed = types.Boolean() claimed_at = types.NoneOk(types.DateTime()) claimed_by_masterid = types.NoneOk(types.Integer()) complete = types.Boolean() results = 
types.NoneOk(types.Integer()) submitted_at = types.DateTime() complete_at = types.NoneOk(types.DateTime()) waited_for = types.Boolean() properties = types.NoneOk(types.SourcedProperties()) entityType = EntityType(name, 'Buildrequest') @defer.inlineCallbacks def generateEvent(self, brids, event): for brid in brids: # get the build and munge the result for the notification br = yield self.master.data.get(('buildrequests', str(brid))) self.produceEvent(br, event) @defer.inlineCallbacks def callDbBuildRequests(self, brids, db_callable, event, **kw): if not brids: # empty buildrequest list. No need to call db API return True try: yield db_callable(brids, **kw) except AlreadyClaimedError: # the db layer returned an AlreadyClaimedError exception, usually # because one of the buildrequests has already been claimed by # another master return False yield self.generateEvent(brids, event) return True @base.updateMethod def claimBuildRequests(self, brids, claimed_at=None): return self.callDbBuildRequests(brids, self.master.db.buildrequests.claimBuildRequests, event="claimed", claimed_at=claimed_at) @base.updateMethod @defer.inlineCallbacks def unclaimBuildRequests(self, brids): if brids: yield self.master.db.buildrequests.unclaimBuildRequests(brids) yield self.generateEvent(brids, "unclaimed") @base.updateMethod @defer.inlineCallbacks def completeBuildRequests(self, brids, results, complete_at=None): assert results != RETRY, "a buildrequest cannot be completed with a retry status!" if not brids: # empty buildrequest list. 
No need to call db API return True try: yield self.master.db.buildrequests.completeBuildRequests( brids, results, complete_at=complete_at) except NotClaimedError: # the db layer returned a NotClaimedError exception, usually # because one of the buildrequests has been claimed by another # master return False yield self.generateEvent(brids, "complete") # check for completed buildsets -- one call for each build request with # a unique bsid seen_bsids = set() for brid in brids: brdict = yield self.master.db.buildrequests.getBuildRequest(brid) if brdict: bsid = brdict['buildsetid'] if bsid in seen_bsids: continue seen_bsids.add(bsid) yield self.master.data.updates.maybeBuildsetComplete(bsid) return True @base.updateMethod @defer.inlineCallbacks def rebuildBuildrequest(self, buildrequest): # goal is to make a copy of the original buildset buildset = yield self.master.data.get(('buildsets', buildrequest['buildsetid'])) properties = yield self.master.data.get(('buildsets', buildrequest['buildsetid'], 'properties')) ssids = [ss['ssid'] for ss in buildset['sourcestamps']] res = yield self.master.data.updates.addBuildset( waited_for=False, scheduler='rebuild', sourcestamps=ssids, reason='rebuild', properties=properties, builderids=[buildrequest['builderid']], external_idstring=buildset['external_idstring'], parent_buildid=buildset['parent_buildid'], parent_relationship=buildset['parent_relationship']) return res buildbot-3.4.0/master/buildbot/data/builds.py000066400000000000000000000223261413250514000211660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types from buildbot.data.resultspec import ResultSpec class Db2DataMixin: def _generate_filtered_properties(self, props, filters): """ This method returns Build's properties according to property filters. .. seealso:: `Official Documentation `_ :param props: The Build's properties as a dict (from db) :param filters: Desired properties keys as a list (from API URI) """ # by default none properties are returned if props and filters: return (props if '*' in filters else dict(((k, v) for k, v in props.items() if k in filters))) return None def db2data(self, dbdict): data = { 'buildid': dbdict['id'], 'number': dbdict['number'], 'builderid': dbdict['builderid'], 'buildrequestid': dbdict['buildrequestid'], 'workerid': dbdict['workerid'], 'masterid': dbdict['masterid'], 'started_at': dbdict['started_at'], 'complete_at': dbdict['complete_at'], 'complete': dbdict['complete_at'] is not None, 'state_string': dbdict['state_string'], 'results': dbdict['results'], 'properties': {} } return defer.succeed(data) fieldMapping = { 'buildid': 'builds.id', 'number': 'builds.number', 'builderid': 'builds.builderid', 'buildrequestid': 'builds.buildrequestid', 'workerid': 'builds.workerid', 'masterid': 'builds.masterid', 'started_at': 'builds.started_at', 'complete_at': 'builds.complete_at', 'state_string': 'builds.state_string', 'results': 'builds.results', } class BuildEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = False pathPatterns = """ /builds/n:buildid /builders/n:builderid/builds/n:number /builders/i:buildername/builds/n:number """ @defer.inlineCallbacks def 
get(self, resultSpec, kwargs): if 'buildid' in kwargs: dbdict = yield self.master.db.builds.getBuild(kwargs['buildid']) else: bldr = yield self.getBuilderId(kwargs) if bldr is None: return None num = kwargs['number'] dbdict = yield self.master.db.builds.getBuildByNumber(bldr, num) data = yield self.db2data(dbdict) if dbdict else None # In some cases, data could be None if data: filters = resultSpec.popProperties() if hasattr( resultSpec, 'popProperties') else [] # Avoid to request DB for Build's properties if not specified if filters: try: props = yield self.master.db.builds.getBuildProperties(data['buildid']) except (KeyError, TypeError): props = {} filtered_properties = self._generate_filtered_properties( props, filters) if filtered_properties: data['properties'] = filtered_properties return data @defer.inlineCallbacks def actionStop(self, args, kwargs): buildid = kwargs.get('buildid') if buildid is None: bldr = kwargs['builderid'] num = kwargs['number'] dbdict = yield self.master.db.builds.getBuildByNumber(bldr, num) buildid = dbdict['id'] self.master.mq.produce(("control", "builds", str(buildid), 'stop'), dict(reason=kwargs.get('reason', args.get('reason', 'no reason')))) @defer.inlineCallbacks def actionRebuild(self, args, kwargs): # we use the self.get and not self.data.get to be able to support all # the pathPatterns of this endpoint build = yield self.get(ResultSpec(), kwargs) buildrequest = yield self.master.data.get(('buildrequests', build['buildrequestid'])) res = yield self.master.data.updates.rebuildBuildrequest(buildrequest) return res class BuildsEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = True pathPatterns = """ /builds /builders/n:builderid/builds /builders/i:buildername/builds /buildrequests/n:buildrequestid/builds /changes/n:changeid/builds /workers/n:workerid/builds """ rootLinkName = 'builds' @defer.inlineCallbacks def get(self, resultSpec, kwargs): changeid = kwargs.get('changeid') if changeid is not None: 
builds = yield self.master.db.builds.getBuildsForChange(changeid) else: # following returns None if no filter # true or false, if there is a complete filter builderid = None if 'builderid' in kwargs or 'buildername' in kwargs: builderid = yield self.getBuilderId(kwargs) if builderid is None: return [] complete = resultSpec.popBooleanFilter("complete") buildrequestid = resultSpec.popIntegerFilter("buildrequestid") resultSpec.fieldMapping = self.fieldMapping builds = yield self.master.db.builds.getBuilds( builderid=builderid, buildrequestid=kwargs.get('buildrequestid', buildrequestid), workerid=kwargs.get('workerid'), complete=complete, resultSpec=resultSpec) # returns properties' list filters = resultSpec.popProperties() buildscol = [] for b in builds: data = yield self.db2data(b) if kwargs.get('graphql'): # let the graphql engine manage the properties del data['properties'] else: # Avoid to request DB for Build's properties if not specified if filters: props = yield self.master.db.builds.getBuildProperties(data["buildid"]) filtered_properties = self._generate_filtered_properties(props, filters) if filtered_properties: data["properties"] = filtered_properties buildscol.append(data) return buildscol class Build(base.ResourceType): name = "build" plural = "builds" endpoints = [BuildEndpoint, BuildsEndpoint] keyField = "buildid" eventPathPatterns = """ /builders/:builderid/builds/:number /builds/:buildid /workers/:workerid/builds/:buildid """ subresources = ["Step", "Property"] class EntityType(types.Entity): buildid = types.Integer() number = types.Integer() builderid = types.Integer() buildrequestid = types.Integer() workerid = types.Integer() masterid = types.Integer() started_at = types.DateTime() complete = types.Boolean() complete_at = types.NoneOk(types.DateTime()) results = types.NoneOk(types.Integer()) state_string = types.String() properties = types.NoneOk(types.SourcedProperties()) entityType = EntityType(name, 'Build') @defer.inlineCallbacks def 
generateEvent(self, _id, event): # get the build and munge the result for the notification build = yield self.master.data.get(('builds', str(_id))) self.produceEvent(build, event) @base.updateMethod @defer.inlineCallbacks def addBuild(self, builderid, buildrequestid, workerid): res = yield self.master.db.builds.addBuild( builderid=builderid, buildrequestid=buildrequestid, workerid=workerid, masterid=self.master.masterid, state_string='created') return res @base.updateMethod def generateNewBuildEvent(self, buildid): return self.generateEvent(buildid, "new") @base.updateMethod @defer.inlineCallbacks def setBuildStateString(self, buildid, state_string): res = yield self.master.db.builds.setBuildStateString( buildid=buildid, state_string=state_string) yield self.generateEvent(buildid, "update") return res @base.updateMethod @defer.inlineCallbacks def finishBuild(self, buildid, results): res = yield self.master.db.builds.finishBuild( buildid=buildid, results=results) yield self.generateEvent(buildid, "finished") return res buildbot-3.4.0/master/buildbot/data/buildsets.py000066400000000000000000000214161413250514000217010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import copy from twisted.internet import defer from twisted.python import log from buildbot.data import base from buildbot.data import sourcestamps as sourcestampsapi from buildbot.data import types from buildbot.db.buildsets import AlreadyCompleteError from buildbot.process.buildrequest import BuildRequestCollapser from buildbot.process.results import SUCCESS from buildbot.process.results import worst_status from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class Db2DataMixin: @defer.inlineCallbacks def db2data(self, bsdict): if not bsdict: return None buildset = bsdict.copy() # gather the actual sourcestamps, in parallel sourcestamps = [] @defer.inlineCallbacks def getSs(ssid): ss = yield self.master.data.get(('sourcestamps', str(ssid))) sourcestamps.append(ss) yield defer.DeferredList([getSs(id) for id in buildset['sourcestamps']], fireOnOneErrback=True, consumeErrors=True) buildset['sourcestamps'] = sourcestamps # minor modifications buildset['submitted_at'] = datetime2epoch(buildset['submitted_at']) buildset['complete_at'] = datetime2epoch(buildset['complete_at']) return buildset fieldMapping = { 'bsid': 'buildsets.id', 'external_idstring': 'buildsets.external_idstring', 'reason': 'buildsets.reason', 'submitted_at': 'buildsets.submitted_at', 'complete': 'buildsets.complete', 'complete_at': 'buildsets.complete_at', 'results': 'buildsets.results', 'parent_buildid': 'buildsets.parent_buildid', 'parent_relationship': 'buildsets.parent_relationship' } class BuildsetEndpoint(Db2DataMixin, base.Endpoint): isCollection = False pathPatterns = """ /buildsets/n:bsid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): res = yield self.master.db.buildsets.getBuildset(kwargs['bsid']) res = yield self.db2data(res) return res class BuildsetsEndpoint(Db2DataMixin, base.Endpoint): isCollection = True pathPatterns = """ /buildsets """ rootLinkName = 'buildsets' def get(self, resultSpec, kwargs): 
complete = resultSpec.popBooleanFilter('complete') resultSpec.fieldMapping = self.fieldMapping d = self.master.db.buildsets.getBuildsets( complete=complete, resultSpec=resultSpec) @d.addCallback def db2data(buildsets): d = defer.DeferredList([self.db2data(bs) for bs in buildsets], fireOnOneErrback=True, consumeErrors=True) @d.addCallback def getResults(res): return [r[1] for r in res] return d return d class Buildset(base.ResourceType): name = "buildset" plural = "buildsets" endpoints = [BuildsetEndpoint, BuildsetsEndpoint] keyField = 'bsid' eventPathPatterns = """ /buildsets/:bsid """ class EntityType(types.Entity): bsid = types.Integer() external_idstring = types.NoneOk(types.String()) reason = types.String() submitted_at = types.Integer() complete = types.Boolean() complete_at = types.NoneOk(types.Integer()) results = types.NoneOk(types.Integer()) sourcestamps = types.List( of=sourcestampsapi.SourceStamp.entityType) parent_buildid = types.NoneOk(types.Integer()) parent_relationship = types.NoneOk(types.String()) entityType = EntityType(name, 'Buildset') subresources = ["Property"] @base.updateMethod @defer.inlineCallbacks def addBuildset(self, waited_for, scheduler=None, sourcestamps=None, reason='', properties=None, builderids=None, external_idstring=None, parent_buildid=None, parent_relationship=None): if sourcestamps is None: sourcestamps = [] if properties is None: properties = {} if builderids is None: builderids = [] submitted_at = int(self.master.reactor.seconds()) bsid, brids = yield self.master.db.buildsets.addBuildset( sourcestamps=sourcestamps, reason=reason, properties=properties, builderids=builderids, waited_for=waited_for, external_idstring=external_idstring, submitted_at=epoch2datetime(submitted_at), parent_buildid=parent_buildid, parent_relationship=parent_relationship) yield BuildRequestCollapser(self.master, list(brids.values())).collapse() # get each of the sourcestamps for this buildset (sequentially) bsdict = yield 
self.master.db.buildsets.getBuildset(bsid) sourcestamps = [] for ssid in bsdict['sourcestamps']: sourcestamps.append( (yield self.master.data.get(('sourcestamps', str(ssid)))).copy() ) # notify about the component build requests brResource = self.master.data.getResourceType("buildrequest") brResource.generateEvent(list(brids.values()), 'new') # and the buildset itself msg = dict( bsid=bsid, external_idstring=external_idstring, reason=reason, submitted_at=submitted_at, complete=False, complete_at=None, results=None, scheduler=scheduler, sourcestamps=sourcestamps) # TODO: properties=properties) self.produceEvent(msg, "new") log.msg("added buildset %d to database" % bsid) # if there are no builders, then this is done already, so send the # appropriate messages for that if not builderids: yield self.maybeBuildsetComplete(bsid) return (bsid, brids) @base.updateMethod @defer.inlineCallbacks def maybeBuildsetComplete(self, bsid): brdicts = yield self.master.db.buildrequests.getBuildRequests( bsid=bsid, complete=False) # if there are incomplete buildrequests, bail out if brdicts: return brdicts = yield self.master.db.buildrequests.getBuildRequests(bsid=bsid) # figure out the overall results of the buildset: cumulative_results = SUCCESS for brdict in brdicts: cumulative_results = worst_status( cumulative_results, brdict['results']) # get a copy of the buildset bsdict = yield self.master.db.buildsets.getBuildset(bsid) # if it's already completed, we're late to the game, and there's # nothing to do. # # NOTE: there's still a strong possibility of a race condition here, # which would cause buildset being completed twice. 
# in this case, the db layer will detect that and raise AlreadyCompleteError if bsdict['complete']: return # mark it as completed in the database complete_at = epoch2datetime(int(self.master.reactor.seconds())) try: yield self.master.db.buildsets.completeBuildset(bsid, cumulative_results, complete_at=complete_at) except AlreadyCompleteError: return # get the sourcestamps for the message # get each of the sourcestamps for this buildset (sequentially) bsdict = yield self.master.db.buildsets.getBuildset(bsid) sourcestamps = [] for ssid in bsdict['sourcestamps']: sourcestamps.append( copy.deepcopy( (yield self.master.data.get(('sourcestamps', str(ssid)))) ) ) msg = dict( bsid=bsid, external_idstring=bsdict['external_idstring'], reason=bsdict['reason'], sourcestamps=sourcestamps, submitted_at=bsdict['submitted_at'], complete=True, complete_at=complete_at, results=cumulative_results) # TODO: properties=properties) self.produceEvent(msg, "complete") buildbot-3.4.0/master/buildbot/data/changes.py000066400000000000000000000212001413250514000213020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import copy import json from twisted.internet import defer from twisted.python import log from buildbot.data import base from buildbot.data import sourcestamps from buildbot.data import types from buildbot.process import metrics from buildbot.process.users import users from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class FixerMixin: @defer.inlineCallbacks def _fixChange(self, change, is_graphql): # TODO: make these mods in the DB API if change: change = change.copy() change['when_timestamp'] = datetime2epoch(change['when_timestamp']) if is_graphql: props = change['properties'] change['properties'] = [ {'name': k, 'source': v[1], 'value': json.dumps(v[0])} for k, v in props.items() ] else: sskey = ('sourcestamps', str(change['sourcestampid'])) change['sourcestamp'] = yield self.master.data.get(sskey) del change['sourcestampid'] return change fieldMapping = { 'changeid': 'changes.changeid', } class ChangeEndpoint(FixerMixin, base.Endpoint): isCollection = False pathPatterns = """ /changes/n:changeid """ def get(self, resultSpec, kwargs): d = self.master.db.changes.getChange(kwargs['changeid']) d.addCallback(self._fixChange, is_graphql='graphql' in kwargs) return d class ChangesEndpoint(FixerMixin, base.BuildNestingMixin, base.Endpoint): isCollection = True pathPatterns = """ /changes /builders/n:builderid/builds/n:build_number/changes /builds/n:buildid/changes /sourcestamps/n:ssid/changes """ rootLinkName = 'changes' @defer.inlineCallbacks def get(self, resultSpec, kwargs): buildid = kwargs.get('buildid') if 'build_number' in kwargs: buildid = yield self.getBuildid(kwargs) ssid = kwargs.get('ssid') if buildid is not None: changes = yield self.master.db.changes.getChangesForBuild(buildid) elif ssid is not None: change = yield self.master.db.changes.getChangeFromSSid(ssid) if change is not None: changes = [change] else: changes = [] else: if resultSpec is not None: resultSpec.fieldMapping = 
self.fieldMapping changes = yield self.master.db.changes.getChanges(resultSpec=resultSpec) results = [] for ch in changes: results.append((yield self._fixChange(ch, is_graphql='graphql' in kwargs))) return results class Change(base.ResourceType): name = "change" plural = "changes" endpoints = [ChangeEndpoint, ChangesEndpoint] eventPathPatterns = """ /changes/:changeid """ keyField = "changeid" subresources = ["Build", "Property"] class EntityType(types.Entity): changeid = types.Integer() parent_changeids = types.List(of=types.Integer()) author = types.String() committer = types.String() files = types.List(of=types.String()) comments = types.String() revision = types.NoneOk(types.String()) when_timestamp = types.Integer() branch = types.NoneOk(types.String()) category = types.NoneOk(types.String()) revlink = types.NoneOk(types.String()) properties = types.SourcedProperties() repository = types.String() project = types.String() codebase = types.String() sourcestamp = sourcestamps.SourceStamp.entityType entityType = EntityType(name, 'Change') @base.updateMethod @defer.inlineCallbacks def addChange(self, files=None, comments=None, author=None, committer=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase=None, project='', src=None): metrics.MetricCountEvent.log("added_changes", 1) if properties is None: properties = {} # add the source to the properties for k in properties: properties[k] = (properties[k], 'Change') # get a user id if src: # create user object, returning a corresponding uid uid = yield users.createUserObject(self.master, author, src) else: uid = None if not revlink and revision and repository and callable(self.master.config.revlink): # generate revlink from revision and repository using the configured callable revlink = self.master.config.revlink(revision, repository) or '' if callable(category): pre_change = self.master.config.preChangeGenerator(author=author, committer=committer, 
files=files, comments=comments, revision=revision, when_timestamp=when_timestamp, branch=branch, revlink=revlink, properties=properties, repository=repository, project=project) category = category(pre_change) # set the codebase, either the default, supplied, or generated if codebase is None \ and self.master.config.codebaseGenerator is not None: pre_change = self.master.config.preChangeGenerator(author=author, committer=committer, files=files, comments=comments, revision=revision, when_timestamp=when_timestamp, branch=branch, category=category, revlink=revlink, properties=properties, repository=repository, project=project) codebase = self.master.config.codebaseGenerator(pre_change) codebase = str(codebase) else: codebase = codebase or '' # add the Change to the database changeid = yield self.master.db.changes.addChange( author=author, committer=committer, files=files, comments=comments, revision=revision, when_timestamp=epoch2datetime(when_timestamp), branch=branch, category=category, revlink=revlink, properties=properties, repository=repository, codebase=codebase, project=project, uid=uid) # get the change and munge the result for the notification change = yield self.master.data.get(('changes', str(changeid))) change = copy.deepcopy(change) self.produceEvent(change, 'new') # log, being careful to handle funny characters msg = "added change with revision {} to database".format(revision) log.msg(msg.encode('utf-8', 'replace')) return changeid buildbot-3.4.0/master/buildbot/data/changesources.py000066400000000000000000000100531413250514000225270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import masters from buildbot.data import types from buildbot.db.changesources import ChangeSourceAlreadyClaimedError class Db2DataMixin: @defer.inlineCallbacks def db2data(self, dbdict): master = None if dbdict['masterid'] is not None: master = yield self.master.data.get( ('masters', dbdict['masterid'])) data = { 'changesourceid': dbdict['id'], 'name': dbdict['name'], 'master': master, } return data class ChangeSourceEndpoint(Db2DataMixin, base.Endpoint): pathPatterns = """ /changesources/n:changesourceid /masters/n:masterid/changesources/n:changesourceid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): dbdict = yield self.master.db.changesources.getChangeSource( kwargs['changesourceid']) if 'masterid' in kwargs: if dbdict['masterid'] != kwargs['masterid']: return None return (yield self.db2data(dbdict)) if dbdict else None class ChangeSourcesEndpoint(Db2DataMixin, base.Endpoint): isCollection = True pathPatterns = """ /changesources /masters/n:masterid/changesources """ rootLinkName = 'changesources' @defer.inlineCallbacks def get(self, resultSpec, kwargs): changesources = yield self.master.db.changesources.getChangeSources( masterid=kwargs.get('masterid')) csdicts = yield defer.DeferredList( [self.db2data(cs) for cs in changesources], consumeErrors=True, fireOnOneErrback=True) return [r for (s, r) in csdicts] class ChangeSource(base.ResourceType): name = "changesource" plural = "changesources" endpoints = [ChangeSourceEndpoint, ChangeSourcesEndpoint] keyField = 'changesourceid' class EntityType(types.Entity): changesourceid = types.Integer() name = types.String() master 
= types.NoneOk(masters.Master.entityType) entityType = EntityType(name, 'Changesource') @base.updateMethod def findChangeSourceId(self, name): return self.master.db.changesources.findChangeSourceId(name) @base.updateMethod def trySetChangeSourceMaster(self, changesourceid, masterid): # the db layer throws an exception if the claim fails; we translate # that to a straight true-false value. We could trap the exception # type, but that seems a bit too restrictive d = self.master.db.changesources.setChangeSourceMaster( changesourceid, masterid) # set is successful: deferred result is True d.addCallback(lambda _: True) @d.addErrback def trapAlreadyClaimedError(why): # the db layer throws an exception if the claim fails; we squash # that error but let other exceptions continue upward why.trap(ChangeSourceAlreadyClaimedError) # set failed: deferred result is False return False return d @defer.inlineCallbacks def _masterDeactivated(self, masterid): changesources = yield self.master.db.changesources.getChangeSources( masterid=masterid) for cs in changesources: yield self.master.db.changesources.setChangeSourceMaster(cs['id'], None) buildbot-3.4.0/master/buildbot/data/connector.py000066400000000000000000000250201413250514000216700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import functools import inspect from twisted.internet import defer from twisted.python import reflect from buildbot.data import base from buildbot.data import exceptions from buildbot.data import resultspec from buildbot.util import bytes2unicode from buildbot.util import pathmatch from buildbot.util import service class Updates: # empty container object; see _scanModule, below pass class RTypes: # empty container object; see _scanModule, below pass class DataConnector(service.AsyncService): submodules = [ 'buildbot.data.build_data', 'buildbot.data.builders', 'buildbot.data.builds', 'buildbot.data.buildrequests', 'buildbot.data.workers', 'buildbot.data.steps', 'buildbot.data.logs', 'buildbot.data.logchunks', 'buildbot.data.buildsets', 'buildbot.data.changes', 'buildbot.data.changesources', 'buildbot.data.masters', 'buildbot.data.sourcestamps', 'buildbot.data.schedulers', 'buildbot.data.forceschedulers', 'buildbot.data.root', 'buildbot.data.properties', 'buildbot.data.test_results', 'buildbot.data.test_result_sets', ] name = "data" def __init__(self): self.matcher = pathmatch.Matcher() self.rootLinks = [] # links from the root of the API @defer.inlineCallbacks def setServiceParent(self, parent): yield super().setServiceParent(parent) self._setup() def _scanModule(self, mod, _noSetattr=False): for sym in dir(mod): obj = getattr(mod, sym) if inspect.isclass(obj) and issubclass(obj, base.ResourceType): rtype = obj(self.master) setattr(self.rtypes, rtype.name, rtype) setattr(self.plural_rtypes, rtype.plural, rtype) self.graphql_rtypes[rtype.entityType.toGraphQLTypeName()] = rtype # put its update methods into our 'updates' attribute for name in dir(rtype): o = getattr(rtype, name) if hasattr(o, 'isUpdateMethod'): setattr(self.updates, name, o) # load its endpoints for ep in rtype.getEndpoints(): # don't use inherited values for these parameters clsdict = ep.__class__.__dict__ pathPatterns = clsdict.get('pathPatterns', '') pathPatterns 
= pathPatterns.split() pathPatterns = [tuple(pp.split('/')[1:]) for pp in pathPatterns] for pp in pathPatterns: # special-case the root if pp == ('',): pp = () self.matcher[pp] = ep rootLinkName = clsdict.get('rootLinkName') if rootLinkName: self.rootLinks.append({'name': rootLinkName}) def _setup(self): self.updates = Updates() self.graphql_rtypes = {} self.rtypes = RTypes() self.plural_rtypes = RTypes() for moduleName in self.submodules: module = reflect.namedModule(moduleName) self._scanModule(module) def getEndpoint(self, path): try: return self.matcher[path] except KeyError as e: raise exceptions.InvalidPathError( "Invalid path: " + "/".join([str(p) for p in path])) from e def getResourceType(self, name): return getattr(self.rtypes, name, None) def getEndPointForResourceName(self, name): rtype = getattr(self.rtypes, name, None) rtype_plural = getattr(self.plural_rtypes, name, None) if rtype is not None: return rtype.getDefaultEndpoint() elif rtype_plural is not None: return rtype_plural.getCollectionEndpoint() return None def getResourceTypeForGraphQlType(self, type): if type not in self.graphql_rtypes: raise RuntimeError(f"Can't get rtype for {type}: {self.graphql_rtypes.keys()}") return self.graphql_rtypes.get(type) def get(self, path, filters=None, fields=None, order=None, limit=None, offset=None): resultSpec = resultspec.ResultSpec(filters=filters, fields=fields, order=order, limit=limit, offset=offset) return self.get_with_resultspec(path, resultSpec) @defer.inlineCallbacks def get_with_resultspec(self, path, resultSpec): endpoint, kwargs = self.getEndpoint(path) rv = yield endpoint.get(resultSpec, kwargs) if resultSpec: rv = resultSpec.apply(rv) return rv def control(self, action, args, path): endpoint, kwargs = self.getEndpoint(path) return endpoint.control(action, args, kwargs) def produceEvent(self, rtype, msg, event): # warning, this is temporary api, until all code is migrated to data # api rsrc = self.getResourceType(rtype) return 
rsrc.produceEvent(msg, event) @functools.lru_cache(1) def allEndpoints(self): """return the full spec of the connector as a list of dicts """ paths = [] for k, v in sorted(self.matcher.iterPatterns()): paths.append(dict(path="/".join(k), plural=str(v.rtype.plural), type=str(v.rtype.entityType.name), type_spec=v.rtype.entityType.getSpec())) return paths def resultspec_from_jsonapi(self, req_args, entityType, is_collection): def checkFields(fields, negOk=False): for field in fields: k = bytes2unicode(field) if k[0] == '-' and negOk: k = k[1:] if k not in entityType.fieldNames: raise exceptions.InvalidQueryParameter("no such field '{}'".format(k)) limit = offset = order = fields = None filters, properties = [], [] limit = offset = order = fields = None filters, properties = [], [] for arg in req_args: argStr = bytes2unicode(arg) if argStr == 'order': order = tuple([bytes2unicode(o) for o in req_args[arg]]) checkFields(order, True) elif argStr == 'field': fields = req_args[arg] checkFields(fields, False) elif argStr == 'limit': try: limit = int(req_args[arg][0]) except Exception as e: raise exceptions.InvalidQueryParameter('invalid limit') from e elif argStr == 'offset': try: offset = int(req_args[arg][0]) except Exception as e: raise exceptions.InvalidQueryParameter('invalid offset') from e elif argStr == 'property': try: props = [] for v in req_args[arg]: if not isinstance(v, (bytes, str)): raise TypeError( "Invalid type {} for {}".format(type(v), v)) props.append(bytes2unicode(v)) except Exception as e: raise exceptions.InvalidQueryParameter( 'invalid property value for {}'.format(arg)) from e properties.append(resultspec.Property(arg, 'eq', props)) elif argStr in entityType.fieldNames: field = entityType.fields[argStr] try: values = [field.valueFromString(v) for v in req_args[arg]] except Exception as e: raise exceptions.InvalidQueryParameter( 'invalid filter value for {}'.format(argStr)) from e filters.append(resultspec.Filter(argStr, 'eq', values)) elif '__' in 
argStr: field, op = argStr.rsplit('__', 1) args = req_args[arg] operators = (resultspec.Filter.singular_operators if len(args) == 1 else resultspec.Filter.plural_operators) if op in operators and field in entityType.fieldNames: fieldType = entityType.fields[field] try: values = [fieldType.valueFromString(v) for v in req_args[arg]] except Exception as e: raise exceptions.InvalidQueryParameter( 'invalid filter value for {}'.format(argStr)) from e filters.append(resultspec.Filter(field, op, values)) else: raise exceptions.InvalidQueryParameter( "unrecognized query parameter '{}'".format(argStr)) # if ordering or filtering is on a field that's not in fields, bail out if fields: fields = [bytes2unicode(f) for f in fields] fieldsSet = set(fields) if order and {o.lstrip('-') for o in order} - fieldsSet: raise exceptions.InvalidQueryParameter("cannot order on un-selected fields") for filter in filters: if filter.field not in fieldsSet: raise exceptions.InvalidQueryParameter("cannot filter on un-selected fields") # build the result spec rspec = resultspec.ResultSpec(fields=fields, limit=limit, offset=offset, order=order, filters=filters, properties=properties) # for singular endpoints, only allow fields if not is_collection: if rspec.filters: raise exceptions.InvalidQueryParameter("this is not a collection") return rspec buildbot-3.4.0/master/buildbot/data/exceptions.py000066400000000000000000000022651413250514000220650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # copy some exceptions from the DB layer from buildbot.db.schedulers import SchedulerAlreadyClaimedError __all__ = [ 'SchedulerAlreadyClaimedError', 'InvalidPathError', 'InvalidControlException', ] class DataException(Exception): pass class InvalidPathError(DataException): "A path argument was invalid or unknown" class InvalidControlException(DataException): "Action is not supported" class InvalidQueryParameter(DataException): "Query Parameter was invalid" buildbot-3.4.0/master/buildbot/data/forceschedulers.py000066400000000000000000000074171413250514000230700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types from buildbot.schedulers import forcesched from buildbot.www.rest import JSONRPC_CODES from buildbot.www.rest import BadJsonRpc2 def forceScheduler2Data(sched): ret = dict(all_fields=[], name=str(sched.name), button_name=str(sched.buttonName), label=str(sched.label), builder_names=[str(name) for name in sched.builderNames], enabled=sched.enabled) ret["all_fields"] = [field.getSpec() for field in sched.all_fields] return ret class ForceSchedulerEndpoint(base.Endpoint): isCollection = False pathPatterns = """ /forceschedulers/i:schedulername """ def findForceScheduler(self, schedulername): # eventually this may be db backed. This is why the API is async for sched in self.master.allSchedulers(): if sched.name == schedulername and isinstance(sched, forcesched.ForceScheduler): return defer.succeed(sched) return None @defer.inlineCallbacks def get(self, resultSpec, kwargs): sched = yield self.findForceScheduler(kwargs['schedulername']) if sched is not None: return forceScheduler2Data(sched) return None @defer.inlineCallbacks def control(self, action, args, kwargs): if action == "force": sched = yield self.findForceScheduler(kwargs['schedulername']) if "owner" not in args: args['owner'] = "user" try: res = yield sched.force(**args) return res except forcesched.CollectedValidationError as e: raise BadJsonRpc2(e.errors, JSONRPC_CODES["invalid_params"]) from e return None class ForceSchedulersEndpoint(base.Endpoint): isCollection = True pathPatterns = """ /forceschedulers /builders/:builderid/forceschedulers """ rootLinkName = 'forceschedulers' @defer.inlineCallbacks def get(self, resultSpec, kwargs): ret = [] builderid = kwargs.get('builderid', None) if builderid is not None: bdict = yield self.master.db.builders.getBuilder(builderid) for sched in self.master.allSchedulers(): if isinstance(sched, forcesched.ForceScheduler): if builderid is 
not None and bdict['name'] not in sched.builderNames: continue ret.append(forceScheduler2Data(sched)) return ret class ForceScheduler(base.ResourceType): name = "forcescheduler" plural = "forceschedulers" endpoints = [ForceSchedulerEndpoint, ForceSchedulersEndpoint] keyField = "name" class EntityType(types.Entity): name = types.Identifier(50) button_name = types.String() label = types.String() builder_names = types.List(of=types.Identifier(50)) enabled = types.Boolean() all_fields = types.List(of=types.JsonObject()) entityType = EntityType(name, 'Forcescheduler') buildbot-3.4.0/master/buildbot/data/graphql.py000066400000000000000000000264111413250514000213410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import asyncio import functools import sys import textwrap from buildbot.asyncio import AsyncIOLoopWithTwisted from buildbot.asyncio import as_deferred from buildbot.asyncio import as_future from buildbot.data import resultspec from buildbot.data.types import Entity from buildbot.util import service try: import graphql from graphql.execution.execute import default_field_resolver except ImportError: # pragma: no cover graphql = None def _enforce_list(v): if isinstance(v, list): return v return [v] class GraphQLConnector(service.AsyncService): """Mixin class to separate the GraphQL traits for the data connector This class needs to use some async methods in the asyncio domain (instead of twisted) await in those domains are not compatible, and must be prefixed with as_deferred / as_future any method doing so must be prefixed with "_aio_" to indicate that their return value should be transformed with as_deferred, and they should themselves transform normal data api results with as_future() """ data = None asyncio_loop = None # asyncio will create an event loop if none exists yet in get_event_loop(). We need to set it # back via set_event_loop() if we want it to be properly closed. _saved_event_loop = None def reconfigServiceWithBuildbotConfig(self, new_config): if self.data is None: self.data = self.master.data config = new_config.www.get('graphql') self.enabled = False if config is None: return if graphql is None: raise ImportError("graphql is enabled but 'graphql-core' is not installed") self.enabled = True self.config = config loop = None try: if self._saved_event_loop is None: # Ideally we would like to use asyncio.get_event_loop() here. However, its API # makes it hard to use: the behavior depends on the current asyncio policy and # the default policy will create a new loop for the main thread if a loop was not # set before. 
Unfortunately we can't know whether a new loop was created, # and as a result we can't cleanup it in stopService(). Specifically, we can't # call the close() function, which results in occasional ResourceWarnings because # there is no one who would close the created loop. # # Using asyncio.get_running_loop() would potentially break if non-default asyncio # policy was used. The default policy is fine because separate threads have # separate event loops. Fortunately Buildbot does not change the default asyncio # policy, so this concern does not matter in practice. # # Note that asyncio.get_event_loop() is deprecated in favor of get_running_loop() if sys.version_info[:2] >= (3, 7): loop = asyncio.get_running_loop() else: loop = asyncio.get_event_loop() except RuntimeError: # get_running_loop throws if there's no current loop. # get_event_loop throws is there's no current loop and we're not on main thread. pass if self._saved_event_loop is None and not isinstance(loop, AsyncIOLoopWithTwisted): self._saved_event_loop = loop self.asyncio_loop = AsyncIOLoopWithTwisted(self.master.reactor) asyncio.set_event_loop(self.asyncio_loop) self.asyncio_loop.start() self.debug = self.config.get("debug") self.schema = graphql.build_schema(self.get_schema()) def stopService(self): if self.asyncio_loop: self.asyncio_loop.stop() self.asyncio_loop.close() # We want to restore the original event loop value even if was None because otherwise # we would be leaving our closed AsyncIOLoopWithTwisted instance as the event loop asyncio.set_event_loop(self._saved_event_loop) self.asyncio_loop = None return super().stopService() @functools.lru_cache(1) def get_schema(self): """Return the graphQL Schema of the buildbot data model""" types = {} schema = textwrap.dedent( """ # custom scalar types for buildbot data model scalar Date # stored as utc unix timestamp scalar Binary # arbitrary data stored as base85 scalar JSON # arbitrary json stored as string, mainly used for properties values """ ) # 
type dependencies must be added recursively def add_dependent_types(ent): typename = ent.toGraphQLTypeName() if typename in types: return if isinstance(ent, Entity): types[typename] = ent for dtyp in ent.graphQLDependentTypes(): add_dependent_types(dtyp) rtype = self.data.getResourceType(ent.name) if rtype is not None: for subresource in rtype.subresources: rtype = self.data.getResourceTypeForGraphQlType(subresource) add_dependent_types(rtype.entityType) # root query contain the list of item available directly # mapped against the rootLinks queries_schema = "" def format_query_fields(query_fields): query_fields = ",\n ".join(query_fields) if query_fields: query_fields = f"({query_fields})" return query_fields def format_subresource(rtype): queries_schema = "" typ = rtype.entityType typename = typ.toGraphQLTypeName() add_dependent_types(typ) query_fields = [] # build the queriable parameters, via query_fields for field, field_type in sorted(rtype.entityType.fields.items()): # in graphql, we handle properties as queriable sub resources # instead of hardcoded attributes like in rest api if field == 'properties': continue field_type_graphql = field_type.getGraphQLInputType() if field_type_graphql is None: continue query_fields.append(f"{field}: {field_type_graphql}") for op in sorted(operators): if op in ["in", "notin"]: if field_type_graphql in ["String", "Int"]: query_fields.append( f"{field}__{op}: [{field_type_graphql}]") else: query_fields.append(f"{field}__{op}: {field_type_graphql}") query_fields.extend(["order: String", "limit: Int", "offset: Int"]) ep = self.data.getEndPointForResourceName(rtype.plural) if ep is None or not ep.isPseudoCollection: plural_typespec = f"[{typename}]" else: plural_typespec = typename queries_schema += ( f" {rtype.plural}{format_query_fields(query_fields)}: {plural_typespec}!\n" ) # build the queriable parameter, via keyField keyfields = [] field = rtype.keyField if field not in rtype.entityType.fields: raise RuntimeError(f"bad 
keyField {field} not in entityType {rtype.entityType}") field_type = rtype.entityType.fields[field] field_type_graphql = field_type.toGraphQLTypeName() keyfields.append(f"{field}: {field_type_graphql}") queries_schema += ( f" {rtype.name}{format_query_fields(keyfields)}: {typename}\n" ) return queries_schema operators = set(resultspec.Filter.singular_operators) operators.update(resultspec.Filter.plural_operators) for rootlink in sorted(v["name"] for v in self.data.rootLinks): ep = self.data.matcher[(rootlink,)][0] queries_schema += format_subresource(ep.rtype) schema += "type Query {\n" + queries_schema + "}\n" schema += "type Subscription {\n" + queries_schema + "}\n" for name, typ in types.items(): type_spec = typ.toGraphQL() schema += f"type {name} {{\n" for field in type_spec.get("fields", []): field_type = field["type"] if not isinstance(field_type, str): field_type = field_type["type"] schema += f" {field['name']}: {field_type}\n" rtype = self.data.getResourceType(typ.name) if rtype is not None: for subresource in rtype.subresources: rtype = self.data.getResourceTypeForGraphQlType(subresource) schema += format_subresource(rtype) schema += "}\n" return schema async def _aio_ep_get(self, ep, kwargs, resultSpec): rv = await as_future(ep.get(resultSpec, kwargs)) if resultSpec: rv = resultSpec.apply(rv) return rv async def _aio_query(self, query): query = graphql.parse(query) errors = graphql.validate(self.schema, query) if errors: r = graphql.execution.ExecutionResult() r.errors = errors return r async def field_resolver(parent, resolve_info, **args): field = resolve_info.field_name if parent is not None and field in parent: res = default_field_resolver(parent, resolve_info, **args) if isinstance(res, list) and args: ep = self.data.getEndPointForResourceName(field) args = {k: _enforce_list(v) for k, v in args.items()} rspec = self.data.resultspec_from_jsonapi(args, ep.rtype.entityType, True) res = rspec.apply(res) return res ep = 
self.data.getEndPointForResourceName(field) rspec = None kwargs = ep.get_kwargs_from_graphql(parent, resolve_info, args) if ep.isCollection or ep.isPseudoCollection: args = {k: _enforce_list(v) for k, v in args.items()} rspec = self.data.resultspec_from_jsonapi(args, ep.rtype.entityType, True) return await self._aio_ep_get(ep, kwargs, rspec) # Execute res = await graphql.execute( self.schema, query, field_resolver=field_resolver, ) return res def query(self, query): return as_deferred(self._aio_query(query)) buildbot-3.4.0/master/buildbot/data/logchunks.py000066400000000000000000000120751413250514000217010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class LogChunkEndpointBase(base.BuildNestingMixin, base.Endpoint): @defer.inlineCallbacks def getLogIdAndDbDictFromKwargs(self, kwargs): # calculate the logid if 'logid' in kwargs: logid = kwargs['logid'] dbdict = None else: stepid = yield self.getStepid(kwargs) if stepid is None: return (None, None) dbdict = yield self.master.db.logs.getLogBySlug(stepid, kwargs.get('log_slug')) if not dbdict: return (None, None) logid = dbdict['id'] return (logid, dbdict) class LogChunkEndpoint(LogChunkEndpointBase): # Note that this is a singular endpoint, even though it overrides the # offset/limit query params in ResultSpec isCollection = False isPseudoCollection = True pathPatterns = """ /logchunks /logs/n:logid/contents /steps/n:stepid/logs/i:log_slug/contents /builds/n:buildid/steps/i:step_name/logs/i:log_slug/contents /builds/n:buildid/steps/n:step_number/logs/i:log_slug/contents /builders/n:builderid/builds/n:build_number/steps/i:step_name/logs/i:log_slug/contents /builders/n:builderid/builds/n:build_number/steps/n:step_number/logs/i:log_slug/contents """ rootLinkName = "logchunks" @defer.inlineCallbacks def get(self, resultSpec, kwargs): logid, dbdict = yield self.getLogIdAndDbDictFromKwargs(kwargs) if logid is None: return None firstline = int(resultSpec.offset or 0) lastline = None if resultSpec.limit is None else firstline + \ int(resultSpec.limit) - 1 resultSpec.removePagination() # get the number of lines, if necessary if lastline is None: if not dbdict: dbdict = yield self.master.db.logs.getLog(logid) if not dbdict: return None lastline = int(max(0, dbdict['num_lines'] - 1)) # bounds checks if firstline < 0 or lastline < 0 or firstline > lastline: return None logLines = yield self.master.db.logs.getLogLines( logid, firstline, lastline) return {'logid': logid, 'firstline': firstline, 'content': logLines} def 
get_kwargs_from_graphql(self, parent, resolve_info, args): if parent is not None: return self.get_kwargs_from_graphql_parent( parent, resolve_info.parent_type.name ) return {"logid": args["logid"]} class RawLogChunkEndpoint(LogChunkEndpointBase): # Note that this is a singular endpoint, even though it overrides the # offset/limit query params in ResultSpec isCollection = False isRaw = True pathPatterns = """ /logs/n:logid/raw /steps/n:stepid/logs/i:log_slug/raw /builds/n:buildid/steps/i:step_name/logs/i:log_slug/raw /builds/n:buildid/steps/n:step_number/logs/i:log_slug/raw /builders/n:builderid/builds/n:build_number/steps/i:step_name/logs/i:log_slug/raw /builders/n:builderid/builds/n:build_number/steps/n:step_number/logs/i:log_slug/raw """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): logid, dbdict = yield self.getLogIdAndDbDictFromKwargs(kwargs) if logid is None: return None if not dbdict: dbdict = yield self.master.db.logs.getLog(logid) if not dbdict: return None lastline = max(0, dbdict['num_lines'] - 1) logLines = yield self.master.db.logs.getLogLines( logid, 0, lastline) if dbdict['type'] == 's': logLines = "\n".join([line[1:] for line in logLines.splitlines()]) return {'raw': logLines, 'mime-type': 'text/html' if dbdict['type'] == 'h' else 'text/plain', 'filename': dbdict['slug']} class LogChunk(base.ResourceType): name = "logchunk" plural = "logchunks" endpoints = [LogChunkEndpoint, RawLogChunkEndpoint] keyField = "logid" class EntityType(types.Entity): logid = types.Integer() firstline = types.Integer() content = types.String() entityType = EntityType(name, 'LogChunk') buildbot-3.4.0/master/buildbot/data/logs.py000066400000000000000000000121711413250514000206450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types from buildbot.util import identifiers class EndpointMixin: def db2data(self, dbdict): data = { 'logid': dbdict['id'], 'name': dbdict['name'], 'slug': dbdict['slug'], 'stepid': dbdict['stepid'], 'complete': dbdict['complete'], 'num_lines': dbdict['num_lines'], 'type': dbdict['type'], } return defer.succeed(data) class LogEndpoint(EndpointMixin, base.BuildNestingMixin, base.Endpoint): isCollection = False pathPatterns = """ /logs/n:logid /steps/n:stepid/logs/i:log_slug /builds/n:buildid/steps/i:step_name/logs/i:log_slug /builds/n:buildid/steps/n:step_number/logs/i:log_slug /builders/n:builderid/builds/n:build_number/steps/i:step_name/logs/i:log_slug /builders/n:builderid/builds/n:build_number/steps/n:step_number/logs/i:log_slug /builders/i:buildername/builds/n:build_number/steps/i:step_name/logs/i:log_slug /builders/i:buildername/builds/n:build_number/steps/n:step_number/logs/i:log_slug """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): if 'logid' in kwargs: dbdict = yield self.master.db.logs.getLog(kwargs['logid']) return (yield self.db2data(dbdict)) if dbdict else None stepid = yield self.getStepid(kwargs) if stepid is None: return None dbdict = yield self.master.db.logs.getLogBySlug(stepid, kwargs.get('log_slug')) return (yield self.db2data(dbdict)) if dbdict else None class LogsEndpoint(EndpointMixin, base.BuildNestingMixin, base.Endpoint): isCollection = True pathPatterns = 
""" /steps/n:stepid/logs /builds/n:buildid/steps/i:step_name/logs /builds/n:buildid/steps/n:step_number/logs /builders/n:builderid/builds/n:build_number/steps/i:step_name/logs /builders/n:builderid/builds/n:build_number/steps/n:step_number/logs /builders/i:buildername/builds/n:build_number/steps/i:step_name/logs /builders/i:buildername/builds/n:build_number/steps/n:step_number/logs """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): stepid = yield self.getStepid(kwargs) if not stepid: return [] logs = yield self.master.db.logs.getLogs(stepid=stepid) results = [] for dbdict in logs: results.append((yield self.db2data(dbdict))) return results class Log(base.ResourceType): name = "log" plural = "logs" endpoints = [LogEndpoint, LogsEndpoint] keyField = "logid" eventPathPatterns = """ /logs/:logid /steps/:stepid/logs/:slug """ subresources = ["LogChunk"] class EntityType(types.Entity): logid = types.Integer() name = types.String() slug = types.Identifier(50) stepid = types.Integer() complete = types.Boolean() num_lines = types.Integer() type = types.Identifier(1) entityType = EntityType(name, 'Log') @defer.inlineCallbacks def generateEvent(self, _id, event): # get the build and munge the result for the notification build = yield self.master.data.get(('logs', str(_id))) self.produceEvent(build, event) @base.updateMethod @defer.inlineCallbacks def addLog(self, stepid, name, type): slug = identifiers.forceIdentifier(50, name) while True: try: logid = yield self.master.db.logs.addLog( stepid=stepid, name=name, slug=slug, type=type) except KeyError: slug = identifiers.incrementIdentifier(50, slug) continue self.generateEvent(logid, "new") return logid @base.updateMethod @defer.inlineCallbacks def appendLog(self, logid, content): res = yield self.master.db.logs.appendLog(logid=logid, content=content) self.generateEvent(logid, "append") return res @base.updateMethod @defer.inlineCallbacks def finishLog(self, logid): res = yield 
self.master.db.logs.finishLog(logid=logid) self.generateEvent(logid, "finished") return res @base.updateMethod def compressLog(self, logid): return self.master.db.logs.compressLog(logid=logid) buildbot-3.4.0/master/buildbot/data/masters.py000066400000000000000000000157371413250514000213720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.data import base from buildbot.data import resultspec from buildbot.data import types from buildbot.process.results import RETRY from buildbot.util import epoch2datetime # time, in minutes, after which a master that hasn't checked in will be # marked as inactive EXPIRE_MINUTES = 10 def _db2data(master): return dict(masterid=master['id'], name=master['name'], active=master['active'], last_active=master['last_active']) class MasterEndpoint(base.Endpoint): isCollection = False pathPatterns = """ /masters/n:masterid /builders/n:builderid/masters/n:masterid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): # if a builder is given, only return the master if it's associated with # this builder if 'builderid' in kwargs: builder = yield self.master.db.builders.getBuilder( builderid=kwargs['builderid']) if not builder or kwargs['masterid'] not in builder['masterids']: return None m 
= yield self.master.db.masters.getMaster(kwargs['masterid']) return _db2data(m) if m else None class MastersEndpoint(base.Endpoint): isCollection = True pathPatterns = """ /masters /builders/n:builderid/masters """ rootLinkName = 'masters' @defer.inlineCallbacks def get(self, resultSpec, kwargs): masterlist = yield self.master.db.masters.getMasters() if 'builderid' in kwargs: builder = yield self.master.db.builders.getBuilder( builderid=kwargs['builderid']) if builder: masterids = set(builder['masterids']) masterlist = [m for m in masterlist if m['id'] in masterids] else: masterlist = [] return [_db2data(m) for m in masterlist] class Master(base.ResourceType): name = "master" plural = "masters" endpoints = [MasterEndpoint, MastersEndpoint] eventPathPatterns = """ /masters/:masterid """ keyField = "masterid" subresources = ["Builder"] class EntityType(types.Entity): masterid = types.Integer() name = types.String() active = types.Boolean() last_active = types.DateTime() entityType = EntityType(name, 'Master') @base.updateMethod @defer.inlineCallbacks def masterActive(self, name, masterid): activated = yield self.master.db.masters.setMasterState( masterid=masterid, active=True) if activated: self.produceEvent( dict(masterid=masterid, name=name, active=True), 'started') @base.updateMethod @defer.inlineCallbacks def expireMasters(self, forceHouseKeeping=False): too_old = epoch2datetime(self.master.reactor.seconds() - 60 * EXPIRE_MINUTES) masters = yield self.master.db.masters.getMasters() for m in masters: if m['last_active'] is not None and m['last_active'] >= too_old: continue # mark the master inactive, and send a message on its behalf deactivated = yield self.master.db.masters.setMasterState( masterid=m['id'], active=False) if deactivated: yield self._masterDeactivated(m['id'], m['name']) elif forceHouseKeeping: yield self._masterDeactivatedHousekeeping(m['id'], m['name']) @base.updateMethod @defer.inlineCallbacks def masterStopped(self, name, masterid): deactivated 
= yield self.master.db.masters.setMasterState( masterid=masterid, active=False) if deactivated: yield self._masterDeactivated(masterid, name) @defer.inlineCallbacks def _masterDeactivatedHousekeeping(self, masterid, name): log.msg("doing housekeeping for master {} {}".format(masterid, name)) # common code for deactivating a master yield self.master.data.rtypes.worker._masterDeactivated( masterid=masterid) yield self.master.data.rtypes.builder._masterDeactivated( masterid=masterid) yield self.master.data.rtypes.scheduler._masterDeactivated( masterid=masterid) yield self.master.data.rtypes.changesource._masterDeactivated( masterid=masterid) # for each build running on that instance.. builds = yield self.master.data.get(('builds',), filters=[resultspec.Filter('masterid', 'eq', [masterid]), resultspec.Filter('complete', 'eq', [False])]) for build in builds: # stop any running steps.. steps = yield self.master.data.get( ('builds', build['buildid'], 'steps'), filters=[resultspec.Filter('results', 'eq', [None])]) for step in steps: # finish remaining logs for those steps.. 
logs = yield self.master.data.get( ('steps', step['stepid'], 'logs'), filters=[resultspec.Filter('complete', 'eq', [False])]) for _log in logs: yield self.master.data.updates.finishLog( logid=_log['logid']) yield self.master.data.updates.finishStep( stepid=step['stepid'], results=RETRY, hidden=False) # then stop the build itself yield self.master.data.updates.finishBuild( buildid=build['buildid'], results=RETRY) # unclaim all of the build requests owned by the deactivated instance buildrequests = yield self.master.db.buildrequests.getBuildRequests( complete=False, claimed=masterid) yield self.master.db.buildrequests.unclaimBuildRequests( brids=[br['buildrequestid'] for br in buildrequests]) @defer.inlineCallbacks def _masterDeactivated(self, masterid, name): yield self._masterDeactivatedHousekeeping(masterid, name) self.produceEvent( dict(masterid=masterid, name=name, active=False), 'stopped') buildbot-3.4.0/master/buildbot/data/patches.py000066400000000000000000000022751413250514000213340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.data import base from buildbot.data import types # NOTE: patches are not available via endpoints class Patch(base.ResourceType): name = "patch" plural = "patches" endpoints = [] keyField = 'patchid' class EntityType(types.Entity): patchid = types.Integer() body = types.Binary() level = types.Integer() subdir = types.String() author = types.String() comment = types.String() entityType = EntityType(name, 'Patch') buildbot-3.4.0/master/buildbot/data/properties.py000066400000000000000000000122431413250514000220750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json from twisted.internet import defer from buildbot.data import base from buildbot.data import types class BuildsetPropertiesEndpoint(base.Endpoint): isCollection = False pathPatterns = """ /buildsets/n:bsid/properties """ def get(self, resultSpec, kwargs): return self.master.db.buildsets.getBuildsetProperties(kwargs['bsid']) class BuildPropertiesEndpoint(base.Endpoint): isCollection = False pathPatterns = """ /builders/n:builderid/builds/n:build_number/properties /builds/n:buildid/properties """ def get(self, resultSpec, kwargs): buildid = kwargs.get("buildid", None) if buildid is None: # fixme: this cannot work... 
buildid = kwargs.get("build_number") return self.master.db.builds.getBuildProperties(buildid) class PropertiesListEndpoint(base.Endpoint): isCollection = True pathPatterns = """ /builds/n:buildid/property_list /buildsets/n:bsid/properties_list /changes/n:changeid/properties_list """ buildFieldMapping = { "name": "build_properties.name", "source": "build_properties.source", "value": "build_properties.value", } buildsetFieldMapping = { "name": "buildset_properties.name", "source": "buildset_properties.source", "value": "buildset_properties.value", } changeFieldMapping = { "name": "change_properties.name", "source": "change_properties.source", "value": "change_properties.value", } @defer.inlineCallbacks def get(self, resultSpec, kwargs): buildid = kwargs.get("buildid", None) bsid = kwargs.get("bsid", None) changeid = kwargs.get("changeid", None) if buildid is not None: if resultSpec is not None: resultSpec.fieldMapping = self.buildFieldMapping props = yield self.master.db.builds.getBuildProperties(buildid, resultSpec) elif bsid is not None: if resultSpec is not None: resultSpec.fieldMapping = self.buildsetFieldMapping props = yield self.master.db.buildsets.getBuildsetProperties(bsid) elif changeid is not None: if resultSpec is not None: resultSpec.fieldMapping = self.buildsetFieldMapping props = yield self.master.db.changes.getChangeProperties(changeid) return [{'name': k, 'source': v[1], 'value': json.dumps(v[0])} for k, v in props.items()] class Property(base.ResourceType): name = "_property" plural = "_properties" endpoints = [PropertiesListEndpoint] keyField = "name" entityType = types.PropertyEntityType(name, 'Property') class Properties(base.ResourceType): name = "property" plural = "properties" endpoints = [BuildsetPropertiesEndpoint, BuildPropertiesEndpoint] keyField = "name" entityType = types.SourcedProperties() def generateUpdateEvent(self, buildid, newprops): # This event cannot use the produceEvent mechanism, as the properties resource type is a bit # 
specific (this is a dictionary collection) # We only send the new properties, and count on the client to merge the resulting properties # dict # We are good, as there is no way to delete a property. routingKey = ('builds', str(buildid), "properties", "update") newprops = self.sanitizeMessage(newprops) return self.master.mq.produce(routingKey, newprops) @base.updateMethod @defer.inlineCallbacks def setBuildProperties(self, buildid, properties): to_update = {} oldproperties = yield self.master.data.get(('builds', str(buildid), "properties")) properties = properties.getProperties() properties = yield properties.render(properties.asDict()) for k, v in properties.items(): if k in oldproperties and oldproperties[k] == v: continue to_update[k] = v if to_update: for k, v in to_update.items(): yield self.master.db.builds.setBuildProperty( buildid, k, v[0], v[1]) yield self.generateUpdateEvent(buildid, to_update) @base.updateMethod @defer.inlineCallbacks def setBuildProperty(self, buildid, name, value, source): res = yield self.master.db.builds.setBuildProperty( buildid, name, value, source) yield self.generateUpdateEvent(buildid, dict(name=(value, source))) return res buildbot-3.4.0/master/buildbot/data/resultspec.py000066400000000000000000000347041413250514000221000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.python import log from buildbot.data import base class FieldBase: """ This class implements a basic behavior to wrap value into a `Field` instance """ __slots__ = ['field', 'op', 'values'] singular_operators = { 'eq': lambda d, v: d == v[0], 'ne': lambda d, v: d != v[0], 'lt': lambda d, v: d < v[0], 'le': lambda d, v: d <= v[0], 'gt': lambda d, v: d > v[0], 'ge': lambda d, v: d >= v[0], 'contains': lambda d, v: v[0] in d, 'in': lambda d, v: d in v, 'notin': lambda d, v: d not in v, } singular_operators_sql = { 'eq': lambda d, v: d == v[0], 'ne': lambda d, v: d != v[0], 'lt': lambda d, v: d < v[0], 'le': lambda d, v: d <= v[0], 'gt': lambda d, v: d > v[0], 'ge': lambda d, v: d >= v[0], 'contains': lambda d, v: d.contains(v[0]), # only support string values, because currently there are no queries against lists in SQL 'in': lambda d, v: d.in_(v), 'notin': lambda d, v: d.notin_(v), } plural_operators = { 'eq': lambda d, v: d in v, 'ne': lambda d, v: d not in v, 'contains': lambda d, v: len(set(v).intersection(set(d))) > 0, 'in': lambda d, v: d in v, 'notin': lambda d, v: d not in v, } plural_operators_sql = { 'eq': lambda d, v: d.in_(v), 'ne': lambda d, v: d.notin_(v), 'contains': lambda d, vs: sa.or_(*[d.contains(v) for v in vs]), 'in': lambda d, v: d.in_(v), 'notin': lambda d, v: d.notin_(v), # sqlalchemy v0.8's or_ cannot take generator arguments, so this has to be manually expanded # only support string values, because currently there are no queries against lists in SQL } def __init__(self, field, op, values): self.field = field self.op = op self.values = values def getOperator(self, sqlMode=False): v = self.values if len(v) == 1: if sqlMode: ops = self.singular_operators_sql else: ops = self.singular_operators else: if sqlMode: ops = self.plural_operators_sql else: ops = self.plural_operators v = set(v) return ops[self.op] def apply(self, data): fld = self.field v = self.values f = 
self.getOperator() return (d for d in data if f(d[fld], v)) def __repr__(self): return "resultspec.{}('{}','{}',{})".format(self.__class__.__name__, self.field, self.op, self.values) def __eq__(self, b): for i in self.__slots__: if getattr(self, i) != getattr(b, i): return False return True def __ne__(self, b): return not (self == b) class Property(FieldBase): """ Wraps ``property`` type value(s) """ class Filter(FieldBase): """ Wraps ``filter`` type value(s) """ class NoneComparator: """ Object which wraps 'None' when doing comparisons in sorted(). '> None' and '< None' are not supported in Python 3. """ def __init__(self, value): self.value = value def __lt__(self, other): if self.value is None and other.value is None: return False elif self.value is None: return True elif other.value is None: return False return self.value < other.value def __eq__(self, other): return self.value == other.value def __ne__(self, other): return self.value != other.value def __gt_(self, other): if self.value is None and other.value is None: return False elif self.value is None: return False elif other.value is None: return True return self.value < other.value class ReverseComparator: """ Object which swaps '<' and '>' so instead of a < b, it does b < a, and instead of a > b, it does b > a. This can be used in reverse comparisons. 
""" def __init__(self, value): self.value = value def __lt__(self, other): return other.value < self.value def __eq__(self, other): return other.value == self.value def __ne__(self, other): return other.value != self.value def __gt_(self, other): return other.value > self.value class ResultSpec: __slots__ = ['filters', 'fields', 'properties', 'order', 'limit', 'offset', 'fieldMapping'] def __init__(self, filters=None, fields=None, properties=None, order=None, limit=None, offset=None): self.filters = filters or [] self.properties = properties or [] self.fields = fields self.order = order self.limit = limit self.offset = offset self.fieldMapping = {} def __repr__(self): return ("ResultSpec(**{{'filters': {}, 'fields': {}, 'properties': {}, " "'order': {}, 'limit': {}, 'offset': {}").format( self.filters, self.fields, self.properties, self.order, self.limit, self.offset) + "})" def __eq__(self, b): for i in ['filters', 'fields', 'properties', 'order', 'limit', 'offset']: if getattr(self, i) != getattr(b, i): return False return True def __ne__(self, b): return not (self == b) def popProperties(self): values = [] for p in self.properties: if p.field == b'property' and p.op == 'eq': self.properties.remove(p) values = p.values break return values def popFilter(self, field, op): for f in self.filters: if f.field == field and f.op == op: self.filters.remove(f) return f.values return None def popOneFilter(self, field, op): v = self.popFilter(field, op) return v[0] if v is not None else None def popBooleanFilter(self, field): eqVals = self.popFilter(field, 'eq') if eqVals and len(eqVals) == 1: return eqVals[0] neVals = self.popFilter(field, 'ne') if neVals and len(neVals) == 1: return not neVals[0] return None def popStringFilter(self, field): eqVals = self.popFilter(field, 'eq') if eqVals and len(eqVals) == 1: return eqVals[0] return None def popIntegerFilter(self, field): eqVals = self.popFilter(field, 'eq') if eqVals and len(eqVals) == 1: try: return int(eqVals[0]) except 
ValueError as e: raise ValueError("Filter value for {} should be integer, but got: {}".format( field, eqVals[0])) from e return None def removePagination(self): self.limit = self.offset = None def removeOrder(self): self.order = None def popField(self, field): try: i = self.fields.index(field) except ValueError: return False del self.fields[i] return True def findColumn(self, query, field): # will throw key error if field not in mapping mapped = self.fieldMapping[field] for col in query.inner_columns: if str(col) == mapped: return col raise KeyError("unable to find field {} in query".format(field)) def applyFilterToSQLQuery(self, query, f): field = f.field col = self.findColumn(query, field) # as sqlalchemy is overriding python operators, we can just use the same # python code generated by the filter return query.where(f.getOperator(sqlMode=True)(col, f.values)) def applyOrderToSQLQuery(self, query, o): reverse = False if o.startswith('-'): reverse = True o = o[1:] col = self.findColumn(query, o) if reverse: col = col.desc() return query.order_by(col) def applyToSQLQuery(self, query): filters = self.filters order = self.order unmatched_filters = [] unmatched_order = [] # apply the filters if the name of field is found in the model, and # db2data for f in filters: try: query = self.applyFilterToSQLQuery(query, f) except KeyError: # if filter is unmatched, we will do the filtering manually in # self.apply unmatched_filters.append(f) # apply order if necessary if order: for o in order: try: query = self.applyOrderToSQLQuery(query, o) except KeyError: # if order is unmatched, we will do the ordering manually # in self.apply unmatched_order.append(o) # we cannot limit in sql if there is missing filtering or ordering if unmatched_filters or unmatched_order: if self.offset is not None or self.limit is not None: log.msg("Warning: limited data api query is not backed by db " "because of following filters", unmatched_filters, unmatched_order) self.filters = unmatched_filters 
self.order = tuple(unmatched_order) return query, None count_query = sa.select([sa.func.count()]).select_from(query.alias('query')) self.order = None self.filters = [] # finally, slice out the limit/offset if self.offset is not None: query = query.offset(self.offset) self.offset = None if self.limit is not None: query = query.limit(self.limit) self.limit = None return query, count_query def thd_execute(self, conn, q, dictFromRow): offset, limit = self.offset, self.limit q, qc = self.applyToSQLQuery(q) res = conn.execute(q) rv = [dictFromRow(row) for row in res.fetchall()] if qc is not None and (offset or limit): total = conn.execute(qc).scalar() rv = base.ListResult(rv) rv.offset, rv.total, rv.limit = offset, total, limit return rv def apply(self, data): if data is None: return data if self.fields: fields = set(self.fields) def includeFields(d): return dict((k, v) for k, v in d.items() if k in fields) applyFields = includeFields else: fields = None if isinstance(data, dict): # item details if fields: data = applyFields(data) return data else: filters = self.filters order = self.order # item collection if isinstance(data, base.ListResult): # if pagination was applied, then fields, etc. must be empty assert not fields and not order and not filters, \ "endpoint must apply fields, order, and filters if it performs pagination" offset, total = data.offset, data.total limit = data.limit else: offset, total = None, None limit = None if fields: data = (applyFields(d) for d in data) # link the filters together and then flatten to list for f in self.filters: data = f.apply(data) data = list(data) if total is None: total = len(data) if self.order: def keyFunc(elem, order=self.order): """ Do a multi-level sort by passing in the keys to sort by. @param elem: each item in the list to sort. It must be a C{dict} @param order: a list of keys to sort by, such as: ('lastName', 'firstName', 'age') @return: a key used by sorted(). 
This will be a list such as: [a['lastName', a['firstName'], a['age']] @rtype: a C{list} """ compareKey = [] for k in order: doReverse = False if k[0] == '-': # If we get a key '-lastName', # it means sort by 'lastName' in reverse. k = k[1:] doReverse = True val = NoneComparator(elem[k]) if doReverse: val = ReverseComparator(val) compareKey.append(val) return compareKey data.sort(key=keyFunc) # finally, slice out the limit/offset if self.offset is not None or self.limit is not None: if offset is not None or limit is not None: raise AssertionError("endpoint must clear offset/limit") end = ((self.offset or 0) + self.limit if self.limit is not None else None) data = data[self.offset:end] offset = self.offset limit = self.limit rv = base.ListResult(data) rv.offset, rv.total = offset, total rv.limit = limit return rv # a resultSpec which does not implement filtering in python (for tests) class OptimisedResultSpec(ResultSpec): def apply(self, data): return data buildbot-3.4.0/master/buildbot/data/root.py000066400000000000000000000032761413250514000206720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class RootEndpoint(base.Endpoint): isCollection = True pathPatterns = "/" def get(self, resultSpec, kwargs): return defer.succeed(self.master.data.rootLinks) class Root(base.ResourceType): name = "rootlink" plural = "rootlinks" endpoints = [RootEndpoint] class EntityType(types.Entity): name = types.String() entityType = EntityType(name, 'Rootlink') class SpecEndpoint(base.Endpoint): isCollection = True pathPatterns = "/application.spec" def get(self, resultSpec, kwargs): return defer.succeed(self.master.data.allEndpoints()) class Spec(base.ResourceType): name = "spec" plural = "specs" endpoints = [SpecEndpoint] class EntityType(types.Entity): path = types.String() type = types.String() plural = types.String() type_spec = types.JsonObject() entityType = EntityType(name, 'Spec') buildbot-3.4.0/master/buildbot/data/schedulers.py000066400000000000000000000111511413250514000220370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import masters from buildbot.data import types from buildbot.db.schedulers import SchedulerAlreadyClaimedError class Db2DataMixin: @defer.inlineCallbacks def db2data(self, dbdict): master = None if dbdict['masterid'] is not None: master = yield self.master.data.get( ('masters', dbdict['masterid'])) data = { 'schedulerid': dbdict['id'], 'name': dbdict['name'], 'enabled': dbdict['enabled'], 'master': master, } return data class SchedulerEndpoint(Db2DataMixin, base.Endpoint): isCollection = False pathPatterns = """ /schedulers/n:schedulerid /masters/n:masterid/schedulers/n:schedulerid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): dbdict = yield self.master.db.schedulers.getScheduler( kwargs['schedulerid']) if 'masterid' in kwargs: if dbdict['masterid'] != kwargs['masterid']: return None return (yield self.db2data(dbdict)) if dbdict else None @defer.inlineCallbacks def control(self, action, args, kwargs): if action == 'enable': schedulerid = kwargs['schedulerid'] v = args['enabled'] yield self.master.data.updates.schedulerEnable(schedulerid, v) return None class SchedulersEndpoint(Db2DataMixin, base.Endpoint): isCollection = True pathPatterns = """ /schedulers /masters/n:masterid/schedulers """ rootLinkName = 'schedulers' @defer.inlineCallbacks def get(self, resultSpec, kwargs): schedulers = yield self.master.db.schedulers.getSchedulers( masterid=kwargs.get('masterid')) schdicts = yield defer.DeferredList( [self.db2data(schdict) for schdict in schedulers], consumeErrors=True, fireOnOneErrback=True) return [r for (s, r) in schdicts] class Scheduler(base.ResourceType): name = "scheduler" plural = "schedulers" endpoints = [SchedulerEndpoint, SchedulersEndpoint] keyField = 'schedulerid' eventPathPatterns = """ /schedulers/:schedulerid """ class EntityType(types.Entity): schedulerid = types.Integer() name = types.String() enabled = 
types.Boolean() master = types.NoneOk(masters.Master.entityType) entityType = EntityType(name, 'Scheduler') @defer.inlineCallbacks def generateEvent(self, schedulerid, event): scheduler = yield self.master.data.get(('schedulers', str(schedulerid))) self.produceEvent(scheduler, event) @base.updateMethod @defer.inlineCallbacks def schedulerEnable(self, schedulerid, v): yield self.master.db.schedulers.enable(schedulerid, v) yield self.generateEvent(schedulerid, 'updated') return None @base.updateMethod def findSchedulerId(self, name): return self.master.db.schedulers.findSchedulerId(name) @base.updateMethod def trySetSchedulerMaster(self, schedulerid, masterid): d = self.master.db.schedulers.setSchedulerMaster( schedulerid, masterid) # set is successful: deferred result is True d.addCallback(lambda _: True) @d.addErrback def trapAlreadyClaimedError(why): # the db layer throws an exception if the claim fails; we squash # that error but let other exceptions continue upward why.trap(SchedulerAlreadyClaimedError) # set failed: deferred result is False return False return d @defer.inlineCallbacks def _masterDeactivated(self, masterid): schedulers = yield self.master.db.schedulers.getSchedulers( masterid=masterid) for sch in schedulers: yield self.master.db.schedulers.setSchedulerMaster(sch['id'], None) buildbot-3.4.0/master/buildbot/data/sourcestamps.py000066400000000000000000000053141413250514000224320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import patches from buildbot.data import types def _db2data(ss): data = { 'ssid': ss['ssid'], 'branch': ss['branch'], 'revision': ss['revision'], 'project': ss['project'], 'repository': ss['repository'], 'codebase': ss['codebase'], 'created_at': ss['created_at'], 'patch': None, } if ss['patch_body']: data['patch'] = { 'patchid': ss['patchid'], 'level': ss['patch_level'], 'subdir': ss['patch_subdir'], 'author': ss['patch_author'], 'comment': ss['patch_comment'], 'body': ss['patch_body'], } return data class SourceStampEndpoint(base.Endpoint): isCollection = False pathPatterns = """ /sourcestamps/n:ssid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): ssdict = yield self.master.db.sourcestamps.getSourceStamp( kwargs['ssid']) return _db2data(ssdict) if ssdict else None class SourceStampsEndpoint(base.Endpoint): isCollection = True pathPatterns = """ /sourcestamps """ rootLinkName = 'sourcestamps' @defer.inlineCallbacks def get(self, resultSpec, kwargs): return [_db2data(ssdict) for ssdict in (yield self.master.db.sourcestamps.getSourceStamps())] class SourceStamp(base.ResourceType): name = "sourcestamp" plural = "sourcestamps" endpoints = [SourceStampEndpoint, SourceStampsEndpoint] keyField = 'ssid' subresources = ["Change"] class EntityType(types.Entity): ssid = types.Integer() revision = types.NoneOk(types.String()) branch = types.NoneOk(types.String()) repository = types.String() project = types.String() codebase = types.String() patch = types.NoneOk(patches.Patch.entityType) created_at = types.DateTime() entityType = EntityType(name, 'Sourcestamp') 
buildbot-3.4.0/master/buildbot/data/steps.py000066400000000000000000000127351413250514000210450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class Db2DataMixin: def db2data(self, dbdict): data = { 'stepid': dbdict['id'], 'number': dbdict['number'], 'name': dbdict['name'], 'buildid': dbdict['buildid'], 'started_at': dbdict['started_at'], 'complete': dbdict['complete_at'] is not None, 'complete_at': dbdict['complete_at'], 'state_string': dbdict['state_string'], 'results': dbdict['results'], 'urls': dbdict['urls'], 'hidden': dbdict['hidden'], } return defer.succeed(data) class StepEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = False pathPatterns = """ /steps/n:stepid /builds/n:buildid/steps/i:step_name /builds/n:buildid/steps/n:step_number /builders/n:builderid/builds/n:build_number/steps/i:step_name /builders/n:builderid/builds/n:build_number/steps/n:step_number /builders/i:buildername/builds/n:build_number/steps/i:step_name /builders/i:buildername/builds/n:build_number/steps/n:step_number """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): if 'stepid' in kwargs: dbdict = yield self.master.db.steps.getStep(kwargs['stepid']) return (yield 
self.db2data(dbdict)) if dbdict else None buildid = yield self.getBuildid(kwargs) if buildid is None: return None dbdict = yield self.master.db.steps.getStep( buildid=buildid, number=kwargs.get('step_number'), name=kwargs.get('step_name')) return (yield self.db2data(dbdict)) if dbdict else None class StepsEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = True pathPatterns = """ /builds/n:buildid/steps /builders/n:builderid/builds/n:build_number/steps /builders/i:buildername/builds/n:build_number/steps """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): if 'buildid' in kwargs: buildid = kwargs['buildid'] else: buildid = yield self.getBuildid(kwargs) if buildid is None: return None steps = yield self.master.db.steps.getSteps(buildid=buildid) results = [] for dbdict in steps: results.append((yield self.db2data(dbdict))) return results class UrlEntityType(types.Entity): name = types.String() url = types.String() class Step(base.ResourceType): name = "step" plural = "steps" endpoints = [StepEndpoint, StepsEndpoint] keyField = 'stepid' eventPathPatterns = """ /builds/:buildid/steps/:stepid /steps/:stepid """ subresources = ["Log"] class EntityType(types.Entity): stepid = types.Integer() number = types.Integer() name = types.Identifier(50) buildid = types.Integer() started_at = types.NoneOk(types.DateTime()) complete = types.Boolean() complete_at = types.NoneOk(types.DateTime()) results = types.NoneOk(types.Integer()) state_string = types.String() urls = types.List( of=UrlEntityType("Url", "Url")) hidden = types.Boolean() entityType = EntityType(name, 'Step') @defer.inlineCallbacks def generateEvent(self, stepid, event): step = yield self.master.data.get(('steps', stepid)) self.produceEvent(step, event) @base.updateMethod @defer.inlineCallbacks def addStep(self, buildid, name): stepid, num, name = yield self.master.db.steps.addStep( buildid=buildid, name=name, state_string='pending') yield self.generateEvent(stepid, 'new') return 
(stepid, num, name) @base.updateMethod @defer.inlineCallbacks def startStep(self, stepid): yield self.master.db.steps.startStep(stepid=stepid) yield self.generateEvent(stepid, 'started') @base.updateMethod @defer.inlineCallbacks def setStepStateString(self, stepid, state_string): yield self.master.db.steps.setStepStateString( stepid=stepid, state_string=state_string) yield self.generateEvent(stepid, 'updated') @base.updateMethod @defer.inlineCallbacks def addStepURL(self, stepid, name, url): yield self.master.db.steps.addURL( stepid=stepid, name=name, url=url) yield self.generateEvent(stepid, 'updated') @base.updateMethod @defer.inlineCallbacks def finishStep(self, stepid, results, hidden): yield self.master.db.steps.finishStep( stepid=stepid, results=results, hidden=hidden) yield self.generateEvent(stepid, 'finished') buildbot-3.4.0/master/buildbot/data/test_result_sets.py000066400000000000000000000126041413250514000233150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class Db2DataMixin: def db2data(self, dbdict): data = { 'test_result_setid': dbdict['id'], 'builderid': dbdict['builderid'], 'buildid': dbdict['buildid'], 'stepid': dbdict['stepid'], 'description': dbdict['description'], 'category': dbdict['category'], 'value_unit': dbdict['value_unit'], 'tests_passed': dbdict['tests_passed'], 'tests_failed': dbdict['tests_failed'], 'complete': bool(dbdict['complete']), } return defer.succeed(data) class TestResultSetsEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = True pathPatterns = """ /builders/n:builderid/test_result_sets /builders/i:buildername/test_result_sets /builds/n:buildid/test_result_sets /steps/n:stepid/test_result_sets """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): complete = resultSpec.popBooleanFilter('complete') if 'stepid' in kwargs: step_dbdict = yield self.master.db.steps.getStep(kwargs['stepid']) build_dbdict = yield self.master.db.builds.getBuild(step_dbdict['buildid']) sets = yield self.master.db.test_result_sets.getTestResultSets( build_dbdict['builderid'], buildid=step_dbdict['buildid'], stepid=kwargs['stepid'], complete=complete, result_spec=resultSpec) elif 'buildid' in kwargs: build_dbdict = yield self.master.db.builds.getBuild(kwargs['buildid']) sets = yield self.master.db.test_result_sets.getTestResultSets( build_dbdict['builderid'], buildid=kwargs['buildid'], complete=complete, result_spec=resultSpec) else: # The following is true: 'buildername' in kwargs or 'builderid' in kwargs: builderid = yield self.getBuilderId(kwargs) sets = yield self.master.db.test_result_sets.getTestResultSets( builderid, complete=complete, result_spec=resultSpec) results = [] for dbdict in sets: results.append((yield self.db2data(dbdict))) return results class TestResultSetEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection 
= False pathPatterns = """ /test_result_sets/n:test_result_setid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): dbdict = yield self.master.db.test_result_sets.getTestResultSet(kwargs['test_result_setid']) return (yield self.db2data(dbdict)) if dbdict else None class TestResultSet(base.ResourceType): name = "test_result_set" plural = "test_result_sets" endpoints = [TestResultSetsEndpoint, TestResultSetEndpoint] keyField = 'test_result_setid' eventPathPatterns = """ /test_result_sets/:test_result_setid """ class EntityType(types.Entity): test_result_setid = types.Integer() builderid = types.Integer() buildid = types.Integer() stepid = types.Integer() description = types.NoneOk(types.String()) category = types.String() value_unit = types.String() tests_passed = types.NoneOk(types.Integer()) tests_failed = types.NoneOk(types.Integer()) complete = types.Boolean() entityType = EntityType(name, 'TestResultSet') @defer.inlineCallbacks def generateEvent(self, test_result_setid, event): test_result_set = yield self.master.data.get(('test_result_sets', test_result_setid)) self.produceEvent(test_result_set, event) @base.updateMethod @defer.inlineCallbacks def addTestResultSet(self, builderid, buildid, stepid, description, category, value_unit): test_result_setid = \ yield self.master.db.test_result_sets.addTestResultSet(builderid, buildid, stepid, description, category, value_unit) yield self.generateEvent(test_result_setid, 'new') return test_result_setid @base.updateMethod @defer.inlineCallbacks def completeTestResultSet(self, test_result_setid, tests_passed=None, tests_failed=None): yield self.master.db.test_result_sets.completeTestResultSet(test_result_setid, tests_passed, tests_failed) yield self.generateEvent(test_result_setid, 'completed') buildbot-3.4.0/master/buildbot/data/test_results.py000066400000000000000000000065371413250514000224520ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class Db2DataMixin: def db2data(self, dbdict): data = { 'test_resultid': dbdict['id'], 'builderid': dbdict['builderid'], 'test_result_setid': dbdict['test_result_setid'], 'test_name': dbdict['test_name'], 'test_code_path': dbdict['test_code_path'], 'line': dbdict['line'], 'duration_ns': dbdict['duration_ns'], 'value': dbdict['value'], } return defer.succeed(data) class TestResultsEndpoint(Db2DataMixin, base.Endpoint): isCollection = True pathPatterns = """ /test_result_sets/n:test_result_setid/results """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): set_dbdict = \ yield self.master.db.test_result_sets.getTestResultSet(kwargs['test_result_setid']) if set_dbdict is None: return [] result_dbdicts = \ yield self.master.db.test_results.getTestResults(set_dbdict['builderid'], kwargs['test_result_setid'], result_spec=resultSpec) results = [] for dbdict in result_dbdicts: results.append((yield self.db2data(dbdict))) return results class TestResult(base.ResourceType): name = "test_result" plural = "test_results" endpoints = [TestResultsEndpoint] keyField = 'test_resultid' eventPathPatterns = """ /test_result_sets/:test_result_setid/results """ class EntityType(types.Entity): test_resultid = types.Integer() 
builderid = types.Integer() test_result_setid = types.Integer() test_name = types.NoneOk(types.String()) test_code_path = types.NoneOk(types.String()) line = types.NoneOk(types.Integer()) duration_ns = types.NoneOk(types.Integer()) value = types.String() entityType = EntityType(name, 'TestResult') @base.updateMethod @defer.inlineCallbacks def addTestResults(self, builderid, test_result_setid, result_values): # We're not adding support for emitting any messages, because in all cases all test results # will be part of a test result set. The users should wait for a 'complete' event on a # test result set and only then fetch the test results, which won't change from that time # onward. yield self.master.db.test_results.addTestResults(builderid, test_result_setid, result_values) buildbot-3.4.0/master/buildbot/data/types.py000066400000000000000000000306041413250514000210460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # See "Type Validation" in master/docs/developer/tests.rst import datetime import json import re from buildbot import util from buildbot.util import bytes2unicode def capitalize(word): return ''.join(x.capitalize() or '_' for x in word.split('_')) class Type: name = None doc = None graphQLType = "unknown" @property def ramlname(self): return self.name def valueFromString(self, arg): # convert a urldecoded bytestring as given in a URL to a value, or # raise an exception trying. This parent method raises an exception, # so if the method is missing in a subclass, it cannot be created from # a string. raise TypeError def cmp(self, val, arg): argVal = self.valueFromString(arg) if val < argVal: return -1 elif val == argVal: return 0 return 1 def validate(self, name, object): raise NotImplementedError def getSpec(self): r = dict(name=self.name) if self.doc is not None: r["doc"] = self.doc return r def toGraphQL(self): return self.graphQLType def toGraphQLTypeName(self): return self.graphQLType def graphQLDependentTypes(self): return [] def getGraphQLInputType(self): return self.toGraphQLTypeName() class NoneOk(Type): def __init__(self, nestedType): assert isinstance(nestedType, Type) self.nestedType = nestedType self.name = self.nestedType.name + " or None" @property def ramlname(self): return self.nestedType.ramlname def valueFromString(self, arg): return self.nestedType.valueFromString(arg) def cmp(self, val, arg): return self.nestedType.cmp(val, arg) def validate(self, name, object): if object is None: return for msg in self.nestedType.validate(name, object): yield msg def getSpec(self): r = self.nestedType.getSpec() r["can_be_null"] = True return r def toRaml(self): return self.nestedType.toRaml() def toGraphQL(self): # remove trailing ! 
if isinstance(self.nestedType, Entity): return self.nestedType.graphql_name return self.nestedType.toGraphQL()[:-1] def graphQLDependentTypes(self): return [self.nestedType] def getGraphQLInputType(self): return self.nestedType.getGraphQLInputType() class Instance(Type): types = () ramlType = "unknown" graphQLType = "unknown" @property def ramlname(self): return self.ramlType def validate(self, name, object): if not isinstance(object, self.types): yield "{} ({}) is not a {}".format(name, repr(object), self.name or repr(self.types)) def toRaml(self): return self.ramlType def toGraphQL(self): return self.graphQLType + "!" class Integer(Instance): name = "integer" types = (int,) ramlType = "integer" graphQLType = "Int" def valueFromString(self, arg): return int(arg) class DateTime(Instance): name = "datetime" types = (datetime.datetime,) ramlType = "date" graphQLType = "Date" # custom def valueFromString(self, arg): return int(arg) def validate(self, name, object): if isinstance(object, datetime.datetime): return if isinstance(object, int): try: datetime.datetime.fromtimestamp(object) except (OverflowError, OSError): pass else: return yield "{} ({}) is not a valid timestamp".format(name, object) class String(Instance): name = "string" types = (str,) ramlType = "string" graphQLType = "String" def valueFromString(self, arg): val = util.bytes2unicode(arg) return val class Binary(Instance): name = "binary" types = (bytes,) ramlType = "string" graphQLType = "Binary" # custom def valueFromString(self, arg): return arg class Boolean(Instance): name = "boolean" types = (bool,) ramlType = "boolean" graphQLType = "Boolean" # custom def valueFromString(self, arg): return util.string2boolean(arg) class Identifier(Type): name = "identifier" identRe = re.compile('^[a-zA-Z_-][a-zA-Z0-9._-]*$') ramlType = "string" graphQLType = "String" def __init__(self, len=None, **kwargs): super().__init__(**kwargs) self.len = len def valueFromString(self, arg): val = util.bytes2unicode(arg) if 
not self.identRe.match(val) or len(val) > self.len or not val: raise TypeError return val def validate(self, name, object): if not isinstance(object, str): yield "{} - {} - is not a unicode string".format(name, repr(object)) elif not self.identRe.match(object): yield "{} - {} - is not an identifier".format(name, repr(object)) elif not object: yield "{} - identifiers cannot be an empty string".format(name) elif len(object) > self.len: yield "{} - {} - is longer than {} characters".format(name, repr(object), self.len) def toRaml(self): return {'type': self.ramlType, 'pattern': self.identRe.pattern} class List(Type): name = "list" ramlType = "list" @property def ramlname(self): return self.of.ramlname def __init__(self, of=None, **kwargs): super().__init__(**kwargs) self.of = of def validate(self, name, object): if not isinstance(object, list): # we want a list, and NOT a subclass yield "{} ({}) is not a {}".format(name, repr(object), self.name) return for idx, elt in enumerate(object): for msg in self.of.validate("{}[{}]".format(name, idx), elt): yield msg def valueFromString(self, arg): # valueFromString is used to process URL args, which come one at # a time, so we defer to the `of` return self.of.valueFromString(arg) def getSpec(self): return dict(type=self.name, of=self.of.getSpec()) def toRaml(self): return {'type': 'array', 'items': self.of.name} def toGraphQL(self): return f"[{self.of.toGraphQLTypeName()}]!" def toGraphQLTypeName(self): return f"[{self.of.toGraphQLTypeName()}]" def graphQLDependentTypes(self): return [self.of] def getGraphQLInputType(self): return self.of.getGraphQLInputType() def ramlMaybeNoneOrList(k, v): if isinstance(v, NoneOk): return k + "?" 
if isinstance(v, List): return k + "[]" return k class SourcedProperties(Type): name = "sourcedproperties" def validate(self, name, object): if not isinstance(object, dict): # we want a dict, and NOT a subclass yield "{} is not sourced properties (not a dict)".format(name) return for k, v in object.items(): if not isinstance(k, str): yield "{} property name {} is not unicode".format(name, repr(k)) if not isinstance(v, tuple) or len(v) != 2: yield "{} property value for '{}' is not a 2-tuple".format(name, k) return propval, propsrc = v if not isinstance(propsrc, str): yield "{}[{}] source {} is not unicode".format(name, k, repr(propsrc)) try: json.loads(bytes2unicode(propval)) except ValueError: yield "{}[{}] value is not JSON-able".format(name, repr(k)) def toRaml(self): return {'type': "object", 'properties': {'[]': {'type': 'object', 'properties': { 1: 'string', 2: 'integer | string | object | array | boolean' } }}} def toGraphQL(self): return "[Property]!" def graphQLDependentTypes(self): return [PropertyEntityType("property", 'Property')] def getGraphQLInputType(self): return None class JsonObject(Type): name = "jsonobject" ramlname = 'object' graphQLType = "JSON" def validate(self, name, object): if not isinstance(object, dict): yield "{} ({}) is not a dictionary (got type {})".format(name, repr(object), type(object)) return # make sure JSON can represent it try: json.dumps(object) except Exception as e: yield "{} is not JSON-able: {}".format(name, e) return def toRaml(self): return "object" class Entity(Type): # NOTE: this type is defined by subclassing it in each resource type class. 
# Instances are generally accessed at e.g., # * buildsets.Buildset.entityType or # * self.master.data.rtypes.buildsets.entityType name = None # set in constructor graphql_name = None # set in constructor fields = {} fieldNames = set([]) def __init__(self, name, graphql_name): fields = {} for k, v in self.__class__.__dict__.items(): if isinstance(v, Type): fields[k] = v self.fields = fields self.fieldNames = set(fields) self.name = name self.graphql_name = graphql_name def validate(self, name, object): # this uses isinstance, allowing dict subclasses as used by the DB API if not isinstance(object, dict): yield "{} ({}) is not a dictionary (got type {})".format(name, repr(object), type(object)) return gotNames = set(object.keys()) unexpected = gotNames - self.fieldNames if unexpected: yield "{} has unexpected keys {}".format(name, ", ".join([repr(n) for n in unexpected])) missing = self.fieldNames - gotNames if missing: yield "{} is missing keys {}".format(name, ", ".join([repr(n) for n in missing])) for k in gotNames & self.fieldNames: f = self.fields[k] for msg in f.validate("{}[{}]".format(name, repr(k)), object[k]): yield msg def getSpec(self): return dict(type=self.name, fields=[dict(name=k, type=v.name, type_spec=v.getSpec()) for k, v in self.fields.items() ]) def toRaml(self): return {'type': "object", 'properties': { ramlMaybeNoneOrList(k, v): {'type': v.ramlname, 'description': ''} for k, v in self.fields.items()}} def toGraphQL(self): return dict(type=self.graphql_name, fields=[dict(name=k, type=v.toGraphQL()) for k, v in self.fields.items() # in graphql, we handle properties as queriable sub resources # instead of hardcoded attributes like in rest api if k != "properties" ]) def toGraphQLTypeName(self): return self.graphql_name def graphQLDependentTypes(self): return self.fields.values() def getGraphQLInputType(self): # for now, complex types are not query able # in the future, we may want to declare (and implement) graphql input types return None class 
PropertyEntityType(Entity): name = String() source = String() value = JsonObject() buildbot-3.4.0/master/buildbot/data/workers.py000066400000000000000000000152311413250514000213750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import exceptions from buildbot.data import types from buildbot.util import identifiers class Db2DataMixin: def db2data(self, dbdict): return { 'workerid': dbdict['id'], 'name': dbdict['name'], 'workerinfo': dbdict['workerinfo'], 'paused': dbdict['paused'], 'graceful': dbdict['graceful'], 'connected_to': [ {'masterid': id} for id in dbdict['connected_to']], 'configured_on': [ {'masterid': c['masterid'], 'builderid': c['builderid']} for c in dbdict['configured_on']], } class WorkerEndpoint(Db2DataMixin, base.Endpoint): isCollection = False pathPatterns = """ /workers/n:workerid /workers/i:name /masters/n:masterid/workers/n:workerid /masters/n:masterid/workers/i:name /masters/n:masterid/builders/n:builderid/workers/n:workerid /masters/n:masterid/builders/n:builderid/workers/i:name /builders/n:builderid/workers/n:workerid /builders/n:builderid/workers/i:name """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): sldict = yield self.master.db.workers.getWorker( 
workerid=kwargs.get('workerid'), name=kwargs.get('name'), masterid=kwargs.get('masterid'), builderid=kwargs.get('builderid')) if sldict: return self.db2data(sldict) return None @defer.inlineCallbacks def control(self, action, args, kwargs): if action not in ("stop", "pause", "unpause", "kill"): raise exceptions.InvalidControlException("action: {} is not supported".format(action)) worker = yield self.get(None, kwargs) if worker is not None: self.master.mq.produce(("control", "worker", str(worker['workerid']), action), dict(reason=kwargs.get('reason', args.get('reason', 'no reason')))) else: raise exceptions.exceptions.InvalidPathError("worker not found") class WorkersEndpoint(Db2DataMixin, base.Endpoint): isCollection = True rootLinkName = 'workers' pathPatterns = """ /workers /masters/n:masterid/workers /masters/n:masterid/builders/n:builderid/workers /builders/n:builderid/workers """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): paused = resultSpec.popBooleanFilter('paused') graceful = resultSpec.popBooleanFilter('graceful') workers_dicts = yield self.master.db.workers.getWorkers( builderid=kwargs.get('builderid'), masterid=kwargs.get('masterid'), paused=paused, graceful=graceful) return [self.db2data(w) for w in workers_dicts] class MasterBuilderEntityType(types.Entity): masterid = types.Integer() builderid = types.Integer() class MasterIdEntityType(types.Entity): masterid = types.Integer() class Worker(base.ResourceType): name = "worker" plural = "workers" endpoints = [WorkerEndpoint, WorkersEndpoint] keyField = 'workerid' eventPathPatterns = """ /workers/:workerid """ subresources = ["Build"] class EntityType(types.Entity): workerid = types.Integer() name = types.String() connected_to = types.List(of=MasterIdEntityType("master_id", 'MasterId')) configured_on = types.List(of=MasterBuilderEntityType("master_builder", 'MasterBuilder')) workerinfo = types.JsonObject() paused = types.Boolean() graceful = types.Boolean() entityType = EntityType(name, 
'Worker') @base.updateMethod # returns a Deferred that returns None def workerConfigured(self, workerid, masterid, builderids): return self.master.db.workers.workerConfigured( workerid=workerid, masterid=masterid, builderids=builderids) @base.updateMethod def findWorkerId(self, name): if not identifiers.isIdentifier(50, name): raise ValueError( "Worker name %r is not a 50-character identifier" % (name,)) return self.master.db.workers.findWorkerId(name) @base.updateMethod @defer.inlineCallbacks def workerConnected(self, workerid, masterid, workerinfo): yield self.master.db.workers.workerConnected( workerid=workerid, masterid=masterid, workerinfo=workerinfo) bs = yield self.master.data.get(('workers', workerid)) self.produceEvent(bs, 'connected') @base.updateMethod @defer.inlineCallbacks def workerDisconnected(self, workerid, masterid): yield self.master.db.workers.workerDisconnected( workerid=workerid, masterid=masterid) bs = yield self.master.data.get(('workers', workerid)) self.produceEvent(bs, 'disconnected') @base.updateMethod @defer.inlineCallbacks def workerMissing(self, workerid, masterid, last_connection, notify): bs = yield self.master.data.get(('workers', workerid)) bs['last_connection'] = last_connection bs['notify'] = notify self.produceEvent(bs, 'missing') @base.updateMethod @defer.inlineCallbacks def setWorkerState(self, workerid, paused, graceful): yield self.master.db.workers.setWorkerState( workerid=workerid, paused=paused, graceful=graceful) bs = yield self.master.data.get(('workers', workerid)) self.produceEvent(bs, 'state_updated') @base.updateMethod def deconfigureAllWorkersForMaster(self, masterid): # unconfigure all workers for this master return self.master.db.workers.deconfigureAllWorkersForMaster( masterid=masterid) def _masterDeactivated(self, masterid): return self.deconfigureAllWorkersForMaster(masterid) 
buildbot-3.4.0/master/buildbot/db/000077500000000000000000000000001413250514000170015ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/db/__init__.py000066400000000000000000000014671413250514000211220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # a NULL constant to use in sqlalchemy whereclauses e.g. (tbl.c.results == NULL) # so that pep8 is happy NULL = None buildbot-3.4.0/master/buildbot/db/base.py000066400000000000000000000130761413250514000202740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import hashlib import itertools import sqlalchemy as sa from buildbot.util import unicode2bytes class DBConnectorComponent: # A fixed component of the DBConnector, handling one particular aspect of # the database. Instances of subclasses are assigned to attributes of the # DBConnector object, so that they are available at e.g., # C{master.db.model} or C{master.db.changes}. This parent class takes care # of the necessary backlinks and other housekeeping. connector = None data2db = {} def __init__(self, connector): self.db = connector # set up caches for method in dir(self.__class__): o = getattr(self, method) if isinstance(o, CachedMethod): setattr(self, method, o.get_cached_method(self)) @property def master(self): return self.db.master _isCheckLengthNecessary = None def checkLength(self, col, value): if not self._isCheckLengthNecessary: if self.db.pool.engine.dialect.name == 'mysql': self._isCheckLengthNecessary = True else: # not necessary, so just stub out the method self.checkLength = lambda col, value: None return assert col.type.length, "column {} does not have a length".format(col) if value and len(value) > col.type.length: raise RuntimeError( "value for column {} is greater than max of {} characters: {}".format( col, col.type.length, value)) def ensureLength(self, col, value): assert col.type.length, "column {} does not have a length".format(col) if value and len(value) > col.type.length: value = value[:col.type.length // 2] + \ hashlib.sha1(unicode2bytes(value)).hexdigest()[:col.type.length // 2] return value # returns a Deferred that returns a value def findSomethingId(self, tbl, whereclause, insert_values, _race_hook=None, autoCreate=True): d = self.findOrCreateSomethingId(tbl, whereclause, insert_values, _race_hook, autoCreate) d.addCallback(lambda pair: pair[0]) return d def findOrCreateSomethingId(self, tbl, whereclause, insert_values, _race_hook=None, autoCreate=True): """ Find a matching row and if one cannot be 
found optionally create it. Returns a deferred which resolves to the pair (id, found) where id is the primary key of the matching row and `found` is True if a match was found. `found` will be false if a new row was created. """ def thd(conn, no_recurse=False): # try to find the master q = sa.select([tbl.c.id], whereclause=whereclause) r = conn.execute(q) row = r.fetchone() r.close() # found it! if row: return row.id, True if not autoCreate: return None, False _race_hook and _race_hook(conn) try: r = conn.execute(tbl.insert(), [insert_values]) return r.inserted_primary_key[0], False except (sa.exc.IntegrityError, sa.exc.ProgrammingError): # try it all over again, in case there was an overlapping, # identical call, but only retry once. if no_recurse: raise return thd(conn, no_recurse=True) return self.db.pool.do(thd) def hashColumns(self, *args): def encode(x): if x is None: return b'\xf5' elif isinstance(x, str): return x.encode('utf-8') return str(x).encode('utf-8') return hashlib.sha1(b'\0'.join(map(encode, args))).hexdigest() def doBatch(self, batch, batch_n=500): iterator = iter(batch) while True: batch = list(itertools.islice(iterator, batch_n)) if not batch: break yield batch class CachedMethod: def __init__(self, cache_name, method): self.cache_name = cache_name self.method = method def get_cached_method(self, component): meth = self.method meth_name = meth.__name__ cache = component.db.master.caches.get_cache(self.cache_name, lambda key: meth(component, key)) def wrap(key, no_cache=0): if no_cache: return meth(component, key) return cache.get(key) wrap.__name__ = meth_name + " (wrapped)" wrap.__module__ = meth.__module__ wrap.__doc__ = meth.__doc__ wrap.cache = cache return wrap def cached(cache_name): return lambda method: CachedMethod(cache_name, method) buildbot-3.4.0/master/buildbot/db/build_data.py000066400000000000000000000140321413250514000214430ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.internet import defer from buildbot.db import NULL from buildbot.db import base class BuildDataDict(dict): pass class BuildDataConnectorComponent(base.DBConnectorComponent): def _insert_race_hook(self, conn): # called so tests can simulate a race condition during insertion pass @defer.inlineCallbacks def setBuildData(self, buildid, name, value, source): def thd(conn): build_data_table = self.db.model.build_data update_values = { 'value': value, 'length': len(value), 'source': source, } insert_values = { 'buildid': buildid, 'name': name, 'value': value, 'length': len(value), 'source': source, } while True: q = build_data_table.update() q = q.where((build_data_table.c.buildid == buildid) & (build_data_table.c.name == name)) q = q.values(update_values) r = conn.execute(q) if r.rowcount > 0: return r.close() self._insert_race_hook(conn) try: q = build_data_table.insert().values(insert_values) r = conn.execute(q) return except (sa.exc.IntegrityError, sa.exc.ProgrammingError): # there's been a competing insert, retry pass yield self.db.pool.do(thd) @defer.inlineCallbacks def getBuildData(self, buildid, name): def thd(conn): build_data_table = self.db.model.build_data q = build_data_table.select().where((build_data_table.c.buildid == buildid) & (build_data_table.c.name == name)) 
res = conn.execute(q) row = res.fetchone() if not row: return None return self._row2dict(conn, row) res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def getBuildDataNoValue(self, buildid, name): def thd(conn): build_data_table = self.db.model.build_data q = sa.select([build_data_table.c.buildid, build_data_table.c.name, build_data_table.c.length, build_data_table.c.source]) q = q.where((build_data_table.c.buildid == buildid) & (build_data_table.c.name == name)) res = conn.execute(q) row = res.fetchone() if not row: return None return self._row2dict_novalue(conn, row) res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def getAllBuildDataNoValues(self, buildid): def thd(conn): build_data_table = self.db.model.build_data q = sa.select([build_data_table.c.buildid, build_data_table.c.name, build_data_table.c.length, build_data_table.c.source]) q = q.where(build_data_table.c.buildid == buildid) return [self._row2dict_novalue(conn, row) for row in conn.execute(q).fetchall()] res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def deleteOldBuildData(self, older_than_timestamp): build_data = self.db.model.build_data builds = self.db.model.builds def count_build_datum(conn): res = conn.execute(sa.select([sa.func.count(build_data.c.id)])) count = res.fetchone()[0] res.close() return count def thd(conn): count_before = count_build_datum(conn) if self.db._engine.dialect.name == 'sqlite': # sqlite does not support delete with a join, so for this case we use a subquery, # which is much slower q = sa.select([builds.c.id]) q = q.where((builds.c.complete_at >= older_than_timestamp) | (builds.c.complete_at == NULL)) q = build_data.delete().where(build_data.c.buildid.notin_(q)) else: q = build_data.delete() q = q.where(builds.c.id == build_data.c.buildid) q = q.where((builds.c.complete_at >= older_than_timestamp) | (builds.c.complete_at == NULL)) res = conn.execute(q) res.close() count_after = count_build_datum(conn) return count_before 
- count_after res = yield self.db.pool.do(thd) return res def _row2dict(self, conn, row): return BuildDataDict(buildid=row.buildid, name=row.name, value=row.value, length=row.length, source=row.source) def _row2dict_novalue(self, conn, row): return BuildDataDict(buildid=row.buildid, name=row.name, value=None, length=row.length, source=row.source) buildbot-3.4.0/master/buildbot/db/builders.py000066400000000000000000000135311413250514000211670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from collections import defaultdict import sqlalchemy as sa from twisted.internet import defer from buildbot.db import base class BuildersConnectorComponent(base.DBConnectorComponent): def findBuilderId(self, name, autoCreate=True): tbl = self.db.model.builders name_hash = self.hashColumns(name) return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name_hash == name_hash), insert_values=dict( name=name, name_hash=name_hash, ), autoCreate=autoCreate) @defer.inlineCallbacks def updateBuilderInfo(self, builderid, description, tags): # convert to tag IDs first, as necessary def toTagid(tag): if isinstance(tag, type(1)): return defer.succeed(tag) ssConnector = self.master.db.tags return ssConnector.findTagId(tag) tagsids = [r[1] for r in (yield defer.DeferredList( [toTagid(tag) for tag in tags], fireOnOneErrback=True, consumeErrors=True))] def thd(conn): builders_tbl = self.db.model.builders builders_tags_tbl = self.db.model.builders_tags transaction = conn.begin() q = builders_tbl.update( whereclause=(builders_tbl.c.id == builderid)) conn.execute(q, description=description).close() # remove previous builders_tags conn.execute(builders_tags_tbl.delete( whereclause=((builders_tags_tbl.c.builderid == builderid)))).close() # add tag ids if tagsids: conn.execute(builders_tags_tbl.insert(), [dict(builderid=builderid, tagid=tagid) for tagid in tagsids]).close() transaction.commit() return (yield self.db.pool.do(thd)) def getBuilder(self, builderid): d = self.getBuilders(_builderid=builderid) @d.addCallback def first(bldrs): if bldrs: return bldrs[0] return None return d # returns a Deferred that returns None def addBuilderMaster(self, builderid=None, masterid=None): def thd(conn, no_recurse=False): try: tbl = self.db.model.builder_masters q = tbl.insert() conn.execute(q, builderid=builderid, masterid=masterid) except (sa.exc.IntegrityError, sa.exc.ProgrammingError): pass return self.db.pool.do(thd) # returns a Deferred that returns None 
def removeBuilderMaster(self, builderid=None, masterid=None): def thd(conn, no_recurse=False): tbl = self.db.model.builder_masters conn.execute(tbl.delete( whereclause=((tbl.c.builderid == builderid) & (tbl.c.masterid == masterid)))) return self.db.pool.do(thd) def getBuilders(self, masterid=None, _builderid=None): def thd(conn): bldr_tbl = self.db.model.builders bm_tbl = self.db.model.builder_masters builders_tags_tbl = self.db.model.builders_tags tags_tbl = self.db.model.tags j = bldr_tbl.outerjoin(bm_tbl) # if we want to filter by masterid, we must join to builder_masters # again, so we can still get the full set of masters for each # builder if masterid is not None: limiting_bm_tbl = bm_tbl.alias('limiting_bm') j = j.join(limiting_bm_tbl, onclause=(bldr_tbl.c.id == limiting_bm_tbl.c.builderid)) q = sa.select( [bldr_tbl.c.id, bldr_tbl.c.name, bldr_tbl.c.description, bm_tbl.c.masterid], from_obj=[j], order_by=[bldr_tbl.c.id, bm_tbl.c.masterid]) if masterid is not None: # filter the masterid from the limiting table q = q.where(limiting_bm_tbl.c.masterid == masterid) if _builderid is not None: q = q.where(bldr_tbl.c.id == _builderid) # build up a intermediate builder id -> tag names map (fixes performance issue #3396) bldr_id_to_tags = defaultdict(list) bldr_q = sa.select([builders_tags_tbl.c.builderid, tags_tbl.c.name]) bldr_q = bldr_q.select_from(tags_tbl.join(builders_tags_tbl)) for bldr_id, tag in conn.execute(bldr_q).fetchall(): bldr_id_to_tags[bldr_id].append(tag) # now group those by builderid, aggregating by masterid rv = [] last = None for row in conn.execute(q).fetchall(): # pylint: disable=unsubscriptable-object if not last or row['id'] != last['id']: last = dict(id=row.id, name=row.name, masterids=[], description=row.description, tags=bldr_id_to_tags[row.id]) rv.append(last) if row['masterid']: last['masterids'].append(row['masterid']) return rv return self.db.pool.do(thd) 
buildbot-3.4.0/master/buildbot/db/buildrequests.py000066400000000000000000000226041413250514000222520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import itertools import sqlalchemy as sa from twisted.internet import defer from twisted.python import log from buildbot.db import NULL from buildbot.db import base from buildbot.process.results import RETRY from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class AlreadyClaimedError(Exception): pass class NotClaimedError(Exception): pass class BrDict(dict): pass class BuildRequestsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst def _saSelectQuery(self): reqs_tbl = self.db.model.buildrequests claims_tbl = self.db.model.buildrequest_claims bsets_tbl = self.db.model.buildsets builder_tbl = self.db.model.builders bsss_tbl = self.db.model.buildset_sourcestamps sstamps_tbl = self.db.model.sourcestamps from_clause = reqs_tbl.outerjoin(claims_tbl, reqs_tbl.c.id == claims_tbl.c.brid) from_clause = from_clause.join(bsets_tbl, reqs_tbl.c.buildsetid == bsets_tbl.c.id) from_clause = from_clause.join(bsss_tbl, bsets_tbl.c.id == bsss_tbl.c.buildsetid) from_clause = from_clause.join(sstamps_tbl, bsss_tbl.c.sourcestampid == sstamps_tbl.c.id) from_clause = from_clause.join(builder_tbl, 
reqs_tbl.c.builderid == builder_tbl.c.id) return sa.select([reqs_tbl, claims_tbl, sstamps_tbl.c.branch, sstamps_tbl.c.repository, sstamps_tbl.c.codebase, builder_tbl.c.name.label('buildername') ]).select_from(from_clause) # returns a Deferred that returns a value def getBuildRequest(self, brid): def thd(conn): reqs_tbl = self.db.model.buildrequests q = self._saSelectQuery() q = q.where(reqs_tbl.c.id == brid) res = conn.execute(q) row = res.fetchone() rv = None if row: rv = self._brdictFromRow(row, self.db.master.masterid) res.close() return rv return self.db.pool.do(thd) @defer.inlineCallbacks def getBuildRequests(self, builderid=None, complete=None, claimed=None, bsid=None, branch=None, repository=None, resultSpec=None): def deduplicateBrdict(brdicts): return list(({b['buildrequestid']: b for b in brdicts}).values()) def thd(conn): reqs_tbl = self.db.model.buildrequests claims_tbl = self.db.model.buildrequest_claims sstamps_tbl = self.db.model.sourcestamps q = self._saSelectQuery() if claimed is not None: if isinstance(claimed, bool): if not claimed: q = q.where( (claims_tbl.c.claimed_at == NULL) & (reqs_tbl.c.complete == 0)) else: q = q.where( (claims_tbl.c.claimed_at != NULL)) else: q = q.where( (claims_tbl.c.masterid == claimed)) if builderid is not None: q = q.where(reqs_tbl.c.builderid == builderid) if complete is not None: if complete: q = q.where(reqs_tbl.c.complete != 0) else: q = q.where(reqs_tbl.c.complete == 0) if bsid is not None: q = q.where(reqs_tbl.c.buildsetid == bsid) if branch is not None: q = q.where(sstamps_tbl.c.branch == branch) if repository is not None: q = q.where(sstamps_tbl.c.repository == repository) if resultSpec is not None: return deduplicateBrdict(resultSpec.thd_execute( conn, q, lambda r: self._brdictFromRow(r, self.db.master.masterid))) res = conn.execute(q) return deduplicateBrdict([self._brdictFromRow(row, self.db.master.masterid) for row in res.fetchall()]) res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def 
claimBuildRequests(self, brids, claimed_at=None): if claimed_at is not None: claimed_at = datetime2epoch(claimed_at) else: claimed_at = int(self.master.reactor.seconds()) def thd(conn): transaction = conn.begin() tbl = self.db.model.buildrequest_claims try: q = tbl.insert() conn.execute(q, [ dict(brid=id, masterid=self.db.master.masterid, claimed_at=claimed_at) for id in brids]) except (sa.exc.IntegrityError, sa.exc.ProgrammingError) as e: transaction.rollback() raise AlreadyClaimedError() from e transaction.commit() yield self.db.pool.do(thd) # returns a Deferred that returns None def unclaimBuildRequests(self, brids): def thd(conn): transaction = conn.begin() claims_tbl = self.db.model.buildrequest_claims # we'll need to batch the brids into groups of 100, so that the # parameter lists supported by the DBAPI aren't exhausted iterator = iter(brids) while True: batch = list(itertools.islice(iterator, 100)) if not batch: break # success! try: q = claims_tbl.delete( (claims_tbl.c.brid.in_(batch)) & (claims_tbl.c.masterid == self.db.master.masterid)) conn.execute(q) except Exception: transaction.rollback() raise transaction.commit() return self.db.pool.do(thd) @defer.inlineCallbacks def completeBuildRequests(self, brids, results, complete_at=None): assert results != RETRY, "a buildrequest cannot be completed with a retry status!" if complete_at is not None: complete_at = datetime2epoch(complete_at) else: complete_at = int(self.master.reactor.seconds()) def thd(conn): transaction = conn.begin() # the update here is simple, but a number of conditions are # attached to ensure that we do not update a row inappropriately, # Note that checking that the request is mine would require a # subquery, so for efficiency that is not checked. 
reqs_tbl = self.db.model.buildrequests # we'll need to batch the brids into groups of 100, so that the # parameter lists supported by the DBAPI aren't exhausted for batch in self.doBatch(brids, 100): q = reqs_tbl.update() q = q.where(reqs_tbl.c.id.in_(batch)) q = q.where(reqs_tbl.c.complete != 1) res = conn.execute(q, complete=1, results=results, complete_at=complete_at) # if an incorrect number of rows were updated, then we failed. if res.rowcount != len(batch): log.msg("tried to complete %d buildrequests, " "but only completed %d" % (len(batch), res.rowcount)) transaction.rollback() raise NotClaimedError transaction.commit() yield self.db.pool.do(thd) @staticmethod def _brdictFromRow(row, master_masterid): claimed = False claimed_by_masterid = None claimed_at = None if row.claimed_at is not None: claimed_at = row.claimed_at claimed = True claimed_by_masterid = row.masterid submitted_at = epoch2datetime(row.submitted_at) complete_at = epoch2datetime(row.complete_at) claimed_at = epoch2datetime(claimed_at) return BrDict(buildrequestid=row.id, buildsetid=row.buildsetid, builderid=row.builderid, buildername=row.buildername, priority=row.priority, claimed=claimed, claimed_at=claimed_at, claimed_by_masterid=claimed_by_masterid, complete=bool(row.complete), results=row.results, submitted_at=submitted_at, complete_at=complete_at, waited_for=bool(row.waited_for)) buildbot-3.4.0/master/buildbot/db/builds.py000066400000000000000000000246451413250514000206500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import sqlalchemy as sa from twisted.internet import defer from buildbot.db import NULL from buildbot.db import base from buildbot.util import epoch2datetime class BuildsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst # returns a Deferred that returns a value def _getBuild(self, whereclause): def thd(conn): q = self.db.model.builds.select(whereclause=whereclause) res = conn.execute(q) row = res.fetchone() rv = None if row: rv = self._builddictFromRow(row) res.close() return rv return self.db.pool.do(thd) def getBuild(self, buildid): return self._getBuild(self.db.model.builds.c.id == buildid) def getBuildByNumber(self, builderid, number): return self._getBuild( (self.db.model.builds.c.builderid == builderid) & (self.db.model.builds.c.number == number)) # returns a Deferred that returns a value def _getRecentBuilds(self, whereclause, offset=0, limit=1): def thd(conn): tbl = self.db.model.builds q = tbl.select(whereclause=whereclause, order_by=[sa.desc(tbl.c.complete_at)], offset=offset, limit=limit) res = conn.execute(q) return list([self._builddictFromRow(row) for row in res.fetchall()]) return self.db.pool.do(thd) @defer.inlineCallbacks def getPrevSuccessfulBuild(self, builderid, number, ssBuild): gssfb = self.master.db.sourcestamps.getSourceStampsForBuild rv = None tbl = self.db.model.builds offset = 0 increment = 1000 matchssBuild = {(ss['repository'], ss['branch'], ss['codebase']) for ss in ssBuild} while rv is None: # Get some recent successful builds on the same builder prevBuilds = yield self._getRecentBuilds(whereclause=((tbl.c.builderid == builderid) & (tbl.c.number < number) & (tbl.c.results == 0)), offset=offset, limit=increment) if not prevBuilds: break for 
prevBuild in prevBuilds: prevssBuild = {(ss['repository'], ss['branch'], ss['codebase']) for ss in (yield gssfb(prevBuild['id']))} if prevssBuild == matchssBuild: # A successful build with the same # repository/branch/codebase was found ! rv = prevBuild break offset += increment return rv def getBuildsForChange(self, changeid): assert changeid > 0 def thd(conn): # Get builds for the change changes_tbl = self.db.model.changes bsets_tbl = self.db.model.buildsets bsss_tbl = self.db.model.buildset_sourcestamps reqs_tbl = self.db.model.buildrequests builds_tbl = self.db.model.builds from_clause = changes_tbl.join(bsss_tbl, changes_tbl.c.sourcestampid == bsss_tbl.c.sourcestampid) from_clause = from_clause.join(bsets_tbl, bsss_tbl.c.buildsetid == bsets_tbl.c.id) from_clause = from_clause.join(reqs_tbl, bsets_tbl.c.id == reqs_tbl.c.buildsetid) from_clause = from_clause.join(builds_tbl, reqs_tbl.c.id == builds_tbl.c.buildrequestid) q = sa.select([builds_tbl]).select_from( from_clause).where(changes_tbl.c.changeid == changeid) res = conn.execute(q) return [self._builddictFromRow(row) for row in res.fetchall()] return self.db.pool.do(thd) # returns a Deferred that returns a value def getBuilds(self, builderid=None, buildrequestid=None, workerid=None, complete=None, resultSpec=None): def thd(conn): tbl = self.db.model.builds q = tbl.select() if builderid is not None: q = q.where(tbl.c.builderid == builderid) if buildrequestid is not None: q = q.where(tbl.c.buildrequestid == buildrequestid) if workerid is not None: q = q.where(tbl.c.workerid == workerid) if complete is not None: if complete: q = q.where(tbl.c.complete_at != NULL) else: q = q.where(tbl.c.complete_at == NULL) if resultSpec is not None: return resultSpec.thd_execute(conn, q, self._builddictFromRow) res = conn.execute(q) return [self._builddictFromRow(row) for row in res.fetchall()] return self.db.pool.do(thd) # returns a Deferred that returns a value def addBuild(self, builderid, buildrequestid, workerid, 
masterid, state_string, _race_hook=None): started_at = int(self.master.reactor.seconds()) def thd(conn): tbl = self.db.model.builds # get the highest current number r = conn.execute(sa.select([sa.func.max(tbl.c.number)], whereclause=(tbl.c.builderid == builderid))) number = r.scalar() new_number = 1 if number is None else number + 1 # insert until we are successful.. while True: if _race_hook: _race_hook(conn) try: r = conn.execute(self.db.model.builds.insert(), dict(number=new_number, builderid=builderid, buildrequestid=buildrequestid, workerid=workerid, masterid=masterid, started_at=started_at, complete_at=None, state_string=state_string)) except (sa.exc.IntegrityError, sa.exc.ProgrammingError) as e: # pg 9.5 gives this error which makes it pass some build # numbers if 'duplicate key value violates unique constraint "builds_pkey"' not in str(e): new_number += 1 continue return r.inserted_primary_key[0], new_number return self.db.pool.do(thd) # returns a Deferred that returns None def setBuildStateString(self, buildid, state_string): def thd(conn): tbl = self.db.model.builds q = tbl.update(whereclause=(tbl.c.id == buildid)) conn.execute(q, state_string=state_string) return self.db.pool.do(thd) # returns a Deferred that returns None def finishBuild(self, buildid, results): def thd(conn): tbl = self.db.model.builds q = tbl.update(whereclause=(tbl.c.id == buildid)) conn.execute(q, complete_at=int(self.master.reactor.seconds()), results=results) return self.db.pool.do(thd) # returns a Deferred that returns a value def getBuildProperties(self, bid, resultSpec=None): def thd(conn): bp_tbl = self.db.model.build_properties q = sa.select( [bp_tbl.c.name, bp_tbl.c.value, bp_tbl.c.source], whereclause=(bp_tbl.c.buildid == bid)) props = [] if resultSpec is not None: data = resultSpec.thd_execute(conn, q, lambda x: x) else: data = conn.execute(q) for row in data: prop = (json.loads(row.value), row.source) props.append((row.name, prop)) return dict(props) return 
self.db.pool.do(thd) @defer.inlineCallbacks def setBuildProperty(self, bid, name, value, source): """ A kind of create_or_update, that's between one or two queries per call """ def thd(conn): bp_tbl = self.db.model.build_properties self.checkLength(bp_tbl.c.name, name) self.checkLength(bp_tbl.c.source, source) whereclause = sa.and_(bp_tbl.c.buildid == bid, bp_tbl.c.name == name) q = sa.select( [bp_tbl.c.value, bp_tbl.c.source], whereclause=whereclause) prop = conn.execute(q).fetchone() value_js = json.dumps(value) if prop is None: conn.execute(bp_tbl.insert(), dict(buildid=bid, name=name, value=value_js, source=source)) elif (prop.value != value_js) or (prop.source != source): conn.execute(bp_tbl.update(whereclause=whereclause), dict(value=value_js, source=source)) yield self.db.pool.do(thd) def _builddictFromRow(self, row): return dict( id=row.id, number=row.number, builderid=row.builderid, buildrequestid=row.buildrequestid, workerid=row.workerid, masterid=row.masterid, started_at=epoch2datetime(row.started_at), complete_at=epoch2datetime(row.complete_at), state_string=row.state_string, results=row.results) buildbot-3.4.0/master/buildbot/db/buildsets.py000066400000000000000000000226331413250514000213570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ Support for buildsets in the database """ import json import sqlalchemy as sa from twisted.internet import defer from buildbot.db import NULL from buildbot.db import base from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class BsDict(dict): pass class BsProps(dict): pass class AlreadyCompleteError(RuntimeError): pass class BuildsetsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst @defer.inlineCallbacks def addBuildset(self, sourcestamps, reason, properties, builderids, waited_for, external_idstring=None, submitted_at=None, parent_buildid=None, parent_relationship=None): if submitted_at is not None: submitted_at = datetime2epoch(submitted_at) else: submitted_at = int(self.master.reactor.seconds()) # convert to sourcestamp IDs first, as necessary def toSsid(sourcestamp): if isinstance(sourcestamp, int): return defer.succeed(sourcestamp) ssConnector = self.master.db.sourcestamps return ssConnector.findSourceStampId(**sourcestamp) sourcestamps = yield defer.DeferredList( [toSsid(ss) for ss in sourcestamps], fireOnOneErrback=True, consumeErrors=True) sourcestampids = [r[1] for r in sourcestamps] def thd(conn): buildsets_tbl = self.db.model.buildsets self.checkLength(buildsets_tbl.c.reason, reason) self.checkLength(buildsets_tbl.c.external_idstring, external_idstring) transaction = conn.begin() # insert the buildset itself r = conn.execute(buildsets_tbl.insert(), dict( submitted_at=submitted_at, reason=reason, complete=0, complete_at=None, results=-1, external_idstring=external_idstring, parent_buildid=parent_buildid, parent_relationship=parent_relationship)) bsid = r.inserted_primary_key[0] # add any properties if properties: bs_props_tbl = self.db.model.buildset_properties inserts = [ dict(buildsetid=bsid, property_name=k, property_value=json.dumps([v, s])) for k, (v, s) in properties.items()] for i in inserts: self.checkLength(bs_props_tbl.c.property_name, 
i['property_name']) conn.execute(bs_props_tbl.insert(), inserts) # add sourcestamp ids r = conn.execute(self.db.model.buildset_sourcestamps.insert(), [dict(buildsetid=bsid, sourcestampid=ssid) for ssid in sourcestampids]) # and finish with a build request for each builder. Note that # sqlalchemy and the Python DBAPI do not provide a way to recover # inserted IDs from a multi-row insert, so this is done one row at # a time. brids = {} br_tbl = self.db.model.buildrequests ins = br_tbl.insert() for builderid in builderids: r = conn.execute(ins, dict(buildsetid=bsid, builderid=builderid, priority=0, claimed_at=0, claimed_by_name=None, claimed_by_incarnation=None, complete=0, results=-1, submitted_at=submitted_at, complete_at=None, waited_for=1 if waited_for else 0)) brids[builderid] = r.inserted_primary_key[0] transaction.commit() return (bsid, brids) bsid, brids = yield self.db.pool.do(thd) # Seed the buildset property cache. self.getBuildsetProperties.cache.put(bsid, BsProps(properties)) return (bsid, brids) @defer.inlineCallbacks def completeBuildset(self, bsid, results, complete_at=None): if complete_at is not None: complete_at = datetime2epoch(complete_at) else: complete_at = int(self.master.reactor.seconds()) def thd(conn): tbl = self.db.model.buildsets q = tbl.update(whereclause=( (tbl.c.id == bsid) & ((tbl.c.complete == NULL) | (tbl.c.complete != 1)))) res = conn.execute(q, complete=1, results=results, complete_at=complete_at) if res.rowcount != 1: # happens when two buildrequests finish at the same time raise AlreadyCompleteError() yield self.db.pool.do(thd) # returns a Deferred that returns a value def getBuildset(self, bsid): def thd(conn): bs_tbl = self.db.model.buildsets q = bs_tbl.select(whereclause=(bs_tbl.c.id == bsid)) res = conn.execute(q) row = res.fetchone() if not row: return None return self._thd_row2dict(conn, row) return self.db.pool.do(thd) @defer.inlineCallbacks def getBuildsets(self, complete=None, resultSpec=None): def thd(conn): bs_tbl = 
self.db.model.buildsets q = bs_tbl.select() if complete is not None: if complete: q = q.where(bs_tbl.c.complete != 0) else: q = q.where((bs_tbl.c.complete == 0) | (bs_tbl.c.complete == NULL)) if resultSpec is not None: return resultSpec.thd_execute(conn, q, lambda x: self._thd_row2dict(conn, x)) res = conn.execute(q) return [self._thd_row2dict(conn, row) for row in res.fetchall()] res = yield self.db.pool.do(thd) return res # returns a Deferred that returns a value def getRecentBuildsets(self, count=None, branch=None, repository=None, complete=None): def thd(conn): bs_tbl = self.db.model.buildsets ss_tbl = self.db.model.sourcestamps j = self.db.model.buildsets j = j.join(self.db.model.buildset_sourcestamps) j = j.join(self.db.model.sourcestamps) q = sa.select(columns=[bs_tbl], from_obj=[j], distinct=True) q = q.order_by(sa.desc(bs_tbl.c.submitted_at)) q = q.limit(count) if complete is not None: if complete: q = q.where(bs_tbl.c.complete != 0) else: q = q.where((bs_tbl.c.complete == 0) | (bs_tbl.c.complete == NULL)) if branch: q = q.where(ss_tbl.c.branch == branch) if repository: q = q.where(ss_tbl.c.repository == repository) res = conn.execute(q) return list(reversed([self._thd_row2dict(conn, row) for row in res.fetchall()])) return self.db.pool.do(thd) # returns a Deferred that returns a value @base.cached("BuildsetProperties") def getBuildsetProperties(self, bsid): def thd(conn): bsp_tbl = self.db.model.buildset_properties q = sa.select( [bsp_tbl.c.property_name, bsp_tbl.c.property_value], whereclause=(bsp_tbl.c.buildsetid == bsid)) ret = [] for row in conn.execute(q): try: properties = json.loads(row.property_value) ret.append((row.property_name, tuple(properties))) except ValueError: pass return BsProps(ret) return self.db.pool.do(thd) def _thd_row2dict(self, conn, row): # get sourcestamps tbl = self.db.model.buildset_sourcestamps sourcestamps = [r.sourcestampid for r in conn.execute(sa.select([tbl.c.sourcestampid], (tbl.c.buildsetid == row.id))).fetchall()] 
return BsDict(external_idstring=row.external_idstring, reason=row.reason, submitted_at=epoch2datetime(row.submitted_at), complete=bool(row.complete), complete_at=epoch2datetime(row.complete_at), results=row.results, bsid=row.id, sourcestamps=sourcestamps, parent_buildid=row.parent_buildid, parent_relationship=row.parent_relationship) buildbot-3.4.0/master/buildbot/db/changes.py000066400000000000000000000355741413250514000210010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ Support for changes in the database """ import json import sqlalchemy as sa from twisted.internet import defer from twisted.python import log from buildbot.db import base from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class ChDict(dict): pass class ChangesConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst # returns a Deferred that returns a value def getParentChangeIds(self, branch, repository, project, codebase): def thd(conn): changes_tbl = self.db.model.changes q = sa.select([changes_tbl.c.changeid], whereclause=((changes_tbl.c.branch == branch) & (changes_tbl.c.repository == repository) & (changes_tbl.c.project == project) & (changes_tbl.c.codebase == codebase)), order_by=sa.desc(changes_tbl.c.changeid), limit=1) parent_id = conn.scalar(q) return [parent_id] if parent_id else [] return self.db.pool.do(thd) @defer.inlineCallbacks def addChange(self, author=None, committer=None, files=None, comments=None, is_dir=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase='', project='', uid=None): assert project is not None, "project must be a string, not None" assert repository is not None, "repository must be a string, not None" if is_dir is not None: log.msg("WARNING: change source is providing deprecated " "value is_dir (ignored)") if when_timestamp is None: when_timestamp = epoch2datetime(self.master.reactor.seconds()) if properties is None: properties = {} # verify that source is 'Change' for each property for pv in properties.values(): assert pv[1] == 'Change', ("properties must be qualified with" "source 'Change'") ch_tbl = self.db.model.changes self.checkLength(ch_tbl.c.author, author) self.checkLength(ch_tbl.c.committer, committer) self.checkLength(ch_tbl.c.branch, branch) self.checkLength(ch_tbl.c.revision, revision) self.checkLength(ch_tbl.c.revlink, revlink) 
self.checkLength(ch_tbl.c.category, category) self.checkLength(ch_tbl.c.repository, repository) self.checkLength(ch_tbl.c.project, project) # calculate the sourcestamp first, before adding it ssid = yield self.db.sourcestamps.findSourceStampId( revision=revision, branch=branch, repository=repository, codebase=codebase, project=project) parent_changeids = yield self.getParentChangeIds(branch, repository, project, codebase) # Someday, changes will have multiple parents. # But for the moment, a Change can only have 1 parent parent_changeid = parent_changeids[0] if parent_changeids else None def thd(conn): # note that in a read-uncommitted database like SQLite this # transaction does not buy atomicity - other database users may # still come across a change without its files, properties, # etc. That's OK, since we don't announce the change until it's # all in the database, but beware. transaction = conn.begin() r = conn.execute(ch_tbl.insert(), dict( author=author, committer=committer, comments=comments, branch=branch, revision=revision, revlink=revlink, when_timestamp=datetime2epoch(when_timestamp), category=category, repository=repository, codebase=codebase, project=project, sourcestampid=ssid, parent_changeids=parent_changeid)) changeid = r.inserted_primary_key[0] if files: tbl = self.db.model.change_files for f in files: self.checkLength(tbl.c.filename, f) conn.execute(tbl.insert(), [ dict(changeid=changeid, filename=f) for f in files ]) if properties: tbl = self.db.model.change_properties inserts = [ dict(changeid=changeid, property_name=k, property_value=json.dumps(v)) for k, v in properties.items() ] for i in inserts: self.checkLength(tbl.c.property_name, i['property_name']) conn.execute(tbl.insert(), inserts) if uid: ins = self.db.model.change_users.insert() conn.execute(ins, dict(changeid=changeid, uid=uid)) transaction.commit() return changeid return (yield self.db.pool.do(thd)) # returns a Deferred that returns a value @base.cached("chdicts") def 
getChange(self, changeid): assert changeid >= 0 def thd(conn): # get the row from the 'changes' table changes_tbl = self.db.model.changes q = changes_tbl.select( whereclause=(changes_tbl.c.changeid == changeid)) rp = conn.execute(q) row = rp.fetchone() if not row: return None # and fetch the ancillary data (files, properties) return self._chdict_from_change_row_thd(conn, row) return self.db.pool.do(thd) @defer.inlineCallbacks def getChangesForBuild(self, buildid): assert buildid > 0 gssfb = self.master.db.sourcestamps.getSourceStampsForBuild changes = list() currentBuild = yield self.master.db.builds.getBuild(buildid) fromChanges, toChanges = dict(), dict() ssBuild = yield gssfb(buildid) for ss in ssBuild: fromChanges[ss['codebase']] = yield self.getChangeFromSSid(ss['ssid']) # Get the last successful build on the same builder previousBuild = yield self.master.db.builds.getPrevSuccessfulBuild( currentBuild['builderid'], currentBuild['number'], ssBuild) if previousBuild: for ss in (yield gssfb(previousBuild['id'])): toChanges[ss['codebase']] = yield self.getChangeFromSSid(ss['ssid']) else: # If no successful previous build, then we need to catch all # changes for cb in fromChanges: toChanges[cb] = {'changeid': None} # For each codebase, append changes until we match the parent for cb, change in fromChanges.items(): # Careful; toChanges[cb] may be None from getChangeFromSSid toCbChange = toChanges.get(cb) or {} if change and change['changeid'] != toCbChange.get('changeid'): changes.append(change) while ((toCbChange.get('changeid') not in change['parent_changeids']) and change['parent_changeids']): # For the moment, a Change only have 1 parent. 
change = yield self.master.db.changes.getChange(change['parent_changeids'][0]) # http://trac.buildbot.net/ticket/3461 sometimes, # parent_changeids could be corrupted if change is None: break changes.append(change) return changes # returns a Deferred that returns a value def getChangeFromSSid(self, sourcestampid): assert sourcestampid >= 0 def thd(conn): # get the row from the 'changes' table changes_tbl = self.db.model.changes q = changes_tbl.select( whereclause=(changes_tbl.c.sourcestampid == sourcestampid)) # if there are multiple changes for this ssid, get the most recent one q = q.order_by(changes_tbl.c.changeid.desc()) q = q.limit(1) rp = conn.execute(q) row = rp.fetchone() if not row: return None # and fetch the ancillary data (files, properties) return self._chdict_from_change_row_thd(conn, row) return self.db.pool.do(thd) # returns a Deferred that returns a value def getChangeUids(self, changeid): assert changeid >= 0 def thd(conn): cu_tbl = self.db.model.change_users q = cu_tbl.select(whereclause=(cu_tbl.c.changeid == changeid)) res = conn.execute(q) rows = res.fetchall() row_uids = [row.uid for row in rows] return row_uids return self.db.pool.do(thd) def _getDataFromRow(self, row): return row.changeid def getChanges(self, resultSpec=None): def thd(conn): # get the changeids from the 'changes' table changes_tbl = self.db.model.changes q = sa.select([changes_tbl.c.changeid]) if resultSpec is not None: return reversed(resultSpec.thd_execute(conn, q, self._getDataFromRow)) rp = conn.execute(q) changeids = [self._getDataFromRow(row) for row in rp] rp.close() return list(changeids) d = self.db.pool.do(thd) # then turn those into changes, using the cache @d.addCallback def get_changes(changeids): return defer.gatherResults([self.getChange(changeid) for changeid in changeids]) return d # returns a Deferred that returns a value def getChangesCount(self): def thd(conn): changes_tbl = self.db.model.changes q = sa.select([sa.func.count()]).select_from(changes_tbl) 
rp = conn.execute(q) r = 0 for row in rp: r = row[0] rp.close() return int(r) return self.db.pool.do(thd) # returns a Deferred that returns a value def getLatestChangeid(self): def thd(conn): changes_tbl = self.db.model.changes q = sa.select([changes_tbl.c.changeid], order_by=sa.desc(changes_tbl.c.changeid), limit=1) return conn.scalar(q) return self.db.pool.do(thd) # utility methods @defer.inlineCallbacks def pruneChanges(self, changeHorizon): """ Called periodically by DBConnector, this method deletes changes older than C{changeHorizon}. """ if not changeHorizon: return None def thd(conn): changes_tbl = self.db.model.changes # First, get the list of changes to delete. This could be written # as a subquery but then that subquery would be run for every # table, which is very inefficient; also, MySQL's subquery support # leaves much to be desired, and doesn't support this particular # form. q = sa.select([changes_tbl.c.changeid], order_by=[sa.desc(changes_tbl.c.changeid)], offset=changeHorizon) res = conn.execute(q) ids_to_delete = [r.changeid for r in res] # and delete from all relevant tables, in dependency order for table_name in ('scheduler_changes', 'change_files', 'change_properties', 'changes', 'change_users'): remaining = ids_to_delete[:] while remaining: batch, remaining = remaining[:100], remaining[100:] table = self.db.model.metadata.tables[table_name] conn.execute( table.delete(table.c.changeid.in_(batch))) yield self.db.pool.do(thd) def _chdict_from_change_row_thd(self, conn, ch_row): # This method must be run in a db.pool thread, and returns a chdict # given a row from the 'changes' table change_files_tbl = self.db.model.change_files change_properties_tbl = self.db.model.change_properties if ch_row.parent_changeids: parent_changeids = [ch_row.parent_changeids] else: parent_changeids = [] chdict = ChDict( changeid=ch_row.changeid, parent_changeids=parent_changeids, author=ch_row.author, committer=ch_row.committer, files=[], # see below 
comments=ch_row.comments, revision=ch_row.revision, when_timestamp=epoch2datetime(ch_row.when_timestamp), branch=ch_row.branch, category=ch_row.category, revlink=ch_row.revlink, properties={}, # see below repository=ch_row.repository, codebase=ch_row.codebase, project=ch_row.project, sourcestampid=int(ch_row.sourcestampid)) query = change_files_tbl.select( whereclause=(change_files_tbl.c.changeid == ch_row.changeid)) rows = conn.execute(query) for r in rows: chdict['files'].append(r.filename) # and properties must be given without a source, so strip that, but # be flexible in case users have used a development version where the # change properties were recorded incorrectly def split_vs(vs): try: v, s = vs if s != "Change": v, s = vs, "Change" except (ValueError, TypeError): v, s = vs, "Change" return v, s query = change_properties_tbl.select( whereclause=(change_properties_tbl.c.changeid == ch_row.changeid)) rows = conn.execute(query) for r in rows: try: v, s = split_vs(json.loads(r.property_value)) chdict['properties'][r.property_name] = (v, s) except ValueError: pass return chdict buildbot-3.4.0/master/buildbot/db/changesources.py000066400000000000000000000075341413250514000222150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.internet import defer from buildbot.db import NULL from buildbot.db import base class ChangeSourceAlreadyClaimedError(Exception): pass class ChangeSourcesConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst def findChangeSourceId(self, name): tbl = self.db.model.changesources name_hash = self.hashColumns(name) return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name_hash == name_hash), insert_values=dict( name=name, name_hash=name_hash, )) # returns a Deferred that returns None def setChangeSourceMaster(self, changesourceid, masterid): def thd(conn): cs_mst_tbl = self.db.model.changesource_masters # handle the masterid=None case to get it out of the way if masterid is None: q = cs_mst_tbl.delete( whereclause=(cs_mst_tbl.c.changesourceid == changesourceid)) conn.execute(q) return # try a blind insert.. try: q = cs_mst_tbl.insert() conn.execute(q, dict(changesourceid=changesourceid, masterid=masterid)) except (sa.exc.IntegrityError, sa.exc.ProgrammingError) as e: # someone already owns this changesource. 
raise ChangeSourceAlreadyClaimedError from e return self.db.pool.do(thd) @defer.inlineCallbacks def getChangeSource(self, changesourceid): cs = yield self.getChangeSources(_changesourceid=changesourceid) if cs: return cs[0] return None # returns a Deferred that returns a value def getChangeSources(self, active=None, masterid=None, _changesourceid=None): def thd(conn): cs_tbl = self.db.model.changesources cs_mst_tbl = self.db.model.changesource_masters # handle the trivial case of masterid=xx and active=False if masterid is not None and active is not None and not active: return [] join = cs_tbl.outerjoin(cs_mst_tbl, (cs_tbl.c.id == cs_mst_tbl.c.changesourceid)) # if we're given a _changesourceid, select only that row wc = None if _changesourceid: wc = (cs_tbl.c.id == _changesourceid) else: # otherwise, filter with active, if necessary if masterid is not None: wc = (cs_mst_tbl.c.masterid == masterid) elif active: wc = (cs_mst_tbl.c.masterid != NULL) elif active is not None: wc = (cs_mst_tbl.c.masterid == NULL) q = sa.select([cs_tbl.c.id, cs_tbl.c.name, cs_mst_tbl.c.masterid], from_obj=join, whereclause=wc) return [dict(id=row.id, name=row.name, masterid=row.masterid) for row in conn.execute(q).fetchall()] return self.db.pool.do(thd) buildbot-3.4.0/master/buildbot/db/connector.py000066400000000000000000000145071413250514000213540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import textwrap from twisted.application import internet from twisted.internet import defer from twisted.python import log from buildbot import util from buildbot.db import build_data from buildbot.db import builders from buildbot.db import buildrequests from buildbot.db import builds from buildbot.db import buildsets from buildbot.db import changes from buildbot.db import changesources from buildbot.db import enginestrategy from buildbot.db import exceptions from buildbot.db import logs from buildbot.db import masters from buildbot.db import model from buildbot.db import pool from buildbot.db import schedulers from buildbot.db import sourcestamps from buildbot.db import state from buildbot.db import steps from buildbot.db import tags from buildbot.db import test_result_sets from buildbot.db import test_results from buildbot.db import users from buildbot.db import workers from buildbot.util import service upgrade_message = textwrap.dedent("""\ The Buildmaster database needs to be upgraded before this version of buildbot can run. Use the following command-line buildbot upgrade-master {basedir} to upgrade the database, and try starting the buildmaster again. You may want to make a backup of your buildmaster before doing so. """).strip() class DBConnector(service.ReconfigurableServiceMixin, service.AsyncMultiService): # The connection between Buildbot and its backend database. This is # generally accessible as master.db, but is also used during upgrades. # # Most of the interesting operations available via the connector are # implemented in connector components, available as attributes of this # object, and listed below. # Period, in seconds, of the cleanup task. 
This master will perform # periodic cleanup actions on this schedule. CLEANUP_PERIOD = 3600 def __init__(self, basedir): super().__init__() self.setName('db') self.basedir = basedir # not configured yet - we don't build an engine until the first # reconfig self.configured_url = None # set up components self._engine = None # set up in reconfigService self.pool = None # set up in reconfigService @defer.inlineCallbacks def setServiceParent(self, p): yield super().setServiceParent(p) self.model = model.Model(self) self.changes = changes.ChangesConnectorComponent(self) self.changesources = changesources.ChangeSourcesConnectorComponent( self) self.schedulers = schedulers.SchedulersConnectorComponent(self) self.sourcestamps = sourcestamps.SourceStampsConnectorComponent(self) self.buildsets = buildsets.BuildsetsConnectorComponent(self) self.buildrequests = buildrequests.BuildRequestsConnectorComponent( self) self.state = state.StateConnectorComponent(self) self.builds = builds.BuildsConnectorComponent(self) self.build_data = build_data.BuildDataConnectorComponent(self) self.workers = workers.WorkersConnectorComponent(self) self.users = users.UsersConnectorComponent(self) self.masters = masters.MastersConnectorComponent(self) self.builders = builders.BuildersConnectorComponent(self) self.steps = steps.StepsConnectorComponent(self) self.tags = tags.TagsConnectorComponent(self) self.logs = logs.LogsConnectorComponent(self) self.test_results = test_results.TestResultsConnectorComponent(self) self.test_result_sets = test_result_sets.TestResultSetsConnectorComponent(self) self.cleanup_timer = internet.TimerService(self.CLEANUP_PERIOD, self._doCleanup) self.cleanup_timer.clock = self.master.reactor yield self.cleanup_timer.setServiceParent(self) @defer.inlineCallbacks def setup(self, check_version=True, verbose=True): db_url = self.configured_url = self.master.config.db['db_url'] log.msg("Setting up database with URL %r" % util.stripUrlPassword(db_url)) # set up the engine and 
pool self._engine = enginestrategy.create_engine(db_url, basedir=self.basedir) self.pool = pool.DBThreadPool( self._engine, reactor=self.master.reactor, verbose=verbose) # make sure the db is up to date, unless specifically asked not to if check_version: if db_url == 'sqlite://': # Using in-memory database. Since it is reset after each process # restart, `buildbot upgrade-master` cannot be used (data is not # persistent). Upgrade model here to allow startup to continue. yield self.model.upgrade() current = yield self.model.is_current() if not current: for l in upgrade_message.format(basedir=self.master.basedir).split('\n'): log.msg(l) raise exceptions.DatabaseNotReadyError() def reconfigServiceWithBuildbotConfig(self, new_config): # double-check -- the master ensures this in config checks assert self.configured_url == new_config.db['db_url'] return super().reconfigServiceWithBuildbotConfig(new_config) def _doCleanup(self): """ Perform any periodic database cleanup tasks. @returns: Deferred """ # pass on this if we're not configured yet if not self.configured_url: return None d = self.changes.pruneChanges(self.master.config.changeHorizon) d.addErrback(log.err, 'while pruning changes') return d buildbot-3.4.0/master/buildbot/db/dbconfig.py000066400000000000000000000056511413250514000211350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from sqlalchemy.exc import OperationalError from sqlalchemy.exc import ProgrammingError from buildbot.config import MasterConfig from buildbot.db import enginestrategy from buildbot.db import model from buildbot.db import state class FakeDBConnector: pass class FakeCacheManager: def get_cache(self, cache_name, miss_fn): return None class FakeMaster: pass class FakePool: pass class DbConfig: def __init__(self, BuildmasterConfig, basedir, name="config"): self.db_url = MasterConfig.getDbUrlFromConfig( BuildmasterConfig, throwErrors=False) self.basedir = basedir self.name = name def getDb(self): try: db_engine = enginestrategy.create_engine(self.db_url, basedir=self.basedir) except Exception: # db_url is probably trash. Just ignore, config.py db part will # create proper message return None db = FakeDBConnector() db.master = FakeMaster() db.pool = FakePool() db.pool.engine = db_engine db.master.caches = FakeCacheManager() db.model = model.Model(db) db.state = state.StateConnectorComponent(db) try: self.objectid = db.state.thdGetObjectId( db_engine, self.name, "DbConfig")['id'] except (ProgrammingError, OperationalError): # ProgrammingError: mysql&pg, OperationalError: sqlite # assume db is not initialized db.pool.engine.dispose() return None return db def get(self, name, default=state.StateConnectorComponent.Thunk): db = self.getDb() if db is not None: ret = db.state.thdGetState( db.pool.engine, self.objectid, name, default=default) db.pool.engine.dispose() else: if default is not state.StateConnectorComponent.Thunk: return default raise KeyError("Db not yet initialized") return ret def set(self, name, value): db = self.getDb() if db is not None: db.state.thdSetState(db.pool.engine, self.objectid, name, value) 
db.pool.engine.dispose() buildbot-3.4.0/master/buildbot/db/enginestrategy.py000066400000000000000000000212361413250514000224070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ A wrapper around `sqlalchemy.create_engine` that handles all of the special cases that Buildbot needs. Those include: - pool_recycle for MySQL - %(basedir) substitution - optimal thread pool size calculation """ import os import sqlalchemy as sa from sqlalchemy.engine import url from sqlalchemy.pool import NullPool from twisted.python import log from buildbot.util import sautils # from http://www.mail-archive.com/sqlalchemy@googlegroups.com/msg15079.html class ReconnectingListener: def __init__(self): self.retried = False class Strategy: def set_up(self, u, engine): pass def should_retry(self, operational_error): try: text = operational_error.args[0] return 'Lost connection' in text or 'database is locked' in text except Exception: return False class SqlLiteStrategy(Strategy): def set_up(self, u, engine): """Special setup for sqlite engines""" def connect_listener_enable_fk(connection, record): # fk must be enabled for all connections if not getattr(engine, "fk_disabled", False): return # http://trac.buildbot.net/ticket/3490#ticket # connection.execute('pragma foreign_keys=ON') sa.event.listen(engine.pool, 'connect', 
connect_listener_enable_fk) # try to enable WAL logging if u.database: def connect_listener(connection, record): connection.execute("pragma checkpoint_fullfsync = off") sa.event.listen(engine.pool, 'connect', connect_listener) log.msg("setting database journal mode to 'wal'") try: engine.execute("pragma journal_mode = wal") except Exception: log.msg("failed to set journal mode - database may fail") class MySQLStrategy(Strategy): disconnect_error_codes = (2006, 2013, 2014, 2045, 2055) deadlock_error_codes = (1213,) def in_error_codes(self, args, error_codes): if args: return args[0] in error_codes return False def is_disconnect(self, args): return self.in_error_codes(args, self.disconnect_error_codes) def is_deadlock(self, args): return self.in_error_codes(args, self.deadlock_error_codes) def set_up(self, u, engine): """Special setup for mysql engines""" # add the reconnecting PoolListener that will detect a # disconnected connection and automatically start a new # one. This provides a measure of additional safety over # the pool_recycle parameter, and is useful when e.g., the # mysql server goes away def checkout_listener(dbapi_con, con_record, con_proxy): try: cursor = dbapi_con.cursor() cursor.execute("SELECT 1") except dbapi_con.OperationalError as ex: if self.is_disconnect(ex.args): # sqlalchemy will re-create the connection log.msg('connection will be removed') raise sa.exc.DisconnectionError() log.msg('exception happened {}'.format(ex)) raise # older versions of sqlalchemy require the listener to be specified # in the kwargs, in a class instance if sautils.sa_version() < (0, 7, 0): class ReconnectingListener: pass rcl = ReconnectingListener() rcl.checkout = checkout_listener engine.pool.add_listener(rcl) else: sa.event.listen(engine.pool, 'checkout', checkout_listener) def should_retry(self, ex): return any([self.is_disconnect(ex.orig.args), self.is_deadlock(ex.orig.args), super().should_retry(ex)]) def sa_url_set_attr(u, attr, value): if hasattr(u, 'set'): 
return u.set(**{attr: value}) setattr(u, attr, value) return u def special_case_sqlite(u, kwargs): """For sqlite, percent-substitute %(basedir)s and use a full path to the basedir. If using a memory database, force the pool size to be 1.""" max_conns = 1 # when given a database path, stick the basedir in there if u.database: # Use NullPool instead of the sqlalchemy-0.6.8-default # SingletonThreadPool for sqlite to suppress the error in # http://groups.google.com/group/sqlalchemy/msg/f8482e4721a89589, # which also explains that NullPool is the new default in # sqlalchemy 0.7 for non-memory SQLite databases. kwargs.setdefault('poolclass', NullPool) database = u.database database = database % dict(basedir=kwargs['basedir']) if not os.path.isabs(database[0]): database = os.path.join(kwargs['basedir'], database) u = sa_url_set_attr(u, 'database', database) else: # For in-memory database SQLAlchemy will use SingletonThreadPool # and we will run connection creation and all queries in the single # thread. # However connection destruction will be run from the main # thread, which is safe in our case, but not safe in general, # so SQLite will emit warning about it. # Silence that warning. kwargs.setdefault('connect_args', {})['check_same_thread'] = False # ignore serializing access to the db if 'serialize_access' in u.query: query = dict(u.query) query.pop('serialize_access') u = sa_url_set_attr(u, 'query', query) return u, kwargs, max_conns def special_case_mysql(u, kwargs): """For mysql, take max_idle out of the query arguments, and use its value for pool_recycle. 
Also, force use_unicode and charset to be True and 'utf8', failing if they were set to anything else.""" query = dict(u.query) kwargs['pool_recycle'] = int(query.pop('max_idle', 3600)) # default to the MyISAM storage engine storage_engine = query.pop('storage_engine', 'MyISAM') kwargs['connect_args'] = { 'init_command': 'SET default_storage_engine={}'.format(storage_engine) } if 'use_unicode' in query: if query['use_unicode'] != "True": raise TypeError("Buildbot requires use_unicode=True " + "(and adds it automatically)") else: query['use_unicode'] = "True" if 'charset' in query: if query['charset'] != "utf8": raise TypeError("Buildbot requires charset=utf8 " + "(and adds it automatically)") else: query['charset'] = 'utf8' u = sa_url_set_attr(u, 'query', query) return u, kwargs, None def get_drivers_strategy(drivername): if drivername.startswith('sqlite'): return SqlLiteStrategy() elif drivername.startswith('mysql'): return MySQLStrategy() return Strategy() def create_engine(name_or_url, **kwargs): if 'basedir' not in kwargs: raise TypeError('no basedir supplied to create_engine') max_conns = None # apply special cases u = url.make_url(name_or_url) if u.drivername.startswith('sqlite'): u, kwargs, max_conns = special_case_sqlite(u, kwargs) elif u.drivername.startswith('mysql'): u, kwargs, max_conns = special_case_mysql(u, kwargs) # remove the basedir as it may confuse sqlalchemy basedir = kwargs.pop('basedir') # calculate the maximum number of connections from the pool parameters, # if it hasn't already been specified if max_conns is None: max_conns = kwargs.get( 'pool_size', 5) + kwargs.get('max_overflow', 10) driver_strategy = get_drivers_strategy(u.drivername) engine = sa.create_engine(u, **kwargs) driver_strategy.set_up(u, engine) engine.should_retry = driver_strategy.should_retry # annotate the engine with the optimal thread pool size; this is used # by DBConnector to configure the surrounding thread pool engine.optimal_thread_pool_size = max_conns # keep the 
basedir engine.buildbot_basedir = basedir return engine buildbot-3.4.0/master/buildbot/db/exceptions.py000066400000000000000000000013641413250514000215400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members class DatabaseNotReadyError(Exception): pass buildbot-3.4.0/master/buildbot/db/logs.py000066400000000000000000000410221413250514000203160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import bz2 import zlib import sqlalchemy as sa from twisted.internet import defer from twisted.python import log from buildbot.db import base try: # lz4 > 0.9.0 from lz4.block import compress as dumps_lz4 from lz4.block import decompress as read_lz4 except ImportError: try: # lz4 < 0.9.0 from lz4 import dumps as dumps_lz4 from lz4 import loads as read_lz4 except ImportError: # pragma: no cover # config.py actually forbid this code path def dumps_lz4(data): return data def read_lz4(data): return data def dumps_gzip(data): return zlib.compress(data, 9) def read_gzip(data): return zlib.decompress(data) def dumps_bz2(data): return bz2.compress(data, 9) def read_bz2(data): return bz2.decompress(data) class LogsConnectorComponent(base.DBConnectorComponent): # Postgres and MySQL will both allow bigger sizes than this. The limit # for MySQL appears to be max_packet_size (default 1M). # note that MAX_CHUNK_SIZE is equal to BUFFER_SIZE in buildbot_worker.runprocess MAX_CHUNK_SIZE = 65536 # a chunk may not be bigger than this MAX_CHUNK_LINES = 1000 # a chunk may not have more lines than this COMPRESSION_MODE = {"raw": {"id": 0, "dumps": lambda x: x, "read": lambda x: x}, "gz": {"id": 1, "dumps": dumps_gzip, "read": read_gzip}, "bz2": {"id": 2, "dumps": dumps_bz2, "read": read_bz2}, "lz4": {"id": 3, "dumps": dumps_lz4, "read": read_lz4}} COMPRESSION_BYID = dict((x["id"], x) for x in COMPRESSION_MODE.values()) total_raw_bytes = 0 total_compressed_bytes = 0 # returns a Deferred that returns a value def _getLog(self, whereclause): def thd_getLog(conn): q = self.db.model.logs.select(whereclause=whereclause) res = conn.execute(q) row = res.fetchone() rv = None if row: rv = self._logdictFromRow(row) res.close() return rv return self.db.pool.do(thd_getLog) def getLog(self, logid): return self._getLog(self.db.model.logs.c.id == logid) def getLogBySlug(self, stepid, slug): tbl = self.db.model.logs return self._getLog((tbl.c.slug == slug) & 
(tbl.c.stepid == stepid)) # returns a Deferred that returns a value def getLogs(self, stepid=None): def thdGetLogs(conn): tbl = self.db.model.logs q = tbl.select() if stepid is not None: q = q.where(tbl.c.stepid == stepid) q = q.order_by(tbl.c.id) res = conn.execute(q) return [self._logdictFromRow(row) for row in res.fetchall()] return self.db.pool.do(thdGetLogs) # returns a Deferred that returns a value def getLogLines(self, logid, first_line, last_line): def thdGetLogLines(conn): # get a set of chunks that completely cover the requested range tbl = self.db.model.logchunks q = sa.select([tbl.c.first_line, tbl.c.last_line, tbl.c.content, tbl.c.compressed]) q = q.where(tbl.c.logid == logid) q = q.where(tbl.c.first_line <= last_line) q = q.where(tbl.c.last_line >= first_line) q = q.order_by(tbl.c.first_line) rv = [] for row in conn.execute(q): # Retrieve associated "reader" and extract the data # Note that row.content is stored as bytes, and our caller expects unicode data = self.COMPRESSION_BYID[ row.compressed]["read"](row.content) content = data.decode('utf-8') if row.first_line < first_line: idx = -1 count = first_line - row.first_line for _ in range(count): idx = content.index('\n', idx + 1) content = content[idx + 1:] if row.last_line > last_line: idx = len(content) + 1 count = row.last_line - last_line for _ in range(count): idx = content.rindex('\n', 0, idx) content = content[:idx] rv.append(content) return '\n'.join(rv) + '\n' if rv else '' return self.db.pool.do(thdGetLogLines) # returns a Deferred that returns a value def addLog(self, stepid, name, slug, type): assert type in 'tsh', "Log type must be one of t, s, or h" def thdAddLog(conn): try: r = conn.execute(self.db.model.logs.insert(), dict(name=name, slug=slug, stepid=stepid, complete=0, num_lines=0, type=type)) return r.inserted_primary_key[0] except (sa.exc.IntegrityError, sa.exc.ProgrammingError) as e: raise KeyError( "log with slug '%r' already exists in this step" % (slug,)) from e return 
self.db.pool.do(thdAddLog) def thdCompressChunk(self, chunk): # Set the default compressed mode to "raw" id compressed_id = self.COMPRESSION_MODE["raw"]["id"] self.total_raw_bytes += len(chunk) # Do we have to compress the chunk? if self.master.config.logCompressionMethod != "raw": compressed_mode = self.COMPRESSION_MODE[ self.master.config.logCompressionMethod] compressed_chunk = compressed_mode["dumps"](chunk) # Is it useful to compress the chunk? if len(chunk) > len(compressed_chunk): compressed_id = compressed_mode["id"] chunk = compressed_chunk self.total_compressed_bytes += len(chunk) return chunk, compressed_id def thdSplitAndAppendChunk(self, conn, logid, content, first_line): # Break the content up into chunks. This takes advantage of the # fact that no character but u'\n' maps to b'\n' in UTF-8. remaining = content chunk_first_line = last_line = first_line while remaining: chunk, remaining = self._splitBigChunk(remaining, logid) last_line = chunk_first_line + chunk.count(b'\n') chunk, compressed_id = self.thdCompressChunk(chunk) conn.execute(self.db.model.logchunks.insert(), dict(logid=logid, first_line=chunk_first_line, last_line=last_line, content=chunk, compressed=compressed_id)).close() chunk_first_line = last_line + 1 conn.execute(self.db.model.logs.update(whereclause=(self.db.model.logs.c.id == logid)), num_lines=last_line + 1).close() return first_line, last_line def thdAppendLog(self, conn, logid, content): # check for trailing newline and strip it for storage -- chunks omit # the trailing newline assert content[-1] == '\n' # Note that row.content is stored as bytes, and our caller is sending unicode content = content[:-1].encode('utf-8') q = sa.select([self.db.model.logs.c.num_lines]) q = q.where(self.db.model.logs.c.id == logid) res = conn.execute(q) num_lines = res.fetchone() res.close() if not num_lines: return None # ignore a missing log return self.thdSplitAndAppendChunk(conn=conn, logid=logid, content=content, first_line=num_lines[0]) # 
returns a Deferred that returns a value def appendLog(self, logid, content): def thdappendLog(conn): return self.thdAppendLog(conn, logid, content) return self.db.pool.do(thdappendLog) def _splitBigChunk(self, content, logid): """ Split CONTENT on a line boundary into a prefix smaller than 64k and a suffix containing the remainder, omitting the splitting newline. """ # if it's small enough, just return it if len(content) < self.MAX_CHUNK_SIZE: return content, None # find the last newline before the limit i = content.rfind(b'\n', 0, self.MAX_CHUNK_SIZE) if i != -1: return content[:i], content[i + 1:] log.msg('truncating long line for log %d' % logid) # first, truncate this down to something that decodes correctly truncline = content[:self.MAX_CHUNK_SIZE] while truncline: try: truncline.decode('utf-8') break except UnicodeDecodeError: truncline = truncline[:-1] # then find the beginning of the next line i = content.find(b'\n', self.MAX_CHUNK_SIZE) if i == -1: return truncline, None return truncline, content[i + 1:] # returns a Deferred that returns None def finishLog(self, logid): def thdfinishLog(conn): tbl = self.db.model.logs q = tbl.update(whereclause=(tbl.c.id == logid)) conn.execute(q, complete=1) return self.db.pool.do(thdfinishLog) @defer.inlineCallbacks def compressLog(self, logid, force=False): def thdcompressLog(conn): tbl = self.db.model.logchunks q = sa.select([tbl.c.first_line, tbl.c.last_line, sa.func.length(tbl.c.content), tbl.c.compressed]) q = q.where(tbl.c.logid == logid) q = q.order_by(tbl.c.first_line) rows = conn.execute(q) todo_gather_list = [] numchunks = 0 totlength = 0 todo_numchunks = 0 todo_first_line = 0 todo_last_line = 0 todo_length = 0 # first pass, we fetch the full list of chunks (without content) and find out # the chunk groups which could use some gathering. 
for row in rows: if (todo_length + row.length_1 > self.MAX_CHUNK_SIZE or (row.last_line - todo_first_line) > self.MAX_CHUNK_LINES): if todo_numchunks > 1 or (force and todo_numchunks): # this group is worth re-compressing todo_gather_list.append((todo_first_line, todo_last_line)) todo_first_line = row.first_line todo_length = 0 todo_numchunks = 0 todo_last_line = row.last_line # note that we count the compressed size for efficiency reason # unlike to the on-the-flow chunk splitter todo_length += row.length_1 totlength += row.length_1 todo_numchunks += 1 numchunks += 1 rows.close() if totlength == 0: # empty log return 0 if todo_numchunks > 1 or (force and todo_numchunks): # last chunk group todo_gather_list.append((todo_first_line, todo_last_line)) for todo_first_line, todo_last_line in todo_gather_list: # decompress this group of chunks. Note that the content is binary bytes. # no need to decode anything as we are going to put in back stored as bytes anyway q = sa.select( [tbl.c.first_line, tbl.c.last_line, tbl.c.content, tbl.c.compressed]) q = q.where(tbl.c.logid == logid) q = q.where(tbl.c.first_line >= todo_first_line) q = q.where(tbl.c.last_line <= todo_last_line) q = q.order_by(tbl.c.first_line) rows = conn.execute(q) chunk = b"" for row in rows: if chunk: chunk += b"\n" chunk += self.COMPRESSION_BYID[row.compressed][ "read"](row.content) rows.close() # Transaction is necessary so that readers don't see disappeared chunks transaction = conn.begin() # we remove the chunks that we are compressing d = tbl.delete() d = d.where(tbl.c.logid == logid) d = d.where(tbl.c.first_line >= todo_first_line) d = d.where(tbl.c.last_line <= todo_last_line) conn.execute(d).close() # and we recompress them in one big chunk chunk, compressed_id = self.thdCompressChunk(chunk) conn.execute(tbl.insert(), dict(logid=logid, first_line=todo_first_line, last_line=todo_last_line, content=chunk, compressed=compressed_id)).close() transaction.commit() # calculate how many bytes we saved q 
= sa.select([sa.func.sum(sa.func.length(tbl.c.content))]) q = q.where(tbl.c.logid == logid) newsize = conn.execute(q).fetchone()[0] return totlength - newsize saved = yield self.db.pool.do(thdcompressLog) return saved # returns a Deferred that returns a value def deleteOldLogChunks(self, older_than_timestamp): def thddeleteOldLogs(conn): model = self.db.model res = conn.execute(sa.select([sa.func.count(model.logchunks.c.logid)])) count1 = res.fetchone()[0] res.close() # update log types older than timestamps # we do it first to avoid having UI discrepancy # N.B.: we utilize the fact that steps.id is auto-increment, thus steps.started_at # times are effectively sorted and we only need to find the steps.id at the upper # bound of steps to update. # SELECT steps.id from steps WHERE steps.started_at < older_than_timestamp ORDER BY # steps.id DESC LIMIT 1; res = conn.execute( sa.select([model.steps.c.id]) .where(model.steps.c.started_at < older_than_timestamp) .order_by(model.steps.c.id.desc()) .limit(1) ) res_list = res.fetchone() stepid_max = None if res_list: stepid_max = res_list[0] res.close() # UPDATE logs SET logs.type = 'd' WHERE logs.stepid <= stepid_max AND type != 'd'; if stepid_max: res = conn.execute( model.logs.update() .where(sa.and_(model.logs.c.stepid <= stepid_max, model.logs.c.type != 'd')) .values(type='d') ) res.close() # query all logs with type 'd' and delete their chunks. 
if self.db._engine.dialect.name == 'sqlite': # sqlite does not support delete with a join, so for this case we use a subquery, # which is much slower q = sa.select([model.logs.c.id]) q = q.select_from(model.logs) q = q.where(model.logs.c.type == 'd') # delete their logchunks q = model.logchunks.delete().where(model.logchunks.c.logid.in_(q)) else: q = model.logchunks.delete() q = q.where(model.logs.c.id == model.logchunks.c.logid) q = q.where(model.logs.c.type == 'd') res = conn.execute(q) res.close() res = conn.execute(sa.select([sa.func.count(model.logchunks.c.logid)])) count2 = res.fetchone()[0] res.close() return count1 - count2 return self.db.pool.do(thddeleteOldLogs) def _logdictFromRow(self, row): rv = dict(row) rv['complete'] = bool(rv['complete']) return rv buildbot-3.4.0/master/buildbot/db/masters.py000066400000000000000000000075261413250514000210430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.db import base from buildbot.util import epoch2datetime class MasterDict(dict): pass class MastersConnectorComponent(base.DBConnectorComponent): data2db = {"masterid": "id", "link": "id"} def findMasterId(self, name): tbl = self.db.model.masters return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name == name), insert_values=dict( name=name, name_hash=self.hashColumns(name), active=0, # initially inactive last_active=int(self.master.reactor.seconds()) )) # returns a Deferred that returns a value def setMasterState(self, masterid, active): def thd(conn): tbl = self.db.model.masters whereclause = (tbl.c.id == masterid) # get the old state r = conn.execute(sa.select([tbl.c.active], whereclause=whereclause)) rows = r.fetchall() r.close() if not rows: return False # can't change a row that doesn't exist.. was_active = bool(rows[0].active) if not active: # if we're marking inactive, then delete any links to this # master sch_mst_tbl = self.db.model.scheduler_masters q = sch_mst_tbl.delete( whereclause=(sch_mst_tbl.c.masterid == masterid)) conn.execute(q) # set the state (unconditionally, just to be safe) q = tbl.update(whereclause=whereclause) q = q.values(active=1 if active else 0) if active: q = q.values(last_active=int(self.master.reactor.seconds())) conn.execute(q) # return True if there was a change in state return was_active != bool(active) return self.db.pool.do(thd) # returns a Deferred that returns a value def getMaster(self, masterid): def thd(conn): tbl = self.db.model.masters res = conn.execute(tbl.select( whereclause=(tbl.c.id == masterid))) row = res.fetchone() rv = None if row: rv = self._masterdictFromRow(row) res.close() return rv return self.db.pool.do(thd) # returns a Deferred that returns a value def getMasters(self): def thd(conn): tbl = self.db.model.masters return [ self._masterdictFromRow(row) for row in conn.execute(tbl.select()).fetchall()] return self.db.pool.do(thd) 
# returns a Deferred that returns None def setAllMastersActiveLongTimeAgo(self): def thd(conn): tbl = self.db.model.masters q = tbl.update().values(active=1, last_active=0) conn.execute(q) return self.db.pool.do(thd) def _masterdictFromRow(self, row): return MasterDict(id=row.id, name=row.name, active=bool(row.active), last_active=epoch2datetime(row.last_active)) buildbot-3.4.0/master/buildbot/db/migrate_utils.py000066400000000000000000000031741413250514000222300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def test_unicode(migrate_engine): """Test that the database can handle inserting and selecting Unicode""" # set up a subsidiary MetaData object to hold this temporary table submeta = sa.MetaData() submeta.bind = migrate_engine test_unicode = sautils.Table( 'test_unicode', submeta, sa.Column('u', sa.Unicode(length=100)), sa.Column('b', sa.LargeBinary), ) test_unicode.create() # insert a unicode value in there u = "Frosty the \N{SNOWMAN}" b = b'\xff\xff\x00' ins = test_unicode.insert().values(u=u, b=b) migrate_engine.execute(ins) # see if the data is intact row = migrate_engine.execute(sa.select([test_unicode])).fetchall()[0] assert isinstance(row['u'], str) assert row['u'] == u assert isinstance(row['b'], bytes) assert row['b'] == b # drop the test table test_unicode.drop() buildbot-3.4.0/master/buildbot/db/migrations/000077500000000000000000000000001413250514000211555ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/db/migrations/README000066400000000000000000000005151413250514000220360ustar00rootroot00000000000000This is a database migration repository. More information at https://alembic.sqlalchemy.org/en/latest/ In order to run the alembic tool the current directory must be this directory. To create new migration revision run `alembic revision --rev-id REV_ID --message MESSAGE` where REV_ID is the the last revision incremented by one. buildbot-3.4.0/master/buildbot/db/migrations/alembic.ini000066400000000000000000000036261413250514000232610ustar00rootroot00000000000000# A generic, single database configuration. [alembic] # path to migration scripts script_location = %(here)s/ # template used to generate migration files file_template = %%(rev)s_%%(year)d-%%(month).2d-%%(day).2d_%%(slug)s # sys.path path, will be prepended to sys.path if present. # defaults to the current working directory. prepend_sys_path = . 
# timezone to use when rendering the date # within the migration file as well as the filename. # string value is passed to dateutil.tz.gettz() # leave blank for localtime # timezone = truncate_slug_length = 40 # set to 'true' to run the environment during # the 'revision' command, regardless of autogenerate # revision_environment = false # set to 'true' to allow .pyc and .pyo files without # a source .py file to be detected as revisions in the # versions/ directory # sourceless = false version_locations = %(here)s/versions # the output encoding used when revision files are written from script.py.mako output_encoding = utf-8 sqlalchemy.url = driver://user:pass@localhost/dbname [post_write_hooks] # post_write_hooks defines scripts or Python functions that are run # on newly generated revision scripts. See the documentation for further # detail and examples # format using "black" - use the console_scripts runner, against the "black" entrypoint # hooks = black # black.type = console_scripts # black.entrypoint = black # black.options = -l 79 REVISION_SCRIPT_FILENAME # Logging configuration [loggers] keys = root,sqlalchemy,alembic [handlers] keys = console [formatters] keys = generic [logger_root] level = WARN handlers = console qualname = [logger_sqlalchemy] level = WARN handlers = qualname = sqlalchemy.engine [logger_alembic] level = INFO handlers = qualname = alembic [handler_console] class = StreamHandler args = (sys.stderr,) level = NOTSET formatter = generic [formatter_generic] format = %(levelname)-5.5s [%(name)s] %(message)s datefmt = %H:%M:%S buildbot-3.4.0/master/buildbot/db/migrations/env.py000066400000000000000000000033651413250514000223260ustar00rootroot00000000000000from logging.config import fileConfig from alembic import context from sqlalchemy import engine_from_config from sqlalchemy import pool from buildbot.db import model # this is the Alembic Config object, which provides # access to the values within the .ini file in use. 
config = context.config # Interpret the config file for Python logging. # This line sets up loggers basically. fileConfig(config.config_file_name) target_metadata = model.Model.metadata def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ url = config.get_main_option("sqlalchemy.url") context.configure( url=url, target_metadata=target_metadata, literal_binds=True, dialect_opts={"paramstyle": "named"}, ) with context.begin_transaction(): context.run_migrations() def run_migrations_online(): """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. """ connectable = engine_from_config( config.get_section(config.config_ini_section), prefix="sqlalchemy.", poolclass=pool.NullPool, ) with connectable.connect() as connection: context.configure( connection=connection, target_metadata=target_metadata ) with context.begin_transaction(): context.run_migrations() if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online() buildbot-3.4.0/master/buildbot/db/migrations/script.py.mako000066400000000000000000000007561413250514000237710ustar00rootroot00000000000000"""${message} Revision ID: ${up_revision} Revises: ${down_revision | comma,n} Create Date: ${create_date} """ from alembic import op import sqlalchemy as sa ${imports if imports else ""} # revision identifiers, used by Alembic. 
revision = ${repr(up_revision)} down_revision = ${repr(down_revision)} branch_labels = ${repr(branch_labels)} depends_on = ${repr(depends_on)} def upgrade(): ${upgrades if upgrades else "pass"} def downgrade(): ${downgrades if downgrades else "pass"} buildbot-3.4.0/master/buildbot/db/migrations/versions/000077500000000000000000000000001413250514000230255ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/db/migrations/versions/059_2021-09-07_alembic_initial.py000066400000000000000000000005671413250514000302270ustar00rootroot00000000000000"""initial Revision ID: 059 Revises: (none) Create Date: 2021-09-07 20:00:00.000000 This empty Alembic revision is used as a placeholder revision for upgrades from older versions of the database. """ # revision identifiers, used by Alembic. revision = '059' down_revision = None branch_labels = None depends_on = None def upgrade(): pass def downgrade(): pass buildbot-3.4.0/master/buildbot/db/migrations/versions/__init__.py000066400000000000000000000000001413250514000251240ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/db/model.py000066400000000000000000001277201413250514000204640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import alembic import alembic.config import sqlalchemy as sa from twisted.internet import defer from twisted.python import log from twisted.python import util from buildbot.db import base from buildbot.db.migrate_utils import test_unicode from buildbot.db.types.json import JsonObject from buildbot.util import sautils class UpgradeFromBefore0p9Error(Exception): def __init__(self): message = """You are trying to upgrade a buildbot 0.8.x master to buildbot 0.9.x or newer. This is not supported. Please start from a clean database http://docs.buildbot.net/latest/manual/upgrading/0.9-upgrade.html""" # Call the base class constructor with the parameters it needs super().__init__(message) class UpgradeFromBefore3p0Error(Exception): def __init__(self): message = """You are trying to upgrade to Buildbot 3.0 or newer from Buildbot 2.x or older. This is only supported via an intermediate upgrade to newest Buildbot 2.10.x that is available. Please first upgrade to 2.10.x and then try to upgrade to this version. http://docs.buildbot.net/latest/manual/upgrading/3.0-upgrade.html""" super().__init__(message) class Model(base.DBConnectorComponent): # # schema # metadata = sa.MetaData() # NOTES # * server_defaults here are included to match those added by the migration # scripts, but they should not be depended on - all code accessing these # tables should supply default values as necessary. The defaults are # required during migration when adding non-nullable columns to existing # tables. # # * dates are stored as unix timestamps (UTC-ish epoch time) # # * sqlalchemy does not handle sa.Boolean very well on MySQL or Postgres; # use sa.SmallInteger instead # # * BuildRequest.canBeCollapsed() depends on buildrequest.id being auto-incremented which is # sqlalchemy default. # Tables related to build requests # -------------------------------- # A BuildRequest is a request for a particular build to be performed. 
Each # BuildRequest is a part of a Buildset. BuildRequests are claimed by # masters, to avoid multiple masters running the same build. buildrequests = sautils.Table( 'buildrequests', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id', ondelete='CASCADE'), nullable=False), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('priority', sa.Integer, nullable=False, server_default=sa.DefaultClause("0")), # if this is zero, then the build is still pending sa.Column('complete', sa.Integer, server_default=sa.DefaultClause("0")), # results is only valid when complete == 1; 0 = SUCCESS, 1 = WARNINGS, # etc - see master/buildbot/status/builder.py sa.Column('results', sa.SmallInteger), # time the buildrequest was created sa.Column('submitted_at', sa.Integer, nullable=False), # time the buildrequest was completed, or NULL sa.Column('complete_at', sa.Integer), # boolean indicating whether there is a step blocking, waiting for this # request to complete sa.Column('waited_for', sa.SmallInteger, server_default=sa.DefaultClause("0")), ) # Each row in this table represents a claimed build request, where the # claim is made by the master referenced by masterid. 
buildrequest_claims = sautils.Table( 'buildrequest_claims', metadata, sa.Column('brid', sa.Integer, sa.ForeignKey('buildrequests.id', ondelete='CASCADE'), nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), index=True, nullable=False), sa.Column('claimed_at', sa.Integer, nullable=False), ) # Tables related to builds # ------------------------ # This table contains the build properties build_properties = sautils.Table( 'build_properties', metadata, sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id', ondelete='CASCADE'), nullable=False), sa.Column('name', sa.String(256), nullable=False), # JSON encoded value sa.Column('value', sa.Text, nullable=False), sa.Column('source', sa.String(256), nullable=False), ) # This table contains transient build state. build_data = sautils.Table( 'build_data', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id', ondelete='CASCADE'), nullable=False), sa.Column('name', sa.String(256), nullable=False), sa.Column('value', sa.LargeBinary().with_variant(sa.dialects.mysql.LONGBLOB, "mysql"), nullable=False), sa.Column('length', sa.Integer, nullable=False), sa.Column('source', sa.String(256), nullable=False), ) # This table contains basic information about each build. builds = sautils.Table( 'builds', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('number', sa.Integer, nullable=False), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), # note that there is 1:N relationship here. # In case of worker loss, build has results RETRY # and buildrequest is unclaimed. # We use use_alter to prevent circular reference # (buildrequests -> buildsets -> builds). 
sa.Column('buildrequestid', sa.Integer, sa.ForeignKey( 'buildrequests.id', use_alter=True, name='buildrequestid', ondelete='CASCADE'), nullable=False), # worker which performed this build # keep nullable to support worker-free builds sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id', ondelete='SET NULL'), nullable=True), # master which controlled this build sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), # start/complete times sa.Column('started_at', sa.Integer, nullable=False), sa.Column('complete_at', sa.Integer), sa.Column('state_string', sa.Text, nullable=False), sa.Column('results', sa.Integer), ) # Tables related to steps # ----------------------- steps = sautils.Table( 'steps', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('number', sa.Integer, nullable=False), sa.Column('name', sa.String(50), nullable=False), sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id', ondelete='CASCADE'), nullable=False), sa.Column('started_at', sa.Integer), sa.Column('complete_at', sa.Integer), sa.Column('state_string', sa.Text, nullable=False), sa.Column('results', sa.Integer), sa.Column('urls_json', sa.Text, nullable=False), sa.Column( 'hidden', sa.SmallInteger, nullable=False, server_default='0'), ) # Tables related to logs # ---------------------- logs = sautils.Table( 'logs', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('name', sa.Text, nullable=False), sa.Column('slug', sa.String(50), nullable=False), sa.Column('stepid', sa.Integer, sa.ForeignKey('steps.id', ondelete='CASCADE'), nullable=False), sa.Column('complete', sa.SmallInteger, nullable=False), sa.Column('num_lines', sa.Integer, nullable=False), # 's' = stdio, 't' = text, 'h' = html, 'd' = deleted sa.Column('type', sa.String(1), nullable=False), ) logchunks = sautils.Table( 'logchunks', metadata, sa.Column('logid', sa.Integer, sa.ForeignKey('logs.id', ondelete='CASCADE'), nullable=False), # 0-based line 
number range in this chunk (inclusive); note that for # HTML logs, this counts lines of HTML, not lines of rendered output sa.Column('first_line', sa.Integer, nullable=False), sa.Column('last_line', sa.Integer, nullable=False), # log contents, including a terminating newline, encoded in utf-8 or, # if 'compressed' is not 0, compressed with gzip, bzip2 or lz4 sa.Column('content', sa.LargeBinary(65536)), sa.Column('compressed', sa.SmallInteger, nullable=False), ) # Tables related to buildsets # --------------------------- # This table contains input properties for buildsets buildset_properties = sautils.Table( 'buildset_properties', metadata, sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id', ondelete='CASCADE'), nullable=False), sa.Column('property_name', sa.String(256), nullable=False), # JSON-encoded tuple of (value, source) sa.Column('property_value', sa.Text, nullable=False), ) # This table represents Buildsets - sets of BuildRequests that share the # same original cause and source information. 
buildsets = sautils.Table( 'buildsets', metadata, sa.Column('id', sa.Integer, primary_key=True), # a simple external identifier to track down this buildset later, e.g., # for try requests sa.Column('external_idstring', sa.String(256)), # a short string giving the reason the buildset was created sa.Column('reason', sa.String(256)), sa.Column('submitted_at', sa.Integer, nullable=False), # if this is zero, then the build set is still pending sa.Column('complete', sa.SmallInteger, nullable=False, server_default=sa.DefaultClause("0")), sa.Column('complete_at', sa.Integer), # results is only valid when complete == 1; 0 = SUCCESS, 1 = WARNINGS, # etc - see master/buildbot/status/builder.py sa.Column('results', sa.SmallInteger), # optional parent build, we use use_alter to prevent circular reference # http://docs.sqlalchemy.org/en/latest/orm/relationships.html#rows-that-point-to-themselves-mutually-dependent-rows sa.Column('parent_buildid', sa.Integer, sa.ForeignKey('builds.id', use_alter=True, name='parent_buildid', ondelete='SET NULL'), nullable=True), # text describing what is the relationship with the build # could be 'triggered from', 'rebuilt from', 'inherited from' sa.Column('parent_relationship', sa.Text), ) # Tables related to change sources # -------------------------------- # The changesources table gives a unique identifier to each ChangeSource. It # also links to other tables used to ensure only one master runs each # changesource changesources = sautils.Table( 'changesources', metadata, sa.Column("id", sa.Integer, primary_key=True), # name for this changesource, as given in the configuration, plus a hash # of that name used for a unique index sa.Column('name', sa.Text, nullable=False), sa.Column('name_hash', sa.String(40), nullable=False), ) # This links changesources to the master where they are running. A changesource # linked to a master that is inactive can be unlinked by any master. 
This # is a separate table so that we can "claim" changesources on a master by # inserting; this has better support in database servers for ensuring that # exactly one claim succeeds. changesource_masters = sautils.Table( 'changesource_masters', metadata, sa.Column('changesourceid', sa.Integer, sa.ForeignKey('changesources.id', ondelete='CASCADE'), nullable=False, primary_key=True), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), ) # Tables related to workers # ------------------------- workers = sautils.Table( "workers", metadata, sa.Column("id", sa.Integer, primary_key=True), sa.Column("name", sa.String(50), nullable=False), sa.Column("info", JsonObject, nullable=False), sa.Column("paused", sa.SmallInteger, nullable=False, server_default="0"), sa.Column("graceful", sa.SmallInteger, nullable=False, server_default="0"), ) # link workers to all builder/master pairs for which they are # configured configured_workers = sautils.Table( 'configured_workers', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('buildermasterid', sa.Integer, sa.ForeignKey('builder_masters.id', ondelete='CASCADE'), nullable=False), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id', ondelete='CASCADE'), nullable=False), ) # link workers to the masters they are currently connected to connected_workers = sautils.Table( 'connected_workers', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id', ondelete='CASCADE'), nullable=False), ) # Tables related to changes # ---------------------------- # Files touched in changes change_files = sautils.Table( 'change_files', metadata, sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), sa.Column('filename', sa.String(1024), 
nullable=False), ) # Properties for changes change_properties = sautils.Table( 'change_properties', metadata, sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), sa.Column('property_name', sa.String(256), nullable=False), # JSON-encoded tuple of (value, source) sa.Column('property_value', sa.Text, nullable=False), ) # users associated with this change; this allows multiple users for # situations where a version-control system can represent both an author # and committer, for example. change_users = sautils.Table( "change_users", metadata, sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), # uid for the author of the change with the given changeid sa.Column('uid', sa.Integer, sa.ForeignKey('users.uid', ondelete='CASCADE'), nullable=False), ) # Changes to the source code, produced by ChangeSources changes = sautils.Table( 'changes', metadata, # changeid also serves as 'change number' sa.Column('changeid', sa.Integer, primary_key=True), # author's name (usually an email address) sa.Column('author', sa.String(255), nullable=False), # committer's name sa.Column('committer', sa.String(255), nullable=True), # commit comment sa.Column('comments', sa.Text, nullable=False), # The branch where this change occurred. When branch is NULL, that # means the main branch (trunk, master, etc.) sa.Column('branch', sa.String(255)), # revision identifier for this change sa.Column('revision', sa.String(255)), # CVS uses NULL sa.Column('revlink', sa.String(256)), # this is the timestamp of the change - it is usually copied from the # version-control system, and may be long in the past or even in the # future! sa.Column('when_timestamp', sa.Integer, nullable=False), # an arbitrary string used for filtering changes sa.Column('category', sa.String(255)), # repository specifies, along with revision and branch, the # source tree in which this change was detected. 
sa.Column('repository', sa.String(length=512), nullable=False, server_default=''), # codebase is a logical name to specify what is in the repository sa.Column('codebase', sa.String(256), nullable=False, server_default=sa.DefaultClause("")), # project names the project this source code represents. It is used # later to filter changes sa.Column('project', sa.String(length=512), nullable=False, server_default=''), # the sourcestamp this change brought the codebase to sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'), nullable=False), # The parent of the change # Even if for the moment there's only 1 parent for a change, we use plural here because # somedays a change will have multiple parent. This way we don't need # to change the API sa.Column('parent_changeids', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='SET NULL'), nullable=True), ) # Tables related to sourcestamps # ------------------------------ # Patches for SourceStamps that were generated through the try mechanism patches = sautils.Table( 'patches', metadata, sa.Column('id', sa.Integer, primary_key=True), # number of directory levels to strip off (patch -pN) sa.Column('patchlevel', sa.Integer, nullable=False), # base64-encoded version of the patch file sa.Column('patch_base64', sa.Text, nullable=False), # patch author, if known sa.Column('patch_author', sa.Text, nullable=False), # patch comment sa.Column('patch_comment', sa.Text, nullable=False), # subdirectory in which the patch should be applied; NULL for top-level sa.Column('subdir', sa.Text), ) # A sourcestamp identifies a particular instance of the source code. # Ideally, this would always be absolute, but in practice source stamps can # also mean "latest" (when revision is NULL), which is of course a # time-dependent definition. 
sourcestamps = sautils.Table( 'sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), # hash of the branch, revision, patchid, repository, codebase, and # project, using hashColumns. sa.Column('ss_hash', sa.String(40), nullable=False), # the branch to check out. When branch is NULL, that means # the main branch (trunk, master, etc.) sa.Column('branch', sa.String(256)), # the revision to check out, or the latest if NULL sa.Column('revision', sa.String(256)), # the patch to apply to generate this source code sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id', ondelete='CASCADE'), nullable=True), # the repository from which this source should be checked out sa.Column('repository', sa.String(length=512), nullable=False, server_default=''), # codebase is a logical name to specify what is in the repository sa.Column('codebase', sa.String(256), nullable=False, server_default=sa.DefaultClause("")), # the project this source code represents sa.Column('project', sa.String(length=512), nullable=False, server_default=''), # the time this sourcetamp was first seen (the first time it was added) sa.Column('created_at', sa.Integer, nullable=False), ) # a many-to-may relationship between buildsets and sourcestamps buildset_sourcestamps = sautils.Table( 'buildset_sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id', ondelete='CASCADE'), nullable=False), sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'), nullable=False), ) # Tables related to schedulers # ---------------------------- # The schedulers table gives a unique identifier to each scheduler. It # also links to other tables used to ensure only one master runs each # scheduler, and to track changes that a scheduler may trigger a build for # later. 
schedulers = sautils.Table( 'schedulers', metadata, sa.Column("id", sa.Integer, primary_key=True), # name for this scheduler, as given in the configuration, plus a hash # of that name used for a unique index sa.Column('name', sa.Text, nullable=False), sa.Column('name_hash', sa.String(40), nullable=False), sa.Column('enabled', sa.SmallInteger, server_default=sa.DefaultClause("1")), ) # This links schedulers to the master where they are running. A scheduler # linked to a master that is inactive can be unlinked by any master. This # is a separate table so that we can "claim" schedulers on a master by # inserting; this has better support in database servers for ensuring that # exactly one claim succeeds. The ID column is present for external users; # see bug #1053. scheduler_masters = sautils.Table( 'scheduler_masters', metadata, sa.Column('schedulerid', sa.Integer, sa.ForeignKey('schedulers.id', ondelete='CASCADE'), nullable=False, primary_key=True), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), ) # This table references "classified" changes that have not yet been # "processed". That is, the scheduler has looked at these changes and # determined that something should be done, but that hasn't happened yet. # Rows are deleted from this table as soon as the scheduler is done with # the change. 
scheduler_changes = sautils.Table( 'scheduler_changes', metadata, sa.Column('schedulerid', sa.Integer, sa.ForeignKey('schedulers.id', ondelete='CASCADE'), nullable=False), sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), # true (nonzero) if this change is important to this scheduler sa.Column('important', sa.Integer), ) # Tables related to builders # -------------------------- builders = sautils.Table( 'builders', metadata, sa.Column('id', sa.Integer, primary_key=True), # builder's name sa.Column('name', sa.Text, nullable=False), # builder's description sa.Column('description', sa.Text, nullable=True), # sha1 of name; used for a unique index sa.Column('name_hash', sa.String(40), nullable=False), ) # This links builders to the master where they are running. A builder # linked to a master that is inactive can be unlinked by any master. Note # that builders can run on multiple masters at the same time. builder_masters = sautils.Table( 'builder_masters', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), ) # Tables related to tags # ---------------------- tags = sautils.Table( 'tags', metadata, sa.Column('id', sa.Integer, primary_key=True), # tag's name sa.Column('name', sa.Text, nullable=False), # sha1 of name; used for a unique index sa.Column('name_hash', sa.String(40), nullable=False), ) # a many-to-may relationship between builders and tags builders_tags = sautils.Table( 'builders_tags', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('tagid', sa.Integer, sa.ForeignKey('tags.id', ondelete='CASCADE'), nullable=False), ) # Tables related to test results # 
------------------------------ # Represents a single test result set. A step can any number of test result sets, # each of which may contain any number of test results. test_result_sets = sautils.Table( 'test_result_sets', metadata, sa.Column('id', sa.Integer, primary_key=True), # In the future we will want to rearrange the underlying data in the database according # to (builderid, buildid) tuple, so that huge number of entries in the table does not # reduce the efficiency of retrieval of data for a particular build. sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id', ondelete='CASCADE'), nullable=False), sa.Column('stepid', sa.Integer, sa.ForeignKey('steps.id', ondelete='CASCADE'), nullable=False), # The free-form description of the source of the test data that represent the test result # set. sa.Column('description', sa.Text, nullable=True), sa.Column('category', sa.Text, nullable=False), sa.Column('value_unit', sa.Text, nullable=False), # The number of passed tests in cases when the pass or fail criteria depends only on how # that single test runs. sa.Column('tests_passed', sa.Integer, nullable=True), # The number of failed tests in cases when the pass or fail criteria depends only on how # that single test runs. sa.Column('tests_failed', sa.Integer, nullable=True), # true when all test results associated with test result set have been generated. sa.Column('complete', sa.SmallInteger, nullable=False), ) # Represents a test result. A single test result set will represent thousands of test results # in any significant codebase that's tested. # # A common table is used for all tests results regardless of what data they carry. Most serious # database engines will be able to optimize nullable fields out, so extra columns are almost # free when not used in such cases. 
test_results = sautils.Table( 'test_results', metadata, sa.Column('id', sa.Integer, primary_key=True), # The builder ID of the test result set that the test result belongs to. # This is included for future partitioning support. sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('test_result_setid', sa.Integer, sa.ForeignKey('test_result_sets.id', ondelete='CASCADE'), nullable=False), sa.Column('test_nameid', sa.Integer, sa.ForeignKey('test_names.id', ondelete='CASCADE'), nullable=True), sa.Column('test_code_pathid', sa.Integer, sa.ForeignKey('test_code_paths.id', ondelete='CASCADE'), nullable=True), # The code line that the test originated from sa.Column('line', sa.Integer, nullable=True), # The duration of the test execution itself sa.Column('duration_ns', sa.Integer, nullable=True), # The result of the test converted to a string. sa.Column('value', sa.Text, nullable=False), ) # Represents the test names of test results. test_names = sautils.Table( 'test_names', metadata, sa.Column('id', sa.Integer, primary_key=True), # The builder ID of the test result set that the test result belongs to. # This is included for future partitioning support and also for querying all test names # for a builder. sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('name', sa.Text, nullable=False), ) # Represents the file paths of test results. test_code_paths = sautils.Table( 'test_code_paths', metadata, sa.Column('id', sa.Integer, primary_key=True), # The builder ID of the test result set that the test result belongs to. # This is included for future partitioning support sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('path', sa.Text, nullable=False), ) # Tables related to objects # ------------------------- # This table uniquely identifies objects that need to maintain state across # invocations. 
objects = sautils.Table( "objects", metadata, # unique ID for this object sa.Column("id", sa.Integer, primary_key=True), # object's user-given name sa.Column('name', sa.String(128), nullable=False), # object's class name, basically representing a "type" for the state sa.Column('class_name', sa.String(128), nullable=False), ) # This table stores key/value pairs for objects, where the key is a string # and the value is a JSON string. object_state = sautils.Table( "object_state", metadata, # object for which this value is set sa.Column('objectid', sa.Integer, sa.ForeignKey('objects.id', ondelete='CASCADE'), nullable=False), # name for this value (local to the object) sa.Column("name", sa.String(length=255), nullable=False), # value, as a JSON string sa.Column("value_json", sa.Text, nullable=False), ) # Tables related to users # ----------------------- # This table identifies individual users, and contains buildbot-specific # information about those users. users = sautils.Table( "users", metadata, # unique user id number sa.Column("uid", sa.Integer, primary_key=True), # identifier (nickname) for this user; used for display sa.Column("identifier", sa.String(255), nullable=False), # username portion of user credentials for authentication sa.Column("bb_username", sa.String(128)), # password portion of user credentials for authentication sa.Column("bb_password", sa.String(128)), ) # This table stores information identifying a user that's related to a # particular interface - a version-control system, status plugin, etc. 
users_info = sautils.Table( "users_info", metadata, # unique user id number sa.Column('uid', sa.Integer, sa.ForeignKey('users.uid', ondelete='CASCADE'), nullable=False), # type of user attribute, such as 'git' sa.Column("attr_type", sa.String(128), nullable=False), # data for given user attribute, such as a commit string or password sa.Column("attr_data", sa.String(128), nullable=False), ) # Tables related to masters # ------------------------- masters = sautils.Table( "masters", metadata, # unique id per master sa.Column('id', sa.Integer, primary_key=True), # master's name (generally in the form hostname:basedir) sa.Column('name', sa.Text, nullable=False), # sha1 of name; used for a unique index sa.Column('name_hash', sa.String(40), nullable=False), # true if this master is running sa.Column('active', sa.Integer, nullable=False), # updated periodically by a running master, so silently failed masters # can be detected by other masters sa.Column('last_active', sa.Integer, nullable=False), ) # Indexes # ------- sa.Index('buildrequests_buildsetid', buildrequests.c.buildsetid) sa.Index('buildrequests_builderid', buildrequests.c.builderid) sa.Index('buildrequests_complete', buildrequests.c.complete) sa.Index('build_properties_buildid', build_properties.c.buildid) sa.Index('build_data_buildid_name', build_data.c.buildid, build_data.c.name, unique=True) sa.Index('builds_buildrequestid', builds.c.buildrequestid) sa.Index('buildsets_complete', buildsets.c.complete) sa.Index('buildsets_submitted_at', buildsets.c.submitted_at) sa.Index('buildset_properties_buildsetid', buildset_properties.c.buildsetid) sa.Index('workers_name', workers.c.name, unique=True) sa.Index('changes_branch', changes.c.branch) sa.Index('changes_revision', changes.c.revision) sa.Index('changes_author', changes.c.author) sa.Index('changes_category', changes.c.category) sa.Index('changes_when_timestamp', changes.c.when_timestamp) sa.Index('change_files_changeid', change_files.c.changeid) 
sa.Index('change_properties_changeid', change_properties.c.changeid) sa.Index('changes_sourcestampid', changes.c.sourcestampid) sa.Index('changesource_name_hash', changesources.c.name_hash, unique=True) sa.Index('scheduler_name_hash', schedulers.c.name_hash, unique=True) sa.Index('scheduler_changes_schedulerid', scheduler_changes.c.schedulerid) sa.Index('scheduler_changes_changeid', scheduler_changes.c.changeid) sa.Index('scheduler_changes_unique', scheduler_changes.c.schedulerid, scheduler_changes.c.changeid, unique=True) sa.Index('builder_name_hash', builders.c.name_hash, unique=True) sa.Index('builder_masters_builderid', builder_masters.c.builderid) sa.Index('builder_masters_masterid', builder_masters.c.masterid) sa.Index('builder_masters_identity', builder_masters.c.builderid, builder_masters.c.masterid, unique=True) sa.Index('tag_name_hash', tags.c.name_hash, unique=True) sa.Index('builders_tags_builderid', builders_tags.c.builderid) sa.Index('builders_tags_unique', builders_tags.c.builderid, builders_tags.c.tagid, unique=True) sa.Index('configured_workers_buildmasterid', configured_workers.c.buildermasterid) sa.Index('configured_workers_workers', configured_workers.c.workerid) sa.Index('configured_workers_identity', configured_workers.c.buildermasterid, configured_workers.c.workerid, unique=True) sa.Index('connected_workers_masterid', connected_workers.c.masterid) sa.Index('connected_workers_workers', connected_workers.c.workerid) sa.Index('connected_workers_identity', connected_workers.c.masterid, connected_workers.c.workerid, unique=True) sa.Index('users_identifier', users.c.identifier, unique=True) sa.Index('users_info_uid', users_info.c.uid) sa.Index('users_info_uid_attr_type', users_info.c.uid, users_info.c.attr_type, unique=True) sa.Index('users_info_attrs', users_info.c.attr_type, users_info.c.attr_data, unique=True) sa.Index('change_users_changeid', change_users.c.changeid) sa.Index('users_bb_user', users.c.bb_username, unique=True) 
sa.Index('object_identity', objects.c.name, objects.c.class_name, unique=True) sa.Index('name_per_object', object_state.c.objectid, object_state.c.name, unique=True) sa.Index('master_name_hashes', masters.c.name_hash, unique=True) sa.Index('buildrequest_claims_brids', buildrequest_claims.c.brid, unique=True) sa.Index('sourcestamps_ss_hash_key', sourcestamps.c.ss_hash, unique=True) sa.Index('buildset_sourcestamps_buildsetid', buildset_sourcestamps.c.buildsetid) sa.Index('buildset_sourcestamps_unique', buildset_sourcestamps.c.buildsetid, buildset_sourcestamps.c.sourcestampid, unique=True) sa.Index('builds_number', builds.c.builderid, builds.c.number, unique=True) sa.Index('builds_workerid', builds.c.workerid) sa.Index('builds_masterid', builds.c.masterid) sa.Index('steps_number', steps.c.buildid, steps.c.number, unique=True) sa.Index('steps_name', steps.c.buildid, steps.c.name, unique=True) sa.Index('steps_started_at', steps.c.started_at) sa.Index('logs_slug', logs.c.stepid, logs.c.slug, unique=True) sa.Index('logchunks_firstline', logchunks.c.logid, logchunks.c.first_line) sa.Index('logchunks_lastline', logchunks.c.logid, logchunks.c.last_line) sa.Index('test_names_name', test_names.c.builderid, test_names.c.name, mysql_length={'name': 255}) sa.Index('test_code_paths_path', test_code_paths.c.builderid, test_code_paths.c.path, mysql_length={'path': 255}) # MySQL creates indexes for foreign keys, and these appear in the # reflection. 
This is a list of (table, index) names that should be # expected on this platform implied_indexes = [ ('change_users', dict(unique=False, column_names=['uid'], name='uid')), ('sourcestamps', dict(unique=False, column_names=['patchid'], name='patchid')), ('scheduler_masters', dict(unique=False, column_names=['masterid'], name='masterid')), ('changesource_masters', dict(unique=False, column_names=['masterid'], name='masterid')), ('buildset_sourcestamps', dict(unique=False, column_names=['sourcestampid'], name='sourcestampid')), ('buildsets', dict(unique=False, column_names=['parent_buildid'], name='parent_buildid')), ('builders_tags', dict(unique=False, column_names=['tagid'], name='tagid')), ('changes', dict(unique=False, column_names=['parent_changeids'], name='parent_changeids')), ('test_result_sets', { 'name': 'builderid', 'column_names': ['builderid'], 'unique': False, }), ('test_result_sets', { 'name': 'buildid', 'column_names': ['buildid'], 'unique': False, }), ('test_result_sets', { 'name': 'stepid', 'column_names': ['stepid'], 'unique': False, }), ('test_results', { 'name': 'test_result_setid', 'column_names': ['test_result_setid'], 'unique': False, }), ('test_results', { 'name': 'test_code_pathid', 'column_names': ['test_code_pathid'], 'unique': False, }), ('test_results', { 'name': 'builderid', 'column_names': ['builderid'], 'unique': False, }), ('test_results', { 'name': 'test_nameid', 'column_names': ['test_nameid'], 'unique': False, }), ] # Migration support # ----------------- # Buildbot has historically used 3 database migration systems: # - homegrown system that used "version" table to track versions # - SQLAlchemy-migrate that used "migrate_version" table to track versions # - alembic that uses "alembic_version" table to track versions (current) # We need to detect each case and tell the user how to upgrade. 
config_path = util.sibpath(__file__, "migrations/alembic.ini") def table_exists(self, conn, table): try: r = conn.execute(f"select * from {table} limit 1") r.close() return True except Exception: return False def migrate_get_version(self, conn): r = conn.execute("select version from migrate_version limit 1") version = r.scalar() r.close() return version def alembic_get_scripts(self): alembic_config = alembic.config.Config(self.config_path) return alembic.script.ScriptDirectory.from_config(alembic_config) def alembic_stamp(self, conn, alembic_scripts, revision): context = alembic.runtime.migration.MigrationContext.configure(conn) context.stamp(alembic_scripts, revision) @defer.inlineCallbacks def is_current(self): def thd(conn): if not self.table_exists(conn, 'alembic_version'): return False alembic_scripts = self.alembic_get_scripts() current_script_rev_head = alembic_scripts.get_current_head() context = alembic.runtime.migration.MigrationContext.configure(conn) current_rev = context.get_current_revision() return current_rev == current_script_rev_head ret = yield self.db.pool.do(thd) return ret # returns a Deferred that returns None def create(self): # this is nice and simple, but used only for tests def thd(engine): self.metadata.create_all(bind=engine) return self.db.pool.do_with_engine(thd) @defer.inlineCallbacks def upgrade(self): # the upgrade process must run in a db thread def thd(conn): alembic_scripts = self.alembic_get_scripts() current_script_rev_head = alembic_scripts.get_current_head() if self.table_exists(conn, 'version'): raise UpgradeFromBefore0p9Error() if self.table_exists(conn, 'migrate_version'): version = self.migrate_get_version(conn) if version < 40: raise UpgradeFromBefore0p9Error() last_sqlalchemy_migrate_version = 58 if version != last_sqlalchemy_migrate_version: raise UpgradeFromBefore3p0Error() self.alembic_stamp(conn, alembic_scripts, alembic_scripts.get_base()) conn.execute('drop table migrate_version') if not self.table_exists(conn, 
'alembic_version'): log.msg("Initializing empty database") # Do some tests first test_unicode(conn) Model.metadata.create_all(conn) self.alembic_stamp(conn, alembic_scripts, current_script_rev_head) return context = alembic.runtime.migration.MigrationContext.configure(conn) current_rev = context.get_current_revision() if current_rev == current_script_rev_head: log.msg('Upgrading database: the current database schema is already the newest') return log.msg('Upgrading database') with sautils.withoutSqliteForeignKeys(conn): with context.begin_transaction(): context.run_migrations() log.msg('Upgrading database: done') yield self.db.pool.do(thd) buildbot-3.4.0/master/buildbot/db/pool.py000066400000000000000000000230711413250514000203270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import inspect import sqlite3 import time import traceback import sqlalchemy as sa from twisted.internet import defer from twisted.internet import threads from twisted.python import log from twisted.python import threadpool from buildbot.db.buildrequests import AlreadyClaimedError from buildbot.db.buildsets import AlreadyCompleteError from buildbot.db.changesources import ChangeSourceAlreadyClaimedError from buildbot.db.schedulers import SchedulerAlreadyClaimedError from buildbot.process import metrics # set this to True for *very* verbose query debugging output; this can # be monkey-patched from master.cfg, too: # from buildbot.db import pool # pool.debug = True debug = False _debug_id = 1 def timed_do_fn(f): """Decorate a do function to log before, after, and elapsed time, with the name of the calling function. This is not speedy!""" def wrap(callable, *args, **kwargs): global _debug_id # get a description of the function that called us st = traceback.extract_stack(limit=2) file, line, name, _ = st[0] # and its locals frame = inspect.currentframe() locals = frame.f_locals # invent a unique ID for the description id, _debug_id = _debug_id, _debug_id + 1 descr = "%s-%08x" % (name, id) start_time = time.time() log.msg("{} - before ('{}' line {})".format(descr, file, line)) for name in locals: if name in ('self', 'thd'): continue log.msg("{} - {} = {}".format(descr, name, repr(locals[name]))) # wrap the callable to log the begin and end of the actual thread # function def callable_wrap(*args, **kargs): log.msg("{} - thd start".format(descr)) try: return callable(*args, **kwargs) finally: log.msg("{} - thd end".format(descr)) d = f(callable_wrap, *args, **kwargs) @d.addBoth def after(x): end_time = time.time() elapsed = (end_time - start_time) * 1000 log.msg("%s - after (%0.2f ms elapsed)" % (descr, elapsed)) return x return d wrap.__name__ = f.__name__ wrap.__doc__ = f.__doc__ return wrap class DBThreadPool: running = False def 
__init__(self, engine, reactor, verbose=False): # verbose is used by upgrade scripts, and if it is set we should print # messages about versions and other warnings log_msg = log.msg if verbose: def _log_msg(m): print(m) log_msg = _log_msg self.reactor = reactor pool_size = 5 # If the engine has an C{optimal_thread_pool_size} attribute, then the # maxthreads of the thread pool will be set to that value. This is # most useful for SQLite in-memory connections, where exactly one # connection (and thus thread) should be used. if hasattr(engine, 'optimal_thread_pool_size'): pool_size = engine.optimal_thread_pool_size self._pool = threadpool.ThreadPool(minthreads=1, maxthreads=pool_size, name='DBThreadPool') self.engine = engine if engine.dialect.name == 'sqlite': vers = self.get_sqlite_version() if vers < (3, 7): log_msg("Using SQLite Version {}".format(vers)) log_msg("NOTE: this old version of SQLite does not support " "WAL journal mode; a busy master may encounter " "'Database is locked' errors. 
Consider upgrading.") if vers < (3, 6, 19): log_msg("NOTE: this old version of SQLite is not " "supported.") raise RuntimeError("unsupported SQLite version") self._start_evt = self.reactor.callWhenRunning(self._start) # patch the do methods to do verbose logging if necessary if debug: self.do = timed_do_fn(self.do) self.do_with_engine = timed_do_fn(self.do_with_engine) self.forbidded_callable_return_type = self.get_sqlalchemy_result_type() def get_sqlalchemy_result_type(self): try: from sqlalchemy.engine import ResultProxy # sqlalchemy 1.x - 1.3 return ResultProxy except ImportError: pass try: from sqlalchemy.engine import Result # sqlalchemy 1.4 and newer return Result except ImportError: pass raise ImportError("Could not import SQLAlchemy result type") def _start(self): self._start_evt = None if not self.running: self._pool.start() self._stop_evt = self.reactor.addSystemEventTrigger( 'during', 'shutdown', self._stop_nowait) self.running = True def _stop_nowait(self): self._stop_evt = None threads.deferToThreadPool(self.reactor, self._pool, self.engine.dispose) self._pool.stop() self.running = False @defer.inlineCallbacks def _stop(self): self._stop_evt = None yield threads.deferToThreadPool(self.reactor, self._pool, self.engine.dispose) self._pool.stop() self.running = False @defer.inlineCallbacks def shutdown(self): """Manually stop the pool. This is only necessary from tests, as the pool will stop itself when the reactor stops under normal circumstances.""" if not self._stop_evt: return # pool is already stopped self.reactor.removeSystemEventTrigger(self._stop_evt) yield self._stop() # Try about 170 times over the space of a day, with the last few tries # being about an hour apart. 
This is designed to span a reasonable amount # of time for repairing a broken database server, while still failing # actual problematic queries eventually BACKOFF_START = 1.0 BACKOFF_MULT = 1.05 MAX_OPERATIONALERROR_TIME = 3600 * 24 # one day def __thd(self, with_engine, callable, args, kwargs): # try to call callable(arg, *args, **kwargs) repeatedly until no # OperationalErrors occur, where arg is either the engine (with_engine) # or a connection (not with_engine) backoff = self.BACKOFF_START start = time.time() while True: if with_engine: arg = self.engine else: arg = self.engine.connect() try: try: rv = callable(arg, *args, **kwargs) assert not isinstance(rv, self.forbidded_callable_return_type), \ "do not return ResultProxy objects!" except sa.exc.OperationalError as e: if not self.engine.should_retry(e): log.err(e, 'Got fatal OperationalError on DB') raise elapsed = time.time() - start if elapsed > self.MAX_OPERATIONALERROR_TIME: log.err(e, ('Raising due to {0} seconds delay on DB ' 'query retries'.format(self.MAX_OPERATIONALERROR_TIME))) raise metrics.MetricCountEvent.log( "DBThreadPool.retry-on-OperationalError") # sleep (remember, we're in a thread..) 
time.sleep(backoff) backoff *= self.BACKOFF_MULT # and re-try log.err(e, 'retrying {} after sql error {}'.format(callable, e)) continue except Exception as e: # AlreadyClaimedError are normal especially in a multimaster # configuration if not isinstance(e, (AlreadyClaimedError, ChangeSourceAlreadyClaimedError, SchedulerAlreadyClaimedError, AlreadyCompleteError)): log.err(e, 'Got fatal Exception on DB') raise finally: if not with_engine: arg.close() break return rv @defer.inlineCallbacks def do(self, callable, *args, **kwargs): ret = yield threads.deferToThreadPool(self.reactor, self._pool, self.__thd, False, callable, args, kwargs) return ret @defer.inlineCallbacks def do_with_engine(self, callable, *args, **kwargs): ret = yield threads.deferToThreadPool(self.reactor, self._pool, self.__thd, True, callable, args, kwargs) return ret def get_sqlite_version(self): return sqlite3.sqlite_version_info buildbot-3.4.0/master/buildbot/db/schedulers.py000066400000000000000000000201271413250514000215160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa import sqlalchemy.exc from twisted.internet import defer from buildbot.db import NULL from buildbot.db import base class SchedulerAlreadyClaimedError(Exception): pass class SchedulersConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst # returns a Deferred that returns None def enable(self, schedulerid, v): def thd(conn): tbl = self.db.model.schedulers q = tbl.update(whereclause=(tbl.c.id == schedulerid)) conn.execute(q, enabled=int(v)) return self.db.pool.do(thd) # returns a Deferred that returns None def classifyChanges(self, schedulerid, classifications): def thd(conn): tbl = self.db.model.scheduler_changes ins_q = tbl.insert() upd_q = tbl.update( ((tbl.c.schedulerid == schedulerid) & (tbl.c.changeid == sa.bindparam('wc_changeid')))) for changeid, important in classifications.items(): transaction = conn.begin() # convert the 'important' value into an integer, since that # is the column type imp_int = int(bool(important)) try: conn.execute(ins_q, schedulerid=schedulerid, changeid=changeid, important=imp_int).close() except (sqlalchemy.exc.ProgrammingError, sqlalchemy.exc.IntegrityError): transaction.rollback() transaction = conn.begin() # insert failed, so try an update conn.execute(upd_q, wc_changeid=changeid, important=imp_int).close() transaction.commit() return self.db.pool.do(thd) # returns a Deferred that returns None def flushChangeClassifications(self, schedulerid, less_than=None): def thd(conn): sch_ch_tbl = self.db.model.scheduler_changes wc = (sch_ch_tbl.c.schedulerid == schedulerid) if less_than is not None: wc = wc & (sch_ch_tbl.c.changeid < less_than) q = sch_ch_tbl.delete(whereclause=wc) conn.execute(q).close() return self.db.pool.do(thd) # returns a Deferred that returns a value def getChangeClassifications(self, schedulerid, branch=-1, repository=-1, project=-1, codebase=-1): # -1 here stands for "argument not given", since None has meaning # as a branch 
def thd(conn): sch_ch_tbl = self.db.model.scheduler_changes ch_tbl = self.db.model.changes wc = (sch_ch_tbl.c.schedulerid == schedulerid) # may need to filter further based on branch, etc extra_wheres = [] if branch != -1: extra_wheres.append(ch_tbl.c.branch == branch) if repository != -1: extra_wheres.append(ch_tbl.c.repository == repository) if project != -1: extra_wheres.append(ch_tbl.c.project == project) if codebase != -1: extra_wheres.append(ch_tbl.c.codebase == codebase) # if we need to filter further append those, as well as a join # on changeid (but just once for that one) if extra_wheres: wc &= (sch_ch_tbl.c.changeid == ch_tbl.c.changeid) for w in extra_wheres: wc &= w q = sa.select( [sch_ch_tbl.c.changeid, sch_ch_tbl.c.important], whereclause=wc) return {r.changeid: [False, True][r.important] for r in conn.execute(q)} return self.db.pool.do(thd) def findSchedulerId(self, name): tbl = self.db.model.schedulers name_hash = self.hashColumns(name) return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name_hash == name_hash), insert_values=dict( name=name, name_hash=name_hash, )) # returns a Deferred that returns None def setSchedulerMaster(self, schedulerid, masterid): def thd(conn): sch_mst_tbl = self.db.model.scheduler_masters # handle the masterid=None case to get it out of the way if masterid is None: q = sch_mst_tbl.delete( whereclause=(sch_mst_tbl.c.schedulerid == schedulerid)) conn.execute(q).close() return None # try a blind insert.. try: q = sch_mst_tbl.insert() conn.execute(q, dict(schedulerid=schedulerid, masterid=masterid)).close() except (sa.exc.IntegrityError, sa.exc.ProgrammingError) as e: # someone already owns this scheduler, but who? 
join = self.db.model.masters.outerjoin( sch_mst_tbl, (self.db.model.masters.c.id == sch_mst_tbl.c.masterid)) q = sa.select([self.db.model.masters.c.name, sch_mst_tbl.c.masterid], from_obj=join, whereclause=( sch_mst_tbl.c.schedulerid == schedulerid)) row = conn.execute(q).fetchone() # ok, that was us, so we just do nothing if row['masterid'] == masterid: return None raise SchedulerAlreadyClaimedError( "already claimed by {}".format(row['name'])) from e return None return self.db.pool.do(thd) @defer.inlineCallbacks def getScheduler(self, schedulerid): sch = yield self.getSchedulers(_schedulerid=schedulerid) if sch: return sch[0] return None # returns a Deferred that returns a value def getSchedulers(self, active=None, masterid=None, _schedulerid=None): def thd(conn): sch_tbl = self.db.model.schedulers sch_mst_tbl = self.db.model.scheduler_masters # handle the trivial case of masterid=xx and active=False if masterid is not None and active is not None and not active: return [] join = sch_tbl.outerjoin(sch_mst_tbl, (sch_tbl.c.id == sch_mst_tbl.c.schedulerid)) # if we're given a _schedulerid, select only that row wc = None if _schedulerid: wc = (sch_tbl.c.id == _schedulerid) else: # otherwise, filter with active, if necessary if masterid is not None: wc = (sch_mst_tbl.c.masterid == masterid) elif active: wc = (sch_mst_tbl.c.masterid != NULL) elif active is not None: wc = (sch_mst_tbl.c.masterid == NULL) q = sa.select([sch_tbl.c.id, sch_tbl.c.name, sch_tbl.c.enabled, sch_mst_tbl.c.masterid], from_obj=join, whereclause=wc) return [dict(id=row.id, name=row.name, enabled=bool(row.enabled), masterid=row.masterid) for row in conn.execute(q).fetchall()] return self.db.pool.do(thd) buildbot-3.4.0/master/buildbot/db/sourcestamps.py000066400000000000000000000161661413250514000221150ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import base64 import sqlalchemy as sa from twisted.internet import defer from twisted.python import log from buildbot.db import base from buildbot.util import bytes2unicode from buildbot.util import epoch2datetime from buildbot.util import unicode2bytes class SsDict(dict): pass class SsList(list): pass class SourceStampsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/database.rst @defer.inlineCallbacks def findSourceStampId(self, branch=None, revision=None, repository=None, project=None, codebase=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None): sourcestampid, _ = yield self.findOrCreateId( branch, revision, repository, project, codebase, patch_body, patch_level, patch_author, patch_comment, patch_subdir) return sourcestampid @defer.inlineCallbacks def findOrCreateId(self, branch=None, revision=None, repository=None, project=None, codebase=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None): tbl = self.db.model.sourcestamps assert codebase is not None, "codebase cannot be None" assert project is not None, "project cannot be None" assert repository is not None, "repository cannot be None" self.checkLength(tbl.c.branch, branch) self.checkLength(tbl.c.revision, revision) 
self.checkLength(tbl.c.repository, repository) self.checkLength(tbl.c.project, project) # get a patchid, if we have a patch def thd(conn): patchid = None if patch_body: patch_body_bytes = unicode2bytes(patch_body) patch_base64_bytes = base64.b64encode(patch_body_bytes) ins = self.db.model.patches.insert() r = conn.execute(ins, dict( patchlevel=patch_level, patch_base64=bytes2unicode(patch_base64_bytes), patch_author=patch_author, patch_comment=patch_comment, subdir=patch_subdir)) patchid = r.inserted_primary_key[0] return patchid patchid = yield self.db.pool.do(thd) ss_hash = self.hashColumns(branch, revision, repository, project, codebase, patchid) sourcestampid, found = yield self.findOrCreateSomethingId( tbl=tbl, whereclause=tbl.c.ss_hash == ss_hash, insert_values={ 'branch': branch, 'revision': revision, 'repository': repository, 'codebase': codebase, 'project': project, 'patchid': patchid, 'ss_hash': ss_hash, 'created_at': int(self.master.reactor.seconds()), }) return sourcestampid, found # returns a Deferred that returns a value @base.cached("ssdicts") def getSourceStamp(self, ssid): def thd(conn): tbl = self.db.model.sourcestamps q = tbl.select(whereclause=(tbl.c.id == ssid)) res = conn.execute(q) row = res.fetchone() if not row: return None ssdict = self._rowToSsdict_thd(conn, row) res.close() return ssdict return self.db.pool.do(thd) # returns a Deferred that returns a value def getSourceStampsForBuild(self, buildid): assert buildid > 0 def thd(conn): # Get SourceStamps for the build builds_tbl = self.db.model.builds reqs_tbl = self.db.model.buildrequests bsets_tbl = self.db.model.buildsets bsss_tbl = self.db.model.buildset_sourcestamps sstamps_tbl = self.db.model.sourcestamps from_clause = builds_tbl.join(reqs_tbl, builds_tbl.c.buildrequestid == reqs_tbl.c.id) from_clause = from_clause.join(bsets_tbl, reqs_tbl.c.buildsetid == bsets_tbl.c.id) from_clause = from_clause.join(bsss_tbl, bsets_tbl.c.id == bsss_tbl.c.buildsetid) from_clause = 
from_clause.join(sstamps_tbl, bsss_tbl.c.sourcestampid == sstamps_tbl.c.id) q = sa.select([sstamps_tbl]).select_from( from_clause).where(builds_tbl.c.id == buildid) res = conn.execute(q) return [self._rowToSsdict_thd(conn, row) for row in res.fetchall()] return self.db.pool.do(thd) # returns a Deferred that returns a value def getSourceStamps(self): def thd(conn): tbl = self.db.model.sourcestamps q = tbl.select() res = conn.execute(q) return [self._rowToSsdict_thd(conn, row) for row in res.fetchall()] return self.db.pool.do(thd) def _rowToSsdict_thd(self, conn, row): ssid = row.id ssdict = SsDict(ssid=ssid, branch=row.branch, revision=row.revision, patchid=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None, repository=row.repository, codebase=row.codebase, project=row.project, created_at=epoch2datetime(row.created_at)) patchid = row.patchid # fetch the patch, if necessary if patchid is not None: tbl = self.db.model.patches q = tbl.select(whereclause=(tbl.c.id == patchid)) res = conn.execute(q) row = res.fetchone() if row: # note the subtle renaming here ssdict['patchid'] = patchid ssdict['patch_level'] = row.patchlevel ssdict['patch_subdir'] = row.subdir ssdict['patch_author'] = row.patch_author ssdict['patch_comment'] = row.patch_comment ssdict['patch_body'] = base64.b64decode(row.patch_base64) else: log.msg('patchid %d, referenced from ssid %d, not found' % (patchid, ssid)) res.close() return ssdict buildbot-3.4.0/master/buildbot/db/state.py000066400000000000000000000152271413250514000205020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import sqlalchemy as sa import sqlalchemy.exc from buildbot.db import base class _IdNotFoundError(Exception): pass # used internally class ObjDict(dict): pass class StateConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst def getObjectId(self, name, class_name): # defer to a cached method that only takes one parameter (a tuple) d = self._getObjectId((name, class_name)) d.addCallback(lambda objdict: objdict['id']) return d # returns a Deferred that returns a value @base.cached('objectids') def _getObjectId(self, name_class_name_tuple): name, class_name = name_class_name_tuple def thd(conn): return self.thdGetObjectId(conn, name, class_name) return self.db.pool.do(thd) def thdGetObjectId(self, conn, name, class_name): objects_tbl = self.db.model.objects name = self.ensureLength(objects_tbl.c.name, name) self.checkLength(objects_tbl.c.class_name, class_name) def select(): q = sa.select([objects_tbl.c.id], whereclause=((objects_tbl.c.name == name) & (objects_tbl.c.class_name == class_name))) res = conn.execute(q) row = res.fetchone() res.close() if not row: raise _IdNotFoundError return row.id def insert(): res = conn.execute(objects_tbl.insert(), name=name, class_name=class_name) return res.inserted_primary_key[0] # we want to try selecting, then inserting, but if the insert fails # then try selecting again. 
We include an invocation of a hook # method to allow tests to exercise this particular behavior try: return ObjDict(id=select()) except _IdNotFoundError: pass self._test_timing_hook(conn) try: return ObjDict(id=insert()) except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.ProgrammingError): pass return ObjDict(id=select()) class Thunk: pass # returns a Deferred that returns a value def getState(self, objectid, name, default=Thunk): def thd(conn): return self.thdGetState(conn, objectid, name, default=default) return self.db.pool.do(thd) def thdGetState(self, conn, objectid, name, default=Thunk): object_state_tbl = self.db.model.object_state q = sa.select([object_state_tbl.c.value_json], whereclause=((object_state_tbl.c.objectid == objectid) & (object_state_tbl.c.name == name))) res = conn.execute(q) row = res.fetchone() res.close() if not row: if default is self.Thunk: raise KeyError("no such state value '{}' for object {}".format(name, objectid)) return default try: return json.loads(row.value_json) except ValueError as e: raise TypeError("JSON error loading state value '{}' for {}".format( name, objectid)) from e # returns a Deferred that returns a value def setState(self, objectid, name, value): def thd(conn): return self.thdSetState(conn, objectid, name, value) return self.db.pool.do(thd) def thdSetState(self, conn, objectid, name, value): object_state_tbl = self.db.model.object_state try: value_json = json.dumps(value) except (TypeError, ValueError) as e: raise TypeError("Error encoding JSON for %r" % (value,)) from e name = self.ensureLength(object_state_tbl.c.name, name) def update(): q = object_state_tbl.update( whereclause=((object_state_tbl.c.objectid == objectid) & (object_state_tbl.c.name == name))) res = conn.execute(q, value_json=value_json) # check whether that worked return res.rowcount > 0 def insert(): conn.execute(object_state_tbl.insert(), objectid=objectid, name=name, value_json=value_json) # try updating; if that fails, try inserting; if that 
fails, then # we raced with another instance to insert, so let that instance # win. if update(): return self._test_timing_hook(conn) try: insert() except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.ProgrammingError): pass # someone beat us to it - oh well def _test_timing_hook(self, conn): # called so tests can simulate another process inserting a database row # at an inopportune moment pass # returns a Deferred that returns a value def atomicCreateState(self, objectid, name, thd_create_callback): def thd(conn): object_state_tbl = self.db.model.object_state res = self.thdGetState(conn, objectid, name, default=None) if res is None: res = thd_create_callback() try: value_json = json.dumps(res) except (TypeError, ValueError) as e: raise TypeError("Error encoding JSON for %r" % (res,)) from e self._test_timing_hook(conn) try: conn.execute(object_state_tbl.insert(), objectid=objectid, name=name, value_json=value_json) except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.ProgrammingError): # someone beat us to it - oh well return that value return self.thdGetState(conn, objectid, name) return res return self.db.pool.do(thd) buildbot-3.4.0/master/buildbot/db/steps.py000066400000000000000000000160701413250514000205150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import sqlalchemy as sa from twisted.internet import defer from buildbot.db import base from buildbot.util import epoch2datetime class StepsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst url_lock = None @defer.inlineCallbacks def getStep(self, stepid=None, buildid=None, number=None, name=None): tbl = self.db.model.steps if stepid is not None: wc = (tbl.c.id == stepid) else: if buildid is None: raise RuntimeError('must supply either stepid or buildid') if number is not None: wc = (tbl.c.number == number) elif name is not None: wc = (tbl.c.name == name) else: raise RuntimeError('must supply either number or name') wc = wc & (tbl.c.buildid == buildid) def thd(conn): q = self.db.model.steps.select(whereclause=wc) res = conn.execute(q) row = res.fetchone() rv = None if row: rv = self._stepdictFromRow(row) res.close() return rv return (yield self.db.pool.do(thd)) # returns a Deferred that returns a value def getSteps(self, buildid): def thd(conn): tbl = self.db.model.steps q = tbl.select() q = q.where(tbl.c.buildid == buildid) q = q.order_by(tbl.c.number) res = conn.execute(q) return [self._stepdictFromRow(row) for row in res.fetchall()] return self.db.pool.do(thd) # returns a Deferred that returns a value def addStep(self, buildid, name, state_string): def thd(conn): tbl = self.db.model.steps # get the highest current number r = conn.execute(sa.select([sa.func.max(tbl.c.number)], whereclause=(tbl.c.buildid == buildid))) number = r.scalar() number = 0 if number is None else number + 1 # note that there is no chance for a race condition here, # since only one master is inserting steps. If there is a # conflict, then the name is likely already taken. 
insert_row = dict(buildid=buildid, number=number, started_at=None, complete_at=None, state_string=state_string, urls_json='[]', name=name) try: r = conn.execute(self.db.model.steps.insert(), insert_row) got_id = r.inserted_primary_key[0] except (sa.exc.IntegrityError, sa.exc.ProgrammingError): got_id = None if got_id: return (got_id, number, name) # we didn't get an id, so calculate a unique name and use that # instead. Because names are truncated at the right to fit in a # 50-character identifier, this isn't a simple query. res = conn.execute(sa.select([tbl.c.name], whereclause=((tbl.c.buildid == buildid)))) names = {row[0] for row in res} num = 1 while True: numstr = '_%d' % num newname = name[:50 - len(numstr)] + numstr if newname not in names: break num += 1 insert_row['name'] = newname r = conn.execute(self.db.model.steps.insert(), insert_row) got_id = r.inserted_primary_key[0] return (got_id, number, newname) return self.db.pool.do(thd) @defer.inlineCallbacks def startStep(self, stepid): started_at = int(self.master.reactor.seconds()) def thd(conn): tbl = self.db.model.steps q = tbl.update(whereclause=(tbl.c.id == stepid)) conn.execute(q, started_at=started_at) yield self.db.pool.do(thd) # returns a Deferred that returns None def setStepStateString(self, stepid, state_string): def thd(conn): tbl = self.db.model.steps q = tbl.update(whereclause=(tbl.c.id == stepid)) conn.execute(q, state_string=state_string) return self.db.pool.do(thd) def addURL(self, stepid, name, url, _racehook=None): # This methods adds an URL to the db # This is a read modify write and thus there is a possibility # that several urls are added at the same time (e.g with a deferredlist # at the end of a step) # this race condition is only inside the same master, as only one master # is supposed to add urls to a buildstep. # so threading.lock is used, as we are in the thread pool if self.url_lock is None: # this runs in reactor thread, so no race here.. 
self.url_lock = defer.DeferredLock() def thd(conn): tbl = self.db.model.steps wc = (tbl.c.id == stepid) q = sa.select([tbl.c.urls_json], whereclause=wc) res = conn.execute(q) row = res.fetchone() if _racehook is not None: _racehook() urls = json.loads(row.urls_json) url_item = dict(name=name, url=url) if url_item not in urls: urls.append(url_item) q = tbl.update(whereclause=wc) conn.execute(q, urls_json=json.dumps(urls)) return self.url_lock.run(lambda: self.db.pool.do(thd)) # returns a Deferred that returns None def finishStep(self, stepid, results, hidden): def thd(conn): tbl = self.db.model.steps q = tbl.update(whereclause=(tbl.c.id == stepid)) conn.execute(q, complete_at=int(self.master.reactor.seconds()), results=results, hidden=1 if hidden else 0) return self.db.pool.do(thd) def _stepdictFromRow(self, row): return dict( id=row.id, number=row.number, name=row.name, buildid=row.buildid, started_at=epoch2datetime(row.started_at), complete_at=epoch2datetime(row.complete_at), state_string=row.state_string, results=row.results, urls=json.loads(row.urls_json), hidden=bool(row.hidden)) buildbot-3.4.0/master/buildbot/db/tags.py000066400000000000000000000021001413250514000203020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.db import base class TagsConnectorComponent(base.DBConnectorComponent): def findTagId(self, name): tbl = self.db.model.tags return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name == name), insert_values=dict( name=name, name_hash=self.hashColumns(name), )) buildbot-3.4.0/master/buildbot/db/test_result_sets.py000066400000000000000000000110521413250514000227650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.db import base class TestResultSetDict(dict): pass class TestResultSetAlreadyCompleted(Exception): pass class TestResultSetsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst @defer.inlineCallbacks def addTestResultSet(self, builderid, buildid, stepid, description, category, value_unit): # Returns the id of the new result set def thd(conn): sets_table = self.db.model.test_result_sets insert_values = { 'builderid': builderid, 'buildid': buildid, 'stepid': stepid, 'description': description, 'category': category, 'value_unit': value_unit, 'complete': 0 } q = sets_table.insert().values(insert_values) r = conn.execute(q) return r.inserted_primary_key[0] res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def getTestResultSet(self, test_result_setid): def thd(conn): sets_table = self.db.model.test_result_sets q = sets_table.select().where(sets_table.c.id == test_result_setid) res = conn.execute(q) row = res.fetchone() if not row: return None return self._thd_row2dict(conn, row) res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def getTestResultSets(self, builderid, buildid=None, stepid=None, complete=None, result_spec=None): def thd(conn): sets_table = self.db.model.test_result_sets q = sets_table.select().where(sets_table.c.builderid == builderid) if buildid is not None: q = q.where(sets_table.c.buildid == buildid) if stepid is not None: q = q.where(sets_table.c.stepid == stepid) if complete is not None: q = q.where(sets_table.c.complete == (1 if complete else 0)) if result_spec is not None: return result_spec.thd_execute(conn, q, lambda x: self._thd_row2dict(conn, x)) res = conn.execute(q) return [self._thd_row2dict(conn, row) for row in res.fetchall()] res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def completeTestResultSet(self, test_result_setid, tests_passed=None, tests_failed=None): def 
thd(conn): sets_table = self.db.model.test_result_sets values = {'complete': 1} if tests_passed is not None: values['tests_passed'] = tests_passed if tests_failed is not None: values['tests_failed'] = tests_failed q = sets_table.update().values(values) q = q.where((sets_table.c.id == test_result_setid) & (sets_table.c.complete == 0)) res = conn.execute(q) if res.rowcount == 0: raise TestResultSetAlreadyCompleted(('Test result set {} is already completed ' 'or does not exist').format(test_result_setid)) yield self.db.pool.do(thd) def _thd_row2dict(self, conn, row): return TestResultSetDict(id=row.id, builderid=row.builderid, buildid=row.buildid, stepid=row.stepid, description=row.description, category=row.category, value_unit=row.value_unit, tests_passed=row.tests_passed, tests_failed=row.tests_failed, complete=bool(row.complete)) buildbot-3.4.0/master/buildbot/db/test_results.py000066400000000000000000000277531413250514000221310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.internet import defer from buildbot.db import base class TestResultDict(dict): pass class TestResultsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst @defer.inlineCallbacks def _add_code_paths(self, builderid, paths): # returns a dictionary of path to id in the test_code_paths table. # For paths that already exist, the id of the row in the test_code_paths is retrieved. assert isinstance(paths, set) def thd(conn): paths_to_ids = {} paths_table = self.db.model.test_code_paths for path_batch in self.doBatch(paths, batch_n=3000): path_batch = set(path_batch) while path_batch: # Use expanding bindparam, because performance of sqlalchemy is very slow # when filtering large sets otherwise. q = paths_table.select().where( (paths_table.c.path.in_(sa.bindparam('paths', expanding=True))) & (paths_table.c.builderid == builderid)) res = conn.execute(q, {'paths': list(path_batch)}) for row in res.fetchall(): paths_to_ids[row.path] = row.id path_batch.remove(row.path) # paths now contains all the paths that need insertion. try: insert_values = [{'builderid': builderid, 'path': path} for path in path_batch] q = paths_table.insert().values(insert_values) if self.db.pool.engine.dialect.name in ['postgresql', 'mssql']: # Use RETURNING, this way we won't need an additional select query q = q.returning(paths_table.c.id, paths_table.c.path) res = conn.execute(q) for row in res.fetchall(): paths_to_ids[row.path] = row.id path_batch.remove(row.path) else: conn.execute(q) except (sa.exc.IntegrityError, sa.exc.ProgrammingError): # There was a competing addCodePaths() call that added a path for the same # builder. 
Depending on the DB driver, none or some rows were inserted, but # we will re-check what's got inserted in the next iteration of the loop pass return paths_to_ids paths_to_id = yield self.db.pool.do(thd) return paths_to_id @defer.inlineCallbacks def getTestCodePaths(self, builderid, path_prefix=None, result_spec=None): def thd(conn): paths_table = self.db.model.test_code_paths q = paths_table.select() if path_prefix is not None: q = q.where(paths_table.c.path.startswith(path_prefix)) if result_spec is not None: return result_spec.thd_execute(conn, q, lambda x: x['path']) res = conn.execute(q) return [row['path'] for row in res.fetchall()] res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def _add_names(self, builderid, names): # returns a dictionary of name to id in the test_names table. # For names that already exist, the id of the row in the test_names is retrieved. assert isinstance(names, set) def thd(conn): names_to_ids = {} names_table = self.db.model.test_names for name_batch in self.doBatch(names, batch_n=3000): name_batch = set(name_batch) while name_batch: # Use expanding bindparam, because performance of sqlalchemy is very slow # when filtering large sets otherwise. q = names_table.select().where( (names_table.c.name.in_(sa.bindparam('names', expanding=True))) & (names_table.c.builderid == builderid)) res = conn.execute(q, {'names': list(name_batch)}) for row in res.fetchall(): names_to_ids[row.name] = row.id name_batch.remove(row.name) # names now contains all the names that need insertion. 
try: insert_values = [{'builderid': builderid, 'name': name} for name in name_batch] q = names_table.insert().values(insert_values) if self.db.pool.engine.dialect.name in ['postgresql', 'mssql']: # Use RETURNING, this way we won't need an additional select query q = q.returning(names_table.c.id, names_table.c.name) res = conn.execute(q) for row in res.fetchall(): names_to_ids[row.name] = row.id name_batch.remove(row.name) else: conn.execute(q) except (sa.exc.IntegrityError, sa.exc.ProgrammingError): # There was a competing addNames() call that added a name for the same # builder. Depending on the DB driver, none or some rows were inserted, but # we will re-check what's got inserted in the next iteration of the loop pass return names_to_ids names_to_id = yield self.db.pool.do(thd) return names_to_id @defer.inlineCallbacks def getTestNames(self, builderid, name_prefix=None, result_spec=None): def thd(conn): names_table = self.db.model.test_names q = names_table.select().where(names_table.c.builderid == builderid) if name_prefix is not None: q = q.where(names_table.c.name.startswith(name_prefix)) if result_spec is not None: return result_spec.thd_execute(conn, q, lambda x: x['name']) res = conn.execute(q) return [row['name'] for row in res.fetchall()] res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def addTestResults(self, builderid, test_result_setid, result_values): # Adds multiple test results for a specific test result set. # result_values is a list of dictionaries each of which must contain 'value' key and at # least one of 'test_name', 'test_code_path'. 'line' key is optional. # The function returns nothing. # Build values list for insertion. 
insert_values = [] insert_names = set() insert_code_paths = set() for result_value in result_values: if 'value' not in result_value: raise KeyError('Each of result_values must contain \'value\' key') if 'test_name' not in result_value and 'test_code_path' not in result_value: raise KeyError('Each of result_values must contain at least one of ' '\'test_name\' or \'test_code_path\' keys') if 'test_name' in result_value: insert_names.add(result_value['test_name']) if 'test_code_path' in result_value: insert_code_paths.add(result_value['test_code_path']) code_path_to_id = yield self._add_code_paths(builderid, insert_code_paths) name_to_id = yield self._add_names(builderid, insert_names) for result_value in result_values: insert_value = { 'value': result_value['value'], 'builderid': builderid, 'test_result_setid': test_result_setid, 'test_nameid': None, 'test_code_pathid': None, 'line': None, 'duration_ns': None, } if 'test_name' in result_value: insert_value['test_nameid'] = name_to_id[result_value['test_name']] if 'test_code_path' in result_value: insert_value['test_code_pathid'] = code_path_to_id[result_value['test_code_path']] if 'line' in result_value: insert_value['line'] = result_value['line'] if 'duration_ns' in result_value: insert_value['duration_ns'] = result_value['duration_ns'] insert_values.append(insert_value) def thd(conn): results_table = self.db.model.test_results q = results_table.insert().values(insert_values) conn.execute(q) yield self.db.pool.do(thd) @defer.inlineCallbacks def getTestResult(self, test_resultid): def thd(conn): results_table = self.db.model.test_results code_paths_table = self.db.model.test_code_paths names_table = self.db.model.test_names j = results_table.outerjoin(code_paths_table).outerjoin(names_table) q = sa.select([results_table, code_paths_table.c.path, names_table.c.name]) q = q.select_from(j).where(results_table.c.id == test_resultid) res = conn.execute(q) row = res.fetchone() if not row: return None return 
self._thd_row2dict(conn, row) res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def getTestResults(self, builderid, test_result_setid, result_spec=None): def thd(conn): results_table = self.db.model.test_results code_paths_table = self.db.model.test_code_paths names_table = self.db.model.test_names # specify join ON clauses manually to force filtering of code_paths_table and # names_table before join j = results_table.outerjoin( code_paths_table, (results_table.c.test_code_pathid == code_paths_table.c.id) & (code_paths_table.c.builderid == builderid)) j = j.outerjoin( names_table, (results_table.c.test_nameid == names_table.c.id) & (names_table.c.builderid == builderid)) q = sa.select([results_table, code_paths_table.c.path, names_table.c.name]) q = q.select_from(j).where((results_table.c.builderid == builderid) & (results_table.c.test_result_setid == test_result_setid)) if result_spec is not None: return result_spec.thd_execute(conn, q, lambda x: self._thd_row2dict(conn, x)) res = conn.execute(q) return [self._thd_row2dict(conn, row) for row in res.fetchall()] res = yield self.db.pool.do(thd) return res def _thd_row2dict(self, conn, row): return TestResultDict(id=row.id, builderid=row.builderid, test_result_setid=row.test_result_setid, test_name=row.name, test_code_path=row.path, line=row.line, duration_ns=row.duration_ns, value=row.value) buildbot-3.4.0/master/buildbot/db/types/000077500000000000000000000000001413250514000201455ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/db/types/__init__.py000066400000000000000000000000001413250514000222440ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/db/types/json.py000066400000000000000000000023351413250514000214730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json from sqlalchemy.types import Text from sqlalchemy.types import TypeDecorator class JsonObject(TypeDecorator): """Represents an immutable json-encoded string.""" cache_ok = True impl = Text def process_bind_param(self, value, dialect): if value is not None: value = json.dumps(value) return value def process_result_value(self, value, dialect): if value is not None: value = json.loads(value) else: value = {} return value buildbot-3.4.0/master/buildbot/db/users.py000066400000000000000000000220061413250514000205140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from sqlalchemy.sql.expression import and_ from buildbot.db import base from buildbot.util import identifiers class UsDict(dict): pass class UsersConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst # returns a Deferred that returns a value def findUserByAttr(self, identifier, attr_type, attr_data, _race_hook=None): # note that since this involves two tables, self.findSomethingId is not # helpful def thd(conn, no_recurse=False, identifier=identifier): tbl = self.db.model.users tbl_info = self.db.model.users_info self.checkLength(tbl.c.identifier, identifier) self.checkLength(tbl_info.c.attr_type, attr_type) self.checkLength(tbl_info.c.attr_data, attr_data) # try to find the user q = sa.select([tbl_info.c.uid], whereclause=and_(tbl_info.c.attr_type == attr_type, tbl_info.c.attr_data == attr_data)) rows = conn.execute(q).fetchall() if rows: return rows[0].uid _race_hook and _race_hook(conn) # try to do both of these inserts in a transaction, so that both # the new user and the corresponding attributes appear at the same # time from the perspective of other masters. transaction = conn.begin() inserted_user = False try: r = conn.execute(tbl.insert(), dict(identifier=identifier)) uid = r.inserted_primary_key[0] inserted_user = True conn.execute(tbl_info.insert(), dict(uid=uid, attr_type=attr_type, attr_data=attr_data)) transaction.commit() except (sa.exc.IntegrityError, sa.exc.ProgrammingError): transaction.rollback() # try it all over again, in case there was an overlapping, # identical call to findUserByAttr. If the identifier # collided, we'll try again indefinitely; otherwise, only once. 
if no_recurse: raise # if we failed to insert the user, then it's because the # identifier wasn't unique if not inserted_user: identifier = identifiers.incrementIdentifier( 256, identifier) else: no_recurse = True return thd(conn, no_recurse=no_recurse, identifier=identifier) return uid return self.db.pool.do(thd) # returns a Deferred that returns a value @base.cached("usdicts") def getUser(self, uid): def thd(conn): tbl = self.db.model.users tbl_info = self.db.model.users_info q = tbl.select(whereclause=(tbl.c.uid == uid)) users_row = conn.execute(q).fetchone() if not users_row: return None # gather all attr_type and attr_data entries from users_info table q = tbl_info.select(whereclause=(tbl_info.c.uid == uid)) rows = conn.execute(q).fetchall() return self.thd_createUsDict(users_row, rows) return self.db.pool.do(thd) def thd_createUsDict(self, users_row, rows): # make UsDict to return usdict = UsDict() for row in rows: usdict[row.attr_type] = row.attr_data # add the users_row data *after* the attributes in case attr_type # matches one of these keys. 
usdict['uid'] = users_row.uid usdict['identifier'] = users_row.identifier usdict['bb_username'] = users_row.bb_username usdict['bb_password'] = users_row.bb_password return usdict # returns a Deferred that returns a value def getUserByUsername(self, username): def thd(conn): tbl = self.db.model.users tbl_info = self.db.model.users_info q = tbl.select(whereclause=(tbl.c.bb_username == username)) users_row = conn.execute(q).fetchone() if not users_row: return None # gather all attr_type and attr_data entries from users_info table q = tbl_info.select(whereclause=(tbl_info.c.uid == users_row.uid)) rows = conn.execute(q).fetchall() return self.thd_createUsDict(users_row, rows) return self.db.pool.do(thd) # returns a Deferred that returns a value def getUsers(self): def thd(conn): tbl = self.db.model.users rows = conn.execute(tbl.select()).fetchall() dicts = [] if rows: for row in rows: ud = dict(uid=row.uid, identifier=row.identifier) dicts.append(ud) return dicts return self.db.pool.do(thd) # returns a Deferred that returns None def updateUser(self, uid=None, identifier=None, bb_username=None, bb_password=None, attr_type=None, attr_data=None, _race_hook=None): def thd(conn): transaction = conn.begin() tbl = self.db.model.users tbl_info = self.db.model.users_info update_dict = {} # first, add the identifier is it exists if identifier is not None: self.checkLength(tbl.c.identifier, identifier) update_dict['identifier'] = identifier # then, add the creds if they exist if bb_username is not None: assert bb_password is not None self.checkLength(tbl.c.bb_username, bb_username) self.checkLength(tbl.c.bb_password, bb_password) update_dict['bb_username'] = bb_username update_dict['bb_password'] = bb_password # update the users table if it needs to be updated if update_dict: q = tbl.update(whereclause=(tbl.c.uid == uid)) res = conn.execute(q, update_dict) # then, update the attributes, carefully handling the potential # update-or-insert race condition. 
if attr_type is not None: assert attr_data is not None self.checkLength(tbl_info.c.attr_type, attr_type) self.checkLength(tbl_info.c.attr_data, attr_data) # first update, then insert q = tbl_info.update( whereclause=(tbl_info.c.uid == uid) & (tbl_info.c.attr_type == attr_type)) res = conn.execute(q, attr_data=attr_data) if res.rowcount == 0: if _race_hook is not None: _race_hook(conn) # the update hit 0 rows, so try inserting a new one try: q = tbl_info.insert() res = conn.execute(q, uid=uid, attr_type=attr_type, attr_data=attr_data) except (sa.exc.IntegrityError, sa.exc.ProgrammingError): # someone else beat us to the punch inserting this row; # let them win. transaction.rollback() return transaction.commit() return self.db.pool.do(thd) # returns a Deferred that returns None def removeUser(self, uid): def thd(conn): # delete from dependent tables first, followed by 'users' for tbl in [ self.db.model.change_users, self.db.model.users_info, self.db.model.users, ]: conn.execute(tbl.delete(whereclause=(tbl.c.uid == uid))) return self.db.pool.do(thd) # returns a Deferred that returns a value def identifierToUid(self, identifier): def thd(conn): tbl = self.db.model.users q = tbl.select(whereclause=(tbl.c.identifier == identifier)) row = conn.execute(q).fetchone() if not row: return None return row.uid return self.db.pool.do(thd) buildbot-3.4.0/master/buildbot/db/workers.py000066400000000000000000000230771413250514000210600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.internet import defer from buildbot.db import base from buildbot.util import identifiers class WorkersConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/database.rst def findWorkerId(self, name): tbl = self.db.model.workers # callers should verify this and give good user error messages assert identifiers.isIdentifier(50, name) return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name == name), insert_values=dict( name=name, info={}, paused=0, graceful=0, )) def _deleteFromConfiguredWorkers_thd(self, conn, buildermasterids, workerid=None): cfg_tbl = self.db.model.configured_workers # batch deletes to avoid using too many variables for batch in self.doBatch(buildermasterids, 100): q = cfg_tbl.delete() q = q.where(cfg_tbl.c.buildermasterid.in_(batch)) if workerid: q = q.where(cfg_tbl.c.workerid == workerid) conn.execute(q).close() # returns a Deferred which returns None def deconfigureAllWorkersForMaster(self, masterid): def thd(conn): # first remove the old configured buildermasterids for this master and worker # as sqlalchemy does not support delete with join, we need to do # that in 2 queries cfg_tbl = self.db.model.configured_workers bm_tbl = self.db.model.builder_masters j = cfg_tbl j = j.outerjoin(bm_tbl) q = sa.select( [cfg_tbl.c.buildermasterid], from_obj=[j], distinct=True) q = q.where(bm_tbl.c.masterid == masterid) res = conn.execute(q) buildermasterids = [row['buildermasterid'] for row in res] res.close() self._deleteFromConfiguredWorkers_thd(conn, buildermasterids) return self.db.pool.do(thd) # returns a Deferred that returns None def workerConfigured(self, workerid, masterid, builderids): def thd(conn): cfg_tbl = 
self.db.model.configured_workers bm_tbl = self.db.model.builder_masters # get the buildermasterids that are configured if builderids: q = sa.select([bm_tbl.c.id], from_obj=[bm_tbl]) q = q.where(bm_tbl.c.masterid == masterid) q = q.where(bm_tbl.c.builderid.in_(builderids)) res = conn.execute(q) buildermasterids = {row['id'] for row in res} res.close() else: buildermasterids = set([]) j = cfg_tbl j = j.outerjoin(bm_tbl) q = sa.select( [cfg_tbl.c.buildermasterid], from_obj=[j], distinct=True) q = q.where(bm_tbl.c.masterid == masterid) q = q.where(cfg_tbl.c.workerid == workerid) res = conn.execute(q) oldbuildermasterids = {row['buildermasterid'] for row in res} res.close() todeletebuildermasterids = oldbuildermasterids - buildermasterids toinsertbuildermasterids = buildermasterids - oldbuildermasterids transaction = conn.begin() self._deleteFromConfiguredWorkers_thd(conn, todeletebuildermasterids, workerid) # and insert the new ones if toinsertbuildermasterids: q = cfg_tbl.insert() conn.execute(q, [{'workerid': workerid, 'buildermasterid': buildermasterid} for buildermasterid in toinsertbuildermasterids]).close() transaction.commit() return self.db.pool.do(thd) @defer.inlineCallbacks def getWorker(self, workerid=None, name=None, masterid=None, builderid=None): if workerid is None and name is None: return None workers = yield self.getWorkers(_workerid=workerid, _name=name, masterid=masterid, builderid=builderid) if workers: return workers[0] return None # returns a Deferred that returns a value def getWorkers(self, _workerid=None, _name=None, masterid=None, builderid=None, paused=None, graceful=None): def thd(conn): workers_tbl = self.db.model.workers conn_tbl = self.db.model.connected_workers cfg_tbl = self.db.model.configured_workers bm_tbl = self.db.model.builder_masters def selectWorker(q): return q # first, get the worker itself and the configured_on info j = workers_tbl j = j.outerjoin(cfg_tbl) j = j.outerjoin(bm_tbl) q = sa.select( [workers_tbl.c.id, 
workers_tbl.c.name, workers_tbl.c.info, workers_tbl.c.paused, workers_tbl.c.graceful, bm_tbl.c.builderid, bm_tbl.c.masterid], from_obj=[j], order_by=[workers_tbl.c.id]) if _workerid is not None: q = q.where(workers_tbl.c.id == _workerid) if _name is not None: q = q.where(workers_tbl.c.name == _name) if masterid is not None: q = q.where(bm_tbl.c.masterid == masterid) if builderid is not None: q = q.where(bm_tbl.c.builderid == builderid) if paused is not None: q = q.where(workers_tbl.c.paused == int(paused)) if graceful is not None: q = q.where(workers_tbl.c.graceful == int(graceful)) rv = {} res = None lastId = None cfgs = None for row in conn.execute(q): if row.id != lastId: lastId = row.id cfgs = [] res = { 'id': lastId, 'name': row.name, 'configured_on': cfgs, 'connected_to': [], 'workerinfo': row.info, 'paused': bool(row.paused), 'graceful': bool(row.graceful)} rv[lastId] = res if row.builderid and row.masterid: cfgs.append({'builderid': row.builderid, 'masterid': row.masterid}) # now go back and get the connection info for the same set of # workers j = conn_tbl if _name is not None: # note this is not an outer join; if there are unconnected # workers, they were captured in rv above j = j.join(workers_tbl) q = sa.select( [conn_tbl.c.workerid, conn_tbl.c.masterid], from_obj=[j], order_by=[conn_tbl.c.workerid]) if _workerid is not None: q = q.where(conn_tbl.c.workerid == _workerid) if _name is not None: q = q.where(workers_tbl.c.name == _name) if masterid is not None: q = q.where(conn_tbl.c.masterid == masterid) for row in conn.execute(q): id = row.workerid if id not in rv: continue rv[row.workerid]['connected_to'].append(row.masterid) return list(rv.values()) return self.db.pool.do(thd) # returns a Deferred that returns None def workerConnected(self, workerid, masterid, workerinfo): def thd(conn): conn_tbl = self.db.model.connected_workers q = conn_tbl.insert() try: conn.execute(q, {'workerid': workerid, 'masterid': masterid}) except (sa.exc.IntegrityError, 
sa.exc.ProgrammingError): # if the row is already present, silently fail.. pass bs_tbl = self.db.model.workers q = bs_tbl.update(whereclause=(bs_tbl.c.id == workerid)) conn.execute(q, info=workerinfo) return self.db.pool.do(thd) # returns a Deferred that returns None def workerDisconnected(self, workerid, masterid): def thd(conn): tbl = self.db.model.connected_workers q = tbl.delete(whereclause=(tbl.c.workerid == workerid) & (tbl.c.masterid == masterid)) conn.execute(q) return self.db.pool.do(thd) # returns a Deferred that returns None def setWorkerState(self, workerid, paused, graceful): def thd(conn): tbl = self.db.model.workers q = tbl.update(whereclause=(tbl.c.id == workerid)) conn.execute(q, paused=int(paused), graceful=int(graceful)) return self.db.pool.do(thd) buildbot-3.4.0/master/buildbot/errors.py000066400000000000000000000015361413250514000203070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Having them here prevents all kind of circular dependencies class PluginDBError(Exception): pass class CaptureCallbackError(Exception): pass buildbot-3.4.0/master/buildbot/interfaces.py000066400000000000000000000243761413250514000211250ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """Interface documentation. Define the interfaces that are implemented by various buildbot classes. """ # disable pylint warnings triggered by interface definitions # pylint: disable=no-self-argument # pylint: disable=no-method-argument # pylint: disable=inherit-non-class from twisted.python.deprecate import deprecatedModuleAttribute from twisted.python.versions import Version from zope.interface import Attribute from zope.interface import Interface # exceptions that can be raised while trying to start a build class BuilderInUseError(Exception): pass class WorkerSetupError(Exception): pass WorkerTooOldError = WorkerSetupError deprecatedModuleAttribute( Version("buildbot", 2, 9, 0), message="Use WorkerSetupError instead.", moduleName="buildbot.interfaces", name="WorkerTooOldError", ) class LatentWorkerFailedToSubstantiate(Exception): pass class LatentWorkerCannotSubstantiate(Exception): pass class LatentWorkerSubstantiatiationCancelled(Exception): pass class IPlugin(Interface): """ Base interface for all Buildbot plugins """ class IChangeSource(IPlugin): """ Service which feeds Change objects to the changemaster. When files or directories are changed in version control, this object should represent the changes as a change dictionary and call:: self.master.data.updates.addChange(who=.., rev=.., ..) 
See 'Writing Change Sources' in the manual for more information. """ master = Attribute('master', 'Pointer to BuildMaster, automatically set when started.') def describe(): """Return a string which briefly describes this source.""" class ISourceStamp(Interface): """ @cvar branch: branch from which source was drawn @type branch: string or None @cvar revision: revision of the source, or None to use CHANGES @type revision: varies depending on VC @cvar patch: patch applied to the source, or None if no patch @type patch: None or tuple (level diff) @cvar changes: the source step should check out the latest revision in the given changes @type changes: tuple of L{buildbot.changes.changes.Change} instances, all of which are on the same branch @cvar project: project this source code represents @type project: string @cvar repository: repository from which source was drawn @type repository: string """ def canBeMergedWith(self, other): """ Can this SourceStamp be merged with OTHER? """ def mergeWith(self, others): """Generate a SourceStamp for the merger of me and all the other SourceStamps. This is called by a Build when it starts, to figure out what its sourceStamp should be.""" def getAbsoluteSourceStamp(self, got_revision): """Get a new SourceStamp object reflecting the actual revision found by a Source step.""" def getText(self): """Returns a list of strings to describe the stamp. These are intended to be displayed in a narrow column. If more space is available, the caller should join them together with spaces before presenting them to the user.""" class IEmailSender(Interface): """I know how to send email, and can be used by other parts of the Buildbot to contact developers.""" class IEmailLookup(Interface): def getAddress(user): """Turn a User-name string into a valid email address. 
Either return a string (with an @ in it), None (to indicate that the user cannot be reached by email), or a Deferred which will fire with the same.""" class ILogObserver(Interface): """Objects which provide this interface can be used in a BuildStep to watch the output of a LogFile and parse it incrementally. """ # internal methods def setStep(step): pass def setLog(log): pass # methods called by the LogFile def logChunk(build, step, log, channel, text): pass class IWorker(IPlugin): # callback methods from the manager pass class ILatentWorker(IWorker): """A worker that is not always running, but can run when requested. """ substantiated = Attribute('Substantiated', 'Whether the latent worker is currently ' 'substantiated with a real instance.') def substantiate(): """Request that the worker substantiate with a real instance. Returns a deferred that will callback when a real instance has attached.""" # there is an insubstantiate too, but that is not used externally ATM. def buildStarted(wfb): """Inform the latent worker that a build has started. @param wfb: a L{LatentWorkerForBuilder}. The wfb is the one for whom the build finished. """ def buildFinished(wfb): """Inform the latent worker that a build has finished. @param wfb: a L{LatentWorkerForBuilder}. The wfb is the one for whom the build finished. """ class IMachine(Interface): pass class IMachineAction(Interface): def perform(self, manager): """ Perform an action on the machine managed by manager. Returns a deferred evaluating to True if it was possible to execute the action. """ class ILatentMachine(IMachine): """ A machine that is not always running, but can be started when requested. """ class IRenderable(Interface): """An object that can be interpolated with properties from a build. """ def getRenderingFor(iprops): """Return a deferred that fires with interpolation with the given properties @param iprops: the L{IProperties} provider supplying the properties. 
""" class IProperties(Interface): """ An object providing access to build properties """ def getProperty(name, default=None): """Get the named property, returning the default if the property does not exist. @param name: property name @type name: string @param default: default value (default: @code{None}) @returns: property value """ def hasProperty(name): """Return true if the named property exists. @param name: property name @type name: string @returns: boolean """ def has_key(name): """Deprecated name for L{hasProperty}.""" def setProperty(name, value, source, runtime=False): """Set the given property, overwriting any existing value. The source describes the source of the value for human interpretation. @param name: property name @type name: string @param value: property value @type value: JSON-able value @param source: property source @type source: string @param runtime: (optional) whether this property was set during the build's runtime: usually left at its default value @type runtime: boolean """ def getProperties(): """Get the L{buildbot.process.properties.Properties} instance storing these properties. Note that the interface for this class is not stable, so where possible the other methods of this interface should be used. @returns: L{buildbot.process.properties.Properties} instance """ def getBuild(): """Get the L{buildbot.process.build.Build} instance for the current build. Note that this object is not available after the build is complete, at which point this method will return None. Try to avoid using this method, as the API of L{Build} instances is not well-defined. @returns L{buildbot.process.build.Build} instance """ def render(value): """Render @code{value} as an L{IRenderable}. This essentially coerces @code{value} to an L{IRenderable} and calls its @L{getRenderingFor} method. 
@name value: value to render @returns: rendered value """ class IScheduler(IPlugin): pass class ITriggerableScheduler(Interface): """ A scheduler that can be triggered by buildsteps. """ def trigger(waited_for, sourcestamps=None, set_props=None, parent_buildid=None, parent_relationship=None): """Trigger a build with the given source stamp and properties. """ class IBuildStepFactory(Interface): def buildStep(): pass class IBuildStep(IPlugin): """ A build step """ # Currently has nothing class IConfigured(Interface): def getConfigDict(): pass class IReportGenerator(Interface): def generate(self, master, reporter, key, build): pass class IConfigLoader(Interface): def loadConfig(): """ Load the specified configuration. :return MasterConfig: """ class IHttpResponse(Interface): def content(): """ :returns: raw (``bytes``) content of the response via deferred """ def json(): """ :returns: json decoded content of the response via deferred """ code = Attribute('code', "http status code of the request's response (e.g 200)") url = Attribute('url', "request's url (e.g https://api.github.com/endpoint')") class IConfigurator(Interface): def configure(config_dict): """ Alter the buildbot config_dict, as defined in master.cfg like the master.cfg, this is run out of the main reactor thread, so this can block, but this can't call most Buildbot facilities. :returns: None """ buildbot-3.4.0/master/buildbot/locks.py000066400000000000000000000404501413250514000201040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot import util from buildbot.util import service from buildbot.util import subscription from buildbot.util.eventual import eventually if False: # for debugging pylint: disable=using-constant-test debuglog = log.msg else: debuglog = lambda m: None # noqa class BaseLock: """ Class handling claiming and releasing of L{self}, and keeping track of current and waiting owners. We maintain the wait queue in FIFO order, and ensure that counting waiters in the queue behind exclusive waiters cannot acquire the lock. This ensures that exclusive waiters are not starved. """ description = "" def __init__(self, name, maxCount=1): super().__init__() # Name of the lock self.lockName = name # Current queue, tuples (waiter, LockAccess, deferred) self.waiting = [] # Current owners, tuples (owner, LockAccess) self.owners = [] # maximal number of counting owners self.maxCount = maxCount # current number of claimed exclusive locks (0 or 1), must match # self.owners self._claimed_excl = 0 # current number of claimed counting locks (0 to self.maxCount), must # match self.owners. 
Note that self.maxCount is not a strict limit, the # number of claimed counting locks may be higher than self.maxCount if # it was lowered by self._claimed_counting = 0 # subscriptions to this lock being released self.release_subs = subscription.SubscriptionPoint("%r releases" % (self,)) def __repr__(self): return self.description def setMaxCount(self, count): old_max_count = self.maxCount self.maxCount = count if count > old_max_count: self._tryWakeUp() def _find_waiting(self, requester): for idx, waiter in enumerate(self.waiting): if waiter[0] is requester: return idx return None def isAvailable(self, requester, access): """ Return a boolean whether the lock is available for claiming """ debuglog("{} isAvailable({}, {}): self.owners={}".format(self, requester, access, repr(self.owners))) num_excl, num_counting = self._claimed_excl, self._claimed_counting if not access.count: return True w_index = self._find_waiting(requester) if w_index is None: w_index = len(self.waiting) ahead = self.waiting[:w_index] if access.mode == 'counting': # Wants counting access return not num_excl \ and num_counting + len(ahead) + access.count <= self.maxCount \ and all([w[1].mode == 'counting' for w in ahead]) # else Wants exclusive access return not num_excl and not num_counting and not ahead def _addOwner(self, owner, access): self.owners.append((owner, access)) if access.mode == 'counting': self._claimed_counting += access.count else: self._claimed_excl += 1 assert (self._claimed_excl and not self._claimed_counting) \ or (not self._claimed_excl and self._claimed_excl <= self.maxCount) def _removeOwner(self, owner, access): # returns True if owner removed, False if the lock has been already # released entry = (owner, access) if entry not in self.owners: return False self.owners.remove(entry) if access.mode == 'counting': self._claimed_counting -= access.count else: self._claimed_excl -= 1 return True def claim(self, owner, access): """ Claim the lock (lock must be available) """ 
debuglog("{} claim({}, {})".format(self, owner, access.mode)) assert owner is not None assert self.isAvailable(owner, access), "ask for isAvailable() first" assert isinstance(access, LockAccess) assert access.mode in ['counting', 'exclusive'] assert isinstance(access.count, int) if access.mode == 'exclusive': assert access.count == 1 else: assert access.count >= 0 if not access.count: return self.waiting = [w for w in self.waiting if w[0] is not owner] self._addOwner(owner, access) debuglog(" {} is claimed '{}', {} units".format(self, access.mode, access.count)) def subscribeToReleases(self, callback): """Schedule C{callback} to be invoked every time this lock is released. Returns a L{Subscription}.""" return self.release_subs.subscribe(callback) def release(self, owner, access): """ Release the lock """ assert isinstance(access, LockAccess) if not access.count: return debuglog("{} release({}, {}, {})".format(self, owner, access.mode, access.count)) if not self._removeOwner(owner, access): debuglog("{} already released".format(self)) return self._tryWakeUp() # notify any listeners self.release_subs.deliver() def _tryWakeUp(self): # After an exclusive access, we may need to wake up several waiting. # Break out of the loop when the first waiting client should not be # awakened. num_excl, num_counting = self._claimed_excl, self._claimed_counting for i, (w_owner, w_access, d) in enumerate(self.waiting): if w_access.mode == 'counting': if num_excl > 0 or num_counting >= self.maxCount: break num_counting = num_counting + w_access.count else: # w_access.mode == 'exclusive' if num_excl > 0 or num_counting > 0: break num_excl = num_excl + w_access.count # If the waiter has a deferred, wake it up and clear the deferred # from the wait queue entry to indicate that it has been woken. if d: self.waiting[i] = (w_owner, w_access, None) eventually(d.callback, self) def waitUntilMaybeAvailable(self, owner, access): """Fire when the lock *might* be available. 
The deferred may be fired spuriously and the lock is not necessarily available, thus the caller will need to check with isAvailable() when the deferred fires. A single requester must not have more than one pending waitUntilMaybeAvailable() on a single lock. The caller must guarantee, that once the returned deferred is fired, either the lock is checked for availability and claimed if it's available, or the it is indicated as no longer interesting by calling stopWaitingUntilAvailable(). The caller does not need to do this immediately after deferred is fired, an eventual execution is sufficient. """ debuglog("{} waitUntilAvailable({})".format(self, owner)) assert isinstance(access, LockAccess) if self.isAvailable(owner, access): return defer.succeed(self) d = defer.Deferred() # Are we already in the wait queue? w_index = self._find_waiting(owner) if w_index is not None: _, _, old_d = self.waiting[w_index] assert old_d is None, "waitUntilMaybeAvailable() must not be called again before the " \ "previous deferred fired" self.waiting[w_index] = (owner, access, d) else: self.waiting.append((owner, access, d)) return d def stopWaitingUntilAvailable(self, owner, access, d): """ Stop waiting for lock to become available. `d` must be the result of a previous call to `waitUntilMaybeAvailable()`. 
If `d` has not been woken up already by calling its callback, it will be done as part of this function """ debuglog("{} stopWaitingUntilAvailable({})".format(self, owner)) assert isinstance(access, LockAccess) w_index = self._find_waiting(owner) assert w_index is not None, "The owner was not waiting for the lock" _, _, old_d = self.waiting[w_index] if old_d is not None: assert d is old_d, "The supplied deferred must be a result of waitUntilMaybeAvailable()" del self.waiting[w_index] d.callback(None) else: del self.waiting[w_index] # if the callback has already been woken up, then it must schedule another waiter, # otherwise we will have an available lock with a waiter list and no-one to wake the # waiters up. self._tryWakeUp() def isOwner(self, owner, access): return (owner, access) in self.owners class RealMasterLock(BaseLock, service.SharedService): def __init__(self, name): # the caller will want to call updateFromLockId after initialization super().__init__(name, 0) self.config_version = -1 self._updateDescription() def _updateDescription(self): self.description = "".format(self.lockName, self.maxCount) def getLockForWorker(self, workername): return self def updateFromLockId(self, lockid, config_version): assert self.lockName == lockid.name assert isinstance(config_version, int) self.config_version = config_version self.setMaxCount(lockid.maxCount) self._updateDescription() class RealWorkerLock(service.SharedService): def __init__(self, name): super().__init__() # the caller will want to call updateFromLockId after initialization self.lockName = name self.maxCount = None self.maxCountForWorker = None self.config_version = -1 self._updateDescription() self.locks = {} def __repr__(self): return self.description def getLockForWorker(self, workername): if workername not in self.locks: maxCount = self.maxCountForWorker.get(workername, self.maxCount) lock = self.locks[workername] = BaseLock(self.lockName, maxCount) self._updateDescriptionForLock(lock, workername) 
self.locks[workername] = lock return self.locks[workername] def _updateDescription(self): self.description = \ "".format(self.lockName, self.maxCount, self.maxCountForWorker) def _updateDescriptionForLock(self, lock, workername): lock.description = \ "".format(lock.lockName, lock.maxCount, workername, id(lock)) def updateFromLockId(self, lockid, config_version): assert self.lockName == lockid.name assert isinstance(config_version, int) self.config_version = config_version self.maxCount = lockid.maxCount self.maxCountForWorker = lockid.maxCountForWorker self._updateDescription() for workername, lock in self.locks.items(): maxCount = self.maxCountForWorker.get(workername, self.maxCount) lock.setMaxCount(maxCount) self._updateDescriptionForLock(lock, workername) class LockAccess(util.ComparableMixin): """ I am an object representing a way to access a lock. @param lockid: LockId instance that should be accessed. @type lockid: A MasterLock or WorkerLock instance. @param mode: Mode of accessing the lock. @type mode: A string, either 'counting' or 'exclusive'. @param count: How many units does the access occupy @type count: Integer, not negative, default is 1 for backwards compatibility """ compare_attrs = ('lockid', 'mode', 'count') def __init__(self, lockid, mode, count=1): self.lockid = lockid self.mode = mode self.count = count assert isinstance(lockid, (MasterLock, WorkerLock)) assert mode in ['counting', 'exclusive'] assert isinstance(count, int) if mode == 'exclusive': assert count == 1 else: assert count >= 0 class BaseLockId(util.ComparableMixin): """ Abstract base class for LockId classes. Sets up the 'access()' function for the LockId's available to the user (MasterLock and WorkerLock classes). Derived classes should add - Comparison with the L{util.ComparableMixin} via the L{compare_attrs} class variable. - Link to the actual lock class should be added with the L{lockClass} class variable. 
""" def access(self, mode, count=1): """ Express how the lock should be accessed """ assert mode in ['counting', 'exclusive'] assert isinstance(count, int) assert count >= 0 return LockAccess(self, mode, count) def defaultAccess(self): """ For buildbot 0.7.7 compatibility: When user doesn't specify an access mode, this one is chosen. """ return self.access('counting') # master.cfg should only reference the following MasterLock and WorkerLock # classes. They are identifiers that will be turned into real Locks later, # via the BotMaster.getLockByID method. class MasterLock(BaseLockId): """I am a semaphore that limits the number of simultaneous actions. Builds and BuildSteps can declare that they wish to claim me as they run. Only a limited number of such builds or steps will be able to run simultaneously. By default this number is one, but my maxCount parameter can be raised to allow two or three or more operations to happen at the same time. Use this to protect a resource that is shared among all builders and all workers, for example to limit the load on a common SVN repository. """ compare_attrs = ('name', 'maxCount') lockClass = RealMasterLock def __init__(self, name, maxCount=1): self.name = name self.maxCount = maxCount class WorkerLock(BaseLockId): """I am a semaphore that limits simultaneous actions on each worker. Builds and BuildSteps can declare that they wish to claim me as they run. Only a limited number of such builds or steps will be able to run simultaneously on any given worker. By default this number is one, but my maxCount parameter can be raised to allow two or three or more operations to happen on a single worker at the same time. Use this to protect a resource that is shared among all the builds taking place on each worker, for example to limit CPU or memory load on an underpowered machine. Each worker will get an independent copy of this semaphore. 
By default each copy will use the same owner count (set with maxCount), but you can provide maxCountForWorker with a dictionary that maps workername to owner count, to allow some workers more parallelism than others. """ compare_attrs = ('name', 'maxCount', '_maxCountForWorkerList') lockClass = RealWorkerLock def __init__(self, name, maxCount=1, maxCountForWorker=None): self.name = name self.maxCount = maxCount if maxCountForWorker is None: maxCountForWorker = {} self.maxCountForWorker = maxCountForWorker # for comparison purposes, turn this dictionary into a stably-sorted # list of tuples self._maxCountForWorkerList = tuple( sorted(self.maxCountForWorker.items())) buildbot-3.4.0/master/buildbot/machine/000077500000000000000000000000001413250514000200205ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/machine/__init__.py000066400000000000000000000013011413250514000221240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members buildbot-3.4.0/master/buildbot/machine/base.py000066400000000000000000000030161413250514000213040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members from twisted.internet import defer from zope.interface import implementer from buildbot import interfaces from buildbot.util import service @implementer(interfaces.IMachine) class Machine(service.BuildbotService): def checkConfig(self, name, **kwargs): super().checkConfig(**kwargs) self.name = name self.workers = [] @defer.inlineCallbacks def reconfigService(self, name, **kwargs): yield super().reconfigService(**kwargs) assert self.name == name def registerWorker(self, worker): assert worker.machine_name == self.name self.workers.append(worker) def unregisterWorker(self, worker): assert worker in self.workers self.workers.remove(worker) def __repr__(self): return "".format(self.name, id(self)) buildbot-3.4.0/master/buildbot/machine/generic.py000066400000000000000000000146641413250514000220210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import stat from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import config from buildbot.interfaces import IMachineAction from buildbot.machine.latent import AbstractLatentMachine from buildbot.util import misc from buildbot.util import private_tempdir from buildbot.util import runprocess from buildbot.util.git import getSshArgsForKeys from buildbot.util.git import getSshKnownHostsContents class GenericLatentMachine(AbstractLatentMachine): def checkConfig(self, name, start_action, stop_action, **kwargs): super().checkConfig(name, **kwargs) for action, arg_name in [(start_action, 'start_action'), (stop_action, 'stop_action')]: if not IMachineAction.providedBy(action): msg = "{} of {} does not implement required " \ "interface".format(arg_name, self.name) raise Exception(msg) @defer.inlineCallbacks def reconfigService(self, name, start_action, stop_action, **kwargs): yield super().reconfigService(name, **kwargs) self.start_action = start_action self.stop_action = stop_action def start_machine(self): return self.start_action.perform(self) def stop_machine(self): return self.stop_action.perform(self) @defer.inlineCallbacks def runProcessLogFailures(reactor, args, expectedCode=0): code, stdout, stderr = yield runprocess.run_process(reactor, args) if code != expectedCode: log.err(('Got unexpected return code when running {}: ' 'code: {}, stdout: {}, stderr: {}').format(args, code, stdout, stderr)) return False return True class _LocalMachineActionMixin: def setupLocal(self, command): if not isinstance(command, list): config.error('command parameter must be a list') self._command = command @defer.inlineCallbacks def perform(self, manager): args = yield manager.renderSecrets(self._command) return (yield runProcessLogFailures(manager.master.reactor, args)) class _SshActionMixin: def setupSsh(self, sshBin, host, remoteCommand, sshKey=None, sshHostKey=None): if 
not isinstance(sshBin, str): config.error('sshBin parameter must be a string') if not isinstance(host, str): config.error('host parameter must be a string') if not isinstance(remoteCommand, list): config.error('remoteCommand parameter must be a list') self._sshBin = sshBin self._host = host self._remoteCommand = remoteCommand self._sshKey = sshKey self._sshHostKey = sshHostKey @defer.inlineCallbacks def _performImpl(self, manager, key_path, known_hosts_path): args = getSshArgsForKeys(key_path, known_hosts_path) args.append((yield manager.renderSecrets(self._host))) args.extend((yield manager.renderSecrets(self._remoteCommand))) return (yield runProcessLogFailures(manager.master.reactor, [self._sshBin] + args)) @defer.inlineCallbacks def _prepareSshKeys(self, manager, temp_dir_path): key_path = None if self._sshKey is not None: ssh_key_data = yield manager.renderSecrets(self._sshKey) key_path = os.path.join(temp_dir_path, 'ssh-key') misc.writeLocalFile(key_path, ssh_key_data, mode=stat.S_IRUSR) known_hosts_path = None if self._sshHostKey is not None: ssh_host_key_data = yield manager.renderSecrets(self._sshHostKey) ssh_host_key_data = getSshKnownHostsContents(ssh_host_key_data) known_hosts_path = os.path.join(temp_dir_path, 'ssh-known-hosts') misc.writeLocalFile(known_hosts_path, ssh_host_key_data) return (key_path, known_hosts_path) @defer.inlineCallbacks def perform(self, manager): if self._sshKey is not None or self._sshHostKey is not None: with private_tempdir.PrivateTemporaryDirectory( prefix='ssh-', dir=manager.master.basedir) as temp_dir: key_path, hosts_path = yield self._prepareSshKeys(manager, temp_dir) ret = yield self._performImpl(manager, key_path, hosts_path) else: ret = yield self._performImpl(manager, None, None) return ret @implementer(IMachineAction) class LocalWakeAction(_LocalMachineActionMixin): def __init__(self, command): self.setupLocal(command) class LocalWOLAction(LocalWakeAction): def __init__(self, wakeMac, wolBin='wakeonlan'): 
LocalWakeAction.__init__(self, [wolBin, wakeMac]) @implementer(IMachineAction) class RemoteSshWakeAction(_SshActionMixin): def __init__(self, host, remoteCommand, sshBin='ssh', sshKey=None, sshHostKey=None): self.setupSsh(sshBin, host, remoteCommand, sshKey=sshKey, sshHostKey=sshHostKey) class RemoteSshWOLAction(RemoteSshWakeAction): def __init__(self, host, wakeMac, wolBin='wakeonlan', sshBin='ssh', sshKey=None, sshHostKey=None): RemoteSshWakeAction.__init__(self, host, [wolBin, wakeMac], sshBin=sshBin, sshKey=sshKey, sshHostKey=sshHostKey) @implementer(IMachineAction) class RemoteSshSuspendAction(_SshActionMixin): def __init__(self, host, remoteCommand=None, sshBin='ssh', sshKey=None, sshHostKey=None): if remoteCommand is None: remoteCommand = ['systemctl', 'suspend'] self.setupSsh(sshBin, host, remoteCommand, sshKey=sshKey, sshHostKey=sshHostKey) buildbot-3.4.0/master/buildbot/machine/latent.py000066400000000000000000000147221413250514000216670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import enum from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import interfaces from buildbot.machine.base import Machine from buildbot.util import Notifier class States(enum.Enum): # Represents the state of LatentMachine STOPPED = 0 STARTING = 1 STARTED = 2 STOPPING = 3 @implementer(interfaces.ILatentMachine) class AbstractLatentMachine(Machine): DEFAULT_MISSING_TIMEOUT = 20 * 60 def checkConfig(self, name, build_wait_timeout=0, missing_timeout=DEFAULT_MISSING_TIMEOUT, **kwargs): super().checkConfig(name, **kwargs) self.state = States.STOPPED self.latent_workers = [] @defer.inlineCallbacks def reconfigService(self, name, build_wait_timeout=0, missing_timeout=DEFAULT_MISSING_TIMEOUT, **kwargs): yield super().reconfigService(name, **kwargs) self.build_wait_timeout = build_wait_timeout self.missing_timeout = missing_timeout for worker in self.workers: if not interfaces.ILatentWorker.providedBy(worker): raise Exception('Worker is not latent {}'.format( worker.name)) self.state = States.STOPPED self._start_notifier = Notifier() self._stop_notifier = Notifier() self._build_wait_timer = None self._missing_timer = None def start_machine(self): # Responsible for starting the machine. The function should return a # deferred which should result in True if the startup has been # successful, or False otherwise. raise NotImplementedError def stop_machine(self): # Responsible for shutting down the machine raise NotImplementedError @defer.inlineCallbacks def substantiate(self, starting_worker): if self.state == States.STOPPING: # wait until stop action finishes yield self._stop_notifier.wait() if self.state == States.STARTED: # may happen if we waited for stop to complete and in the mean # time the machine was successfully woken. 
return True # wait for already proceeding startup to finish, if any if self.state == States.STARTING: return (yield self._start_notifier.wait()) self.state = States.STARTING # substantiate all workers that will start if we wake the machine. We # do so before waking the machine to guarantee that we're already # waiting for worker connection as waking may take time confirming # machine came online. We'll call substantiate on the worker that # invoked this function again, but that's okay as that function is # reentrant. Note that we substantiate without gathering results # because the original call to substantiate will get them anyway and # we don't want to be slowed down by other workers on the machine. for worker in self.workers: if worker.starts_without_substantiate: worker.substantiate(None, None) # Start the machine. We don't need to wait for any workers to actually # come online as that's handled in their substantiate() functions. try: ret = yield self.start_machine() except Exception as e: log.err(e, 'while starting latent machine {0}'.format(self.name)) ret = False if not ret: yield defer.DeferredList([worker.insubstantiate() for worker in self.workers], consumeErrors=True) else: self._setMissingTimer() self.state = States.STARTED if ret else States.STOPPED self._start_notifier.notify(ret) return ret @defer.inlineCallbacks def _stop(self): if any(worker.building for worker in self.workers) or \ self.state == States.STARTING: return None if self.state == States.STOPPING: yield self._stop_notifier.wait() return None self.state = States.STOPPING # wait until workers insubstantiate, then stop yield defer.DeferredList([worker.insubstantiate() for worker in self.workers], consumeErrors=True) try: yield self.stop_machine() except Exception as e: log.err(e, 'while stopping latent machine {0}'.format( self.name)) self.state = States.STOPPED self._stop_notifier.notify(None) return None def notifyBuildStarted(self): self._clearMissingTimer() def 
notifyBuildFinished(self): if any(worker.building for worker in self.workers): self._clearBuildWaitTimer() else: self._setBuildWaitTimer() def _clearMissingTimer(self): if self._missing_timer is not None: if self._missing_timer.active(): self._missing_timer.cancel() self._missing_timer = None def _setMissingTimer(self): self._clearMissingTimer() self._missing_timer = self.master.reactor.callLater( self.missing_timeout, self._stop) def _clearBuildWaitTimer(self): if self._build_wait_timer is not None: if self._build_wait_timer.active(): self._build_wait_timer.cancel() self._build_wait_timer = None def _setBuildWaitTimer(self): self._clearBuildWaitTimer() self._build_wait_timer = self.master.reactor.callLater( self.build_wait_timeout, self._stop) def __repr__(self): return "".format(self.name, id(self)) buildbot-3.4.0/master/buildbot/machine/manager.py000066400000000000000000000023011413250514000220000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members from buildbot.util import service from buildbot.worker.manager import WorkerManager class MachineManager(service.BuildbotServiceManager): reconfig_priority = WorkerManager.reconfig_priority + 1 name = 'MachineManager' managed_services_name = 'machines' config_attr = 'machines' @property def machines(self): return self.namedServices def getMachineByName(self, name): if name in self.machines: return self.machines[name] return None buildbot-3.4.0/master/buildbot/manhole.py000066400000000000000000000250571413250514000204220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import base64 import binascii import os import types from twisted.application import strports from twisted.conch import manhole from twisted.conch import telnet from twisted.conch.insults import insults from twisted.cred import checkers from twisted.cred import portal from twisted.internet import protocol from twisted.python import log from zope.interface import implementer # requires Twisted-2.0 or later from buildbot import config from buildbot.util import ComparableMixin from buildbot.util import service from buildbot.util import unicode2bytes try: from twisted.conch import checkers as conchc, manhole_ssh from twisted.conch.openssh_compat.factory import OpenSSHFactory _hush_pyflakes = [manhole_ssh, conchc, OpenSSHFactory] del _hush_pyflakes except ImportError: manhole_ssh = None conchc = None OpenSSHFactory = None # makeTelnetProtocol and _TelnetRealm are for the TelnetManhole class makeTelnetProtocol: # this curries the 'portal' argument into a later call to # TelnetTransport() def __init__(self, portal): self.portal = portal def __call__(self): auth = telnet.AuthenticatingTelnetProtocol return telnet.TelnetTransport(auth, self.portal) @implementer(portal.IRealm) class _TelnetRealm: def __init__(self, namespace_maker): self.namespace_maker = namespace_maker def requestAvatar(self, avatarId, *interfaces): if telnet.ITelnetProtocol in interfaces: namespace = self.namespace_maker() p = telnet.TelnetBootstrapProtocol(insults.ServerProtocol, manhole.ColoredManhole, namespace) return (telnet.ITelnetProtocol, p, lambda: None) raise NotImplementedError() class chainedProtocolFactory: # this curries the 'namespace' argument into a later call to # chainedProtocolFactory() def __init__(self, namespace): self.namespace = namespace def __call__(self): return insults.ServerProtocol(manhole.ColoredManhole, self.namespace) if conchc: class AuthorizedKeysChecker(conchc.SSHPublicKeyDatabase): """Accept connections using SSH keys from a given 
file. SSHPublicKeyDatabase takes the username that the prospective client has requested and attempts to get a ~/.ssh/authorized_keys file for that username. This requires root access, so it isn't as useful as you'd like. Instead, this subclass looks for keys in a single file, given as an argument. This file is typically kept in the buildmaster's basedir. The file should have 'ssh-dss ....' lines in it, just like authorized_keys. """ def __init__(self, authorized_keys_file): self.authorized_keys_file = os.path.expanduser( authorized_keys_file) def checkKey(self, credentials): with open(self.authorized_keys_file, "rb") as f: for l in f.readlines(): l2 = l.split() if len(l2) < 2: continue try: if base64.decodebytes(l2[1]) == credentials.blob: return 1 except binascii.Error: continue return 0 class _BaseManhole(service.AsyncMultiService): """This provides remote access to a python interpreter (a read/exec/print loop) embedded in the buildmaster via an internal SSH server. This allows detailed inspection of the buildmaster state. It is of most use to buildbot developers. Connect to this by running an ssh client. """ def __init__(self, port, checker, ssh_hostkey_dir=None): """ @type port: string or int @param port: what port should the Manhole listen on? This is a strports specification string, like 'tcp:12345' or 'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a simple tcp port. @type checker: an object providing the L{twisted.cred.checkers.ICredentialsChecker} interface @param checker: if provided, this checker is used to authenticate the client instead of using the username/password scheme. You must either provide a username/password or a Checker. 
Some useful values are:: import twisted.cred.checkers as credc import twisted.conch.checkers as conchc c = credc.AllowAnonymousAccess # completely open c = credc.FilePasswordDB(passwd_filename) # file of name:passwd c = conchc.UNIXPasswordDatabase # getpwnam() (probably /etc/passwd) @type ssh_hostkey_dir: str @param ssh_hostkey_dir: directory which contains ssh host keys for this server """ # unfortunately, these don't work unless we're running as root # c = credc.PluggableAuthenticationModulesChecker: PAM # c = conchc.SSHPublicKeyDatabase() # ~/.ssh/authorized_keys # and I can't get UNIXPasswordDatabase to work super().__init__() if isinstance(port, int): port = "tcp:%d" % port self.port = port # for comparison later self.checker = checker # to maybe compare later def makeNamespace(): master = self.master namespace = { 'master': master, 'show': show, } return namespace def makeProtocol(): namespace = makeNamespace() p = insults.ServerProtocol(manhole.ColoredManhole, namespace) return p self.ssh_hostkey_dir = ssh_hostkey_dir if self.ssh_hostkey_dir: self.using_ssh = True if not self.ssh_hostkey_dir: raise ValueError("Most specify a value for ssh_hostkey_dir") r = manhole_ssh.TerminalRealm() r.chainedProtocolFactory = makeProtocol p = portal.Portal(r, [self.checker]) f = manhole_ssh.ConchFactory(p) openSSHFactory = OpenSSHFactory() openSSHFactory.dataRoot = self.ssh_hostkey_dir openSSHFactory.dataModuliRoot = self.ssh_hostkey_dir f.publicKeys = openSSHFactory.getPublicKeys() f.privateKeys = openSSHFactory.getPrivateKeys() else: self.using_ssh = False r = _TelnetRealm(makeNamespace) p = portal.Portal(r, [self.checker]) f = protocol.ServerFactory() f.protocol = makeTelnetProtocol(p) s = strports.service(self.port, f) s.setServiceParent(self) def startService(self): if self.using_ssh: via = "via SSH" else: via = "via telnet" log.msg("Manhole listening {} on port {}".format(via, self.port)) return super().startService() class TelnetManhole(_BaseManhole, 
ComparableMixin): compare_attrs = ("port", "username", "password") def __init__(self, port, username, password): self.username = username self.password = password c = checkers.InMemoryUsernamePasswordDatabaseDontUse() c.addUser(unicode2bytes(username), unicode2bytes(password)) super().__init__(port, c) class PasswordManhole(_BaseManhole, ComparableMixin): compare_attrs = ("port", "username", "password", "ssh_hostkey_dir") def __init__(self, port, username, password, ssh_hostkey_dir): if not manhole_ssh: config.error("cryptography required for ssh mahole.") self.username = username self.password = password self.ssh_hostkey_dir = ssh_hostkey_dir c = checkers.InMemoryUsernamePasswordDatabaseDontUse() c.addUser(unicode2bytes(username), unicode2bytes(password)) super().__init__(port, c, ssh_hostkey_dir) class AuthorizedKeysManhole(_BaseManhole, ComparableMixin): compare_attrs = ("port", "keyfile", "ssh_hostkey_dir") def __init__(self, port, keyfile, ssh_hostkey_dir): if not manhole_ssh: config.error("cryptography required for ssh mahole.") # TODO: expanduser this, and make it relative to the buildmaster's # basedir self.keyfile = keyfile c = AuthorizedKeysChecker(keyfile) super().__init__(port, c, ssh_hostkey_dir) class ArbitraryCheckerManhole(_BaseManhole, ComparableMixin): """This Manhole accepts ssh connections, but uses an arbitrary user-supplied 'checker' object to perform authentication.""" compare_attrs = ("port", "checker") def __init__(self, port, checker): """ @type port: string or int @param port: what port should the Manhole listen on? This is a strports specification string, like 'tcp:12345' or 'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a simple tcp port. 
@param checker: an instance of a twisted.cred 'checker' which will perform authentication """ if not manhole_ssh: config.error("cryptography required for ssh mahole.") super().__init__(port, checker) # utility functions for the manhole def show(x): """Display the data attributes of an object in a readable format""" print("data attributes of %r" % (x,)) names = dir(x) maxlen = max([0] + [len(n) for n in names]) for k in names: v = getattr(x, k) if isinstance(v, types.MethodType): continue if k[:2] == '__' and k[-2:] == '__': continue if isinstance(v, str): if len(v) > 80 - maxlen - 5: v = repr(v[:80 - maxlen - 5]) + "..." elif isinstance(v, (int, type(None))): v = str(v) elif isinstance(v, (list, tuple, dict)): v = "{} ({} elements)".format(v, len(v)) else: v = str(type(v)) print("{} : {}".format(k.ljust(maxlen), v)) return x buildbot-3.4.0/master/buildbot/master.py000066400000000000000000000434051413250514000202670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import signal import socket from twisted.application import internet from twisted.internet import defer from twisted.internet import task from twisted.internet import threads from twisted.python import failure from twisted.python import log import buildbot import buildbot.pbmanager from buildbot import config from buildbot import monkeypatches from buildbot.buildbot_net_usage_data import sendBuildbotNetUsageData from buildbot.changes.manager import ChangeManager from buildbot.data import connector as dataconnector from buildbot.data import graphql from buildbot.db import connector as dbconnector from buildbot.db import exceptions from buildbot.machine.manager import MachineManager from buildbot.mq import connector as mqconnector from buildbot.process import cache from buildbot.process import debug from buildbot.process import metrics from buildbot.process.botmaster import BotMaster from buildbot.process.users.manager import UserManagerManager from buildbot.schedulers.manager import SchedulerManager from buildbot.secrets.manager import SecretManager from buildbot.util import check_functional_environment from buildbot.util import service from buildbot.util.eventual import eventually from buildbot.wamp import connector as wampconnector from buildbot.worker import manager as workermanager from buildbot.www import service as wwwservice class LogRotation: def __init__(self): self.rotateLength = 1 * 1000 * 1000 self.maxRotatedFiles = 10 class BuildMaster(service.ReconfigurableServiceMixin, service.MasterService): # multiplier on RECLAIM_BUILD_INTERVAL at which a build is considered # unclaimed; this should be at least 2 to avoid false positives UNCLAIMED_BUILD_FACTOR = 6 def __init__(self, basedir, configFileName=None, umask=None, reactor=None, config_loader=None): super().__init__() if reactor is None: from twisted.internet import reactor self.reactor = reactor self.setName("buildmaster") self.umask = umask self.basedir = 
basedir if basedir is not None: # None is used in tests assert os.path.isdir(self.basedir) if config_loader is not None and configFileName is not None: raise config.ConfigErrors([ "Can't specify both `config_loader` and `configFilename`.", ]) if config_loader is None: if configFileName is None: configFileName = 'master.cfg' config_loader = config.FileLoader(self.basedir, configFileName) self.config_loader = config_loader self.configFileName = configFileName # flag so we don't try to do fancy things before the master is ready self._master_initialized = False self.initLock = defer.DeferredLock() # set up child services self._services_d = self.create_child_services() # db configured values self.configured_db_url = None # configuration / reconfiguration handling self.config = config.MasterConfig() self.config_version = 0 # increased by one on each reconfig self.reconfig_active = False self.reconfig_requested = False self.reconfig_notifier = None # this stores parameters used in the tac file, and is accessed by the # WebStatus to duplicate those values. self.log_rotation = LogRotation() # local cache for this master's object ID self._object_id = None # Check environment is sensible check_functional_environment(self.config) # figure out local hostname try: self.hostname = os.uname()[1] # only on unix except AttributeError: self.hostname = socket.getfqdn() # public attributes self.name = ("{}:{}".format(self.hostname, os.path.abspath(self.basedir or '.'))) if isinstance(self.name, bytes): self.name = self.name.decode('ascii', 'replace') self.masterid = None @defer.inlineCallbacks def create_child_services(self): # note that these are order-dependent. If you get the order wrong, # you'll know it, as the master will fail to start. 
self.metrics = metrics.MetricLogObserver() yield self.metrics.setServiceParent(self) self.caches = cache.CacheManager() yield self.caches.setServiceParent(self) self.pbmanager = buildbot.pbmanager.PBManager() yield self.pbmanager.setServiceParent(self) self.workers = workermanager.WorkerManager(self) yield self.workers.setServiceParent(self) self.change_svc = ChangeManager() yield self.change_svc.setServiceParent(self) self.botmaster = BotMaster() yield self.botmaster.setServiceParent(self) self.machine_manager = MachineManager() yield self.machine_manager.setServiceParent(self) self.scheduler_manager = SchedulerManager() yield self.scheduler_manager.setServiceParent(self) self.user_manager = UserManagerManager(self) yield self.user_manager.setServiceParent(self) self.db = dbconnector.DBConnector(self.basedir) yield self.db.setServiceParent(self) self.wamp = wampconnector.WampConnector() yield self.wamp.setServiceParent(self) self.mq = mqconnector.MQConnector() yield self.mq.setServiceParent(self) self.data = dataconnector.DataConnector() yield self.data.setServiceParent(self) self.graphql = graphql.GraphQLConnector() yield self.graphql.setServiceParent(self) self.www = wwwservice.WWWService() yield self.www.setServiceParent(self) self.debug = debug.DebugServices() yield self.debug.setServiceParent(self) self.secrets_manager = SecretManager() yield self.secrets_manager.setServiceParent(self) self.secrets_manager.reconfig_priority = 2000 self.service_manager = service.BuildbotServiceManager() yield self.service_manager.setServiceParent(self) self.service_manager.reconfig_priority = 1000 self.masterHouskeepingTimer = 0 @defer.inlineCallbacks def heartbeat(): if self.masterid is not None: yield self.data.updates.masterActive(name=self.name, masterid=self.masterid) yield self.data.updates.expireMasters() self.masterHeartbeatService = internet.TimerService(60, heartbeat) self.masterHeartbeatService.clock = self.reactor # we do setServiceParent only when the master is 
configured # master should advertise itself only at that time # setup and reconfig handling _already_started = False @defer.inlineCallbacks def startService(self): assert not self._already_started, "can only start the master once" self._already_started = True # ensure child services have been set up. Normally we would do this in serServiceParent, # but buildmaster is used in contexts we can't control. if self._services_d is not None: yield self._services_d self._services_d = None log.msg("Starting BuildMaster -- buildbot.version: {}".format(buildbot.version)) # Set umask if self.umask is not None: os.umask(self.umask) # first, apply all monkeypatches monkeypatches.patch_all() # we want to wait until the reactor is running, so we can call # reactor.stop() for fatal errors d = defer.Deferred() self.reactor.callWhenRunning(d.callback, None) yield d startup_succeed = False try: yield self.initLock.acquire() # load the configuration file, treating errors as fatal try: # run the master.cfg in thread, so that it can use blocking # code self.config = yield threads.deferToThreadPool( self.reactor, self.reactor.getThreadPool(), self.config_loader.loadConfig) except config.ConfigErrors as e: log.msg("Configuration Errors:") for msg in e.errors: log.msg(" " + msg) log.msg("Halting master.") self.reactor.stop() return except Exception: log.err(failure.Failure(), 'while starting BuildMaster') self.reactor.stop() return # set up services that need access to the config before everything # else gets told to reconfig try: yield self.db.setup() except exceptions.DatabaseNotReadyError: # (message was already logged) self.reactor.stop() return yield self.mq.setup() # the buildbot scripts send the SIGHUP signal to reconfig master if hasattr(signal, "SIGHUP"): def sighup(*args): eventually(self.reconfig) signal.signal(signal.SIGHUP, sighup) # the buildbot scripts send the SIGUSR1 signal to stop master if hasattr(signal, "SIGUSR1"): def sigusr1(*args): 
eventually(self.botmaster.cleanShutdown) signal.signal(signal.SIGUSR1, sigusr1) # get the masterid so other services can use it in # startup/reconfig. This goes directly to the DB since the data # API isn't initialized yet, and anyway, this method is aware of # the DB API since it just called its setup function self.masterid = yield self.db.masters.findMasterId( name=self.name) # mark this master as stopped, in case it crashed before yield self.data.updates.masterStopped(name=self.name, masterid=self.masterid) # call the parent method yield super().startService() # We make sure the housekeeping is done before configuring in order to cleanup # any remaining claimed schedulers or change sources from zombie # masters yield self.data.updates.expireMasters(forceHouseKeeping=True) # give all services a chance to load the new configuration, rather # than the base configuration yield self.reconfigServiceWithBuildbotConfig(self.config) # Mark the master as active now that mq is running yield self.data.updates.masterActive(name=self.name, masterid=self.masterid) # Start the heartbeat timer yield self.masterHeartbeatService.setServiceParent(self) # send the statistics to buildbot.net, without waiting self.sendBuildbotNetUsageData() startup_succeed = True except Exception: f = failure.Failure() log.err(f, 'while starting BuildMaster') self.reactor.stop() finally: if startup_succeed: log.msg("BuildMaster is running") else: log.msg("BuildMaster startup failed") yield self.initLock.release() self._master_initialized = True def sendBuildbotNetUsageData(self): if "TRIAL_PYTHONPATH" in os.environ and self.config.buildbotNetUsageData is not None: raise RuntimeError( "Should not enable buildbotNetUsageData in trial tests!") sendBuildbotNetUsageData(self) @defer.inlineCallbacks def stopService(self): try: yield self.initLock.acquire() if self.running: yield self.botmaster.cleanShutdown(quickMode=True, stopReactor=False) # Mark master as stopped only after all builds are shut down. 
Note that masterStopped # would forcibly mark all related build requests, builds, steps, logs, etc. as # complete, so this may make state inconsistent if done while the builds are still # running. if self.masterid is not None: yield self.data.updates.masterStopped( name=self.name, masterid=self.masterid) if self.running: yield super().stopService() log.msg("BuildMaster is stopped") self._master_initialized = False finally: yield self.initLock.release() @defer.inlineCallbacks def reconfig(self): # this method wraps doConfig, ensuring it is only ever called once at # a time, and alerting the user if the reconfig takes too long if self.reconfig_active: log.msg("reconfig already active; will reconfig again after") self.reconfig_requested = True return self.reconfig_active = self.reactor.seconds() metrics.MetricCountEvent.log("loaded_config", 1) # notify every 10 seconds that the reconfig is still going on, the duration of reconfigs is # longer on larger installations and may take a while. self.reconfig_notifier = task.LoopingCall( lambda: log.msg("reconfig is ongoing for {:.3f} s".format(self.reactor.seconds() - self.reconfig_active))) self.reconfig_notifier.start(10, now=False) timer = metrics.Timer("BuildMaster.reconfig") timer.start() try: yield self.doReconfig() except Exception as e: log.err(e, 'while reconfiguring') finally: timer.stop() self.reconfig_notifier.stop() self.reconfig_notifier = None self.reconfig_active = False if self.reconfig_requested: self.reconfig_requested = False self.reconfig() @defer.inlineCallbacks def doReconfig(self): log.msg("beginning configuration update") time_started = self.reactor.seconds() changes_made = False failed = False try: yield self.initLock.acquire() # Run the master.cfg in thread, so that it can use blocking code new_config = yield threads.deferToThreadPool( self.reactor, self.reactor.getThreadPool(), self.config_loader.loadConfig) changes_made = True self.config_version += 1 self.config = new_config yield 
self.reconfigServiceWithBuildbotConfig(new_config) except config.ConfigErrors as e: for msg in e.errors: log.msg(msg) failed = True except Exception: log.err(failure.Failure(), 'during reconfig:') failed = True finally: yield self.initLock.release() if failed: if changes_made: msg = "WARNING: configuration update partially applied; master may malfunction" else: msg = "configuration update aborted without making any changes" else: msg = "configuration update complete" log.msg("{} (took {:.3f} seconds)".format(msg, self.reactor.seconds() - time_started)) def reconfigServiceWithBuildbotConfig(self, new_config): if self.configured_db_url is None: self.configured_db_url = new_config.db['db_url'] elif (self.configured_db_url != new_config.db['db_url']): config.error( "Cannot change c['db']['db_url'] after the master has started", ) if self.config.mq['type'] != new_config.mq['type']: raise config.ConfigErrors([ "Cannot change c['mq']['type'] after the master has started", ]) return super().reconfigServiceWithBuildbotConfig(new_config) # informational methods def allSchedulers(self): return list(self.scheduler_manager) # state maintenance (private) def getObjectId(self): """ Return the object id for this master, for associating state with the master. 
@returns: ID, via Deferred """ # try to get the cached value if self._object_id is not None: return defer.succeed(self._object_id) # failing that, get it from the DB; multiple calls to this function # at the same time will not hurt d = self.db.state.getObjectId(self.name, "buildbot.master.BuildMaster") @d.addCallback def keep(id): self._object_id = id return id return d def _getState(self, name, default=None): "private wrapper around C{self.db.state.getState}" d = self.getObjectId() @d.addCallback def get(objectid): return self.db.state.getState(objectid, name, default) return d def _setState(self, name, value): "private wrapper around C{self.db.state.setState}" d = self.getObjectId() @d.addCallback def set(objectid): return self.db.state.setState(objectid, name, value) return d buildbot-3.4.0/master/buildbot/monkeypatches/000077500000000000000000000000001413250514000212665ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/monkeypatches/__init__.py000066400000000000000000000072751413250514000234120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import unittest from builtins import int from twisted.python import util def onlyOnce(fn): 'Set up FN to only run once within an interpreter instance' def wrap(*args, **kwargs): if hasattr(fn, 'called'): return None fn.called = 1 return fn(*args, **kwargs) util.mergeFunctionMetadata(fn, wrap) return wrap # NOTE: all of these patches test for applicability *before* importing the # patch module. This will help cut down on unnecessary imports where the # patches are not needed, and also avoid problems with patches importing # private things in external libraries that no longer exist. @onlyOnce def patch_testcase_timeout(): # any test that should take more than 5 second should be annotated so. unittest.TestCase.timeout = 5 # but we know that the DB tests are very slow, so we increase a bit that value for # real database tests if os.environ.get("BUILDBOT_TEST_DB_URL", None) is not None: unittest.TestCase.timeout = 120 @onlyOnce def patch_servicechecks(): from buildbot.monkeypatches import servicechecks servicechecks.patch() @onlyOnce def patch_mysqlclient_warnings(): try: from _mysql_exceptions import Warning # MySQLdb.compat is only present in mysqlclient import MySQLdb.compat # noqa pylint: disable=unused-import,import-outside-toplevel except ImportError: return # workaround for https://twistedmatrix.com/trac/ticket/9005 # mysqlclient is easier to patch than twisted # we swap _mysql_exceptions.Warning arguments so that the code is in second place def patched_init(self, *args): if isinstance(args[0], int): super().__init__("{} {}".format(args[0], args[1])) else: super().__init__(*args) Warning.__init__ = patched_init @onlyOnce def patch_decorators(): from buildbot.monkeypatches import decorators decorators.patch() @onlyOnce def patch_config_for_unit_tests(): from buildbot import config # by default, buildbot.config warns about not configured buildbotNetUsageData. 
# its important for users to not leak information, but unneeded and painful for tests config._in_unit_tests = True @onlyOnce def patch_unittest_testcase(): from twisted.trial.unittest import TestCase # In Python 3.2, # - assertRaisesRegexp() was renamed to assertRaisesRegex(), # and assertRaisesRegexp() was deprecated. # - assertRegexpMatches() was renamed to assertRegex() # and assertRegexpMatches() was deprecated. if not getattr(TestCase, "assertRaisesRegex", None): TestCase.assertRaisesRegex = TestCase.assertRaisesRegexp if not getattr(TestCase, "assertRegex", None): TestCase.assertRegex = TestCase.assertRegexpMatches def patch_all(for_tests=False): if for_tests: patch_servicechecks() patch_testcase_timeout() patch_decorators() patch_mysqlclient_warnings() patch_config_for_unit_tests() patch_unittest_testcase() buildbot-3.4.0/master/buildbot/monkeypatches/decorators.py000066400000000000000000000021231413250514000240030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import util def patch_noargs_decorator(decorator): def new_decorator(func): wrapper = decorator(func) wrapper.__wrapped__ = func return wrapper util.mergeFunctionMetadata(decorator, new_decorator) return new_decorator def patch(): defer.inlineCallbacks = patch_noargs_decorator(defer.inlineCallbacks) buildbot-3.4.0/master/buildbot/monkeypatches/servicechecks.py000066400000000000000000000024571413250514000244710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members def patch(): """ Patch startService and stopService so that they check the previous state first. 
(used for debugging only) """ from twisted.application.service import Service old_startService = Service.startService old_stopService = Service.stopService def startService(self): assert not self.running, "%r already running" % (self,) return old_startService(self) def stopService(self): assert self.running, "%r already stopped" % (self,) return old_stopService(self) Service.startService = startService Service.stopService = stopService buildbot-3.4.0/master/buildbot/mq/000077500000000000000000000000001413250514000170315ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/mq/__init__.py000066400000000000000000000000001413250514000211300ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/mq/base.py000066400000000000000000000047721413250514000203270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from twisted.python import log from buildbot.util import deferwaiter from buildbot.util import service class MQBase(service.AsyncService): name = 'mq-implementation' def __init__(self): super().__init__() self._deferwaiter = deferwaiter.DeferWaiter() @defer.inlineCallbacks def stopService(self): yield self._deferwaiter.wait() yield super().stopService() @defer.inlineCallbacks def waitUntilEvent(self, filter, check_callback): d = defer.Deferred() buildCompleteConsumer = yield self.startConsuming( lambda key, value: d.callback((key, value)), filter) check = yield check_callback() # we only wait if the check callback return true if not check: res = yield d else: res = None yield buildCompleteConsumer.stopConsuming() return res def invokeQref(self, qref, routingKey, data): self._deferwaiter.add(qref.invoke(routingKey, data)) class QueueRef: __slots__ = ['callback'] def __init__(self, callback): self.callback = callback def invoke(self, routing_key, data): # Potentially returns a Deferred if not self.callback: return None try: x = self.callback(routing_key, data) except Exception: log.err(failure.Failure(), 'while invoking %r' % (self.callback,)) return None if isinstance(x, defer.Deferred): x.addErrback(log.err, 'while invoking %r' % (self.callback,)) return x def stopConsuming(self): # This method may return a Deferred. # subclasses should set self.callback to None in this method. raise NotImplementedError buildbot-3.4.0/master/buildbot/mq/connector.py000066400000000000000000000054331413250514000214020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python.reflect import namedObject from buildbot.util import service class MQConnector(service.ReconfigurableServiceMixin, service.AsyncMultiService): classes = { 'simple': { 'class': "buildbot.mq.simple.SimpleMQ", 'keys': set(['debug']), }, 'wamp': { 'class': "buildbot.mq.wamp.WampMQ", 'keys': set(["router_url", "realm", "wamp_debug_level"]), }, } name = 'mq' def __init__(self): super().__init__() self.impl = None # set in setup self.impl_type = None # set in setup @defer.inlineCallbacks def setup(self): assert not self.impl # imports are done locally so that we don't try to import # implementation-specific modules unless they're required. 
typ = self.master.config.mq['type'] assert typ in self.classes # this is checked by MasterConfig self.impl_type = typ cls = namedObject(self.classes[typ]['class']) self.impl = cls() # set up the impl as a child service yield self.impl.setServiceParent(self) # configure it (early) self.impl.reconfigServiceWithBuildbotConfig(self.master.config) # copy the methods onto this object for ease of access self.produce = self.impl.produce self.startConsuming = self.impl.startConsuming self.waitUntilEvent = self.impl.waitUntilEvent def reconfigServiceWithBuildbotConfig(self, new_config): # double-check -- the master ensures this in config checks assert self.impl_type == new_config.mq['type'] return super().reconfigServiceWithBuildbotConfig(new_config) def produce(self, routing_key, data): # will be patched after configuration to point to the running # implementation's method raise NotImplementedError def startConsuming(self, callback, filter, persistent_name=None): # will be patched after configuration to point to the running # implementation's method raise NotImplementedError buildbot-3.4.0/master/buildbot/mq/simple.py000066400000000000000000000063641413250514000207050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import pprint from twisted.internet import defer from twisted.python import log from buildbot.mq import base from buildbot.util import service from buildbot.util import tuplematch class SimpleMQ(service.ReconfigurableServiceMixin, base.MQBase): def __init__(self): super().__init__() self.qrefs = [] self.persistent_qrefs = {} self.debug = False def reconfigServiceWithBuildbotConfig(self, new_config): self.debug = new_config.mq.get('debug', False) return super().reconfigServiceWithBuildbotConfig(new_config) def produce(self, routingKey, data): if self.debug: log.msg("MSG: {}\n{}".format(routingKey, pprint.pformat(data))) for qref in self.qrefs: if tuplematch.matchTuple(routingKey, qref.filter): self.invokeQref(qref, routingKey, data) def startConsuming(self, callback, filter, persistent_name=None): if any(not isinstance(k, str) and k is not None for k in filter): raise AssertionError("{} is not a filter".format(filter)) if persistent_name: if persistent_name in self.persistent_qrefs: qref = self.persistent_qrefs[persistent_name] qref.startConsuming(callback) else: qref = PersistentQueueRef(self, callback, filter) self.qrefs.append(qref) self.persistent_qrefs[persistent_name] = qref else: qref = QueueRef(self, callback, filter) self.qrefs.append(qref) return defer.succeed(qref) class QueueRef(base.QueueRef): __slots__ = ['mq', 'filter'] def __init__(self, mq, callback, filter): super().__init__(callback) self.mq = mq self.filter = filter def stopConsuming(self): self.callback = None try: self.mq.qrefs.remove(self) except ValueError: pass class PersistentQueueRef(QueueRef): __slots__ = ['active', 'queue'] def __init__(self, mq, callback, filter): super().__init__(mq, callback, filter) self.queue = [] def startConsuming(self, callback): self.callback = callback self.active = True # invoke for every message that was missed queue, self.queue = self.queue, [] for routingKey, data in queue: self.invoke(routingKey, data) def 
stopConsuming(self): self.callback = self.addToQueue self.active = False def addToQueue(self, routingKey, data): self.queue.append((routingKey, data)) buildbot-3.4.0/master/buildbot/mq/wamp.py000066400000000000000000000101211413250514000203420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json from autobahn.wamp.exception import TransportLost from autobahn.wamp.types import PublishOptions from autobahn.wamp.types import SubscribeOptions from twisted.internet import defer from twisted.python import log from buildbot.mq import base from buildbot.util import service from buildbot.util import toJson class WampMQ(service.ReconfigurableServiceMixin, base.MQBase): NAMESPACE = "org.buildbot.mq" def produce(self, routingKey, data): d = self._produce(routingKey, data) d.addErrback( log.err, "Problem while producing message on topic " + repr(routingKey)) @classmethod def messageTopic(cls, routingKey): def ifNone(v, default): return default if v is None else v # replace None values by "" in routing key routingKey = [ifNone(key, "") for key in routingKey] # then join them with "dot", and add the prefix return cls.NAMESPACE + "." 
+ ".".join(routingKey) @classmethod def routingKeyFromMessageTopic(cls, topic): # just split the topic, and remove the NAMESPACE prefix return tuple(topic[len(WampMQ.NAMESPACE) + 1:].split(".")) def _produce(self, routingKey, data): _data = json.loads(json.dumps(data, default=toJson)) options = PublishOptions(exclude_me=False) return self.master.wamp.publish(self.messageTopic(routingKey), _data, options=options) def startConsuming(self, callback, _filter, persistent_name=None): if persistent_name is not None: log.err('wampmq: persistent queues are not persisted: {} {}'.format(persistent_name, _filter)) qr = QueueRef(self, callback) self._startConsuming(qr, callback, _filter) return defer.succeed(qr) def _startConsuming(self, qr, callback, _filter, persistent_name=None): return qr.subscribe(self.master.wamp, self, _filter) class QueueRef(base.QueueRef): def __init__(self, mq, callback): super().__init__(callback) self.unreg = None self.mq = mq @defer.inlineCallbacks def subscribe(self, connector_service, wamp_service, _filter): self.filter = _filter self.emulated = False options = dict(details_arg=str('details')) if None in _filter: options["match"] = "wildcard" options = SubscribeOptions(**options) _filter = WampMQ.messageTopic(_filter) self.unreg = yield connector_service.subscribe(self.wampInvoke, _filter, options=options) if self.callback is None: yield self.stopConsuming() def wampInvoke(self, msg, details): if details.topic is not None: # in the case of a wildcard, wamp router sends the topic topic = WampMQ.routingKeyFromMessageTopic(details.topic) else: # in the case of an exact match, then we can use our own topic topic = self.filter self.mq.invokeQref(self, topic, msg) @defer.inlineCallbacks def stopConsuming(self): self.callback = None if self.unreg is not None: unreg = self.unreg self.unreg = None try: yield unreg.unsubscribe() except TransportLost: pass except Exception as e: log.err(e, 'When unsubscribing MQ connection ' + str(unreg)) 
buildbot-3.4.0/master/buildbot/pbmanager.py000066400000000000000000000157751413250514000207410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.application import strports from twisted.cred import checkers from twisted.cred import credentials from twisted.cred import error from twisted.cred import portal from twisted.internet import defer from twisted.python import log from twisted.spread import pb from zope.interface import implementer from buildbot.process.properties import Properties from buildbot.util import bytes2unicode from buildbot.util import service from buildbot.util import unicode2bytes from buildbot.util.eventual import eventually debug = False class PBManager(service.AsyncMultiService): """ A centralized manager for PB ports and authentication on them. Allows various pieces of code to request a (port, username) combo, along with a password and a perspective factory. """ def __init__(self): super().__init__() self.setName('pbmanager') self.dispatchers = {} @defer.inlineCallbacks def register(self, portstr, username, password, pfactory): """ Register a perspective factory PFACTORY to be executed when a PB connection arrives on PORTSTR with USERNAME/PASSWORD. Returns a Registration object which can be used to unregister later. 
""" # do some basic normalization of portstrs if isinstance(portstr, type(0)) or ':' not in portstr: portstr = "tcp:{}".format(portstr) reg = Registration(self, portstr, username) if portstr not in self.dispatchers: disp = self.dispatchers[portstr] = Dispatcher(portstr) yield disp.setServiceParent(self) else: disp = self.dispatchers[portstr] disp.register(username, password, pfactory) return reg @defer.inlineCallbacks def _unregister(self, registration): disp = self.dispatchers[registration.portstr] disp.unregister(registration.username) registration.username = None if not disp.users: del self.dispatchers[registration.portstr] yield disp.disownServiceParent() class Registration: def __init__(self, pbmanager, portstr, username): self.portstr = portstr "portstr this registration is active on" self.username = username "username of this registration" self.pbmanager = pbmanager def __repr__(self): return "".format(self.username, self.portstr) def unregister(self): """ Unregister this registration, removing the username from the port, and closing the port if there are no more users left. Returns a Deferred. """ return self.pbmanager._unregister(self) def getPort(self): """ Helper method for testing; returns the TCP port used for this registration, even if it was specified as 0 and thus allocated by the OS. """ disp = self.pbmanager.dispatchers[self.portstr] return disp.port.getHost().port @implementer(portal.IRealm, checkers.ICredentialsChecker) class Dispatcher(service.AsyncService): credentialInterfaces = [credentials.IUsernamePassword, credentials.IUsernameHashedPassword] def __init__(self, portstr): self.portstr = portstr self.users = {} # there's lots of stuff to set up for a PB connection! 
self.portal = portal.Portal(self) self.portal.registerChecker(self) self.serverFactory = pb.PBServerFactory(self.portal) self.serverFactory.unsafeTracebacks = True self.port = None def __repr__(self): return "".format(", ".join(list(self.users)), self.portstr) def startService(self): assert not self.port self.port = strports.listen(self.portstr, self.serverFactory) return super().startService() @defer.inlineCallbacks def stopService(self): # stop listening on the port when shut down assert self.port port, self.port = self.port, None yield port.stopListening() yield super().stopService() def register(self, username, password, pfactory): if debug: log.msg("registering username '{}' on pb port {}: {}".format(username, self.portstr, pfactory)) if username in self.users: raise KeyError("username '{}' is already registered on PB port {}".format(username, self.portstr)) self.users[username] = (password, pfactory) def unregister(self, username): if debug: log.msg("unregistering username '{}' on pb port {}".format(username, self.portstr)) del self.users[username] # IRealm @defer.inlineCallbacks def requestAvatar(self, username, mind, interface): assert interface == pb.IPerspective username = bytes2unicode(username) persp = None if username in self.users: _, afactory = self.users.get(username) persp = yield afactory(mind, username) if not persp: raise ValueError("no perspective for '{}'".format(username)) yield persp.attached(mind) return (pb.IPerspective, persp, lambda: persp.detached(mind)) # ICredentialsChecker @defer.inlineCallbacks def requestAvatarId(self, creds): p = Properties() p.master = self.master username = bytes2unicode(creds.username) try: yield self.master.initLock.acquire() if username in self.users: password, _ = self.users[username] password = yield p.render(password) matched = creds.checkPassword(unicode2bytes(password)) if not matched: log.msg("invalid login from user '{}'".format(username)) raise error.UnauthorizedLogin() return creds.username 
log.msg("invalid login from unknown user '{}'".format(username)) raise error.UnauthorizedLogin() finally: # brake the callback stack by returning to the reactor # before waking up other waiters eventually(self.master.initLock.release) buildbot-3.4.0/master/buildbot/pbutil.py000066400000000000000000000143651413250514000202760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """Base classes handy for use with PB clients. """ from twisted.internet import protocol from twisted.python import log from twisted.spread import pb from twisted.spread.pb import PBClientFactory from buildbot.util import bytes2unicode class NewCredPerspective(pb.Avatar): def attached(self, mind): return self def detached(self, mind): pass class ReconnectingPBClientFactory(PBClientFactory, protocol.ReconnectingClientFactory): """Reconnecting client factory for PB brokers. Like PBClientFactory, but if the connection fails or is lost, the factory will attempt to reconnect. Instead of using f.getRootObject (which gives a Deferred that can only be fired once), override the gotRootObject method. Instead of using the newcred f.login (which is also one-shot), call f.startLogin() with the credentials and client, and override the gotPerspective method. 
Instead of using the oldcred f.getPerspective (also one-shot), call f.startGettingPerspective() with the same arguments, and override gotPerspective. gotRootObject and gotPerspective will be called each time the object is received (once per successful connection attempt). You will probably want to use obj.notifyOnDisconnect to find out when the connection is lost. If an authorization error occurs, failedToGetPerspective() will be invoked. To use me, subclass, then hand an instance to a connector (like TCPClient). """ def __init__(self): super().__init__() self._doingLogin = False self._doingGetPerspective = False def clientConnectionFailed(self, connector, reason): super().clientConnectionFailed(connector, reason) # Twisted-1.3 erroneously abandons the connection on non-UserErrors. # To avoid this bug, don't upcall, and implement the correct version # of the method here. if self.continueTrying: self.connector = connector self.retry() def clientConnectionLost(self, connector, reason): super().clientConnectionLost(connector, reason, reconnecting=True) RCF = protocol.ReconnectingClientFactory RCF.clientConnectionLost(self, connector, reason) def clientConnectionMade(self, broker): self.resetDelay() super().clientConnectionMade(broker) if self._doingLogin: self.doLogin(self._root) if self._doingGetPerspective: self.doGetPerspective(self._root) self.gotRootObject(self._root) # oldcred methods def getPerspective(self, *args): raise RuntimeError("getPerspective is one-shot: use startGettingPerspective instead") def startGettingPerspective(self, username, password, serviceName, perspectiveName=None, client=None): self._doingGetPerspective = True if perspectiveName is None: perspectiveName = username self._oldcredArgs = (username, password, serviceName, perspectiveName, client) def doGetPerspective(self, root): # oldcred getPerspective() (username, password, serviceName, perspectiveName, client) = self._oldcredArgs d = self._cbAuthIdentity(root, username, password) 
d.addCallback(self._cbGetPerspective, serviceName, perspectiveName, client) d.addCallbacks(self.gotPerspective, self.failedToGetPerspective) # newcred methods def login(self, *args): raise RuntimeError("login is one-shot: use startLogin instead") def startLogin(self, credentials, client=None): self._credentials = credentials self._client = client self._doingLogin = True def doLogin(self, root): # newcred login() d = self._cbSendUsername(root, self._credentials.username, self._credentials.password, self._client) d.addCallbacks(self.gotPerspective, self.failedToGetPerspective) # methods to override def gotPerspective(self, perspective): """The remote avatar or perspective (obtained each time this factory connects) is now available.""" def gotRootObject(self, root): """The remote root object (obtained each time this factory connects) is now available. This method will be called each time the connection is established and the object reference is retrieved.""" def failedToGetPerspective(self, why): """The login process failed, most likely because of an authorization failure (bad password), but it is also possible that we lost the new connection before we managed to send our credentials. """ log.msg("ReconnectingPBClientFactory.failedToGetPerspective") if why.check(pb.PBConnectionLost): log.msg("we lost the brand-new connection") # retrying might help here, let clientConnectionLost decide return # probably authorization self.stopTrying() # logging in harder won't help log.err(why) def decode(data, encoding='utf-8', errors='strict'): """We need to convert a dictionary where keys and values are bytes, to unicode strings. This happens when a Python 2 worker sends a dictionary back to a Python 3 master. 
""" data_type = type(data) if data_type == bytes: return bytes2unicode(data, encoding, errors) if data_type in (dict, list, tuple): if data_type == dict: data = data.items() return data_type(map(decode, data)) return data buildbot-3.4.0/master/buildbot/plugins/000077500000000000000000000000001413250514000200755ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/plugins/__init__.py000066400000000000000000000031241413250514000222060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ Buildbot plugin infrastructure """ from buildbot import statistics from buildbot.interfaces import IBuildStep from buildbot.interfaces import IChangeSource from buildbot.interfaces import IScheduler from buildbot.interfaces import IWorker from buildbot.plugins.db import get_plugins __all__ = [ 'changes', 'schedulers', 'steps', 'util', 'reporters', 'statistics', 'worker', 'secrets', 'webhooks' ] # Names here match the names of the corresponding Buildbot module, hence # 'changes', 'schedulers', but 'buildslave' changes = get_plugins('changes', IChangeSource) schedulers = get_plugins('schedulers', IScheduler) steps = get_plugins('steps', IBuildStep) util = get_plugins('util', None) reporters = get_plugins('reporters', None) secrets = get_plugins('secrets', None) webhooks = get_plugins('webhooks', None) # Worker entry point for new/updated plugins. worker = get_plugins('worker', IWorker) buildbot-3.4.0/master/buildbot/plugins/db.py000066400000000000000000000234161413250514000210420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # # pylint: disable=C0111 import traceback import warnings from pkg_resources import iter_entry_points from zope.interface import Invalid from zope.interface.verify import verifyClass from buildbot.errors import PluginDBError from buildbot.interfaces import IPlugin # Base namespace for Buildbot specific plugins _NAMESPACE_BASE = 'buildbot' class _PluginEntry: def __init__(self, group, entry, loader): self._group = group self._entry = entry self._value = None self._loader = loader self._load_warnings = [] def load(self): if self._value is None: with warnings.catch_warnings(record=True) as all_warnings: warnings.simplefilter("always") self._value = self._loader(self._entry) self._load_warnings = list(all_warnings) @property def group(self): return self._group @property def name(self): return self._entry.name @property def info(self): dist = self._entry.dist return (dist.project_name, dist.version) def __ne__(self, other): return self.info != other.info @property def value(self): self.load() for w in self._load_warnings: warnings.warn_explicit(w.message, w.category, w.filename, w.lineno) return self._value class _PluginEntryProxy(_PluginEntry): """Proxy for specific entry with custom group name. Used to provided access to the same entry from different namespaces. 
""" def __init__(self, group, plugin_entry): assert isinstance(plugin_entry, _PluginEntry) self._plugin_entry = plugin_entry self._group = group def load(self): self._plugin_entry.load() @property def group(self): return self._group @property def name(self): return self._plugin_entry.name @property def info(self): return self._plugin_entry.info @property def value(self): return self._plugin_entry.value class _NSNode: # pylint: disable=W0212 def __init__(self): self._children = dict() def load(self): for child in self._children.values(): child.load() def add(self, name, entry): assert isinstance(name, str) and isinstance(entry, _PluginEntry) self._add(name, entry) def _add(self, name, entry): path = name.split('.', 1) key = path.pop(0) is_leaf = not path child = self._children.get(key) if is_leaf: if child is not None: assert isinstance(child, _PluginEntry) if child != entry: raise PluginDBError(('Duplicate entry point for "{}:{}".\n' ' Previous definition {}\n' ' This definition {}').format(child.group, child.name, child.info, entry.info)) else: self._children[key] = entry else: if child is None: child = _NSNode() assert isinstance(child, _NSNode) child._add(path[0], entry) self._children[key] = child def __getattr__(self, name): child = self._children.get(name) if child is None: raise PluginDBError('Unknown component name: {}'.format(name)) if isinstance(child, _PluginEntry): return child.value return child def info(self, name): assert isinstance(name, str) return self._get(name).info def get(self, name): assert isinstance(name, str) return self._get(name).value def _get(self, name): path = name.split('.', 1) key = path.pop(0) is_leaf = not path child = self._children.get(key) if isinstance(child, _PluginEntry): if not is_leaf: raise PluginDBError('Excessive namespace specification: {}'.format(path[0])) return child elif child is None: raise PluginDBError('Unknown component name: {}'.format(name)) else: return child._get(path[0]) def _info_all(self): result = [] 
for key, child in self._children.items(): if isinstance(child, _PluginEntry): result.append((key, child.info)) else: result.extend([ ('{}.{}'.format(key, name), value) for name, value in child.info_all().items() ]) return result def info_all(self): return dict(self._info_all()) class _Plugins: """ represent plugins within a namespace """ def __init__(self, namespace, interface=None, check_extras=True): if interface is not None: assert interface.isOrExtends(IPlugin) self._group = '{}.{}'.format(_NAMESPACE_BASE, namespace) self._interface = interface self._check_extras = check_extras self._real_tree = None def _load_entry(self, entry): # pylint: disable=W0703 if self._check_extras: try: entry.require() except Exception as e: raise PluginDBError(('Requirements are not satisfied ' 'for {}:{}: {}').format( self._group, entry.name, str(e))) from e try: result = entry.load() except Exception as e: # log full traceback of the bad entry to help support traceback.print_exc() raise PluginDBError('Unable to load {}:{}: {}'.format(self._group, entry.name, str(e))) from e if self._interface: try: verifyClass(self._interface, result) except Invalid as e: raise PluginDBError('Plugin {}:{} does not implement {}: {}'.format(self._group, entry.name, self._interface.__name__, str(e))) from e return result @property def _tree(self): if self._real_tree is None: self._real_tree = _NSNode() for entry in iter_entry_points(self._group): self._real_tree.add(entry.name, _PluginEntry(self._group, entry, self._load_entry)) return self._real_tree def load(self): self._tree.load() def info_all(self): return self._tree.info_all() @property def names(self): # Expensive operation return list(self.info_all()) def info(self, name): """ get information about a particular plugin if known in this namespace """ return self._tree.info(name) def __contains__(self, name): """ check if the given name is available as a plugin """ try: return not isinstance(self._tree.get(name), _NSNode) except PluginDBError: 
return False def get(self, name): """ get an instance of the plugin with the given name """ return self._tree.get(name) def _get_entry(self, name): return self._tree._get(name) def __getattr__(self, name): try: return getattr(self._tree, name) except PluginDBError as e: raise AttributeError(str(e)) from e class _PluginDB: """ Plugin infrastructure support for Buildbot """ def __init__(self): self._namespaces = dict() def add_namespace(self, namespace, interface=None, check_extras=True, load_now=False): """ register given namespace in global database of plugins in case it's already registered, return the registration """ tempo = self._namespaces.get(namespace) if tempo is None: tempo = _Plugins(namespace, interface, check_extras) self._namespaces[namespace] = tempo if load_now: tempo.load() return tempo @property def namespaces(self): """ get a list of registered namespaces """ return list(self._namespaces) def info(self): """ get information about all plugins in registered namespaces """ result = dict() for name, namespace in self._namespaces.items(): result[name] = namespace.info_all() return result _DB = _PluginDB() def namespaces(): """ provide information about known namespaces """ return _DB.namespaces def info(): """ provide information about all known plugins format of the output: {, { {: (, lock.config_version: lock.updateFromLockId(lockid, config_version) return lock def getLockFromLockAccess(self, access, config_version): # Convert a lock-access object into an actual Lock instance. 
if not isinstance(access, locks.LockAccess): # Buildbot 0.7.7 compatibility: user did not specify access access = access.defaultAccess() return self.getLockByID(access.lockid, config_version) @defer.inlineCallbacks def getLockFromLockAccesses(self, accesses, config_version): # converts locks to their real forms locks = yield defer.gatherResults([self.getLockFromLockAccess(access, config_version) for access in accesses]) return zip(locks, accesses) class BotMaster(service.ReconfigurableServiceMixin, service.AsyncMultiService, LockRetrieverMixin): """This is the master-side service which manages remote buildbot workers. It provides them with Workers, and distributes build requests to them.""" debug = 0 name = "botmaster" def __init__(self): super().__init__() self.builders = {} self.builderNames = [] # builders maps Builder names to instances of bb.p.builder.Builder, # which is the master-side object that defines and controls a build. self.watchers = {} self.shuttingDown = False # subscription to new build requests self.buildrequest_consumer = None # a distributor for incoming build requests; see below self.brd = BuildRequestDistributor(self) self.brd.setServiceParent(self) @defer.inlineCallbacks def cleanShutdown(self, quickMode=False, stopReactor=True): """Shut down the entire process, once all currently-running builds are complete. quickMode will mark all builds as retry (except the ones that were triggered) """ if self.shuttingDown: return log.msg("Initiating clean shutdown") self.shuttingDown = True # first, stop the distributor; this will finish any ongoing scheduling # operations before firing yield self.brd.disownServiceParent() # Double check that we're still supposed to be shutting down # The shutdown may have been cancelled! 
while self.shuttingDown: if quickMode: for builder in self.builders.values(): # As we stop the builds, builder.building might change during loop # so we need to copy the list for build in list(builder.building): # if build is waited for then this is a sub-build, so # no need to retry it if sum(br.waitedFor for br in build.requests): results = CANCELLED else: results = RETRY is_building = build.workerforbuilder.state == States.BUILDING build.stopBuild("Master Shutdown", results) if not is_building: # if it is not building, then it must be a latent worker # which is substantiating. Cancel it. build.workerforbuilder.worker.insubstantiate() # then wait for all builds to finish dl = [] for builder in self.builders.values(): for build in builder.building: # build may be waiting for ping to worker to succeed which # may never happen if the connection to worker was broken # without TCP connection being severed build.workerforbuilder.abortPingIfAny() dl.append(build.waitUntilFinished()) if not dl: log.msg("No running jobs, starting shutdown immediately") else: log.msg("Waiting for %i build(s) to finish" % len(dl)) yield defer.DeferredList(dl) # Check that there really aren't any running builds n = 0 for builder in self.builders.values(): if builder.building: num_builds = len(builder.building) log.msg("Builder %s has %i builds running" % (builder, num_builds)) n += num_builds if n > 0: log.msg( "Not shutting down, there are %i builds running" % n) log.msg("Trying shutdown sequence again") yield util.asyncSleep(1) else: if stopReactor and self.shuttingDown: log.msg("Stopping reactor") self.master.reactor.stop() break if not self.shuttingDown: yield self.brd.setServiceParent(self) def cancelCleanShutdown(self): """Cancel a clean shutdown that is already in progress, if any""" if not self.shuttingDown: return log.msg("Cancelling clean shutdown") self.shuttingDown = False @metrics.countMethod('BotMaster.workerLost()') def workerLost(self, bot): 
metrics.MetricCountEvent.log("BotMaster.attached_workers", -1) for name, b in self.builders.items(): if bot.workername in b.config.workernames: b.detached(bot) @metrics.countMethod('BotMaster.getBuildersForWorker()') def getBuildersForWorker(self, workername): return [b for b in self.builders.values() if workername in b.config.workernames] def getBuildernames(self): return self.builderNames def getBuilders(self): return list(self.builders.values()) @defer.inlineCallbacks def getBuilderById(self, builderid): for builder in self.getBuilders(): if builderid == (yield builder.getBuilderId()): return builder return None @defer.inlineCallbacks def startService(self): @defer.inlineCallbacks def buildRequestAdded(key, msg): builderid = msg['builderid'] builder = yield self.getBuilderById(builderid) if builder is not None: self.maybeStartBuildsForBuilder(builder.name) # consume both 'new' and 'unclaimed' build requests startConsuming = self.master.mq.startConsuming self.buildrequest_consumer_new = yield startConsuming( buildRequestAdded, ('buildrequests', None, "new")) self.buildrequest_consumer_unclaimed = yield startConsuming( buildRequestAdded, ('buildrequests', None, 'unclaimed')) yield super().startService() @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): timer = metrics.Timer("BotMaster.reconfigServiceWithBuildbotConfig") timer.start() # reconfigure builders yield self.reconfigServiceBuilders(new_config) # call up yield super().reconfigServiceWithBuildbotConfig(new_config) # try to start a build for every builder; this is necessary at master # startup, and a good idea in any other case self.maybeStartBuildsForAllBuilders() timer.stop() @defer.inlineCallbacks def reconfigServiceBuilders(self, new_config): timer = metrics.Timer("BotMaster.reconfigServiceBuilders") timer.start() # arrange builders by name old_by_name = {b.name: b for b in list(self) if isinstance(b, Builder)} old_set = set(old_by_name) new_by_name = {bc.name: bc for bc in 
new_config.builders} new_set = set(new_by_name) # calculate new builders, by name, and removed builders removed_names, added_names = util.diffSets(old_set, new_set) if removed_names or added_names: log.msg("adding %d new builders, removing %d" % (len(added_names), len(removed_names))) for n in removed_names: builder = old_by_name[n] del self.builders[n] builder.master = None builder.botmaster = None yield builder.disownServiceParent() for n in added_names: builder = Builder(n) self.builders[n] = builder builder.botmaster = self builder.master = self.master yield builder.setServiceParent(self) self.builderNames = list(self.builders) yield self.master.data.updates.updateBuilderList( self.master.masterid, [util.bytes2unicode(n) for n in self.builderNames]) metrics.MetricCountEvent.log("num_builders", len(self.builders), absolute=True) timer.stop() def stopService(self): if self.buildrequest_consumer_new: self.buildrequest_consumer_new.stopConsuming() self.buildrequest_consumer_new = None if self.buildrequest_consumer_unclaimed: self.buildrequest_consumer_unclaimed.stopConsuming() self.buildrequest_consumer_unclaimed = None return super().stopService() def maybeStartBuildsForBuilder(self, buildername): """ Call this when something suggests that a particular builder may now be available to start a build. @param buildername: the name of the builder """ self.brd.maybeStartBuildsOn([buildername]) def maybeStartBuildsForWorker(self, worker_name): """ Call this when something suggests that a particular worker may now be available to start a build. @param worker_name: the name of the worker """ builders = self.getBuildersForWorker(worker_name) self.brd.maybeStartBuildsOn([b.name for b in builders]) def maybeStartBuildsForAllBuilders(self): """ Call this when something suggests that this would be a good time to start some builds, but nothing more specific. 
""" self.brd.maybeStartBuildsOn(self.builderNames) buildbot-3.4.0/master/buildbot/process/build.py000066400000000000000000001000461413250514000215440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from functools import reduce from twisted.internet import defer from twisted.internet import error from twisted.python import failure from twisted.python import log from twisted.python.failure import Failure from buildbot import interfaces from buildbot.process import buildstep from buildbot.process import metrics from buildbot.process import properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import computeResultAndTermination from buildbot.process.results import statusToString from buildbot.process.results import worst_status from buildbot.reporters.utils import getURLForBuild from buildbot.util import Notifier from buildbot.util import bytes2unicode from buildbot.util.eventual import eventually class Build(properties.PropertiesMixin): """I represent a single build by a single worker. 
Specialized Builders can use subclasses of Build to hold status information unique to those build processes. I control B{how} the build proceeds. The actual build is broken up into a series of steps, saved in the .buildSteps[] array as a list of L{buildbot.process.step.BuildStep} objects. Each step is a single remote command, possibly a shell command. After the build, I go away. I can be used by a factory by setting buildClass on L{buildbot.process.factory.BuildFactory} @ivar requests: the list of L{BuildRequest}s that triggered me """ VIRTUAL_BUILDERNAME_PROP = "virtual_builder_name" VIRTUAL_BUILDERDESCRIPTION_PROP = "virtual_builder_description" VIRTUAL_BUILDERTAGS_PROP = "virtual_builder_tags" workdir = "build" reason = "changes" finished = False results = None stopped = False set_runtime_properties = True subs = None _sentinel = [] # used as a sentinel to indicate unspecified initial_value def __init__(self, requests): self.requests = requests self.locks = [] # build a source stamp self.sources = requests[0].mergeSourceStampsWith(requests[1:]) self.reason = requests[0].mergeReasons(requests[1:]) self.currentStep = None self.workerEnvironment = {} self.buildid = None self._buildid_notifier = Notifier() self.number = None self.executedSteps = [] self.stepnames = {} self.terminate = False self._acquiringLock = None self._builderid = None # overall results, may downgrade after each step self.results = SUCCESS self.properties = properties.Properties() # tracks execution during the build finish phase self._locks_released = False self._build_finished = False # tracks execution during substantiation self._is_substantiating = False # tracks the config version for locks self.config_version = None def getProperties(self): return self.properties def setBuilder(self, builder): """ Set the given builder as our builder. 
@type builder: L{buildbot.process.builder.Builder} """ self.builder = builder self.master = builder.master self.config_version = builder.config_version @defer.inlineCallbacks def setLocks(self, lockList): self.locks = yield self.builder.botmaster.getLockFromLockAccesses(lockList, self.config_version) def setWorkerEnvironment(self, env): # TODO: remove once we don't have anything depending on this method or attribute # e.g., old-style steps (ShellMixin pulls the environment out of the # builder directly) self.workerEnvironment = env def getSourceStamp(self, codebase=''): for source in self.sources: if source.codebase == codebase: return source return None def getAllSourceStamps(self): return list(self.sources) @staticmethod def allChangesFromSources(sources): for s in sources: for c in s.changes: yield c def allChanges(self): return Build.allChangesFromSources(self.sources) def allFiles(self): # return a list of all source files that were changed files = [] for c in self.allChanges(): for f in c.files: files.append(f) return files def __repr__(self): return "".format(self.builder.name, repr(self.number), statusToString(self.results)) def blamelist(self): # Note that this algorithm is also implemented in # buildbot.reporters.utils.getResponsibleUsersForBuild, but using the data api. # it is important for the UI to have the blamelist easily available. 
# The best way is to make sure the owners property is set to full blamelist blamelist = [] for c in self.allChanges(): if c.who not in blamelist: blamelist.append(c.who) for source in self.sources: if source.patch: # Add patch author to blamelist blamelist.append(source.patch_info[0]) blamelist.sort() return blamelist def changesText(self): changetext = "" for c in self.allChanges(): changetext += "-" * 60 + "\n\n" + c.asText() + "\n" # consider sorting these by number return changetext def setStepFactories(self, step_factories): """Set a list of 'step factories', which are tuples of (class, kwargs), where 'class' is generally a subclass of step.BuildStep . These are used to create the Steps themselves when the Build starts (as opposed to when it is first created). By creating the steps later, their __init__ method will have access to things like build.allFiles() .""" self.stepFactories = list(step_factories) useProgress = True def getWorkerCommandVersion(self, command, oldversion=None): return self.workerforbuilder.getWorkerCommandVersion(command, oldversion) def getWorkerName(self): return self.workername @staticmethod def setupPropertiesKnownBeforeBuildStarts(props, requests, builder, workerforbuilder=None): # Note that this function does not setup the 'builddir' worker property # It's not possible to know it until before the actual worker has # attached. 
# start with global properties from the configuration props.updateFromProperties(builder.master.config.properties) # from the SourceStamps, which have properties via Change sources = requests[0].mergeSourceStampsWith(requests[1:]) for change in Build.allChangesFromSources(sources): props.updateFromProperties(change.properties) # get any properties from requests (this is the path through which # schedulers will send us properties) for rq in requests: props.updateFromProperties(rq.properties) # get builder properties builder.setupProperties(props) # get worker properties # navigate our way back to the L{buildbot.worker.Worker} # object that came from the config, and get its properties if workerforbuilder is not None: workerforbuilder.worker.setupProperties(props) @staticmethod def setupBuildProperties(props, requests, sources=None, number=None): # now set some properties of our own, corresponding to the # build itself props.setProperty("buildnumber", number, "Build") if sources is None: sources = requests[0].mergeSourceStampsWith(requests[1:]) if sources and len(sources) == 1: # old interface for backwards compatibility source = sources[0] props.setProperty("branch", source.branch, "Build") props.setProperty("revision", source.revision, "Build") props.setProperty("repository", source.repository, "Build") props.setProperty("codebase", source.codebase, "Build") props.setProperty("project", source.project, "Build") def setupWorkerBuildirProperty(self, workerforbuilder): path_module = workerforbuilder.worker.path_module # navigate our way back to the L{buildbot.worker.Worker} # object that came from the config, and get its properties if workerforbuilder.worker.worker_basedir: builddir = path_module.join( bytes2unicode(workerforbuilder.worker.worker_basedir), bytes2unicode(self.builder.config.workerbuilddir)) self.setProperty("builddir", builddir, "Worker") def setupWorkerForBuilder(self, workerforbuilder): self.path_module = workerforbuilder.worker.path_module 
self.workername = workerforbuilder.worker.workername self.worker_info = workerforbuilder.worker.info @defer.inlineCallbacks def getBuilderId(self): if self._builderid is None: if self.hasProperty(self.VIRTUAL_BUILDERNAME_PROP): self._builderid = yield self.builder.getBuilderIdForName( self.getProperty(self.VIRTUAL_BUILDERNAME_PROP)) description = self.getProperty( self.VIRTUAL_BUILDERDESCRIPTION_PROP, self.builder.config.description) tags = self.getProperty( self.VIRTUAL_BUILDERTAGS_PROP, self.builder.config.tags) if type(tags) == type([]) and '_virtual_' not in tags: tags.append('_virtual_') self.master.data.updates.updateBuilderInfo(self._builderid, description, tags) else: self._builderid = yield self.builder.getBuilderId() return self._builderid @defer.inlineCallbacks def startBuild(self, workerforbuilder): """This method sets up the build, then starts it by invoking the first Step. It returns a Deferred which will fire when the build finishes. This Deferred is guaranteed to never errback.""" self.workerforbuilder = workerforbuilder self.conn = None worker = workerforbuilder.worker # Cache the worker information as variables instead of accessing via worker, as the worker # will disappear during disconnection and some of these properties may still be needed. self.workername = worker.workername self.worker_info = worker.info log.msg("{}.startBuild".format(self)) # TODO: this will go away when build collapsing is implemented; until # then we just assign the build to the first buildrequest brid = self.requests[0].id builderid = yield self.getBuilderId() self.buildid, self.number = \ yield self.master.data.updates.addBuild( builderid=builderid, buildrequestid=brid, workerid=worker.workerid) self._buildid_notifier.notify(self.buildid) self.stopBuildConsumer = yield self.master.mq.startConsuming(self.controlStopBuild, ("control", "builds", str(self.buildid), "stop")) # the preparation step counts the time needed for preparing the worker and getting the # locks. 
# we cannot use a real step as we don't have a worker yet. self.preparation_step = buildstep.BuildStep(name="worker_preparation") self.preparation_step.setBuild(self) yield self.preparation_step.addStep() Build.setupBuildProperties(self.getProperties(), self.requests, self.sources, self.number) # then narrow WorkerLocks down to the right worker self.locks = [(l.getLockForWorker(self.workername), a) for l, a in self.locks] metrics.MetricCountEvent.log('active_builds', 1) # make sure properties are available to people listening on 'new' # events yield self.master.data.updates.setBuildProperties(self.buildid, self) yield self.master.data.updates.setBuildStateString(self.buildid, 'starting') yield self.master.data.updates.generateNewBuildEvent(self.buildid) try: self.setupBuild() # create .steps except Exception: yield self.buildPreparationFailure(Failure(), "setupBuild") yield self.buildFinished(['Build.setupBuild', 'failed'], EXCEPTION) return # flush properties in the beginning of the build yield self.master.data.updates.setBuildProperties(self.buildid, self) yield self.master.data.updates.setBuildStateString(self.buildid, 'preparing worker') try: ready_or_failure = False if workerforbuilder.worker and workerforbuilder.worker.acquireLocks(): self._is_substantiating = True ready_or_failure = yield workerforbuilder.substantiate_if_needed(self) except Exception: ready_or_failure = Failure() finally: self._is_substantiating = False # If prepare returns True then it is ready and we start a build # If it returns failure then we don't start a new build. 
if ready_or_failure is not True: yield self.buildPreparationFailure(ready_or_failure, "worker_prepare") if self.stopped: yield self.buildFinished(["worker", "cancelled"], self.results) elif isinstance(ready_or_failure, Failure) and \ ready_or_failure.check(interfaces.LatentWorkerCannotSubstantiate): yield self.buildFinished(["worker", "cannot", "substantiate"], EXCEPTION) else: yield self.buildFinished(["worker", "not", "available"], RETRY) return # ping the worker to make sure they're still there. If they've # fallen off the map (due to a NAT timeout or something), this # will fail in a couple of minutes, depending upon the TCP # timeout. # # TODO: This can unnecessarily suspend the starting of a build, in # situations where the worker is live but is pushing lots of data to # us in a build. yield self.master.data.updates.setBuildStateString(self.buildid, 'pinging worker') log.msg("starting build {}.. pinging the worker {}".format(self, workerforbuilder)) try: ping_success_or_failure = yield workerforbuilder.ping() except Exception: ping_success_or_failure = Failure() if ping_success_or_failure is not True: yield self.buildPreparationFailure(ping_success_or_failure, "worker_ping") yield self.buildFinished(["worker", "not", "pinged"], RETRY) return self.conn = workerforbuilder.worker.conn # To retrieve the builddir property, the worker must be attached as we # depend on its path_module. 
Latent workers become attached only after # preparing them, so we can't setup the builddir property earlier like # the rest of properties self.setupWorkerBuildirProperty(workerforbuilder) self.setupWorkerForBuilder(workerforbuilder) self.subs = self.conn.notifyOnDisconnect(self.lostRemote) # tell the remote that it's starting a build, too try: yield self.conn.remoteStartBuild(self.builder.name) except Exception: yield self.buildPreparationFailure(Failure(), "start_build") yield self.buildFinished(["worker", "not", "building"], RETRY) return yield self.master.data.updates.setBuildStateString(self.buildid, 'acquiring locks') yield self.acquireLocks() readymsg = "worker {} ready".format(self.getWorkerName()) yield self.master.data.updates.setStepStateString(self.preparation_step.stepid, readymsg) yield self.master.data.updates.finishStep(self.preparation_step.stepid, SUCCESS, False) yield self.master.data.updates.setBuildStateString(self.buildid, 'building') # start the sequence of steps self.startNextStep() @defer.inlineCallbacks def buildPreparationFailure(self, why, state_string): if self.stopped: # if self.stopped, then this failure is a LatentWorker's failure to substantiate # which we triggered on purpose in stopBuild() log.msg("worker stopped while " + state_string, why) yield self.master.data.updates.finishStep(self.preparation_step.stepid, CANCELLED, False) else: log.err(why, "while " + state_string) self.workerforbuilder.worker.putInQuarantine() if isinstance(why, failure.Failure): yield self.preparation_step.addLogWithFailure(why) yield self.master.data.updates.setStepStateString(self.preparation_step.stepid, "error while " + state_string) yield self.master.data.updates.finishStep(self.preparation_step.stepid, EXCEPTION, False) @staticmethod def _canAcquireLocks(lockList, workerforbuilder): for lock, access in lockList: worker_lock = lock.getLockForWorker( workerforbuilder.worker.workername) if not worker_lock.isAvailable(None, access): return False return 
True def acquireLocks(self, res=None): self._acquiringLock = None if not self.locks: return defer.succeed(None) if self.stopped: return defer.succeed(None) log.msg("acquireLocks(build {}, locks {})".format(self, self.locks)) for lock, access in self.locks: if not lock.isAvailable(self, access): log.msg("Build {} waiting for lock {}".format(self, lock)) d = lock.waitUntilMaybeAvailable(self, access) d.addCallback(self.acquireLocks) self._acquiringLock = (lock, access, d) return d # all locks are available, claim them all for lock, access in self.locks: lock.claim(self, access) return defer.succeed(None) def setUniqueStepName(self, step): # If there are any name collisions, we add a count to the loser # until it is unique. name = step.name if name in self.stepnames: count = self.stepnames[name] count += 1 self.stepnames[name] = count name = "{}_{}".format(step.name, count) else: self.stepnames[name] = 0 step.name = name def setupBuildSteps(self, step_factories): steps = [] for factory in step_factories: step = buildstep.create_step_from_step_or_factory(factory) step.setBuild(self) step.setWorker(self.workerforbuilder.worker) steps.append(step) if self.useProgress: step.setupProgress() return steps def setupBuild(self): # create the actual BuildSteps. self.steps = self.setupBuildSteps(self.stepFactories) owners = set(self.blamelist()) # gather owners from build requests owners.update({r.properties['owner'] for r in self.requests if "owner" in r.properties}) if owners: self.setProperty('owners', sorted(owners), 'Build') self.text = [] # list of text string lists (text2) def addStepsAfterCurrentStep(self, step_factories): # Add the new steps after the step that is running. # The running step has already been popped from self.steps self.steps[0:0] = self.setupBuildSteps(step_factories) def addStepsAfterLastStep(self, step_factories): # Add the new steps to the end. 
self.steps.extend(self.setupBuildSteps(step_factories)) def getNextStep(self): """This method is called to obtain the next BuildStep for this build. When it returns None (or raises a StopIteration exception), the build is complete.""" if not self.steps: return None if not self.conn: return None if self.terminate or self.stopped: # Run any remaining alwaysRun steps, and skip over the others while True: s = self.steps.pop(0) if s.alwaysRun: return s if not self.steps: return None else: return self.steps.pop(0) def startNextStep(self): try: s = self.getNextStep() except StopIteration: s = None if not s: return self.allStepsDone() self.executedSteps.append(s) self.currentStep = s # the following function returns a deferred, but we don't wait for it self._start_next_step_impl(s) return defer.succeed(None) @defer.inlineCallbacks def _start_next_step_impl(self, step): try: results = yield step.startStep(self.conn) yield self.master.data.updates.setBuildProperties(self.buildid, self) self.currentStep = None if self.finished: return # build was interrupted, don't keep building terminate = yield self.stepDone(results, step) # interpret/merge results if terminate: self.terminate = True yield self.startNextStep() except Exception as e: log.msg("{} build got exception when running step {}".format(self, step)) log.err(e) yield self.master.data.updates.setBuildProperties(self.buildid, self) # Note that buildFinished can't throw exception yield self.buildFinished(["build", "exception"], EXCEPTION) @defer.inlineCallbacks def stepDone(self, results, step): """This method is called when the BuildStep completes. 
It is passed a status object from the BuildStep and is responsible for merging the Step's results into those of the overall Build.""" terminate = False text = None if isinstance(results, tuple): results, text = results assert isinstance(results, type(SUCCESS)), "got %r" % (results,) summary = yield step.getBuildResultSummary() if 'build' in summary: text = [summary['build']] log.msg(" step '{}' complete: {} ({})".format(step.name, statusToString(results), text)) if text: self.text.extend(text) self.master.data.updates.setBuildStateString(self.buildid, bytes2unicode(" ".join(self.text))) self.results, terminate = computeResultAndTermination(step, results, self.results) if not self.conn: # force the results to retry if the connection was lost self.results = RETRY terminate = True return terminate def lostRemote(self, conn=None): # the worker went away. There are several possible reasons for this, # and they aren't necessarily fatal. For now, kill the build, but # TODO: see if we can resume the build when it reconnects. log.msg("{}.lostRemote".format(self)) self.conn = None self.text = ["lost", "connection"] self.results = RETRY if self.currentStep and self.currentStep.results is None: # this should cause the step to finish. log.msg(" stopping currentStep", self.currentStep) self.currentStep.interrupt(Failure(error.ConnectionLost())) else: self.text = ["lost", "connection"] self.stopped = True if self._acquiringLock: lock, access, d = self._acquiringLock lock.stopWaitingUntilAvailable(self, access, d) def controlStopBuild(self, key, params): return self.stopBuild(**params) def stopBuild(self, reason="", results=CANCELLED): # the idea here is to let the user cancel a build because, e.g., # they realized they committed a bug and they don't want to waste # the time building something that they know will fail. Another # reason might be to abandon a stuck build. 
We want to mark the # build as failed quickly rather than waiting for the worker's # timeout to kill it on its own. log.msg(" {}: stopping build: {} {}".format(self, reason, results)) if self.finished: return # TODO: include 'reason' in this point event self.stopped = True if self.currentStep and self.currentStep.results is None: self.currentStep.interrupt(reason) self.results = results if self._acquiringLock: lock, access, d = self._acquiringLock lock.stopWaitingUntilAvailable(self, access, d) elif self._is_substantiating: # We're having a latent worker that hasn't been substantiated yet. We need to abort # that to not have a latent worker without an associated build self.workerforbuilder.insubstantiate_if_needed() def allStepsDone(self): if self.results == FAILURE: text = ["failed"] elif self.results == WARNINGS: text = ["warnings"] elif self.results == EXCEPTION: text = ["exception"] elif self.results == RETRY: text = ["retry"] elif self.results == CANCELLED: text = ["cancelled"] else: text = ["build", "successful"] text.extend(self.text) return self.buildFinished(text, self.results) @defer.inlineCallbacks def buildFinished(self, text, results): """This method must be called when the last Step has completed. It marks the Build as complete and returns the Builder to the 'idle' state. It takes two arguments which describe the overall build status: text, results. 'results' is one of the possible results (see buildbot.process.results). If 'results' is SUCCESS or WARNINGS, we will permit any dependent builds to start. If it is 'FAILURE', those builds will be abandoned. 
This method never throws.""" try: self.stopBuildConsumer.stopConsuming() self.finished = True if self.conn: self.subs.unsubscribe() self.subs = None self.conn = None log.msg(" {}: build finished".format(self)) self.results = worst_status(self.results, results) eventually(self.releaseLocks) metrics.MetricCountEvent.log('active_builds', -1) yield self.master.data.updates.setBuildStateString(self.buildid, bytes2unicode(" ".join(text))) yield self.master.data.updates.finishBuild(self.buildid, self.results) if self.results == EXCEPTION: # When a build has an exception, put the worker in quarantine for a few seconds # to make sure we try next build with another worker self.workerforbuilder.worker.putInQuarantine() elif self.results != RETRY: # This worker looks sane if status is neither retry or exception # Avoid a race in case the build step reboot the worker if self.workerforbuilder.worker is not None: self.workerforbuilder.worker.resetQuarantine() # mark the build as finished self.workerforbuilder.buildFinished() self.builder.buildFinished(self, self.workerforbuilder) self._tryScheduleBuildsAfterLockUnlock(build_finished=True) except Exception: log.err(None, 'from finishing a build; this is a ' 'serious error - please file a bug at http://buildbot.net') def releaseLocks(self): if self.locks: log.msg("releaseLocks({}): {}".format(self, self.locks)) for lock, access in self.locks: if lock.isOwner(self, access): lock.release(self, access) self._tryScheduleBuildsAfterLockUnlock(locks_released=True) def _tryScheduleBuildsAfterLockUnlock(self, locks_released=False, build_finished=False): # we need to inform the botmaster to attempt to schedule any pending # build request if we released any locks. This is because buildrequest # may be started for a completely unrelated builder and yet depend on # a lock released by this build. # # TODO: the current approach is dumb as we just attempt to schedule # all buildrequests. 
A much better idea would be to record the reason # of why a buildrequest was not scheduled in the BuildRequestDistributor # and then attempt to schedule only these buildrequests which may have # had that reason resolved. # this function is complicated by the fact that the botmaster must be # informed only when all locks have been released and the actions in # buildFinished have concluded. Since releaseLocks is called using # eventually this may happen in any order. self._locks_released = self._locks_released or locks_released self._build_finished = self._build_finished or build_finished if not self.locks: return if self._locks_released and self._build_finished: self.builder.botmaster.maybeStartBuildsForAllBuilders() def getSummaryStatistic(self, name, summary_fn, initial_value=_sentinel): step_stats_list = [ st.getStatistic(name) for st in self.executedSteps if st.hasStatistic(name)] if initial_value is self._sentinel: return reduce(summary_fn, step_stats_list) return reduce(summary_fn, step_stats_list, initial_value) @defer.inlineCallbacks def getUrl(self): builder_id = yield self.getBuilderId() return getURLForBuild(self.master, builder_id, self.number) @defer.inlineCallbacks def get_buildid(self): if self.buildid is not None: return self.buildid buildid = yield self._buildid_notifier.wait() return buildid @defer.inlineCallbacks def waitUntilFinished(self): buildid = yield self.get_buildid() yield self.master.mq.waitUntilEvent(('builds', str(buildid), 'finished'), lambda: self.finished) def getWorkerInfo(self): return self.worker_info buildbot-3.4.0/master/buildbot/process/builder.py000066400000000000000000000425231413250514000221000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import warnings import weakref from twisted.application import service from twisted.internet import defer from twisted.python import log from buildbot import interfaces from buildbot.data import resultspec from buildbot.interfaces import IRenderable from buildbot.process import buildrequest from buildbot.process import workerforbuilder from buildbot.process.build import Build from buildbot.process.properties import Properties from buildbot.process.results import RETRY from buildbot.util import bytes2unicode from buildbot.util import epoch2datetime from buildbot.util import service as util_service def enforceChosenWorker(bldr, workerforbuilder, breq): if 'workername' in breq.properties: workername = breq.properties['workername'] if isinstance(workername, str): return workername == workerforbuilder.worker.workername return True class Builder(util_service.ReconfigurableServiceMixin, service.MultiService): # reconfigure builders before workers reconfig_priority = 196 @property def expectations(self): warnings.warn("'Builder.expectations' is deprecated.") return None def __init__(self, name): super().__init__() self.name = name # this is filled on demand by getBuilderId; don't access it directly self._builderid = None # build/wannabuild slots: Build objects move along this sequence self.building = [] # old_building holds active builds that were stolen from a predecessor self.old_building = weakref.WeakKeyDictionary() # workers which have connected but which are not yet available. 
# These are always in the ATTACHING state. self.attaching_workers = [] # workers at our disposal. Each WorkerForBuilder instance has a # .state that is IDLE, PINGING, or BUILDING. "PINGING" is used when a # Build is about to start, to make sure that they're still alive. self.workers = [] self.config = None # Tracks config version for locks self.config_version = None @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): # find this builder in the config for builder_config in new_config.builders: if builder_config.name == self.name: found_config = True break assert found_config, "no config found for builder '{}'".format(self.name) old_config = self.config self.config = builder_config self.config_version = self.master.config_version # allocate builderid now, so that the builder is visible in the web # UI; without this, the builder wouldn't appear until it preformed a # build. builderid = yield self.getBuilderId() if self._has_updated_config_info(old_config, builder_config): yield self.master.data.updates.updateBuilderInfo(builderid, builder_config.description, builder_config.tags) # if we have any workers attached which are no longer configured, # drop them. new_workernames = set(builder_config.workernames) self.workers = [w for w in self.workers if w.worker.workername in new_workernames] def _has_updated_config_info(self, old_config, new_config): if old_config is None: return True if old_config.description != new_config.description: return True if old_config.tags != new_config.tags: return True return False def __repr__(self): return "" % (self.name, id(self)) def getBuilderIdForName(self, name): # buildbot.config should ensure this is already unicode, but it doesn't # hurt to check again name = bytes2unicode(name) return self.master.data.updates.findBuilderId(name) def getBuilderId(self): # since findBuilderId is idempotent, there's no reason to add # additional locking around this function. 
if self._builderid: return defer.succeed(self._builderid) d = self.getBuilderIdForName(self.name) @d.addCallback def keep(builderid): self._builderid = builderid return builderid return d @defer.inlineCallbacks def getOldestRequestTime(self): """Returns the submitted_at of the oldest unclaimed build request for this builder, or None if there are no build requests. @returns: datetime instance or None, via Deferred """ bldrid = yield self.getBuilderId() unclaimed = yield self.master.data.get( ('builders', bldrid, 'buildrequests'), [resultspec.Filter('claimed', 'eq', [False])], order=['submitted_at'], limit=1) if unclaimed: return unclaimed[0]['submitted_at'] return None @defer.inlineCallbacks def getNewestCompleteTime(self): """Returns the complete_at of the latest completed build request for this builder, or None if there are no such build requests. @returns: datetime instance or None, via Deferred """ bldrid = yield self.getBuilderId() completed = yield self.master.data.get( ('builders', bldrid, 'buildrequests'), [resultspec.Filter('complete', 'eq', [True])], order=['-complete_at'], limit=1) if completed: return completed[0]['complete_at'] else: return None def getBuild(self, number): for b in self.building: if b.number == number: return b for b in self.old_building: if b.number == number: return b return None def addLatentWorker(self, worker): assert interfaces.ILatentWorker.providedBy(worker) for w in self.workers: if w == worker: break else: wfb = workerforbuilder.LatentWorkerForBuilder(worker, self) self.workers.append(wfb) self.botmaster.maybeStartBuildsForBuilder(self.name) @defer.inlineCallbacks def attached(self, worker, commands): """This is invoked by the Worker when the self.workername bot registers their builder. 
@type worker: L{buildbot.worker.Worker} @param worker: the Worker that represents the worker as a whole @type commands: dict: string -> string, or None @param commands: provides the worker's version of each RemoteCommand @rtype: L{twisted.internet.defer.Deferred} @return: a Deferred that fires (with 'self') when the worker-side builder is fully attached and ready to accept commands. """ for w in self.attaching_workers + self.workers: if w.worker == worker: # already attached to them. This is fairly common, since # attached() gets called each time we receive the builder # list from the worker, and we ask for it each time we add or # remove a builder. So if the worker is hosting builders # A,B,C, and the config file changes A, we'll remove A and # re-add it, triggering two builder-list requests, getting # two redundant calls to attached() for B, and another two # for C. # # Therefore, when we see that we're already attached, we can # just ignore it. return self wfb = workerforbuilder.WorkerForBuilder() wfb.setBuilder(self) self.attaching_workers.append(wfb) try: yield wfb.attached(worker, commands) self.attaching_workers.remove(wfb) self.workers.append(wfb) return self except Exception as e: # pragma: no cover # already log.err'ed by WorkerForBuilder._attachFailure # TODO: remove from self.workers (except that detached() should get # run first, right?) 
log.err(e, 'worker failed to attach') return None def detached(self, worker): """This is called when the connection to the bot is lost.""" for wfb in self.attaching_workers + self.workers: if wfb.worker == worker: break else: log.msg(("WEIRD: Builder.detached({}) ({})" " not in attaching_workers({})" " or workers({})").format(worker, worker.workername, self.attaching_workers, self.workers)) return if wfb in self.attaching_workers: self.attaching_workers.remove(wfb) if wfb in self.workers: self.workers.remove(wfb) # inform the WorkerForBuilder that their worker went away wfb.detached() def getAvailableWorkers(self): return [wfb for wfb in self.workers if wfb.isAvailable()] @defer.inlineCallbacks def canStartBuild(self, workerforbuilder, buildrequest): can_start = True # check whether the locks that the build will acquire can actually be # acquired locks = self.config.locks worker = workerforbuilder.worker props = None # don't unnecessarily setup properties for build def setupPropsIfNeeded(props): if props is not None: return props props = Properties() Build.setupPropertiesKnownBeforeBuildStarts(props, [buildrequest], self, workerforbuilder) return props if worker.builds_may_be_incompatible: # Check if the latent worker is actually compatible with the build. # The instance type of the worker may depend on the properties of # the build that substantiated it. 
props = setupPropsIfNeeded(props) can_start = yield worker.isCompatibleWithBuild(props) if not can_start: return False if IRenderable.providedBy(locks): # collect properties that would be set for a build if we # started it now and render locks using it props = setupPropsIfNeeded(props) locks = yield props.render(locks) locks = yield self.botmaster.getLockFromLockAccesses(locks, self.config_version) if locks: can_start = Build._canAcquireLocks(locks, workerforbuilder) if can_start is False: return can_start if callable(self.config.canStartBuild): can_start = yield self.config.canStartBuild(self, workerforbuilder, buildrequest) return can_start @defer.inlineCallbacks def _startBuildFor(self, workerforbuilder, buildrequests): build = self.config.factory.newBuild(buildrequests) build.setBuilder(self) props = build.getProperties() # give the properties a reference back to this build props.build = build Build.setupPropertiesKnownBeforeBuildStarts( props, build.requests, build.builder, workerforbuilder) log.msg("starting build {} using worker {}".format(build, workerforbuilder)) # set up locks locks = yield build.render(self.config.locks) yield build.setLocks(locks) if self.config.env: build.setWorkerEnvironment(self.config.env) # append the build to self.building self.building.append(build) # The worker is ready to go. workerforbuilder.buildStarted() sets its # state to BUILDING (so we won't try to use it for any other builds). # This gets set back to IDLE by the Build itself when it finishes. # Note: This can't be done in `Build.startBuild`, since it needs to be done # synchronously, before the BuildRequestDistributor looks at # another build request. workerforbuilder.buildStarted() # We put the result of startBuild into a fresh Deferred since _startBuildFor should not # wait until the build is finished. This uses `maybeDeferred` to ensure that any exceptions # raised by startBuild are treated as deferred errbacks (see # http://trac.buildbot.net/ticket/2428). 
d = defer.maybeDeferred(build.startBuild, workerforbuilder) # this shouldn't happen. if it does, the worker will be wedged d.addErrback(log.err, 'from a running build; this is a ' 'serious error - please file a bug at http://buildbot.net') return True def setupProperties(self, props): props.setProperty("buildername", self.name, "Builder") if self.config.properties: for propertyname in self.config.properties: props.setProperty(propertyname, self.config.properties[propertyname], "Builder") if self.config.defaultProperties: for propertyname in self.config.defaultProperties: if propertyname not in props: props.setProperty(propertyname, self.config.defaultProperties[propertyname], "Builder") def buildFinished(self, build, wfb): """This is called when the Build has finished (either success or failure). Any exceptions during the build are reported with results=FAILURE, not with an errback.""" # by the time we get here, the Build has already released the worker, # which will trigger a check for any now-possible build requests # (maybeStartBuilds) results = build.results self.building.remove(build) if results == RETRY: d = self._resubmit_buildreqs(build) d.addErrback(log.err, 'while resubmitting a build request') else: complete_at_epoch = self.master.reactor.seconds() complete_at = epoch2datetime(complete_at_epoch) brids = [br.id for br in build.requests] d = self.master.data.updates.completeBuildRequests( brids, results, complete_at=complete_at) # nothing in particular to do with this deferred, so just log it if # it fails.. 
d.addErrback(log.err, 'while marking build requests as completed') if wfb.worker: wfb.worker.releaseLocks() def _resubmit_buildreqs(self, build): brids = [br.id for br in build.requests] d = self.master.data.updates.unclaimBuildRequests(brids) @d.addCallback def notify(_): pass # XXX method does not exist # self._msg_buildrequests_unclaimed(build.requests) return d # Build Creation def maybeStartBuild(self, workerforbuilder, breqs): # This method is called by the botmaster whenever this builder should # start a set of buildrequests on a worker. Do not call this method # directly - use master.botmaster.maybeStartBuildsForBuilder, or one of # the other similar methods if more appropriate # first, if we're not running, then don't start builds; stopService # uses this to ensure that any ongoing maybeStartBuild invocations # are complete before it stops. if not self.running: return defer.succeed(False) # If the build fails from here on out (e.g., because a worker has failed), # it will be handled outside of this function. TODO: test that! 
return self._startBuildFor(workerforbuilder, breqs) # a few utility functions to make the maybeStartBuild a bit shorter and # easier to read def getCollapseRequestsFn(self): """Helper function to determine which collapseRequests function to use from L{_collapseRequests}, or None for no merging""" # first, seek through builder, global, and the default collapseRequests_fn = self.config.collapseRequests if collapseRequests_fn is None: collapseRequests_fn = self.master.config.collapseRequests if collapseRequests_fn is None: collapseRequests_fn = True # then translate False and True properly if collapseRequests_fn is False: collapseRequests_fn = None elif collapseRequests_fn is True: collapseRequests_fn = self._defaultCollapseRequestFn return collapseRequests_fn @staticmethod def _defaultCollapseRequestFn(master, builder, brdict1, brdict2): return buildrequest.BuildRequest.canBeCollapsed(master, brdict1, brdict2) buildbot-3.4.0/master/buildbot/process/buildrequest.py000066400000000000000000000334731413250514000231660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import calendar from twisted.internet import defer from buildbot.data import resultspec from buildbot.process import properties from buildbot.process.results import SKIPPED class BuildRequestCollapser: # brids is a list of the new added buildrequests id # This class is called before generated the 'new' event for the # buildrequest # Before adding new buildset/buildrequests, we must examine each unclaimed # buildrequest. # EG: # 1. get the list of all unclaimed buildrequests: # - We must exclude all buildsets which have at least 1 claimed buildrequest # 2. For each unclaimed buildrequests, if compatible with the new request # (sourcestamps match, except for revision) Then: # 2.1. claim it # 2.2. complete it with result SKIPPED def __init__(self, master, brids): self.master = master self.brids = brids @defer.inlineCallbacks def _getUnclaimedBrs(self, builderid): # Retrieve the list of Brs for all unclaimed builds unclaim_brs = yield self.master.data.get(('builders', builderid, 'buildrequests'), [resultspec.Filter('claimed', 'eq', [False])]) # sort by submitted_at, so the first is the oldest unclaim_brs.sort(key=lambda brd: brd['submitted_at']) return unclaim_brs @defer.inlineCallbacks def collapse(self): brids_to_collapse = set() for brid in self.brids: # Get the BuildRequest object br = yield self.master.data.get(('buildrequests', brid)) # Retrieve the buildername builderid = br['builderid'] bldrdict = yield self.master.data.get(('builders', builderid)) # Get the builder object bldr = self.master.botmaster.builders.get(bldrdict['name']) # Get the Collapse BuildRequest function (from the configuration) collapseRequestsFn = bldr.getCollapseRequestsFn() if bldr else None unclaim_brs = yield self._getUnclaimedBrs(builderid) # short circuit if there is no merging to do if not collapseRequestsFn or not unclaim_brs: continue for unclaim_br in unclaim_brs: if unclaim_br['buildrequestid'] == br['buildrequestid']: continue canCollapse = 
yield collapseRequestsFn(self.master, bldr, br, unclaim_br) if canCollapse is True: brids_to_collapse.add(unclaim_br['buildrequestid']) collapsed_brids = [] for brid in brids_to_collapse: claimed = yield self.master.data.updates.claimBuildRequests([brid]) if claimed: yield self.master.data.updates.completeBuildRequests([brid], SKIPPED) collapsed_brids.append(brid) return collapsed_brids class TempSourceStamp: # temporary fake sourcestamp ATTRS = ('branch', 'revision', 'repository', 'project', 'codebase') PATCH_ATTRS = ( ('patch_level', 'level'), ('patch_body', 'body'), ('patch_subdir', 'subdir'), ('patch_author', 'author'), ('patch_comment', 'comment') ) def __init__(self, ssdict): self._ssdict = ssdict def __getattr__(self, attr): patch = self._ssdict.get('patch') if attr == 'patch': if patch: return (patch['level'], patch['body'], patch['subdir']) return None elif attr == 'patch_info': if patch: return (patch['author'], patch['comment']) return (None, None) elif attr in self.ATTRS or attr == 'ssid': return self._ssdict[attr] raise AttributeError(attr) def asSSDict(self): return self._ssdict def asDict(self): # This return value should match the kwargs to # SourceStampsConnectorComponent.findSourceStampId result = {} for attr in self.ATTRS: result[attr] = self._ssdict.get(attr) patch = self._ssdict.get('patch') or {} for patch_attr, attr in self.PATCH_ATTRS: result[patch_attr] = patch.get(attr) assert all( isinstance(val, (str, int, bytes, type(None))) for attr, val in result.items() ), result return result class TempChange: # temporary fake change def __init__(self, d): self._chdict = d def __getattr__(self, attr): if attr == 'who': return self._chdict['author'] elif attr == 'properties': return properties.Properties.fromDict(self._chdict['properties']) return self._chdict[attr] def asChDict(self): return self._chdict class BuildRequest: """ A rolled-up encapsulation of all of the data relevant to a build request. 
This class is used by the C{nextBuild} and C{collapseRequests} configuration parameters, as well as in starting a build. Construction of a BuildRequest object is a heavyweight process involving a lot of database queries, so it should be avoided where possible. See bug #1894. @type reason: string @ivar reason: the reason this Build is being requested. Schedulers provide this, but for forced builds the user requesting the build will provide a string. It comes from the buildsets table. @type properties: L{properties.Properties} @ivar properties: properties that should be applied to this build, taken from the buildset containing this build request @ivar submittedAt: a timestamp (seconds since epoch) when this request was submitted to the Builder. This is used by the CVS step to compute a checkout timestamp, as well as by the master to prioritize build requests from oldest to newest. @ivar buildername: name of the requested builder @ivar priority: request priority @ivar id: build request ID @ivar bsid: ID of the parent buildset """ submittedAt = None sources = {} @classmethod def fromBrdict(cls, master, brdict): """ Construct a new L{BuildRequest} from a dictionary as returned by L{BuildRequestsConnectorComponent.getBuildRequest}. This method uses a cache, which may result in return of stale objects; for the most up-to-date information, use the database connector methods. 
@param master: current build master @param brdict: build request dictionary @returns: L{BuildRequest}, via Deferred """ cache = master.caches.get_cache("BuildRequests", cls._make_br) return cache.get(brdict['buildrequestid'], brdict=brdict, master=master) @classmethod @defer.inlineCallbacks def _make_br(cls, brid, brdict, master): buildrequest = cls() buildrequest.id = brid buildrequest.bsid = brdict['buildsetid'] builder = yield master.db.builders.getBuilder(brdict['builderid']) buildrequest.buildername = builder['name'] buildrequest.builderid = brdict['builderid'] buildrequest.priority = brdict['priority'] dt = brdict['submitted_at'] buildrequest.submittedAt = dt and calendar.timegm(dt.utctimetuple()) buildrequest.master = master buildrequest.waitedFor = brdict['waited_for'] # fetch the buildset to get the reason buildset = yield master.db.buildsets.getBuildset(brdict['buildsetid']) assert buildset # schema should guarantee this buildrequest.reason = buildset['reason'] # fetch the buildset properties, and convert to Properties buildset_properties = yield master.db.buildsets.getBuildsetProperties(brdict['buildsetid']) buildrequest.properties = properties.Properties.fromDict( buildset_properties) # make a fake sources dict (temporary) bsdata = yield master.data.get(('buildsets', str(buildrequest.bsid))) assert bsdata[ 'sourcestamps'], "buildset must have at least one sourcestamp" buildrequest.sources = {} for ssdata in bsdata['sourcestamps']: ss = buildrequest.sources[ssdata['codebase']] = TempSourceStamp(ssdata) changes = yield master.data.get(("sourcestamps", ss.ssid, "changes")) ss.changes = [TempChange(change) for change in changes] return buildrequest @staticmethod def filter_buildset_props_for_collapsing(bs_props): return {name: value for name, (value, source) in bs_props.items() if name != 'scheduler' and source == 'Scheduler'} @staticmethod @defer.inlineCallbacks def canBeCollapsed(master, new_br, old_br): """ Returns true if both buildrequest can be 
merged, via Deferred. This implements Buildbot's default collapse strategy. """ # short-circuit: if these are for the same buildset, collapse away if new_br['buildsetid'] == old_br['buildsetid']: return True # the new buildrequest must actually be newer than the old build request, otherwise we # may end up with situations where two build requests submitted at the same time will # cancel each other. if new_br['buildrequestid'] < old_br['buildrequestid']: return False # get the buildsets for each buildrequest selfBuildsets = yield master.data.get(('buildsets', str(new_br['buildsetid']))) otherBuildsets = yield master.data.get(('buildsets', str(old_br['buildsetid']))) # extract sourcestamps, as dictionaries by codebase selfSources = dict((ss['codebase'], ss) for ss in selfBuildsets['sourcestamps']) otherSources = dict((ss['codebase'], ss) for ss in otherBuildsets['sourcestamps']) # if the sets of codebases do not match, we can't collapse if set(selfSources) != set(otherSources): return False for c, selfSS in selfSources.items(): otherSS = otherSources[c] if selfSS['repository'] != otherSS['repository']: return False if selfSS['branch'] != otherSS['branch']: return False if selfSS['project'] != otherSS['project']: return False # anything with a patch won't be collapsed if selfSS['patch'] or otherSS['patch']: return False # get changes & compare selfChanges = yield master.data.get(('sourcestamps', selfSS['ssid'], 'changes')) otherChanges = yield master.data.get(('sourcestamps', otherSS['ssid'], 'changes')) # if both have changes, proceed, else fail - if no changes check revision instead if selfChanges and otherChanges: continue if selfChanges and not otherChanges: return False if not selfChanges and otherChanges: return False # else check revisions if selfSS['revision'] != otherSS['revision']: return False # don't collapse build requests if the properties injected by the scheduler differ new_bs_props = yield master.data.get(('buildsets', str(new_br['buildsetid']), 
'properties')) old_bs_props = yield master.data.get(('buildsets', str(old_br['buildsetid']), 'properties')) new_bs_props = BuildRequest.filter_buildset_props_for_collapsing(new_bs_props) old_bs_props = BuildRequest.filter_buildset_props_for_collapsing(old_bs_props) if new_bs_props != old_bs_props: return False return True def mergeSourceStampsWith(self, others): """ Returns one merged sourcestamp for every codebase """ # get all codebases from all requests all_codebases = set(self.sources) for other in others: all_codebases |= set(other.sources) all_merged_sources = {} # walk along the codebases for codebase in all_codebases: all_sources = [] if codebase in self.sources: all_sources.append(self.sources[codebase]) for other in others: if codebase in other.sources: all_sources.append(other.sources[codebase]) assert all_sources, "each codebase should have at least one sourcestamp" # TODO: select the sourcestamp that best represents the merge, # preferably the latest one. This used to be accomplished by # looking at changeids and picking the highest-numbered. all_merged_sources[codebase] = all_sources[-1] return list(all_merged_sources.values()) def mergeReasons(self, others): """Return a reason for the merged build request.""" reasons = [] for req in [self] + others: if req.reason and req.reason not in reasons: reasons.append(req.reason) return ", ".join(reasons) def getSubmitTime(self): return self.submittedAt buildbot-3.4.0/master/buildbot/process/buildrequestdistributor.py000066400000000000000000000457521413250514000254640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy import random from datetime import datetime from dateutil.tz import tzutc from twisted.internet import defer from twisted.python import log from twisted.python.failure import Failure from buildbot.data import resultspec from buildbot.process import metrics from buildbot.process.buildrequest import BuildRequest from buildbot.util import deferwaiter from buildbot.util import epoch2datetime from buildbot.util import service class BuildChooserBase: # # WARNING: This API is experimental and in active development. # # This internal object selects a new build+worker pair. It acts as a # generator, initializing its state on creation and offering up new # pairs until exhaustion. The object can be destroyed at any time # (eg, before the list exhausts), and can be "restarted" by abandoning # an old instance and creating a new one. 
# # The entry point is: # * bc.chooseNextBuild() - get the next (worker, [breqs]) or # (None, None) # # The default implementation of this class implements a default # chooseNextBuild() that delegates out to two other functions: # * bc.popNextBuild() - get the next (worker, breq) pair def __init__(self, bldr, master): self.bldr = bldr self.master = master self.breqCache = {} self.unclaimedBrdicts = None @defer.inlineCallbacks def chooseNextBuild(self): # Return the next build, as a (worker, [breqs]) pair worker, breq = yield self.popNextBuild() if not worker or not breq: return (None, None) return (worker, [breq]) # Must be implemented by subclass def popNextBuild(self): # Pick the next (worker, breq) pair; note this is pre-merge, so # it's just one breq raise NotImplementedError("Subclasses must implement this!") # - Helper functions that are generally useful to all subclasses - @defer.inlineCallbacks def _fetchUnclaimedBrdicts(self): # Sets up a cache of all the unclaimed brdicts. The cache is # saved at self.unclaimedBrdicts cache. If the cache already # exists, this function does nothing. If a refetch is desired, set # the self.unclaimedBrdicts to None before calling.""" if self.unclaimedBrdicts is None: # TODO: use order of the DATA API brdicts = yield self.master.data.get(('builders', (yield self.bldr.getBuilderId()), 'buildrequests'), [resultspec.Filter('claimed', 'eq', [False])]) # sort by submitted_at, so the first is the oldest brdicts.sort(key=lambda brd: brd['submitted_at']) self.unclaimedBrdicts = brdicts return self.unclaimedBrdicts @defer.inlineCallbacks def _getBuildRequestForBrdict(self, brdict): # Turn a brdict into a BuildRequest into a brdict. This is useful # for API like 'nextBuild', which operate on BuildRequest objects. 
breq = self.breqCache.get(brdict['buildrequestid']) if not breq: breq = yield BuildRequest.fromBrdict(self.master, brdict) if breq: self.breqCache[brdict['buildrequestid']] = breq return breq def _getBrdictForBuildRequest(self, breq): # Turn a BuildRequest back into a brdict. This operates from the # cache, which must be set up once via _fetchUnclaimedBrdicts if breq is None: return None brid = breq.id for brdict in self.unclaimedBrdicts: if brid == brdict['buildrequestid']: return brdict return None def _removeBuildRequest(self, breq): # Remove a BuildrRequest object (and its brdict) # from the caches if breq is None: return brdict = self._getBrdictForBuildRequest(breq) if brdict is not None: self.unclaimedBrdicts.remove(brdict) if breq.id in self.breqCache: del self.breqCache[breq.id] def _getUnclaimedBuildRequests(self): # Retrieve the list of BuildRequest objects for all unclaimed builds return defer.gatherResults([ self._getBuildRequestForBrdict(brdict) for brdict in self.unclaimedBrdicts]) class BasicBuildChooser(BuildChooserBase): # BasicBuildChooser generates build pairs via the configuration points: # * config.nextWorker (or random.choice if not set) # * config.nextBuild (or "pop top" if not set) # # For N workers, this will call nextWorker at most N times. If nextWorker # returns a worker that cannot satisfy the build chosen by nextBuild, # it will search for a worker that can satisfy the build. If one is found, # the workers that cannot be used are "recycled" back into a list # to be tried, in order, for the next chosen build. # # We check whether Builder.canStartBuild returns True for a particular # worker. It evaluates any Build properties that are known before build # and checks whether the worker may satisfy them. For example, the worker # must have the locks available. 
def __init__(self, bldr, master): super().__init__(bldr, master) self.nextWorker = self.bldr.config.nextWorker if not self.nextWorker: self.nextWorker = lambda _, workers, __: random.choice( workers) if workers else None self.workerpool = self.bldr.getAvailableWorkers() # Pick workers one at a time from the pool, and if the Builder says # they're usable (eg, locks can be satisfied), then prefer those # workers. self.preferredWorkers = [] self.nextBuild = self.bldr.config.nextBuild @defer.inlineCallbacks def popNextBuild(self): nextBuild = (None, None) while True: # 1. pick a build breq = yield self._getNextUnclaimedBuildRequest() if not breq: break # 2. pick a worker worker = yield self._popNextWorker(breq) if not worker: break # either satisfy this build or we leave it for another day self._removeBuildRequest(breq) # 3. make sure worker+ is usable for the breq recycledWorkers = [] while worker: canStart = yield self.canStartBuild(worker, breq) if canStart: break # try a different worker recycledWorkers.append(worker) worker = yield self._popNextWorker(breq) # recycle the workers that we didn't use to the head of the queue # this helps ensure we run 'nextWorker' only once per worker choice if recycledWorkers: self._unpopWorkers(recycledWorkers) # 4. done? 
otherwise we will try another build if worker: nextBuild = (worker, breq) break return nextBuild @defer.inlineCallbacks def _getNextUnclaimedBuildRequest(self): # ensure the cache is there yield self._fetchUnclaimedBrdicts() if not self.unclaimedBrdicts: return None if self.nextBuild: # nextBuild expects BuildRequest objects breqs = yield self._getUnclaimedBuildRequests() try: nextBreq = yield self.nextBuild(self.bldr, breqs) if nextBreq not in breqs: nextBreq = None except Exception: log.err(Failure(), "from _getNextUnclaimedBuildRequest for builder '{}'".format(self.bldr)) nextBreq = None else: # otherwise just return the first build brdict = self.unclaimedBrdicts[0] nextBreq = yield self._getBuildRequestForBrdict(brdict) return nextBreq @defer.inlineCallbacks def _popNextWorker(self, buildrequest): # use 'preferred' workers first, if we have some ready if self.preferredWorkers: worker = self.preferredWorkers.pop(0) return worker while self.workerpool: try: worker = yield self.nextWorker(self.bldr, self.workerpool, buildrequest) except Exception: log.err(Failure(), "from nextWorker for builder '{}'".format(self.bldr)) worker = None if not worker or worker not in self.workerpool: # bad worker or no worker returned break self.workerpool.remove(worker) return worker return None def _unpopWorkers(self, workers): # push the workers back to the front self.preferredWorkers[:0] = workers def canStartBuild(self, worker, breq): return self.bldr.canStartBuild(worker, breq) class BuildRequestDistributor(service.AsyncMultiService): """ Special-purpose class to handle distributing build requests to builders by calling their C{maybeStartBuild} method. This takes account of the C{prioritizeBuilders} configuration, and is highly re-entrant; that is, if a new build request arrives while builders are still working on the previous build request, then this class will correctly re-prioritize invocations of builders' C{maybeStartBuild} methods. 
""" BuildChooser = BasicBuildChooser def __init__(self, botmaster): super().__init__() self.botmaster = botmaster # lock to ensure builders are only sorted once at any time self.pending_builders_lock = defer.DeferredLock() # sorted list of names of builders that need their maybeStartBuild # method invoked. self._pending_builders = [] self.activity_lock = defer.DeferredLock() self.active = False self._deferwaiter = deferwaiter.DeferWaiter() self._activity_loop_deferred = None @defer.inlineCallbacks def stopService(self): # Lots of stuff happens asynchronously here, so we need to let it all # quiesce. First, let the parent stopService succeed between # activities; then the loop will stop calling itself, since # self.running is false. yield self.activity_lock.run(service.AsyncService.stopService, self) # now let any outstanding calls to maybeStartBuildsOn to finish, so # they don't get interrupted in mid-stride. This tends to be # particularly painful because it can occur when a generator is gc'd. # TEST-TODO: this behavior is not asserted in any way. yield self._deferwaiter.wait() @defer.inlineCallbacks def maybeStartBuildsOn(self, new_builders): """ Try to start any builds that can be started right now. This function returns immediately, and promises to trigger those builders eventually. @param new_builders: names of new builders that should be given the opportunity to check for new requests. 
""" if not self.running: return try: yield self._deferwaiter.add(self._maybeStartBuildsOn(new_builders)) except Exception as e: # pragma: no cover log.err(e, "while starting builds on {0}".format(new_builders)) @defer.inlineCallbacks def _maybeStartBuildsOn(self, new_builders): new_builders = set(new_builders) existing_pending = set(self._pending_builders) # if we won't add any builders, there's nothing to do if new_builders < existing_pending: return None # reset the list of pending builders @defer.inlineCallbacks def resetPendingBuildersList(new_builders): try: # re-fetch existing_pending, in case it has changed # while acquiring the lock existing_pending = set(self._pending_builders) # then sort the new, expanded set of builders self._pending_builders = \ yield self._sortBuilders( list(existing_pending | new_builders)) # start the activity loop, if we aren't already # working on that. if not self.active: self._activity_loop_deferred = self._activityLoop() except Exception: # pragma: no cover log.err(Failure(), "while attempting to start builds on {}".format(self.name)) yield self.pending_builders_lock.run( resetPendingBuildersList, new_builders) return None @defer.inlineCallbacks def _defaultSorter(self, master, builders): timer = metrics.Timer("BuildRequestDistributor._defaultSorter()") timer.start() # perform an asynchronous schwarzian transform, transforming None # into sys.maxint so that it sorts to the end def xform(bldr): d = defer.maybeDeferred(bldr.getOldestRequestTime) d.addCallback(lambda time: (((time is None) and None or time), bldr)) return d xformed = yield defer.gatherResults( [xform(bldr) for bldr in builders]) # sort the transformed list synchronously, comparing None to the end of # the list def xformedKey(a): """ Key function can be used to sort a list where each list element is a tuple: (datetime.datetime, Builder) @return: a tuple of (date, builder name) """ (date, builder) = a if date is None: # Choose a really big date, so that any # date 
set to 'None' will appear at the # end of the list during comparisons. date = datetime.max # Need to set the timezone on the date, in order # to perform comparisons with other dates which # have the time zone set. date = date.replace(tzinfo=tzutc()) return (date, builder.name) xformed.sort(key=xformedKey) # and reverse the transform rv = [xf[1] for xf in xformed] timer.stop() return rv @defer.inlineCallbacks def _sortBuilders(self, buildernames): timer = metrics.Timer("BuildRequestDistributor._sortBuilders()") timer.start() # note that this takes and returns a list of builder names # convert builder names to builders builders_dict = self.botmaster.builders builders = [builders_dict.get(n) for n in buildernames if n in builders_dict] # find a sorting function sorter = self.master.config.prioritizeBuilders if not sorter: sorter = self._defaultSorter # run it try: builders = yield sorter(self.master, builders) except Exception: log.err(Failure(), "prioritizing builders; order unspecified") # and return the names rv = [b.name for b in builders] timer.stop() return rv @defer.inlineCallbacks def _activityLoop(self): self.active = True timer = metrics.Timer('BuildRequestDistributor._activityLoop()') timer.start() pending_builders = [] while True: yield self.activity_lock.acquire() if not self.running: self.activity_lock.release() break if not pending_builders: # lock pending_builders, pop an element from it, and release yield self.pending_builders_lock.acquire() # bail out if we shouldn't keep looping if not self._pending_builders: self.pending_builders_lock.release() self.activity_lock.release() break # take that builder list, and run it until the end # we make a copy of it, as it could be modified meanwhile pending_builders = copy.copy(self._pending_builders) self._pending_builders = [] self.pending_builders_lock.release() bldr_name = pending_builders.pop(0) # get the actual builder object bldr = self.botmaster.builders.get(bldr_name) try: if bldr: yield 
self._maybeStartBuildsOnBuilder(bldr) except Exception: log.err(Failure(), "from maybeStartBuild for builder '{}'".format(bldr_name)) self.activity_lock.release() timer.stop() self.active = False @defer.inlineCallbacks def _maybeStartBuildsOnBuilder(self, bldr): # create a chooser to give us our next builds # this object is temporary and will go away when we're done bc = self.createBuildChooser(bldr, self.master) while True: worker, breqs = yield bc.chooseNextBuild() if not worker or not breqs: break # claim brid's brids = [br.id for br in breqs] claimed_at_epoch = self.master.reactor.seconds() claimed_at = epoch2datetime(claimed_at_epoch) if not (yield self.master.data.updates.claimBuildRequests( brids, claimed_at=claimed_at)): # some brids were already claimed, so start over bc = self.createBuildChooser(bldr, self.master) continue buildStarted = yield bldr.maybeStartBuild(worker, breqs) if not buildStarted: yield self.master.data.updates.unclaimBuildRequests(brids) # try starting builds again. If we still have a working worker, # then this may re-claim the same buildrequests self.botmaster.maybeStartBuildsForBuilder(self.name) def createBuildChooser(self, bldr, master): # just instantiate the build chooser requested return self.BuildChooser(bldr, master) @defer.inlineCallbacks def _waitForFinish(self): if self._activity_loop_deferred is not None: yield self._activity_loop_deferred buildbot-3.4.0/master/buildbot/process/buildstep.py000066400000000000000000001031141413250514000224370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import inspect import sys from twisted.internet import defer from twisted.internet import error from twisted.python import deprecate from twisted.python import log from twisted.python import versions from twisted.python.deprecate import deprecatedModuleAttribute from twisted.python.failure import Failure from twisted.python.reflect import accumulateClassList from twisted.python.versions import Version from twisted.web.util import formatFailure from zope.interface import implementer from buildbot import config from buildbot import interfaces from buildbot import util from buildbot.interfaces import IRenderable from buildbot.interfaces import WorkerSetupError from buildbot.process import log as plog from buildbot.process import logobserver from buildbot.process import properties from buildbot.process import remotecommand from buildbot.process import results # (WithProperties used to be available in this module) from buildbot.process.properties import WithProperties from buildbot.process.results import ALL_RESULTS from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.util import bytes2unicode from buildbot.util import debounce from buildbot.util import flatten from buildbot.util.test_result_submitter import TestResultSubmitter from buildbot.warnings import warn_deprecated class BuildStepFailed(Exception): pass class BuildStepCancelled(Exception): # used internally for signalling pass class 
CallableAttributeError(Exception): # attribute error raised from a callable run inside a property pass # old import paths for these classes RemoteCommand = remotecommand.RemoteCommand deprecatedModuleAttribute( Version("buildbot", 2, 10, 1), message="Use buildbot.process.remotecommand.RemoteCommand instead.", moduleName="buildbot.process.buildstep", name="RemoteCommand", ) LoggedRemoteCommand = remotecommand.LoggedRemoteCommand deprecatedModuleAttribute( Version("buildbot", 2, 10, 1), message="Use buildbot.process.remotecommand.LoggedRemoteCommand instead.", moduleName="buildbot.process.buildstep", name="LoggedRemoteCommand", ) RemoteShellCommand = remotecommand.RemoteShellCommand deprecatedModuleAttribute( Version("buildbot", 2, 10, 1), message="Use buildbot.process.remotecommand.RemoteShellCommand instead.", moduleName="buildbot.process.buildstep", name="RemoteShellCommand", ) LogObserver = logobserver.LogObserver deprecatedModuleAttribute( Version("buildbot", 2, 10, 1), message="Use buildbot.process.logobserver.LogObserver instead.", moduleName="buildbot.process.buildstep", name="LogObserver", ) LogLineObserver = logobserver.LogLineObserver deprecatedModuleAttribute( Version("buildbot", 2, 10, 1), message="Use buildbot.util.LogLineObserver instead.", moduleName="buildbot.process.buildstep", name="LogLineObserver", ) OutputProgressObserver = logobserver.OutputProgressObserver deprecatedModuleAttribute( Version("buildbot", 2, 10, 1), message="Use buildbot.process.logobserver.OutputProgressObserver instead.", moduleName="buildbot.process.buildstep", name="OutputProgressObserver", ) @implementer(interfaces.IBuildStepFactory) class _BuildStepFactory(util.ComparableMixin): """ This is a wrapper to record the arguments passed to as BuildStep subclass. We use an instance of this class, rather than a closure mostly to make it easier to test that the right factories are getting created. 
""" compare_attrs = ('factory', 'args', 'kwargs') def __init__(self, factory, *args, **kwargs): self.factory = factory self.args = args self.kwargs = kwargs def buildStep(self): try: return self.factory(*self.args, **self.kwargs) except Exception: log.msg("error while creating step, factory={}, args={}, kwargs={}".format(self.factory, self.args, self.kwargs)) raise class BuildStepStatus: # used only for old-style steps pass def get_factory_from_step_or_factory(step_or_factory): if hasattr(step_or_factory, 'get_step_factory'): factory = step_or_factory.get_step_factory() else: factory = step_or_factory # make sure the returned value actually implements IBuildStepFactory return interfaces.IBuildStepFactory(factory) def create_step_from_step_or_factory(step_or_factory): return get_factory_from_step_or_factory(step_or_factory).buildStep() @implementer(interfaces.IBuildStep) class BuildStep(results.ResultComputingConfigMixin, properties.PropertiesMixin, util.ComparableMixin): # Note that the BuildStep is at the same time a template from which per-build steps are # constructed. This works by creating a new IBuildStepFactory in __new__, retrieving it via # get_step_factory() and then calling buildStep() on that factory. alwaysRun = False doStepIf = True hideStepIf = False compare_attrs = ("_factory",) # properties set on a build step are, by nature, always runtime properties set_runtime_properties = True renderables = results.ResultComputingConfigMixin.resultConfig + [ 'alwaysRun', 'description', 'descriptionDone', 'descriptionSuffix', 'doStepIf', 'hideStepIf', 'workdir', ] # 'parms' holds a list of all the parameters we care about, to allow # users to instantiate a subclass of BuildStep with a mixture of # arguments, some of which are for us, some of which are for the subclass # (or a delegate of the subclass, like how ShellCommand delivers many # arguments to the RemoteShellCommand that it creates). 
Such delegating # subclasses will use this list to figure out which arguments are meant # for us and which should be given to someone else. parms = [ 'alwaysRun', 'description', 'descriptionDone', 'descriptionSuffix', 'doStepIf', 'flunkOnFailure', 'flunkOnWarnings', 'haltOnFailure', 'updateBuildSummaryPolicy', 'hideStepIf', 'locks', 'logEncoding', 'name', 'progressMetrics', 'useProgress', 'warnOnFailure', 'warnOnWarnings', 'workdir', ] name = "generic" description = None # set this to a list of short strings to override descriptionDone = None # alternate description when the step is complete descriptionSuffix = None # extra information to append to suffix updateBuildSummaryPolicy = None locks = [] progressMetrics = () # 'time' is implicit useProgress = True # set to False if step is really unpredictable build = None step_status = None progress = None logEncoding = None cmd = None rendered = False # true if attributes are rendered _workdir = None _waitingForLocks = False def __init__(self, **kwargs): self.worker = None for p in self.__class__.parms: if p in kwargs: setattr(self, p, kwargs.pop(p)) if kwargs: config.error("{}.__init__ got unexpected keyword argument(s) {}".format(self.__class__, list(kwargs))) self._pendingLogObservers = [] if not isinstance(self.name, str) and not IRenderable.providedBy(self.name): config.error("BuildStep name must be a string or a renderable object: " "%r" % (self.name,)) if isinstance(self.description, str): self.description = [self.description] if isinstance(self.descriptionDone, str): self.descriptionDone = [self.descriptionDone] if isinstance(self.descriptionSuffix, str): self.descriptionSuffix = [self.descriptionSuffix] if self.updateBuildSummaryPolicy is None: # compute default value for updateBuildSummaryPolicy self.updateBuildSummaryPolicy = [EXCEPTION, RETRY, CANCELLED] if self.flunkOnFailure or self.haltOnFailure or self.warnOnFailure: self.updateBuildSummaryPolicy.append(FAILURE) if self.warnOnWarnings or 
self.flunkOnWarnings: self.updateBuildSummaryPolicy.append(WARNINGS) if self.updateBuildSummaryPolicy is False: self.updateBuildSummaryPolicy = [] if self.updateBuildSummaryPolicy is True: self.updateBuildSummaryPolicy = ALL_RESULTS if not isinstance(self.updateBuildSummaryPolicy, list): config.error("BuildStep updateBuildSummaryPolicy must be " "a list of result ids or boolean but it is %r" % (self.updateBuildSummaryPolicy,)) self._acquiringLocks = [] self.stopped = False self.master = None self.statistics = {} self.logs = {} self._running = False self.stepid = None self.results = None self._start_unhandled_deferreds = None self._test_result_submitters = {} def __new__(klass, *args, **kwargs): self = object.__new__(klass) self._factory = _BuildStepFactory(klass, *args, **kwargs) return self def __str__(self): args = [repr(x) for x in self._factory.args] args.extend([str(k) + "=" + repr(v) for k, v in self._factory.kwargs.items()]) return "{}({})".format( self.__class__.__name__, ", ".join(args)) __repr__ = __str__ def setBuild(self, build): self.build = build self.master = self.build.master def setWorker(self, worker): self.worker = worker @deprecate.deprecated(versions.Version("buildbot", 0, 9, 0)) def setDefaultWorkdir(self, workdir): if self._workdir is None: self._workdir = workdir @property def workdir(self): # default the workdir appropriately if self._workdir is not None or self.build is None: return self._workdir else: # see :ref:`Factory-Workdir-Functions` for details on how to # customize this if callable(self.build.workdir): try: return self.build.workdir(self.build.sources) except AttributeError as e: # if the callable raises an AttributeError # python thinks it is actually workdir that is not existing. 
# python will then swallow the attribute error and call # __getattr__ from worker_transition _, _, traceback = sys.exc_info() raise CallableAttributeError(e).with_traceback(traceback) # we re-raise the original exception by changing its type, # but keeping its stacktrace else: return self.build.workdir @workdir.setter def workdir(self, workdir): self._workdir = workdir def getProperties(self): return self.build.getProperties() def get_step_factory(self): return self._factory def setupProgress(self): # this function temporarily does nothing pass def setProgress(self, metric, value): # this function temporarily does nothing pass def getCurrentSummary(self): if self.description is not None: stepsumm = util.join_list(self.description) if self.descriptionSuffix: stepsumm += ' ' + util.join_list(self.descriptionSuffix) else: stepsumm = 'running' return {'step': stepsumm} def getResultSummary(self): if self.descriptionDone is not None or self.description is not None: stepsumm = util.join_list(self.descriptionDone or self.description) if self.descriptionSuffix: stepsumm += ' ' + util.join_list(self.descriptionSuffix) else: stepsumm = 'finished' if self.results != SUCCESS: stepsumm += ' ({})'.format(Results[self.results]) return {'step': stepsumm} @defer.inlineCallbacks def getBuildResultSummary(self): summary = yield self.getResultSummary() if self.results in self.updateBuildSummaryPolicy and \ 'build' not in summary and 'step' in summary: summary['build'] = summary['step'] return summary @debounce.method(wait=1) @defer.inlineCallbacks def updateSummary(self): def methodInfo(m): lines = inspect.getsourcelines(m) return "\nat {}:{}:\n {}".format(inspect.getsourcefile(m), lines[1], "\n".join(lines[0])) if not self._running: summary = yield self.getResultSummary() if not isinstance(summary, dict): raise TypeError('getResultSummary must return a dictionary: ' + methodInfo(self.getResultSummary)) else: summary = yield self.getCurrentSummary() if not isinstance(summary, dict): 
raise TypeError('getCurrentSummary must return a dictionary: ' + methodInfo(self.getCurrentSummary)) stepResult = summary.get('step', 'finished') if not isinstance(stepResult, str): raise TypeError("step result string must be unicode (got %r)" % (stepResult,)) if self.stepid is not None: stepResult = self.build.properties.cleanupTextFromSecrets( stepResult) yield self.master.data.updates.setStepStateString(self.stepid, stepResult) if not self._running: buildResult = summary.get('build', None) if buildResult and not isinstance(buildResult, str): raise TypeError("build result string must be unicode") @defer.inlineCallbacks def addStep(self): # create and start the step, noting that the name may be altered to # ensure uniqueness self.name = yield self.build.render(self.name) self.build.setUniqueStepName(self) self.stepid, self.number, self.name = yield self.master.data.updates.addStep( buildid=self.build.buildid, name=util.bytes2unicode(self.name)) yield self.master.data.updates.startStep(self.stepid) @defer.inlineCallbacks def startStep(self, remote): self.remote = remote yield self.addStep() self.locks = yield self.build.render(self.locks) # convert all locks into their real form botmaster = self.build.builder.botmaster self.locks = yield botmaster.getLockFromLockAccesses(self.locks, self.build.config_version) # then narrow WorkerLocks down to the worker that this build is being # run on self.locks = [(l.getLockForWorker(self.build.workerforbuilder.worker.workername), la) for l, la in self.locks] for l, la in self.locks: if l in self.build.locks: log.msg(("Hey, lock {} is claimed by both a Step ({}) and the" " parent Build ({})").format(l, self, self.build)) raise RuntimeError("lock claimed by both Step and Build") try: # set up locks yield self.acquireLocks() if self.stopped: raise BuildStepCancelled # render renderables in parallel renderables = [] accumulateClassList(self.__class__, 'renderables', renderables) def setRenderable(res, attr): setattr(self, attr, 
res) dl = [] for renderable in renderables: d = self.build.render(getattr(self, renderable)) d.addCallback(setRenderable, renderable) dl.append(d) yield defer.gatherResults(dl) self.rendered = True # we describe ourselves only when renderables are interpolated self.updateSummary() # check doStepIf (after rendering) if isinstance(self.doStepIf, bool): doStep = self.doStepIf else: doStep = yield self.doStepIf(self) # run -- or skip -- the step if doStep: yield self.addTestResultSets() try: self._running = True self.results = yield self.run() finally: self._running = False else: self.results = SKIPPED # NOTE: all of these `except` blocks must set self.results immediately! except BuildStepCancelled: self.results = CANCELLED except BuildStepFailed: self.results = FAILURE except error.ConnectionLost: self.results = RETRY except Exception: self.results = EXCEPTION why = Failure() log.err(why, "BuildStep.failed; traceback follows") yield self.addLogWithFailure(why) if self.stopped and self.results != RETRY: # We handle this specially because we don't care about # the return code of an interrupted command; we know # that this should just be exception due to interrupt # At the same time we must respect RETRY status because it's used # to retry interrupted build due to some other issues for example # due to worker lost if self.results != CANCELLED: self.results = EXCEPTION # determine whether we should hide this step hidden = self.hideStepIf if callable(hidden): try: hidden = hidden(self.results, self) except Exception: why = Failure() log.err(why, "hidden callback failed; traceback follows") yield self.addLogWithFailure(why) self.results = EXCEPTION hidden = False # perform final clean ups success = yield self._cleanup_logs() if not success: self.results = EXCEPTION # update the summary one last time, make sure that completes, # and then don't update it any more. 
self.updateSummary() yield self.updateSummary.stop() for sub in self._test_result_submitters.values(): yield sub.finish() self.releaseLocks() yield self.master.data.updates.finishStep(self.stepid, self.results, hidden) return self.results def setBuildData(self, name, value, source): # returns a Deferred that yields nothing return self.master.data.updates.setBuildData(self.build.buildid, name, value, source) @defer.inlineCallbacks def _cleanup_logs(self): all_success = True not_finished_logs = [v for (k, v) in self.logs.items() if not v.finished] finish_logs = yield defer.DeferredList([v.finish() for v in not_finished_logs], consumeErrors=True) for success, res in finish_logs: if not success: log.err(res, "when trying to finish a log") all_success = False for log_ in self.logs.values(): if log_.had_errors(): all_success = False return all_success def addTestResultSets(self): return defer.succeed(None) @defer.inlineCallbacks def addTestResultSet(self, description, category, value_unit): sub = TestResultSubmitter() yield sub.setup(self, description, category, value_unit) setid = sub.get_test_result_set_id() self._test_result_submitters[setid] = sub return setid def addTestResult(self, setid, value, test_name=None, test_code_path=None, line=None, duration_ns=None): self._test_result_submitters[setid].add_test_result(value, test_name=test_name, test_code_path=test_code_path, line=line, duration_ns=duration_ns) def acquireLocks(self, res=None): if not self.locks: return defer.succeed(None) if self.stopped: return defer.succeed(None) log.msg("acquireLocks(step {}, locks {})".format(self, self.locks)) for lock, access in self.locks: for waited_lock, _, _ in self._acquiringLocks: if lock is waited_lock: continue if not lock.isAvailable(self, access): self._waitingForLocks = True log.msg("step {} waiting for lock {}".format(self, lock)) d = lock.waitUntilMaybeAvailable(self, access) self._acquiringLocks.append((lock, access, d)) d.addCallback(self.acquireLocks) return d # 
all locks are available, claim them all for lock, access in self.locks: lock.claim(self, access) self._acquiringLocks = [] self._waitingForLocks = False return defer.succeed(None) def run(self): raise NotImplementedError("A custom build step must implement run()") def isNewStyle(self): warn_deprecated('3.0.0', 'BuildStep.isNewStyle() always returns True') return True @defer.inlineCallbacks def interrupt(self, reason): if self.stopped: return self.stopped = True if self._acquiringLocks: for (lock, access, d) in self._acquiringLocks: lock.stopWaitingUntilAvailable(self, access, d) self._acquiringLocks = [] if self._waitingForLocks: yield self.addCompleteLog( 'cancelled while waiting for locks', str(reason)) else: yield self.addCompleteLog('cancelled', str(reason)) if self.cmd: d = self.cmd.interrupt(reason) d.addErrback(log.err, 'while cancelling command') yield d def releaseLocks(self): log.msg("releaseLocks({}): {}".format(self, self.locks)) for lock, access in self.locks: if lock.isOwner(self, access): lock.release(self, access) else: # This should only happen if we've been interrupted assert self.stopped # utility methods that BuildSteps may find useful def workerVersion(self, command, oldversion=None): return self.build.getWorkerCommandVersion(command, oldversion) def workerVersionIsOlderThan(self, command, minversion): sv = self.build.getWorkerCommandVersion(command, None) if sv is None: return True if [int(s) for s in sv.split(".")] < [int(m) for m in minversion.split(".")]: return True return False def checkWorkerHasCommand(self, command): if not self.workerVersion(command): message = "worker is too old, does not know about {}".format(command) raise WorkerSetupError(message) def getWorkerName(self): return self.build.getWorkerName() def addLog(self, name, type='s', logEncoding=None): if self.stepid is None: raise BuildStepCancelled d = self.master.data.updates.addLog(self.stepid, util.bytes2unicode(name), str(type)) @d.addCallback def newLog(logid): return 
self._newLog(name, type, logid, logEncoding) return d def getLog(self, name): return self.logs[name] @defer.inlineCallbacks def addCompleteLog(self, name, text): if self.stepid is None: raise BuildStepCancelled logid = yield self.master.data.updates.addLog(self.stepid, util.bytes2unicode(name), 't') _log = self._newLog(name, 't', logid) yield _log.addContent(text) yield _log.finish() @defer.inlineCallbacks def addHTMLLog(self, name, html): if self.stepid is None: raise BuildStepCancelled logid = yield self.master.data.updates.addLog(self.stepid, util.bytes2unicode(name), 'h') _log = self._newLog(name, 'h', logid) html = bytes2unicode(html) yield _log.addContent(html) yield _log.finish() @defer.inlineCallbacks def addLogWithFailure(self, why, logprefix=""): # helper for showing exceptions to the users try: yield self.addCompleteLog(logprefix + "err.text", why.getTraceback()) yield self.addHTMLLog(logprefix + "err.html", formatFailure(why)) except Exception: log.err(Failure(), "error while formatting exceptions") def addLogWithException(self, why, logprefix=""): return self.addLogWithFailure(Failure(why), logprefix) def addLogObserver(self, logname, observer): assert interfaces.ILogObserver.providedBy(observer) observer.setStep(self) self._pendingLogObservers.append((logname, observer)) self._connectPendingLogObservers() def _newLog(self, name, type, logid, logEncoding=None): if not logEncoding: logEncoding = self.logEncoding if not logEncoding: logEncoding = self.master.config.logEncoding log = plog.Log.new(self.master, name, type, logid, logEncoding) self.logs[name] = log self._connectPendingLogObservers() return log def _connectPendingLogObservers(self): for logname, observer in self._pendingLogObservers[:]: if logname in self.logs: observer.setLog(self.logs[logname]) self._pendingLogObservers.remove((logname, observer)) @defer.inlineCallbacks def addURL(self, name, url): yield self.master.data.updates.addStepURL(self.stepid, str(name), str(url)) return None 
@defer.inlineCallbacks def runCommand(self, command): if self.stopped: return CANCELLED self.cmd = command command.worker = self.worker try: res = yield command.run(self, self.remote, self.build.builder.name) finally: self.cmd = None return res def hasStatistic(self, name): return name in self.statistics def getStatistic(self, name, default=None): return self.statistics.get(name, default) def getStatistics(self): return self.statistics.copy() def setStatistic(self, name, value): self.statistics[name] = value class CommandMixin: @defer.inlineCallbacks def _runRemoteCommand(self, cmd, abandonOnFailure, args, makeResult=None): cmd = remotecommand.RemoteCommand(cmd, args) try: log = self.getLog('stdio') except Exception: log = yield self.addLog('stdio') cmd.useLog(log, False) yield self.runCommand(cmd) if abandonOnFailure and cmd.didFail(): raise BuildStepFailed() if makeResult: return makeResult(cmd) else: return not cmd.didFail() def runRmdir(self, dir, log=None, abandonOnFailure=True): return self._runRemoteCommand('rmdir', abandonOnFailure, {'dir': dir, 'logEnviron': False}) def pathExists(self, path, log=None): return self._runRemoteCommand('stat', False, {'file': path, 'logEnviron': False}) def runMkdir(self, dir, log=None, abandonOnFailure=True): return self._runRemoteCommand('mkdir', abandonOnFailure, {'dir': dir, 'logEnviron': False}) def runGlob(self, path): return self._runRemoteCommand( 'glob', True, {'path': path, 'logEnviron': False}, makeResult=lambda cmd: cmd.updates['files'][0]) class ShellMixin: command = None env = {} want_stdout = True want_stderr = True usePTY = None logfiles = {} lazylogfiles = {} timeout = 1200 maxTime = None logEnviron = True interruptSignal = 'KILL' sigtermTime = None initialStdin = None decodeRC = {0: SUCCESS} _shellMixinArgs = [ 'command', 'workdir', 'env', 'want_stdout', 'want_stderr', 'usePTY', 'logfiles', 'lazylogfiles', 'timeout', 'maxTime', 'logEnviron', 'interruptSignal', 'sigtermTime', 'initialStdin', 'decodeRC', ] 
renderables = _shellMixinArgs def setupShellMixin(self, constructorArgs, prohibitArgs=None): constructorArgs = constructorArgs.copy() if prohibitArgs is None: prohibitArgs = [] def bad(arg): config.error("invalid {} argument {}".format(self.__class__.__name__, arg)) for arg in self._shellMixinArgs: if arg not in constructorArgs: continue if arg in prohibitArgs: bad(arg) else: setattr(self, arg, constructorArgs[arg]) del constructorArgs[arg] for arg in list(constructorArgs): if arg not in BuildStep.parms: bad(arg) del constructorArgs[arg] return constructorArgs @defer.inlineCallbacks def makeRemoteShellCommand(self, collectStdout=False, collectStderr=False, stdioLogName='stdio', **overrides): kwargs = {arg: getattr(self, arg) for arg in self._shellMixinArgs} kwargs.update(overrides) stdio = None if stdioLogName is not None: # Reuse an existing log if possible; otherwise, create one. try: stdio = yield self.getLog(stdioLogName) except KeyError: stdio = yield self.addLog(stdioLogName) kwargs['command'] = flatten(kwargs['command'], (list, tuple)) # store command away for display self.command = kwargs['command'] # check for the usePTY flag if kwargs['usePTY'] is not None: if self.workerVersionIsOlderThan("shell", "2.7"): if stdio is not None: yield stdio.addHeader( "NOTE: worker does not allow master to override usePTY\n") del kwargs['usePTY'] # check for the interruptSignal flag if kwargs["interruptSignal"] and self.workerVersionIsOlderThan("shell", "2.15"): if stdio is not None: yield stdio.addHeader( "NOTE: worker does not allow master to specify interruptSignal\n") del kwargs['interruptSignal'] # lazylogfiles are handled below del kwargs['lazylogfiles'] # merge the builder's environment with that supplied here builderEnv = self.build.builder.config.env kwargs['env'] = yield self.build.render(builderEnv) kwargs['env'].update(self.env) kwargs['stdioLogName'] = stdioLogName # default the workdir appropriately if not kwargs.get('workdir') and not self.workdir: if 
callable(self.build.workdir): kwargs['workdir'] = self.build.workdir(self.build.sources) else: kwargs['workdir'] = self.build.workdir # the rest of the args go to RemoteShellCommand cmd = remotecommand.RemoteShellCommand( collectStdout=collectStdout, collectStderr=collectStderr, **kwargs ) # set up logging if stdio is not None: cmd.useLog(stdio, False) for logname, remotefilename in self.logfiles.items(): if self.lazylogfiles: # it's OK if this does, or does not, return a Deferred def callback(cmd_arg, local_logname=logname): return self.addLog(local_logname) cmd.useLogDelayed(logname, callback, True) else: # add a LogFile newlog = yield self.addLog(logname) # and tell the RemoteCommand to feed it cmd.useLog(newlog, False) return cmd def getResultSummary(self): if self.descriptionDone is not None: return super().getResultSummary() summary = util.command_to_string(self.command) if summary: if self.results != SUCCESS: summary += ' ({})'.format(Results[self.results]) return {'step': summary} return super().getResultSummary() _hush_pyflakes = [WithProperties] del _hush_pyflakes buildbot-3.4.0/master/buildbot/process/cache.py000066400000000000000000000055151413250514000215150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.util import lru from buildbot.util import service class CacheManager(service.ReconfigurableServiceMixin, service.AsyncService): """ A manager for a collection of caches, each for different types of objects and with potentially-overlapping key spaces. There is generally only one instance of this class, available at C{master.caches}. """ # a cache of length one still has many benefits: it collects objects that # remain referenced elsewhere; it collapses simultaneous misses into one # miss function; and it will optimize repeated fetches of the same object. DEFAULT_CACHE_SIZE = 1 def __init__(self): self.setName('caches') self.config = {} self._caches = {} def get_cache(self, cache_name, miss_fn): """ Get an L{AsyncLRUCache} object with the given name. If such an object does not exist, it will be created. Since the cache is permanent, this method can be called only once, e.g., in C{startService}, and it value stored indefinitely. @param cache_name: name of the cache (usually the name of the type of object it stores) @param miss_fn: miss function for the cache; see L{AsyncLRUCache} constructor. @returns: L{AsyncLRUCache} instance """ try: return self._caches[cache_name] except KeyError: max_size = self.config.get(cache_name, self.DEFAULT_CACHE_SIZE) assert max_size >= 1 c = self._caches[cache_name] = lru.AsyncLRUCache(miss_fn, max_size) return c def reconfigServiceWithBuildbotConfig(self, new_config): self.config = new_config.caches for name, cache in self._caches.items(): cache.set_max_size(new_config.caches.get(name, self.DEFAULT_CACHE_SIZE)) return super().reconfigServiceWithBuildbotConfig(new_config) def get_metrics(self): return { n: {'hits': c.hits, 'refhits': c.refhits, 'misses': c.misses, 'max_size': c.max_size} for n, c in self._caches.items()} buildbot-3.4.0/master/buildbot/process/debug.py000066400000000000000000000033731413250514000215400ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.util import service class DebugServices(service.ReconfigurableServiceMixin, service.AsyncMultiService): name = 'debug_services' def __init__(self): super().__init__() self.debug_port = None self.debug_password = None self.debug_registration = None self.manhole = None @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): if new_config.manhole != self.manhole: if self.manhole: yield self.manhole.disownServiceParent() self.manhole = None if new_config.manhole: self.manhole = new_config.manhole yield self.manhole.setServiceParent(self) # chain up yield super().reconfigServiceWithBuildbotConfig(new_config) @defer.inlineCallbacks def stopService(self): # manhole will get stopped as a sub-service yield super().stopService() # clean up if self.manhole: self.manhole = None buildbot-3.4.0/master/buildbot/process/factory.py000066400000000000000000000230341413250514000221150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from contextlib import contextmanager from twisted.python import deprecate from twisted.python import versions from buildbot import interfaces from buildbot import util from buildbot.process import buildstep from buildbot.process.build import Build from buildbot.steps.download_secret_to_worker import DownloadSecretsToWorker from buildbot.steps.download_secret_to_worker import RemoveWorkerFileSecret from buildbot.steps.shell import Compile from buildbot.steps.shell import Configure from buildbot.steps.shell import PerlModuleTest from buildbot.steps.shell import ShellCommand from buildbot.steps.shell import Test from buildbot.steps.source.cvs import CVS from buildbot.steps.source.svn import SVN # deprecated, use BuildFactory.addStep @deprecate.deprecated(versions.Version("buildbot", 0, 8, 6)) def s(steptype, **kwargs): # convenience function for master.cfg files, to create step # specification tuples return buildstep.get_factory_from_step_or_factory(steptype(**kwargs)) class BuildFactory(util.ComparableMixin): """ @cvar buildClass: class to use when creating builds @type buildClass: L{buildbot.process.build.Build} """ buildClass = Build useProgress = 1 workdir = "build" compare_attrs = ('buildClass', 'steps', 'useProgress', 'workdir') def __init__(self, steps=None): self.steps = [] if steps: self.addSteps(steps) def newBuild(self, requests): """Create a new Build instance. 
@param requests: a list of buildrequest dictionaries describing what is to be built """ b = self.buildClass(requests) b.useProgress = self.useProgress b.workdir = self.workdir b.setStepFactories(self.steps) return b def addStep(self, step): if not interfaces.IBuildStep.providedBy(step) and \ not interfaces.IBuildStepFactory.providedBy(step): raise TypeError('step must be an instance of a BuildStep') self.steps.append(buildstep.get_factory_from_step_or_factory(step)) def addSteps(self, steps, withSecrets=None): if withSecrets is None: withSecrets = [] if withSecrets: self.addStep(DownloadSecretsToWorker(withSecrets)) for s in steps: self.addStep(s) if withSecrets: self.addStep(RemoveWorkerFileSecret(withSecrets)) @contextmanager def withSecrets(self, secrets): self.addStep(DownloadSecretsToWorker(secrets)) yield self self.addStep(RemoveWorkerFileSecret(secrets)) # BuildFactory subclasses for common build tools class _DefaultCommand: # Used to indicate a default command to the step. pass class GNUAutoconf(BuildFactory): def __init__(self, source, configure="./configure", configureEnv=None, configureFlags=None, reconf=None, compile=_DefaultCommand, test=_DefaultCommand, distcheck=_DefaultCommand): if configureEnv is None: configureEnv = {} if configureFlags is None: configureFlags = [] if compile is _DefaultCommand: compile = ["make", "all"] if test is _DefaultCommand: test = ["make", "check"] if distcheck is _DefaultCommand: distcheck = ["make", "distcheck"] super().__init__([source]) if reconf is True: reconf = ["autoreconf", "-si"] if reconf is not None: self.addStep( ShellCommand(name="autoreconf", command=reconf, env=configureEnv)) if configure is not None: # we either need to wind up with a string (which will be # space-split), or with a list of strings (which will not). The # list of strings is the preferred form. 
if isinstance(configure, str): if configureFlags: assert " " not in configure # please use list instead command = [configure] + configureFlags else: command = configure else: assert isinstance(configure, (list, tuple)) command = configure + configureFlags self.addStep(Configure(command=command, env=configureEnv)) if compile is not None: self.addStep(Compile(command=compile, env=configureEnv)) if test is not None: self.addStep(Test(command=test, env=configureEnv)) if distcheck is not None: self.addStep(Test(command=distcheck, env=configureEnv)) class CPAN(BuildFactory): def __init__(self, source, perl="perl"): super().__init__([source]) self.addStep(Configure(command=[perl, "Makefile.PL"])) self.addStep(Compile(command=["make"])) self.addStep(PerlModuleTest(command=["make", "test"])) class Distutils(BuildFactory): def __init__(self, source, python="python", test=None): super().__init__([source]) self.addStep(Compile(command=[python, "./setup.py", "build"])) if test is not None: self.addStep(Test(command=test)) class Trial(BuildFactory): """Build a python module that uses distutils and trial. Set 'tests' to the module in which the tests can be found, or set useTestCaseNames=True to always have trial figure out which tests to run (based upon which files have been changed). See docs/factories.xhtml for usage samples. Not all of the Trial BuildStep options are available here, only the most commonly used ones. 
To get complete access, you will need to create a custom BuildFactory.""" trial = "trial" randomly = False recurse = False def __init__(self, source, buildpython=None, trialpython=None, trial=None, testpath=".", randomly=None, recurse=None, tests=None, useTestCaseNames=False, env=None): super().__init__([source]) assert tests or useTestCaseNames, "must use one or the other" if buildpython is None: buildpython = ["python"] if trialpython is None: trialpython = [] if trial is not None: self.trial = trial if randomly is not None: self.randomly = randomly if recurse is not None: self.recurse = recurse from buildbot.steps.python_twisted import Trial buildcommand = buildpython + ["./setup.py", "build"] self.addStep(Compile(command=buildcommand, env=env)) self.addStep(Trial( python=trialpython, trial=self.trial, testpath=testpath, tests=tests, testChanges=useTestCaseNames, randomly=self.randomly, recurse=self.recurse, env=env, )) # compatibility classes, will go away. Note that these only offer # compatibility at the constructor level: if you have subclassed these # factories, your subclasses are unlikely to still work correctly. 
ConfigurableBuildFactory = BuildFactory class BasicBuildFactory(GNUAutoconf): # really a "GNU Autoconf-created tarball -in-CVS tree" builder def __init__(self, cvsroot, cvsmodule, configure=None, configureEnv=None, compile="make all", test="make check", cvsCopy=False): if configureEnv is None: configureEnv = {} mode = "full" method = "clobber" if cvsCopy: method = "copy" source = CVS( cvsroot=cvsroot, cvsmodule=cvsmodule, mode=mode, method=method) super().__init__(source, configure=configure, configureEnv=configureEnv, compile=compile, test=test) class QuickBuildFactory(BasicBuildFactory): useProgress = False def __init__(self, cvsroot, cvsmodule, configure=None, configureEnv=None, compile="make all", test="make check", cvsCopy=False): if configureEnv is None: configureEnv = {} mode = "incremental" source = CVS(cvsroot=cvsroot, cvsmodule=cvsmodule, mode=mode) super().__init__(source, configure=configure, configureEnv=configureEnv, compile=compile, test=test) class BasicSVN(GNUAutoconf): def __init__(self, svnurl, configure=None, configureEnv=None, compile="make all", test="make check"): if configureEnv is None: configureEnv = {} source = SVN(svnurl=svnurl, mode="incremental") super().__init__(source, configure=configure, configureEnv=configureEnv, compile=compile, test=test) buildbot-3.4.0/master/buildbot/process/log.py000066400000000000000000000140101413250514000212210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.internet import defer from twisted.python import log from buildbot import util from buildbot.util import lineboundaries class Log: _byType = {} def __init__(self, master, name, type, logid, decoder): self.type = type self.logid = logid self.master = master self.name = name self.subPoint = util.subscription.SubscriptionPoint("%r log" % (name,)) self.subscriptions = {} self._finishing = False self.finished = False self.finishWaiters = [] self._had_errors = False self.lock = defer.DeferredLock() self.decoder = decoder @staticmethod def _decoderFromString(cfg): """ Return a decoder function. If cfg is a string such as 'latin-1' or u'latin-1', then we return a new lambda, s.decode(). If cfg is already a lambda or function, then we return that. 
""" if isinstance(cfg, (bytes, str)): return lambda s: s.decode(cfg, 'replace') return cfg @classmethod def new(cls, master, name, type, logid, logEncoding): type = str(type) try: subcls = cls._byType[type] except KeyError as e: raise RuntimeError("Invalid log type %r" % (type,)) from e decoder = Log._decoderFromString(logEncoding) return subcls(master, name, type, logid, decoder) def getName(self): return self.name # subscriptions def subscribe(self, callback): return self.subPoint.subscribe(callback) # adding lines @defer.inlineCallbacks def addRawLines(self, lines): # used by subclasses to add lines that are already appropriately # formatted for the log type, and newline-terminated assert lines[-1] == '\n' assert not self.finished yield self.lock.run(lambda: self.master.data.updates.appendLog(self.logid, lines)) # completion def isFinished(self): return self.finished def waitUntilFinished(self): d = defer.Deferred() if self.finished: d.succeed(None) else: self.finishWaiters.append(d) return d def had_errors(self): return self._had_errors @defer.inlineCallbacks def finish(self): assert not self._finishing, "Did you maybe forget to yield the method?" 
assert not self.finished self._finishing = True def fToRun(): self.finished = True return self.master.data.updates.finishLog(self.logid) yield self.lock.run(fToRun) # notify subscribers *after* finishing the log self.subPoint.deliver(None, None) yield self.subPoint.waitForDeliveriesToFinish() # notify those waiting for finish for d in self.finishWaiters: d.callback(None) self._had_errors = len(self.subPoint.pop_exceptions()) > 0 # start a compressLog call but don't make our caller wait for # it to complete d = self.master.data.updates.compressLog(self.logid) d.addErrback( log.err, "while compressing log %d (ignored)" % self.logid) self._finishing = False class PlainLog(Log): def __init__(self, master, name, type, logid, decoder): super().__init__(master, name, type, logid, decoder) def wholeLines(lines): self.subPoint.deliver(None, lines) return self.addRawLines(lines) self.lbf = lineboundaries.LineBoundaryFinder(wholeLines) def addContent(self, text): if not isinstance(text, str): text = self.decoder(text) # add some text in the log's default stream return self.lbf.append(text) @defer.inlineCallbacks def finish(self): yield self.lbf.flush() yield super().finish() class TextLog(PlainLog): pass Log._byType['t'] = TextLog class HtmlLog(PlainLog): pass Log._byType['h'] = HtmlLog class StreamLog(Log): pat = re.compile('^', re.M) def __init__(self, step, name, type, logid, decoder): super().__init__(step, name, type, logid, decoder) self.lbfs = {} def _getLbf(self, stream): try: return self.lbfs[stream] except KeyError: def wholeLines(lines): # deliver the un-annotated version to subscribers self.subPoint.deliver(stream, lines) # strip the last character, as the regexp will add a # prefix character after the trailing newline return self.addRawLines(self.pat.sub(stream, lines)[:-1]) lbf = self.lbfs[stream] = \ lineboundaries.LineBoundaryFinder(wholeLines) return lbf def addStdout(self, text): if not isinstance(text, str): text = self.decoder(text) return 
self._getLbf('o').append(text) def addStderr(self, text): if not isinstance(text, str): text = self.decoder(text) return self._getLbf('e').append(text) def addHeader(self, text): if not isinstance(text, str): text = self.decoder(text) return self._getLbf('h').append(text) @defer.inlineCallbacks def finish(self): for lbf in self.lbfs.values(): yield lbf.flush() yield super().finish() Log._byType['s'] = StreamLog buildbot-3.4.0/master/buildbot/process/logobserver.py000066400000000000000000000114101413250514000227720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from zope.interface import implementer from buildbot import interfaces @implementer(interfaces.ILogObserver) class LogObserver: def setStep(self, step): self.step = step def setLog(self, loog): loog.subscribe(self.gotData) def gotData(self, stream, data): if data is None: self.finishReceived() elif stream is None or stream == 'o': self.outReceived(data) elif stream == 'e': self.errReceived(data) elif stream == 'h': self.headerReceived(data) def finishReceived(self): pass def outReceived(self, data): pass def errReceived(self, data): pass def headerReceived(self, data): pass class LogLineObserver(LogObserver): stdoutDelimiter = "\n" stderrDelimiter = "\n" headerDelimiter = "\n" def __init__(self): super().__init__() self.max_length = 16384 def setMaxLineLength(self, max_length): """ Set the maximum line length: lines longer than max_length are dropped. Default is 16384 bytes. Use sys.maxint for effective infinity. """ self.max_length = max_length def _lineReceived(self, data, delimiter, funcReceived): for line in data.rstrip().split(delimiter): if len(line) > self.max_length: continue funcReceived(line) def outReceived(self, data): self._lineReceived(data, self.stdoutDelimiter, self.outLineReceived) def errReceived(self, data): self._lineReceived(data, self.stderrDelimiter, self.errLineReceived) def headerReceived(self, data): self._lineReceived(data, self.headerDelimiter, self.headerLineReceived) def outLineReceived(self, line): """This will be called with complete stdout lines (not including the delimiter). Override this in your observer.""" def errLineReceived(self, line): """This will be called with complete lines of stderr (not including the delimiter). Override this in your observer.""" def headerLineReceived(self, line): """This will be called with complete lines of stderr (not including the delimiter). 
Override this in your observer.""" class LineConsumerLogObserver(LogLineObserver): def __init__(self, consumerFunction): super().__init__() self.generator = None self.consumerFunction = consumerFunction def feed(self, input): # note that we defer starting the generator until the first bit of # data, since the observer may be instantiated during configuration as # well as for each execution of the step. self.generator = self.consumerFunction() next(self.generator) # shortcut all remaining feed operations self.feed = self.generator.send self.feed(input) def outLineReceived(self, line): self.feed(('o', line)) def errLineReceived(self, line): self.feed(('e', line)) def headerLineReceived(self, line): self.feed(('h', line)) def finishReceived(self): if self.generator: self.generator.close() class OutputProgressObserver(LogObserver): length = 0 def __init__(self, name): self.name = name def gotData(self, stream, data): if data: self.length += len(data) self.step.setProgress(self.name, self.length) class BufferLogObserver(LogObserver): def __init__(self, wantStdout=True, wantStderr=False): super().__init__() self.stdout = [] if wantStdout else None self.stderr = [] if wantStderr else None def outReceived(self, data): if self.stdout is not None: self.stdout.append(data) def errReceived(self, data): if self.stderr is not None: self.stderr.append(data) def _get(self, chunks): if chunks is None or not chunks: return '' return ''.join(chunks) def getStdout(self): return self._get(self.stdout) def getStderr(self): return self._get(self.stderr) buildbot-3.4.0/master/buildbot/process/measured_service.py000066400000000000000000000025621413250514000237760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process import metrics from buildbot.util.service import BuildbotServiceManager class MeasuredBuildbotServiceManager(BuildbotServiceManager): managed_services_name = "services" @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): timer = metrics.Timer( "{0}.reconfigServiceWithBuildbotConfig".format(self.name)) timer.start() yield super().reconfigServiceWithBuildbotConfig(new_config) metrics.MetricCountEvent.log("num_{0}".format(self.managed_services_name), len(list(self)), absolute=True) timer.stop() buildbot-3.4.0/master/buildbot/process/metrics.py000066400000000000000000000331761413250514000221240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members r""" Buildbot metrics module Keeps track of counts and timings of various internal buildbot activities. Basic architecture: MetricEvent.log(...) || \/ MetricLogObserver || \/ MetricHandler || \/ MetricWatcher """ import gc import os import sys from collections import defaultdict from collections import deque from twisted.application import service from twisted.internet import reactor from twisted.internet.task import LoopingCall from twisted.python import log from buildbot import util from buildbot.util import service as util_service # Make use of the resource module if we can try: import resource assert resource except ImportError: resource = None class MetricEvent: @classmethod def log(cls, *args, **kwargs): log.msg(metric=cls(*args, **kwargs)) class MetricCountEvent(MetricEvent): def __init__(self, counter, count=1, absolute=False): self.counter = counter self.count = count self.absolute = absolute class MetricTimeEvent(MetricEvent): def __init__(self, timer, elapsed): self.timer = timer self.elapsed = elapsed ALARM_OK, ALARM_WARN, ALARM_CRIT = list(range(3)) ALARM_TEXT = ["OK", "WARN", "CRIT"] class MetricAlarmEvent(MetricEvent): def __init__(self, alarm, msg=None, level=ALARM_OK): self.alarm = alarm self.level = level self.msg = msg def countMethod(counter): def decorator(func): def wrapper(*args, **kwargs): MetricCountEvent.log(counter=counter) return func(*args, **kwargs) return wrapper return decorator class Timer: # For testing _reactor = None def __init__(self, name): self.name = name self.started = None def startTimer(self, func): def wrapper(*args, **kwargs): self.start() return func(*args, **kwargs) return wrapper def stopTimer(self, func): def wrapper(*args, **kwargs): try: return func(*args, **kwargs) finally: self.stop() return wrapper def start(self): self.started = util.now(self._reactor) def stop(self): if self.started is not None: elapsed = util.now(self._reactor) - self.started 
MetricTimeEvent.log(timer=self.name, elapsed=elapsed) self.started = None def timeMethod(name, _reactor=None): def decorator(func): t = Timer(name) t._reactor = _reactor def wrapper(*args, **kwargs): t.start() try: return func(*args, **kwargs) finally: t.stop() return wrapper return decorator class FiniteList(deque): def __init__(self, maxlen=10): self._maxlen = maxlen super().__init__() def append(self, o): deque.append(self, o) if len(self) > self._maxlen: self.popleft() class AveragingFiniteList(FiniteList): def __init__(self, maxlen=10): super().__init__(maxlen) self.average = 0 def append(self, o): super().append(o) self._calc() def _calc(self): if not self: self.average = 0 else: self.average = float(sum(self)) / len(self) return self.average class MetricHandler: def __init__(self, metrics): self.metrics = metrics self.watchers = [] self.reset() def addWatcher(self, watcher): self.watchers.append(watcher) def removeWatcher(self, watcher): self.watchers.remove(watcher) # For subclasses to define def reset(self): raise NotImplementedError def handle(self, eventDict, metric): raise NotImplementedError def get(self, metric): raise NotImplementedError def keys(self): raise NotImplementedError def report(self): raise NotImplementedError def asDict(self): raise NotImplementedError class MetricCountHandler(MetricHandler): _counters = None def reset(self): self._counters = defaultdict(int) def handle(self, eventDict, metric): if metric.absolute: self._counters[metric.counter] = metric.count else: self._counters[metric.counter] += metric.count def keys(self): return list(self._counters) def get(self, counter): return self._counters[counter] def report(self): retval = [] for counter in sorted(self.keys()): retval.append("Counter %s: %i" % (counter, self.get(counter))) return "\n".join(retval) def asDict(self): retval = {} for counter in sorted(self.keys()): retval[counter] = self.get(counter) return dict(counters=retval) class MetricTimeHandler(MetricHandler): _timers = 
None def reset(self): self._timers = defaultdict(AveragingFiniteList) def handle(self, eventDict, metric): self._timers[metric.timer].append(metric.elapsed) def keys(self): return list(self._timers) def get(self, timer): return self._timers[timer].average def report(self): retval = [] for timer in sorted(self.keys()): retval.append("Timer %s: %.3g" % (timer, self.get(timer))) return "\n".join(retval) def asDict(self): retval = {} for timer in sorted(self.keys()): retval[timer] = self.get(timer) return dict(timers=retval) class MetricAlarmHandler(MetricHandler): _alarms = None def reset(self): self._alarms = defaultdict(lambda x: ALARM_OK) def handle(self, eventDict, metric): self._alarms[metric.alarm] = (metric.level, metric.msg) def report(self): retval = [] for alarm, (level, msg) in sorted(self._alarms.items()): if msg: retval.append("{} {}: {}".format(ALARM_TEXT[level], alarm, msg)) else: retval.append("{} {}".format(ALARM_TEXT[level], alarm)) return "\n".join(retval) def asDict(self): retval = {} for alarm, (level, msg) in sorted(self._alarms.items()): retval[alarm] = (ALARM_TEXT[level], msg) return dict(alarms=retval) class AttachedWorkersWatcher: def __init__(self, metrics): self.metrics = metrics def run(self): # Check if 'BotMaster.attached_workers' equals # 'AbstractWorker.attached_workers' h = self.metrics.getHandler(MetricCountEvent) if not h: log.msg("Couldn't get MetricCountEvent handler") MetricAlarmEvent.log('AttachedWorkersWatcher', msg="Coudln't get MetricCountEvent handler", level=ALARM_WARN) return botmaster_count = h.get('BotMaster.attached_workers') worker_count = h.get('AbstractWorker.attached_workers') # We let these be off by one since they're counted at slightly # different times if abs(botmaster_count - worker_count) > 1: level = ALARM_WARN else: level = ALARM_OK MetricAlarmEvent.log('attached_workers', msg='{} {}'.format(botmaster_count, worker_count), level=level) def _get_rss(): if sys.platform == 'linux': try: with 
open("/proc/%i/statm" % os.getpid()) as f: return int(f.read().split()[1]) except Exception: return 0 return 0 def periodicCheck(_reactor=reactor): try: # Measure how much garbage we have garbage_count = len(gc.garbage) MetricCountEvent.log('gc.garbage', garbage_count, absolute=True) if garbage_count == 0: level = ALARM_OK else: level = ALARM_WARN MetricAlarmEvent.log('gc.garbage', level=level) if resource: r = resource.getrusage(resource.RUSAGE_SELF) attrs = ['ru_utime', 'ru_stime', 'ru_maxrss', 'ru_ixrss', 'ru_idrss', 'ru_isrss', 'ru_minflt', 'ru_majflt', 'ru_nswap', 'ru_inblock', 'ru_oublock', 'ru_msgsnd', 'ru_msgrcv', 'ru_nsignals', 'ru_nvcsw', 'ru_nivcsw'] for i, a in enumerate(attrs): # Linux versions prior to 2.6.32 didn't report this value, but we # can calculate it from /proc//statm v = r[i] if a == 'ru_maxrss' and v == 0: v = _get_rss() * resource.getpagesize() / 1024 MetricCountEvent.log('resource.{}'.format(a), v, absolute=True) MetricCountEvent.log( 'resource.pagesize', resource.getpagesize(), absolute=True) # Measure the reactor delay then = util.now(_reactor) dt = 0.1 def cb(): now = util.now(_reactor) delay = (now - then) - dt MetricTimeEvent.log("reactorDelay", delay) _reactor.callLater(dt, cb) except Exception: log.err(None, "while collecting VM metrics") class MetricLogObserver(util_service.ReconfigurableServiceMixin, service.MultiService): _reactor = reactor def __init__(self): super().__init__() self.setName('metrics') self.enabled = False self.periodic_task = None self.periodic_interval = None self.log_task = None self.log_interval = None # Mapping of metric type to handlers for that type self.handlers = {} # Register our default handlers self.registerHandler(MetricCountEvent, MetricCountHandler(self)) self.registerHandler(MetricTimeEvent, MetricTimeHandler(self)) self.registerHandler(MetricAlarmEvent, MetricAlarmHandler(self)) self.getHandler(MetricCountEvent).addWatcher( AttachedWorkersWatcher(self)) def 
reconfigServiceWithBuildbotConfig(self, new_config): # first, enable or disable if new_config.metrics is None: self.disable() else: self.enable() metrics_config = new_config.metrics # Start up periodic logging log_interval = metrics_config.get('log_interval', 60) if log_interval != self.log_interval: if self.log_task: self.log_task.stop() self.log_task = None if log_interval: self.log_task = LoopingCall(self.report) self.log_task.clock = self._reactor self.log_task.start(log_interval) # same for the periodic task periodic_interval = metrics_config.get('periodic_interval', 10) if periodic_interval != self.periodic_interval: if self.periodic_task: self.periodic_task.stop() self.periodic_task = None if periodic_interval: self.periodic_task = LoopingCall(periodicCheck, self._reactor) self.periodic_task.clock = self._reactor self.periodic_task.start(periodic_interval) # upcall return super().reconfigServiceWithBuildbotConfig(new_config) def stopService(self): self.disable() super().stopService() def enable(self): if self.enabled: return log.addObserver(self.emit) self.enabled = True def disable(self): if not self.enabled: return if self.periodic_task: self.periodic_task.stop() self.periodic_task = None if self.log_task: self.log_task.stop() self.log_task = None log.removeObserver(self.emit) self.enabled = False def registerHandler(self, interface, handler): old = self.getHandler(interface) self.handlers[interface] = handler return old def getHandler(self, interface): return self.handlers.get(interface) def emit(self, eventDict): # Ignore non-statistic events metric = eventDict.get('metric') if not metric or not isinstance(metric, MetricEvent): return if metric.__class__ not in self.handlers: return h = self.handlers[metric.__class__] h.handle(eventDict, metric) for w in h.watchers: w.run() def asDict(self): retval = {} for interface, handler in self.handlers.items(): retval.update(handler.asDict()) return retval def report(self): try: for interface, handler in 
self.handlers.items(): report = handler.report() if not report: continue for line in report.split("\n"): log.msg(line) except Exception: log.err(None, "generating metric report") buildbot-3.4.0/master/buildbot/process/properties.py000066400000000000000000000760521413250514000226520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import collections import json import re import weakref from twisted.internet import defer from twisted.python.components import registerAdapter from zope.interface import implementer from buildbot import config from buildbot import util from buildbot.interfaces import IProperties from buildbot.interfaces import IRenderable from buildbot.util import flatten @implementer(IProperties) class Properties(util.ComparableMixin): """ I represent a set of properties that can be interpolated into various strings in buildsteps. @ivar properties: dictionary mapping property values to tuples (value, source), where source is a string identifying the source of the property. Objects of this class can be read like a dictionary -- in this case, only the property value is returned. As a special case, a property value of None is returned as an empty string when used as a mapping. 
""" compare_attrs = ('properties',) def __init__(self, **kwargs): """ @param kwargs: initial property values (for testing) """ self.properties = {} # Track keys which are 'runtime', and should not be # persisted if a build is rebuilt self.runtime = set() self.build = None # will be set by the Build when starting self._used_secrets = {} if kwargs: self.update(kwargs, "TEST") self._master = None self._sourcestamps = None self._changes = None @property def master(self): if self.build is not None: return self.build.master return self._master @master.setter def master(self, value): self._master = value @property def sourcestamps(self): if self.build is not None: return [b.asDict() for b in self.build.getAllSourceStamps()] elif self._sourcestamps is not None: return self._sourcestamps raise AttributeError('neither build nor _sourcestamps are set') @sourcestamps.setter def sourcestamps(self, value): self._sourcestamps = value def getSourceStamp(self, codebase=''): for source in self.sourcestamps: if source['codebase'] == codebase: return source return None @property def changes(self): if self.build is not None: return [c.asChDict() for c in self.build.allChanges()] elif self._changes is not None: return self._changes raise AttributeError('neither build nor _changes are set') @changes.setter def changes(self, value): self._changes = value @property def files(self): if self.build is not None: return self.build.allFiles() files = [] # self.changes, not self._changes to raise AttributeError if unset for chdict in self.changes: files.extend(chdict['files']) return files @classmethod def fromDict(cls, propDict): properties = cls() for name, (value, source) in propDict.items(): properties.setProperty(name, value, source) return properties def __getstate__(self): d = self.__dict__.copy() d['build'] = None return d def __setstate__(self, d): self.__dict__ = d if not hasattr(self, 'runtime'): self.runtime = set() def __contains__(self, name): return name in self.properties def 
__getitem__(self, name): """Just get the value for this property.""" rv = self.properties[name][0] return rv def __bool__(self): return bool(self.properties) def getPropertySource(self, name): return self.properties[name][1] def asList(self): """Return the properties as a sorted list of (name, value, source)""" ret = sorted([(k, v[0], v[1]) for k, v in self.properties.items()]) return ret def asDict(self): """Return the properties as a simple key:value dictionary, properly unicoded""" return self.properties.copy() def __repr__(self): return ('Properties(**' + repr(dict((k, v[0]) for k, v in self.properties.items())) + ')') def update(self, dict, source, runtime=False): """Update this object from a dictionary, with an explicit source specified.""" for k, v in dict.items(): self.setProperty(k, v, source, runtime=runtime) def updateFromProperties(self, other): """Update this object based on another object; the other object's """ self.properties.update(other.properties) self.runtime.update(other.runtime) def updateFromPropertiesNoRuntime(self, other): """Update this object based on another object, but don't include properties that were marked as runtime.""" for k, v in other.properties.items(): if k not in other.runtime: self.properties[k] = v # IProperties methods def getProperty(self, name, default=None): return self.properties.get(name, (default,))[0] def hasProperty(self, name): return name in self.properties has_key = hasProperty def setProperty(self, name, value, source, runtime=False): name = util.bytes2unicode(name) if not IRenderable.providedBy(value): json.dumps(value) # Let the exception propagate ... 
source = util.bytes2unicode(source) self.properties[name] = (value, source) if runtime: self.runtime.add(name) def getProperties(self): return self def getBuild(self): return self.build def render(self, value): renderable = IRenderable(value) return defer.maybeDeferred(renderable.getRenderingFor, self) # as the secrets are used in the renderable, they can pretty much arrive anywhere # in the log of state strings # so we have the renderable record here which secrets are used that we must remove def useSecret(self, secret_value, secret_name): if secret_value.strip(): self._used_secrets[secret_value] = "<" + secret_name + ">" # This method shall then be called to remove secrets from any text that could be logged # somewhere and that could contain secrets def cleanupTextFromSecrets(self, text): # Better be correct and inefficient than efficient and wrong secrets = self._used_secrets for k in sorted(secrets, key=len, reverse=True): text = text.replace(k, secrets[k]) return text class PropertiesMixin: """ A mixin to add L{IProperties} methods to a class which does not implement the full interface, only getProperties() function. This is useful because L{IProperties} methods are often called on L{Build} objects without first coercing them. @ivar set_runtime_properties: the default value for the C{runtime} parameter of L{setProperty}. 
""" set_runtime_properties = False def getProperty(self, propname, default=None): return self.getProperties().getProperty(propname, default) def hasProperty(self, propname): return self.getProperties().hasProperty(propname) has_key = hasProperty def setProperty(self, propname, value, source='Unknown', runtime=None): # source is not optional in IProperties, but is optional here to avoid # breaking user-supplied code that fails to specify a source props = self.getProperties() if runtime is None: runtime = self.set_runtime_properties props.setProperty(propname, value, source, runtime=runtime) def render(self, value): return self.getProperties().render(value) @implementer(IRenderable) class RenderableOperatorsMixin: """ Properties and Interpolate instances can be manipulated with standard operators. """ def __eq__(self, other): return _OperatorRenderer(self, other, "==", lambda v1, v2: v1 == v2) def __ne__(self, other): return _OperatorRenderer(self, other, "!=", lambda v1, v2: v1 != v2) def __lt__(self, other): return _OperatorRenderer(self, other, "<", lambda v1, v2: v1 < v2) def __le__(self, other): return _OperatorRenderer(self, other, "<=", lambda v1, v2: v1 <= v2) def __gt__(self, other): return _OperatorRenderer(self, other, ">", lambda v1, v2: v1 > v2) def __ge__(self, other): return _OperatorRenderer(self, other, ">=", lambda v1, v2: v1 >= v2) def __add__(self, other): return _OperatorRenderer(self, other, "+", lambda v1, v2: v1 + v2) def __sub__(self, other): return _OperatorRenderer(self, other, "-", lambda v1, v2: v1 - v2) def __mul__(self, other): return _OperatorRenderer(self, other, "*", lambda v1, v2: v1 * v2) def __truediv__(self, other): return _OperatorRenderer(self, other, "/", lambda v1, v2: v1 / v2) def __floordiv__(self, other): return _OperatorRenderer(self, other, "//", lambda v1, v2: v1 // v2) def __mod__(self, other): return _OperatorRenderer(self, other, "%", lambda v1, v2: v1 % v2) # we cannot use this trick to overload the 'in' operator, 
    # as python will force the result
    # of __contains__ to a boolean, forcing it to True all the time
    # so we mimic sqlalchemy and make a in_ method
    def in_(self, other):
        return _OperatorRenderer(self, other, "in", lambda v1, v2: v1 in v2)


@implementer(IRenderable)
class _OperatorRenderer(RenderableOperatorsMixin, util.ComparableMixin):
    """
    An instance of this class renders a comparison given by a operator
    function with v1 and v2
    """
    # NOTE(review): compare_attrs names 'fn', but no attribute 'fn' is set in
    # __init__ (attributes are v1, v2, comparator, cstr) — looks like a
    # copy-paste slip; confirm intended comparison semantics before changing.
    compare_attrs = ('fn',)

    def __init__(self, v1, v2, cstr, comparator):
        # cstr is the operator's string form, used only by __repr__
        self.v1, self.v2, self.comparator, self.cstr = v1, v2, comparator, cstr

    @defer.inlineCallbacks
    def getRenderingFor(self, props):
        # render both operands (possibly Deferreds) before comparing
        v1, v2 = yield props.render((self.v1, self.v2))
        return self.comparator(v1, v2)

    def __repr__(self):
        return '%r %s %r' % (self.v1, self.cstr, self.v2)


class _PropertyMap:
    """
    Privately-used mapping object to implement WithProperties' substitutions,
    including the rendering of None as ''.
    """
    colon_minus_re = re.compile(r"(.*):-(.*)")
    colon_tilde_re = re.compile(r"(.*):~(.*)")
    colon_plus_re = re.compile(r"(.*):\+(.*)")

    def __init__(self, properties):
        # use weakref here to avoid a reference loop
        self.properties = weakref.ref(properties)
        self.temp_vals = {}

    def __getitem__(self, key):
        properties = self.properties()
        assert properties is not None

        def colon_minus(mo):
            # %(prop:-repl)s
            # if prop exists, use it; otherwise, use repl
            prop, repl = mo.group(1, 2)
            if prop in self.temp_vals:
                return self.temp_vals[prop]
            elif prop in properties:
                return properties[prop]
            return repl

        def colon_tilde(mo):
            # %(prop:~repl)s
            # if prop exists and is true (nonempty), use it; otherwise, use
            # repl
            prop, repl = mo.group(1, 2)
            if prop in self.temp_vals and self.temp_vals[prop]:
                return self.temp_vals[prop]
            elif prop in properties and properties[prop]:
                return properties[prop]
            return repl

        def colon_plus(mo):
            # %(prop:+repl)s
            # if prop exists, use repl; otherwise, an empty string
            prop, repl = mo.group(1, 2)
            if prop in properties or prop in self.temp_vals:
                return repl
            return ''

        for regexp, fn in [
            (self.colon_minus_re, colon_minus),
            (self.colon_tilde_re, colon_tilde),
            (self.colon_plus_re, colon_plus),
        ]:
            mo = regexp.match(key)
            if mo:
                rv = fn(mo)
                break
        else:
            # If explicitly passed as a kwarg, use that,
            # otherwise, use the property value.
            if key in self.temp_vals:
                rv = self.temp_vals[key]
            else:
                rv = properties[key]

        # translate 'None' to an empty string
        if rv is None:
            rv = ''
        return rv

    def add_temporary_value(self, key, val):
        'Add a temporary value (to support keyword arguments to WithProperties)'
        self.temp_vals[key] = val


@implementer(IRenderable)
class WithProperties(util.ComparableMixin):
    """
    This is a marker class, used fairly widely to indicate that we want to
    interpolate build properties.
    """

    compare_attrs = ('fmtstring', 'args', 'lambda_subs')

    def __init__(self, fmtstring, *args, **lambda_subs):
        self.fmtstring = fmtstring
        self.args = args
        if not self.args:
            self.lambda_subs = lambda_subs
            for key, val in self.lambda_subs.items():
                if not callable(val):
                    raise ValueError(
                        'Value for lambda substitution "{}" must be callable.'.format(key))
        elif lambda_subs:
            raise ValueError(
                'WithProperties takes either positional or keyword substitutions, not both.')

    def getRenderingFor(self, build):
        pmap = _PropertyMap(build.getProperties())
        if self.args:
            # positional substitutions: look each name up directly
            strings = []
            for name in self.args:
                strings.append(pmap[name])
            s = self.fmtstring % tuple(strings)
        else:
            # keyword (lambda) substitutions: evaluate each callable now
            for k, v in self.lambda_subs.items():
                pmap.add_temporary_value(k, v(build))
            s = self.fmtstring % pmap
        return s


class _NotHasKey(util.ComparableMixin):
    """A marker for missing ``hasKey`` parameter.

    To withstand ``deepcopy``, ``reload`` and pickle serialization round
    trips, check it with ``==`` or ``!=``.
""" compare_attrs = () # any instance of _NotHasKey would do, yet we don't want to create and delete # them all the time _notHasKey = _NotHasKey() @implementer(IRenderable) class _Lookup(util.ComparableMixin): compare_attrs = ( 'value', 'index', 'default', 'defaultWhenFalse', 'hasKey', 'elideNoneAs') def __init__(self, value, index, default=None, defaultWhenFalse=True, hasKey=_notHasKey, elideNoneAs=None): self.value = value self.index = index self.default = default self.defaultWhenFalse = defaultWhenFalse self.hasKey = hasKey self.elideNoneAs = elideNoneAs def __repr__(self): return '_Lookup({}, {}{}{}{}{})'.format( repr(self.value), repr(self.index), ', default={}'.format(repr(self.default)) if self.default is not None else '', ', defaultWhenFalse=False' if not self.defaultWhenFalse else '', ', hasKey={}'.format(repr(self.hasKey)) if self.hasKey != _notHasKey else '', ', elideNoneAs={}'.format(repr(self.elideNoneAs)) if self.elideNoneAs is not None else '' ) @defer.inlineCallbacks def getRenderingFor(self, build): value = build.render(self.value) index = build.render(self.index) value, index = yield defer.gatherResults([value, index]) if index not in value: rv = yield build.render(self.default) else: if self.defaultWhenFalse: rv = yield build.render(value[index]) if not rv: rv = yield build.render(self.default) elif self.hasKey != _notHasKey: rv = yield build.render(self.hasKey) elif self.hasKey != _notHasKey: rv = yield build.render(self.hasKey) else: rv = yield build.render(value[index]) if rv is None: rv = yield build.render(self.elideNoneAs) return rv def _getInterpolationList(fmtstring): # TODO: Verify that no positional substitutions are requested dd = collections.defaultdict(str) fmtstring % dd return list(dd) @implementer(IRenderable) class _PropertyDict: def getRenderingFor(self, build): return build.getProperties() _thePropertyDict = _PropertyDict() @implementer(IRenderable) class _WorkerPropertyDict: def getRenderingFor(self, build): return 
build.getBuild().getWorkerInfo() _theWorkerPropertyDict = _WorkerPropertyDict() @implementer(IRenderable) class _SecretRenderer: def __init__(self, secret_name): self.secret_name = secret_name @defer.inlineCallbacks def getRenderingFor(self, properties): secretsSrv = properties.master.namedServices.get("secrets") if not secretsSrv: error_message = "secrets service not started, need to configure" \ " SecretManager in c['services'] to use 'secrets'" \ "in Interpolate" raise KeyError(error_message) credsservice = properties.master.namedServices['secrets'] secret_detail = yield credsservice.get(self.secret_name) if secret_detail is None: raise KeyError("secret key {} is not found in any provider".format(self.secret_name)) properties.useSecret(secret_detail.value, self.secret_name) return secret_detail.value class Secret(_SecretRenderer): def __repr__(self): return "Secret({0})".format(self.secret_name) class _SecretIndexer: def __contains__(self, password): return True def __getitem__(self, password): return _SecretRenderer(password) @implementer(IRenderable) class _SourceStampDict(util.ComparableMixin): compare_attrs = ('codebase',) def __init__(self, codebase): self.codebase = codebase def getRenderingFor(self, props): ss = props.getSourceStamp(self.codebase) if ss: return ss return {} @implementer(IRenderable) class _Lazy(util.ComparableMixin): compare_attrs = ('value',) def __init__(self, value): self.value = value def getRenderingFor(self, build): return self.value def __repr__(self): return '_Lazy(%r)' % self.value @implementer(IRenderable) class Interpolate(RenderableOperatorsMixin, util.ComparableMixin): """ This is a marker class, used fairly widely to indicate that we want to interpolate build properties. 
""" compare_attrs = ('fmtstring', 'args', 'kwargs') identifier_re = re.compile(r'^[\w._-]*$') def __init__(self, fmtstring, *args, **kwargs): self.fmtstring = fmtstring self.args = args self.kwargs = kwargs if self.args and self.kwargs: config.error("Interpolate takes either positional or keyword " "substitutions, not both.") if not self.args: self.interpolations = {} self._parse(fmtstring) def __repr__(self): if self.args: return 'Interpolate(%r, *%r)' % (self.fmtstring, self.args) elif self.kwargs: return 'Interpolate(%r, **%r)' % (self.fmtstring, self.kwargs) return 'Interpolate(%r)' % (self.fmtstring,) @staticmethod def _parse_prop(arg): try: prop, repl = arg.split(":", 1) except ValueError: prop, repl = arg, None if not Interpolate.identifier_re.match(prop): config.error( "Property name must be alphanumeric for prop Interpolation '{}'".format(arg)) prop = repl = None return _thePropertyDict, prop, repl @staticmethod def _parse_secret(arg): try: secret, repl = arg.split(":", 1) except ValueError: secret, repl = arg, None return _SecretIndexer(), secret, repl @staticmethod def _parse_src(arg): # TODO: Handle changes try: codebase, attr, repl = arg.split(":", 2) except ValueError: try: codebase, attr = arg.split(":", 1) repl = None except ValueError: config.error(("Must specify both codebase and attribute for " "src Interpolation '{}'").format(arg)) return {}, None, None if not Interpolate.identifier_re.match(codebase): config.error( "Codebase must be alphanumeric for src Interpolation '{}'".format(arg)) codebase = attr = repl = None if not Interpolate.identifier_re.match(attr): config.error( "Attribute must be alphanumeric for src Interpolation '{}'".format(arg)) codebase = attr = repl = None return _SourceStampDict(codebase), attr, repl def _parse_worker(self, arg): try: prop, repl = arg.split(":", 1) except ValueError: prop, repl = arg, None return _theWorkerPropertyDict, prop, repl def _parse_kw(self, arg): try: kw, repl = arg.split(":", 1) except ValueError: 
kw, repl = arg, None if not Interpolate.identifier_re.match(kw): config.error( "Keyword must be alphanumeric for kw Interpolation '{}'".format(arg)) kw = repl = None return _Lazy(self.kwargs), kw, repl def _parseSubstitution(self, fmt): try: key, arg = fmt.split(":", 1) except ValueError: config.error( "invalid Interpolate substitution without selector '{}'".format(fmt)) return None fn = getattr(self, "_parse_" + key, None) if not fn: config.error("invalid Interpolate selector '{}'".format(key)) return None return fn(arg) @staticmethod def _splitBalancedParen(delim, arg): parenCount = 0 for i, val in enumerate(arg): if arg[i] == "(": parenCount += 1 if arg[i] == ")": parenCount -= 1 if parenCount < 0: raise ValueError if parenCount == 0 and arg[i] == delim: return arg[0:i], arg[i + 1:] return arg def _parseColon_minus(self, d, kw, repl): return _Lookup(d, kw, default=Interpolate(repl, **self.kwargs), defaultWhenFalse=False, elideNoneAs='') def _parseColon_tilde(self, d, kw, repl): return _Lookup(d, kw, default=Interpolate(repl, **self.kwargs), defaultWhenFalse=True, elideNoneAs='') def _parseColon_plus(self, d, kw, repl): return _Lookup(d, kw, hasKey=Interpolate(repl, **self.kwargs), default='', defaultWhenFalse=False, elideNoneAs='') def _parseColon_ternary(self, d, kw, repl, defaultWhenFalse=False): delim = repl[0] if delim == '(': config.error("invalid Interpolate ternary delimiter '('") return None try: truePart, falsePart = self._splitBalancedParen(delim, repl[1:]) except ValueError: config.error("invalid Interpolate ternary expression '{}' with delimiter '{}'".format( repl[1:], repl[0])) return None return _Lookup(d, kw, hasKey=Interpolate(truePart, **self.kwargs), default=Interpolate(falsePart, **self.kwargs), defaultWhenFalse=defaultWhenFalse, elideNoneAs='') def _parseColon_ternary_hash(self, d, kw, repl): return self._parseColon_ternary(d, kw, repl, defaultWhenFalse=True) def _parse(self, fmtstring): keys = _getInterpolationList(fmtstring) for key in 
keys: if key not in self.interpolations: d, kw, repl = self._parseSubstitution(key) if repl is None: repl = '-' for pattern, fn in [ ("-", self._parseColon_minus), ("~", self._parseColon_tilde), ("+", self._parseColon_plus), ("?", self._parseColon_ternary), ("#?", self._parseColon_ternary_hash) ]: junk, matches, tail = repl.partition(pattern) if not junk and matches: self.interpolations[key] = fn(d, kw, tail) break if key not in self.interpolations: config.error("invalid Interpolate default type '{}'".format(repl[0])) def getRenderingFor(self, build): props = build.getProperties() if self.args: d = props.render(self.args) d.addCallback(lambda args: self.fmtstring % tuple(args)) else: d = props.render(self.interpolations) d.addCallback(lambda res: self.fmtstring % res) return d @implementer(IRenderable) class Property(RenderableOperatorsMixin, util.ComparableMixin): """ An instance of this class renders a property of a build. """ compare_attrs = ('key', 'default', 'defaultWhenFalse') def __init__(self, key, default=None, defaultWhenFalse=True): """ @param key: Property to render. @param default: Value to use if property isn't set. @param defaultWhenFalse: When true (default), use default value if property evaluates to False. Otherwise, use default value only when property isn't set. 
""" self.key = key self.default = default self.defaultWhenFalse = defaultWhenFalse def __repr__(self): return "Property({0})".format(self.key) def getRenderingFor(self, props): if self.defaultWhenFalse: d = props.render(props.getProperty(self.key)) @d.addCallback def checkDefault(rv): if rv: return rv return props.render(self.default) return d if props.hasProperty(self.key): return props.render(props.getProperty(self.key)) return props.render(self.default) @implementer(IRenderable) class FlattenList(RenderableOperatorsMixin, util.ComparableMixin): """ An instance of this class flattens all nested lists in a list """ compare_attrs = ('nestedlist') def __init__(self, nestedlist, types=(list, tuple)): """ @param nestedlist: a list of values to render @param types: only flatten these types. defaults to (list, tuple) """ self.nestedlist = nestedlist self.types = types def getRenderingFor(self, props): d = props.render(self.nestedlist) @d.addCallback def flat(r): return flatten(r, self.types) return d def __add__(self, b): if isinstance(b, FlattenList): b = b.nestedlist return FlattenList(self.nestedlist + b, self.types) @implementer(IRenderable) class _Renderer(util.ComparableMixin): compare_attrs = ('fn',) def __init__(self, fn): self.fn = fn self.args = [] self.kwargs = {} def withArgs(self, *args, **kwargs): new_renderer = _Renderer(self.fn) new_renderer.args = self.args + list(args) new_renderer.kwargs = dict(self.kwargs) new_renderer.kwargs.update(kwargs) return new_renderer @defer.inlineCallbacks def getRenderingFor(self, props): args = yield props.render(self.args) kwargs = yield props.render(self.kwargs) # We allow the renderer fn to return a renderable for convenience result = yield self.fn(props, *args, **kwargs) result = yield props.render(result) return result def __repr__(self): if self.args or self.kwargs: return 'renderer(%r, args=%r, kwargs=%r)' % (self.fn, self.args, self.kwargs) return 'renderer(%r)' % (self.fn,) def renderer(fn): return _Renderer(fn) 
@implementer(IRenderable) class _DefaultRenderer: """ Default IRenderable adaptor. Calls .getRenderingFor if available, otherwise returns argument unchanged. """ def __init__(self, value): try: self.renderer = value.getRenderingFor except AttributeError: self.renderer = lambda _: value def getRenderingFor(self, build): return self.renderer(build) registerAdapter(_DefaultRenderer, object, IRenderable) @implementer(IRenderable) class _ListRenderer: """ List IRenderable adaptor. Maps Build.render over the list. """ def __init__(self, value): self.value = value def getRenderingFor(self, build): return defer.gatherResults([build.render(e) for e in self.value]) registerAdapter(_ListRenderer, list, IRenderable) @implementer(IRenderable) class _TupleRenderer: """ Tuple IRenderable adaptor. Maps Build.render over the tuple. """ def __init__(self, value): self.value = value def getRenderingFor(self, build): d = defer.gatherResults([build.render(e) for e in self.value]) d.addCallback(tuple) return d registerAdapter(_TupleRenderer, tuple, IRenderable) @implementer(IRenderable) class _DictRenderer: """ Dict IRenderable adaptor. Maps Build.render over the keys and values in the dict. """ def __init__(self, value): self.value = _ListRenderer( [_TupleRenderer((k, v)) for k, v in value.items()]) def getRenderingFor(self, build): d = self.value.getRenderingFor(build) d.addCallback(dict) return d registerAdapter(_DictRenderer, dict, IRenderable) @implementer(IRenderable) class Transform: """ A renderable that combines other renderables' results using an arbitrary function. 
""" def __init__(self, function, *args, **kwargs): if not callable(function) and not IRenderable.providedBy(function): config.error( "function given to Transform neither callable nor renderable") self._function = function self._args = args self._kwargs = kwargs @defer.inlineCallbacks def getRenderingFor(self, iprops): rfunction = yield iprops.render(self._function) rargs = yield iprops.render(self._args) rkwargs = yield iprops.render(self._kwargs) return rfunction(*rargs, **rkwargs) buildbot-3.4.0/master/buildbot/process/remotecommand.py000066400000000000000000000375651413250514000233160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#
# Copyright Buildbot Team Members

from twisted.internet import defer
from twisted.internet import error
from twisted.python import log
from twisted.python.failure import Failure
from twisted.spread import pb

from buildbot import util
from buildbot.pbutil import decode
from buildbot.process import metrics
from buildbot.process.results import CANCELLED
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.util.eventual import eventually
from buildbot.worker.protocols import base


class RemoteException(Exception):
    pass


class RemoteCommand(base.RemoteCommandImpl):
    """Master-side handle for a command executing on a remote worker."""

    # class-level unique identifier generator for command ids
    _commandCounter = 0

    active = False
    rc = None
    debug = False

    def __init__(self, remote_command, args, ignore_updates=False,
                 collectStdout=False, collectStderr=False, decodeRC=None,
                 stdioLogName='stdio'):
        if decodeRC is None:
            decodeRC = {0: SUCCESS}
        self.logs = {}
        self.delayedLogs = {}
        self._closeWhenFinished = {}
        self.collectStdout = collectStdout
        self.collectStderr = collectStderr
        self.stdout = ''
        self.stderr = ''
        self.updates = {}
        self.stdioLogName = stdioLogName
        self._startTime = None
        self._remoteElapsed = None
        self.remote_command = remote_command
        self.args = args
        self.ignore_updates = ignore_updates
        self.decodeRC = decodeRC
        self.conn = None
        self.worker = None
        self.step = None
        self.builder_name = None
        self.commandID = None
        self.deferred = None
        self.interrupted = False
        # a lock to make sure that only one log-handling method runs at a time.
        # This is really only a problem with old-style steps, which do not
        # wait for the Deferred from one method before invoking the next.
        self.loglock = defer.DeferredLock()

    def __repr__(self):
        # BUG FIX: the format string had been reduced to "" (the
        # "<RemoteCommand ...>" text was stripped), so both arguments were
        # ignored and repr() always produced an empty string.  Restore the
        # conventional "<ClassName ...>" form.
        return "<RemoteCommand '{}' at {}>".format(self.remote_command, id(self))

    def run(self, step, conn, builder_name):
        """Start the command on the worker; returns a Deferred that fires
        (with self) when the command completes."""
        self.active = True
        self.step = step
        self.conn = conn
        self.builder_name = builder_name

        # generate a new command id
        cmd_id = RemoteCommand._commandCounter
        RemoteCommand._commandCounter += 1
        self.commandID = "%d" % cmd_id

        log.msg("{}: RemoteCommand.run [{}]".format(self, self.commandID))
        self.deferred = defer.Deferred()

        d = defer.maybeDeferred(self._start)

        # _finished is called with an error for unknown commands, errors
        # that occur while the command is starting (including OSErrors in
        # exec()), StaleBroker (when the connection was lost before we
        # started), and pb.PBConnectionLost (when the worker isn't responding
        # over this connection, perhaps it had a power failure, or NAT
        # weirdness). If this happens, self.deferred is fired right away.
        d.addErrback(self._finished)

        # Connections which are lost while the command is running are caught
        # when our parent Step calls our .lostRemote() method.
        return self.deferred

    def useLog(self, log_, closeWhenFinished=False, logfileName=None):
        # NOTE: log may be a SyncLogFileWrapper or a Log instance, depending on
        # the step
        if not logfileName:
            logfileName = log_.getName()
        assert logfileName not in self.logs
        assert logfileName not in self.delayedLogs
        self.logs[logfileName] = log_
        self._closeWhenFinished[logfileName] = closeWhenFinished

    def useLogDelayed(self, logfileName, activateCallBack, closeWhenFinished=False):
        # register a log that is only created when data first arrives
        assert logfileName not in self.logs
        assert logfileName not in self.delayedLogs
        self.delayedLogs[logfileName] = (activateCallBack, closeWhenFinished)

    def _start(self):
        self._startTime = util.now()
        # This method only initiates the remote command.
        # We will receive remote_update messages as the command runs.
        # We will get a single remote_complete when it finishes.
        # We should fire self.deferred when the command is done.
        d = self.conn.remoteStartCommand(self, self.builder_name,
                                         self.commandID, self.remote_command,
                                         self.args)
        return d

    @defer.inlineCallbacks
    def _finished(self, failure=None):
        self.active = False
        # the rc is send asynchronously and there is a chance it is still in the callback queue
        # when finished is received, we have to workaround in the master because worker might be
        # older
        timeout = 10
        while self.rc is None and timeout > 0:
            yield util.asyncSleep(.1)
            timeout -= 1
        try:
            yield self.remoteComplete(failure)
            # this fires the original deferred we returned from .run(),
            self.deferred.callback(self)
        except Exception as e:
            self.deferred.errback(e)

    @defer.inlineCallbacks
    def interrupt(self, why):
        log.msg("RemoteCommand.interrupt", self, why)
        if not self.active or self.interrupted:
            log.msg(" but this RemoteCommand is already inactive")
            return
        if not self.conn:
            log.msg(" but our .conn went away")
            return
        if isinstance(why, Failure) and why.check(error.ConnectionLost):
            log.msg("RemoteCommand.disconnect: lost worker")
            self.conn = None
            self._finished(why)
            return

        self.interrupted = True
        # tell the remote command to halt. Returns a Deferred that will fire
        # when the interrupt command has been delivered.

        try:
            yield self.conn.remoteInterruptCommand(self.builder_name,
                                                   self.commandID, str(why))
            # the worker may not have remote_interruptCommand
        except Exception as e:
            log.msg("RemoteCommand.interrupt failed", self, e)

    def remote_update(self, updates):
        """
        I am called by the worker's
        L{buildbot_worker.base.WorkerForBuilderBase.sendUpdate} so
        I can receive updates from the running remote command.

        @type  updates: list of [object, int]
        @param updates: list of updates from the remote command
        """
        updates = decode(updates)
        self.worker.messageReceivedFromWorker()
        max_updatenum = 0
        for (update, num) in updates:
            # log.msg("update[%d]:" % num)
            try:
                if self.active and not self.ignore_updates:
                    self.remoteUpdate(update)
            except Exception:
                # log failure, terminate build, let worker retire the update
                self._finished(Failure())
                # TODO: what if multiple updates arrive? should
                # skip the rest but ack them all
            if num > max_updatenum:
                max_updatenum = num
        return max_updatenum

    def remote_complete(self, failure=None):
        """
        Called by the worker's
        L{buildbot_worker.base.WorkerForBuilderBase.commandComplete} to
        notify me the remote command has finished.

        @type  failure: L{twisted.python.failure.Failure} or None

        @rtype: None
        """
        self.worker.messageReceivedFromWorker()
        # call the real remoteComplete a moment later, but first return an
        # acknowledgement so the worker can retire the completion message.
        if self.active:
            eventually(self._finished, failure)
        return None

    @util.deferredLocked('loglock')
    def addStdout(self, data):
        if self.collectStdout:
            self.stdout += data
        if self.stdioLogName is not None and self.stdioLogName in self.logs:
            self.logs[self.stdioLogName].addStdout(data)
        return defer.succeed(None)

    @util.deferredLocked('loglock')
    def addStderr(self, data):
        if self.collectStderr:
            self.stderr += data
        if self.stdioLogName is not None and self.stdioLogName in self.logs:
            self.logs[self.stdioLogName].addStderr(data)
        return defer.succeed(None)

    @util.deferredLocked('loglock')
    def addHeader(self, data):
        if self.stdioLogName is not None and self.stdioLogName in self.logs:
            self.logs[self.stdioLogName].addHeader(data)
        return defer.succeed(None)

    @util.deferredLocked('loglock')
    @defer.inlineCallbacks
    def addToLog(self, logname, data):
        # Activate delayed logs on first data.
        if logname in self.delayedLogs:
            (activateCallBack, closeWhenFinished) = self.delayedLogs[logname]
            del self.delayedLogs[logname]
            loog = yield activateCallBack(self)
            self.logs[logname] = loog
            self._closeWhenFinished[logname] = closeWhenFinished

        if logname in self.logs:
            yield self.logs[logname].addStdout(data)
        else:
            log.msg("{}.addToLog: no such log {}".format(self, logname))

    @metrics.countMethod('RemoteCommand.remoteUpdate()')
    @defer.inlineCallbacks
    def remoteUpdate(self, update):
        # scrub secrets out of any text before it is logged
        def cleanup(data):
            if self.step is None:
                return data
            return self.step.build.properties.cleanupTextFromSecrets(data)

        if self.debug:
            for k, v in update.items():
                log.msg("Update[{}]: {}".format(k, v))
        if "stdout" in update:
            # 'stdout': data
            yield self.addStdout(cleanup(update['stdout']))
        if "stderr" in update:
            # 'stderr': data
            yield self.addStderr(cleanup(update['stderr']))
        if "header" in update:
            # 'header': data
            yield self.addHeader(cleanup(update['header']))
        if "log" in update:
            # 'log': (logname, data)
            logname, data = update['log']
            yield self.addToLog(logname, cleanup(data))
        if "rc" in update:
            rc = self.rc = update['rc']
            log.msg("{} rc={}".format(self, rc))
            yield self.addHeader("program finished with exit code %d\n" % rc)
        if "elapsed" in update:
            self._remoteElapsed = update['elapsed']

        # TODO: these should be handled at the RemoteCommand level
        for k in update:
            if k not in ('stdout', 'stderr', 'header', 'rc'):
                if k not in self.updates:
                    self.updates[k] = []
                self.updates[k].append(update[k])

    @util.deferredLocked('loglock')
    @defer.inlineCallbacks
    def remoteComplete(self, maybeFailure):
        if self._startTime and self._remoteElapsed:
            delta = (util.now() - self._startTime) - self._remoteElapsed
            metrics.MetricTimeEvent.log("RemoteCommand.overhead", delta)

        for name, loog in self.logs.items():
            if self._closeWhenFinished[name]:
                if maybeFailure:
                    # NOTE(review): self._unwrap is not defined in this class
                    # as visible here — presumably provided elsewhere; confirm.
                    loog = yield self._unwrap(loog)
                    yield loog.addHeader("\nremoteFailed: {}".format(maybeFailure))
                else:
                    log.msg("closing log {}".format(loog))
                loog.finish()
        if maybeFailure:
            # workaround http://twistedmatrix.com/trac/ticket/5507
            # CopiedFailure cannot be raised back, this make debug difficult
            if isinstance(maybeFailure, pb.CopiedFailure):
                maybeFailure.value = RemoteException("{}: {}\n{}".format(
                    maybeFailure.type, maybeFailure.value, maybeFailure.traceback))
                maybeFailure.type = RemoteException
            maybeFailure.raiseException()

    def results(self):
        # map the remote rc to a buildbot result code
        if self.interrupted:
            return CANCELLED
        if self.rc in self.decodeRC:
            return self.decodeRC[self.rc]
        return FAILURE

    def didFail(self):
        return self.results() == FAILURE


LoggedRemoteCommand = RemoteCommand


class RemoteShellCommand(RemoteCommand):
    # RemoteCommand specialization for running a shell command on the worker

    def __init__(self, workdir, command, env=None,
                 want_stdout=1, want_stderr=1,
                 timeout=20 * 60, maxTime=None, sigtermTime=None,
                 logfiles=None, usePTY=None, logEnviron=True,
                 collectStdout=False, collectStderr=False,
                 interruptSignal=None, initialStdin=None, decodeRC=None,
                 stdioLogName='stdio'):
        if logfiles is None:
            logfiles = {}
        if decodeRC is None:
            decodeRC = {0: SUCCESS}
        self.command = command  # stash .command, set it later
        if isinstance(self.command, (str, bytes)):
            # Single string command doesn't support obfuscation.
            self.fake_command = command
        else:
            # Try to obfuscate command.
            def obfuscate(arg):
                if isinstance(arg, tuple) and len(arg) == 3 and arg[0] == 'obfuscated':
                    return arg[2]
                return arg
            self.fake_command = [obfuscate(c) for c in self.command]

        if env is not None:
            # avoid mutating the original master.cfg dictionary. Each
            # ShellCommand gets its own copy, any start() methods won't be
            # able to modify the original.
env = env.copy() args = {'workdir': workdir, 'env': env, 'want_stdout': want_stdout, 'want_stderr': want_stderr, 'logfiles': logfiles, 'timeout': timeout, 'maxTime': maxTime, 'sigtermTime': sigtermTime, 'usePTY': usePTY, 'logEnviron': logEnviron, 'initial_stdin': initialStdin } if interruptSignal is not None: args['interruptSignal'] = interruptSignal super().__init__("shell", args, collectStdout=collectStdout, collectStderr=collectStderr, decodeRC=decodeRC, stdioLogName=stdioLogName) def _start(self): if self.args['usePTY'] is None: if self.step.workerVersionIsOlderThan("shell", "3.0"): # Old worker default of usePTY is to use worker-configuration. self.args['usePTY'] = "slave-config" else: # buildbot-worker doesn't support worker-configured usePTY, # and usePTY defaults to False. self.args['usePTY'] = False self.args['command'] = self.command if self.remote_command == "shell": # non-ShellCommand worker commands are responsible for doing this # fixup themselves if self.step.workerVersion("shell", "old") == "old": self.args['dir'] = self.args['workdir'] if self.step.workerVersionIsOlderThan("shell", "2.16"): self.args.pop('sigtermTime', None) what = "command '{}' in dir '{}'".format(self.fake_command, self.args['workdir']) log.msg(what) return super()._start() def __repr__(self): return "".format(repr(self.fake_command)) buildbot-3.4.0/master/buildbot/process/remotetransfer.py000066400000000000000000000132721413250514000235110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import tarfile import tempfile from io import BytesIO from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.worker.protocols import base """ module for regrouping all FileWriterImpl and FileReaderImpl away from steps """ class FileWriter(base.FileWriterImpl): """ Helper class that acts as a file-object with write access """ def __init__(self, destfile, maxsize, mode): # Create missing directories. destfile = os.path.abspath(destfile) dirname = os.path.dirname(destfile) if not os.path.exists(dirname): os.makedirs(dirname) self.destfile = destfile self.mode = mode fd, self.tmpname = tempfile.mkstemp(dir=dirname) self.fp = os.fdopen(fd, 'wb') self.remaining = maxsize def remote_write(self, data): """ Called from remote worker to write L{data} to L{fp} within boundaries of L{maxsize} @type data: C{string} @param data: String of data to write """ data = unicode2bytes(data) if self.remaining is not None: if len(data) > self.remaining: data = data[:self.remaining] self.fp.write(data) self.remaining = self.remaining - len(data) else: self.fp.write(data) def remote_utime(self, accessed_modified): os.utime(self.destfile, accessed_modified) def remote_close(self): """ Called by remote worker to state that no more data will be transferred """ self.fp.close() self.fp = None # on windows, os.rename does not automatically unlink, so do it # manually if os.path.exists(self.destfile): os.unlink(self.destfile) os.rename(self.tmpname, self.destfile) self.tmpname = None if self.mode is not None: os.chmod(self.destfile, self.mode) def cancel(self): # unclean shutdown, the file is probably truncated, so delete it # altogether rather than deliver a corrupted file fp = getattr(self, "fp", None) 
if fp: fp.close() if self.destfile and os.path.exists(self.destfile): os.unlink(self.destfile) if self.tmpname and os.path.exists(self.tmpname): os.unlink(self.tmpname) class DirectoryWriter(FileWriter): """ A DirectoryWriter is implemented as a FileWriter, with an added post-processing step to unpack the archive, once the transfer has completed. """ def __init__(self, destroot, maxsize, compress, mode): self.destroot = destroot self.compress = compress self.fd, self.tarname = tempfile.mkstemp() os.close(self.fd) super().__init__(self.tarname, maxsize, mode) def remote_unpack(self): """ Called by remote worker to state that no more data will be transferred """ # Make sure remote_close is called, otherwise atomic rename won't happen self.remote_close() # Map configured compression to a TarFile setting if self.compress == 'bz2': mode = 'r|bz2' elif self.compress == 'gz': mode = 'r|gz' else: mode = 'r' # Unpack archive and clean up after self archive = tarfile.open(name=self.tarname, mode=mode) archive.extractall(path=self.destroot) archive.close() os.remove(self.tarname) class FileReader(base.FileReaderImpl): """ Helper class that acts as a file-object with read access """ def __init__(self, fp): self.fp = fp def remote_read(self, maxlength): """ Called from remote worker to read at most L{maxlength} bytes of data @type maxlength: C{integer} @param maxlength: Maximum number of data bytes that can be returned @return: Data read from L{fp} @rtype: C{string} of bytes read from file """ if self.fp is None: return '' data = self.fp.read(maxlength) return data def remote_close(self): """ Called by remote worker to state that no more data will be transferred """ if self.fp is not None: self.fp.close() self.fp = None class StringFileWriter(base.FileWriterImpl): """ FileWriter class that just puts received data into a buffer. Used to upload a file from worker for inline processing rather than writing into a file on master. 
""" def __init__(self): self.buffer = "" def remote_write(self, data): self.buffer += bytes2unicode(data) def remote_close(self): pass class StringFileReader(FileReader): """ FileWriter class that just buid send data from a string. Used to download a file to worker from local string rather than first writing into a file on master. """ def __init__(self, s): s = unicode2bytes(s) super().__init__(BytesIO(s)) buildbot-3.4.0/master/buildbot/process/results.py000066400000000000000000000053201413250514000221450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members ALL_RESULTS = list(range(7)) SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY, CANCELLED = ALL_RESULTS Results = ["success", "warnings", "failure", "skipped", "exception", "retry", "cancelled"] MultipleResults = ["successes", "warnings", "failures", "skipped", "exceptions", "retries", "cancelled"] def statusToString(status, count=1): if status is None: return "not finished" if status < 0 or status >= len(Results): return "Invalid status" if count > 1: return MultipleResults[status] return Results[status] def worst_status(a, b): # SKIPPED > SUCCESS > WARNINGS > FAILURE > EXCEPTION > RETRY > CANCELLED # CANCELLED needs to be considered the worst. 
for s in (CANCELLED, RETRY, EXCEPTION, FAILURE, WARNINGS, SUCCESS, SKIPPED): if s in (a, b): return s return None def computeResultAndTermination(obj, result, previousResult): possible_overall_result = result terminate = False if result == FAILURE: if not obj.flunkOnFailure: possible_overall_result = SUCCESS if obj.warnOnFailure: possible_overall_result = WARNINGS if obj.flunkOnFailure: possible_overall_result = FAILURE if obj.haltOnFailure: terminate = True elif result == WARNINGS: if not obj.warnOnWarnings: possible_overall_result = SUCCESS else: possible_overall_result = WARNINGS if obj.flunkOnWarnings: possible_overall_result = FAILURE elif result in (EXCEPTION, RETRY, CANCELLED): terminate = True result = worst_status(previousResult, possible_overall_result) return result, terminate class ResultComputingConfigMixin: haltOnFailure = False flunkOnWarnings = False flunkOnFailure = True warnOnWarnings = False warnOnFailure = False resultConfig = [ "haltOnFailure", "flunkOnWarnings", "flunkOnFailure", "warnOnWarnings", "warnOnFailure", ] buildbot-3.4.0/master/buildbot/process/subunitlogobserver.py000066400000000000000000000015541413250514000244140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # this used to be referenced here, so we keep a link for old time's sake import buildbot.steps.subunit SubunitShellCommand = buildbot.steps.subunit.SubunitShellCommand buildbot-3.4.0/master/buildbot/process/users/000077500000000000000000000000001413250514000212335ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/process/users/__init__.py000066400000000000000000000000001413250514000233320ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/process/users/manager.py000066400000000000000000000032231413250514000232170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.application import service from twisted.internet import defer from buildbot.util import service as util_service class UserManagerManager(util_service.ReconfigurableServiceMixin, service.MultiService): # this class manages a fleet of user managers; hence the name.. def __init__(self, master): super().__init__() self.setName('user_manager_manager') self.master = master @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): # this is easy - kick out all of the old managers, and add the # new ones. 
# pylint: disable=cell-var-from-loop for mgr in list(self): yield mgr.disownServiceParent() for mgr in new_config.user_managers: yield mgr.setServiceParent(self) # reconfig any newly-added change sources, as well as existing yield super().reconfigServiceWithBuildbotConfig(new_config) buildbot-3.4.0/master/buildbot/process/users/manual.py000066400000000000000000000212721413250514000230660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot import pbutil from buildbot.util import service # this class is known to contain cruft and will be looked at later, so # no current implementation utilizes it aside from scripts.runner. class CommandlineUserManagerPerspective(pbutil.NewCredPerspective): """ Perspective registered in buildbot.pbmanager and contains the real workings of `buildbot user` by working with the database when perspective_commandline is called. 
""" def __init__(self, master): self.master = master def formatResults(self, op, results): """ This formats the results of the database operations for printing back to the caller @param op: operation to perform (add, remove, update, get) @type op: string @param results: results from db queries in perspective_commandline @type results: list @returns: string containing formatted results """ formatted_results = "" if op == 'add': # list, alternating ident, uid formatted_results += "user(s) added:\n" for user in results: if isinstance(user, str): formatted_results += "identifier: {}\n".format(user) else: formatted_results += "uid: %d\n\n" % user elif op == 'remove': # list of dictionaries formatted_results += "user(s) removed:\n" for user in results: if user: formatted_results += "identifier: {}\n".format(user) elif op == 'update': # list, alternating ident, None formatted_results += "user(s) updated:\n" for user in results: if user: formatted_results += "identifier: {}\n".format(user) elif op == 'get': # list of dictionaries formatted_results += "user(s) found:\n" for user in results: if user: for key in sorted(user.keys()): if key != 'bb_password': formatted_results += "{}: {}\n".format(key, user[key]) formatted_results += "\n" else: formatted_results += "no match found\n" return formatted_results @defer.inlineCallbacks def perspective_commandline(self, op, bb_username, bb_password, ids, info): """ This performs the requested operations from the `buildbot user` call by calling the proper buildbot.db.users methods based on the operation. It yields a deferred instance with the results from the database methods. 
@param op: operation to perform (add, remove, update, get) @type op: string @param bb_username: username portion of auth credentials @type bb_username: string @param bb_password: hashed password portion of auth credentials @type bb_password: hashed string @param ids: user identifiers used to find existing users @type ids: list of strings or None @param info: type/value pairs for each user that will be added or updated in the database @type info: list of dictionaries or None @returns: results from db.users methods via deferred """ log.msg("perspective_commandline called") results = [] # pylint: disable=too-many-nested-blocks if ids: for user in ids: # get identifier, guaranteed to be in user from checks # done in C{scripts.runner} uid = yield self.master.db.users.identifierToUid( identifier=user) result = None if op == 'remove': if uid: yield self.master.db.users.removeUser(uid) result = user else: log.msg("Unable to find uid for identifier {}".format(user)) elif op == 'get': if uid: result = yield self.master.db.users.getUser(uid) else: log.msg("Unable to find uid for identifier {}".format(user)) results.append(result) else: for user in info: # get identifier, guaranteed to be in user from checks # done in C{scripts.runner} ident = user.pop('identifier') uid = yield self.master.db.users.identifierToUid( identifier=ident) # if only an identifier was in user, we're updating only # the bb_username and bb_password. 
if not user: if uid: result = yield self.master.db.users.updateUser( uid=uid, identifier=ident, bb_username=bb_username, bb_password=bb_password) results.append(ident) else: log.msg("Unable to find uid for identifier {}".format(user)) else: # when adding, we update the user after the first attr once_through = False for attr in user: result = None if op == 'update' or once_through: if uid: result = yield self.master.db.users.updateUser( uid=uid, identifier=ident, bb_username=bb_username, bb_password=bb_password, attr_type=attr, attr_data=user[attr]) else: log.msg("Unable to find uid for identifier {}".format(user)) elif op == 'add': result = yield self.master.db.users.findUserByAttr( identifier=ident, attr_type=attr, attr_data=user[attr]) once_through = True results.append(ident) # result is None from updateUser calls if result: results.append(result) uid = result results = self.formatResults(op, results) return results class CommandlineUserManager(service.AsyncMultiService): """ Service that runs to set up and register CommandlineUserManagerPerspective so `buildbot user` calls get to perspective_commandline. 
""" def __init__(self, username=None, passwd=None, port=None): super().__init__() assert username and passwd, ("A username and password pair must be given " "to connect and use `buildbot user`") self.username = username self.passwd = passwd assert port, "A port must be specified for a PB connection" self.port = port self.registration = None @defer.inlineCallbacks def startService(self): # set up factory and register with buildbot.pbmanager def factory(mind, username): return CommandlineUserManagerPerspective(self.master) self.registration = yield self.master.pbmanager.register(self.port, self.username, self.passwd, factory) yield super().startService() def stopService(self): d = defer.maybeDeferred(service.AsyncMultiService.stopService, self) @d.addCallback def unreg(_): if self.registration: return self.registration.unregister() return None return d buildbot-3.4.0/master/buildbot/process/users/users.py000066400000000000000000000102771413250514000227550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from binascii import hexlify from hashlib import sha1 from twisted.internet import defer from twisted.python import log from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes # TODO: fossil comes from a plugin. 
We should have an API that plugins could use to # register allowed user types. srcs = ['git', 'svn', 'hg', 'cvs', 'darcs', 'bzr', 'fossil'] salt_len = 8 def createUserObject(master, author, src=None): """ Take a Change author and source and translate them into a User Object, storing the user in master.db, or returning None if the src is not specified. @param master: link to Buildmaster for database operations @type master: master.Buildmaster instance @param authors: Change author if string or Authz instance @type authors: string or www.authz instance @param src: source from which the User Object will be created @type src: string """ if not src: log.msg("No vcs information found, unable to create User Object") return defer.succeed(None) if src in srcs: usdict = dict(identifier=author, attr_type=src, attr_data=author) else: log.msg("Unrecognized source argument: {}".format(src)) return defer.succeed(None) return master.db.users.findUserByAttr( identifier=usdict['identifier'], attr_type=usdict['attr_type'], attr_data=usdict['attr_data']) def _extractContact(usdict, contact_types, uid): if usdict: for type in contact_types: contact = usdict.get(type) if contact: break else: contact = None if contact is None: log.msg(format="Unable to find any of %(contact_types)r for uid: %(uid)r", contact_types=contact_types, uid=uid) return contact def getUserContact(master, contact_types, uid): """ This is a simple getter function that returns a user attribute that matches the contact_types argument, or returns None if no uid/match is found. 
@param master: BuildMaster used to query the database @type master: BuildMaster instance @param contact_types: list of contact attributes to look for in in a given user, such as 'email' or 'nick' @type contact_types: list of strings @param uid: user that is searched for the contact_types match @type uid: integer @returns: string of contact information or None via deferred """ d = master.db.users.getUser(uid) d.addCallback(_extractContact, contact_types, uid) return d def encrypt(passwd): """ Encrypts the incoming password after adding some salt to store it in the database. @param passwd: password portion of user credentials @type passwd: string @returns: encrypted/salted string """ m = sha1() salt = hexlify(os.urandom(salt_len)) m.update(unicode2bytes(passwd) + salt) crypted = bytes2unicode(salt) + m.hexdigest() return crypted def check_passwd(guess, passwd): """ Tests to see if the guess, after salting and hashing, matches the passwd from the database. @param guess: incoming password trying to be used for authentication @param passwd: already encrypted password from the database @returns: boolean """ m = sha1() salt = passwd[:salt_len * 2] # salt_len * 2 due to encode('hex_codec') m.update(unicode2bytes(guess) + unicode2bytes(salt)) crypted_guess = bytes2unicode(salt) + m.hexdigest() return (crypted_guess == bytes2unicode(passwd)) buildbot-3.4.0/master/buildbot/process/workerforbuilder.py000066400000000000000000000167571413250514000240530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from twisted.python.constants import NamedConstant from twisted.python.constants import Names class States(Names): # The worker isn't attached, or is in the process of attaching. DETACHED = NamedConstant() # The worker is available to build: either attached, or a latent worker. AVAILABLE = NamedConstant() # The worker is building. BUILDING = NamedConstant() class AbstractWorkerForBuilder: def __init__(self): self.ping_watchers = [] self.state = None # set in subclass self.worker = None self.builder_name = None self.locks = None def __repr__(self): r = ["<", self.__class__.__name__] if self.builder_name: r.extend([" builder=", repr(self.builder_name)]) if self.worker: r.extend([" worker=", repr(self.worker.workername)]) r.extend([" state=", self.state.name, ">"]) return ''.join(r) def setBuilder(self, b): self.builder = b self.builder_name = b.name def getWorkerCommandVersion(self, command, oldversion=None): if self.remoteCommands is None: # the worker is 0.5.0 or earlier return oldversion return self.remoteCommands.get(command) def isAvailable(self): # if this WorkerForBuilder is busy, then it's definitely not available if self.isBusy(): return False # otherwise, check in with the Worker if self.worker: return self.worker.canStartBuild() # no worker? not very available. return False def isBusy(self): return self.state != States.AVAILABLE def buildStarted(self): self.state = States.BUILDING # AbstractWorker doesn't always have a buildStarted method # so only call it if it is available. 
try: worker_buildStarted = self.worker.buildStarted except AttributeError: pass else: worker_buildStarted(self) def buildFinished(self): self.state = States.AVAILABLE if self.worker: self.worker.buildFinished(self) @defer.inlineCallbacks def attached(self, worker, commands): """ @type worker: L{buildbot.worker.Worker} @param worker: the Worker that represents the worker as a whole @type commands: dict: string -> string, or None @param commands: provides the worker's version of each RemoteCommand """ self.remoteCommands = commands # maps command name to version if self.worker is None: self.worker = worker self.worker.addWorkerForBuilder(self) else: assert self.worker == worker log.msg("Worker {} attached to {}".format(worker.workername, self.builder_name)) yield self.worker.conn.remotePrint(message="attached") def substantiate_if_needed(self, build): return defer.succeed(True) def insubstantiate_if_needed(self): pass def ping(self): """Ping the worker to make sure it is still there. Returns a Deferred that fires with True if it is. 
""" newping = not self.ping_watchers d = defer.Deferred() self.ping_watchers.append(d) if newping: Ping().ping(self.worker.conn).addBoth(self._pong) return d def abortPingIfAny(self): watchers, self.ping_watchers = self.ping_watchers, [] for d in watchers: d.errback(PingException('aborted ping')) def _pong(self, res): watchers, self.ping_watchers = self.ping_watchers, [] for d in watchers: d.callback(res) def detached(self): log.msg("Worker {} detached from {}".format(self.worker.workername, self.builder_name)) if self.worker: self.worker.removeWorkerForBuilder(self) self.worker = None self.remoteCommands = None class PingException(Exception): pass class Ping: running = False def ping(self, conn): assert not self.running if not conn: # clearly the ping must fail return defer.fail(PingException("Worker not connected?")) self.running = True log.msg("sending ping") self.d = defer.Deferred() # TODO: add a distinct 'ping' command on the worker.. using 'print' # for this purpose is kind of silly. conn.remotePrint(message="ping").addCallbacks(self._pong, self._ping_failed, errbackArgs=(conn,)) return self.d def _pong(self, res): log.msg("ping finished: success") self.d.callback(True) def _ping_failed(self, res, conn): log.msg("ping finished: failure") # the worker has some sort of internal error, disconnect them. If we # don't, we'll requeue a build and ping them again right away, # creating a nasty loop. conn.loseConnection() self.d.errback(res) class WorkerForBuilder(AbstractWorkerForBuilder): def __init__(self): super().__init__() self.state = States.DETACHED @defer.inlineCallbacks def attached(self, worker, commands): yield super().attached(worker, commands) # Only set available on non-latent workers, since latent workers # only attach while a build is in progress. 
self.state = States.AVAILABLE def detached(self): super().detached() if self.worker: self.worker.removeWorkerForBuilder(self) self.worker = None self.state = States.DETACHED class LatentWorkerForBuilder(AbstractWorkerForBuilder): def __init__(self, worker, builder): super().__init__() self.worker = worker self.state = States.AVAILABLE self.setBuilder(builder) self.worker.addWorkerForBuilder(self) log.msg("Latent worker {} attached to {}".format(worker.workername, self.builder_name)) def substantiate_if_needed(self, build): self.state = States.DETACHED d = self.substantiate(build) return d def insubstantiate_if_needed(self): self.worker.insubstantiate() def attached(self, worker, commands): # When a latent worker is attached, it is actually because it prepared for a build # thus building and not available like for normal worker if self.state == States.DETACHED: self.state = States.BUILDING return super().attached(worker, commands) def substantiate(self, build): return self.worker.substantiate(self, build) def ping(self): if not self.worker.substantiated: return defer.fail(PingException("worker is not substantiated")) return super().ping() buildbot-3.4.0/master/buildbot/reporters/000077500000000000000000000000001413250514000204415ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/reporters/__init__.py000066400000000000000000000000001413250514000225400ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/reporters/base.py000066400000000000000000000064461413250514000217370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import abc from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.reporters import utils from buildbot.util import service from buildbot.util import tuplematch ENCODING = 'utf-8' class ReporterBase(service.BuildbotService): name = None __meta__ = abc.ABCMeta compare_attrs = ['generators'] def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.generators = None self._event_consumers = [] def checkConfig(self, generators): if not isinstance(generators, list): config.error('{}: generators argument must be a list') for g in generators: g.check() if self.name is None: self.name = self.__class__.__name__ for g in generators: self.name += "_" + g.generate_name() @defer.inlineCallbacks def reconfigService(self, generators): for consumer in self._event_consumers: yield consumer.stopConsuming() self._event_consumers = [] self.generators = generators wanted_event_keys = set() for g in self.generators: wanted_event_keys.update(g.wanted_event_keys) for key in sorted(list(wanted_event_keys)): consumer = yield self.master.mq.startConsuming(self._got_event, key) self._event_consumers.append(consumer) @defer.inlineCallbacks def stopService(self): for consumer in self._event_consumers: yield consumer.stopConsuming() self._event_consumers = [] yield super().stopService() def _does_generator_want_key(self, generator, key): for filter in generator.wanted_event_keys: if tuplematch.matchTuple(key, filter): return True return False @defer.inlineCallbacks def _got_event(self, key, msg): try: reports = [] for g in self.generators: if self._does_generator_want_key(g, key): report = yield g.generate(self.master, self, key, msg) if report is not None: reports.append(report) if 
reports: yield self.sendMessage(reports) except Exception as e: log.err(e, 'Got exception when handling reporter events') def getResponsibleUsersForBuild(self, master, buildid): # Use library method but subclassers may want to override that return utils.getResponsibleUsersForBuild(master, buildid) @abc.abstractmethod def sendMessage(self, reports): pass buildbot-3.4.0/master/buildbot/reporters/bitbucket.py000066400000000000000000000144371413250514000230000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from urllib.parse import urlparse from twisted.internet import defer from twisted.python import log from buildbot.process.properties import Properties from buildbot.process.properties import Property from buildbot.process.results import SUCCESS from buildbot.reporters.base import ReporterBase from buildbot.reporters.generators.build import BuildStartEndStatusGenerator from buildbot.reporters.message import MessageFormatter from buildbot.util import httpclientservice # Magic words understood by Butbucket REST API BITBUCKET_INPROGRESS = 'INPROGRESS' BITBUCKET_SUCCESSFUL = 'SUCCESSFUL' BITBUCKET_FAILED = 'FAILED' _BASE_URL = 'https://api.bitbucket.org/2.0/repositories' _OAUTH_URL = 'https://bitbucket.org/site/oauth2/access_token' _GET_TOKEN_DATA = { 'grant_type': 'client_credentials' } class BitbucketStatusPush(ReporterBase): name = "BitbucketStatusPush" def checkConfig(self, oauth_key, oauth_secret, base_url=_BASE_URL, oauth_url=_OAUTH_URL, debug=None, verify=None, status_key=None, status_name=None, generators=None, **kwargs): if generators is None: generators = self._create_default_generators() super().checkConfig(generators=generators, **kwargs) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, oauth_key, oauth_secret, base_url=_BASE_URL, oauth_url=_OAUTH_URL, debug=None, verify=None, status_key=None, status_name=None, generators=None, **kwargs): oauth_key, oauth_secret = yield self.renderSecrets(oauth_key, oauth_secret) self.base_url = base_url self.debug = debug self.verify = verify self.status_key = status_key or Property('buildername') self.status_name = status_name or Property('buildername') if generators is None: generators = self._create_default_generators() yield super().reconfigService(generators=generators, **kwargs) base_url = base_url.rstrip('/') self._http = yield httpclientservice.HTTPClientService.getService( self.master, base_url, 
debug=self.debug, verify=self.verify) self.oauthhttp = yield httpclientservice.HTTPClientService.getService( self.master, oauth_url, auth=(oauth_key, oauth_secret), debug=self.debug, verify=self.verify) def _create_default_generators(self): return [ BuildStartEndStatusGenerator( start_formatter=MessageFormatter(subject=""), end_formatter=MessageFormatter(subject="") ) ] @defer.inlineCallbacks def sendMessage(self, reports): request = yield self.oauthhttp.post("", data=_GET_TOKEN_DATA) if request.code != 200: content = yield request.content() log.msg(f"{request.code}: unable to authenticate to Bitbucket {content}") return token = (yield request.json())['access_token'] self._http.updateHeaders({'Authorization': f'Bearer {token}'}) build = reports[0]['builds'][0] if build['complete']: status = BITBUCKET_SUCCESSFUL if build['results'] == SUCCESS else BITBUCKET_FAILED else: status = BITBUCKET_INPROGRESS props = Properties.fromDict(build['properties']) props.master = self.master body = { 'state': status, 'key': (yield props.render(self.status_key)), 'name': (yield props.render(self.status_name)), 'description': reports[0]['subject'], 'url': build['url'] } for sourcestamp in build['buildset']['sourcestamps']: if not sourcestamp['repository']: log.msg(f"Empty repository URL for Bitbucket status {body}") continue owner, repo = self.get_owner_and_repo(sourcestamp['repository']) endpoint = (owner, repo, 'commit', sourcestamp['revision'], 'statuses', 'build') bitbucket_uri = f"/{'/'.join(endpoint)}" if self.debug: log.msg(f"Bitbucket status {bitbucket_uri} {body}") response = yield self._http.post(bitbucket_uri, json=body) if response.code != 200 and response.code != 201: content = yield response.content() log.msg(f"{response.code}: unable to upload Bitbucket status {content}") def get_owner_and_repo(self, repourl): """ Takes a git repository URL from Bitbucket and tries to determine the owner and repository name :param repourl: Bitbucket git repo in the form of 
git@bitbucket.org:OWNER/REPONAME.git https://bitbucket.org/OWNER/REPONAME.git ssh://git@bitbucket.org/OWNER/REPONAME.git https://api.bitbucket.org/2.0/repositories/OWNER/REPONAME :return: owner, repo: The owner of the repository and the repository name """ parsed = urlparse(repourl) base_parsed = urlparse(self.base_url) if parsed.path.startswith(base_parsed.path): path = parsed.path.replace(base_parsed.path, "") elif parsed.scheme: path = parsed.path else: # we assume git@host:owner/repo.git here path = parsed.path.split(':', 1)[-1] path = path.lstrip('/') if path.endswith('.git'): path = path[:-4] path = path.rstrip('/') parts = path.split('/') assert len(parts) == 2, 'OWNER/REPONAME is expected' return parts buildbot-3.4.0/master/buildbot/reporters/bitbucketserver.py000066400000000000000000000406661413250514000242320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re from urllib.parse import urlparse from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.plugins import util from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.results import SUCCESS from buildbot.reporters.base import ReporterBase from buildbot.reporters.generators.build import BuildStartEndStatusGenerator from buildbot.reporters.generators.build import BuildStatusGenerator from buildbot.reporters.message import MessageFormatterRenderable from buildbot.util import bytes2unicode from buildbot.util import httpclientservice from buildbot.util import unicode2bytes from .utils import merge_reports_prop # Magic words understood by Bitbucket Server REST API INPROGRESS = 'INPROGRESS' SUCCESSFUL = 'SUCCESSFUL' FAILED = 'FAILED' STATUS_API_URL = '/rest/build-status/1.0/commits/{sha}' STATUS_CORE_API_URL = '/rest/api/1.0/projects/{proj_key}/repos/{repo_slug}/commits/{sha}/builds' COMMENT_API_URL = '/rest/api/1.0{path}/comments' HTTP_PROCESSED = 204 HTTP_CREATED = 201 class BitbucketServerStatusPush(ReporterBase): name = "BitbucketServerStatusPush" def checkConfig(self, base_url, user, password, key=None, statusName=None, verbose=False, debug=None, verify=None, generators=None, **kwargs): if generators is None: generators = self._create_default_generators() super().checkConfig(generators=generators, **kwargs) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, base_url, user, password, key=None, statusName=None, verbose=False, debug=None, verify=None, generators=None, **kwargs): user, password = yield self.renderSecrets(user, password) self.debug = debug self.verify = verify self.verbose = verbose if generators is None: generators = self._create_default_generators() yield super().reconfigService(generators=generators, **kwargs) 
self.key = key or Interpolate('%(prop:buildername)s') self.context = statusName self._http = yield httpclientservice.HTTPClientService.getService( self.master, base_url, auth=(user, password), debug=self.debug, verify=self.verify) def _create_default_generators(self): start_formatter = MessageFormatterRenderable('Build started.') end_formatter = MessageFormatterRenderable('Build done.') return [ BuildStartEndStatusGenerator(start_formatter=start_formatter, end_formatter=end_formatter) ] def createStatus(self, sha, state, url, key, description=None, context=None): payload = { 'state': state, 'url': url, 'key': key, } if description: payload['description'] = description if context: payload['name'] = context return self._http.post(STATUS_API_URL.format(sha=sha), json=payload) @defer.inlineCallbacks def sendMessage(self, reports): report = reports[0] build = reports[0]['builds'][0] props = Properties.fromDict(build['properties']) props.master = self.master description = report.get('body', None) results = build['results'] if build['complete']: state = SUCCESSFUL if results == SUCCESS else FAILED else: state = INPROGRESS key = yield props.render(self.key) context = yield props.render(self.context) if self.context else None sourcestamps = build['buildset']['sourcestamps'] for sourcestamp in sourcestamps: try: sha = sourcestamp['revision'] if sha is None: log.msg("Unable to get the commit hash") continue url = build['url'] res = yield self.createStatus( sha=sha, state=state, url=url, key=key, description=description, context=context ) if res.code not in (HTTP_PROCESSED,): content = yield res.content() log.msg("{code}: Unable to send Bitbucket Server status: " "{content}".format(code=res.code, content=content)) elif self.verbose: log.msg('Status "{state}" sent for {sha}.'.format( state=state, sha=sha)) except Exception as e: log.err( e, 'Failed to send status "{state}" for ' '{repo} at {sha}'.format( state=state, repo=sourcestamp['repository'], sha=sha )) class 
BitbucketServerCoreAPIStatusPush(ReporterBase): name = "BitbucketServerCoreAPIStatusPush" secrets = ["token", "auth"] def checkConfig(self, base_url, token=None, auth=None, statusName=None, statusSuffix=None, key=None, parentName=None, buildNumber=None, ref=None, duration=None, testResults=None, verbose=False, debug=None, verify=None, generators=None, **kwargs): if generators is None: generators = self._create_default_generators() super().checkConfig(generators=generators, **kwargs) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) if not base_url: config.error("Parameter base_url has to be given") if token is not None and auth is not None: config.error("Only one authentication method can be given " "(token or auth)") @defer.inlineCallbacks def reconfigService(self, base_url, token=None, auth=None, statusName=None, statusSuffix=None, key=None, parentName=None, buildNumber=None, ref=None, duration=None, testResults=None, verbose=False, debug=None, verify=None, generators=None, **kwargs): self.status_name = statusName self.status_suffix = statusSuffix self.key = key or Interpolate('%(prop:buildername)s') self.parent_name = parentName self.build_number = buildNumber or Interpolate('%(prop:buildnumber)s') self.ref = ref self.duration = duration self.debug = debug self.verify = verify self.verbose = verbose if generators is None: generators = self._create_default_generators() yield super().reconfigService(generators=generators, **kwargs) if testResults: self.test_results = testResults else: @util.renderer def r_testresults(props): failed = props.getProperty("tests_failed", 0) skipped = props.getProperty("tests_skipped", 0) successful = props.getProperty("tests_successful", 0) if any([failed, skipped, successful]): return { "failed": failed, "skipped": skipped, "successful": successful } return None self.test_results = r_testresults headers = {} if token: headers["Authorization"] = "Bearer {}".format(token) self._http = yield 
httpclientservice.HTTPClientService.getService( self.master, base_url, auth=auth, headers=headers, debug=debug, verify=verify) def _create_default_generators(self): start_formatter = MessageFormatterRenderable('Build started.') end_formatter = MessageFormatterRenderable('Build done.') return [ BuildStartEndStatusGenerator(start_formatter=start_formatter, end_formatter=end_formatter) ] def createStatus(self, proj_key, repo_slug, sha, state, url, key, parent, build_number, ref, description, name, duration, test_results): payload = { 'state': state, 'url': url, 'key': key, 'parent': parent, 'ref': ref, 'buildNumber': build_number, 'description': description, 'name': name, 'duration': duration, 'testResults': test_results } if self.verbose: log.msg("Sending payload: '{}' for {}/{} {}.".format( payload, proj_key, repo_slug, sha )) _url = STATUS_CORE_API_URL.format(proj_key=proj_key, repo_slug=repo_slug, sha=sha) return self._http.post(_url, json=payload) @defer.inlineCallbacks def sendMessage(self, reports): report = reports[0] build = reports[0]['builds'][0] props = Properties.fromDict(build['properties']) props.master = self.master description = report.get('body', None) duration = None test_results = None if build['complete']: state = SUCCESSFUL if build['results'] == SUCCESS else FAILED if self.duration: duration = yield props.render(self.duration) else: td = build['complete_at'] - build['started_at'] duration = int(td.seconds * 1000) if self.test_results: test_results = yield props.render(self.test_results) else: state = INPROGRESS duration = None parent_name = (build['parentbuilder'] or {}).get('name') if self.parent_name: parent = yield props.render(self.parent_name) elif parent_name: parent = parent_name else: parent = build['builder']['name'] if self.status_name: status_name = yield props.render(self.status_name) else: status_name = "{} #{}".format(props.getProperty("buildername"), props.getProperty("buildnumber")) if parent_name: status_name = "{} #{} \u00BB 
{}".format( parent_name, build['parentbuild']['number'], status_name ) if self.status_suffix: status_name = status_name + (yield props.render(self.status_suffix)) key = yield props.render(self.key) build_number = yield props.render(self.build_number) url = build['url'] sourcestamps = build['buildset']['sourcestamps'] for sourcestamp in sourcestamps: try: ssid = sourcestamp.get('ssid') sha = sourcestamp.get('revision') branch = sourcestamp.get('branch') repo = sourcestamp.get('repository') if not sha: log.msg("Unable to get the commit hash for SSID: " "{}".format(ssid)) continue ref = None if self.ref is None: if branch is not None: if branch.startswith("refs/"): ref = branch else: ref = "refs/heads/{}".format(branch) else: ref = yield props.render(self.ref) if not ref: log.msg("WARNING: Unable to resolve ref for SSID: {}. " "Build status will not be visible on Builds or " "PullRequest pages only for commits".format(ssid)) r = re.search(r"^.*?/([^/]+)/([^/]+?)(?:\.git)?$", repo or "") if r: proj_key = r.group(1) repo_slug = r.group(2) else: log.msg("Unable to parse repository info from '{}' for " "SSID: {}".format(repo, ssid)) continue res = yield self.createStatus( proj_key=proj_key, repo_slug=repo_slug, sha=sha, state=state, url=url, key=key, parent=parent, build_number=build_number, ref=ref, description=description, name=status_name, duration=duration, test_results=test_results ) if res.code not in (HTTP_PROCESSED,): content = yield res.content() log.msg("{}: Unable to send Bitbucket Server status for " "{}/{} {}: {}".format(res.code, proj_key, repo_slug, sha, content)) elif self.verbose: log.msg('Status "{}" sent for {}/{} {}'.format( state, proj_key, repo_slug, sha )) except Exception as e: log.err( e, 'Failed to send status "{}" for {}/{} {}'.format( state, proj_key, repo_slug, sha )) class BitbucketServerPRCommentPush(ReporterBase): name = "BitbucketServerPRCommentPush" @defer.inlineCallbacks def reconfigService(self, base_url, user, password, verbose=False, 
debug=None, verify=None, generators=None, **kwargs): user, password = yield self.renderSecrets(user, password) self.verbose = verbose if generators is None: generators = self._create_default_generators() yield super().reconfigService(generators=generators, **kwargs) self._http = yield httpclientservice.HTTPClientService.getService( self.master, base_url, auth=(user, password), debug=debug, verify=verify) def checkConfig(self, base_url, user, password, verbose=False, debug=None, verify=None, generators=None, **kwargs): if generators is None: generators = self._create_default_generators() super().checkConfig(generators=generators, **kwargs) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) def _create_default_generators(self): return [BuildStatusGenerator()] def sendComment(self, pr_url, text): path = urlparse(unicode2bytes(pr_url)).path payload = {'text': text} return self._http.post(COMMENT_API_URL.format( path=bytes2unicode(path)), json=payload) @defer.inlineCallbacks def sendMessage(self, reports): body = merge_reports_prop(reports, 'body') builds = merge_reports_prop(reports, 'builds') pr_urls = set() for build in builds: props = Properties.fromDict(build['properties']) pr_urls.add(props.getProperty("pullrequesturl")) for pr_url in pr_urls: if pr_url is None: continue try: res = yield self.sendComment( pr_url=pr_url, text=body ) if res.code not in (HTTP_CREATED,): content = yield res.content() log.msg("{code}: Unable to send a comment: " "{content}".format(code=res.code, content=content)) elif self.verbose: log.msg('Comment sent to {url}'.format(url=pr_url)) except Exception as e: log.err(e, 'Failed to send a comment to "{}"'.format(pr_url)) 
buildbot-3.4.0/master/buildbot/reporters/generators/000077500000000000000000000000001413250514000226125ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/reporters/generators/__init__.py000066400000000000000000000000001413250514000247110ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/reporters/generators/build.py000066400000000000000000000110101413250514000242540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from zope.interface import implementer from buildbot import interfaces from buildbot.reporters import utils from buildbot.reporters.message import MessageFormatter from buildbot.reporters.message import MessageFormatterRenderable from .utils import BuildStatusGeneratorMixin @implementer(interfaces.IReportGenerator) class BuildStatusGenerator(BuildStatusGeneratorMixin): wanted_event_keys = [ ('builds', None, 'finished'), ] compare_attrs = ['formatter'] def __init__(self, mode=("failing", "passing", "warnings"), tags=None, builders=None, schedulers=None, branches=None, subject="Buildbot %(result)s in %(title)s on %(builder)s", add_logs=False, add_patch=False, report_new=False, message_formatter=None): super().__init__(mode, tags, builders, schedulers, branches, subject, add_logs, add_patch) self.formatter = message_formatter if self.formatter is None: self.formatter = MessageFormatter() if report_new: self.wanted_event_keys = [ ('builds', None, 'finished'), ('builds', None, 'new'), ] @defer.inlineCallbacks def generate(self, master, reporter, key, build): _, _, event = key is_new = event == 'new' want_previous_build = False if is_new else self._want_previous_build() yield utils.getDetailsForBuild(master, build, want_properties=self.formatter.want_properties, want_steps=self.formatter.want_steps, want_previous_build=want_previous_build, want_logs=self.formatter.want_logs, want_logs_content=self.formatter.want_logs_content) if not self.is_message_needed_by_props(build): return None if not is_new and not self.is_message_needed_by_results(build): return None report = yield self.build_message(self.formatter, master, reporter, build) return report def _want_previous_build(self): return "change" in self.mode or "problem" in self.mode @implementer(interfaces.IReportGenerator) class BuildStartEndStatusGenerator(BuildStatusGeneratorMixin): wanted_event_keys = [ ('builds', None, 'new'), ('builds', None, 
'finished'), ] compare_attrs = ['start_formatter', 'end_formatter'] def __init__(self, tags=None, builders=None, schedulers=None, branches=None, add_logs=False, add_patch=False, start_formatter=None, end_formatter=None): super().__init__('all', tags, builders, schedulers, branches, None, add_logs, add_patch) self.start_formatter = start_formatter if self.start_formatter is None: self.start_formatter = MessageFormatterRenderable('Build started.') self.end_formatter = end_formatter if self.end_formatter is None: self.end_formatter = MessageFormatterRenderable('Build done.') @defer.inlineCallbacks def generate(self, master, reporter, key, build): _, _, event = key is_new = event == 'new' formatter = self.start_formatter if is_new else self.end_formatter yield utils.getDetailsForBuild(master, build, want_properties=formatter.want_properties, want_steps=formatter.want_steps, want_logs=formatter.want_logs, want_logs_content=formatter.want_logs_content) if not self.is_message_needed_by_props(build): return None report = yield self.build_message(formatter, master, reporter, build) return report buildbot-3.4.0/master/buildbot/reporters/generators/buildrequest.py000066400000000000000000000063521413250514000257020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from zope.interface import implementer from buildbot import interfaces from buildbot.process.build import Build from buildbot.process.buildrequest import BuildRequest from buildbot.process.properties import Properties from buildbot.reporters import utils from buildbot.reporters.message import MessageFormatterRenderable from .utils import BuildStatusGeneratorMixin @implementer(interfaces.IReportGenerator) class BuildRequestGenerator(BuildStatusGeneratorMixin): wanted_event_keys = [ ('buildrequests', None, 'new') ] compare_attrs = ['formatter'] def __init__(self, tags=None, builders=None, schedulers=None, branches=None, add_patch=False, formatter=None): super().__init__('all', tags, builders, schedulers, branches, None, False, add_patch) self.formatter = formatter if self.formatter is None: self.formatter = MessageFormatterRenderable('Build pending.') @defer.inlineCallbacks def partial_build_dict(self, master, buildrequest): brdict = yield master.db.buildrequests.getBuildRequest(buildrequest['buildrequestid']) bdict = dict() props = Properties() buildrequest = yield BuildRequest.fromBrdict(master, brdict) builder = yield master.botmaster.getBuilderById(brdict['builderid']) Build.setupPropertiesKnownBeforeBuildStarts(props, [buildrequest], builder) Build.setupBuildProperties(props, [buildrequest]) bdict['properties'] = props.asDict() yield utils.get_details_for_buildrequest(master, brdict, bdict) return bdict @defer.inlineCallbacks def generate(self, master, reporter, key, buildrequest): build = yield self.partial_build_dict(master, buildrequest) if not self.is_message_needed_by_props(build): return None report = yield self.buildrequest_message(master, build) return report @defer.inlineCallbacks def buildrequest_message(self, master, build): patches = self._get_patches_for_build(build) users = [] buildmsg = yield self.formatter.format_message_for_build(master, build, mode=self.mode, users=users) 
return { 'body': buildmsg['body'], 'subject': buildmsg['subject'], 'type': buildmsg['type'], 'results': build['results'], 'builds': [build], 'users': list(users), 'patches': patches, 'logs': [] } buildbot-3.4.0/master/buildbot/reporters/generators/buildset.py000066400000000000000000000111731413250514000250020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from zope.interface import implementer from buildbot import interfaces from buildbot.process.results import statusToString from buildbot.reporters import utils from buildbot.reporters.message import MessageFormatter from .utils import BuildStatusGeneratorMixin @implementer(interfaces.IReportGenerator) class BuildSetStatusGenerator(BuildStatusGeneratorMixin): wanted_event_keys = [ ('buildsets', None, 'complete'), ] compare_attrs = ['formatter'] def __init__(self, mode=("failing", "passing", "warnings"), tags=None, builders=None, schedulers=None, branches=None, subject="Buildbot %(result)s in %(title)s on %(builder)s", add_logs=False, add_patch=False, message_formatter=None): super().__init__(mode, tags, builders, schedulers, branches, subject, add_logs, add_patch) self.formatter = message_formatter if self.formatter is None: self.formatter = MessageFormatter() @defer.inlineCallbacks def generate(self, master, reporter, key, message): bsid = message['bsid'] res = yield utils.getDetailsForBuildset(master, bsid, want_properties=self.formatter.want_properties, want_steps=self.formatter.want_steps, want_previous_build=self._want_previous_build(), want_logs=self.formatter.want_logs, want_logs_content=self.formatter.want_logs_content) builds = res['builds'] buildset = res['buildset'] # only include builds for which isMessageNeeded returns true builds = [build for build in builds if self.is_message_needed_by_props(build) and self.is_message_needed_by_results(build)] if not builds: return None report = yield self.buildset_message(self.formatter, master, reporter, builds, buildset['results']) return report @defer.inlineCallbacks def buildset_message(self, formatter, master, reporter, builds, results): # The given builds must refer to builds from a single buildset patches = [] logs = [] body = None subject = None msgtype = None users = set() for build in builds: 
patches.extend(self._get_patches_for_build(build)) build_logs = yield self._get_logs_for_build(master, build) logs.extend(build_logs) blamelist = yield reporter.getResponsibleUsersForBuild(master, build['buildid']) users.update(set(blamelist)) buildmsg = yield formatter.format_message_for_build(master, build, mode=self.mode, users=blamelist) msgtype, ok = self._merge_msgtype(msgtype, buildmsg['type']) if not ok: continue subject = self._merge_subject(subject, buildmsg['subject']) body, ok = self._merge_body(body, buildmsg['body']) if not ok: continue if subject is None and self.subject is not None: subject = self.subject % {'result': statusToString(results), 'projectName': master.config.title, 'title': master.config.title, 'builder': 'whole buildset'} return { 'body': body, 'subject': subject, 'type': msgtype, 'results': results, 'builds': builds, 'users': list(users), 'patches': patches, 'logs': logs } def _want_previous_build(self): return "change" in self.mode or "problem" in self.mode buildbot-3.4.0/master/buildbot/reporters/generators/utils.py000066400000000000000000000220241413250514000243240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot import util from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import statusToString class BuildStatusGeneratorMixin(util.ComparableMixin): possible_modes = ("change", "failing", "passing", "problem", "warnings", "exception", "cancelled") compare_attrs = ['mode', 'tags', 'builders', 'schedulers', 'branches', 'subject', 'add_logs', 'add_patch'] def __init__(self, mode, tags, builders, schedulers, branches, subject, add_logs, add_patch): self.mode = self._compute_shortcut_modes(mode) self.tags = tags self.builders = builders self.schedulers = schedulers self.branches = branches self.subject = subject self.add_logs = add_logs self.add_patch = add_patch def check(self): self._verify_build_generator_mode(self.mode) if self.subject is not None and '\n' in self.subject: config.error('Newlines are not allowed in message subjects') list_or_none_params = [ ('tags', self.tags), ('builders', self.builders), ('schedulers', self.schedulers), ('branches', self.branches), ] for name, param in list_or_none_params: self._verify_list_or_none_param(name, param) # you should either limit on builders or tags, not both if self.builders is not None and self.tags is not None: config.error("Please specify only builders or tags to include - not both.") def generate_name(self): name = self.__class__.__name__ if self.tags is not None: name += "_tags_" + "+".join(self.tags) if self.builders is not None: name += "_builders_" + "+".join(self.builders) if self.schedulers is not None: name += "_schedulers_" + "+".join(self.schedulers) if self.branches is not None: name += "_branches_" + "+".join(self.branches) name += "_".join(self.mode) return name 
def _should_attach_log(self, log): if isinstance(self.add_logs, bool): return self.add_logs if log['name'] in self.add_logs: return True long_name = "{}.{}".format(log['stepname'], log['name']) if long_name in self.add_logs: return True return False def is_message_needed_by_props(self, build): # here is where we actually do something. builder = build['builder'] scheduler = build['properties'].get('scheduler', [None])[0] branch = build['properties'].get('branch', [None])[0] if self.builders is not None and builder['name'] not in self.builders: return False if self.schedulers is not None and scheduler not in self.schedulers: return False if self.branches is not None and branch not in self.branches: return False if self.tags is not None and not self._matches_any_tag(builder['tags']): return False return True def is_message_needed_by_results(self, build): results = build['results'] if "change" in self.mode: prev = build['prev_build'] if prev and prev['results'] != results: return True if "failing" in self.mode and results == FAILURE: return True if "passing" in self.mode and results == SUCCESS: return True if "problem" in self.mode and results == FAILURE: prev = build['prev_build'] if prev and prev['results'] != FAILURE: return True if "warnings" in self.mode and results == WARNINGS: return True if "exception" in self.mode and results == EXCEPTION: return True if "cancelled" in self.mode and results == CANCELLED: return True return False def _merge_msgtype(self, msgtype, new_msgtype): if new_msgtype is None: return msgtype, False if msgtype is None: return new_msgtype, True if msgtype != new_msgtype: log.msg(('{}: Incompatible message types for multiple builds ({} and {}). 
Ignoring' ).format(self, msgtype, new_msgtype)) return msgtype, False return msgtype, True def _merge_subject(self, subject, new_subject): if subject is None and new_subject is not None: return new_subject return subject def _merge_body(self, body, new_body): if body is None: return new_body, True if new_body is None: return body, True if isinstance(body, str) and isinstance(new_body, str): return body + new_body, True if isinstance(body, list) and isinstance(new_body, list): return body + new_body, True log.msg(('{}: Incompatible message body types for multiple builds ({} and {}). Ignoring' ).format(self, type(body), type(new_body))) return body, False def _get_patches_for_build(self, build): if not self.add_patch: return [] ss_list = build['buildset']['sourcestamps'] return [ss['patch'] for ss in ss_list if 'patch' in ss and ss['patch'] is not None] @defer.inlineCallbacks def build_message(self, formatter, master, reporter, build): patches = self._get_patches_for_build(build) logs = yield self._get_logs_for_build(master, build) users = yield reporter.getResponsibleUsersForBuild(master, build['buildid']) buildmsg = yield formatter.format_message_for_build(master, build, mode=self.mode, users=users) results = build['results'] subject = buildmsg['subject'] if subject is None and self.subject is not None: subject = self.subject % {'result': statusToString(results), 'projectName': master.config.title, 'title': master.config.title, 'builder': build['builder']['name']} return { 'body': buildmsg['body'], 'subject': subject, 'type': buildmsg['type'], 'results': results, 'builds': [build], 'users': list(users), 'patches': patches, 'logs': logs } @defer.inlineCallbacks def _get_logs_for_build(self, master, build): if not self.add_logs: return [] all_logs = [] steps = yield master.data.get(('builds', build['buildid'], "steps")) for step in steps: logs = yield master.data.get(("steps", step['stepid'], 'logs')) for l in logs: l['stepname'] = step['name'] if 
self._should_attach_log(l): l['content'] = yield master.data.get(("logs", l['logid'], 'contents')) all_logs.append(l) return all_logs def _verify_build_generator_mode(self, mode): for m in self._compute_shortcut_modes(mode): if m not in self.possible_modes: if m == "all": config.error("mode 'all' is not valid in an iterator and must be " "passed in as a separate string") else: config.error("mode {} is not a valid mode".format(m)) def _verify_list_or_none_param(self, name, param): if param is not None and not isinstance(param, list): config.error("{} must be a list or None".format(name)) def _compute_shortcut_modes(self, mode): if isinstance(mode, str): if mode == "all": mode = ("failing", "passing", "warnings", "exception", "cancelled") elif mode == "warnings": mode = ("failing", "warnings") else: mode = (mode,) return mode def _matches_any_tag(self, tags): return self.tags and any(tag for tag in self.tags if tag in tags) buildbot-3.4.0/master/buildbot/reporters/generators/worker.py000066400000000000000000000053071413250514000245020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from zope.interface import implementer from buildbot import config from buildbot import interfaces from buildbot import util from buildbot.reporters.message import MessageFormatterMissingWorker ENCODING = 'utf-8' @implementer(interfaces.IReportGenerator) class WorkerMissingGenerator(util.ComparableMixin): compare_attrs = ['workers', 'formatter'] wanted_event_keys = [ ('workers', None, 'missing'), ] def __init__(self, workers='all', message_formatter=None): self.workers = workers self.formatter = message_formatter if self.formatter is None: self.formatter = MessageFormatterMissingWorker() def check(self): if not (self.workers == 'all' or isinstance(self.workers, (list, tuple, set))): config.error("workers must be 'all', or list of worker names") @defer.inlineCallbacks def generate(self, master, reporter, key, worker): if not self._is_message_needed(worker): return None msg = yield self.formatter.formatMessageForMissingWorker(master, worker) body = msg['body'].encode(ENCODING) subject = msg['subject'] if subject is None: subject = "Buildbot worker {name} missing".format(**worker) assert msg['type'] in ('plain', 'html'), \ "'{}' message type must be 'plain' or 'html'.".format(msg['type']) return { 'body': body, 'subject': subject, 'type': msg['type'], 'results': None, 'builds': None, 'users': worker['notify'], 'patches': None, 'logs': None, 'worker': worker['name'] } def generate_name(self): name = self.__class__.__name__ if self.workers is not None: name += "_workers_" + "+".join(self.workers) return name def _is_message_needed(self, worker): return (self.workers == 'all' or worker['name'] in self.workers) and worker['notify'] buildbot-3.4.0/master/buildbot/reporters/gerrit.py000066400000000000000000000404141413250514000223120ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Push events to Gerrit """ import time import warnings from pkg_resources import parse_version from twisted.internet import defer from twisted.internet import reactor from twisted.internet.protocol import ProcessProtocol from twisted.python import log from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.reporters import utils from buildbot.util import bytes2unicode from buildbot.util import service # Cache the version that the gerrit server is running for this many seconds GERRIT_VERSION_CACHE_TIMEOUT = 600 GERRIT_LABEL_VERIFIED = 'Verified' GERRIT_LABEL_REVIEWED = 'Code-Review' def makeReviewResult(message, *labels): """ helper to produce a review result """ return dict(message=message, labels=dict(labels)) def _handleLegacyResult(result): """ make sure the result is backward compatible """ if not isinstance(result, dict): warnings.warn('The Gerrit status callback uses the old way to ' 'communicate results. 
The outcome might be not what is ' 'expected.') message, verified, reviewed = result result = makeReviewResult(message, (GERRIT_LABEL_VERIFIED, verified), (GERRIT_LABEL_REVIEWED, reviewed)) return result def _old_add_label(label, value): if label == GERRIT_LABEL_VERIFIED: return ["--verified %d" % int(value)] elif label == GERRIT_LABEL_REVIEWED: return ["--code-review %d" % int(value)] warnings.warn(('Gerrit older than 2.6 does not support custom labels. ' 'Setting {} is ignored.').format(label)) return [] def _new_add_label(label, value): return ["--label {}={}".format(label, int(value))] def defaultReviewCB(builderName, build, result, master, arg): if result == RETRY: return makeReviewResult(None) message = "Buildbot finished compiling your patchset\n" message += "on configuration: {}\n".format(builderName) message += "The result is: {}\n".format(Results[result].upper()) return makeReviewResult(message, (GERRIT_LABEL_VERIFIED, result == SUCCESS or -1)) def defaultSummaryCB(buildInfoList, results, master, arg): success = False failure = False msgs = [] for buildInfo in buildInfoList: msg = "Builder %(name)s %(resultText)s (%(text)s)" % buildInfo link = buildInfo.get('url', None) if link: msg += " - " + link else: msg += "." 
msgs.append(msg) if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement success = True else: failure = True if success and not failure: verified = 1 else: verified = -1 return makeReviewResult('\n\n'.join(msgs), (GERRIT_LABEL_VERIFIED, verified)) # These are just sentinel values for GerritStatusPush.__init__ args class DEFAULT_REVIEW: pass class DEFAULT_SUMMARY: pass class GerritStatusPush(service.BuildbotService): """Event streamer to a gerrit ssh server.""" name = "GerritStatusPush" gerrit_server = None gerrit_username = None gerrit_port = None gerrit_version_time = None gerrit_version = None gerrit_identity_file = None reviewCB = None reviewArg = None startCB = None startArg = None summaryCB = None summaryArg = None wantSteps = False wantLogs = False _gerrit_notify = None def reconfigService(self, server, username, reviewCB=DEFAULT_REVIEW, startCB=None, port=29418, reviewArg=None, startArg=None, summaryCB=DEFAULT_SUMMARY, summaryArg=None, identity_file=None, builders=None, notify=None, wantSteps=False, wantLogs=False): # If neither reviewCB nor summaryCB were specified, default to sending # out "summary" reviews. But if we were given a reviewCB and only a # reviewCB, disable the "summary" reviews, so we don't send out both # by default. if reviewCB is DEFAULT_REVIEW and summaryCB is DEFAULT_SUMMARY: reviewCB = None summaryCB = defaultSummaryCB if reviewCB is DEFAULT_REVIEW: reviewCB = None if summaryCB is DEFAULT_SUMMARY: summaryCB = None # Parameters. 
self.gerrit_server = server self.gerrit_username = username self.gerrit_port = port self.gerrit_version = None self.gerrit_version_time = 0 self.gerrit_identity_file = identity_file self.reviewCB = reviewCB self.reviewArg = reviewArg self.startCB = startCB self.startArg = startArg self.summaryCB = summaryCB self.summaryArg = summaryArg self.builders = builders self._gerrit_notify = notify self.wantSteps = wantSteps self.wantLogs = wantLogs def _gerritCmd(self, *args): '''Construct a command as a list of strings suitable for :func:`subprocess.call`. ''' if self.gerrit_identity_file is not None: options = ['-i', self.gerrit_identity_file] else: options = [] return ['ssh', '-o', 'BatchMode=yes'] + options + [ '@'.join((self.gerrit_username, self.gerrit_server)), '-p', str(self.gerrit_port), 'gerrit' ] + list(args) class VersionPP(ProcessProtocol): def __init__(self, func): self.func = func self.gerrit_version = None def outReceived(self, data): vstr = b"gerrit version " if not data.startswith(vstr): log.msg(b"Error: Cannot interpret gerrit version info: " + data) return vers = data[len(vstr):].strip() log.msg(b"gerrit version: " + vers) self.gerrit_version = parse_version(bytes2unicode(vers)) def errReceived(self, data): log.msg(b"gerriterr: " + data) def processEnded(self, status_object): if status_object.value.exitCode: log.msg("gerrit version status: ERROR:", status_object) return if self.gerrit_version: self.func(self.gerrit_version) def getCachedVersion(self): if self.gerrit_version is None: return None if time.time() - self.gerrit_version_time > GERRIT_VERSION_CACHE_TIMEOUT: # cached version has expired self.gerrit_version = None return self.gerrit_version def processVersion(self, gerrit_version, func): self.gerrit_version = gerrit_version self.gerrit_version_time = time.time() func() def callWithVersion(self, func): command = self._gerritCmd("version") def callback(gerrit_version): return self.processVersion(gerrit_version, func) 
self.spawnProcess(self.VersionPP(callback), command[0], command, env=None) class LocalPP(ProcessProtocol): def __init__(self, status): self.status = status def outReceived(self, data): log.msg("gerritout:", data) def errReceived(self, data): log.msg("gerriterr:", data) def processEnded(self, status_object): if status_object.value.exitCode: log.msg("gerrit status: ERROR:", status_object) else: log.msg("gerrit status: OK") @defer.inlineCallbacks def startService(self): yield super().startService() startConsuming = self.master.mq.startConsuming self._buildsetCompleteConsumer = yield startConsuming( self.buildsetComplete, ('buildsets', None, 'complete')) self._buildCompleteConsumer = yield startConsuming( self.buildComplete, ('builds', None, 'finished')) self._buildStartedConsumer = yield startConsuming( self.buildStarted, ('builds', None, 'new')) def stopService(self): self._buildsetCompleteConsumer.stopConsuming() self._buildCompleteConsumer.stopConsuming() self._buildStartedConsumer.stopConsuming() @defer.inlineCallbacks def _got_event(self, key, msg): # This function is used only from tests if key[0] == 'builds': if key[2] == 'new': yield self.buildStarted(key, msg) return elif key[2] == 'finished': yield self.buildComplete(key, msg) return if key[0] == 'buildsets' and key[2] == 'complete': # pragma: no cover yield self.buildsetComplete(key, msg) return raise Exception('Invalid key for _got_event: {}'.format(key)) # pragma: no cover @defer.inlineCallbacks def buildStarted(self, key, build): if self.startCB is None: return yield self.getBuildDetails(build) if self.isBuildReported(build): result = yield self.startCB(build['builder']['name'], build, self.startArg) self.sendCodeReviews(build, result) @defer.inlineCallbacks def buildComplete(self, key, build): if self.reviewCB is None: return yield self.getBuildDetails(build) if self.isBuildReported(build): result = yield self.reviewCB(build['builder']['name'], build, build['results'], self.master, self.reviewArg) 
result = _handleLegacyResult(result) self.sendCodeReviews(build, result) @defer.inlineCallbacks def getBuildDetails(self, build): br = yield self.master.data.get(("buildrequests", build['buildrequestid'])) buildset = yield self.master.data.get(("buildsets", br['buildsetid'])) yield utils.getDetailsForBuilds(self.master, buildset, [build], want_properties=True, want_steps=self.wantSteps) def isBuildReported(self, build): return self.builders is None or build['builder']['name'] in self.builders @defer.inlineCallbacks def buildsetComplete(self, key, msg): if not self.summaryCB: return bsid = msg['bsid'] res = yield utils.getDetailsForBuildset(self.master, bsid, want_properties=True, want_steps=self.wantSteps, want_logs=self.wantLogs, want_logs_content=self.wantLogs) builds = res['builds'] buildset = res['buildset'] self.sendBuildSetSummary(buildset, builds) @defer.inlineCallbacks def sendBuildSetSummary(self, buildset, builds): builds = [build for build in builds if self.isBuildReported(build)] if builds and self.summaryCB: def getBuildInfo(build): result = build['results'] resultText = { SUCCESS: "succeeded", FAILURE: "failed", WARNINGS: "completed with warnings", EXCEPTION: "encountered an exception", }.get(result, "completed with unknown result %d" % result) return {'name': build['builder']['name'], 'result': result, 'resultText': resultText, 'text': build['state_string'], 'url': utils.getURLForBuild(self.master, build['builder']['builderid'], build['number']), 'build': build } buildInfoList = sorted( [getBuildInfo(build) for build in builds], key=lambda bi: bi['name']) result = yield self.summaryCB(buildInfoList, Results[buildset['results']], self.master, self.summaryArg) result = _handleLegacyResult(result) self.sendCodeReviews(builds[0], result) def sendCodeReviews(self, build, result): message = result.get('message', None) if message is None: return def getProperty(build, name): return build['properties'].get(name, [None])[0] # Gerrit + Repo downloads = 
getProperty(build, "repo_downloads") downloaded = getProperty(build, "repo_downloaded") if downloads is not None and downloaded is not None: downloaded = downloaded.split(" ") if downloads and 2 * len(downloads) == len(downloaded): for i, download in enumerate(downloads): try: project, change1 = download.split(" ") except ValueError: return # something is wrong, abort change2 = downloaded[2 * i] revision = downloaded[2 * i + 1] if change1 == change2: self.sendCodeReview(project, revision, result) else: return # something is wrong, abort return # Gerrit + Git # used only to verify Gerrit source if getProperty(build, "event.change.id") is not None: project = getProperty(build, "event.change.project") codebase = getProperty(build, "codebase") revision = (getProperty(build, "event.patchSet.revision") or getProperty(build, "got_revision") or getProperty(build, "revision")) if isinstance(revision, dict): # in case of the revision is a codebase revision, we just take # the revisionfor current codebase if codebase is not None: revision = revision[codebase] else: revision = None if project is not None and revision is not None: self.sendCodeReview(project, revision, result) return def sendCodeReview(self, project, revision, result): gerrit_version = self.getCachedVersion() if gerrit_version is None: self.callWithVersion( lambda: self.sendCodeReview(project, revision, result)) return assert gerrit_version command = self._gerritCmd("review", "--project {}".format(project)) if gerrit_version >= parse_version("2.13"): command.append('--tag autogenerated:buildbot') if self._gerrit_notify is not None: command.append('--notify {}'.format(str(self._gerrit_notify))) message = result.get('message', None) if message: command.append("--message '{}'".format(message.replace("'", "\""))) labels = result.get('labels', None) if labels: if gerrit_version < parse_version("2.6"): add_label = _old_add_label else: add_label = _new_add_label for label, value in labels.items(): 
command.extend(add_label(label, value)) command.append(revision) command = [str(s) for s in command] self.spawnProcess(self.LocalPP(self), command[0], command, env=None) def spawnProcess(self, *arg, **kw): reactor.spawnProcess(*arg, **kw) buildbot-3.4.0/master/buildbot/reporters/gerrit_verify_status.py000066400000000000000000000222421413250514000253000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters.base import ReporterBase from buildbot.reporters.generators.build import BuildStartEndStatusGenerator from buildbot.reporters.message import MessageFormatterRenderable from buildbot.util import httpclientservice from buildbot.util.logger import Logger log = Logger() class GerritVerifyStatusPush(ReporterBase): name = "GerritVerifyStatusPush" # overridable constants RESULTS_TABLE = { SUCCESS: 1, WARNINGS: 1, FAILURE: -1, SKIPPED: 0, EXCEPTION: 0, RETRY: 0, CANCELLED: 0 } DEFAULT_RESULT = -1 def checkConfig(self, baseURL, auth, verification_name=None, abstain=False, category=None, reporter=None, verbose=False, debug=None, verify=None, generators=None, **kwargs): if generators is None: generators = self._create_default_generators() super().checkConfig(generators=generators, **kwargs) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, baseURL, auth, verification_name=None, abstain=False, category=None, reporter=None, verbose=False, debug=None, verify=None, generators=None, **kwargs): auth = yield self.renderSecrets(auth) self.debug = debug self.verify = verify self.verbose = verbose if generators is None: generators = self._create_default_generators() yield super().reconfigService(generators=generators, **kwargs) if baseURL.endswith('/'): baseURL = baseURL[:-1] self._http = yield httpclientservice.HTTPClientService.getService( self.master, baseURL, 
auth=auth, debug=self.debug, verify=self.verify) self._verification_name = verification_name or Interpolate( '%(prop:buildername)s') self._reporter = reporter or "buildbot" self._abstain = abstain self._category = category self._verbose = verbose def _create_default_generators(self): start_formatter = MessageFormatterRenderable('Build started.') end_formatter = MessageFormatterRenderable('Build done.') return [ BuildStartEndStatusGenerator(start_formatter=start_formatter, end_formatter=end_formatter) ] def createStatus(self, change_id, revision_id, name, value, abstain=None, rerun=None, comment=None, url=None, reporter=None, category=None, duration=None): """ Abstract the POST REST api documented here: https://gerrit.googlesource.com/plugins/verify-status/+/master/src/main/resources/Documentation/rest-api-changes.md :param change_id: The change_id for the change tested (can be in the long form e.g: myProject~master~I8473b95934b5732ac55d26311a706c9c2bde9940 or in the short integer form). :param revision_id: the revision_id tested can be the patchset number or the commit id (short or long). :param name: The name of the job. :param value: The pass/fail result for this job: -1: fail 0: unstable, 1: succeed :param abstain: Whether the value counts as a vote (defaults to false) :param rerun: Whether this result is from a re-test on the same patchset :param comment: A short comment about this job :param url: The url link to more info about this job :reporter: The user that verified this job :category: A category for this job "duration": The time it took to run this job :return: A deferred with the result from Gerrit. 
""" payload = {'name': name, 'value': value} if abstain is not None: payload['abstain'] = abstain if rerun is not None: payload['rerun'] = rerun if comment is not None: payload['comment'] = comment if url is not None: payload['url'] = url if reporter is not None: payload['reporter'] = reporter if category is not None: payload['category'] = category if duration is not None: payload['duration'] = duration if self._verbose: log.debug( 'Sending Gerrit status for {change_id}/{revision_id}: data={data}', change_id=change_id, revision_id=revision_id, data=payload) return self._http.post( '/'.join([ '/a/changes', str(change_id), 'revisions', str(revision_id), 'verify-status~verifications' ]), json=payload) def formatDuration(self, duration): """Format the duration. This method could be overridden if really needed, as the duration format in gerrit is an arbitrary string. :param duration: duration in timedelta """ days = duration.days hours, remainder = divmod(duration.seconds, 3600) minutes, seconds = divmod(remainder, 60) if days: return '{} day{} {}h {}m {}s'.format(days, "s" if days > 1 else "", hours, minutes, seconds) elif hours: return '{}h {}m {}s'.format(hours, minutes, seconds) return '{}m {}s'.format(minutes, seconds) @staticmethod def getGerritChanges(props): """ Get the gerrit changes This method could be overridden if really needed to accommodate for other custom steps method for fetching gerrit changes. 
:param props: an IProperty :return: (optionally via deferred) a list of dictionary with at list change_id, and revision_id, which format is the one accepted by the gerrit REST API as of /changes/:change_id/revision/:revision_id paths (see gerrit doc) """ if 'gerrit_changes' in props: return props.getProperty('gerrit_changes') if 'event.change.number' in props: return [{ 'change_id': props.getProperty('event.change.number'), 'revision_id': props.getProperty('event.patchSet.number') }] return [] @defer.inlineCallbacks def sendMessage(self, reports): report = reports[0] build = reports[0]['builds'][0] props = Properties.fromDict(build['properties']) props.master = self.master comment = report.get('body', None) if build['complete']: value = self.RESULTS_TABLE.get(build['results'], self.DEFAULT_RESULT) duration = self.formatDuration(build['complete_at'] - build['started_at']) else: value = 0 duration = 'pending' name = yield props.render(self._verification_name) reporter = yield props.render(self._reporter) category = yield props.render(self._category) abstain = yield props.render(self._abstain) # TODO: find reliable way to find out whether its a rebuild rerun = None changes = yield self.getGerritChanges(props) for change in changes: try: yield self.createStatus( change['change_id'], change['revision_id'], name, value, abstain=abstain, rerun=rerun, comment=comment, url=build['url'], reporter=reporter, category=category, duration=duration) except Exception: log.failure( 'Failed to send status!', failure=failure.Failure()) buildbot-3.4.0/master/buildbot/reporters/github.py000066400000000000000000000262241413250514000223030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.internet import defer from twisted.python import log from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters.base import ReporterBase from buildbot.reporters.generators.build import BuildStartEndStatusGenerator from buildbot.reporters.generators.buildrequest import BuildRequestGenerator from buildbot.reporters.message import MessageFormatterRenderable from buildbot.util import httpclientservice from buildbot.util.giturlparse import giturlparse HOSTED_BASE_URL = 'https://api.github.com' class GitHubStatusPush(ReporterBase): name = "GitHubStatusPush" def checkConfig(self, token, context=None, baseURL=None, verbose=False, debug=None, verify=None, generators=None, **kwargs): if generators is None: generators = self._create_default_generators() super().checkConfig(generators=generators, **kwargs) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, token, context=None, baseURL=None, verbose=False, debug=None, verify=None, generators=None, **kwargs): token = yield self.renderSecrets(token) self.debug = debug 
self.verify = verify self.verbose = verbose self.context = self.setup_context(context) if generators is None: generators = self._create_default_generators() yield super().reconfigService(generators=generators, **kwargs) if baseURL is None: baseURL = HOSTED_BASE_URL if baseURL.endswith('/'): baseURL = baseURL[:-1] self._http = yield httpclientservice.HTTPClientService.getService( self.master, baseURL, headers={ 'Authorization': 'token ' + token, 'User-Agent': 'Buildbot' }, debug=self.debug, verify=self.verify) def setup_context(self, context): return context or Interpolate('buildbot/%(prop:buildername)s') def _create_default_generators(self): start_formatter = MessageFormatterRenderable('Build started.') end_formatter = MessageFormatterRenderable('Build done.') pending_formatter = MessageFormatterRenderable('Build pending.') return [ BuildRequestGenerator(formatter=pending_formatter), BuildStartEndStatusGenerator(start_formatter=start_formatter, end_formatter=end_formatter) ] def createStatus(self, repo_user, repo_name, sha, state, target_url=None, context=None, issue=None, description=None): """ :param repo_user: GitHub user or organization :param repo_name: Name of the repository :param sha: Full sha to create the status for. :param state: one of the following 'pending', 'success', 'error' or 'failure'. :param target_url: Target url to associate with this status. :param description: Short description of the status. :param context: Build context :return: A deferred with the result from GitHub. This code comes from txgithub by @tomprince. 
txgithub is based on twisted's webclient agent, which is much less reliable and featureful as txrequest (support for proxy, connection pool, keep alive, retry, etc) """ payload = {'state': state} if description is not None: payload['description'] = description if target_url is not None: payload['target_url'] = target_url if context is not None: payload['context'] = context return self._http.post( '/'.join(['/repos', repo_user, repo_name, 'statuses', sha]), json=payload) def is_status_2xx(self, code): return code // 100 == 2 def _extract_issue(self, props): branch = props.getProperty('branch') if branch: m = re.search(r"refs/pull/([0-9]*)/(head|merge)", branch) if m: return m.group(1) return None def _extract_github_info(self, sourcestamp): repo_owner = None repo_name = None project = sourcestamp['project'] repository = sourcestamp['repository'] if project and "/" in project: repo_owner, repo_name = project.split('/') elif repository: giturl = giturlparse(repository) if giturl: repo_owner = giturl.owner repo_name = giturl.repo return repo_owner, repo_name @defer.inlineCallbacks def sendMessage(self, reports): report = reports[0] build = reports[0]['builds'][0] props = Properties.fromDict(build['properties']) props.master = self.master description = report.get('body', None) if build['complete']: state = { SUCCESS: 'success', WARNINGS: 'success', FAILURE: 'failure', SKIPPED: 'success', EXCEPTION: 'error', RETRY: 'pending', CANCELLED: 'error' }.get(build['results'], 'error') else: state = 'pending' context = yield props.render(self.context) sourcestamps = build['buildset'].get('sourcestamps') if not sourcestamps: return issue = self._extract_issue(props) for sourcestamp in sourcestamps: repo_owner, repo_name = self._extract_github_info(sourcestamp) if not repo_owner or not repo_name: log.msg('Skipped status update because required repo information is missing.') continue sha = sourcestamp['revision'] response = None # If the scheduler specifies multiple codebases, don't 
bother updating # the ones for which there is no revision if not sha: log.msg( 'Skipped status update for codebase {codebase}, ' 'context "{context}", issue {issue}.'.format( codebase=sourcestamp['codebase'], issue=issue, context=context)) continue try: if self.verbose: log.msg("Updating github status: repo_owner={}, repo_name={}".format( repo_owner, repo_name)) response = yield self.createStatus(repo_user=repo_owner, repo_name=repo_name, sha=sha, state=state, target_url=build['url'], context=context, issue=issue, description=description) if not response: # the implementation of createStatus refused to post update due to missing data continue if not self.is_status_2xx(response.code): raise Exception() if self.verbose: log.msg( 'Updated status with "{state}" for {repo_owner}/{repo_name} ' 'at {sha}, context "{context}", issue {issue}.'.format( state=state, repo_owner=repo_owner, repo_name=repo_name, sha=sha, issue=issue, context=context)) except Exception as e: if response: content = yield response.content() code = response.code else: content = code = "n/a" log.err( e, 'Failed to update "{state}" for {repo_owner}/{repo_name} ' 'at {sha}, context "{context}", issue {issue}. 
' 'http {code}, {content}'.format( state=state, repo_owner=repo_owner, repo_name=repo_name, sha=sha, issue=issue, context=context, code=code, content=content)) class GitHubCommentPush(GitHubStatusPush): name = "GitHubCommentPush" def setup_context(self, context): return '' def _create_default_generators(self): start_formatter = MessageFormatterRenderable(None) end_formatter = MessageFormatterRenderable('Build done.') return [ BuildStartEndStatusGenerator(start_formatter=start_formatter, end_formatter=end_formatter) ] @defer.inlineCallbacks def sendMessage(self, reports): report = reports[0] if 'body' not in report or report['body'] is None: return yield super().sendMessage(reports) @defer.inlineCallbacks def createStatus(self, repo_user, repo_name, sha, state, target_url=None, context=None, issue=None, description=None): """ :param repo_user: GitHub user or organization :param repo_name: Name of the repository :param issue: Pull request number :param state: one of the following 'pending', 'success', 'error' or 'failure'. :param description: Short description of the status. :return: A deferred with the result from GitHub. This code comes from txgithub by @tomprince. txgithub is based on twisted's webclient agent, which is much less reliable and featureful as txrequest (support for proxy, connection pool, keep alive, retry, etc) """ payload = {'body': description} if issue is None: log.msg('Skipped status update for repo {} sha {} as issue is not specified'.format( repo_name, sha)) return None url = '/'.join(['/repos', repo_user, repo_name, 'issues', issue, 'comments']) ret = yield self._http.post(url, json=payload) return ret buildbot-3.4.0/master/buildbot/reporters/gitlab.py000066400000000000000000000204261413250514000222610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from urllib.parse import quote_plus as urlquote_plus from twisted.internet import defer from twisted.python import log from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters.base import ReporterBase from buildbot.reporters.generators.build import BuildStartEndStatusGenerator from buildbot.reporters.generators.buildrequest import BuildRequestGenerator from buildbot.reporters.message import MessageFormatterRenderable from buildbot.util import giturlparse from buildbot.util import httpclientservice HOSTED_BASE_URL = 'https://gitlab.com' class GitLabStatusPush(ReporterBase): name = "GitLabStatusPush" def checkConfig(self, token, context=None, baseURL=None, verbose=False, debug=None, verify=None, generators=None, **kwargs): if generators is None: generators = self._create_default_generators() super().checkConfig(generators=generators, **kwargs) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, token, context=None, baseURL=None, verbose=False, debug=None, verify=None, generators=None, **kwargs): token = yield 
self.renderSecrets(token) self.debug = debug self.verify = verify self.verbose = verbose self.context = context or Interpolate('buildbot/%(prop:buildername)s') if generators is None: generators = self._create_default_generators() yield super().reconfigService(generators=generators, **kwargs) if baseURL is None: baseURL = HOSTED_BASE_URL if baseURL.endswith('/'): baseURL = baseURL[:-1] self.baseURL = baseURL self._http = yield httpclientservice.HTTPClientService.getService( self.master, baseURL, headers={'PRIVATE-TOKEN': token}, debug=self.debug, verify=self.verify) self.project_ids = {} def _create_default_generators(self): start_formatter = MessageFormatterRenderable('Build started.') end_formatter = MessageFormatterRenderable('Build done.') pending_formatter = MessageFormatterRenderable('Build pending.') return [ BuildRequestGenerator(formatter=pending_formatter), BuildStartEndStatusGenerator(start_formatter=start_formatter, end_formatter=end_formatter) ] def createStatus(self, project_id, branch, sha, state, target_url=None, description=None, context=None): """ :param project_id: Project ID from GitLab :param branch: Branch name to create the status for. :param sha: Full sha to create the status for. :param state: one of the following 'pending', 'success', 'failed' or 'cancelled'. :param target_url: Target url to associate with this status. :param description: Short description of the status. :param context: Context of the result :return: A deferred with the result from GitLab. 
""" payload = {'state': state, 'ref': branch} if description is not None: payload['description'] = description if target_url is not None: payload['target_url'] = target_url if context is not None: payload['name'] = context return self._http.post('/api/v4/projects/{}/statuses/{}'.format(project_id, sha), json=payload) @defer.inlineCallbacks def getProjectId(self, sourcestamp): # retrieve project id via cache url = giturlparse(sourcestamp['repository']) if url is None: return None project_full_name = "{}/{}".format(url.owner, url.repo) # gitlab needs project name to be fully url quoted to get the project id project_full_name = urlquote_plus(project_full_name) if project_full_name not in self.project_ids: response = yield self._http.get('/api/v4/projects/{}'.format(project_full_name)) proj = yield response.json() if response.code not in (200, ): log.msg( 'Unknown (or hidden) gitlab project' '{repo}: {message}'.format( repo=project_full_name, message=proj.get('message'))) return None self.project_ids[project_full_name] = proj['id'] return self.project_ids[project_full_name] @defer.inlineCallbacks def sendMessage(self, reports): report = reports[0] build = reports[0]['builds'][0] props = Properties.fromDict(build['properties']) props.master = self.master description = report.get('body', None) if build['complete']: state = { SUCCESS: 'success', WARNINGS: 'success', FAILURE: 'failed', SKIPPED: 'success', EXCEPTION: 'failed', RETRY: 'pending', CANCELLED: 'cancelled' }.get(build['results'], 'failed') else: state = 'running' context = yield props.render(self.context) sourcestamps = build['buildset']['sourcestamps'] # FIXME: probably only want to report status for the last commit in the changeset for sourcestamp in sourcestamps: sha = sourcestamp['revision'] if 'source_project_id' in props: proj_id = props['source_project_id'] else: proj_id = yield self.getProjectId(sourcestamp) if proj_id is None: continue try: if 'source_branch' in props: branch = props['source_branch'] 
else: branch = sourcestamp['branch'] target_url = build['url'] res = yield self.createStatus( project_id=proj_id, branch=branch, sha=sha, state=state, target_url=target_url, context=context, description=description ) if res.code not in (200, 201, 204): message = yield res.json() message = message.get('message', 'unspecified error') log.msg( 'Could not send status "{state}" for ' '{repo} at {sha}: {message}'.format( state=state, repo=sourcestamp['repository'], sha=sha, message=message)) elif self.verbose: log.msg( 'Status "{state}" sent for ' '{repo} at {sha}.'.format( state=state, repo=sourcestamp['repository'], sha=sha)) except Exception as e: log.err( e, 'Failed to send status "{state}" for ' '{repo} at {sha}'.format( state=state, repo=sourcestamp['repository'], sha=sha )) buildbot-3.4.0/master/buildbot/reporters/http.py000066400000000000000000000051301413250514000217710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.reporters.base import ReporterBase from buildbot.reporters.generators.build import BuildStatusGenerator from buildbot.reporters.message import MessageFormatterFunction from buildbot.util import httpclientservice class HttpStatusPush(ReporterBase): name = "HttpStatusPush" secrets = ["auth"] def checkConfig(self, serverUrl, auth=None, headers=None, debug=None, verify=None, generators=None, **kwargs): if generators is None: generators = self._create_default_generators() super().checkConfig(generators=generators, **kwargs) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, serverUrl, auth=None, headers=None, debug=None, verify=None, generators=None, **kwargs): self.debug = debug self.verify = verify if generators is None: generators = self._create_default_generators() yield super().reconfigService(generators=generators, **kwargs) self._http = yield httpclientservice.HTTPClientService.getService( self.master, serverUrl, auth=auth, headers=headers, debug=self.debug, verify=self.verify) def _create_default_generators(self): formatter = MessageFormatterFunction(lambda context: context['build'], 'json') return [ BuildStatusGenerator(message_formatter=formatter, report_new=True) ] def is_status_2xx(self, code): return code // 100 == 2 @defer.inlineCallbacks def sendMessage(self, reports): response = yield self._http.post("", json=reports[0]['body']) if not self.is_status_2xx(response.code): log.msg("{}: unable to upload status: {}".format(response.code, response.content)) buildbot-3.4.0/master/buildbot/reporters/irc.py000066400000000000000000000516431413250514000216010ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import base64 from twisted.application import internet from twisted.internet import defer from twisted.internet import reactor from twisted.internet import task from twisted.python import log from twisted.words.protocols import irc from buildbot import config from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters.words import Channel from buildbot.reporters.words import Contact from buildbot.reporters.words import StatusBot from buildbot.reporters.words import ThrottledClientFactory from buildbot.reporters.words import dangerousCommand from buildbot.util import service from buildbot.util import ssl class UsageError(ValueError): # pylint: disable=useless-super-delegation def __init__(self, string="Invalid usage", *more): # This is not useless as we change the default value of an argument. # This bug is reported as "fixed" but apparently, it is not. # https://github.com/PyCQA/pylint/issues/1085 # (Maybe there is a problem with builtin exceptions). 
super().__init__(string, *more) _irc_colors = ( 'WHITE', 'BLACK', 'NAVY_BLUE', 'GREEN', 'RED', 'BROWN', 'PURPLE', 'OLIVE', 'YELLOW', 'LIME_GREEN', 'TEAL', 'AQUA_LIGHT', 'ROYAL_BLUE', 'PINK', 'DARK_GRAY', 'LIGHT_GRAY' ) class IRCChannel(Channel): def __init__(self, bot, channel): super().__init__(bot, channel) self.muted = False def send(self, message, **kwargs): if self.id[0] in irc.CHANNEL_PREFIXES: send = self.bot.groupSend else: send = self.bot.msg if not self.muted: send(self.id, message) def act(self, action): if self.muted: return self.bot.groupDescribe(self.id, action) class IRCContact(Contact): def __init__(self, user, channel=None): if channel is None: channel = user super().__init__(user, channel) def act(self, action): return self.channel.act(action) def handleAction(self, action): # this is sent when somebody performs an action that mentions the # buildbot (like '/me kicks buildbot'). 'self.user' is the name/nick/id of # the person who performed the action, so if their action provokes a # response, they can be named. This is 100% silly. 
if not action.endswith("s " + self.bot.nickname): return words = action.split() verb = words[-2] if verb == "kicks": response = "{} back".format(verb) elif verb == "threatens": response = "hosts a red wedding for {}".format(self.user_id) else: response = "{} {} too".format(verb, self.user_id) self.act(response) @defer.inlineCallbacks def op_required(self, command): if self.is_private_chat or self.user_id in self.bot.authz.get(command.upper(), ()): return False ops = yield self.bot.getChannelOps(self.channel.id) return self.user_id not in ops # IRC only commands @dangerousCommand def command_JOIN(self, args, **kwargs): """join a channel""" args = self.splitArgs(args) for channel in args: self.bot.join(channel) command_JOIN.usage = "join #channel - join a channel #channel" @dangerousCommand def command_LEAVE(self, args, **kwargs): """join a channel""" args = self.splitArgs(args) for channel in args: self.bot.leave(channel) command_LEAVE.usage = "leave #channel - leave a channel #channel" @defer.inlineCallbacks def command_MUTE(self, args, **kwargs): if (yield self.op_required('mute')): yield self.send("Only channel operators or explicitly allowed users " "can mute me here, {}... Blah, blah, blah...".format(self.user_id)) return # The order of these is important! ;) yield self.send("Shutting up for now.") self.channel.muted = True command_MUTE.usage = "mute - suppress all messages until a corresponding 'unmute' is issued" @defer.inlineCallbacks def command_UNMUTE(self, args, **kwargs): if self.channel.muted: if (yield self.op_required('mute')): return # The order of these is important! 
;) self.channel.muted = False yield self.send("I'm baaaaaaaaaaack!") else: yield self.send( "No one had told me to be quiet, but it's the thought that counts, right?") command_UNMUTE.usage = "unmute - disable a previous 'mute'" @defer.inlineCallbacks @Contact.overrideCommand def command_NOTIFY(self, args, **kwargs): if not self.is_private_chat: argv = self.splitArgs(args) if argv and argv[0] in ('on', 'off') and \ (yield self.op_required('notify')): yield self.send(("Only channel operators can change notified events for this " "channel. And you, {}, are neither!").format(self.user_id)) return super().command_NOTIFY(args, **kwargs) def command_DANCE(self, args, **kwargs): """dance, dance academy...""" reactor.callLater(1.0, self.send, "<(^.^<)") reactor.callLater(2.0, self.send, "<(^.^)>") reactor.callLater(3.0, self.send, "(>^.^)>") reactor.callLater(3.5, self.send, "(7^.^)7") reactor.callLater(5.0, self.send, "(>^.^<)") def command_DESTROY(self, args): if self.bot.nickname not in args: self.act("readies phasers") else: self.send("Better destroy yourself, {}!".format(self.user_id)) def command_HUSTLE(self, args): self.act("does the hustle") command_HUSTLE.usage = "dondon on #qutebrowser: qutebrowser-bb needs to learn to do the hustle" class IrcStatusBot(StatusBot, irc.IRCClient): """I represent the buildbot to an IRC server. 
""" contactClass = IRCContact channelClass = IRCChannel def __init__(self, nickname, password, join_channels, pm_to_nicks, noticeOnChannel, *args, useColors=False, useSASL=False, **kwargs): super().__init__(*args, **kwargs) self.nickname = nickname self.join_channels = join_channels self.pm_to_nicks = pm_to_nicks self.password = password self.hasQuit = 0 self.noticeOnChannel = noticeOnChannel self.useColors = useColors self.useSASL = useSASL self._keepAliveCall = task.LoopingCall( lambda: self.ping(self.nickname)) self._channel_names = {} def register(self, nickname, hostname="foo", servername="bar"): if not self.useSASL: super().register(nickname, hostname, servername) return if self.password is not None: self.sendLine("CAP REQ :sasl") self.setNick(nickname) if self.username is None: self.username = nickname self.sendLine( "USER {} {} {} :{}".format( self.username, hostname, servername, self.realname ) ) if self.password is not None: self.sendLine("AUTHENTICATE PLAIN") def irc_AUTHENTICATE(self, prefix, params): nick = self.nickname.encode() passwd = self.password.encode() code = base64.b64encode(nick + b'\0' + nick + b'\0' + passwd) self.sendLine("AUTHENTICATE " + code.decode()) self.sendLine("CAP END") def connectionMade(self): super().connectionMade() self._keepAliveCall.start(60) def connectionLost(self, reason): if self._keepAliveCall.running: self._keepAliveCall.stop() super().connectionLost(reason) # The following methods are called when we write something. 
def groupSend(self, channel, message): if self.noticeOnChannel: self.notice(channel, message) else: self.msg(channel, message) def groupDescribe(self, channel, action): self.describe(channel, action) def getContact(self, user, channel=None): # nicknames and channel names are case insensitive user = user.lower() if channel is None: channel = user channel = channel.lower() return super().getContact(user, channel) # the following irc.IRCClient methods are called when we have input def privmsg(self, user, channel, message): user = user.split('!', 1)[0] # rest is ~user@hostname # channel is '#twisted' or 'buildbot' (for private messages) if channel == self.nickname: # private message contact = self.getContact(user=user) d = contact.handleMessage(message) return d # else it's a broadcast message, maybe for us, maybe not. 'channel' # is '#twisted' or the like. contact = self.getContact(user=user, channel=channel) if message.startswith("{}:".format(self.nickname)) or \ message.startswith("{},".format(self.nickname)): message = message[len("{}:".format(self.nickname)):] d = contact.handleMessage(message) return d return None def action(self, user, channel, data): user = user.split('!', 1)[0] # rest is ~user@hostname # somebody did an action (/me actions) in the broadcast channel contact = self.getContact(user=user, channel=channel) if self.nickname in data: contact.handleAction(data) def signedOn(self): if self.password: self.msg("Nickserv", "IDENTIFY " + self.password) for c in self.join_channels: if isinstance(c, dict): channel = c.get('channel', None) password = c.get('password', None) else: channel = c password = None self.join(channel=channel, key=password) for c in self.pm_to_nicks: contact = self.getContact(c) contact.channel.add_notification_events(self.notify_events) self.loadState() def getNames(self, channel): channel = channel.lower() d = defer.Deferred() callbacks = self._channel_names.setdefault(channel, ([], []))[0] callbacks.append(d) self.sendLine("NAMES 
{}".format(channel)) return d def irc_RPL_NAMREPLY(self, prefix, params): channel = params[2].lower() if channel not in self._channel_names: return nicks = params[3].split(' ') nicklist = self._channel_names[channel][1] nicklist += nicks def irc_RPL_ENDOFNAMES(self, prefix, params): channel = params[1].lower() try: callbacks, namelist = self._channel_names.pop(channel) except KeyError: return for cb in callbacks: cb.callback(namelist) @defer.inlineCallbacks def getChannelOps(self, channel): names = yield self.getNames(channel) return [n[1:] for n in names if n[0] in '@&~%'] def joined(self, channel): self.log("Joined {}".format(channel)) # trigger contact constructor, which in turn subscribes to notify events channel = self.getChannel(channel=channel) channel.add_notification_events(self.notify_events) def left(self, channel): self.log("Left {}".format(channel)) def kickedFrom(self, channel, kicker, message): self.log("I have been kicked from {} by {}: {}".format(channel, kicker, message)) def userLeft(self, user, channel): if user: user = user.lower() if channel: channel = channel.lower() if (channel, user) in self.contacts: del self.contacts[(channel, user)] def userKicked(self, kickee, channel, kicker, message): self.userLeft(kickee, channel) def userQuit(self, user, quitMessage=None): if user: user = user.lower() for c, u in list(self.contacts): if u == user: del self.contacts[(c, u)] results_colors = { SUCCESS: 'GREEN', WARNINGS: 'YELLOW', FAILURE: 'RED', SKIPPED: 'ROYAL_BLUE', EXCEPTION: 'PURPLE', RETRY: 'AQUA_LIGHT', CANCELLED: 'PINK', } short_results_descriptions = { SUCCESS: ", Success", WARNINGS: ", Warnings", FAILURE: ", Failure", SKIPPED: ", Skipped", EXCEPTION: ", Exception", RETRY: ", Retry", CANCELLED: ", Cancelled", } def format_build_status(self, build, short=False): br = build['results'] if short: text = self.short_results_descriptions[br] else: text = self.results_descriptions[br] if self.useColors: return "\x03{:d}{}\x0f".format( 
_irc_colors.index(self.results_colors[br]), text) else: return text class IrcStatusFactory(ThrottledClientFactory): protocol = IrcStatusBot shuttingDown = False p = None def __init__(self, nickname, password, join_channels, pm_to_nicks, authz, tags, notify_events, noticeOnChannel=False, useRevisions=False, showBlameList=False, useSASL=False, parent=None, lostDelay=None, failedDelay=None, useColors=True): super().__init__(lostDelay=lostDelay, failedDelay=failedDelay) self.nickname = nickname self.password = password self.join_channels = join_channels self.pm_to_nicks = pm_to_nicks self.tags = tags self.authz = authz self.parent = parent self.notify_events = notify_events self.noticeOnChannel = noticeOnChannel self.useRevisions = useRevisions self.showBlameList = showBlameList self.useColors = useColors self.useSASL = useSASL def __getstate__(self): d = self.__dict__.copy() del d['p'] return d def shutdown(self): self.shuttingDown = True if self.p: self.p.quit("buildmaster reconfigured: bot disconnecting") def buildProtocol(self, address): if self.p: self.p.disownServiceParent() p = self.protocol(self.nickname, self.password, self.join_channels, self.pm_to_nicks, self.noticeOnChannel, self.authz, self.tags, self.notify_events, useColors=self.useColors, useSASL=self.useSASL, useRevisions=self.useRevisions, showBlameList=self.showBlameList) p.setServiceParent(self.parent) p.factory = self self.p = p return p # TODO: I think a shutdown that occurs while the connection is being # established will make this explode def clientConnectionLost(self, connector, reason): if self.shuttingDown: log.msg("not scheduling reconnection attempt") return super().clientConnectionLost(connector, reason) def clientConnectionFailed(self, connector, reason): if self.shuttingDown: log.msg("not scheduling reconnection attempt") return super().clientConnectionFailed(connector, reason) class IRC(service.BuildbotService): name = "IRC" in_test_harness = False f = None compare_attrs = ("host", 
"port", "nick", "password", "authz", "channels", "pm_to_nicks", "useSSL", "useSASL", "useRevisions", "tags", "useColors", "allowForce", "allowShutdown", "lostDelay", "failedDelay") secrets = ['password'] def checkConfig(self, host, nick, channels, pm_to_nicks=None, port=6667, allowForce=None, tags=None, password=None, notify_events=None, showBlameList=True, useRevisions=False, useSSL=False, useSASL=False, lostDelay=None, failedDelay=None, useColors=True, allowShutdown=None, noticeOnChannel=False, authz=None, **kwargs ): deprecated_params = list(kwargs) if deprecated_params: config.error("{} are deprecated".format(",".join(deprecated_params))) # deprecated if allowForce is not None: if authz is not None: config.error("If you specify authz, you must not use allowForce anymore") if allowForce not in (True, False): config.error("allowForce must be boolean, not %r" % (allowForce,)) log.msg('IRC: allowForce is deprecated: use authz instead') if allowShutdown is not None: if authz is not None: config.error("If you specify authz, you must not use allowShutdown anymore") if allowShutdown not in (True, False): config.error("allowShutdown must be boolean, not %r" % (allowShutdown,)) log.msg('IRC: allowShutdown is deprecated: use authz instead') # ### if noticeOnChannel not in (True, False): config.error("noticeOnChannel must be boolean, not %r" % (noticeOnChannel,)) if useSSL: # SSL client needs a ClientContextFactory for some SSL mumbo-jumbo ssl.ensureHasSSL(self.__class__.__name__) if authz is not None: for acl in authz.values(): if not isinstance(acl, (list, tuple, bool)): config.error( "authz values must be bool or a list of nicks") def reconfigService(self, host, nick, channels, pm_to_nicks=None, port=6667, allowForce=None, tags=None, password=None, notify_events=None, showBlameList=True, useRevisions=False, useSSL=False, useSASL=False, lostDelay=None, failedDelay=None, useColors=True, allowShutdown=None, noticeOnChannel=False, authz=None, **kwargs ): # need to stash 
these so we can detect changes later self.host = host self.port = port self.nick = nick self.join_channels = channels if pm_to_nicks is None: pm_to_nicks = [] self.pm_to_nicks = pm_to_nicks self.password = password if authz is None: self.authz = {} else: self.authz = authz self.useRevisions = useRevisions self.tags = tags if notify_events is None: notify_events = {} self.notify_events = notify_events self.noticeOnChannel = noticeOnChannel # deprecated... if allowForce is not None: self.authz[('force', 'stop')] = allowForce if allowShutdown is not None: self.authz[('shutdown')] = allowShutdown # ### # This function is only called in case of reconfig with changes # We don't try to be smart here. Just restart the bot if config has # changed. if self.f is not None: self.f.shutdown() self.f = IrcStatusFactory(self.nick, self.password, self.join_channels, self.pm_to_nicks, self.authz, self.tags, self.notify_events, parent=self, noticeOnChannel=noticeOnChannel, useRevisions=useRevisions, useSASL=useSASL, showBlameList=showBlameList, lostDelay=lostDelay, failedDelay=failedDelay, useColors=useColors) if useSSL: cf = ssl.ClientContextFactory() c = internet.SSLClient(self.host, self.port, self.f, cf) else: c = internet.TCPClient(self.host, self.port, self.f) c.setServiceParent(self) buildbot-3.4.0/master/buildbot/reporters/mail.py000066400000000000000000000315201413250514000217360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from email import charset from email import encoders from email.header import Header from email.message import Message from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from email.utils import formatdate from email.utils import parseaddr from io import BytesIO from twisted.internet import defer from twisted.internet import reactor from twisted.python import log as twlog from zope.interface import implementer from buildbot import config from buildbot import interfaces from buildbot import util from buildbot.process.properties import Properties from buildbot.reporters.base import ENCODING from buildbot.reporters.base import ReporterBase from buildbot.reporters.generators.build import BuildStatusGenerator from buildbot.reporters.generators.worker import WorkerMissingGenerator from buildbot.util import ssl from buildbot.util import unicode2bytes from .utils import merge_reports_prop from .utils import merge_reports_prop_take_first # this incantation teaches email to output utf-8 using 7- or 8-bit encoding, # although it has no effect before python-2.7. # needs to match notifier.ENCODING charset.add_charset(ENCODING, charset.SHORTEST, None, ENCODING) try: from twisted.mail.smtp import ESMTPSenderFactory [ESMTPSenderFactory] # for pyflakes except ImportError: ESMTPSenderFactory = None # Email parsing can be complex. We try to take a very liberal # approach. The local part of an email address matches ANY non # whitespace character. Rather allow a malformed email address than # croaking on a valid (the matching of domains should be correct # though; requiring the domain to not be a top level domain). 
With # these regular expressions, we can match the following: # # full.name@example.net # Full Name # VALID_EMAIL_ADDR = r"(?:\S+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)+\.?)" VALID_EMAIL = re.compile(r"^(?:{0}|(.+\s+)?<{0}>\s*)$".format(VALID_EMAIL_ADDR)) VALID_EMAIL_ADDR = re.compile(VALID_EMAIL_ADDR) @implementer(interfaces.IEmailLookup) class Domain(util.ComparableMixin): compare_attrs = ("domain") def __init__(self, domain): assert "@" not in domain self.domain = domain def getAddress(self, name): """If name is already an email address, pass it through.""" if '@' in name: return name return name + "@" + self.domain @implementer(interfaces.IEmailSender) class MailNotifier(ReporterBase): secrets = ["smtpUser", "smtpPassword"] def checkConfig(self, fromaddr, relayhost="localhost", lookup=None, extraRecipients=None, sendToInterestedUsers=True, extraHeaders=None, useTls=False, useSmtps=False, smtpUser=None, smtpPassword=None, smtpPort=25, dumpMailsToLog=False, generators=None): if ESMTPSenderFactory is None: config.error("twisted-mail is not installed - cannot " "send mail") if generators is None: generators = self._create_default_generators() super().checkConfig(generators=generators) if extraRecipients is None: extraRecipients = [] if not isinstance(extraRecipients, (list, tuple)): config.error("extraRecipients must be a list or tuple") else: for r in extraRecipients: if not isinstance(r, str) or not VALID_EMAIL.search(r): config.error( "extra recipient {} is not a valid email".format(r)) if lookup is not None: if not isinstance(lookup, str): assert interfaces.IEmailLookup.providedBy(lookup) if extraHeaders: if not isinstance(extraHeaders, dict): config.error("extraHeaders must be a dictionary") if useSmtps: ssl.ensureHasSSL(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, fromaddr, relayhost="localhost", lookup=None, extraRecipients=None, sendToInterestedUsers=True, extraHeaders=None, useTls=False, useSmtps=False, smtpUser=None, 
smtpPassword=None, smtpPort=25, dumpMailsToLog=False, generators=None): if generators is None: generators = self._create_default_generators() yield super().reconfigService(generators=generators) if extraRecipients is None: extraRecipients = [] self.extraRecipients = extraRecipients self.sendToInterestedUsers = sendToInterestedUsers self.fromaddr = fromaddr self.relayhost = relayhost if lookup is not None: if isinstance(lookup, str): lookup = Domain(str(lookup)) self.lookup = lookup self.extraHeaders = extraHeaders self.useTls = useTls self.useSmtps = useSmtps self.smtpUser = smtpUser self.smtpPassword = smtpPassword self.smtpPort = smtpPort self.dumpMailsToLog = dumpMailsToLog def _create_default_generators(self): return [ BuildStatusGenerator(add_patch=True), WorkerMissingGenerator(workers='all'), ] def patch_to_attachment(self, patch, index): # patches are specifically converted to unicode before entering the db a = MIMEText(patch['body'].encode(ENCODING), _charset=ENCODING) # convert to base64 to conform with RFC 5322 2.1.1 del a['Content-Transfer-Encoding'] encoders.encode_base64(a) a.add_header('Content-Disposition', "attachment", filename="source patch " + str(index)) return a @defer.inlineCallbacks def createEmail(self, msgdict, title, results, builds=None, patches=None, logs=None): text = msgdict['body'] type = msgdict['type'] subject = msgdict['subject'] assert '\n' not in subject, \ "Subject cannot contain newlines" assert type in ('plain', 'html'), \ "'{}' message type must be 'plain' or 'html'.".format(type) if patches or logs: m = MIMEMultipart() txt = MIMEText(text, type, ENCODING) m.attach(txt) else: m = Message() m.set_payload(text, ENCODING) m.set_type("text/{}".format(type)) m['Date'] = formatdate(localtime=True) m['Subject'] = subject m['From'] = self.fromaddr # m['To'] is added later if patches: for (i, patch) in enumerate(patches): a = self.patch_to_attachment(patch, i) m.attach(a) if logs: for log in logs: # Use distinct filenames for the 
e-mail summary name = "{}.{}".format(log['stepname'], log['name']) if len(builds) > 1: filename = "{}.{}".format(log['buildername'], name) else: filename = name text = log['content']['content'] a = MIMEText(text.encode(ENCODING), _charset=ENCODING) # convert to base64 to conform with RFC 5322 2.1.1 del a['Content-Transfer-Encoding'] encoders.encode_base64(a) a.add_header('Content-Disposition', "attachment", filename=filename) m.attach(a) # @todo: is there a better way to do this? # Add any extra headers that were requested, doing WithProperties # interpolation if only one build was given if self.extraHeaders: extraHeaders = self.extraHeaders if builds is not None and len(builds) == 1: props = Properties.fromDict(builds[0]['properties']) props.master = self.master extraHeaders = yield props.render(extraHeaders) for k, v in extraHeaders.items(): if k in m: twlog.msg("Warning: Got header " + k + " in self.extraHeaders " "but it already exists in the Message - " "not adding it.") m[k] = v return m @defer.inlineCallbacks def sendMessage(self, reports): body = merge_reports_prop(reports, 'body') subject = merge_reports_prop_take_first(reports, 'subject') type = merge_reports_prop_take_first(reports, 'type') results = merge_reports_prop(reports, 'results') builds = merge_reports_prop(reports, 'builds') users = merge_reports_prop(reports, 'users') patches = merge_reports_prop(reports, 'patches') logs = merge_reports_prop(reports, 'logs') worker = merge_reports_prop_take_first(reports, 'worker') body = unicode2bytes(body) msgdict = {'body': body, 'subject': subject, 'type': type} # ensure message body ends with double carriage return if not body.endswith(b"\n\n"): msgdict['body'] = body + b'\n\n' m = yield self.createEmail(msgdict, self.master.config.title, results, builds, patches, logs) # now, who is this message going to? 
if worker is None: recipients = yield self.findInterrestedUsersEmails(list(users)) all_recipients = self.processRecipients(recipients, m) else: all_recipients = list(users) yield self.sendMail(m, all_recipients) @defer.inlineCallbacks def findInterrestedUsersEmails(self, users): recipients = set() if self.sendToInterestedUsers: if self.lookup: dl = [] for u in users: dl.append(defer.maybeDeferred(self.lookup.getAddress, u)) users = yield defer.gatherResults(dl) for r in users: if r is None: # getAddress didn't like this address continue # Git can give emails like 'User' @foo.com so check # for two @ and chop the last if r.count('@') > 1: r = r[:r.rindex('@')] if VALID_EMAIL.search(r): recipients.add(r) else: twlog.msg("INVALID EMAIL: {}".format(r)) return recipients def formatAddress(self, addr): r = parseaddr(addr) if not r[0]: return r[1] return "\"{}\" <{}>".format(Header(r[0], 'utf-8').encode(), r[1]) def processRecipients(self, blamelist, m): to_recipients = set(blamelist) cc_recipients = set() # If we're sending to interested users put the extras in the # CC list so they can tell if they are also interested in the # change: if self.sendToInterestedUsers and to_recipients: cc_recipients.update(self.extraRecipients) else: to_recipients.update(self.extraRecipients) m['To'] = ", ".join([self.formatAddress(addr) for addr in sorted(to_recipients)]) if cc_recipients: m['CC'] = ", ".join([self.formatAddress(addr) for addr in sorted(cc_recipients)]) return list(to_recipients | cc_recipients) def sendMail(self, m, recipients): s = m.as_string() twlog.msg("sending mail ({} bytes) to".format(len(s)), recipients) if self.dumpMailsToLog: # pragma: no cover twlog.msg("mail data:\n{0}".format(s)) result = defer.Deferred() useAuth = self.smtpUser and self.smtpPassword s = unicode2bytes(s) recipients = [parseaddr(r)[1] for r in recipients] sender_factory = ESMTPSenderFactory( unicode2bytes(self.smtpUser), unicode2bytes(self.smtpPassword), parseaddr(self.fromaddr)[1], 
recipients, BytesIO(s), result, requireTransportSecurity=self.useTls, requireAuthentication=useAuth) if self.useSmtps: reactor.connectSSL(self.relayhost, self.smtpPort, sender_factory, ssl.ClientContextFactory()) else: reactor.connectTCP(self.relayhost, self.smtpPort, sender_factory) return result buildbot-3.4.0/master/buildbot/reporters/message.py000066400000000000000000000300551413250514000224420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
def get_detected_status_text(mode, results, previous_results):
    """Return a short phrase describing ``results`` for a notification.

    When ``mode`` asks for change/problem detection and a previous
    build's results are available, transitions are reported as
    "new failure" / "restored build" instead of the plain status text.
    """
    has_previous = previous_results is not None

    if results == FAILURE:
        watches_transitions = 'change' in mode or 'problem' in mode
        if watches_transitions and has_previous and previous_results != FAILURE:
            return "new failure"
        return "failed build"

    if results == WARNINGS:
        return "problem in the build"

    if results == SUCCESS:
        if "change" in mode and has_previous and previous_results != results:
            return "restored build"
        return "passing build"

    if results == EXCEPTION:
        return "build exception"

    # any other status (retry, cancelled, ...) uses its generic name
    return "{} build".format(statusToString(results))
def get_message_source_stamp_text(source_stamps):
    """Build one "Build Source Stamp..." line per source stamp.

    Each line names the codebase (if any), the branch (if any), the
    revision (or HEAD), and whether a patch was applied.
    """
    lines = []
    for ss in source_stamps:
        parts = []
        if ss['branch']:
            parts.append("[branch {}] ".format(ss['branch']))
        parts.append(str(ss['revision']) if ss['revision'] else "HEAD")
        if ss['patch'] is not None:
            parts.append(" (plus patch)")
        source = "".join(parts)

        discriminator = " '{}'".format(ss['codebase']) if ss['codebase'] else ""
        lines.append("Build Source Stamp{}: {}\n".format(discriminator, source))
    return "".join(lines)
__init__(self, ctx=None, want_properties=True, wantProperties=None, want_steps=False, wantSteps=None, wantLogs=None, want_logs=False, want_logs_content=False): if ctx is None: ctx = {} self.context = ctx if wantProperties is not None: warn_deprecated('3.4.0', f'{self.__class__.__name__}: wantProperties has been ' 'deprecated, use want_properties') self.want_properties = wantProperties else: self.want_properties = want_properties if wantSteps is not None: warn_deprecated('3.4.0', f'{self.__class__.__name__}: wantSteps has been deprecated, ' + 'use want_steps') self.want_steps = wantSteps else: self.want_steps = want_steps if wantLogs is not None: warn_deprecated('3.4.0', f'{self.__class__.__name__}: wantLogs has been deprecated, ' + 'use want_logs and want_logs_content') else: wantLogs = False self.want_logs = want_logs or wantLogs self.want_logs_content = want_logs_content or wantLogs def buildAdditionalContext(self, master, ctx): pass @defer.inlineCallbacks def render_message_dict(self, master, context): """Generate a buildbot reporter message and return a dictionary containing the message body, type and subject.""" ''' This is an informal description of what message dictionaries are expected to be produced. It is an internal API and expected to change even within bugfix releases, if needed. The message dictionary contains the 'body', 'type' and 'subject' keys: - 'subject' is a string that defines a subject of the message. It's not necessarily used on all reporters. It may be None. - 'type' must be 'plain', 'html' or 'json'. - 'body' is the content of the message. It may be None. The type of the data depends on the value of the 'type' parameter: - 'plain': Must be a string - 'html': Must be a string - 'json': Must be a non-encoded jsonnable value. The root element must be either of dictionary, list or string. This must not change during all invocations of a particular instance of the formatter. In case of a report being created for multiple builds (e.g. 
in the case of a buildset), the values returned by message formatter are concatenated. If this is not possible (e.g. if the body is a dictionary), any subsequent messages are ignored. ''' yield self.buildAdditionalContext(master, context) context.update(self.context) return { 'body': (yield self.render_message_body(context)), 'type': self.template_type, 'subject': (yield self.render_message_subject(context)) } def render_message_body(self, context): return None def render_message_subject(self, context): return None def format_message_for_build(self, master, build, **kwargs): # Known kwargs keys: mode, users raise NotImplementedError class MessageFormatterEmpty(MessageFormatterBase): def format_message_for_build(self, master, build, **kwargs): return { 'body': None, 'type': 'plain', 'subject': None } class MessageFormatterFunction(MessageFormatterBase): def __init__(self, function, template_type, **kwargs): super().__init__(**kwargs) self.template_type = template_type self._function = function @defer.inlineCallbacks def format_message_for_build(self, master, build, **kwargs): msgdict = yield self.render_message_dict(master, {'build': build}) return msgdict def render_message_body(self, context): return self._function(context) def render_message_subject(self, context): return None class MessageFormatterRenderable(MessageFormatterBase): template_type = 'plain' def __init__(self, template, subject=None): super().__init__() self.template = template self.subject = subject @defer.inlineCallbacks def format_message_for_build(self, master, build, **kwargs): msgdict = yield self.render_message_dict(master, {'build': build, 'master': master}) return msgdict @defer.inlineCallbacks def render_message_body(self, context): props = Properties.fromDict(context['build']['properties']) props.master = context['master'] body = yield props.render(self.template) return body @defer.inlineCallbacks def render_message_subject(self, context): props = 
class MessageFormatterBaseJinja(MessageFormatterBase):
    """Message formatter whose body (and optional subject) are rendered
    from Jinja2 templates against the build/worker context."""

    compare_attrs = ['body_template', 'subject_template', 'template_type']
    # class-level default; replaced by an instance attribute only when a
    # subject template string is supplied to __init__
    subject_template = None
    template_type = 'plain'

    def __init__(self, template=None, subject=None, template_type=None, **kwargs):
        # fall back to the module-level default body template
        if template is None:
            template = default_body_template
        self.body_template = jinja2.Template(template)
        if subject is not None:
            self.subject_template = jinja2.Template(subject)
        # only override the class-level 'plain' when explicitly requested
        if template_type is not None:
            self.template_type = template_type
        super().__init__(**kwargs)

    def buildAdditionalContext(self, master, ctx):
        # hook for subclasses; the base implementation adds nothing
        pass

    def render_message_body(self, context):
        # render the body template with the prepared context dict
        return self.body_template.render(context)

    def render_message_subject(self, context):
        # a None subject_template means "no subject"
        if self.subject_template is None:
            return None
        return self.subject_template.render(context)
{% endif %} Sincerely, -The Buildbot ''' # noqa pylint: disable=line-too-long class MessageFormatterMissingWorker(MessageFormatterBaseJinja): template_filename = 'missing_mail.txt' def __init__(self, template=None, **kwargs): if template is None: template = default_missing_template super().__init__(template=template, **kwargs) @defer.inlineCallbacks def formatMessageForMissingWorker(self, master, worker): ctx = create_context_for_worker(master, worker) msgdict = yield self.render_message_dict(master, ctx) return msgdict buildbot-3.4.0/master/buildbot/reporters/pushjet.py000066400000000000000000000070531413250514000225020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# Default body template for build-status notifications, rendered by
# MessageFormatter.  BUGFIX: the two string literals are concatenated, so
# a space is needed before 'of' — without it the rendered message read
# "...detected a passing buildof builder...".
DEFAULT_MSG_TEMPLATE = \
    ('The Buildbot has detected a {{ status_detected }} ' +
     'of {{ buildername }} while building {{ projects }} on {{ workername }}.')
merge_reports_prop_take_first(reports, 'subject') results = merge_reports_prop(reports, 'results') worker = merge_reports_prop_take_first(reports, 'worker') msg = { 'message': body, 'title': subject } level = self.levels.get(LEVELS[results] if worker is None else 'worker_missing') if level is not None: msg['level'] = level return self.sendNotification(msg) def sendNotification(self, params): twlog.msg("sending pushjet notification") params.update(dict(secret=self.secret)) return self._http.post('/message', data=params) buildbot-3.4.0/master/buildbot/reporters/pushover.py000066400000000000000000000106251413250514000226720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# Default body template for build-status notifications (rendered as HTML
# by the default generator).  BUGFIX: the two string literals are
# concatenated, so a space is needed before 'of' — without it the
# rendered message read "...detected a passing buildof builder...".
DEFAULT_MSG_TEMPLATE = \
    ('The Buildbot has detected a {{ status_detected }} ' +
     'of {{ buildername }} while building {{ projects }} on {{ workername }}.')
self.priorities = {} else: self.priorities = priorities if otherParams is None: self.otherParams = {} else: self.otherParams = otherParams self._http = yield httpclientservice.HTTPClientService.getService( self.master, 'https://api.pushover.net') def _create_default_generators(self): formatter = MessageFormatter(template_type='html', template=DEFAULT_MSG_TEMPLATE) return [BuildStatusGenerator(message_formatter=formatter)] def sendMessage(self, reports): body = merge_reports_prop(reports, 'body') subject = merge_reports_prop_take_first(reports, 'subject') type = merge_reports_prop_take_first(reports, 'type') results = merge_reports_prop(reports, 'results') worker = merge_reports_prop_take_first(reports, 'worker') msg = { 'message': body, 'title': subject } if type == 'html': msg['html'] = '1' try: priority_name = PRIORITIES[results] if worker is None else 'worker_missing' msg['priority'] = self.priorities[priority_name] except KeyError: pass return self.sendNotification(msg) def sendNotification(self, params): twlog.msg("sending pushover notification") params.update(dict(user=self.user_key, token=self.api_token)) params.update(self.otherParams) return self._http.post('/1/messages.json', params=params) buildbot-3.4.0/master/buildbot/reporters/telegram.py000066400000000000000000001154011413250514000226150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import io import json import random import shlex from twisted.internet import defer from twisted.internet import reactor from buildbot import config from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters.words import Channel from buildbot.reporters.words import Contact from buildbot.reporters.words import StatusBot from buildbot.reporters.words import UsageError from buildbot.reporters.words import WebhookResource from buildbot.schedulers.forcesched import CollectedValidationError from buildbot.schedulers.forcesched import ForceScheduler from buildbot.util import Notifier from buildbot.util import asyncSleep from buildbot.util import bytes2unicode from buildbot.util import epoch2datetime from buildbot.util import httpclientservice from buildbot.util import service from buildbot.util import unicode2bytes class TelegramChannel(Channel): def __init__(self, bot, channel): assert isinstance(channel, dict), "channel must be a dict provided by Telegram API" super().__init__(bot, channel['id']) self.chat_info = channel @defer.inlineCallbacks def list_notified_events(self): if self.notify_events: yield self.send("The following events are being notified:\n{}" .format("\n".join(sorted( "🔔 **{}**".format(n) for n in self.notify_events)))) else: yield self.send("🔕 No events are being notified.") def collect_fields(fields): for field in fields: if field['fullName']: yield field if 'fields' in field: yield from collect_fields(field['fields']) class TelegramContact(Contact): def __init__(self, user, 
    def describeUser(self):
        """Return a human-readable description of this Telegram user,
        e.g. ``Jane Doe (@jdoe) on 'build chat'``."""
        user = self.user_full_name
        # Telegram's 'username' field is optional; skip it if absent
        try:
            user += ' (@{})'.format(self.user_info['username'])
        except KeyError:
            pass
        if not self.is_private_chat:
            # group chats carry a title; append it for context
            chat_title = self.channel.chat_info.get('title')
            if chat_title:
                user += " on '{}'".format(chat_title)
        return user
🛑", ] def access_denied(self, *args, tmessage, **kwargs): self.send( random.choice(self.ACCESS_DENIED_MESSAGES), reply_to_message_id=tmessage['message_id']) def query_button(self, caption, payload): if isinstance(payload, str) and len(payload) < 64: return {'text': caption, 'callback_data': payload} key = hash(repr(payload)) while True: cached = self.bot.query_cache.get(key) if cached is None: self.bot.query_cache[key] = payload break if cached == payload: break key += 1 return {'text': caption, 'callback_data': key} @defer.inlineCallbacks def command_START(self, args, **kwargs): yield self.command_HELLO(args) reactor.callLater(0.2, self.command_HELP, '') def command_NAY(self, args, tmessage, **kwargs): """forget the current command""" replied_message = tmessage.get('reply_to_message') if replied_message: if 'reply_markup' in replied_message: self.bot.edit_keyboard(self.channel.id, replied_message['message_id']) if self.is_private_chat: self.send("Never mind...") else: self.send("Never mind, {}...".format(self.user_name)) command_NAY.usage = "nay - never mind the command we are currently discussing" @classmethod def describe_commands(cls): commands = cls.build_commands() response = [] for command in commands: if command == 'start': continue meth = getattr(cls, 'command_' + command.upper()) doc = getattr(meth, '__doc__', None) if not doc: doc = command response.append("{} - {}".format(command, doc)) return response @Contact.overrideCommand def command_COMMANDS(self, args, **kwargs): if args.lower() == 'botfather': response = self.describe_commands() if response: self.send('\n'.join(response)) else: return super().command_COMMANDS(args) return None @defer.inlineCallbacks def command_GETID(self, args, **kwargs): """get user and chat ID""" if self.is_private_chat: self.send("Your ID is {}.".format(self.user_id)) else: yield self.send("{}, your ID is {}.".format(self.user_name, self.user_id)) self.send("This {} ID is {}.".format(self.channel.chat_info.get('type', 
"group"), self.chat_id)) command_GETID.usage = "getid - get user and chat ID that can be put in the master " \ "configuration file" @defer.inlineCallbacks @Contact.overrideCommand def command_LIST(self, args, **kwargs): args = self.splitArgs(args) if not args: keyboard = [ [self.query_button("👷️ Builders", '/list builders'), self.query_button("👷️ (including old ones)", '/list all builders')], [self.query_button("⚙ Workers", '/list workers'), self.query_button("⚙ (including old ones)", '/list all workers')], [self.query_button("📄 Changes (last 10)", '/list changes')], ] self.send("What do you want to list?", reply_markup={'inline_keyboard': keyboard}) return all = False num = 10 try: num = int(args[0]) del args[0] except ValueError: if args[0] == 'all': all = True del args[0] except IndexError: pass if not args: raise UsageError("Try '" + self.bot.commandPrefix + "list [all|N] builders|workers|changes'.") if args[0] == 'builders': bdicts = yield self.bot.getAllBuilders() online_builderids = yield self.bot.getOnlineBuilders() response = ["I found the following **builders**:"] for bdict in bdicts: if bdict['builderid'] in online_builderids: response.append("`{}`".format(bdict['name'])) elif all: response.append("`{}` ❌".format(bdict['name'])) self.send('\n'.join(response)) elif args[0] == 'workers': workers = yield self.master.data.get(('workers',)) response = ["I found the following **workers**:"] for worker in workers: if worker['configured_on']: response.append("`{}`".format(worker['name'])) if not worker['connected_to']: response[-1] += " ⚠️" elif all: response.append("`{}` ❌".format(worker['name'])) self.send('\n'.join(response)) elif args[0] == 'changes': wait_message = yield self.send("⏳ Getting your changes...") if all: changes = yield self.master.data.get(('changes',)) self.bot.delete_message(self.channel.id, wait_message['message_id']) num = len(changes) if num > 50: keyboard = [ [self.query_button("‼ Yes, flood me with all of them!", '/list {} 
changes'.format(num))], [self.query_button("✅ No, just show last 50", '/list 50 changes')] ] self.send("I found {} changes. Do you really want me " "to list them all?".format(num), reply_markup={'inline_keyboard': keyboard}) return else: changes = yield self.master.data.get(('changes',), order=['-changeid'], limit=num) self.bot.delete_message(self.channel.id, wait_message['message_id']) response = ["I found the following recent **changes**:\n"] for change in reversed(changes): change['comment'] = change['comments'].split('\n')[0] change['date'] = epoch2datetime(change['when_timestamp']).strftime('%Y-%m-%d %H:%M') response.append( "[{comment}]({revlink})\n" "_Author_: {author}\n" "_Date_: {date}\n" "_Repository_: {repository}\n" "_Branch_: {branch}\n" "_Revision_: {revision}\n".format(**change)) self.send('\n'.join(response)) @defer.inlineCallbacks def get_running_builders(self): builders = [] for bdict in (yield self.bot.getAllBuilders()): if (yield self.bot.getRunningBuilds(bdict['builderid'])): builders.append(bdict['name']) return builders @defer.inlineCallbacks @Contact.overrideCommand def command_WATCH(self, args, **kwargs): if args: super().command_WATCH(args) else: builders = yield self.get_running_builders() if builders: keyboard = [ [self.query_button("🔎 " + b, '/watch {}'.format(b))] for b in builders ] self.send("Which builder do you want to watch?", reply_markup={'inline_keyboard': keyboard}) else: self.send("There are no currently running builds.") @Contact.overrideCommand def command_NOTIFY(self, args, tquery=None, **kwargs): if args: want_list = args == 'list' if want_list and tquery: self.bot.delete_message(self.chat_id, tquery['message']['message_id']) super().command_NOTIFY(args) if want_list or not tquery: return keyboard = [ [ self.query_button("{} {}".format(e.capitalize(), '🔔' if e in self.channel.notify_events else '🔕'), '/notify {}-quiet {}'.format( 'off' if e in self.channel.notify_events else 'on', e)) for e in evs ] for evs in 
(('started', 'finished'), ('success', 'failure'), ('warnings', 'exception'), ('problem', 'recovery'), ('worse', 'better'), ('cancelled', 'worker')) ] + [[self.query_button("Hide...", '/notify list')]] if tquery: self.bot.edit_keyboard(self.chat_id, tquery['message']['message_id'], keyboard) else: self.send("Here are available notifications and their current state. " "Click to turn them on/off.", reply_markup={'inline_keyboard': keyboard}) def ask_for_reply(self, prompt, greeting='Ok'): kwargs = {} if not self.is_private_chat: username = self.user_info.get('username', '') if username: if greeting: prompt = "{} @{}, now {}...".format(greeting, username, prompt) else: prompt = "@{}, now {}...".format(username, prompt) kwargs['reply_markup'] = { 'force_reply': True, 'selective': True } else: if greeting: prompt = "{}, now reply to this message and {}...".format(greeting, prompt) else: prompt = "Reply to this message and {}...".format(prompt) else: if greeting: prompt = "{}, now {}...".format(greeting, prompt) else: prompt = prompt[0].upper() + prompt[1:] + "..." # Telegram seems to have a bug, which causes reply request to pop sometimes again. # So we do not force reply to avoid it... 
# kwargs['reply_markup'] = { # 'force_reply': True # } self.send(prompt, **kwargs) @defer.inlineCallbacks @Contact.overrideCommand def command_STOP(self, args, **kwargs): argv = self.splitArgs(args) if len(argv) >= 3 or \ argv and argv[0] != 'build': super().command_STOP(args) return argv = argv[1:] if not argv: builders = yield self.get_running_builders() if builders: keyboard = [ [self.query_button("🚫 " + b, '/stop build {}'.format(b))] for b in builders ] self.send("Select builder to stop...", reply_markup={'inline_keyboard': keyboard}) else: # len(argv) == 1 self.template = '/stop ' + args + ' {}' self.ask_for_reply("give me the reason to stop build on `{}`".format(argv[0])) @Contact.overrideCommand def command_SHUTDOWN(self, args, **kwargs): if args: return super().command_SHUTDOWN(args) if self.master.botmaster.shuttingDown: keyboard = [[ self.query_button("🔙 Stop Shutdown", '/shutdown stop'), self.query_button("‼️ Shutdown Now", '/shutdown now') ]] text = "Buildbot is currently shutting down.\n\n" else: keyboard = [[ self.query_button("↘️ Begin Shutdown", '/shutdown start'), self.query_button("‼️ Shutdown Now", '/shutdown now') ]] text = "" self.send(text + "What do you want to do?", reply_markup={'inline_keyboard': keyboard}) return None @defer.inlineCallbacks def command_FORCE(self, args, tquery=None, partial=None, **kwargs): """force a build""" try: forceschedulers = yield self.master.data.get(('forceschedulers',)) except AttributeError: forceschedulers = None else: forceschedulers = dict((s['name'], s) for s in forceschedulers) if not forceschedulers: raise UsageError("no force schedulers configured for use by /force") argv = self.splitArgs(args) try: sched = argv[0] except IndexError: if len(forceschedulers) == 1: sched = next(iter(forceschedulers)) else: keyboard = [ [self.query_button(s['label'], '/force {}'.format(s['name']))] for s in forceschedulers.values() ] self.send("Which force scheduler do you want to activate?", 
reply_markup={'inline_keyboard': keyboard}) return else: if sched in forceschedulers: del argv[0] elif len(forceschedulers) == 1: sched = next(iter(forceschedulers)) else: raise UsageError("Try '/force' and follow the instructions" " (no force scheduler {})".format(sched)) scheduler = forceschedulers[sched] try: task = argv.pop(0) except IndexError: task = 'config' if tquery and task != 'config': self.bot.edit_keyboard(self.chat_id, tquery['message']['message_id']) if not argv: keyboard = [ [self.query_button(b, '/force {} {} {}'.format(sched, task, b))] for b in scheduler['builder_names'] ] self.send("Which builder do you want to start?", reply_markup={'inline_keyboard': keyboard}) return if task == 'ask': try: what = argv.pop(0) except IndexError as e: raise UsageError("Try '/force' and follow the instructions") from e else: what = None # silence PyCharm warnings bldr = argv.pop(0) if bldr not in scheduler['builder_names']: raise UsageError(("Try '/force' and follow the instructions " "(`{}` not configured for _{}_ scheduler)" ).format(bldr, scheduler['label'])) try: params = dict(arg.split('=', 1) for arg in argv) except ValueError as e: raise UsageError("Try '/force' and follow the instructions ({})".format(e)) from e all_fields = list(collect_fields(scheduler['all_fields'])) required_params = [f['fullName'] for f in all_fields if f['required'] and f['fullName'] not in ('username', 'owner')] missing_params = [p for p in required_params if p not in params] if task == 'build': # TODO This should probably be moved to the upper class, # however, it will change the force command totally try: if missing_params: # raise UsageError task = 'config' else: params.update(dict( (f['fullName'], f['default']) for f in all_fields if f['type'] == 'fixed' and f['fullName'] not in ('username', 'owner') )) builder = yield self.bot.getBuilder(buildername=bldr) for scheduler in self.master.allSchedulers(): if scheduler.name == sched and isinstance(scheduler, ForceScheduler): break 
else: raise ValueError("There is no force scheduler '{}'".format(sched)) try: yield scheduler.force(builderid=builder['builderid'], owner=self.describeUser(), **params) except CollectedValidationError as e: raise ValueError(e.errors) from e else: self.send("Force build successfully requested.") return except (IndexError, ValueError) as e: raise UsageError("Try '/force' and follow the instructions ({})".format(e)) from e if task == 'config': msg = "{}, you are about to start a new build on `{}`!"\ .format(self.user_full_name, bldr) keyboard = [] args = ' '.join(shlex.quote("{}={}".format(*p)) for p in params.items()) fields = [f for f in all_fields if f['type'] != 'fixed' and f['fullName'] not in ('username', 'owner')] if fields: msg += "\n\nThe current build parameters are:" for field in fields: if field['type'] == 'nested': msg += "\n{}".format(field['label']) else: field_name = field['fullName'] value = params.get(field_name, field['default']).strip() msg += "\n {} `{}`".format(field['label'], value) if value: key = "Change " else: key = "Set " key += field_name.replace('_', ' ').title() if field_name in missing_params: key = "⚠️ " + key msg += " ⚠️" keyboard.append( [self.query_button(key, '/force {} ask {} {} {}' .format(sched, field_name, bldr, args))] ) msg += "\n\nWhat do you want to do?" 
if missing_params: msg += " You must set values for all parameters marked with ⚠️" if not missing_params: keyboard.append( [self.query_button("🚀 Start Build", '/force {} build {} {}' .format(sched, bldr, args))], ) self.send(msg, reply_markup={'inline_keyboard': keyboard}) elif task == 'ask': prompt = "enter the new value for the " + what.replace('_', ' ').lower() args = ' '.join(shlex.quote("{}={}".format(*p)) for p in params.items() if p[0] != what) self.template = '/force {} config {} {} {}={{}}'.format(sched, bldr, args, what) self.ask_for_reply(prompt, '') else: raise UsageError("Try '/force' and follow the instructions") command_FORCE.usage = "force - Force a build" class TelegramStatusBot(StatusBot): contactClass = TelegramContact channelClass = TelegramChannel commandPrefix = '/' offline_string = "offline ❌" idle_string = "idle 💤" running_string = "running 🌀:" query_cache = {} @property def commandSuffix(self): if self.nickname is not None: return '@' + self.nickname return None def __init__(self, token, outgoing_http, chat_ids, *args, retry_delay=30, **kwargs): super().__init__(*args, **kwargs) self.http_client = outgoing_http self.retry_delay = retry_delay self.token = token self.chat_ids = chat_ids self.nickname = None @defer.inlineCallbacks def startService(self): yield super().startService() for c in self.chat_ids: channel = self.getChannel(c) channel.add_notification_events(self.notify_events) yield self.loadState() results_emoji = { SUCCESS: ' ✅', WARNINGS: ' ⚠️', FAILURE: '❗', EXCEPTION: ' ‼️', RETRY: ' 🔄', CANCELLED: ' 🚫', } def format_build_status(self, build, short=False): br = build['results'] if short: return self.results_emoji[br] else: return self.results_descriptions[br] + \ self.results_emoji[br] def getContact(self, user, channel): """ get a Contact instance for ``user`` on ``channel`` """ assert isinstance(user, dict), "user must be a dict provided by Telegram API" assert isinstance(channel, dict), "channel must be a dict provided by 
Telegram API" uid = user['id'] cid = channel['id'] try: contact = self.contacts[(cid, uid)] except KeyError: valid = self.isValidUser(uid) contact = self.contactClass(user=user, channel=self.getChannel(channel, valid)) if valid: self.contacts[(cid, uid)] = contact else: if isinstance(user, dict): contact.user_info.update(user) if isinstance(channel, dict): contact.channel.chat_info.update(channel) return contact def getChannel(self, channel, valid=True): if not isinstance(channel, dict): channel = {'id': channel} cid = channel['id'] try: return self.channels[cid] except KeyError: new_channel = self.channelClass(self, channel) if valid: self.channels[cid] = new_channel new_channel.setServiceParent(self) return new_channel @defer.inlineCallbacks def process_update(self, update): data = {} message = update.get('message') if message is None: query = update.get('callback_query') if query is None: self.log('No message in Telegram update object') return 'no message' original_message = query.get('message', {}) data = query.get('data', 0) try: data = self.query_cache[int(data)] except ValueError: text, data, notify = data, {}, None except KeyError: text, data, notify = None, {}, "Sorry, button is no longer valid!" 
if original_message: try: self.edit_keyboard( original_message['chat']['id'], original_message['message_id']) except KeyError: pass else: if isinstance(data, dict): data = data.copy() text = data.pop('command') try: notify = data.pop('notify') except KeyError: notify = None else: text, data, notify = data, {}, None data['tquery'] = query self.answer_query(query['id'], notify) message = { 'from': query['from'], 'chat': original_message.get('chat'), 'text': text, } if 'reply_to_message' in original_message: message['reply_to_message'] = original_message['reply_to_message'] chat = message['chat'] user = message.get('from') if user is None: self.log('No user in incoming message') return 'no user' text = message.get('text') if not text: return 'no text in the message' contact = self.getContact(user=user, channel=chat) data['tmessage'] = message template, contact.template = contact.template, None if text.startswith(self.commandPrefix): result = yield contact.handleMessage(text, **data) else: if template: text = template.format(shlex.quote(text)) result = yield contact.handleMessage(text, **data) return result @defer.inlineCallbacks def post(self, path, **kwargs): logme = True while True: try: res = yield self.http_client.post(path, **kwargs) except AssertionError as err: # just for tests raise err except Exception as err: msg = "ERROR: problem sending Telegram request {} (will try again): {}".format(path, err) if logme: self.log(msg) logme = False yield asyncSleep(self.retry_delay) else: ans = yield res.json() if not ans.get('ok'): self.log("ERROR: cannot send Telegram request {}: " "[{}] {}".format(path, res.code, ans.get('description'))) return None return ans.get('result', True) @defer.inlineCallbacks def set_nickname(self): res = yield self.post('/getMe') if res: self.nickname = res.get('username') @defer.inlineCallbacks def answer_query(self, query_id, notify=None): params = dict(callback_query_id=query_id) if notify is not None: params.update(dict(text=notify)) 
return (yield self.post('/answerCallbackQuery', json=params)) @defer.inlineCallbacks def send_message(self, chat, message, parse_mode='Markdown', reply_to_message_id=None, reply_markup=None, **kwargs): result = None message = message.strip() while message: params = dict(chat_id=chat) if parse_mode is not None: params['parse_mode'] = parse_mode if reply_to_message_id is not None: params['reply_to_message_id'] = reply_to_message_id reply_to_message_id = None # we only mark first message as a reply if len(message) <= 4096: params['text'], message = message, None else: n = message[:4096].rfind('\n') n = n + 1 if n != -1 else 4096 params['text'], message = message[:n].rstrip(), message[n:].lstrip() if not message and reply_markup is not None: params['reply_markup'] = reply_markup params.update(kwargs) result = yield self.post('/sendMessage', json=params) return result @defer.inlineCallbacks def edit_message(self, chat, msg, message, parse_mode='Markdown', **kwargs): params = dict(chat_id=chat, message_id=msg, text=message) if parse_mode is not None: params['parse_mode'] = parse_mode params.update(kwargs) return (yield self.post('/editMessageText', json=params)) @defer.inlineCallbacks def edit_keyboard(self, chat, msg, keyboard=None): params = dict(chat_id=chat, message_id=msg) if keyboard is not None: params['reply_markup'] = {'inline_keyboard': keyboard} return (yield self.post('/editMessageReplyMarkup', json=params)) @defer.inlineCallbacks def delete_message(self, chat, msg): params = dict(chat_id=chat, message_id=msg) return (yield self.post('/deleteMessage', json=params)) @defer.inlineCallbacks def send_sticker(self, chat, sticker, **kwargs): params = dict(chat_id=chat, sticker=sticker) params.update(kwargs) return (yield self.post('/sendSticker', json=params)) class TelegramWebhookBot(TelegramStatusBot): name = "TelegramWebhookBot" def __init__(self, token, *args, certificate=None, **kwargs): TelegramStatusBot.__init__(self, token, *args, **kwargs) 
self._certificate = certificate self.webhook = WebhookResource('telegram' + token) self.webhook.setServiceParent(self) @defer.inlineCallbacks def startService(self): yield super().startService() url = bytes2unicode(self.master.config.buildbotURL) if not url.endswith('/'): url += '/' yield self.set_webhook(url + self.webhook.path, self._certificate) def process_webhook(self, request): update = self.get_update(request) return self.process_update(update) def get_update(self, request): content = request.content.read() content = bytes2unicode(content) content_type = request.getHeader(b'Content-Type') content_type = bytes2unicode(content_type) if content_type is not None and \ content_type.startswith('application/json'): update = json.loads(content) else: raise ValueError('Unknown content type: {}' .format(content_type)) return update @defer.inlineCallbacks def set_webhook(self, url, certificate=None): if not certificate: self.log("Setting up webhook to: {}".format(url)) yield self.post('/setWebhook', json=dict(url=url)) else: self.log("Setting up webhook to: {} (custom certificate)".format(url)) certificate = io.BytesIO(unicode2bytes(certificate)) yield self.post('/setWebhook', data=dict(url=url), files=dict(certificate=certificate)) class TelegramPollingBot(TelegramStatusBot): name = "TelegramPollingBot" def __init__(self, *args, poll_timeout=120, **kwargs): super().__init__(*args, **kwargs) self._polling_finished_notifier = Notifier() self.poll_timeout = poll_timeout def startService(self): super().startService() self._polling_continue = True self.do_polling() @defer.inlineCallbacks def stopService(self): self._polling_continue = False yield self._polling_finished_notifier.wait() yield super().stopService() @defer.inlineCallbacks def do_polling(self): yield self.post('/deleteWebhook') offset = 0 kwargs = {'json': {'timeout': self.poll_timeout}} logme = True while self._polling_continue: if offset: kwargs['json']['offset'] = offset try: res = yield 
self.http_client.post('/getUpdates', timeout=self.poll_timeout + 2, **kwargs) ans = yield res.json() if not ans.get('ok'): raise ValueError("[{}] {}".format(res.code, ans.get('description'))) updates = ans.get('result') except AssertionError as err: raise err except Exception as err: msg = ("ERROR: cannot send Telegram request /getUpdates (will try again): {}" ).format(err) if logme: self.log(msg) logme = False yield asyncSleep(self.retry_delay) else: logme = True if updates: offset = max(update['update_id'] for update in updates) + 1 for update in updates: yield self.process_update(update) self._polling_finished_notifier.notify(None) class TelegramBot(service.BuildbotService): name = "TelegramBot" in_test_harness = False compare_attrs = ["bot_token", "chat_ids", "authz", "tags", "notify_events", "showBlameList", "useRevisions", "certificate", "useWebhook", "pollTimeout", "retryDelay"] secrets = ["bot_token"] def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.bot = None def _get_http(self, bot_token): base_url = "https://api.telegram.org/bot" + bot_token return httpclientservice.HTTPClientService.getService( self.master, base_url) def checkConfig(self, bot_token, chat_ids=None, authz=None, bot_username=None, tags=None, notify_events=None, showBlameList=True, useRevisions=False, useWebhook=False, certificate=None, pollTimeout=120, retryDelay=30): super().checkConfig(self.name) if authz is not None: for acl in authz.values(): if not isinstance(acl, (list, tuple, bool)): config.error("authz values must be bool or a list of user ids") if isinstance(certificate, io.TextIOBase): config.error("certificate file must be open in binary mode") @defer.inlineCallbacks def reconfigService(self, bot_token, chat_ids=None, authz=None, bot_username=None, tags=None, notify_events=None, showBlameList=True, useRevisions=False, useWebhook=False, certificate=None, pollTimeout=120, retryDelay=30): # need to stash these so we can detect changes later self.bot_token 
= bot_token if chat_ids is None: chat_ids = [] self.chat_ids = chat_ids self.authz = authz self.useRevisions = useRevisions self.tags = tags if notify_events is None: notify_events = set() self.notify_events = notify_events self.useWebhook = useWebhook self.certificate = certificate self.pollTimeout = pollTimeout self.retryDelay = retryDelay # This function is only called in case of reconfig with changes # We don't try to be smart here. Just restart the bot if config has # changed. http = yield self._get_http(bot_token) if self.bot is not None: self.removeService(self.bot) if not useWebhook: self.bot = TelegramPollingBot(bot_token, http, chat_ids, authz, tags=tags, notify_events=notify_events, useRevisions=useRevisions, showBlameList=showBlameList, poll_timeout=self.pollTimeout, retry_delay=self.retryDelay) else: self.bot = TelegramWebhookBot(bot_token, http, chat_ids, authz, tags=tags, notify_events=notify_events, useRevisions=useRevisions, showBlameList=showBlameList, retry_delay=self.retryDelay, certificate=self.certificate) if bot_username is not None: self.bot.nickname = bot_username else: yield self.bot.set_nickname() if self.bot.nickname is None: raise RuntimeError("No bot username specified and I cannot get it from Telegram") yield self.bot.setServiceParent(self) buildbot-3.4.0/master/buildbot/reporters/utils.py000066400000000000000000000236121413250514000221570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from collections import UserList from twisted.internet import defer from twisted.python import log from buildbot.data import resultspec from buildbot.process.properties import renderer from buildbot.process.results import RETRY from buildbot.util import flatten @defer.inlineCallbacks def getPreviousBuild(master, build): # naive n-1 algorithm. Still need to define what we should skip # SKIP builds? forced builds? rebuilds? # don't hesitate to contribute improvements to that algorithm n = build['number'] - 1 while n >= 0: prev = yield master.data.get(("builders", build['builderid'], "builds", n)) if prev and prev['results'] != RETRY: return prev n -= 1 return None @defer.inlineCallbacks def getDetailsForBuildset(master, bsid, want_properties=False, want_steps=False, want_previous_build=False, want_logs=False, want_logs_content=False): # Here we will do a bunch of data api calls on behalf of the reporters # We do try to make *some* calls in parallel with the help of gatherResults, but don't commit # to much in that. The idea is to do parallelism while keeping the code readable # and maintainable. 
# first, just get the buildset and all build requests for our buildset id dl = [master.data.get(("buildsets", bsid)), master.data.get(('buildrequests', ), filters=[resultspec.Filter('buildsetid', 'eq', [bsid])])] (buildset, breqs) = yield defer.gatherResults(dl) # next, get the bdictlist for each build request dl = [master.data.get(("buildrequests", breq['buildrequestid'], 'builds')) for breq in breqs] builds = yield defer.gatherResults(dl) builds = flatten(builds, types=(list, UserList)) if builds: yield getDetailsForBuilds(master, buildset, builds, want_properties=want_properties, want_steps=want_steps, want_previous_build=want_previous_build, want_logs=want_logs, want_logs_content=want_logs_content) return dict(buildset=buildset, builds=builds) @defer.inlineCallbacks def getDetailsForBuild(master, build, want_properties=False, want_steps=False, want_previous_build=False, want_logs=False, want_logs_content=False): buildrequest = yield master.data.get(("buildrequests", build['buildrequestid'])) buildset = yield master.data.get(("buildsets", buildrequest['buildsetid'])) build['buildrequest'], build['buildset'] = buildrequest, buildset parentbuild = None parentbuilder = None if buildset['parent_buildid']: parentbuild = yield master.data.get(("builds", buildset['parent_buildid'])) parentbuilder = yield master.data.get(("builders", parentbuild['builderid'])) build['parentbuild'] = parentbuild build['parentbuilder'] = parentbuilder ret = yield getDetailsForBuilds(master, buildset, [build], want_properties=want_properties, want_steps=want_steps, want_previous_build=want_previous_build, want_logs=want_logs, want_logs_content=want_logs_content) return ret @defer.inlineCallbacks def get_details_for_buildrequest(master, buildrequest, build): buildset = yield master.data.get(("buildsets", buildrequest['buildsetid'])) builder = yield master.data.get(("builders", buildrequest['builderid'])) build['buildrequest'] = buildrequest build['buildset'] = buildset build['builderid'] = 
buildrequest['builderid'] build['builder'] = builder build['url'] = getURLForBuildrequest(master, buildrequest['buildrequestid']) build['results'] = None build['complete'] = False @defer.inlineCallbacks def getDetailsForBuilds(master, buildset, builds, want_properties=False, want_steps=False, want_previous_build=False, want_logs=False, want_logs_content=False): builderids = {build['builderid'] for build in builds} builders = yield defer.gatherResults([master.data.get(("builders", _id)) for _id in builderids]) buildersbyid = {builder['builderid']: builder for builder in builders} if want_properties: buildproperties = yield defer.gatherResults( [master.data.get(("builds", build['buildid'], 'properties')) for build in builds]) else: # we still need a list for the big zip buildproperties = list(range(len(builds))) if want_previous_build: prev_builds = yield defer.gatherResults( [getPreviousBuild(master, build) for build in builds]) else: # we still need a list for the big zip prev_builds = list(range(len(builds))) if want_logs_content: want_logs = True if want_logs: want_steps = True if want_steps: # pylint: disable=too-many-nested-blocks buildsteps = yield defer.gatherResults( [master.data.get(("builds", build['buildid'], 'steps')) for build in builds]) if want_logs: for build, build_steps in zip(builds, buildsteps): for s in build_steps: logs = yield master.data.get(("steps", s['stepid'], 'logs')) s['logs'] = list(logs) for l in s['logs']: l['url'] = get_url_for_log(master, build['builderid'], build['number'], s['number'], l['slug']) if want_logs_content: l['content'] = yield master.data.get(("logs", l['logid'], 'contents')) else: # we still need a list for the big zip buildsteps = list(range(len(builds))) # a big zip to connect everything together for build, properties, steps, prev in zip(builds, buildproperties, buildsteps, prev_builds): build['builder'] = buildersbyid[build['builderid']] build['buildset'] = buildset build['url'] = getURLForBuild( master, 
build['builderid'], build['number']) if want_properties: build['properties'] = properties if want_steps: build['steps'] = list(steps) if want_previous_build: build['prev_build'] = prev # perhaps we need data api for users with sourcestamps/:id/users @defer.inlineCallbacks def getResponsibleUsersForSourceStamp(master, sourcestampid): changesd = master.data.get(("sourcestamps", sourcestampid, "changes")) sourcestampd = master.data.get(("sourcestamps", sourcestampid)) changes, sourcestamp = yield defer.gatherResults([changesd, sourcestampd]) blamelist = set() # normally, we get only one, but just assume there might be several for c in changes: blamelist.add(c['author']) # Add patch author to blamelist if 'patch' in sourcestamp and sourcestamp['patch'] is not None: blamelist.add(sourcestamp['patch']['author']) blamelist = list(blamelist) blamelist.sort() return blamelist # perhaps we need data api for users with builds/:id/users @defer.inlineCallbacks def getResponsibleUsersForBuild(master, buildid): dl = [ master.data.get(("builds", buildid, "changes")), master.data.get(("builds", buildid, 'properties')) ] changes, properties = yield defer.gatherResults(dl) blamelist = set() # add users from changes for c in changes: blamelist.add(c['author']) # add owner from properties if 'owner' in properties: owner = properties['owner'][0] if isinstance(owner, str): blamelist.add(owner) else: blamelist.update(owner) log.msg( "Warning: owner property is a list for buildid {}. ".format(buildid)) log.msg("Please report a bug: changes: {}. 
properties: {}".format( changes, properties)) # add owner from properties if 'owners' in properties: blamelist.update(properties['owners'][0]) blamelist = list(blamelist) blamelist.sort() return blamelist def getURLForBuild(master, builderid, build_number): prefix = master.config.buildbotURL return prefix + "#builders/%d/builds/%d" % ( builderid, build_number) def getURLForBuildrequest(master, buildrequestid): prefix = master.config.buildbotURL return "{}#buildrequests/{}".format(prefix, buildrequestid) def get_url_for_log(master, builderid, build_number, step_number, log_slug): prefix = master.config.buildbotURL return f"{prefix}#builders/{builderid}/builds/{build_number}/" + \ f"steps/{step_number}/logs/{log_slug}" @renderer def URLForBuild(props): build = props.getBuild() return build.getUrl() def merge_reports_prop(reports, prop): result = None for report in reports: if prop in report and report[prop] is not None: if result is None: result = report[prop] else: result += report[prop] return result def merge_reports_prop_take_first(reports, prop): for report in reports: if prop in report and report[prop] is not None: return report[prop] return None buildbot-3.4.0/master/buildbot/reporters/words.py000066400000000000000000001434641413250514000221650ustar00rootroot00000000000000# coding: utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import random import re import shlex from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from twisted.python import log from twisted.python import usage from twisted.web import resource from twisted.web import server from buildbot import util from buildbot import version from buildbot.data import resultspec from buildbot.plugins.db import get_plugins from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import statusToString from buildbot.reporters import utils from buildbot.util import epoch2datetime from buildbot.util import service from buildbot.util import unicode2bytes # Used in command_HELLO and it's test. 'Hi' in 100 languages. 
GREETINGS = [ "ږغ كول ، هركلى كول ږغ، هركلى", "Goeie dag", "Tungjatjeta", "Yatasay", "Ahlan bik", "Voghdzuyin", "hola", "kaixo", "Horas", "Pryvitańnie", "Nomoskar", "Oki", "Selam", "Dez-mat", "Zdrávejte", "Mingala ba", "Hola", "Hafa dai", "Oh-see-YOH", "Nín hao", "Bonjou", "Zdravo", "Nazdar", "Hallo", "Hallo", "Iiti", "Kotáka", "Saluton", "Tere", "Hallo", "Hallo", "Bula", "Helo", "Hei", "Goede morgen", "Bonjour", "Hoi", "Ola", "Gamardžoba", "Guten Tag", "Mauri", "Geia!", "Inuugujoq", "Kem cho", "Sannu", "Aloha", "Shalóm", "Namasté", "Szia", "Halló", "Hai", "Kiana", "Dia is muire dhuit", "Buongiorno", "Kónnichi wa", "Salam", "Annyeonghaseyo", "Na", "Sabai dii", "Ave", "Es mīlu tevi", "Labas.", "Selamat petang", "Ni hao", "Kia ora", "Yokwe", "Kwe", "sain baina uu", "niltze", "Yá'át'ééh", "Namaste", "Hallo.", "Salâm", "Witajcie", "Olá", "Kâils", "Aroha", "Salut", "Privét", "Talofa", "Namo namah", "ćao", "Nazdar", "Zdravo", "Hola", "Jambo", "Hej", "Sälü", "Halo", "Selam", "Sàwàtdee kráp", "Dumela", "Merhaba", "Pryvít", "Adaab arz hai", "Chào", "Glidis", "Helo", "Sawubona", "Hoi"] class UsageError(ValueError): # pylint: disable=useless-super-delegation def __init__(self, string="Invalid usage", *more): # This is not useless as we change the default value of an argument. # This bug is reported as "fixed" but apparently, it is not. # https://github.com/PyCQA/pylint/issues/1085 # (Maybe there is a problem with builtin exceptions). super().__init__(string, *more) class ForceOptions(usage.Options): optParameters = [ ["builder", None, None, "which Builder to start"], ["codebase", None, "", "which codebase to build"], ["branch", None, "master", "which branch to build"], ["revision", None, "HEAD", "which revision to build"], ["project", None, "", "which project to build"], ["reason", None, None, "the reason for starting the build"], ["props", None, None, "A set of properties made available in the build environment, " "format is --properties=prop1=value1,prop2=value2,.. 
" "option can be specified multiple times."], ] def parseArgs(self, *args): args = list(args) if args: if self['builder'] is not None: raise UsageError("--builder provided in two ways") self['builder'] = args.pop(0) if args: # args might be modified above if self['reason'] is not None: raise UsageError("--reason provided in two ways") self['reason'] = " ".join(args) dangerous_commands = [] def dangerousCommand(method): command = method.__name__ if not command.startswith('command_'): raise ValueError('@dangerousCommand can be used only for commands') dangerous_commands.append(command[8:]) return method class Channel(service.AsyncService): """ This class holds what should be shared between users on a single channel. In particular it is responsible for maintaining notification states and send notifications. """ def __init__(self, bot, channel): self.name = "Channel({})".format(channel) self.id = channel self.bot = bot self.notify_events = set() self.subscribed = [] self.build_subscriptions = [] self.reported_builds = [] # tuples (when, buildername, buildnum) self.missing_workers = set() self.useRevisions = bot.useRevisions def send(self, message, **kwargs): return self.bot.send_message(self.id, message, **kwargs) def stopService(self): if self.subscribed: self.unsubscribe_from_build_events() def validate_notification_event(self, event): if not re.compile("^(started|finished|success|warnings|failure|exception|" "cancelled|problem|recovery|worse|better|worker|" # this is deprecated list "(success|warnings|failure|exception)To" "(Success|Warnings|Failure|Exception))$").match(event): raise UsageError("Try '" + self.bot.commandPrefix + "notify on|off _EVENT_'.") @defer.inlineCallbacks def list_notified_events(self): if self.notify_events: yield self.send("The following events are being notified: {}." 
.format(", ".join(sorted(self.notify_events)))) else: yield self.send("No events are being notified.") def notify_for(self, *events): for event in events: if event in self.notify_events: return True return False @defer.inlineCallbacks def subscribe_to_build_events(self): startConsuming = self.master.mq.startConsuming def buildStarted(key, msg): return self.buildStarted(msg) def buildFinished(key, msg): return self.buildFinished(msg) def workerEvent(key, msg): if key[2] == 'missing': return self.workerMissing(msg) if key[2] == 'connected': return self.workerConnected(msg) return None for e, f in (("new", buildStarted), # BuilderStarted ("finished", buildFinished)): # BuilderFinished handle = yield startConsuming(f, ('builders', None, 'builds', None, e)) self.subscribed.append(handle) handle = yield startConsuming(workerEvent, ('workers', None, None)) self.subscribed.append(handle) def unsubscribe_from_build_events(self): # Cancel all the subscriptions we have old_list, self.subscribed = self.subscribed, [] for handle in old_list: handle.stopConsuming() def add_notification_events(self, events): for event in events: self.validate_notification_event(event) self.notify_events.add(event) if not self.subscribed: self.subscribe_to_build_events() def remove_notification_events(self, events): for event in events: self.validate_notification_event(event) self.notify_events.remove(event) if not self.notify_events: self.unsubscribe_from_build_events() def remove_all_notification_events(self): self.notify_events = set() if self.subscribed: self.unsubscribe_from_build_events() def shouldReportBuild(self, builder, buildnum): """Returns True if this build should be reported for this contact (eliminating duplicates), and also records the report for later""" for w, b, n in self.reported_builds: if b == builder and n == buildnum: return False self.reported_builds.append([util.now(), builder, buildnum]) # clean the reported builds horizon = util.now() - 60 while self.reported_builds 
and self.reported_builds[0][0] < horizon: self.reported_builds.pop(0) # and return True, since this is a new one return True @defer.inlineCallbacks def buildStarted(self, build): builder = yield self.bot.getBuilder(builderid=build['builderid']) builderName = builder['name'] buildNumber = build['number'] log.msg('[Contact] Builder {} started'.format(builder['name'], )) # only notify about builders we are interested in if (self.bot.tags is not None and not self.builderMatchesAnyTag(builder.get('tags', []))): log.msg('Not notifying for a build that does not match any tags') return if not self.notify_for('started'): return if self.useRevisions: revisions = yield self.getRevisionsForBuild(build) r = "Build containing revision(s) {} on {} started" \ .format(','.join(revisions), builderName) else: # Abbreviate long lists of changes to simply two # revisions, and the number of additional changes. # TODO: We can't get the list of the changes related to a build in # nine changes_str = "" url = utils.getURLForBuild(self.master, builder['builderid'], build['number']) r = "Build [#{:d}]({}) of `{}` started".format(buildNumber, url, builderName) if changes_str: r += " ({})".format(changes_str) self.send(r + ".") @defer.inlineCallbacks def buildFinished(self, build, watched=False): builder = yield self.bot.getBuilder(builderid=build['builderid']) builderName = builder['name'] buildNumber = build['number'] # only notify about builders we are interested in if (self.bot.tags is not None and not self.bot.builderMatchesAnyTag(builder.get('tags', []))): log.msg('Not notifying for a build that does not match any tags') return if not (watched or (yield self.notify_for_finished(build))): return if not self.shouldReportBuild(builderName, buildNumber): return url = utils.getURLForBuild(self.master, builder['builderid'], buildNumber) if self.useRevisions: revisions = yield self.getRevisionsForBuild(build) r = "Build on `{}` containing revision(s) {} {}" \ .format(builderName, 
','.join(revisions), self.bot.format_build_status(build)) else: r = "Build [#{:d}]({}) of `{}` {}" \ .format(buildNumber, url, builderName, self.bot.format_build_status(build)) s = build.get('status_string') if build['results'] != SUCCESS and s is not None: r += ": " + s else: r += "." # FIXME: where do we get the list of changes for a build ? # if self.bot.showBlameList and buildResult != SUCCESS and len(build.changes) != 0: # r += ' blamelist: ' + ', '.join(list(set([c.who for c in build.changes]))) self.send(r) @defer.inlineCallbacks def notify_for_finished(self, build): if self.notify_for('finished'): return True result = build['results'] result_name = statusToString(result) if self.notify_for(result_name): return True if result in self.bot.results_severity and \ (self.notify_for('better', 'worse', 'problem', 'recovery') or any('To' in e for e in self.notify_events)): prev_build = yield self.master.data.get( ('builders', build['builderid'], 'builds', build['number'] - 1)) if prev_build: prev_result = prev_build['results'] if prev_result in self.bot.results_severity: result_severity = self.bot.results_severity.index(result) prev_result_severity = self.bot.results_severity.index(prev_result) if self.notify_for('better') and \ result_severity < prev_result_severity: return True if self.notify_for('worse') and \ result_severity > prev_result_severity: return True if self.notify_for('problem') \ and prev_result in (SUCCESS, WARNINGS) \ and result in (FAILURE, EXCEPTION): return True if self.notify_for('recovery') \ and prev_result in (FAILURE, EXCEPTION) \ and result in (SUCCESS, WARNINGS): return True # DEPRECATED required_notification_control_string = ''.join( (statusToString(prev_result).lower(), 'To', result_name.capitalize())) if (self.notify_for(required_notification_control_string)): return True return False @defer.inlineCallbacks def workerMissing(self, worker): self.missing_workers.add(worker['workerid']) if self.notify_for('worker'): self.send(("Worker 
`{name}` is missing. It was seen last on " "{last_connection}.").format(**worker)) yield self.bot.saveMissingWorkers() @defer.inlineCallbacks def workerConnected(self, worker): workerid = worker['workerid'] if workerid in self.missing_workers: self.missing_workers.remove(workerid) if self.notify_for('worker'): self.send("Worker `{name}` is back online.".format(**worker)) yield self.bot.saveMissingWorkers() class Contact: """I hold the state for a single user's interaction with the buildbot. There will be one instance of me for each user who interacts personally with the buildbot. There will be an additional instance for each 'broadcast contact' (chat rooms, IRC channels as a whole). """ def __init__(self, user, channel): """ :param StatusBot bot: StatusBot this Contact belongs to :param user: User ID representing this contact :param channel: Channel this contact is on """ self.user_id = user self.channel = channel @property def bot(self): return self.channel.bot @property def master(self): return self.channel.bot.master @property def is_private_chat(self): return self.user_id == self.channel.id @staticmethod def overrideCommand(meth): try: base_meth = getattr(Contact, meth.__name__) except AttributeError: pass else: try: meth.__doc__ = base_meth.__doc__ except AttributeError: pass try: meth.usage = base_meth.usage except AttributeError: pass return meth # Communication with the user def send(self, message, **kwargs): return self.channel.send(message, **kwargs) def access_denied(self, *args, **kwargs): return self.send("Thou shall not pass, {}!!!".format(self.user_id)) # Main dispatchers for incoming messages def getCommandMethod(self, command): command = command.upper() try: method = getattr(self, 'command_' + command) except AttributeError: return None get_authz = self.bot.authz.get acl = get_authz(command) if acl is None: if command in dangerous_commands: acl = get_authz('!', False) else: acl = get_authz('', True) acl = get_authz('*', acl) if isinstance(acl, 
(list, tuple)): acl = self.user_id in acl elif acl not in (True, False, None): acl = self.user_id == acl if not acl: return self.access_denied return method @defer.inlineCallbacks def handleMessage(self, message, **kwargs): message = message.lstrip() parts = message.split(' ', 1) if len(parts) == 1: parts = parts + [''] cmd, args = parts cmd_suffix = self.bot.commandSuffix if cmd_suffix and cmd.endswith(cmd_suffix): cmd = cmd[:-len(cmd_suffix)] self.bot.log("Received command `{}` from {}".format(cmd, self.describeUser())) if cmd.startswith(self.bot.commandPrefix): meth = self.getCommandMethod(cmd[len(self.bot.commandPrefix):]) else: meth = None if not meth: if message[-1] == '!': self.send("What you say!") return None elif cmd.startswith(self.bot.commandPrefix): self.send("I don't get this '{}'...".format(cmd)) meth = self.command_COMMANDS else: if self.is_private_chat: self.send("Say what?") return None try: result = yield meth(args.strip(), **kwargs) except UsageError as e: self.send(str(e)) return None except Exception as e: self.bot.log_err(e) self.send("Something bad happened (see logs)") return None return result def splitArgs(self, args): """Returns list of arguments parsed by shlex.split() or raise UsageError if failed""" try: return shlex.split(args) except ValueError as e: raise UsageError(e) from e def command_HELLO(self, args, **kwargs): """say hello""" self.send(random.choice(GREETINGS)) def command_VERSION(self, args, **kwargs): """show buildbot version""" self.send("This is buildbot-{} at your service".format(version)) @defer.inlineCallbacks def command_LIST(self, args, **kwargs): """list configured builders or workers""" args = self.splitArgs(args) all = False num = 10 try: num = int(args[0]) del args[0] except ValueError: if args[0] == 'all': all = True del args[0] except IndexError: pass if not args: raise UsageError(("Try '{}list [all|N] builders|workers|changes'." 
).format(self.bot.commandPrefix)) if args[0] == 'builders': bdicts = yield self.bot.getAllBuilders() online_builderids = yield self.bot.getOnlineBuilders() response = ["I found the following builders:"] for bdict in bdicts: if bdict['builderid'] in online_builderids: response.append(bdict['name']) elif all: response.append(bdict['name']) response.append("[offline]") self.send(' '.join(response)) elif args[0] == 'workers': workers = yield self.master.data.get(('workers',)) response = ["I found the following workers:"] for worker in workers: if worker['configured_on']: response.append(worker['name']) if not worker['connected_to']: response.append("[disconnected]") elif all: response.append(worker['name']) response.append("[offline]") self.send(' '.join(response)) elif args[0] == 'changes': if all: self.send("Do you really want me to list all changes? It can be thousands!\n" "If you want to be flooded, specify the maximum number of changes " "to show.\n" "Right now, I will show up to 100 recent changes.") num = 100 changes = yield self.master.data.get(('changes',), order=['-changeid'], limit=num) response = ["I found the following recent changes:"] for change in reversed(changes): change['comment'] = change['comments'].split('\n')[0] change['date'] = epoch2datetime(change['when_timestamp']).strftime('%Y-%m-%d %H:%M') response.append( "{comment})\n" "Author: {author}\n" "Date: {date}\n" "Repository: {repository}\n" "Branch: {branch}\n" "Revision: {revision}\n".format(**change)) self.send('\n\n'.join(response)) command_LIST.usage = "list [all|N] builders|workers|changes - " \ "list configured builders, workers, or N recent changes" @defer.inlineCallbacks def command_STATUS(self, args, **kwargs): """list status of a builder (or all builders)""" args = self.splitArgs(args) if not args: which = "" elif len(args) == 1: which = args[0] else: raise UsageError("Try '" + self.bot.commandPrefix + "status _builder_'.") response = [] if which == "": builders = yield 
self.bot.getAllBuilders() online_builderids = yield self.bot.getOnlineBuilders() for builder in builders: if builder['builderid'] in online_builderids: status = yield self.bot.getBuildStatus(builder['name'], short=True) response.append(status) elif which == "all": builders = yield self.bot.getAllBuilders() for builder in builders: status = yield self.bot.getBuildStatus(builder['name'], short=True) response.append(status) else: status = yield self.bot.getBuildStatus(which) response.append(status) if response: self.send('\n'.join(response)) command_STATUS.usage = "status [_which_] - list status of a builder (or all builders)" @defer.inlineCallbacks def command_NOTIFY(self, args, **kwargs): """notify me about build events""" args = self.splitArgs(args) if not args: raise UsageError("Try '" + self.bot.commandPrefix + "notify on|off|list [_EVENT_]'.") action = args.pop(0) events = args if action in ("on", "on-quiet"): if not events: events = ('started', 'finished') self.channel.add_notification_events(events) if action == "on": yield self.channel.list_notified_events() self.bot.saveNotifyEvents() elif action in ("off", "off-quiet"): if events: self.channel.remove_notification_events(events) else: self.channel.remove_all_notification_events() if action == "off": yield self.channel.list_notified_events() self.bot.saveNotifyEvents() elif action == "list": yield self.channel.list_notified_events() else: raise UsageError("Try '" + self.bot.commandPrefix + "notify on|off|list [_EVENT_]'.") command_NOTIFY.usage = ("notify on|off|list [_EVENT_] ... 
- notify me about build events;" " event should be one or more of: 'started', 'finished', 'failure'," " 'success', 'exception', 'problem', 'recovery', 'better', or 'worse'") @defer.inlineCallbacks def command_WATCH(self, args, **kwargs): """announce the completion of an active build""" args = self.splitArgs(args) if len(args) != 1: raise UsageError("Try '" + self.bot.commandPrefix + "watch _builder_'.") which = args[0] builder = yield self.bot.getBuilder(buildername=which) # Get current builds on this builder. builds = yield self.bot.getRunningBuilds(builder['builderid']) if not builds: self.send("There are no currently running builds.") return def watchForCompleteEvent(key, msg): if key[-1] in ('finished', 'complete'): return self.channel.buildFinished(msg, watched=True) return None for build in builds: startConsuming = self.master.mq.startConsuming handle = yield startConsuming( watchForCompleteEvent, ('builds', str(build['buildid']), None)) self.channel.build_subscriptions.append((build['buildid'], handle)) url = utils.getURLForBuild(self.master, builder['builderid'], build['number']) if self.bot.useRevisions: revisions = yield self.bot.getRevisionsForBuild(build) r = "Watching build on `{}` containing revision(s) {} until it finishes..." \ .format(which, ','.join(revisions)) else: r = "Watching build [#{:d}]({}) of `{}` until it finishes..." \ .format(build['number'], url, which) self.send(r) command_WATCH.usage = "watch _which_ - announce the completion of an active build" @defer.inlineCallbacks @dangerousCommand def command_FORCE(self, args, **kwargs): """force a build""" # FIXME: NEED TO THINK ABOUT! 
errReply = "Try '{}{}'".format(self.bot.commandPrefix, self.command_FORCE.usage) args = self.splitArgs(args) if not args: raise UsageError(errReply) what = args.pop(0) if what != "build": raise UsageError(errReply) opts = ForceOptions() opts.parseOptions(args) builderName = opts['builder'] builder = yield self.bot.getBuilder(buildername=builderName) branch = opts['branch'] revision = opts['revision'] codebase = opts['codebase'] project = opts['project'] reason = opts['reason'] props = opts['props'] if builderName is None: raise UsageError("you must provide a Builder, " + errReply) # keep weird stuff out of the branch, revision, and properties args. branch_validate = self.master.config.validation['branch'] revision_validate = self.master.config.validation['revision'] pname_validate = self.master.config.validation['property_name'] pval_validate = self.master.config.validation['property_value'] if branch and not branch_validate.match(branch): self.bot.log("Force: bad branch '{}'".format(branch)) self.send("Sorry, bad branch '{}'".format(branch)) return if revision and not revision_validate.match(revision): self.bot.log("Force: bad revision '{}'".format(revision)) self.send("Sorry, bad revision '{}'".format(revision)) return properties = Properties() properties.master = self.master if props: # split props into name:value dict pdict = {} propertylist = props.split(",") for prop in propertylist: splitproperty = prop.split("=", 1) pdict[splitproperty[0]] = splitproperty[1] # set properties for prop in pdict: pname = prop pvalue = pdict[prop] if not pname_validate.match(pname) \ or not pval_validate.match(pvalue): self.bot.log("Force: bad property name='{}', value='{}'" .format(pname, pvalue)) self.send("Sorry, bad property name='{}', value='{}'" .format(pname, pvalue)) return properties.setProperty(pname, pvalue, "Force Build Chat") properties.setProperty("reason", reason, "Force Build Chat") properties.setProperty("owner", self.describeUser(), "Force Build Chat") reason 
= "forced: by {}: {}".format(self.describeUser(), reason) try: yield self.master.data.updates.addBuildset(builderids=[builder['builderid']], # For now, we just use # this as the id. scheduler="status.words", sourcestamps=[{ 'codebase': codebase, 'branch': branch, 'revision': revision, 'project': project, 'repository': ""}], reason=reason, properties=properties.asDict(), waited_for=False) except AssertionError as e: self.send("I can't: " + str(e)) else: self.send("Force build successfully requested.") command_FORCE.usage = ("force build [--codebase=CODEBASE] [--branch=branch] " "[--revision=revision] [--props=prop1=val1,prop2=val2...] " "_which_ _reason_ - Force a build") @defer.inlineCallbacks @dangerousCommand def command_STOP(self, args, **kwargs): """stop a running build""" args = self.splitArgs(args) if len(args) < 3 or args[0] != 'build': raise UsageError("Try '" + self.bot.commandPrefix + "stop build _which_ _reason_'.") which = args[1] reason = ' '.join(args[2:]) r = "stopped: by {}: {}".format(self.describeUser(), reason) # find an in-progress build builder = yield self.bot.getBuilder(buildername=which) builderid = builder['builderid'] builds = yield self.bot.getRunningBuilds(builderid) if not builds: self.send("Sorry, no build is currently running.") return for bdict in builds: num = bdict['number'] yield self.master.data.control('stop', {'reason': r}, ('builders', builderid, 'builds', num)) if self.bot.useRevisions: revisions = yield self.bot.getRevisionsForBuild(bdict) response = "Build containing revision(s) {} interrupted".format(','.join( revisions)) else: url = utils.getURLForBuild(self.master, builderid, num) response = "Build [#{:d}]({}) of `{}` interrupted.".format(num, url, which) self.send(response) command_STOP.usage = "stop build _which_ _reason_ - Stop a running build" @defer.inlineCallbacks def command_LAST(self, args, **kwargs): """list last build status for a builder""" # FIXME: NEED TO THINK ABOUT! 
args = self.splitArgs(args) if not args: builders = yield self.bot.getAllBuilders() online_builderids = yield self.bot.getOnlineBuilders() builders = [b for b in builders if b['builderid'] in online_builderids] elif len(args) == 1: arg = args[0] if arg == 'all': builders = yield self.bot.getAllBuilders() else: builder = yield self.bot.getBuilder(buildername=arg) if not builder: raise UsageError("no such builder") builders = [builder] else: raise UsageError("Try '" + self.bot.commandPrefix + "last _builder_'.") messages = [] for builder in builders: lastBuild = yield self.bot.getLastCompletedBuild(builder['builderid']) if not lastBuild: status = "no builds run since last restart" else: complete_at = lastBuild['complete_at'] if complete_at: complete_at = util.datetime2epoch(complete_at) ago = util.fuzzyInterval(int(reactor.seconds() - complete_at)) else: ago = "??" status = self.bot.format_build_status(lastBuild) status = 'last build {} ({} ago)'.format(status, ago) if lastBuild['results'] != SUCCESS: status += ': {}'.format(lastBuild['state_string']) messages.append("`{}`: {}".format(builder['name'], status)) if messages: self.send('\n'.join(messages)) command_LAST.usage = "last [_which_] - list last build status for builder _which_" @classmethod def build_commands(cls): commands = [] for k in dir(cls): if k.startswith('command_'): commands.append(k[8:].lower()) commands.sort() return commands def describeUser(self): if self.is_private_chat: return self.user_id return "{} on {}".format(self.user_id, self.channel.id) # commands def command_HELP(self, args, **kwargs): """give help for a command or one of it's arguments""" args = self.splitArgs(args) if not args: commands = self.build_commands() response = [] for command in commands: meth = getattr(self, 'command_' + command.upper()) doc = getattr(meth, '__doc__', None) if doc: response.append("{} - {}".format(command, doc)) if response: self.send('\n'.join(response)) return command = args[0] if 
command.startswith(self.bot.commandPrefix): command = command[len(self.bot.commandPrefix):] meth = getattr(self, 'command_' + command.upper(), None) if not meth: raise UsageError("There is no such command '{}'.".format(args[0])) doc = getattr(meth, 'usage', None) if isinstance(doc, dict): if len(args) == 1: k = None # command elif len(args) == 2: k = args[1] # command arg else: k = tuple(args[1:]) # command arg subarg ... doc = doc.get(k, None) elif callable(doc): try: doc = doc(*args[1:]) except (TypeError, ValueError): doc = None if doc: self.send("Usage: {}{}".format(self.bot.commandPrefix, doc)) else: self.send( "No usage info for " + ' '.join(["'{}'".format(arg) for arg in args])) command_HELP.usage = ("help [_command_ _arg_ [_subarg_ ...]] - " "Give help for _command_ or one of it's arguments") def command_SOURCE(self, args, **kwargs): "the source code for buildbot" self.send("My source can be found at " "https://github.com/buildbot/buildbot") command_SOURCE.usage = "source - the source code for Buildbot" def command_COMMANDS(self, args, **kwargs): """list available commands""" commands = self.build_commands() str = "Buildbot commands: " + ", ".join(self.bot.commandPrefix + c for c in commands) self.send(str) command_COMMANDS.usage = "commands - List available commands" @dangerousCommand def command_SHUTDOWN(self, args, **kwargs): """shutdown the buildbot master""" # FIXME: NEED TO THINK ABOUT! 
if args not in ('check', 'start', 'stop', 'now'): raise UsageError("Try '" + self.bot.commandPrefix + "shutdown check|start|stop|now'.") botmaster = self.channel.master.botmaster shuttingDown = botmaster.shuttingDown if args == 'check': if shuttingDown: self.send("Status: buildbot is shutting down.") else: self.send("Status: buildbot is running.") elif args == 'start': if shuttingDown: self.send("Shutdown already started.") else: self.send("Starting clean shutdown.") botmaster.cleanShutdown() elif args == 'stop': if not shuttingDown: self.send("There is no ongoing shutdown to stop.") else: self.send("Stopping clean shutdown.") botmaster.cancelCleanShutdown() elif args == 'now': self.send("Stopping buildbot.") reactor.stop() command_SHUTDOWN.usage = { None: "shutdown check|start|stop|now - shutdown the buildbot master", "check": "shutdown check - check if the buildbot master is running or shutting down", "start": "shutdown start - start a clean shutdown", "stop": "shutdown cancel - stop the clean shutdown", "now": "shutdown now - shutdown immediately without waiting for the builders to finish"} class StatusBot(service.AsyncMultiService): """ Abstract status bot """ contactClass = Contact channelClass = Channel commandPrefix = '' commandSuffix = None offline_string = "offline" idle_string = "idle" running_string = "running:" def __init__(self, authz=None, tags=None, notify_events=None, useRevisions=False, showBlameList=False): super().__init__() self.tags = tags if notify_events is None: notify_events = {} self.notify_events = notify_events self.useRevisions = useRevisions self.showBlameList = showBlameList self.authz = self.expand_authz(authz) self.contacts = {} self.channels = {} @staticmethod def expand_authz(authz): if authz is None: authz = {} expanded_authz = {} for cmds, val in authz.items(): if not isinstance(cmds, (tuple, list)): cmds = (cmds,) for cmd in cmds: expanded_authz[cmd.upper()] = val return expanded_authz def isValidUser(self, user): for auth in 
self.authz.values(): if auth is True \ or (isinstance(auth, (list, tuple)) and user in auth)\ or user == auth: return True # If user is in '', we have already returned; otherwise check if defaults apply return '' not in self.authz def getContact(self, user, channel): """ get a Contact instance for ``user`` on ``channel`` """ try: return self.contacts[(channel, user)] except KeyError: valid = self.isValidUser(user) new_contact = self.contactClass(user=user, channel=self.getChannel(channel, valid)) if valid: self.contacts[(channel, user)] = new_contact return new_contact def getChannel(self, channel, valid=True): try: return self.channels[channel] except KeyError: new_channel = self.channelClass(self, channel) if valid: self.channels[channel] = new_channel new_channel.setServiceParent(self) return new_channel def _get_object_id(self): return self.master.db.state.getObjectId( self.nickname, '{0.__module__}.{0.__name__}'.format(self.__class__)) @defer.inlineCallbacks def _save_channels_state(self, attr, json_type=None): if json_type is None: json_type = lambda x: x data = [(k, v) for k, v in ((channel.id, json_type(getattr(channel, attr))) for channel in self.channels.values()) if v] try: objectid = yield self._get_object_id() yield self.master.db.state.setState(objectid, attr, data) except Exception as err: self.log_err(err, "saveState '{}'".format(attr)) @defer.inlineCallbacks def _load_channels_state(self, attr, setter): try: objectid = yield self._get_object_id() data = yield self.master.db.state.getState(objectid, attr, ()) except Exception as err: self.log_err(err, "loadState ({})".format(attr)) else: if data is not None: for c, d in data: try: setter(self.getChannel(c), d) except Exception as err: self.log_err(err, "loadState '{}' ({})".format(attr, c)) @defer.inlineCallbacks def loadState(self): yield self._load_channels_state('notify_events', lambda c, e: c.add_notification_events(e)) yield self._load_channels_state('missing_workers', lambda c, w: 
c.missing_workers.update(w)) @defer.inlineCallbacks def saveNotifyEvents(self): yield self._save_channels_state('notify_events', list) @defer.inlineCallbacks def saveMissingWorkers(self): yield self._save_channels_state('missing_workers', list) def send_message(self, chat, message, **kwargs): raise NotImplementedError() def _get_log_system(self, source): if source is None: source = self.__class__.__name__ try: parent = self.parent.name except AttributeError: parent = '-' name = "{},{}".format(parent, source) return name def log(self, msg, source=None): log.callWithContext({"system": self._get_log_system(source)}, log.msg, msg) def log_err(self, error=None, why=None, source=None): log.callWithContext({"system": (self._get_log_system(source))}, log.err, error, why) def builderMatchesAnyTag(self, builder_tags): return any(tag for tag in builder_tags if tag in self.tags) def getRunningBuilds(self, builderid): d = self.master.data.get(('builds',), filters=[resultspec.Filter('builderid', 'eq', [builderid]), resultspec.Filter('complete', 'eq', [False])]) return d def getLastCompletedBuild(self, builderid): d = self.master.data.get(('builds',), filters=[resultspec.Filter('builderid', 'eq', [builderid]), resultspec.Filter('complete', 'eq', [True])], order=['-number'], limit=1) @d.addCallback def listAsOneOrNone(res): if res: return res[0] return None return d def getCurrentBuildstep(self, build): d = self.master.data.get(('builds', build['buildid'], 'steps'), filters=[ resultspec.Filter('complete', 'eq', [False])], order=['number'], limit=1) return d @defer.inlineCallbacks def getBuildStatus(self, which, short=False): response = '`{}`: '.format(which) builder = yield self.getBuilder(buildername=which) builderid = builder['builderid'] runningBuilds = yield self.getRunningBuilds(builderid) # pylint: disable=too-many-nested-blocks if not runningBuilds: onlineBuilders = yield self.getOnlineBuilders() if builderid in onlineBuilders: response += self.idle_string lastBuild = yield 
self.getLastCompletedBuild(builderid) if lastBuild: complete_at = lastBuild['complete_at'] if complete_at: complete_at = util.datetime2epoch(complete_at) ago = util.fuzzyInterval(int(reactor.seconds() - complete_at)) else: ago = "??" status = self.format_build_status(lastBuild, short=short) if not short: status = ", " + status if lastBuild['results'] != SUCCESS: status_string = lastBuild.get('status_string') if status_string: status += ": " + status_string response += ' last build {} ago{}'.format(ago, status) else: response += self.offline_string else: response += self.running_string buildInfo = [] for build in runningBuilds: step = yield self.getCurrentBuildstep(build) if step: s = "({})".format(step[-1]['state_string']) else: s = "(no current step)" bnum = build['number'] url = utils.getURLForBuild(self.master, builderid, bnum) buildInfo.append("build [#{:d}]({}) {}".format(bnum, url, s)) response += ' ' + ', '.join(buildInfo) return response @defer.inlineCallbacks def getBuilder(self, buildername=None, builderid=None): if buildername: bdicts = yield self.master.data.get(('builders',), filters=[resultspec.Filter('name', 'eq', [buildername])]) if bdicts: # Could there be more than one? One is enough. 
bdict = bdicts[0] else: bdict = None elif builderid: bdict = yield self.master.data.get(('builders', builderid)) else: raise UsageError("no builder specified") if bdict is None: if buildername: which = buildername else: which = 'number {}'.format(builderid) raise UsageError("no such builder '{}'".format(which)) return bdict def getAllBuilders(self): d = self.master.data.get(('builders',)) return d @defer.inlineCallbacks def getOnlineBuilders(self): all_workers = yield self.master.data.get(('workers',)) online_builderids = set() for worker in all_workers: connected = worker['connected_to'] if not connected: continue builders = worker['configured_on'] builderids = [builder['builderid'] for builder in builders] online_builderids.update(builderids) return list(online_builderids) @defer.inlineCallbacks def getRevisionsForBuild(self, bdict): # FIXME: Need to get revision info! (build -> buildreq -> buildset -> # sourcestamps) return ["TODO"] results_descriptions = { SKIPPED: "was skipped", SUCCESS: "completed successfully", WARNINGS: "completed with warnings", FAILURE: "failed", EXCEPTION: "stopped with exception", RETRY: "has been retried", CANCELLED: "was cancelled", } results_severity = ( SKIPPED, SUCCESS, WARNINGS, FAILURE, CANCELLED, EXCEPTION ) def format_build_status(self, build, short=False): """ Optionally add color to the message """ return self.results_descriptions[build['results']] class ThrottledClientFactory(protocol.ClientFactory): lostDelay = random.randint(1, 5) failedDelay = random.randint(45, 60) def __init__(self, lostDelay=None, failedDelay=None): if lostDelay is not None: self.lostDelay = lostDelay if failedDelay is not None: self.failedDelay = failedDelay def clientConnectionLost(self, connector, reason): reactor.callLater(self.lostDelay, connector.connect) def clientConnectionFailed(self, connector, reason): reactor.callLater(self.failedDelay, connector.connect) class WebhookResource(resource.Resource, service.AsyncService): """ This is a service 
be used by chat bots based on web-hooks. It automatically sets and deletes the resource and calls ``process_webhook`` method of its parent. """ def __init__(self, path): resource.Resource.__init__(self) www = get_plugins('www', None, load_now=True) if 'base' not in www: raise RuntimeError("could not find buildbot-www; is it installed?") self._root = www.get('base').resource self.path = path def startService(self): self._root.putChild(unicode2bytes(self.path), self) try: super().startService() except AttributeError: pass def stopService(self): try: super().stopService() except AttributeError: pass self._root.delEntity(unicode2bytes(self.path)) def render_GET(self, request): return self.render_POST(request) def render_POST(self, request): try: d = self.parent.process_webhook(request) except Exception: d = defer.fail() def ok(_): request.setResponseCode(202) request.finish() def err(error): try: self.parent.log_err(error, "processing telegram request", self.__class__.__name__) except AttributeError: log.err(error, "processing telegram request") request.setResponseCode(500) request.finish() d.addCallbacks(ok, err) return server.NOT_DONE_YET buildbot-3.4.0/master/buildbot/reporters/zulip.py000066400000000000000000000060231413250514000221570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import config from buildbot.reporters.base import ReporterBase from buildbot.reporters.generators.build import BuildStartEndStatusGenerator from buildbot.util import httpclientservice from buildbot.util.logger import Logger log = Logger() class ZulipStatusPush(ReporterBase): name = "ZulipStatusPush" def checkConfig(self, endpoint, token, stream=None, debug=None, verify=None): if not isinstance(endpoint, str): config.error("Endpoint must be a string") if not isinstance(token, str): config.error("Token must be a string") super().checkConfig(generators=[BuildStartEndStatusGenerator()]) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, endpoint, token, stream=None, debug=None, verify=None): self.debug = debug self.verify = verify yield super().reconfigService(generators=[BuildStartEndStatusGenerator()]) self._http = yield httpclientservice.HTTPClientService.getService( self.master, endpoint, debug=self.debug, verify=self.verify) self.token = token self.stream = stream @defer.inlineCallbacks def sendMessage(self, reports): build = reports[0]['builds'][0] event = ("new", "finished")[0 if build["complete"] is False else 1] jsondata = dict(event=event, buildid=build["buildid"], buildername=build["builder"]["name"], url=build["url"], project=build["properties"]["project"][0]) if event == "new": jsondata["timestamp"] = int(build["started_at"].timestamp()) elif event == "finished": jsondata["timestamp"] = int(build["complete_at"].timestamp()) jsondata["results"] = build["results"] if self.stream is not None: url = "/api/v1/external/buildbot?api_key={}&stream={}".format(self.token, self.stream) else: url = "/api/v1/external/buildbot?api_key={}".format(self.token) response = yield self._http.post(url, json=jsondata) if response.code != 200: content = yield response.content() log.error("{code}: Error pushing build status 
to Zulip: {content}", code=response.code, content=content) buildbot-3.4.0/master/buildbot/revlinks.py000066400000000000000000000065601413250514000206320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re class RevlinkMatch: def __init__(self, repo_urls, revlink): if isinstance(repo_urls, str): repo_urls = [repo_urls] self.repo_urls = [re.compile(url) for url in repo_urls] self.revlink = revlink def __call__(self, rev, repo): for url in self.repo_urls: m = url.match(repo) if m: return m.expand(self.revlink) % rev return None GithubRevlink = RevlinkMatch( repo_urls=[r'https://github.com/([^/]*)/([^/]*?)(?:\.git)?$', r'git://github.com/([^/]*)/([^/]*?)(?:\.git)?$', r'git@github.com:([^/]*)/([^/]*?)(?:\.git)?$', r'ssh://git@github.com/([^/]*)/([^/]*?)(?:\.git)?$' ], revlink=r'https://github.com/\1/\2/commit/%s') BitbucketRevlink = RevlinkMatch( repo_urls=[r'https://[^@]*@bitbucket.org/([^/]*)/([^/]*?)(?:\.git)?$', r'git@bitbucket.org:([^/]*)/([^/]*?)(?:\.git)?$'], revlink=r'https://bitbucket.org/\1/\2/commits/%s') class GitwebMatch(RevlinkMatch): def __init__(self, repo_urls, revlink): super().__init__(repo_urls=repo_urls, revlink=revlink + r'?p=\g;a=commit;h=%s') SourceforgeGitRevlink = GitwebMatch( repo_urls=[r'^git://([^.]*).git.sourceforge.net/gitroot/(?P.*)$', 
r'[^@]*@([^.]*).git.sourceforge.net:gitroot/(?P.*)$', r'ssh://(?:[^@]*@)?([^.]*).git.sourceforge.net/gitroot/(?P.*)$', ], revlink=r'http://\1.git.sourceforge.net/git/gitweb.cgi') # SourceForge recently upgraded to another platform called Allura # See introduction: # https://sourceforge.net/p/forge/documentation/Classic%20vs%20New%20SourceForge%20projects/ # And as reference: # https://sourceforge.net/p/forge/community-docs/SVN%20and%20project%20upgrades/ SourceforgeGitRevlink_AlluraPlatform = RevlinkMatch( repo_urls=[r'git://git.code.sf.net/p/(?P.*)$', r'http://git.code.sf.net/p/(?P.*)$', r'ssh://(?:[^@]*@)?git.code.sf.net/p/(?P.*)$' ], revlink=r'https://sourceforge.net/p/\1/ci/%s/') class RevlinkMultiplexer: def __init__(self, *revlinks): self.revlinks = revlinks def __call__(self, rev, repo): for revlink in self.revlinks: url = revlink(rev, repo) if url: return url return None default_revlink_matcher = RevlinkMultiplexer(GithubRevlink, BitbucketRevlink, SourceforgeGitRevlink, SourceforgeGitRevlink_AlluraPlatform) buildbot-3.4.0/master/buildbot/scheduler.py000066400000000000000000000023731413250514000207510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.schedulers.basic import AnyBranchScheduler from buildbot.schedulers.basic import Scheduler from buildbot.schedulers.dependent import Dependent from buildbot.schedulers.timed import Nightly from buildbot.schedulers.timed import Periodic from buildbot.schedulers.triggerable import Triggerable from buildbot.schedulers.trysched import Try_Jobdir from buildbot.schedulers.trysched import Try_Userpass _hush_pyflakes = [Scheduler, AnyBranchScheduler, Dependent, Periodic, Nightly, Triggerable, Try_Jobdir, Try_Userpass] del _hush_pyflakes buildbot-3.4.0/master/buildbot/schedulers/000077500000000000000000000000001413250514000205555ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/schedulers/__init__.py000066400000000000000000000000001413250514000226540ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/schedulers/base.py000066400000000000000000000360361413250514000220510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from twisted.python import log from zope.interface import implementer from buildbot import config from buildbot import interfaces from buildbot.changes import changes from buildbot.process.properties import Properties from buildbot.util.service import ClusteredBuildbotService from buildbot.util.state import StateMixin @implementer(interfaces.IScheduler) class BaseScheduler(ClusteredBuildbotService, StateMixin): DEFAULT_CODEBASES = {'': {}} compare_attrs = ClusteredBuildbotService.compare_attrs + \ ('builderNames', 'properties', 'codebases') def __init__(self, name, builderNames, properties=None, codebases=DEFAULT_CODEBASES): super().__init__(name=name) ok = True if interfaces.IRenderable.providedBy(builderNames): pass elif isinstance(builderNames, (list, tuple)): for b in builderNames: if not isinstance(b, str) and \ not interfaces.IRenderable.providedBy(b): ok = False else: ok = False if not ok: config.error( "The builderNames argument to a scheduler must be a list " "of Builder names or an IRenderable object that will render" "to a list of builder names.") self.builderNames = builderNames if properties is None: properties = {} self.properties = Properties() self.properties.update(properties, "Scheduler") self.properties.setProperty("scheduler", name, "Scheduler") self.objectid = None # Set the codebases that are necessary to process the changes # These codebases will always result in a sourcestamp with or without # changes known_keys = set(['branch', 'repository', 'revision']) if codebases is None: config.error("Codebases cannot be None") elif isinstance(codebases, list): codebases = dict((codebase, {}) for codebase in codebases) elif not isinstance(codebases, dict): config.error( "Codebases must be a dict of dicts, or list of strings") else: for codebase, attrs in codebases.items(): if not isinstance(attrs, dict): config.error("Codebases must be a dict of 
dicts") else: unk = set(attrs) - known_keys if unk: config.error("Unknown codebase keys {} for codebase {}".format( ', '.join(unk), codebase)) self.codebases = codebases # internal variables self._change_consumer = None self._enable_consumer = None self._change_consumption_lock = defer.DeferredLock() self.enabled = True def reconfigService(self, *args, **kwargs): raise NotImplementedError() # activity handling @defer.inlineCallbacks def activate(self): if not self.enabled: return None # even if we aren't called via _activityPoll(), at this point we # need to ensure the service id is set correctly if self.serviceid is None: self.serviceid = yield self._getServiceId() assert self.serviceid is not None schedulerData = yield self._getScheduler(self.serviceid) if schedulerData: self.enabled = schedulerData['enabled'] if not self._enable_consumer: yield self.startConsumingEnableEvents() return None def _enabledCallback(self, key, msg): if msg['enabled']: self.enabled = True d = self.activate() else: d = self.deactivate() def fn(x): self.enabled = False d.addCallback(fn) return d @defer.inlineCallbacks def deactivate(self): if not self.enabled: return None yield self._stopConsumingChanges() return None # service handling def _getServiceId(self): return self.master.data.updates.findSchedulerId(self.name) def _getScheduler(self, sid): return self.master.db.schedulers.getScheduler(sid) def _claimService(self): return self.master.data.updates.trySetSchedulerMaster(self.serviceid, self.master.masterid) def _unclaimService(self): return self.master.data.updates.trySetSchedulerMaster(self.serviceid, None) # status queries # deprecated: these aren't compatible with distributed schedulers def listBuilderNames(self): return self.builderNames # change handling @defer.inlineCallbacks def startConsumingChanges(self, fileIsImportant=None, change_filter=None, onlyImportant=False): assert fileIsImportant is None or callable(fileIsImportant) # register for changes with the data API assert 
not self._change_consumer self._change_consumer = yield self.master.mq.startConsuming( lambda k, m: self._changeCallback(k, m, fileIsImportant, change_filter, onlyImportant), ('changes', None, 'new')) @defer.inlineCallbacks def startConsumingEnableEvents(self): assert not self._enable_consumer self._enable_consumer = yield self.master.mq.startConsuming( self._enabledCallback, ('schedulers', str(self.serviceid), 'updated')) @defer.inlineCallbacks def _changeCallback(self, key, msg, fileIsImportant, change_filter, onlyImportant): # ignore changes delivered while we're not running if not self._change_consumer: return # get a change object, since the API requires it chdict = yield self.master.db.changes.getChange(msg['changeid']) change = yield changes.Change.fromChdict(self.master, chdict) # filter it if change_filter and not change_filter.filter_change(change): return if change.codebase not in self.codebases: log.msg(format='change contains codebase %(codebase)s that is ' 'not processed by scheduler %(name)s', codebase=change.codebase, name=self.name) return if fileIsImportant: try: important = fileIsImportant(change) if not important and onlyImportant: return except Exception: log.err(failure.Failure(), 'in fileIsImportant check for {}'.format(change)) return else: important = True # use change_consumption_lock to ensure the service does not stop # while this change is being processed d = self._change_consumption_lock.run( self.gotChange, change, important) d.addErrback(log.err, 'while processing change') def _stopConsumingChanges(self): # (note: called automatically in deactivate) # acquire the lock change consumption lock to ensure that any change # consumption is complete before we are done stopping consumption def stop(): if self._change_consumer: self._change_consumer.stopConsuming() self._change_consumer = None return self._change_consumption_lock.run(stop) def gotChange(self, change, important): raise NotImplementedError # starting builds 
@defer.inlineCallbacks def addBuildsetForSourceStampsWithDefaults(self, reason, sourcestamps=None, waited_for=False, properties=None, builderNames=None, **kw): if sourcestamps is None: sourcestamps = [] # convert sourcestamps to a dictionary keyed by codebase stampsByCodebase = {} for ss in sourcestamps: cb = ss['codebase'] if cb in stampsByCodebase: raise RuntimeError("multiple sourcestamps with same codebase") stampsByCodebase[cb] = ss # Merge codebases with the passed list of sourcestamps # This results in a new sourcestamp for each codebase stampsWithDefaults = [] for codebase in self.codebases: cb = yield self.getCodebaseDict(codebase) ss = { 'codebase': codebase, 'repository': cb.get('repository', ''), 'branch': cb.get('branch', None), 'revision': cb.get('revision', None), 'project': '', } # apply info from passed sourcestamps onto the configured default # sourcestamp attributes for this codebase. ss.update(stampsByCodebase.get(codebase, {})) stampsWithDefaults.append(ss) # fill in any supplied sourcestamps that aren't for a codebase in the # scheduler's codebase dictionary for codebase in set(stampsByCodebase) - set(self.codebases): cb = stampsByCodebase[codebase] ss = { 'codebase': codebase, 'repository': cb.get('repository', ''), 'branch': cb.get('branch', None), 'revision': cb.get('revision', None), 'project': '', } stampsWithDefaults.append(ss) rv = yield self.addBuildsetForSourceStamps( sourcestamps=stampsWithDefaults, reason=reason, waited_for=waited_for, properties=properties, builderNames=builderNames, **kw) return rv def getCodebaseDict(self, codebase): # Hook for subclasses to change codebase parameters when a codebase does # not have a change associated with it. 
try: return defer.succeed(self.codebases[codebase]) except KeyError: return defer.fail() @defer.inlineCallbacks def addBuildsetForChanges(self, waited_for=False, reason='', external_idstring=None, changeids=None, builderNames=None, properties=None, **kw): if changeids is None: changeids = [] changesByCodebase = {} def get_last_change_for_codebase(codebase): return max(changesByCodebase[codebase], key=lambda change: change["changeid"]) # Changes are retrieved from database and grouped by their codebase for changeid in changeids: chdict = yield self.master.db.changes.getChange(changeid) changesByCodebase.setdefault(chdict["codebase"], []).append(chdict) sourcestamps = [] for codebase in sorted(self.codebases): if codebase not in changesByCodebase: # codebase has no changes # create a sourcestamp that has no changes cb = yield self.getCodebaseDict(codebase) ss = { 'codebase': codebase, 'repository': cb.get('repository', ''), 'branch': cb.get('branch', None), 'revision': cb.get('revision', None), 'project': '', } else: lastChange = get_last_change_for_codebase(codebase) ss = lastChange['sourcestampid'] sourcestamps.append(ss) # add one buildset, using the calculated sourcestamps bsid, brids = yield self.addBuildsetForSourceStamps( waited_for, sourcestamps=sourcestamps, reason=reason, external_idstring=external_idstring, builderNames=builderNames, properties=properties, **kw) return (bsid, brids) @defer.inlineCallbacks def addBuildsetForSourceStamps(self, waited_for=False, sourcestamps=None, reason='', external_idstring=None, properties=None, builderNames=None, **kw): if sourcestamps is None: sourcestamps = [] # combine properties if properties: properties.updateFromProperties(self.properties) else: properties = self.properties # make a fresh copy that we actually can modify safely properties = Properties.fromDict(properties.asDict()) # make extra info available from properties.render() properties.master = self.master properties.sourcestamps = [] properties.changes = [] 
for ss in sourcestamps: if isinstance(ss, int): # fetch actual sourcestamp and changes from data API properties.sourcestamps.append( (yield self.master.data.get(('sourcestamps', ss)))) properties.changes.extend( (yield self.master.data.get(('sourcestamps', ss, 'changes')))) else: # sourcestamp with no change, see addBuildsetForChanges properties.sourcestamps.append(ss) for c in properties.changes: properties.updateFromProperties(Properties.fromDict(c['properties'])) # apply the default builderNames if not builderNames: builderNames = self.builderNames # dynamically get the builder list to schedule builderNames = yield properties.render(builderNames) # Get the builder ids # Note that there is a data.updates.findBuilderId(name) # but that would merely only optimize the single builder case, while # probably the multiple builder case will be severely impacted by the # several db requests needed. builderids = list() for bldr in (yield self.master.data.get(('builders', ))): if bldr['name'] in builderNames: builderids.append(bldr['builderid']) # translate properties object into a dict as required by the # addBuildset method properties_dict = yield properties.render(properties.asDict()) bsid, brids = yield self.master.data.updates.addBuildset( scheduler=self.name, sourcestamps=sourcestamps, reason=reason, waited_for=waited_for, properties=properties_dict, builderids=builderids, external_idstring=external_idstring, **kw) return (bsid, brids) buildbot-3.4.0/master/buildbot/schedulers/basic.py000066400000000000000000000251731413250514000222200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from collections import defaultdict from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot import config from buildbot import util from buildbot.changes import changes from buildbot.changes.filter import ChangeFilter from buildbot.schedulers import base from buildbot.schedulers import dependent from buildbot.util import NotABranch from buildbot.util.codebase import AbsoluteSourceStampsMixin class BaseBasicScheduler(base.BaseScheduler): """ @param onlyImportant: If True, only important changes will be added to the buildset. @type onlyImportant: boolean """ compare_attrs = ('treeStableTimer', 'change_filter', 'fileIsImportant', 'onlyImportant', 'reason') _reactor = reactor # for tests fileIsImportant = None reason = '' class NotSet: pass def __init__(self, name, shouldntBeSet=NotSet, treeStableTimer=None, builderNames=None, branch=NotABranch, branches=NotABranch, fileIsImportant=None, categories=None, reason="The %(classname)s scheduler named '%(name)s' triggered this build", change_filter=None, onlyImportant=False, **kwargs): if shouldntBeSet is not self.NotSet: config.error( "pass arguments to schedulers using keyword arguments") if fileIsImportant and not callable(fileIsImportant): config.error( "fileIsImportant must be a callable") # initialize parent classes super().__init__(name, builderNames, **kwargs) self.treeStableTimer = treeStableTimer if fileIsImportant is not None: self.fileIsImportant = fileIsImportant self.onlyImportant = onlyImportant self.change_filter = self.getChangeFilter(branch=branch, branches=branches, change_filter=change_filter, categories=categories) # the IDelayedCall used to wake 
up when this scheduler's # treeStableTimer expires. self._stable_timers = defaultdict(lambda: None) self._stable_timers_lock = defer.DeferredLock() self.reason = util.bytes2unicode(reason % { 'name': name, 'classname': self.__class__.__name__ }) def getChangeFilter(self, branch, branches, change_filter, categories): raise NotImplementedError @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return yield self.startConsumingChanges(fileIsImportant=self.fileIsImportant, change_filter=self.change_filter, onlyImportant=self.onlyImportant) # if we have a treeStableTimer, if there are classified changes # out there, start their timers again if self.treeStableTimer: yield self.scanExistingClassifiedChanges() # otherwise, we don't care about classified # changes, so get rid of any hanging around from previous # configurations else: yield self.master.db.schedulers.flushChangeClassifications(self.serviceid) @defer.inlineCallbacks def deactivate(self): # the base deactivate will unsubscribe from new changes yield super().deactivate() if not self.enabled: return @util.deferredLocked(self._stable_timers_lock) def cancel_timers(): for timer in self._stable_timers.values(): if timer: timer.cancel() self._stable_timers.clear() yield cancel_timers() @util.deferredLocked('_stable_timers_lock') def gotChange(self, change, important): if not self.treeStableTimer: # if there's no treeStableTimer, we can completely ignore # unimportant changes if not important: return defer.succeed(None) # otherwise, we'll build it right away return self.addBuildsetForChanges(reason=self.reason, changeids=[change.number]) timer_name = self.getTimerNameForChange(change) # if we have a treeStableTimer # - for an important change, start the timer # - for an unimportant change, reset the timer if it is running if important or self._stable_timers[timer_name]: if self._stable_timers[timer_name]: self._stable_timers[timer_name].cancel() def fire_timer(): d = 
self.stableTimerFired(timer_name) d.addErrback(log.err, "while firing stable timer") self._stable_timers[timer_name] = self._reactor.callLater( self.treeStableTimer, fire_timer) # record the change's importance return self.master.db.schedulers.classifyChanges( self.serviceid, {change.number: important}) @defer.inlineCallbacks def scanExistingClassifiedChanges(self): # call gotChange for each classified change. This is called at startup # and is intended to re-start the treeStableTimer for any changes that # had not yet been built when the scheduler was stopped. # NOTE: this may double-call gotChange for changes that arrive just as # the scheduler starts up. In practice, this doesn't hurt anything. classifications = \ yield self.master.db.schedulers.getChangeClassifications(self.serviceid) # call gotChange for each change, after first fetching it from the db for changeid, important in classifications.items(): chdict = yield self.master.db.changes.getChange(changeid) if not chdict: continue change = yield changes.Change.fromChdict(self.master, chdict) yield self.gotChange(change, important) def getTimerNameForChange(self, change): raise NotImplementedError # see subclasses def getChangeClassificationsForTimer(self, sched_id, timer_name): """similar to db.schedulers.getChangeClassifications, but given timer name""" raise NotImplementedError # see subclasses @util.deferredLocked('_stable_timers_lock') @defer.inlineCallbacks def stableTimerFired(self, timer_name): # delete this now-fired timer, if the service has already been stopped # then just bail out if not self._stable_timers.pop(timer_name, None): return classifications = \ yield self.getChangeClassificationsForTimer(self.serviceid, timer_name) # just in case: databases do weird things sometimes! 
if not classifications: # pragma: no cover return changeids = sorted(classifications.keys()) yield self.addBuildsetForChanges(reason=self.reason, changeids=changeids) max_changeid = changeids[-1] # (changeids are sorted) yield self.master.db.schedulers.flushChangeClassifications( self.serviceid, less_than=max_changeid + 1) class SingleBranchScheduler(AbsoluteSourceStampsMixin, BaseBasicScheduler): def __init__(self, name, createAbsoluteSourceStamps=False, **kwargs): self.createAbsoluteSourceStamps = createAbsoluteSourceStamps super().__init__(name, **kwargs) @defer.inlineCallbacks def gotChange(self, change, important): if self.createAbsoluteSourceStamps: yield self.recordChange(change) yield super().gotChange(change, important) def getCodebaseDict(self, codebase): if self.createAbsoluteSourceStamps: return super().getCodebaseDict(codebase) return self.codebases[codebase] def getChangeFilter(self, branch, branches, change_filter, categories): if branch is NotABranch and not change_filter: config.error( "the 'branch' argument to SingleBranchScheduler is " + "mandatory unless change_filter is provided") elif branches is not NotABranch: config.error( "the 'branches' argument is not allowed for " + "SingleBranchScheduler") return ChangeFilter.fromSchedulerConstructorArgs( change_filter=change_filter, branch=branch, categories=categories) def getTimerNameForChange(self, change): return "only" # this class only uses one timer def getChangeClassificationsForTimer(self, sched_id, timer_name): return self.master.db.schedulers.getChangeClassifications(sched_id) class Scheduler(SingleBranchScheduler): "alias for SingleBranchScheduler" def __init__(self, *args, **kwargs): log.msg("WARNING: the name 'Scheduler' is deprecated; use " + "buildbot.schedulers.basic.SingleBranchScheduler instead " + "(note that this may require you to change your import " + "statement)") super().__init__(*args, **kwargs) class AnyBranchScheduler(BaseBasicScheduler): def getChangeFilter(self, branch, 
branches, change_filter, categories): assert branch is NotABranch return ChangeFilter.fromSchedulerConstructorArgs( change_filter=change_filter, branch=branches, categories=categories) def getTimerNameForChange(self, change): # Py2.6+: could be a namedtuple return (change.codebase, change.project, change.repository, change.branch) def getChangeClassificationsForTimer(self, sched_id, timer_name): # set in getTimerNameForChange codebase, project, repository, branch = timer_name return self.master.db.schedulers.getChangeClassifications( sched_id, branch=branch, repository=repository, codebase=codebase, project=project) # now at buildbot.schedulers.dependent, but keep the old name alive Dependent = dependent.Dependent buildbot-3.4.0/master/buildbot/schedulers/canceller.py000066400000000000000000000317731413250514000230720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import config from buildbot.data import resultspec from buildbot.util.service import BuildbotService from buildbot.util.ssfilter import SourceStampFilter from buildbot.util.ssfilter import extract_filter_values class _OldBuildFilterSet: def __init__(self): self._by_builder = {} def add_filter(self, builders, filter): assert builders is not None for builder in builders: self._by_builder.setdefault(builder, []).append(filter) def is_matched(self, builder_name, props): assert builder_name is not None filters = self._by_builder.get(builder_name, []) for filter in filters: if filter.is_matched(props): return True return False class _TrackedCancellable: def __init__(self, id_tuple, ss_tuples): self.id_tuple = id_tuple self.ss_tuples = ss_tuples class _OldBuildTracker: def __init__(self, filter, on_cancel_cancellable): self.filter = filter self.on_cancel_cancellable = on_cancel_cancellable # We need to track builds by build IDs so that when such build finishes we know what we # no longer need to track. We also need to track builds by source code branch, so that # we can cancel builds when branch sees new commits. Branch is identified by a tuple # of project, codebase, repository and branch. # # Note that a single branch may run multiple builds. 
# (is_build, id) -> _TrackedCancellable self.tracked_by_id_tuple = {} self.tracked_by_ss = {} def reconfig(self, filter): self.filter = filter def is_build_tracked(self, build_id): return (True, build_id) in self.tracked_by_id_tuple def is_buildrequest_tracked(self, br_id): return (False, br_id) in self.tracked_by_id_tuple def on_new_build(self, build_id, builder_name, sourcestamps): self._on_new_cancellable((True, build_id), builder_name, sourcestamps) def on_new_buildrequest(self, breq_id, builder_name, sourcestamps): self._on_new_cancellable((False, breq_id), builder_name, sourcestamps) def _on_new_cancellable(self, id_tuple, builder_name, sourcestamps): matched_ss = [] for ss in sourcestamps: if ss['branch'] is None: return # Note that it's enough to match build by a single branch from a single codebase if self.filter.is_matched(builder_name, ss): matched_ss.append(ss) if not matched_ss: return ss_tuples = [(ss['project'], ss['codebase'], ss['repository'], ss['branch']) for ss in matched_ss] tracked_canc = _TrackedCancellable(id_tuple, ss_tuples) self.tracked_by_id_tuple[id_tuple] = tracked_canc for ss_tuple in ss_tuples: canc_dict = self.tracked_by_ss.setdefault(ss_tuple, {}) canc_dict[tracked_canc.id_tuple] = tracked_canc def on_finished_build(self, build_id): self._on_complete_cancellable((True, build_id)) def on_complete_buildrequest(self, br_id): self._on_complete_cancellable((False, br_id)) def _on_complete_cancellable(self, id_tuple): tracked_canc = self.tracked_by_id_tuple.pop(id_tuple, None) if tracked_canc is None: return for ss_tuple in tracked_canc.ss_tuples: canc_dict = self.tracked_by_ss.get(ss_tuple, None) if canc_dict is None: raise KeyError('{}: Could not find finished builds by tuple {}'.format( self.__class__.__name__, ss_tuple)) del canc_dict[tracked_canc.id_tuple] if not canc_dict: del self.tracked_by_ss[ss_tuple] def on_change(self, change): ss_tuple = (change['project'], change['codebase'], change['repository'], change['branch']) 
canc_dict = self.tracked_by_ss.pop(ss_tuple, None) if canc_dict is None: return for tracked_canc in canc_dict.values(): del self.tracked_by_id_tuple[tracked_canc.id_tuple] if len(tracked_canc.ss_tuples) == 1: # majority of configurations will only contain single-codebase builds and for these # tracked_by_ss has been cleared above already. continue for i_ss_tuple in tracked_canc.ss_tuples: if i_ss_tuple == ss_tuple: continue # the current sourcestamp, which has already been cleared other_canc_dict = self.tracked_by_ss.get(i_ss_tuple, None) if other_canc_dict is None: raise KeyError('{}: Could not find running builds by tuple {}'.format( self.__class__.__name__, i_ss_tuple)) del other_canc_dict[tracked_canc.id_tuple] if not other_canc_dict: del self.tracked_by_ss[i_ss_tuple] for id_tuple in canc_dict.keys(): self.on_cancel_cancellable(id_tuple) class OldBuildCanceller(BuildbotService): compare_attrs = BuildbotService.compare_attrs + ('filters',) def checkConfig(self, name, filters): OldBuildCanceller.check_filters(filters) self.name = name self._change_consumer = None self._build_new_consumer = None self._build_finished_consumer = None self._buildrequest_new_consumer = None self._buildrequest_complete_consumer = None self._build_tracker = None self._reconfiguring = False self._finished_builds_while_reconfiguring = [] self._completed_buildrequests_while_reconfiguring = [] @defer.inlineCallbacks def reconfigService(self, name, filters): # While reconfiguring we acquire a list of currently running builds or pending build # requests and seed the build tracker with these. We need to ensure that even if some # builds or build requests finish during this process, the tracker gets to know about # the changes in correct order. In order to do that, we defer all build finish # notifications to after the reconfig finishes. 
# # Note that old builds are cancelled according to the configuration that was live when they # were created, so for already tracked builds we don't need to do anything. self._reconfiguring = True filter_set_object = OldBuildCanceller.filter_tuples_to_filter_set_object(filters) if self._build_tracker is None: self._build_tracker = _OldBuildTracker(filter_set_object, self._cancel_cancellable) else: self._build_tracker.reconfig(filter_set_object) all_running_buildrequests = yield self.master.data.get( ('buildrequests',), filters=[resultspec.Filter('complete', 'eq', [False])]) for breq in all_running_buildrequests: if self._build_tracker.is_buildrequest_tracked(breq['buildrequestid']): continue yield self._on_buildrequest_new(None, breq) all_running_builds = yield self.master.data.get( ('builds',), filters=[resultspec.Filter('complete', 'eq', [False])]) for build in all_running_builds: if self._build_tracker.is_build_tracked(build['buildid']): continue yield self._on_build_new(None, build) self._reconfiguring = False finished_builds = self._finished_builds_while_reconfiguring self._finished_builds_while_reconfiguring = [] completed_breqs = self._completed_buildrequests_while_reconfiguring self._completed_buildrequests_while_reconfiguring = [] for build in finished_builds: self._build_tracker.on_finished_build(build['buildid']) for breq in completed_breqs: self._build_tracker.on_complete_buildrequest(breq['buildrequestid']) @defer.inlineCallbacks def startService(self): yield super().startService() self._change_consumer = \ yield self.master.mq.startConsuming(self._on_change, ('changes', None, 'new')) self._build_new_consumer = \ yield self.master.mq.startConsuming(self._on_build_new, ('builds', None, 'new')) self._build_finished_consumer = \ yield self.master.mq.startConsuming(self._on_build_finished, ('builds', None, 'finished')) self._buildrequest_new_consumer = \ yield self.master.mq.startConsuming(self._on_buildrequest_new, ('buildrequests', None, 'new')) 
self._buildrequest_complete_consumer = \ yield self.master.mq.startConsuming(self._on_buildrequest_complete, ('buildrequests', None, 'complete')) @defer.inlineCallbacks def stopService(self): yield self._change_consumer.stopConsuming() yield self._build_new_consumer.stopConsuming() yield self._build_finished_consumer.stopConsuming() yield self._buildrequest_new_consumer.stopConsuming() yield self._buildrequest_complete_consumer.stopConsuming() @classmethod def check_filters(cls, filters): if not isinstance(filters, list): config.error('{}: The filters argument must be a list of tuples'.format( cls.__name__)) for filter in filters: if not isinstance(filter, tuple) or \ len(filter) != 2 or \ not isinstance(filter[1], SourceStampFilter): config.error(('{}: The filters argument must be a list of tuples each of which ' + 'contains builders as the first item and SourceStampFilter as ' + 'the second').format(cls.__name__)) builders, ss_filter = filter try: extract_filter_values(builders, 'builders') except Exception as e: config.error('{}: When processing filter builders: {}'.format( cls.__name__, str(e))) @classmethod def filter_tuples_to_filter_set_object(cls, filters): filter_set = _OldBuildFilterSet() for filter in filters: builders, ss_filter = filter filter_set.add_filter(extract_filter_values(builders, 'builders'), ss_filter) return filter_set def _on_change(self, key, change): self._build_tracker.on_change(change) @defer.inlineCallbacks def _on_build_new(self, key, build): buildrequest = yield self.master.data.get(('buildrequests', build['buildrequestid'])) builder = yield self.master.data.get(("builders", build['builderid'])) buildset = yield self.master.data.get(('buildsets', buildrequest['buildsetid'])) self._build_tracker.on_new_build(build['buildid'], builder['name'], buildset['sourcestamps']) @defer.inlineCallbacks def _on_buildrequest_new(self, key, breq): builder = yield self.master.data.get(("builders", breq['builderid'])) buildset = yield 
self.master.data.get(('buildsets', breq['buildsetid'])) self._build_tracker.on_new_buildrequest(breq['buildrequestid'], builder['name'], buildset['sourcestamps']) def _on_build_finished(self, key, build): if self._reconfiguring: self._finished_builds_while_reconfiguring.append(build) return self._build_tracker.on_finished_build(build['buildid']) def _on_buildrequest_complete(self, key, breq): if self._reconfiguring: self._completed_buildrequests_while_reconfiguring.append(breq) return self._build_tracker.on_complete_buildrequest(breq['buildrequestid']) def _cancel_cancellable(self, id_tuple): is_build, id = id_tuple if is_build: self.master.data.control('stop', {'reason': 'Build has been obsoleted by a newer commit'}, ('builds', str(id))) else: self.master.data.control('cancel', {'reason': 'Build request has been obsoleted by a newer commit'}, ('buildrequests', str(id))) buildbot-3.4.0/master/buildbot/schedulers/canceller_buildset.py000066400000000000000000000137731413250514000247650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import config from buildbot.data import resultspec from buildbot.process.results import FAILURE from buildbot.util.service import BuildbotService from buildbot.util.ssfilter import SourceStampFilter from buildbot.util.ssfilter import extract_filter_values class _FailingSingleBuilderConfig: def __init__(self, builders_to_cancel, filter): self.builders_to_cancel = builders_to_cancel self.filter = filter class _FailingBuilderConfig: def __init__(self): self._by_builder = {} def add_config(self, builders, builders_to_cancel, filter): assert builders is not None config = _FailingSingleBuilderConfig(builders_to_cancel, filter) for builder in builders: self._by_builder.setdefault(builder, []).append(config) def get_all_matched(self, builder_name, props): assert builder_name is not None configs = self._by_builder.get(builder_name, []) return [c for c in configs if c.filter.is_matched(props)] class FailingBuildsetCanceller(BuildbotService): compare_attrs = BuildbotService.compare_attrs + ('filters',) def checkConfig(self, name, filters): FailingBuildsetCanceller.check_filters(filters) self.name = name self._build_finished_consumer = None def reconfigService(self, name, filters): self.filters = FailingBuildsetCanceller.filter_tuples_to_filter_set_object(filters) @defer.inlineCallbacks def startService(self): yield super().startService() self._build_finished_consumer = \ yield self.master.mq.startConsuming(self._on_build_finished, ('builds', None, 'finished')) @defer.inlineCallbacks def stopService(self): yield self._build_finished_consumer.stopConsuming() @classmethod def check_filters(cls, filters): if not isinstance(filters, list): config.error('{}: The filters argument must be a list of tuples'.format( cls.__name__)) for filter in filters: if not isinstance(filter, tuple) or \ len(filter) != 3 or \ not isinstance(filter[2], SourceStampFilter): config.error(('{}: The filters argument must 
be a list of tuples each of which ' + 'contains builders to track as the first item, builders to cancel ' + 'as the second and SourceStampFilter as the third' ).format(cls.__name__)) builders, builders_to_cancel, ss_filter = filter try: extract_filter_values(builders, 'builders') if builders_to_cancel is not None: extract_filter_values(builders_to_cancel, 'builders_to_cancel') except Exception as e: config.error('{}: When processing filter builders: {}'.format( cls.__name__, str(e))) @classmethod def filter_tuples_to_filter_set_object(cls, filters): filter_set = _FailingBuilderConfig() for filter in filters: builders, builders_to_cancel, ss_filter = filter extract_filter_values(builders, 'builders') if builders_to_cancel is not None: builders_to_cancel = extract_filter_values(builders_to_cancel, 'builders_to_cancel') filter_set.add_config(builders, builders_to_cancel, ss_filter) return filter_set @defer.inlineCallbacks def _on_build_finished(self, key, build): if build['results'] != FAILURE: return buildrequest = yield self.master.data.get(('buildrequests', build['buildrequestid'])) builder = yield self.master.data.get(("builders", build['builderid'])) buildset = yield self.master.data.get(('buildsets', buildrequest['buildsetid'])) sourcestamps = buildset['sourcestamps'] builders_to_cancel = set() for ss in sourcestamps: configs = self.filters.get_all_matched(builder['name'], ss) for c in configs: if builders_to_cancel is not None: if c.builders_to_cancel is None: builders_to_cancel = None else: builders_to_cancel.update(c.builders_to_cancel) all_bs_buildrequests = yield self.master.data.get( ('buildrequests',), filters=[resultspec.Filter('buildsetid', 'eq', [buildset['bsid']]), resultspec.Filter('complete', 'eq', [False])]) all_bs_buildrequests = [br for br in all_bs_buildrequests if br['buildrequestid'] != buildrequest['buildrequestid']] for br in all_bs_buildrequests: brid = br['buildrequestid'] if brid == buildrequest['buildrequestid']: continue # this one has 
just failed br_builder = yield self.master.data.get(("builders", br['builderid'])) if builders_to_cancel is not None and br_builder['name'] not in builders_to_cancel: continue reason = 'Build has been cancelled because another build in the same buildset failed' self.master.data.control('cancel', {'reason': reason}, ('buildrequests', str(brid))) buildbot-3.4.0/master/buildbot/schedulers/dependent.py000066400000000000000000000142551413250514000231040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import config from buildbot import interfaces from buildbot import util from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.schedulers import base class Dependent(base.BaseScheduler): compare_attrs = ('upstream_name',) def __init__(self, name, upstream, builderNames, **kwargs): super().__init__(name, builderNames, **kwargs) if not interfaces.IScheduler.providedBy(upstream): config.error( "upstream must be another Scheduler instance") self.upstream_name = upstream.name self._buildset_new_consumer = None self._buildset_complete_consumer = None self._cached_upstream_bsids = None # the subscription lock makes sure that we're done inserting a # subscription into the DB before registering that the buildset is # complete. self._subscription_lock = defer.DeferredLock() @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return self._buildset_new_consumer = yield self.master.mq.startConsuming( self._buildset_new_cb, ('buildsets', None, 'new')) # TODO: refactor to subscribe only to interesting buildsets, and # subscribe to them directly, via the data API self._buildset_complete_consumer = yield self.master.mq.startConsuming( self._buildset_complete_cb, ('buildsets', None, 'complete')) # check for any buildsets completed before we started yield self._checkCompletedBuildsets(None, ) @defer.inlineCallbacks def deactivate(self): # the base deactivate will unsubscribe from new changes yield super().deactivate() if not self.enabled: return if self._buildset_new_consumer: self._buildset_new_consumer.stopConsuming() if self._buildset_complete_consumer: self._buildset_complete_consumer.stopConsuming() self._cached_upstream_bsids = None @util.deferredLocked('_subscription_lock') def _buildset_new_cb(self, key, msg): # check if this was submitted by our upstream if msg['scheduler'] != self.upstream_name: return 
None # record our interest in this buildset return self._addUpstreamBuildset(msg['bsid']) def _buildset_complete_cb(self, key, msg): return self._checkCompletedBuildsets(msg['bsid']) @util.deferredLocked('_subscription_lock') @defer.inlineCallbacks def _checkCompletedBuildsets(self, bsid): subs = yield self._getUpstreamBuildsets() sub_bsids = [] for (sub_bsid, sub_ssids, sub_complete, sub_results) in subs: # skip incomplete builds, handling the case where the 'complete' # column has not been updated yet if not sub_complete and sub_bsid != bsid: continue # build a dependent build if the status is appropriate. Note that # this uses the sourcestamps from the buildset, not from any of the # builds performed to complete the buildset (since those might # differ from one another) if sub_results in (SUCCESS, WARNINGS): yield self.addBuildsetForSourceStamps( sourcestamps=sub_ssids.copy(), reason='downstream') sub_bsids.append(sub_bsid) # and regardless of status, remove the subscriptions yield self._removeUpstreamBuildsets(sub_bsids) @defer.inlineCallbacks def _updateCachedUpstreamBuilds(self): if self._cached_upstream_bsids is None: bsids = yield self.master.db.state.getState(self.objectid, 'upstream_bsids', []) self._cached_upstream_bsids = bsids @defer.inlineCallbacks def _getUpstreamBuildsets(self): # get a list of (bsid, ssids, complete, results) for all # upstream buildsets yield self._updateCachedUpstreamBuilds() changed = False rv = [] for bsid in self._cached_upstream_bsids[:]: buildset = yield self.master.data.get(('buildsets', str(bsid))) if not buildset: self._cached_upstream_bsids.remove(bsid) changed = True continue ssids = [ss['ssid'] for ss in buildset['sourcestamps']] rv.append((bsid, ssids, buildset['complete'], buildset['results'])) if changed: yield self.master.db.state.setState(self.objectid, 'upstream_bsids', self._cached_upstream_bsids) return rv @defer.inlineCallbacks def _addUpstreamBuildset(self, bsid): yield self._updateCachedUpstreamBuilds() if 
bsid not in self._cached_upstream_bsids: self._cached_upstream_bsids.append(bsid) yield self.master.db.state.setState(self.objectid, 'upstream_bsids', self._cached_upstream_bsids) @defer.inlineCallbacks def _removeUpstreamBuildsets(self, bsids): yield self._updateCachedUpstreamBuilds() old = set(self._cached_upstream_bsids) self._cached_upstream_bsids = list(old - set(bsids)) yield self.master.db.state.setState(self.objectid, 'upstream_bsids', self._cached_upstream_bsids) buildbot-3.4.0/master/buildbot/schedulers/filter.py000066400000000000000000000015261413250514000224200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # old (pre-0.8.4) location for ChangeFilter from buildbot.changes.filter import ChangeFilter _hush_pyflakes = ChangeFilter # keep pyflakes happy buildbot-3.4.0/master/buildbot/schedulers/forcesched.py000066400000000000000000000752671413250514000232550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re import traceback from twisted.internet import defer from twisted.python.reflect import accumulateClassList from buildbot import config from buildbot.process.properties import Properties from buildbot.reporters.mail import VALID_EMAIL_ADDR from buildbot.schedulers import base from buildbot.util import identifiers class ValidationError(ValueError): pass class CollectedValidationError(ValueError): def __init__(self, errors): self.errors = errors super().__init__("\n".join([k + ":" + v for k, v in errors.items()])) class ValidationErrorCollector: def __init__(self): self.errors = {} @defer.inlineCallbacks def collectValidationErrors(self, name, fn, *args, **kwargs): res = None try: res = yield fn(*args, **kwargs) except CollectedValidationError as e: for error_name, e in e.errors.items(): self.errors[error_name] = e except ValueError as e: self.errors[name] = str(e) return res def maybeRaiseCollectedErrors(self): errors = self.errors if errors: raise CollectedValidationError(errors) DefaultField = object() # sentinel object to signal default behavior class BaseParameter: """ BaseParameter provides a base implementation for property customization """ spec_attributes = ["name", "fullName", "label", "tablabel", "type", "default", "required", "multiple", "regex", "hide", "maxsize", "autopopulate"] name = "" parentName = None label = "" tablabel = "" type = "" default = "" required = False multiple = False regex = None debug = True hide = False maxsize = None autopopulate = None @property def fullName(self): """A full name, intended to uniquely identify a parameter""" # join with '_' if both are set (cannot put '.', because it is used as # **kwargs) if 
self.parentName and self.name: return self.parentName + '_' + self.name # otherwise just use the one that is set # (this allows empty name for "anonymous nests") return self.name or self.parentName def setParent(self, parent): self.parentName = parent.fullName if parent else None def __init__(self, name, label=None, tablabel=None, regex=None, **kw): """ @param name: the name of the field, used during posting values back to the scheduler. This is not necessarily a UI value, and there may be restrictions on the characters allowed for this value. For example, HTML would require this field to avoid spaces and other punctuation ('-', '.', and '_' allowed) @type name: unicode @param label: (optional) the name of the field, used for UI display. @type label: unicode or None (to use 'name') @param regex: (optional) regex to validate the value with. Not used by all subclasses @type regex: unicode or regex """ if name in ["owner", "builderNames", "builderid"]: config.error("{} cannot be used as a parameter name, because it is reserved".format( name)) self.name = name self.label = name if label is None else label self.tablabel = self.label if tablabel is None else tablabel if regex: self.regex = re.compile(regex) if 'value' in kw: config.error(("Use default='{}' instead of value=... to give a " "default Parameter value").format(kw['value'])) # all other properties are generically passed via **kw self.__dict__.update(kw) def getFromKwargs(self, kwargs): """Simple customization point for child classes that do not need the other parameters supplied to updateFromKwargs. Return the value for the property named 'self.name'. The default implementation converts from a list of items, validates using the optional regex field and calls 'parse_from_args' for the final conversion. 
""" args = kwargs.get(self.fullName, []) # delete white space for args for arg in args: if isinstance(arg, str) and not arg.strip(): args.remove(arg) if not args: if self.required: raise ValidationError("'{}' needs to be specified".format(self.label)) if self.multiple: args = self.default else: args = [self.default] if self.regex: for arg in args: if not self.regex.match(arg): raise ValidationError("{}:'{}' does not match pattern '{}'".format(self.label, arg, self.regex.pattern)) if self.maxsize is not None: for arg in args: if len(arg) > self.maxsize: raise ValidationError("{}: is too large {} > {}".format(self.label, len(arg), self.maxsize)) try: arg = self.parse_from_args(args) except Exception as e: # an exception will just display an alert in the web UI # also log the exception if self.debug: traceback.print_exc() raise e if arg is None: raise ValidationError("need {}: no default provided by config".format(self.fullName)) return arg def updateFromKwargs(self, properties, kwargs, collector, **unused): """Primary entry point to turn 'kwargs' into 'properties'""" properties[self.name] = self.getFromKwargs(kwargs) def parse_from_args(self, l): """Secondary customization point, called from getFromKwargs to turn a validated value into a single property value""" if self.multiple: return [self.parse_from_arg(arg) for arg in l] return self.parse_from_arg(l[0]) def parse_from_arg(self, s): return s def getSpec(self): spec_attributes = [] accumulateClassList(self.__class__, 'spec_attributes', spec_attributes) ret = {} for i in spec_attributes: ret[i] = getattr(self, i) return ret class FixedParameter(BaseParameter): """A fixed parameter that cannot be modified by the user.""" type = "fixed" hide = True default = "" def parse_from_args(self, l): return self.default class StringParameter(BaseParameter): """A simple string parameter""" spec_attributes = ["size"] type = "text" size = 10 def parse_from_arg(self, s): return s class TextParameter(StringParameter): """A generic 
string parameter that may span multiple lines""" spec_attributes = ["cols", "rows"] type = "textarea" cols = 80 rows = 20 def value_to_text(self, value): return str(value) class IntParameter(StringParameter): """An integer parameter""" type = "int" default = 0 parse_from_arg = int # will throw an exception if parse fail class BooleanParameter(BaseParameter): """A boolean parameter""" type = "bool" def getFromKwargs(self, kwargs): return kwargs.get(self.fullName, [self.default]) == [True] class UserNameParameter(StringParameter): """A username parameter to supply the 'owner' of a build""" spec_attributes = ["need_email"] type = "username" default = "" size = 30 need_email = True def __init__(self, name="username", label="Your name:", **kw): super().__init__(name, label, **kw) def parse_from_arg(self, s): if not s and not self.required: return s if self.need_email: res = VALID_EMAIL_ADDR.search(s) if res is None: raise ValidationError(("{}: please fill in email address in the " "form 'User '").format(self.name)) return s class ChoiceStringParameter(BaseParameter): """A list of strings, allowing the selection of one of the predefined values. 
The 'strict' parameter controls whether values outside the predefined list of choices are allowed""" spec_attributes = ["choices", "strict"] type = "list" choices = [] strict = True def parse_from_arg(self, s): if self.strict and s not in self.choices: raise ValidationError("'{}' does not belong to list of available choices '{}'".format(s, self.choices)) return s def getChoices(self, master, scheduler, buildername): return self.choices class InheritBuildParameter(ChoiceStringParameter): """A parameter that takes its values from another build""" type = ChoiceStringParameter.type name = "inherit" compatible_builds = None def getChoices(self, master, scheduler, buildername): return self.compatible_builds(master, buildername) def getFromKwargs(self, kwargs): raise ValidationError( "InheritBuildParameter can only be used by properties") def updateFromKwargs(self, master, properties, changes, kwargs, **unused): arg = kwargs.get(self.fullName, [""])[0] split_arg = arg.split(" ")[0].split("/") if len(split_arg) != 2: raise ValidationError("bad build: {}".format(arg)) builder_name, build_num = split_arg builder_dict = master.data.get(('builders', builder_name)) if builder_dict is None: raise ValidationError("unknown builder: {} in {}".format(builder_name, arg)) build_dict = master.data.get(('builders', builder_name, 'builds', build_num), fields=['properties']) if build_dict is None: raise ValidationError("unknown build: {} in {}".format(builder_name, arg)) props = {self.name: (arg.split(" ")[0])} for name, (value, source) in build_dict['properties']: if source == "Force Build Form": if name == "owner": name = "orig_owner" props[name] = value properties.update(props) # FIXME: this does not do what we expect, but updateFromKwargs is not used either. # This needs revisiting when the build parameters are fixed: # changes.extend(b.changes) class WorkerChoiceParameter(ChoiceStringParameter): """A parameter that lets the worker name be explicitly chosen. 
This parameter works in conjunction with 'buildbot.process.builder.enforceChosenWorker', which should be added as the 'canStartBuild' parameter to the Builder. The "anySentinel" parameter represents the sentinel value to specify that there is no worker preference. """ anySentinel = '-any-' label = 'Worker' required = False strict = False def __init__(self, name='workername', **kwargs): super().__init__(name, **kwargs) def updateFromKwargs(self, kwargs, **unused): workername = self.getFromKwargs(kwargs) if workername == self.anySentinel: # no preference, so don't set a parameter at all return super().updateFromKwargs(kwargs=kwargs, **unused) @defer.inlineCallbacks def getChoices(self, master, scheduler, buildername): if buildername is None: # this is the "Force All Builds" page workers = yield self.master.data.get(('workers',)) else: builder = yield self.master.data.get(('builders', buildername)) workers = yield self.master.data.get(('builders', builder['builderid'], 'workers')) workernames = [worker['name'] for worker in workers] workernames.sort() workernames.insert(0, self.anySentinel) return workernames class FileParameter(BaseParameter): """A parameter which allows to download a whole file and store it as a property or patch """ type = 'file' maxsize = 1024 * 1024 * 10 # 10M class NestedParameter(BaseParameter): """A 'parent' parameter for a set of related parameters. This provides a logical grouping for the child parameters. Typically, the 'fullName' of the child parameters mix in the parent's 'fullName'. This allows for a field to appear multiple times in a form (for example, two codebases each have a 'branch' field). If the 'name' of the parent is the empty string, then the parent's name does not mix in with the child 'fullName'. This is useful when a field will not appear multiple time in a scheduler but the logical grouping is helpful. The result of a NestedParameter is typically a dictionary, with the key/value being the name/value of the children. 
""" spec_attributes = [ "layout", "columns"] # field is recursive, and thus managed in custom getSpec type = 'nested' layout = 'vertical' fields = None columns = None def __init__(self, name, fields, **kwargs): super().__init__(fields=fields, name=name, **kwargs) # reasonable defaults for the number of columns if self.columns is None: num_visible_fields = len( [field for field in fields if not field.hide]) if num_visible_fields >= 4: self.columns = 2 else: self.columns = 1 if self.columns > 4: config.error( "UI only support up to 4 columns in nested parameters") # fix up the child nodes with the parent (use None for now): self.setParent(None) def setParent(self, parent): super().setParent(parent) for field in self.fields: # pylint: disable=not-an-iterable field.setParent(self) @defer.inlineCallbacks def collectChildProperties(self, kwargs, properties, collector, **kw): """Collapse the child values into a dictionary. This is intended to be called by child classes to fix up the fullName->name conversions.""" childProperties = {} for field in self.fields: # pylint: disable=not-an-iterable yield collector.collectValidationErrors(field.fullName, field.updateFromKwargs, kwargs=kwargs, properties=childProperties, collector=collector, **kw) kwargs[self.fullName] = childProperties @defer.inlineCallbacks def updateFromKwargs(self, kwargs, properties, collector, **kw): """By default, the child values will be collapsed into a dictionary. 
If the parent is anonymous, this dictionary is the top-level properties.""" yield self.collectChildProperties(kwargs=kwargs, properties=properties, collector=collector, **kw) # default behavior is to set a property # -- use setdefault+update in order to collapse 'anonymous' nested # parameters correctly if self.name: d = properties.setdefault(self.name, {}) else: # if there's no name, collapse this nest all the way d = properties d.update(kwargs[self.fullName]) def getSpec(self): ret = super().getSpec() # pylint: disable=not-an-iterable ret['fields'] = [field.getSpec() for field in self.fields] return ret ParameterGroup = NestedParameter class AnyPropertyParameter(NestedParameter): """A generic property parameter, where both the name and value of the property must be given.""" type = NestedParameter.type def __init__(self, name, **kw): fields = [ StringParameter(name='name', label="Name:"), StringParameter(name='value', label="Value:"), ] super().__init__(name, label='', fields=fields, **kw) def getFromKwargs(self, kwargs): raise ValidationError( "AnyPropertyParameter can only be used by properties") @defer.inlineCallbacks def updateFromKwargs(self, master, properties, kwargs, collector, **kw): yield self.collectChildProperties(master=master, properties=properties, kwargs=kwargs, collector=collector, **kw) pname = kwargs[self.fullName].get("name", "") pvalue = kwargs[self.fullName].get("value", "") if not pname: return validation = master.config.validation pname_validate = validation['property_name'] pval_validate = validation['property_value'] if not pname_validate.match(pname) \ or not pval_validate.match(pvalue): raise ValidationError("bad property name='{}', value='{}'".format(pname, pvalue)) properties[pname] = pvalue class CodebaseParameter(NestedParameter): """A parameter whose result is a codebase specification instead of a property""" type = NestedParameter.type codebase = '' def __init__(self, codebase, name=None, label=None, branch=DefaultField, 
revision=DefaultField, repository=DefaultField, project=DefaultField, patch=None, **kwargs): """ A set of properties that will be used to generate a codebase dictionary. The branch/revision/repository/project should each be a parameter that will map to the corresponding value in the sourcestamp. Use None to disable the field. @param codebase: name of the codebase; used as key for the sourcestamp set @type codebase: unicode @param name: optional override for the name-currying for the subfields @type codebase: unicode @param label: optional override for the label for this set of parameters @type codebase: unicode """ name = name or codebase if label is None and codebase: label = "Codebase: " + codebase fields_dict = dict(branch=branch, revision=revision, repository=repository, project=project) for k, v in fields_dict.items(): if v is DefaultField: v = StringParameter(name=k, label=k.capitalize() + ":") elif isinstance(v, str): v = FixedParameter(name=k, default=v) fields_dict[k] = v fields = [val for k, val in sorted(fields_dict.items(), key=lambda x: x[0]) if val] if patch is not None: if patch.name != "patch": config.error( "patch parameter of a codebase must be named 'patch'") fields.append(patch) if self.columns is None and 'columns' not in kwargs: self.columns = 1 super().__init__(name=name, label=label, codebase=codebase, fields=fields, **kwargs) def createSourcestamp(self, properties, kwargs): # default, just return the things we put together return kwargs.get(self.fullName, {}) @defer.inlineCallbacks def updateFromKwargs(self, sourcestamps, kwargs, properties, collector, **kw): yield self.collectChildProperties(sourcestamps=sourcestamps, properties=properties, kwargs=kwargs, collector=collector, **kw) # convert the "property" to a sourcestamp ss = self.createSourcestamp(properties, kwargs) if ss is not None: patch = ss.pop('patch', None) if patch is not None: for k, v in patch.items(): ss['patch_' + k] = v sourcestamps[self.codebase] = ss def 
oneCodebase(**kw): return [CodebaseParameter('', **kw)] class PatchParameter(NestedParameter): """A patch parameter contains pre-configure UI for all the needed components for a sourcestamp patch """ columns = 1 def __init__(self, **kwargs): name = kwargs.pop('name', 'patch') default_fields = [ FileParameter('body'), IntParameter('level', default=1), StringParameter('author', default=""), StringParameter('comment', default=""), StringParameter('subdir', default=".") ] fields = [ kwargs.pop(field.name, field) for field in default_fields ] super().__init__(name, fields=fields, **kwargs) class ForceScheduler(base.BaseScheduler): """ ForceScheduler implements the backend for a UI to allow customization of builds. For example, a web form be populated to trigger a build. """ compare_attrs = base.BaseScheduler.compare_attrs + \ ('builderNames', 'reason', 'username', 'forcedProperties') def __init__(self, name, builderNames, username=UserNameParameter(), reason=StringParameter( name="reason", default="force build", size=20), reasonString="A build was forced by '%(owner)s': %(reason)s", buttonName=None, codebases=None, label=None, properties=None): """ Initialize a ForceScheduler. The UI will provide a set of fields to the user; these fields are driven by a corresponding child class of BaseParameter. Use NestedParameter to provide logical groupings for parameters. The branch/revision/repository/project fields are deprecated and provided only for backwards compatibility. Using a Codebase(name='') will give the equivalent behavior. 
@param name: name of this scheduler (used as a key for state) @type name: unicode @param builderNames: list of builders this scheduler may start @type builderNames: list of unicode @param username: the "owner" for a build (may not be shown depending on the Auth configuration for the master) @type username: BaseParameter @param reason: the "reason" for a build @type reason: BaseParameter @param codebases: the codebases for a build @type codebases: list of string's or CodebaseParameter's; None will generate a default, but CodebaseParameter(codebase='', hide=True) will remove all codebases @param properties: extra properties to configure the build @type properties: list of BaseParameter's """ if not self.checkIfType(name, str): config.error("ForceScheduler name must be a unicode string: %r" % name) if not name: config.error("ForceScheduler name must not be empty: %r" % name) if not identifiers.ident_re.match(name): config.error("ForceScheduler name must be an identifier: %r" % name) if not self.checkIfListOfType(builderNames, (str,)): config.error("ForceScheduler '{}': builderNames must be a list of strings: {}".format( name, repr(builderNames))) if self.checkIfType(reason, BaseParameter): self.reason = reason else: config.error("ForceScheduler '{}': reason must be a StringParameter: {}".format( name, repr(reason))) if properties is None: properties = [] if not self.checkIfListOfType(properties, BaseParameter): config.error(("ForceScheduler '{}': properties must be " "a list of BaseParameters: {}").format(name, repr(properties))) if self.checkIfType(username, BaseParameter): self.username = username else: config.error("ForceScheduler '{}': username must be a StringParameter: {}".format(name, repr(username))) self.forcedProperties = [] self.label = name if label is None else label # Use the default single codebase form if none are provided if codebases is None: codebases = [CodebaseParameter(codebase='')] elif not codebases: config.error(("ForceScheduler '{}': 
'codebases' cannot be empty;" " use [CodebaseParameter(codebase='', hide=True)] if needed: {} ").format( name, repr(codebases))) elif not isinstance(codebases, list): config.error(("ForceScheduler '{}': 'codebases' should be a list of strings " "or CodebaseParameter, not {}").format(name, type(codebases))) codebase_dict = {} for codebase in codebases: if isinstance(codebase, str): codebase = CodebaseParameter(codebase=codebase) elif not isinstance(codebase, CodebaseParameter): config.error(("ForceScheduler '{}': 'codebases' must be a list of strings " "or CodebaseParameter objects: {}").format(name, repr(codebases))) self.forcedProperties.append(codebase) codebase_dict[codebase.codebase] = dict( branch='', repository='', revision='') super().__init__(name=name, builderNames=builderNames, properties={}, codebases=codebase_dict) if properties: self.forcedProperties.extend(properties) # this is used to simplify the template self.all_fields = [NestedParameter(name='', fields=[username, reason])] self.all_fields.extend(self.forcedProperties) self.reasonString = reasonString self.buttonName = buttonName or name def checkIfType(self, obj, chkType): return isinstance(obj, chkType) def checkIfListOfType(self, obj, chkType): isListOfType = True if self.checkIfType(obj, list): for item in obj: if not self.checkIfType(item, chkType): isListOfType = False break else: isListOfType = False return isListOfType @defer.inlineCallbacks def gatherPropertiesAndChanges(self, collector, **kwargs): properties = {} changeids = [] sourcestamps = {} for param in self.forcedProperties: yield collector.collectValidationErrors(param.fullName, param.updateFromKwargs, master=self.master, properties=properties, changes=changeids, sourcestamps=sourcestamps, collector=collector, kwargs=kwargs) changeids = [type(a) == int and a or a.number for a in changeids] real_properties = Properties() for pname, pvalue in properties.items(): real_properties.setProperty(pname, pvalue, "Force Build Form") return 
(real_properties, changeids, sourcestamps) @defer.inlineCallbacks def computeBuilderNames(self, builderNames=None, builderid=None): if builderNames is None: if builderid is not None: builder = yield self.master.data.get(('builders', str(builderid))) builderNames = [builder['name']] else: builderNames = self.builderNames else: builderNames = sorted( set(builderNames).intersection(self.builderNames)) return builderNames @defer.inlineCallbacks def force(self, owner, builderNames=None, builderid=None, **kwargs): """ We check the parameters, and launch the build, if everything is correct """ builderNames = yield self.computeBuilderNames(builderNames, builderid) if not builderNames: raise KeyError("builderNames not specified or not supported") # Currently the validation code expects all kwargs to be lists # I don't want to refactor that now so much sure we comply... kwargs = dict((k, [v]) if not isinstance(v, list) else (k, v) for k, v in kwargs.items()) # probably need to clean that out later as the IProperty is already a # validation mechanism collector = ValidationErrorCollector() reason = yield collector.collectValidationErrors(self.reason.fullName, self.reason.getFromKwargs, kwargs) if owner is None or owner == "anonymous": owner = yield collector.collectValidationErrors(self.username.fullName, self.username.getFromKwargs, kwargs) properties, changeids, sourcestamps = yield self.gatherPropertiesAndChanges( collector, **kwargs) collector.maybeRaiseCollectedErrors() properties.setProperty("reason", reason, "Force Build Form") properties.setProperty("owner", owner, "Force Build Form") r = self.reasonString % {'owner': owner, 'reason': reason} # turn sourcestamps into a list for cb, ss in sourcestamps.items(): ss['codebase'] = cb sourcestamps = list(sourcestamps.values()) # everything is validated, we can create our source stamp, and # buildrequest res = yield self.addBuildsetForSourceStampsWithDefaults( reason=r, sourcestamps=sourcestamps, properties=properties, 
builderNames=builderNames, ) return res buildbot-3.4.0/master/buildbot/schedulers/manager.py000066400000000000000000000016601413250514000225440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.process.measured_service import MeasuredBuildbotServiceManager class SchedulerManager(MeasuredBuildbotServiceManager): name = "SchedulerManager" managed_services_name = "schedulers" config_attr = "schedulers" buildbot-3.4.0/master/buildbot/schedulers/timed.py000066400000000000000000000424751413250514000222450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from zope.interface import implementer from buildbot import config from buildbot import util from buildbot.changes.filter import ChangeFilter from buildbot.interfaces import ITriggerableScheduler from buildbot.process import buildstep from buildbot.process import properties from buildbot.schedulers import base from buildbot.util import croniter from buildbot.util.codebase import AbsoluteSourceStampsMixin class Timed(AbsoluteSourceStampsMixin, base.BaseScheduler): """ Parent class for timed schedulers. This takes care of the (surprisingly subtle) mechanics of ensuring that each timed actuation runs to completion before the service stops. """ compare_attrs = ('reason', 'createAbsoluteSourceStamps', 'onlyIfChanged', 'branch', 'fileIsImportant', 'change_filter', 'onlyImportant') reason = '' class NoBranch: pass def __init__(self, name, builderNames, reason='', createAbsoluteSourceStamps=False, onlyIfChanged=False, branch=NoBranch, change_filter=None, fileIsImportant=None, onlyImportant=False, **kwargs): super().__init__(name, builderNames, **kwargs) # tracking for when to start the next build self.lastActuated = None # A lock to make sure that each actuation occurs without interruption. # This lock governs actuateAt, actuateAtTimer, and actuateOk self.actuationLock = defer.DeferredLock() self.actuateOk = False self.actuateAt = None self.actuateAtTimer = None self.reason = util.bytes2unicode(reason % {'name': name}) self.branch = branch self.change_filter = ChangeFilter.fromSchedulerConstructorArgs( change_filter=change_filter) self.createAbsoluteSourceStamps = createAbsoluteSourceStamps self.onlyIfChanged = onlyIfChanged if fileIsImportant and not callable(fileIsImportant): config.error( "fileIsImportant must be a callable") self.fileIsImportant = fileIsImportant # If True, only important changes will be added to the buildset. 
self.onlyImportant = onlyImportant self._reactor = reactor # patched by tests @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return None # no need to lock this # nothing else can run before the service is started self.actuateOk = True # get the scheduler's last_build time (note: only done at startup) self.lastActuated = yield self.getState('last_build', None) # schedule the next build yield self.scheduleNextBuild() if self.onlyIfChanged or self.createAbsoluteSourceStamps: yield self.startConsumingChanges(fileIsImportant=self.fileIsImportant, change_filter=self.change_filter, onlyImportant=self.onlyImportant) else: yield self.master.db.schedulers.flushChangeClassifications(self.serviceid) return None @defer.inlineCallbacks def deactivate(self): yield super().deactivate() if not self.enabled: return None # shut down any pending actuation, and ensure that we wait for any # current actuation to complete by acquiring the lock. This ensures # that no build will be scheduled after deactivate is complete. def stop_actuating(): self.actuateOk = False self.actuateAt = None if self.actuateAtTimer: self.actuateAtTimer.cancel() self.actuateAtTimer = None yield self.actuationLock.run(stop_actuating) return None # Scheduler methods def gotChange(self, change, important): # both important and unimportant changes on our branch are recorded, as # we will include all such changes in any buildsets we start. Note # that we must check the branch here because it is not included in the # change filter. 
if self.branch is not Timed.NoBranch and change.branch != self.branch: return defer.succeed(None) # don't care about this change d = self.master.db.schedulers.classifyChanges( self.serviceid, {change.number: important}) if self.createAbsoluteSourceStamps: d.addCallback(lambda _: self.recordChange(change)) return d @defer.inlineCallbacks def startBuild(self): if not self.enabled: log.msg(format='ignoring build from %(name)s because scheduler ' 'has been disabled by the user', name=self.name) return # use the collected changes to start a build scheds = self.master.db.schedulers classifications = yield scheds.getChangeClassifications(self.serviceid) # if onlyIfChanged is True, then we will skip this build if no # important changes have occurred since the last invocation if self.onlyIfChanged and not any(classifications.values()): log.msg(("{} scheduler <{}>: skipping build " + "- No important changes").format(self.__class__.__name__, self.name)) return changeids = sorted(classifications.keys()) if changeids: max_changeid = changeids[-1] # (changeids are sorted) yield self.addBuildsetForChanges(reason=self.reason, changeids=changeids) yield scheds.flushChangeClassifications(self.serviceid, less_than=max_changeid + 1) else: # There are no changes, but onlyIfChanged is False, so start # a build of the latest revision, whatever that is sourcestamps = [dict(codebase=cb) for cb in self.codebases] yield self.addBuildsetForSourceStampsWithDefaults( reason=self.reason, sourcestamps=sourcestamps) def getCodebaseDict(self, codebase): if self.createAbsoluteSourceStamps: return super().getCodebaseDict(codebase) return self.codebases[codebase] # Timed methods def getNextBuildTime(self, lastActuation): """ Called by to calculate the next time to actuate a BuildSet. Override in subclasses. To trigger a fresh call to this method, use L{rescheduleNextBuild}. 
@param lastActuation: the time of the last actuation, or None for never @returns: a Deferred firing with the next time a build should occur (in the future), or None for never. """ raise NotImplementedError def scheduleNextBuild(self): """ Schedule the next build, re-invoking L{getNextBuildTime}. This can be called at any time, and it will avoid contention with builds being started concurrently. @returns: Deferred """ return self.actuationLock.run(self._scheduleNextBuild_locked) # utilities def now(self): "Similar to util.now, but patchable by tests" return util.now(self._reactor) @defer.inlineCallbacks def _scheduleNextBuild_locked(self): # clear out the existing timer if self.actuateAtTimer: self.actuateAtTimer.cancel() self.actuateAtTimer = None # calculate the new time actuateAt = yield self.getNextBuildTime(self.lastActuated) if actuateAt is None: self.actuateAt = None else: # set up the new timer now = self.now() self.actuateAt = max(actuateAt, now) untilNext = self.actuateAt - now if untilNext == 0: log.msg(("{} scheduler <{}>: missed scheduled build time" " - building immediately").format(self.__class__.__name__, self.name)) self.actuateAtTimer = self._reactor.callLater(untilNext, self._actuate) @defer.inlineCallbacks def _actuate(self): # called from the timer when it's time to start a build self.actuateAtTimer = None self.lastActuated = self.actuateAt @defer.inlineCallbacks def set_state_and_start(): # bail out if we shouldn't be actuating anymore if not self.actuateOk: return # mark the last build time self.actuateAt = None yield self.setState('last_build', self.lastActuated) try: # start the build yield self.startBuild() except Exception as e: log.err(e, 'while actuating') finally: # schedule the next build (noting the lock is already held) yield self._scheduleNextBuild_locked() yield self.actuationLock.run(set_state_and_start) class Periodic(Timed): compare_attrs = ('periodicBuildTimer',) def __init__(self, name, builderNames, periodicBuildTimer, 
reason="The Periodic scheduler named '%(name)s' triggered this build", **kwargs): super().__init__(name, builderNames, reason=reason, **kwargs) if periodicBuildTimer <= 0: config.error("periodicBuildTimer must be positive") self.periodicBuildTimer = periodicBuildTimer def getNextBuildTime(self, lastActuated): if lastActuated is None: return defer.succeed(self.now()) # meaning "ASAP" return defer.succeed(lastActuated + self.periodicBuildTimer) class NightlyBase(Timed): compare_attrs = ('minute', 'hour', 'dayOfMonth', 'month', 'dayOfWeek') def __init__(self, name, builderNames, minute=0, hour='*', dayOfMonth='*', month='*', dayOfWeek='*', **kwargs): super().__init__(name, builderNames, **kwargs) self.minute = minute self.hour = hour self.dayOfMonth = dayOfMonth self.month = month self.dayOfWeek = dayOfWeek def _timeToCron(self, time, isDayOfWeek=False): if isinstance(time, int): if isDayOfWeek: # Convert from Mon = 0 format to Sun = 0 format for use in # croniter time = (time + 1) % 7 return time if isinstance(time, str): if isDayOfWeek: # time could be a comma separated list of values, e.g. 
"5,sun" time_array = str(time).split(',') for i, time_val in enumerate(time_array): try: # try to convert value in place # Conversion for croniter (see above) time_array[i] = (int(time_val) + 1) % 7 except ValueError: # all non-int values are kept pass # Convert the list to a string return ','.join([str(s) for s in time_array]) return time if isDayOfWeek: # Conversion for croniter (see above) time = [(t + 1) % 7 for t in time] return ','.join([str(s) for s in time]) # Convert the list to a string def getNextBuildTime(self, lastActuated): dateTime = lastActuated or self.now() sched = '{} {} {} {} {}'.format(self._timeToCron(self.minute), self._timeToCron(self.hour), self._timeToCron(self.dayOfMonth), self._timeToCron(self.month), self._timeToCron(self.dayOfWeek, True)) cron = croniter.croniter(sched, dateTime) nextdate = cron.get_next(float) return defer.succeed(nextdate) class Nightly(NightlyBase): def __init__(self, name, builderNames, minute=0, hour='*', dayOfMonth='*', month='*', dayOfWeek='*', reason="The Nightly scheduler named '%(name)s' triggered this build", **kwargs): super().__init__(name=name, builderNames=builderNames, minute=minute, hour=hour, dayOfMonth=dayOfMonth, month=month, dayOfWeek=dayOfWeek, reason=reason, **kwargs) @implementer(ITriggerableScheduler) class NightlyTriggerable(NightlyBase): def __init__(self, name, builderNames, minute=0, hour='*', dayOfMonth='*', month='*', dayOfWeek='*', reason="The NightlyTriggerable scheduler named '%(name)s' triggered this build", **kwargs): super().__init__(name=name, builderNames=builderNames, minute=minute, hour=hour, dayOfMonth=dayOfMonth, month=month, dayOfWeek=dayOfWeek, reason=reason, **kwargs) self._lastTrigger = None @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return lastTrigger = yield self.getState('lastTrigger', None) self._lastTrigger = None if lastTrigger: try: if isinstance(lastTrigger[0], list): self._lastTrigger = (lastTrigger[0], 
properties.Properties.fromDict( lastTrigger[1]), lastTrigger[2], lastTrigger[3]) # handle state from before Buildbot-0.9.0 elif isinstance(lastTrigger[0], dict): self._lastTrigger = (list(lastTrigger[0].values()), properties.Properties.fromDict( lastTrigger[1]), None, None) except Exception: pass # If the lastTrigger isn't of the right format, ignore it if not self._lastTrigger: log.msg( format="NightlyTriggerable Scheduler <%(scheduler)s>: " "could not load previous state; starting fresh", scheduler=self.name) def trigger(self, waited_for, sourcestamps=None, set_props=None, parent_buildid=None, parent_relationship=None): """Trigger this scheduler with the given sourcestamp ID. Returns a deferred that will fire when the buildset is finished.""" assert isinstance(sourcestamps, list), \ "trigger requires a list of sourcestamps" self._lastTrigger = (sourcestamps, set_props, parent_buildid, parent_relationship) if set_props: propsDict = set_props.asDict() else: propsDict = {} # record the trigger in the db d = self.setState('lastTrigger', (sourcestamps, propsDict, parent_buildid, parent_relationship)) # Trigger expects a callback with the success of the triggered # build, if waitForFinish is True. # Just return SUCCESS, to indicate that the trigger was successful, # don't wait for the nightly to run. 
return (defer.succeed((None, {})), d.addCallback(lambda _: buildstep.SUCCESS)) @defer.inlineCallbacks def startBuild(self): if not self.enabled: log.msg(format='ignoring build from %(name)s because scheduler ' 'has been disabled by the user', name=self.name) return if self._lastTrigger is None: return (sourcestamps, set_props, parent_buildid, parent_relationship) = self._lastTrigger self._lastTrigger = None yield self.setState('lastTrigger', None) # properties for this buildset are composed of our own properties, # potentially overridden by anything from the triggering build props = properties.Properties() props.updateFromProperties(self.properties) if set_props: props.updateFromProperties(set_props) yield self.addBuildsetForSourceStampsWithDefaults( reason=self.reason, sourcestamps=sourcestamps, properties=props, parent_buildid=parent_buildid, parent_relationship=parent_relationship) buildbot-3.4.0/master/buildbot/schedulers/triggerable.py000066400000000000000000000113641413250514000234230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from zope.interface import implementer from buildbot.interfaces import ITriggerableScheduler from buildbot.process.properties import Properties from buildbot.schedulers import base from buildbot.util import debounce @implementer(ITriggerableScheduler) class Triggerable(base.BaseScheduler): compare_attrs = base.BaseScheduler.compare_attrs + ('reason',) def __init__(self, name, builderNames, reason=None, **kwargs): super().__init__(name, builderNames, **kwargs) self._waiters = {} self._buildset_complete_consumer = None self.reason = reason def trigger(self, waited_for, sourcestamps=None, set_props=None, parent_buildid=None, parent_relationship=None): """Trigger this scheduler with the optional given list of sourcestamps Returns two deferreds: idsDeferred -- yields the ids of the buildset and buildrequest, as soon as they are available. resultsDeferred -- yields the build result(s), when they finish.""" # properties for this buildset are composed of our own properties, # potentially overridden by anything from the triggering build props = Properties() props.updateFromProperties(self.properties) reason = self.reason if set_props: props.updateFromProperties(set_props) reason = set_props.getProperty('reason') if reason is None: reason = "The Triggerable scheduler named '{}' triggered this build".format(self.name) # note that this does not use the buildset subscriptions mechanism, as # the duration of interest to the caller is bounded by the lifetime of # this process. 
idsDeferred = self.addBuildsetForSourceStampsWithDefaults( reason, sourcestamps, waited_for, properties=props, parent_buildid=parent_buildid, parent_relationship=parent_relationship) resultsDeferred = defer.Deferred() @idsDeferred.addCallback def setup_waiter(ids): bsid, brids = ids self._waiters[bsid] = (resultsDeferred, brids) self._updateWaiters() return ids return idsDeferred, resultsDeferred @defer.inlineCallbacks def startService(self): yield super().startService() self._updateWaiters.start() @defer.inlineCallbacks def stopService(self): # finish any _updateWaiters calls yield self._updateWaiters.stop() # cancel any outstanding subscription if self._buildset_complete_consumer: self._buildset_complete_consumer.stopConsuming() self._buildset_complete_consumer = None # and errback any outstanding deferreds if self._waiters: msg = 'Triggerable scheduler stopped before build was complete' for d, brids in self._waiters.values(): d.errback(failure.Failure(RuntimeError(msg))) self._waiters = {} yield super().stopService() @debounce.method(wait=0) @defer.inlineCallbacks def _updateWaiters(self): if self._waiters and not self._buildset_complete_consumer: startConsuming = self.master.mq.startConsuming self._buildset_complete_consumer = yield startConsuming( self._buildset_complete_cb, ('buildsets', None, 'complete')) elif not self._waiters and self._buildset_complete_consumer: self._buildset_complete_consumer.stopConsuming() self._buildset_complete_consumer = None def _buildset_complete_cb(self, key, msg): if msg['bsid'] not in self._waiters: return # pop this bsid from the waiters list, d, brids = self._waiters.pop(msg['bsid']) # ..and potentially stop consuming buildset completion notifications self._updateWaiters() # fire the callback to indicate that the triggered build is complete d.callback((msg['results'], brids)) buildbot-3.4.0/master/buildbot/schedulers/trysched.py000066400000000000000000000433471413250514000227670ustar00rootroot00000000000000# This file is 
part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import base64 import json import os from twisted.internet import defer from twisted.protocols import basic from twisted.python import log from twisted.spread import pb from buildbot import pbutil from buildbot.process.properties import Properties from buildbot.schedulers import base from buildbot.util import bytes2unicode from buildbot.util import netstrings from buildbot.util import unicode2bytes from buildbot.util.maildir import MaildirService class TryBase(base.BaseScheduler): def filterBuilderList(self, builderNames): """ Make sure that C{builderNames} is a subset of the configured C{self.builderNames}, returning an empty list if not. If C{builderNames} is empty, use C{self.builderNames}. @returns: list of builder names to build on """ # self.builderNames is the configured list of builders # available for try. If the user supplies a list of builders, # it must be restricted to the configured list. If not, build # on all of the configured builders. 
if builderNames: for b in builderNames: if b not in self.builderNames: log.msg("{} got with builder {}".format(self, b)) log.msg(" but that wasn't in our list: {}".format(self.builderNames)) return [] else: builderNames = self.builderNames return builderNames class BadJobfile(Exception): pass class JobdirService(MaildirService): # NOTE: tightly coupled with Try_Jobdir, below. We used to track it as a "parent" # via the MultiService API, but now we just track it as the member # "self.scheduler" name = 'JobdirService' def __init__(self, scheduler, basedir=None): self.scheduler = scheduler super().__init__(basedir) def messageReceived(self, filename): with self.moveToCurDir(filename) as f: rv = self.scheduler.handleJobFile(filename, f) return rv class Try_Jobdir(TryBase): compare_attrs = ('jobdir',) def __init__(self, name, builderNames, jobdir, **kwargs): super().__init__(name, builderNames, **kwargs) self.jobdir = jobdir self.watcher = JobdirService(scheduler=self) # TryBase used to be a MultiService and managed the JobdirService via a parent/child # relationship. We stub out the addService/removeService and just keep track of # JobdirService as self.watcher. We'll refactor these things later and remove # the need for this. 
def addService(self, child): pass def removeService(self, child): pass # activation handlers @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return # set the watcher's basedir now that we have a master jobdir = os.path.join(self.master.basedir, self.jobdir) self.watcher.setBasedir(jobdir) for subdir in "cur new tmp".split(): if not os.path.exists(os.path.join(jobdir, subdir)): os.mkdir(os.path.join(jobdir, subdir)) # bridge the activate/deactivate to a startService/stopService on the # child service self.watcher.startService() @defer.inlineCallbacks def deactivate(self): yield super().deactivate() if not self.enabled: return # bridge the activate/deactivate to a startService/stopService on the # child service self.watcher.stopService() def parseJob(self, f): # jobfiles are serialized build requests. Each is a list of # serialized netstrings, in the following order: # format version number: # "1" the original # "2" introduces project and repository # "3" introduces who # "4" introduces comment # "5" introduces properties and JSON serialization of values after # version # "6" sends patch_body as base64-encoded string in the patch_body_base64 attribute # jobid: arbitrary string, used to find the buildSet later # branch: branch name, "" for default-branch # baserev: revision, "" for HEAD # patch_level: usually "1" # patch_body: patch to be applied for build (as string) # patch_body_base64: patch to be applied for build (as base64-encoded bytes) # repository # project # who: user requesting build # comment: comment from user about diff and/or build # builderNames: list of builder names # properties: dict of build properties p = netstrings.NetstringParser() f.seek(0, 2) if f.tell() > basic.NetstringReceiver.MAX_LENGTH: raise BadJobfile("The patch size is greater that NetStringReceiver.MAX_LENGTH. 
" "Please Set this higher in the master.cfg") f.seek(0, 0) try: p.feed(f.read()) except basic.NetstringParseError as e: raise BadJobfile("unable to parse netstrings") from e if not p.strings: raise BadJobfile("could not find any complete netstrings") ver = bytes2unicode(p.strings.pop(0)) v1_keys = ['jobid', 'branch', 'baserev', 'patch_level', 'patch_body'] v2_keys = v1_keys + ['repository', 'project'] v3_keys = v2_keys + ['who'] v4_keys = v3_keys + ['comment'] keys = [v1_keys, v2_keys, v3_keys, v4_keys] # v5 introduces properties and uses JSON serialization parsed_job = {} def extract_netstrings(p, keys): for i, key in enumerate(keys): if key == 'patch_body': parsed_job[key] = p.strings[i] else: parsed_job[key] = bytes2unicode(p.strings[i]) def postprocess_parsed_job(): # apply defaults and handle type casting parsed_job['branch'] = parsed_job['branch'] or None parsed_job['baserev'] = parsed_job['baserev'] or None parsed_job['patch_level'] = int(parsed_job['patch_level']) for key in 'repository project who comment'.split(): parsed_job[key] = parsed_job.get(key, '') parsed_job['properties'] = parsed_job.get('properties', {}) if ver <= "4": i = int(ver) - 1 extract_netstrings(p, keys[i]) parsed_job['builderNames'] = [bytes2unicode(s) for s in p.strings[len(keys[i]):]] postprocess_parsed_job() elif ver == "5": try: data = bytes2unicode(p.strings[0]) parsed_job = json.loads(data) parsed_job['patch_body'] = unicode2bytes(parsed_job['patch_body']) except ValueError as e: raise BadJobfile("unable to parse JSON") from e postprocess_parsed_job() elif ver == "6": try: data = bytes2unicode(p.strings[0]) parsed_job = json.loads(data) parsed_job['patch_body'] = base64.b64decode(parsed_job['patch_body_base64']) del parsed_job['patch_body_base64'] except ValueError as e: raise BadJobfile("unable to parse JSON") from e postprocess_parsed_job() else: raise BadJobfile("unknown version '{}'".format(ver)) return parsed_job def handleJobFile(self, filename, f): try: parsed_job = 
self.parseJob(f) builderNames = parsed_job['builderNames'] except BadJobfile: log.msg("{} reports a bad jobfile in {}".format(self, filename)) log.err() return defer.succeed(None) # Validate/fixup the builder names. builderNames = self.filterBuilderList(builderNames) if not builderNames: log.msg( "incoming Try job did not specify any allowed builder names") return defer.succeed(None) who = "" if parsed_job['who']: who = parsed_job['who'] comment = "" if parsed_job['comment']: comment = parsed_job['comment'] sourcestamp = dict(branch=parsed_job['branch'], codebase='', revision=parsed_job['baserev'], patch_body=parsed_job['patch_body'], patch_level=parsed_job['patch_level'], patch_author=who, patch_comment=comment, # TODO: can't set this remotely - #1769 patch_subdir='', project=parsed_job['project'], repository=parsed_job['repository']) reason = "'try' job" if parsed_job['who']: reason += " by user {}".format(bytes2unicode(parsed_job['who'])) properties = parsed_job['properties'] requested_props = Properties() requested_props.update(properties, "try build") return self.addBuildsetForSourceStamps( sourcestamps=[sourcestamp], reason=reason, external_idstring=bytes2unicode(parsed_job['jobid']), builderNames=builderNames, properties=requested_props) class RemoteBuildSetStatus(pb.Referenceable): def __init__(self, master, bsid, brids): self.master = master self.bsid = bsid self.brids = brids @defer.inlineCallbacks def remote_getBuildRequests(self): brids = dict() for builderid, brid in self.brids.items(): builderDict = yield self.master.data.get(('builders', builderid)) brids[builderDict['name']] = brid return [(n, RemoteBuildRequest(self.master, n, brid)) for n, brid in brids.items()] class RemoteBuildRequest(pb.Referenceable): def __init__(self, master, builderName, brid): self.master = master self.builderName = builderName self.brid = brid self.consumer = None @defer.inlineCallbacks def remote_subscribe(self, subscriber): brdict = yield 
self.master.data.get(('buildrequests', self.brid)) if not brdict: return builderId = brdict['builderid'] # make sure we aren't double-reporting any builds reportedBuilds = set([]) # subscribe to any new builds.. def gotBuild(key, msg): if msg['buildrequestid'] != self.brid or key[-1] != 'new': return None if msg['buildid'] in reportedBuilds: return None reportedBuilds.add(msg['buildid']) return subscriber.callRemote('newbuild', RemoteBuild( self.master, msg, self.builderName), self.builderName) self.consumer = yield self.master.mq.startConsuming( gotBuild, ('builders', str(builderId), 'builds', None, None)) subscriber.notifyOnDisconnect(lambda _: self.remote_unsubscribe(subscriber)) # and get any existing builds builds = yield self.master.data.get(('buildrequests', self.brid, 'builds')) for build in builds: if build['buildid'] in reportedBuilds: continue reportedBuilds.add(build['buildid']) yield subscriber.callRemote('newbuild', RemoteBuild( self.master, build, self.builderName), self.builderName) def remote_unsubscribe(self, subscriber): if self.consumer: self.consumer.stopConsuming() self.consumer = None class RemoteBuild(pb.Referenceable): def __init__(self, master, builddict, builderName): self.master = master self.builddict = builddict self.builderName = builderName self.consumer = None @defer.inlineCallbacks def remote_subscribe(self, subscriber, interval): # subscribe to any new steps.. 
def stepChanged(key, msg): if key[-1] == 'started': return subscriber.callRemote('stepStarted', self.builderName, self, msg['name'], None) elif key[-1] == 'finished': return subscriber.callRemote('stepFinished', self.builderName, self, msg['name'], None, msg['results']) return None self.consumer = yield self.master.mq.startConsuming( stepChanged, ('builds', str(self.builddict['buildid']), 'steps', None, None)) subscriber.notifyOnDisconnect(lambda _: self.remote_unsubscribe(subscriber)) def remote_unsubscribe(self, subscriber): if self.consumer: self.consumer.stopConsuming() self.consumer = None @defer.inlineCallbacks def remote_waitUntilFinished(self): d = defer.Deferred() def buildEvent(key, msg): if key[-1] == 'finished': d.callback(None) consumer = yield self.master.mq.startConsuming( buildEvent, ('builds', str(self.builddict['buildid']), None)) yield d # wait for event consumer.stopConsuming() return self # callers expect result=self @defer.inlineCallbacks def remote_getResults(self): buildid = self.builddict['buildid'] builddict = yield self.master.data.get(('builds', buildid)) return builddict['results'] @defer.inlineCallbacks def remote_getText(self): buildid = self.builddict['buildid'] builddict = yield self.master.data.get(('builds', buildid)) return [builddict['state_string']] class Try_Userpass_Perspective(pbutil.NewCredPerspective): def __init__(self, scheduler, username): self.scheduler = scheduler self.username = username @defer.inlineCallbacks def perspective_try(self, branch, revision, patch, repository, project, builderNames, who="", comment="", properties=None): log.msg("user {} requesting build on builders {}".format(self.username, builderNames)) if properties is None: properties = {} # build the intersection of the request and our configured list builderNames = self.scheduler.filterBuilderList(builderNames) if not builderNames: return None branch = bytes2unicode(branch) revision = bytes2unicode(revision) patch_level = patch[0] patch_body = 
unicode2bytes(patch[1]) repository = bytes2unicode(repository) project = bytes2unicode(project) who = bytes2unicode(who) comment = bytes2unicode(comment) reason = "'try' job" if who: reason += " by user {}".format(bytes2unicode(who)) if comment: reason += " ({})".format(bytes2unicode(comment)) sourcestamp = dict( branch=branch, revision=revision, repository=repository, project=project, patch_level=patch_level, patch_body=patch_body, patch_subdir='', patch_author=who or '', patch_comment=comment or '', codebase='', ) # note: no way to specify patch subdir - #1769 requested_props = Properties() requested_props.update(properties, "try build") (bsid, brids) = yield self.scheduler.addBuildsetForSourceStamps( sourcestamps=[sourcestamp], reason=reason, properties=requested_props, builderNames=builderNames) # return a remotely-usable BuildSetStatus object bss = RemoteBuildSetStatus(self.scheduler.master, bsid, brids) return bss def perspective_getAvailableBuilderNames(self): # Return a list of builder names that are configured # for the try service # This is mostly intended for integrating try services # into other applications return self.scheduler.listBuilderNames() class Try_Userpass(TryBase): compare_attrs = ('name', 'builderNames', 'port', 'userpass', 'properties') def __init__(self, name, builderNames, port, userpass, **kwargs): super().__init__(name, builderNames, **kwargs) self.port = port self.userpass = userpass self.registrations = [] @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return # register each user/passwd with the pbmanager def factory(mind, username): return Try_Userpass_Perspective(self, username) for user, passwd in self.userpass: reg = yield self.master.pbmanager.register(self.port, user, passwd, factory) self.registrations.append(reg) @defer.inlineCallbacks def deactivate(self): yield super().deactivate() if not self.enabled: return yield defer.gatherResults( [reg.unregister() for reg in 
self.registrations]) buildbot-3.4.0/master/buildbot/scripts/000077500000000000000000000000001413250514000201035ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/scripts/__init__.py000066400000000000000000000000001413250514000222020ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/scripts/base.py000066400000000000000000000247301413250514000213750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy import errno import os import stat import sys import traceback from contextlib import contextmanager from twisted.python import runtime from twisted.python import usage from buildbot import config as config_module @contextmanager def captureErrors(errors, msg): try: yield except errors as e: print(msg) print(e) return 1 return None class BusyError(RuntimeError): pass def checkPidFile(pidfile): """ mostly comes from _twistd_unix.py which is not twisted public API :-/ except it returns an exception instead of exiting """ if os.path.exists(pidfile): try: with open(pidfile) as f: pid = int(f.read()) except ValueError as e: raise ValueError('Pidfile {} contains non-numeric value'.format(pidfile)) from e try: os.kill(pid, 0) except OSError as why: if why.errno == errno.ESRCH: # The pid doesn't exist. 
print('Removing stale pidfile {}'.format(pidfile)) os.remove(pidfile) else: raise OSError("Can't check status of PID {} from pidfile {}: {}".format( pid, pidfile, why)) from why else: raise BusyError("'{}' exists - is this master still running?".format(pidfile)) def checkBasedir(config): if not config['quiet']: print("checking basedir") if not isBuildmasterDir(config['basedir']): return False if runtime.platformType != 'win32': # no pids on win32 if not config['quiet']: print("checking for running master") pidfile = os.path.join(config['basedir'], 'twistd.pid') try: checkPidFile(pidfile) except Exception as e: print(str(e)) return False tac = getConfigFromTac(config['basedir']) if tac: if isinstance(tac.get('rotateLength', 0), str): print("ERROR: rotateLength is a string, it should be a number") print("ERROR: Please, edit your buildbot.tac file and run again") print( "ERROR: See http://trac.buildbot.net/ticket/2588 for more details") return False if isinstance(tac.get('maxRotatedFiles', 0), str): print("ERROR: maxRotatedFiles is a string, it should be a number") print("ERROR: Please, edit your buildbot.tac file and run again") print( "ERROR: See http://trac.buildbot.net/ticket/2588 for more details") return False return True def loadConfig(config, configFileName='master.cfg'): if not config['quiet']: print("checking {}".format(configFileName)) try: master_cfg = config_module.FileLoader( config['basedir'], configFileName).loadConfig() except config_module.ConfigErrors as e: print("Errors loading configuration:") for msg in e.errors: print(" " + msg) return None except Exception: print("Errors loading configuration:") traceback.print_exc(file=sys.stdout) return None return master_cfg def isBuildmasterDir(dir): def print_error(error_message): print("{}\ninvalid buildmaster directory '{}'".format(error_message, dir)) buildbot_tac = os.path.join(dir, "buildbot.tac") try: with open(buildbot_tac) as f: contents = f.read() except IOError as exception: print_error("error 
reading '{}': {}".format(buildbot_tac, exception.strerror)) return False if "Application('buildmaster')" not in contents: print_error("unexpected content in '{}'".format(buildbot_tac)) return False return True def getConfigFromTac(basedir, quiet=False): tacFile = os.path.join(basedir, 'buildbot.tac') if os.path.exists(tacFile): # don't mess with the global namespace, but set __file__ for # relocatable buildmasters tacGlobals = {'__file__': tacFile} try: with open(tacFile) as f: exec(f.read(), tacGlobals) except Exception: if not quiet: traceback.print_exc() raise return tacGlobals return None def getConfigFileFromTac(basedir, quiet=False): # execute the .tac file to see if its configfile location exists config = getConfigFromTac(basedir, quiet=quiet) if config: return config.get("configfile", "master.cfg") return "master.cfg" class SubcommandOptions(usage.Options): # subclasses should set this to a list-of-lists in order to source the # .buildbot/options file. Note that this *only* works with optParameters, # not optFlags. Example: # buildbotOptions = [ [ 'optfile-name', 'parameter-name' ], .. ] buildbotOptions = None # set this to options that must have non-None values requiredOptions = [] def __init__(self, *args): # for options in self.buildbotOptions, optParameters, and the options # file, change the default in optParameters to the value in the options # file, call through to the constructor, and then change it back. # Options uses reflect.accumulateClassList, so this *must* be a class # attribute; however, we do not want to permanently change the class. # So we patch it temporarily and restore it after. 
cls = self.__class__ if hasattr(cls, 'optParameters'): old_optParameters = cls.optParameters cls.optParameters = op = copy.deepcopy(cls.optParameters) if self.buildbotOptions: optfile = self.optionsFile = self.loadOptionsFile() # pylint: disable=not-an-iterable for optfile_name, option_name in self.buildbotOptions: for i, val in enumerate(op): if (op[i][0] == option_name and optfile_name in optfile): op[i] = list(op[i]) op[i][2] = optfile[optfile_name] super().__init__(*args) if hasattr(cls, 'optParameters'): cls.optParameters = old_optParameters def loadOptionsFile(self, _here=None): """Find the .buildbot/options file. Crawl from the current directory up towards the root, and also look in ~/.buildbot . The first directory that's owned by the user and has the file we're looking for wins. Windows skips the owned-by-user test. @rtype: dict @return: a dictionary of names defined in the options file. If no options file was found, return an empty dict. """ here = _here or os.path.abspath(os.getcwd()) if runtime.platformType == 'win32': # never trust env-vars, use the proper API from win32com.shell import shellcon, shell appdata = shell.SHGetFolderPath(0, shellcon.CSIDL_APPDATA, 0, 0) home = os.path.join(appdata, "buildbot") else: home = os.path.expanduser("~/.buildbot") searchpath = [] toomany = 20 while True: searchpath.append(os.path.join(here, ".buildbot")) next = os.path.dirname(here) if next == here: break # we've hit the root here = next toomany -= 1 # just in case if toomany == 0: print("I seem to have wandered up into the infinite glories " "of the heavens. 
Oops.") break searchpath.append(home) localDict = {} for d in searchpath: if os.path.isdir(d): if runtime.platformType != 'win32': if os.stat(d)[stat.ST_UID] != os.getuid(): print("skipping {} because you don't own it".format(d)) continue # security, skip other people's directories optfile = os.path.join(d, "options") if os.path.exists(optfile): try: with open(optfile, "r") as f: options = f.read() exec(options, localDict) except Exception: print("error while reading {}".format(optfile)) raise break for k in list(localDict.keys()): # pylint: disable=consider-iterating-dictionary if k.startswith("__"): del localDict[k] return localDict def postOptions(self): missing = [k for k in self.requiredOptions if self[k] is None] if missing: if len(missing) > 1: msg = 'Required arguments missing: ' + ', '.join(missing) else: msg = 'Required argument missing: ' + missing[0] raise usage.UsageError(msg) class BasedirMixin: """SubcommandOptions Mixin to handle subcommands that take a basedir argument""" # on tab completion, suggest directories as first argument if hasattr(usage, 'Completions'): # only set completion suggestion if running with # twisted version (>=11.1.0) that supports it compData = usage.Completions( extraActions=[usage.CompleteDirs(descr="buildbot base directory")]) def parseArgs(self, *args): if args: self['basedir'] = args[0] else: # Use the current directory if no basedir was specified. self['basedir'] = os.getcwd() if len(args) > 1: raise usage.UsageError("I wasn't expecting so many arguments") def postOptions(self): # get an unambiguous, expanded basedir, including expanding '~', which # may be useful in a .buildbot/config file self['basedir'] = os.path.abspath(os.path.expanduser(self['basedir'])) buildbot-3.4.0/master/buildbot/scripts/buildbot_tac.tmpl000066400000000000000000000024351413250514000234400ustar00rootroot00000000000000import os from twisted.application import service from buildbot.master import BuildMaster {% if relocatable -%} basedir = '.' 
{% else -%} basedir = {{ basedir|repr }} {%- endif %} {% if not no_logrotate -%} rotateLength = {{ '%d' | format(log_size) }} maxRotatedFiles = {{ ('%d' | format(log_count)) if log_count != None else 'None' }} {%- endif %} configfile = {{ config|repr }} # Default umask for server umask = None # if this is a relocatable tac file, get the directory containing the TAC if basedir == '.': basedir = os.path.abspath(os.path.dirname(__file__)) # note: this line is matched against to check that this is a buildmaster # directory; do not edit it. application = service.Application('buildmaster') {% if not no_logrotate -%} from twisted.python.logfile import LogFile from twisted.python.log import ILogObserver, FileLogObserver logfile = LogFile.fromFullPath(os.path.join(basedir, "twistd.log"), rotateLength=rotateLength, maxRotatedFiles=maxRotatedFiles) application.setComponent(ILogObserver, FileLogObserver(logfile).emit) {%- endif %} m = BuildMaster(basedir, configfile, umask) m.setServiceParent(application) {% if not no_logrotate -%} m.log_rotation.rotateLength = rotateLength m.log_rotation.maxRotatedFiles = maxRotatedFiles {%- endif %} buildbot-3.4.0/master/buildbot/scripts/checkconfig.py000066400000000000000000000035341413250514000227250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sys from buildbot import config from buildbot.scripts.base import getConfigFileFromTac from buildbot.util import in_reactor def _loadConfig(basedir, configFile, quiet): try: config.FileLoader(basedir, configFile).loadConfig() except config.ConfigErrors as e: if not quiet: print("Configuration Errors:", file=sys.stderr) for e in e.errors: print(" " + e, file=sys.stderr) return 1 if not quiet: print("Config file is good!") return 0 @in_reactor def checkconfig(config): quiet = config.get('quiet') configFile = config.get('configFile', os.getcwd()) if os.path.isdir(configFile): basedir = configFile try: configFile = getConfigFileFromTac(basedir, quiet=quiet) except Exception: if not quiet: # the exception is already printed in base.py print("Unable to load 'buildbot.tac' from '{}':".format(basedir)) return 1 else: basedir = os.getcwd() return _loadConfig(basedir=basedir, configFile=configFile, quiet=quiet) __all__ = ['checkconfig'] buildbot-3.4.0/master/buildbot/scripts/cleanupdb.py000066400000000000000000000071631413250514000224210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sys from twisted.internet import defer from buildbot import config as config_module from buildbot import monkeypatches from buildbot.master import BuildMaster from buildbot.scripts import base from buildbot.util import in_reactor @defer.inlineCallbacks def doCleanupDatabase(config, master_cfg): if not config['quiet']: print("cleaning database ({})".format(master_cfg.db['db_url'])) master = BuildMaster(config['basedir']) master.config = master_cfg db = master.db yield db.setup(check_version=False, verbose=not config['quiet']) res = yield db.logs.getLogs() i = 0 percent = 0 saved = 0 for log in res: saved += yield db.logs.compressLog(log['id'], force=config['force']) i += 1 if not config['quiet'] and percent != i * 100 / len(res): percent = i * 100 / len(res) print(" {0}% {1} saved".format(percent, saved)) saved = 0 sys.stdout.flush() if master_cfg.db['db_url'].startswith("sqlite"): if not config['quiet']: print("executing sqlite vacuum function...") # sqlite vacuum function rebuild the whole database to claim # free disk space back def thd(engine): # In Python 3.6 and higher, sqlite3 no longer commits an # open transaction before DDL statements. # It is necessary to set the isolation_level to none # for auto-commit mode before doing a VACUUM. # See: https://bugs.python.org/issue28518 # Get the underlying sqlite connection from SQLAlchemy. 
sqlite_conn = engine.connection.connection # Set isolation_level to 'auto-commit mode' sqlite_conn.isolation_level = None sqlite_conn.execute("vacuum;").close() yield db.pool.do(thd) @in_reactor def cleanupDatabase(config, _noMonkey=False): # pragma: no cover # we separate the actual implementation to protect unit tests # from @in_reactor which stops the reactor if not _noMonkey: monkeypatches.patch_all() return _cleanupDatabase(config, _noMonkey=False) @defer.inlineCallbacks def _cleanupDatabase(config, _noMonkey=False): if not base.checkBasedir(config): return 1 config['basedir'] = os.path.abspath(config['basedir']) os.chdir(config['basedir']) with base.captureErrors((SyntaxError, ImportError), "Unable to load 'buildbot.tac' from '{}':".format(config['basedir'])): configFile = base.getConfigFileFromTac(config['basedir']) with base.captureErrors(config_module.ConfigErrors, "Unable to load '{}' from '{}':".format(configFile, config['basedir'])): master_cfg = base.loadConfig(config, configFile) if not master_cfg: return 1 yield doCleanupDatabase(config, master_cfg) if not config['quiet']: print("cleanup complete") return 0 buildbot-3.4.0/master/buildbot/scripts/create_master.py000066400000000000000000000071041413250514000232750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import jinja2 from twisted.internet import defer from twisted.python import util from buildbot import config as config_module from buildbot import monkeypatches from buildbot.master import BuildMaster from buildbot.util import in_reactor def makeBasedir(config): if os.path.exists(config['basedir']): if not config['quiet']: print("updating existing installation") return if not config['quiet']: print("mkdir", config['basedir']) os.mkdir(config['basedir']) def makeTAC(config): # render buildbot_tac.tmpl using the config loader = jinja2.FileSystemLoader(os.path.dirname(__file__)) env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined) env.filters['repr'] = repr tpl = env.get_template('buildbot_tac.tmpl') cxt = dict((k.replace('-', '_'), v) for k, v in config.items()) contents = tpl.render(cxt) tacfile = os.path.join(config['basedir'], "buildbot.tac") if os.path.exists(tacfile): with open(tacfile, "rt") as f: oldcontents = f.read() if oldcontents == contents: if not config['quiet']: print("buildbot.tac already exists and is correct") return if not config['quiet']: print("not touching existing buildbot.tac") print("creating buildbot.tac.new instead") tacfile += ".new" with open(tacfile, "wt") as f: f.write(contents) def makeSampleConfig(config): source = util.sibpath(__file__, "sample.cfg") target = os.path.join(config['basedir'], "master.cfg.sample") if not config['quiet']: print("creating {}".format(target)) with open(source, "rt") as f: config_sample = f.read() if config['db']: config_sample = config_sample.replace('sqlite:///state.sqlite', config['db']) with open(target, "wt") as f: f.write(config_sample) os.chmod(target, 0o600) @defer.inlineCallbacks def createDB(config, _noMonkey=False): # apply the db monkeypatches (and others - no harm) if not _noMonkey: # pragma: no cover monkeypatches.patch_all() # create a master with the default configuration, but with db_url # overridden master_cfg = 
config_module.MasterConfig() master_cfg.db['db_url'] = config['db'] master = BuildMaster(config['basedir']) master.config = master_cfg db = master.db yield db.setup(check_version=False, verbose=not config['quiet']) if not config['quiet']: print("creating database ({})".format(master_cfg.db['db_url'])) yield db.model.upgrade() @in_reactor @defer.inlineCallbacks def createMaster(config): makeBasedir(config) makeTAC(config) makeSampleConfig(config) yield createDB(config) if not config['quiet']: print("buildmaster configured in {}".format(config['basedir'])) return 0 buildbot-3.4.0/master/buildbot/scripts/dataspec.py000066400000000000000000000027231413250514000222450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import os import sys from twisted.internet import defer from buildbot.data import connector from buildbot.test.fake import fakemaster from buildbot.util import in_reactor @in_reactor @defer.inlineCallbacks def dataspec(config): master = yield fakemaster.make_master(None, wantRealReactor=True) data = connector.DataConnector() yield data.setServiceParent(master) if config['out'] != '--': dirs = os.path.dirname(config['out']) if dirs and not os.path.exists(dirs): os.makedirs(dirs) f = open(config['out'], "w") else: f = sys.stdout if config['global'] is not None: f.write("window." 
+ config['global'] + '=') f.write(json.dumps(data.allEndpoints(), indent=2)) f.close() return 0 buildbot-3.4.0/master/buildbot/scripts/devproxy.py000066400000000000000000000173501413250514000223430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import asyncio import json import logging import aiohttp # dev-proxy command requires aiohttp! run 'pip install aiohttp' import aiohttp.web import jinja2 from buildbot.plugins.db import get_plugins log = logging.getLogger(__name__) class DevProxy: MAX_CONNECTIONS = 10 def __init__(self, port, next_url, plugins, unsafe_ssl, auth_cookie): while next_url.endswith('/'): next_url = next_url[:-1] self.next_url = next_url self.app = app = aiohttp.web.Application() self.apps = get_plugins('www', None, load_now=True) self.unsafe_ssl = unsafe_ssl cookies = {} if auth_cookie: if "TWISTED_SESSION" in auth_cookie: # user pasted the whole document.cookie part! 
cookies = dict(c.split("=") for c in auth_cookie.split(";")) auth_cookie = cookies["TWISTED_SESSION"] cookies = {'TWISTED_SESSION': auth_cookie} logging.basicConfig(level=logging.DEBUG) if plugins is None: plugins = {} else: plugins = json.loads(plugins) self.plugins = plugins app.router.add_route('*', '/ws', self.ws_handler) for path in ['/api', '/auth', '/sse', '/avatar']: app.router.add_route('*', path + '{path:.*}', self.proxy_handler) app.router.add_route('*', '/', self.index_handler) for plugin in self.apps.names: if plugin != 'base': staticdir = self.apps.get(plugin).static_dir app.router.add_static('/' + plugin, staticdir) staticdir = self.staticdir = self.apps.get('base').static_dir loader = jinja2.FileSystemLoader(staticdir) self.jinja = jinja2.Environment( loader=loader, undefined=jinja2.StrictUndefined) app.router.add_static('/', staticdir) conn = aiohttp.TCPConnector( limit=self.MAX_CONNECTIONS, verify_ssl=(not self.unsafe_ssl)) self.session = aiohttp.ClientSession(connector=conn, trust_env=True, cookies=cookies) self.config = None self.buildbotURL = "http://localhost:{}/".format(port) app.on_startup.append(self.on_startup) app.on_cleanup.append(self.on_cleanup) aiohttp.web.run_app(app, host="localhost", port=port) async def on_startup(self, app): try: await self.fetch_config_from_upstream() except aiohttp.ClientConnectionError as e: raise RuntimeError("Unable to connect to buildbot master" + str(e)) from e async def on_cleanup(self, app): await self.session.close() async def ws_handler(self, req): # based on https://github.com/oetiker/aio-reverse-proxy/blob/master/paraview-proxy.py ws_server = aiohttp.web.WebSocketResponse() await ws_server.prepare(req) async with self.session.ws_connect( self.next_url + "/ws", headers=req.headers ) as ws_client: async def ws_forward(ws_from, ws_to): async for msg in ws_from: if ws_to.closed: await ws_to.close(code=ws_to.close_code, message=msg.extra) return if msg.type == aiohttp.WSMsgType.TEXT: await 
ws_to.send_str(msg.data) elif msg.type == aiohttp.WSMsgType.BINARY: await ws_to.send_bytes(msg.data) elif msg.type == aiohttp.WSMsgType.PING: await ws_to.ping() elif msg.type == aiohttp.WSMsgType.PONG: await ws_to.pong() else: raise ValueError('unexpected message type: {}'.format(msg)) # keep forwarding websocket data in both directions await asyncio.wait( [ ws_forward(ws_server, ws_client), ws_forward(ws_client, ws_server) ], return_when=asyncio.FIRST_COMPLETED) return ws_server async def proxy_handler(self, req): method = getattr(self.session, req.method.lower()) upstream_url = self.next_url + req.path headers = req.headers.copy() query = req.query try: # note that req.content is a StreamReader, so the data is streamed # and not fully loaded in memory (unlike with python-requests) async with method(upstream_url, headers=headers, params=query, allow_redirects=False, data=req.content) as request: response = aiohttp.web.StreamResponse( status=request.status, headers=request.headers) writer = await response.prepare(req) while True: chunk = await request.content.readany() if not chunk: break # using writer.write instead of response.write saves a few checks await writer.write(chunk) return response except aiohttp.ClientConnectionError as e: return self.connection_error(e) def connection_error(self, error): return aiohttp.web.Response(text='Unable to connect to upstream server {} ({!s})'.format( self.next_url, error), status=502) async def fetch_config_from_upstream(self): async with self.session.get(self.next_url) as request: index = await request.content.read() if request.status != 200: raise RuntimeError("Unable to fetch buildbot config: " + index.decode()) # hack to parse the configjson from upstream buildbot config start_delimiter = b'angular.module("buildbot_config", []).constant("config", ' start_index = index.index(start_delimiter) last_index = index.index(b')') self.config = json.loads( index[start_index + len(start_delimiter):last_index].decode()) # keep the 
original config, but remove the plugins that we don't know for plugin in list(self.config['plugins'].keys()): if plugin not in self.apps: del self.config['plugins'][plugin] log.warn("warning: Missing plugin compared to original buildbot: %s", plugin) # add the plugins configs passed in cmdline for k, v in self.plugins.items(): self.config['plugins'][k] = v self.config['buildbotURL'] = self.buildbotURL self.config['buildbotURLs'] = [self.buildbotURL, self.next_url + "/"] async def index_handler(self, req): tpl = self.jinja.get_template('index.html') index = tpl.render(configjson=json.dumps(self.config), custom_templates={}, config=self.config) return aiohttp.web.Response(body=index, content_type='text/html') def devproxy(config): DevProxy(config['port'], config['buildbot_url'], config['plugins'], config['unsafe_ssl'], config['auth_cookie']) buildbot-3.4.0/master/buildbot/scripts/gengraphql.py000066400000000000000000000033011413250514000226020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sys from twisted.internet import defer from buildbot.data import connector from buildbot.data.graphql import GraphQLConnector from buildbot.test.fake import fakemaster from buildbot.util import in_reactor @in_reactor @defer.inlineCallbacks def gengraphql(config): master = yield fakemaster.make_master(None, wantRealReactor=True) data = connector.DataConnector() yield data.setServiceParent(master) graphql = GraphQLConnector() yield graphql.setServiceParent(master) graphql.data = data master.config.www = dict( graphql={"debug": True}) graphql.reconfigServiceWithBuildbotConfig(master.config) yield master.startService() if config['out'] != '--': dirs = os.path.dirname(config['out']) if dirs and not os.path.exists(dirs): os.makedirs(dirs) f = open(config['out'], "w") else: f = sys.stdout schema = graphql.get_schema() f.write(schema) f.close() yield master.stopService() return 0 buildbot-3.4.0/master/buildbot/scripts/logwatcher.py000066400000000000000000000146521413250514000226240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import platform from twisted.internet import defer from twisted.internet import error from twisted.internet import protocol from twisted.internet import reactor from twisted.python.failure import Failure from buildbot.util import unicode2bytes class FakeTransport: disconnecting = False class BuildmasterTimeoutError(Exception): pass class BuildmasterStartupError(Exception): pass class ReconfigError(Exception): pass class TailProcess(protocol.ProcessProtocol): def outReceived(self, data): self.lw.dataReceived(data) def errReceived(self, data): self.lw.print_output("ERR: '{}'".format(data)) class LineOnlyLongLineReceiver(protocol.Protocol): """ This is almost the same as Twisted's LineOnlyReceiver except that long lines are handled appropriately. """ _buffer = b'' delimiter = b'\r\n' MAX_LENGTH = 16384 def dataReceived(self, data): lines = (self._buffer + data).split(self.delimiter) self._buffer = lines.pop(-1) for line in lines: if self.transport.disconnecting: # this is necessary because the transport may be told to lose # the connection by a line within a larger packet, and it is # important to disregard all the lines in that packet following # the one that told it to close. return if len(line) > self.MAX_LENGTH: self.lineLengthExceeded(line) else: self.lineReceived(line) def lineReceived(self, line): raise NotImplementedError def lineLengthExceeded(self, line): raise NotImplementedError class LogWatcher(LineOnlyLongLineReceiver): POLL_INTERVAL = 0.1 TIMEOUT_DELAY = 10.0 delimiter = unicode2bytes(os.linesep) def __init__(self, logfile, timeout=None, _reactor=reactor): self.logfile = logfile self.in_reconfig = False self.transport = FakeTransport() self.pp = TailProcess() self.pp.lw = self self.timer = None self._reactor = _reactor self._timeout_delay = timeout or self.TIMEOUT_DELAY def start(self): # If the log file doesn't exist, create it now. 
self.create_logfile(self.logfile) # return a Deferred that fires when the reconfig process has # finished. It errbacks with TimeoutError if the startup has not # progressed for 10 seconds, and with ReconfigError if the error # line was seen. If the logfile could not be opened, it errbacks with # an IOError. if platform.system().lower() == 'sunos' and os.path.exists('/usr/xpg4/bin/tail'): tailBin = "/usr/xpg4/bin/tail" elif platform.system().lower() == 'haiku' and os.path.exists('/bin/tail'): tailBin = "/bin/tail" else: tailBin = "/usr/bin/tail" args = ("tail", "-f", "-n", "0", self.logfile) self.p = self._reactor.spawnProcess(self.pp, tailBin, args, env=os.environ) self.running = True d = defer.maybeDeferred(self._start) return d def _start(self): self.d = defer.Deferred() self.startTimer() return self.d def startTimer(self): self.timer = self._reactor.callLater(self._timeout_delay, self.timeout) def timeout(self): # was the timeout set to be ignored? if so, restart it if not self.timer: self.startTimer() return self.timer = None e = BuildmasterTimeoutError() self.finished(Failure(e)) def finished(self, results): try: self.p.signalProcess("KILL") except error.ProcessExitedAlready: pass if self.timer: self.timer.cancel() self.timer = None self.running = False self.in_reconfig = False self.d.callback(results) def create_logfile(self, path): # pragma: no cover if not os.path.exists(path): open(path, 'a').close() def print_output(self, output): # pragma: no cover print(output) def lineLengthExceeded(self, line): msg = 'Got an a very long line in the log (length {} bytes), ignoring'.format(len(line)) self.print_output(msg) def lineReceived(self, line): if not self.running: return None if b"Log opened." 
in line: self.in_reconfig = True if b"beginning configuration update" in line: self.in_reconfig = True if self.in_reconfig: self.print_output(line.decode()) # certain lines indicate progress, so we "cancel" the timeout # and it will get re-added when it fires PROGRESS_TEXT = [b'Starting BuildMaster', b'Loading configuration from', b'added builder', b'adding scheduler', b'Loading builder', b'Starting factory'] for progressText in PROGRESS_TEXT: if progressText in line: self.timer = None break if b"message from master: attached" in line: return self.finished("worker") if b"configuration update aborted" in line or \ b'configuration update partially applied' in line: return self.finished(Failure(ReconfigError())) if b"Server Shut Down" in line: return self.finished(Failure(ReconfigError())) if b"configuration update complete" in line: return self.finished("buildmaster") if b"BuildMaster is running" in line: return self.finished("buildmaster") if b"BuildMaster startup failed" in line: return self.finished(Failure(BuildmasterStartupError())) return None buildbot-3.4.0/master/buildbot/scripts/reconfig.py000066400000000000000000000064011413250514000222520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import platform import signal from twisted.internet import defer from twisted.internet import reactor from buildbot.scripts.logwatcher import BuildmasterTimeoutError from buildbot.scripts.logwatcher import LogWatcher from buildbot.scripts.logwatcher import ReconfigError from buildbot.util import in_reactor from buildbot.util import rewrap class Reconfigurator: @defer.inlineCallbacks def run(self, basedir, quiet, timeout=None): # Returns "Microsoft" for Vista and "Windows" for other versions if platform.system() in ("Windows", "Microsoft"): print("Reconfig (through SIGHUP) is not supported on Windows.") return None with open(os.path.join(basedir, "twistd.pid"), "rt") as f: self.pid = int(f.read().strip()) if quiet: os.kill(self.pid, signal.SIGHUP) return None # keep reading twistd.log. Display all messages between "loading # configuration from ..." and "configuration update complete" or # "I will keep using the previous config file instead.", or until # `timeout` seconds have elapsed. self.sent_signal = False reactor.callLater(0.2, self.sighup) lw = LogWatcher(os.path.join(basedir, "twistd.log"), timeout=timeout) try: yield lw.start() print("Reconfiguration appears to have completed successfully") return 0 except BuildmasterTimeoutError: print("Never saw reconfiguration finish.") except ReconfigError: print(rewrap("""\ Reconfiguration failed. Please inspect the master.cfg file for errors, correct them, then try 'buildbot reconfig' again. 
""")) except IOError: # we were probably unable to open the file in the first place self.sighup() except Exception as e: print("Error while following twistd.log: {}".format(e)) return 1 def sighup(self): if self.sent_signal: return print("sending SIGHUP to process %d" % self.pid) self.sent_signal = True os.kill(self.pid, signal.SIGHUP) @in_reactor def reconfig(config): basedir = config['basedir'] quiet = config['quiet'] timeout = config.get('progress_timeout', None) if timeout is not None: try: timeout = float(timeout) except ValueError: print('Progress timeout must be a number') return 1 r = Reconfigurator() return r.run(basedir, quiet, timeout=timeout) buildbot-3.4.0/master/buildbot/scripts/restart.py000066400000000000000000000021321413250514000221370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.scripts import base from buildbot.scripts import start from buildbot.scripts import stop def restart(config): basedir = config['basedir'] quiet = config['quiet'] if not base.isBuildmasterDir(basedir): return 1 if stop.stop(config, wait=True) != 0: return 1 if not quiet: print("now restarting buildbot process..") return start.start(config) buildbot-3.4.0/master/buildbot/scripts/runner.py000066400000000000000000000724161413250514000220000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # N.B.: don't import anything that might pull in a reactor yet. Some of our # subcommands want to load modules that need the gtk reactor. # # Also don't forget to mirror your changes on command-line options in manual # pages and reStructuredText documentation. import getpass import sys import textwrap import sqlalchemy as sa from twisted.python import reflect from twisted.python import usage import buildbot from buildbot.scripts import base from buildbot.util import check_functional_environment # Note that the terms 'options' and 'config' are used interchangeably here - in # fact, they are interchanged several times. Caveat legator. def validateMasterOption(master): """ Validate master (-m, --master) command line option. 
Checks that option is a string of the 'hostname:port' form, otherwise raises an UsageError exception. @type master: string @param master: master option @raise usage.UsageError: on invalid master option """ try: hostname, port = master.split(":") port = int(port) except (TypeError, ValueError) as e: raise usage.UsageError("master must have the form 'hostname:port'") from e class UpgradeMasterOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.upgrade_master.upgradeMaster" optFlags = [ ["quiet", "q", "Do not emit the commands being run"], ["develop", "d", "link to buildbot dir rather than copy, with no " "JS optimization (UNIX only)"], ["replace", "r", "Replace any modified files without confirmation."], ] optParameters = [ ] def getSynopsis(self): return "Usage: buildbot upgrade-master [options] []" longdesc = textwrap.dedent(""" This command takes an existing buildmaster working directory and adds/modifies the files there to work with the current version of buildbot. When this command is finished, the buildmaster directory should look much like a brand-new one created by the 'create-master' command. Use this after you've upgraded your buildbot installation and before you restart the buildmaster to use the new version. If you have modified the files in your working directory, this command will leave them untouched, but will put the new recommended contents in a .new file (for example, if index.html has been modified, this command will create index.html.new). You can then look at the new version and decide how to merge its contents into your modified file. When upgrading the database, this command uses the database specified in the master configuration file. If you wish to use a database other than the default (sqlite), be sure to set that parameter before upgrading. 
""") class CreateMasterOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.create_master.createMaster" optFlags = [ ["quiet", "q", "Do not emit the commands being run"], ["force", "f", "Re-use an existing directory (will not overwrite master.cfg file)"], ["relocatable", "r", "Create a relocatable buildbot.tac"], ["develop", "d", "link to buildbot dir rather than copy, with no " "JS optimization (UNIX only)"], ["no-logrotate", "n", "Do not permit buildmaster rotate logs by itself"] ] optParameters = [ ["config", "c", "master.cfg", "name of the buildmaster config file"], ["log-size", "s", 10000000, "size at which to rotate twisted log files", int], ["log-count", "l", 10, "limit the number of kept old twisted log files"], ["db", None, "sqlite:///state.sqlite", "which DB to use for scheduler/status state. See below for syntax."], ] def getSynopsis(self): return "Usage: buildbot create-master [options] []" longdesc = textwrap.dedent(""" This command creates a buildmaster working directory and buildbot.tac file. The master will live in (defaults to the current directory) and create various files there. If --relocatable is given, then the resulting buildbot.tac file will be written such that its containing directory is assumed to be the basedir. This is generally a good idea. At runtime, the master will read a configuration file (named 'master.cfg' by default) in its basedir. This file should contain python code which eventually defines a dictionary named 'BuildmasterConfig'. The elements of this dictionary are used to configure the Buildmaster. See doc/config.xhtml for details about what can be controlled through this interface. The --db string is evaluated to build the DB object, which specifies which database the buildmaster should use to hold scheduler state and status information. 
The default (which creates an SQLite database in BASEDIR/state.sqlite) is equivalent to: --db='sqlite:///state.sqlite' To use a remote MySQL database instead, use something like: --db='mysql://bbuser:bbpasswd@dbhost/bbdb' The --db string is stored verbatim in the master.cfg.sample file, and evaluated at 'buildbot start' time to pass a DBConnector instance into the newly-created BuildMaster object. """) def postOptions(self): super().postOptions() # validate 'log-count' parameter if self['log-count'] == 'None': self['log-count'] = None else: try: self['log-count'] = int(self['log-count']) except ValueError as e: raise usage.UsageError( "log-count parameter needs to be an int or None") from e # validate 'db' parameter try: # check if sqlalchemy will be able to parse specified URL sa.engine.url.make_url(self['db']) except sa.exc.ArgumentError as e: raise usage.UsageError("could not parse database URL '{}'".format(self['db'])) from e class StopOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.stop.stop" optFlags = [ ["quiet", "q", "Do not emit the commands being run"], ["clean", "c", "Clean shutdown master"], ["no-wait", None, "Don't wait for complete master shutdown"], ] def getSynopsis(self): return "Usage: buildbot stop []" class RestartOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.restart.restart" optFlags = [ ['quiet', 'q', "Don't display startup log messages"], ['nodaemon', None, "Don't daemonize (stay in foreground)"], ["clean", "c", "Clean shutdown master"], ] optParameters = [ ['start_timeout', None, None, 'The amount of time the script waits for the master to restart until ' 'it declares the operation as failure'], ] def getSynopsis(self): return "Usage: buildbot restart []" class StartOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.start.start" optFlags = [ ['quiet', 'q', "Don't display startup log messages"], ['nodaemon', None, 
"Don't daemonize (stay in foreground)"], ] optParameters = [ ['start_timeout', None, None, 'The amount of time the script waits for the master to start until it ' 'declares the operation as failure'], ] def getSynopsis(self): return "Usage: buildbot start []" class ReconfigOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.reconfig.reconfig" optFlags = [ ['quiet', 'q', "Don't display log messages about reconfiguration"], ] optParameters = [ ['progress_timeout', None, None, 'The amount of time the script waits for messages in the logs that indicate progress.'], ] def getSynopsis(self): return "Usage: buildbot reconfig []" class SendChangeOptions(base.SubcommandOptions): subcommandFunction = "buildbot.scripts.sendchange.sendchange" def __init__(self): super().__init__() self['properties'] = {} optParameters = [ ("master", "m", None, "Location of the buildmaster's PBChangeSource (host:port)"), # deprecated in 0.8.3; remove in 0.8.5 (bug #1711) ("auth", "a", 'change:changepw', "Authentication token - username:password, or prompt for password"), ("who", "W", None, "Author of the commit"), ("repository", "R", '', "Repository specifier"), ("vc", "s", None, "The VC system in use, one of: cvs, svn, darcs, hg, " "bzr, git, mtn, p4"), ("project", "P", '', "Project specifier"), ("branch", "b", None, "Branch specifier"), ("category", "C", None, "Category of repository"), ("codebase", None, None, "Codebase this change is in (requires 0.8.7 master or later)"), ("revision", "r", None, "Revision specifier"), ("revision_file", None, None, "Filename containing revision spec"), ("property", "p", None, "A property for the change, in the format: name:value"), ("comments", "c", None, "log message"), ("logfile", "F", None, "Read the log messages from this file (- for stdin)"), ("when", "w", None, "timestamp to use as the change time"), ("revlink", "l", '', "Revision link (revlink)"), ("encoding", "e", 'utf8', "Encoding of other parameters"), ] 
buildbotOptions = [ ['master', 'master'], ['who', 'who'], ['branch', 'branch'], ['category', 'category'], ['vc', 'vc'], ] requiredOptions = ['who', 'master'] def getSynopsis(self): return "Usage: buildbot sendchange [options] filenames.." def parseArgs(self, *args): self['files'] = args def opt_property(self, property): name, value = property.split(':', 1) self['properties'][name] = value def postOptions(self): super().postOptions() if self.get("revision_file"): with open(self["revision_file"], "r") as f: self['revision'] = f.read() if self.get('when'): try: self['when'] = float(self['when']) except (TypeError, ValueError) as e: raise usage.UsageError('invalid "when" value {}'.format(self['when'])) from e else: self['when'] = None if not self.get('comments') and self.get('logfile'): if self['logfile'] == "-": self['comments'] = sys.stdin.read() else: with open(self['logfile'], "rt") as f: self['comments'] = f.read() if self.get('comments') is None: self['comments'] = "" # fix up the auth with a password if none was given auth = self.get('auth') if ':' not in auth: pw = getpass.getpass("Enter password for '{}': ".format(auth)) auth = "{}:{}".format(auth, pw) self['auth'] = tuple(auth.split(':', 1)) vcs = ['cvs', 'svn', 'darcs', 'hg', 'bzr', 'git', 'mtn', 'p4'] if self.get('vc') and self.get('vc') not in vcs: raise usage.UsageError("vc must be one of {}".format(', '.join(vcs))) validateMasterOption(self.get('master')) class TryOptions(base.SubcommandOptions): subcommandFunction = "buildbot.scripts.trycmd.trycmd" optParameters = [ ["connect", "c", None, "How to reach the buildmaster, either 'ssh' or 'pb'"], # for ssh, use --host, --username, --jobdir and optionally # --ssh ["host", None, None, "Hostname (used by ssh) for the buildmaster"], ["port", None, None, "Port (used by ssh) for the buildmaster"], ["jobdir", None, None, "Directory (on the buildmaster host) where try jobs are deposited"], ["ssh", None, None, "Command to use instead of the default \"ssh\""], 
["username", "u", None, "Username performing the try build"], # for PB, use --master, --username, and --passwd ["master", "m", None, "Location of the buildmaster's Try server (host:port)"], ["passwd", None, None, "Password for PB authentication"], ["who", "w", None, "Who is responsible for the try build"], ["comment", "C", None, "A comment which can be used in notifications for this build"], # for ssh to accommodate running in a virtualenv on the buildmaster ["buildbotbin", None, "buildbot", "buildbot binary to use on the buildmaster host"], ["diff", None, None, "Filename of a patch to use instead of scanning a local tree. " "Use '-' for stdin."], ["patchlevel", "p", 0, "Number of slashes to remove from patch pathnames, " "like the -p option to 'patch'"], ["baserev", None, None, "Base revision to use instead of scanning a local tree."], ["vc", None, None, "The VC system in use, one of: bzr, cvs, darcs, git, hg, " "mtn, p4, svn"], ["branch", None, None, "The branch in use, for VC systems that can't figure it out " "themselves"], ["repository", None, None, "Repository to use, instead of path to working directory."], ["builder", "b", None, "Run the trial build on this Builder. Can be used multiple times."], ["properties", None, None, "A set of properties made available in the build environment, " "format is --properties=prop1=value1,prop2=value2,.. " "option can be specified multiple times."], ["property", None, None, "A property made available in the build environment, " "format:prop=value. Can be used multiple times."], ["topfile", None, None, "Name of a file at the top of the tree, used to find the top. " "Only needed for SVN and CVS."], ["topdir", None, None, "Path to the top of the working copy. Only needed for SVN and CVS."], ] optFlags = [ ["wait", None, "wait until the builds have finished"], ["dryrun", 'n', "Gather info, but don't actually submit."], ["get-builder-names", None, "Get the names of available builders. Doesn't submit anything. 
" "Only supported for 'pb' connections."], ["quiet", "q", "Don't print status of current builds while waiting."], ] # Mapping of .buildbot/options names to command-line options buildbotOptions = [ ['try_connect', 'connect'], # [ 'try_builders', 'builders' ], <-- handled in postOptions ['try_vc', 'vc'], ['try_branch', 'branch'], ['try_repository', 'repository'], ['try_topdir', 'topdir'], ['try_topfile', 'topfile'], ['try_host', 'host'], ['try_username', 'username'], ['try_jobdir', 'jobdir'], ['try_ssh', 'ssh'], ['try_buildbotbin', 'buildbotbin'], ['try_passwd', 'passwd'], ['try_master', 'master'], ['try_who', 'who'], ['try_comment', 'comment'], # [ 'try_wait', 'wait' ], <-- handled in postOptions # [ 'try_quiet', 'quiet' ], <-- handled in postOptions # Deprecated command mappings from the quirky old days: ['try_masterstatus', 'master'], ['try_dir', 'jobdir'], ['try_password', 'passwd'], ] def __init__(self): super().__init__() self['builders'] = [] self['properties'] = {} def opt_builder(self, option): self['builders'].append(option) def opt_properties(self, option): # We need to split the value of this option # into a dictionary of properties propertylist = option.split(",") for prop in propertylist: splitproperty = prop.split("=", 1) self['properties'][splitproperty[0]] = splitproperty[1] def opt_property(self, option): name, _, value = option.partition("=") self['properties'][name] = value def opt_patchlevel(self, option): self['patchlevel'] = int(option) def getSynopsis(self): return "Usage: buildbot try [options]" def postOptions(self): super().postOptions() opts = self.optionsFile if not self['builders']: self['builders'] = opts.get('try_builders', []) if opts.get('try_wait', False): self['wait'] = True if opts.get('try_quiet', False): self['quiet'] = True # get the global 'masterstatus' option if it's set and no master # was specified otherwise if not self['master']: self['master'] = opts.get('masterstatus', None) if self['connect'] == 'pb': if not 
self['master']: raise usage.UsageError("master location must be specified" "for 'pb' connections") validateMasterOption(self['master']) class TryServerOptions(base.SubcommandOptions): subcommandFunction = "buildbot.scripts.tryserver.tryserver" optParameters = [ ["jobdir", None, None, "the jobdir (maildir) for submitting jobs"], ] requiredOptions = ['jobdir'] def getSynopsis(self): return "Usage: buildbot tryserver [options]" def postOptions(self): if not self['jobdir']: raise usage.UsageError('jobdir is required') class CheckConfigOptions(base.SubcommandOptions): subcommandFunction = "buildbot.scripts.checkconfig.checkconfig" optFlags = [ ['quiet', 'q', "Don't display error messages or tracebacks"], ] # on tab completion, suggest files as first argument if hasattr(usage, 'Completions'): # only set completion suggestion if running with # twisted version (>=11.1.0) that supports it compData = usage.Completions(extraActions=[usage.CompleteFiles()]) def getSynopsis(self): return "Usage:\t\tbuildbot checkconfig [configFile]\n" + \ "\t\tIf not specified, the config file specified in " + \ "'buildbot.tac' from the current directory will be used" def parseArgs(self, *args): if len(args) >= 1: self['configFile'] = args[0] class UserOptions(base.SubcommandOptions): subcommandFunction = "buildbot.scripts.user.user" optParameters = [ ["master", "m", None, "Location of the buildmaster's user service (host:port)"], ["username", "u", None, "Username for PB authentication"], ["passwd", "p", None, "Password for PB authentication"], ["op", None, None, "User management operation: add, remove, update, get"], ["bb_username", None, None, "Username to set for a given user. Only available on 'update', " "and bb_password must be given as well."], ["bb_password", None, None, "Password to set for a given user. 
Only available on 'update', " "and bb_username must be given as well."], ["ids", None, None, "User's identifiers, used to find users in 'remove' and 'get' " "Can be specified multiple times (--ids=id1,id2,id3)"], ["info", None, None, "User information in the form: --info=type=value,type=value,.. " "Used in 'add' and 'update', can be specified multiple times. " "Note that 'update' requires --info=id:type=value..."] ] buildbotOptions = [ ['master', 'master'], ['user_master', 'master'], ['user_username', 'username'], ['user_passwd', 'passwd'], ] requiredOptions = ['master'] longdesc = textwrap.dedent(""" Currently implemented types for --info= are:\n git, svn, hg, cvs, darcs, bzr, email """) def __init__(self): super().__init__() self['ids'] = [] self['info'] = [] def opt_ids(self, option): id_list = option.split(",") self['ids'].extend(id_list) def opt_info(self, option): # splits info into type/value dictionary, appends to info info_list = option.split(",") info_elem = {} if len(info_list) == 1 and '=' not in info_list[0]: info_elem["identifier"] = info_list[0] self['info'].append(info_elem) else: for info_item in info_list: split_info = info_item.split("=", 1) # pull identifier from update --info if ":" in split_info[0]: split_id = split_info[0].split(":") info_elem["identifier"] = split_id[0] split_info[0] = split_id[1] info_elem[split_info[0]] = split_info[1] self['info'].append(info_elem) def getSynopsis(self): return "Usage: buildbot user [options]" def _checkValidTypes(self, info): from buildbot.process.users import users valid = set(['identifier', 'email'] + users.srcs) for user in info: for attr_type in user: if attr_type not in valid: raise usage.UsageError("Type not a valid attr_type, must be in: {}".format( ', '.join(valid))) def postOptions(self): super().postOptions() validateMasterOption(self.get('master')) op = self.get('op') if not op: raise usage.UsageError("you must specify an operation: add, " "remove, update, get") if op not in ['add', 'remove', 
'update', 'get']: raise usage.UsageError("bad op %r, use 'add', 'remove', 'update', " "or 'get'" % op) if not self.get('username') or not self.get('passwd'): raise usage.UsageError("A username and password must be given") bb_username = self.get('bb_username') bb_password = self.get('bb_password') if bb_username or bb_password: if op != 'update': raise usage.UsageError("bb_username and bb_password only work " "with update") if not bb_username or not bb_password: raise usage.UsageError("Must specify both bb_username and " "bb_password or neither.") info = self.get('info') ids = self.get('ids') # check for erroneous args if not info and not ids: raise usage.UsageError("must specify either --ids or --info") if op in ('add', 'update'): if ids: raise usage.UsageError("cannot use --ids with 'add' or " "'update'") self._checkValidTypes(info) if op == 'update': for user in info: if 'identifier' not in user: raise usage.UsageError("no ids found in update info; " "use: --info=id:type=value,type=value,..") if op == 'add': for user in info: if 'identifier' in user: raise usage.UsageError("identifier found in add info, " "use: --info=type=value,type=value,..") if op in ('remove', 'get'): if info: raise usage.UsageError("cannot use --info with 'remove' " "or 'get'") class DataSpecOption(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.dataspec.dataspec" optParameters = [ ['out', 'o', "dataspec.json", "output to specified path"], ['global', 'g', None, "output a js script, that sets a global, for inclusion in testsuite"], ] def getSynopsis(self): return "Usage: buildbot dataspec [options]" class GenGraphQLOption(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.gengraphql.gengraphql" optParameters = [ ['out', 'o', "graphql.schema", "output to specified path"], ] def getSynopsis(self): return "Usage: buildbot graphql-schema [options]" class DevProxyOptions(base.BasedirMixin, base.SubcommandOptions): """Run a fake web 
server serving the local ui frontend and a distant rest and websocket api. This command required aiohttp to be installed in the virtualenv""" subcommandFunction = "buildbot.scripts.devproxy.devproxy" optFlags = [ ["unsafe_ssl", None, "Bypass ssl certificate validation"], ] optParameters = [ ["port", "p", 8011, "http port to use"], ["plugins", None, None, "plugin config to use. As json string e.g: " "--plugins='{\"custom_plugin\": {\"option1\": true}}'"], ["auth_cookie", None, None, "TWISTED_SESSION cookie to be used for auth " "(taken in developer console: in document.cookie variable)"], ["buildbot_url", "b", "https://buildbot.buildbot.net", "real buildbot url to proxy to (can be http or https)"] ] class CleanupDBOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.cleanupdb.cleanupDatabase" optFlags = [ ["quiet", "q", "Do not emit the commands being run"], ["force", "f", "Force log recompression (useful when changing compression algorithm)"], # when this command has several maintenance jobs, we should make # them optional here. For now there is only one. ] optParameters = [ ] def getSynopsis(self): return "Usage: buildbot cleanupdb [options] []" longdesc = textwrap.dedent(""" This command takes an existing buildmaster working directory and do some optimization on the database. This command is frontend for various database maintenance jobs: - optimiselogs: This optimization groups logs into bigger chunks to apply higher level of compression. This command uses the database specified in the master configuration file. If you wish to use a database other than the default (sqlite), be sure to set that parameter before upgrading. 
""") class Options(usage.Options): synopsis = "Usage: buildbot [command options]" subCommands = [ ['create-master', None, CreateMasterOptions, "Create and populate a directory for a new buildmaster"], ['upgrade-master', None, UpgradeMasterOptions, "Upgrade an existing buildmaster directory for the current version"], ['start', None, StartOptions, "Start a buildmaster"], ['stop', None, StopOptions, "Stop a buildmaster"], ['restart', None, RestartOptions, "Restart a buildmaster"], ['reconfig', None, ReconfigOptions, "SIGHUP a buildmaster to make it re-read the config file"], ['sighup', None, ReconfigOptions, "SIGHUP a buildmaster to make it re-read the config file"], ['sendchange', None, SendChangeOptions, "Send a change to the buildmaster"], ['try', None, TryOptions, "Run a build with your local changes"], ['tryserver', None, TryServerOptions, "buildmaster-side 'try' support function, not for users"], ['checkconfig', None, CheckConfigOptions, "test the validity of a master.cfg config file"], ['user', None, UserOptions, "Manage users in buildbot's database"], ['dataspec', None, DataSpecOption, "Output data api spec"], ['dev-proxy', None, DevProxyOptions, "Run a fake web server serving the local ui frontend and a distant rest and websocket api." 
], ['graphql-schema', None, GenGraphQLOption, "Output graphql api schema"], ['cleanupdb', None, CleanupDBOptions, "cleanup the database" ] ] def opt_version(self): print("Buildbot version: {}".format(buildbot.version)) super().opt_version() def opt_verbose(self): from twisted.python import log log.startLogging(sys.stderr) def postOptions(self): if not hasattr(self, 'subOptions'): raise usage.UsageError("must specify a command") def run(): config = Options() check_functional_environment(buildbot.config) try: config.parseOptions(sys.argv[1:]) except usage.error as e: print("{}: {}".format(sys.argv[0], e)) print() c = getattr(config, 'subOptions', config) print(str(c)) sys.exit(1) subconfig = config.subOptions subcommandFunction = reflect.namedObject(subconfig.subcommandFunction) sys.exit(subcommandFunction(subconfig)) buildbot-3.4.0/master/buildbot/scripts/sample.cfg000066400000000000000000000077171413250514000220610ustar00rootroot00000000000000# -*- python -*- # ex: set filetype=python: from buildbot.plugins import * # This is a sample buildmaster config file. It must be installed as # 'master.cfg' in your buildmaster's base directory. # This is the dictionary that the buildmaster pays attention to. We also use # a shorter alias to save typing. c = BuildmasterConfig = {} ####### WORKERS # The 'workers' list defines the set of recognized workers. Each element is # a Worker object, specifying a unique worker name and password. The same # worker name and password must be configured on the worker. c['workers'] = [worker.Worker("example-worker", "pass")] # 'protocols' contains information about protocols which master will use for # communicating with workers. You must define at least 'port' option that workers # could connect to your master with this protocol. 
# 'port' must match the value configured into the workers (with their # --master option) c['protocols'] = {'pb': {'port': 9989}} ####### CHANGESOURCES # the 'change_source' setting tells the buildmaster how it should find out # about source code changes. Here we point to the buildbot version of a python hello-world project. c['change_source'] = [] c['change_source'].append(changes.GitPoller( 'git://github.com/buildbot/hello-world.git', workdir='gitpoller-workdir', branch='master', pollInterval=300)) ####### SCHEDULERS # Configure the Schedulers, which decide how to react to incoming changes. In this # case, just kick off a 'runtests' build c['schedulers'] = [] c['schedulers'].append(schedulers.SingleBranchScheduler( name="all", change_filter=util.ChangeFilter(branch='master'), treeStableTimer=None, builderNames=["runtests"])) c['schedulers'].append(schedulers.ForceScheduler( name="force", builderNames=["runtests"])) ####### BUILDERS # The 'builders' list defines the Builders, which tell Buildbot how to perform a build: # what steps, and which workers can execute them. Note that any particular build will # only take place on one worker. factory = util.BuildFactory() # check out the source factory.addStep(steps.Git(repourl='git://github.com/buildbot/hello-world.git', mode='incremental')) # run the tests (note that this will require that 'trial' is installed) factory.addStep(steps.ShellCommand(command=["trial", "hello"], env={"PYTHONPATH": "."})) c['builders'] = [] c['builders'].append( util.BuilderConfig(name="runtests", workernames=["example-worker"], factory=factory)) ####### BUILDBOT SERVICES # 'services' is a list of BuildbotService items like reporter targets. The # status of each build will be pushed to these targets. buildbot/reporters/*.py # has a variety to choose from, like IRC bots. c['services'] = [] ####### PROJECT IDENTITY # the 'title' string will appear at the top of this buildbot installation's # home pages (linked to the 'titleURL'). 
c['title'] = "Hello World CI" c['titleURL'] = "https://buildbot.github.io/hello-world/" # the 'buildbotURL' string should point to the location where the buildbot's # internal web server is visible. This typically uses the port number set in # the 'www' entry below, but with an externally-visible host name which the # buildbot cannot figure out without some help. c['buildbotURL'] = "http://localhost:8010/" # minimalistic config to activate new web UI c['www'] = dict(port=8010, plugins=dict(waterfall_view={}, console_view={}, grid_view={})) ####### DB URL c['db'] = { # This specifies what database buildbot uses to store its state. # It's easy to start with sqlite, but it's recommended to switch to a dedicated # database, such as PostgreSQL or MySQL, for use in production environments. # http://docs.buildbot.net/current/manual/configuration/global.html#database-specification 'db_url' : "sqlite:///state.sqlite", } buildbot-3.4.0/master/buildbot/scripts/sendchange.py000066400000000000000000000040121413250514000225510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sys import traceback from twisted.internet import defer from buildbot.clients import sendchange as sendchange_client from buildbot.util import in_reactor @in_reactor @defer.inlineCallbacks def sendchange(config): encoding = config.get('encoding', 'utf8') who = config.get('who') auth = config.get('auth') master = config.get('master') branch = config.get('branch') category = config.get('category') revision = config.get('revision') properties = config.get('properties', {}) repository = config.get('repository', '') vc = config.get('vc', None) project = config.get('project', '') revlink = config.get('revlink', '') when = config.get('when') comments = config.get('comments') files = config.get('files', ()) codebase = config.get('codebase', None) s = sendchange_client.Sender(master, auth, encoding=encoding) try: yield s.send(branch, revision, comments, files, who=who, category=category, when=when, properties=properties, repository=repository, vc=vc, project=project, revlink=revlink, codebase=codebase) except Exception: print("change not sent:") traceback.print_exc(file=sys.stdout) return 1 else: print("change sent successfully") return 0 buildbot-3.4.0/master/buildbot/scripts/start.py000066400000000000000000000121551413250514000216160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sys from twisted.internet import protocol from twisted.internet import reactor from twisted.python.runtime import platformType from buildbot.scripts import base from buildbot.scripts.logwatcher import BuildmasterStartupError from buildbot.scripts.logwatcher import BuildmasterTimeoutError from buildbot.scripts.logwatcher import LogWatcher from buildbot.scripts.logwatcher import ReconfigError from buildbot.util import rewrap class Follower: def follow(self, basedir, timeout=None): self.rc = 0 self._timeout = timeout if timeout else 10.0 print("Following twistd.log until startup finished..") lw = LogWatcher(os.path.join(basedir, "twistd.log"), timeout=self._timeout) d = lw.start() d.addCallbacks(self._success, self._failure) reactor.run() return self.rc def _success(self, _): print("The buildmaster appears to have (re)started correctly.") self.rc = 0 reactor.stop() def _failure(self, why): if why.check(BuildmasterTimeoutError): print(rewrap("""\ The buildmaster took more than {0} seconds to start, so we were unable to confirm that it started correctly. Please 'tail twistd.log' and look for a line that says 'BuildMaster is running' to verify correct startup. """.format(self._timeout))) elif why.check(ReconfigError): print(rewrap("""\ The buildmaster appears to have encountered an error in the master.cfg config file during startup. Please inspect and fix master.cfg, then restart the buildmaster. """)) elif why.check(BuildmasterStartupError): print(rewrap("""\ The buildmaster startup failed. Please see 'twistd.log' for possible reason. """)) else: print(rewrap("""\ Unable to confirm that the buildmaster started correctly. You may need to stop it, fix the config file, and restart. 
""")) print(why) self.rc = 1 reactor.stop() def launchNoDaemon(config): os.chdir(config['basedir']) sys.path.insert(0, os.path.abspath(config['basedir'])) argv = ["twistd", "--no_save", "--nodaemon", "--logfile=twistd.log", # windows doesn't use the same default "--python=buildbot.tac"] if platformType != 'win32': # windows doesn't use pidfile option. argv.extend(["--pidfile="]) sys.argv = argv # this is copied from bin/twistd. twisted-2.0.0 through 2.4.0 use # _twistw.run . Twisted-2.5.0 and later use twistd.run, even for # windows. from twisted.scripts import twistd twistd.run() def launch(config): os.chdir(config['basedir']) sys.path.insert(0, os.path.abspath(config['basedir'])) # see if we can launch the application without actually having to # spawn twistd, since spawning processes correctly is a real hassle # on windows. argv = [sys.executable, "-c", # this is copied from bin/twistd. twisted-2.0.0 through 2.4.0 use # _twistw.run . Twisted-2.5.0 and later use twistd.run, even for # windows. 
"from twisted.scripts import twistd; twistd.run()", "--no_save", "--logfile=twistd.log", # windows doesn't use the same default "--python=buildbot.tac"] # ProcessProtocol just ignores all output proc = reactor.spawnProcess( protocol.ProcessProtocol(), sys.executable, argv, env=os.environ) if platformType == "win32": with open("twistd.pid", "w") as pidfile: pidfile.write("{0}".format(proc.pid)) def start(config): if not base.isBuildmasterDir(config['basedir']): return 1 if config['nodaemon']: launchNoDaemon(config) return 0 launch(config) # We don't have tail on windows if platformType == "win32" or config['quiet']: return 0 # this is the parent timeout = config.get('start_timeout', None) if timeout is not None: try: timeout = float(timeout) except ValueError: print('Start timeout must be a number') return 1 rc = Follower().follow(config['basedir'], timeout=timeout) return rc buildbot-3.4.0/master/buildbot/scripts/stop.py000066400000000000000000000045031413250514000214440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import errno import os import signal import time from twisted.python.runtime import platformType from buildbot.scripts import base def stop(config, signame="TERM", wait=None): basedir = config['basedir'] quiet = config['quiet'] if wait is None: wait = not config['no-wait'] if config['clean']: signame = 'USR1' if not base.isBuildmasterDir(config['basedir']): return 1 pidfile = os.path.join(basedir, 'twistd.pid') try: with open(pidfile, "rt") as f: pid = int(f.read().strip()) except Exception: if not config['quiet']: print("buildmaster not running") return 0 signum = getattr(signal, "SIG" + signame) try: os.kill(pid, signum) except OSError as e: if e.errno != errno.ESRCH and platformType != "win32": raise if not config['quiet']: print("buildmaster not running") try: os.unlink(pidfile) except OSError: pass return 0 if not wait: if not quiet: print("sent SIG{} to process".format(signame)) return 0 time.sleep(0.1) # poll once per second until twistd.pid goes away, up to 10 seconds, # unless we're doing a clean stop, in which case wait forever count = 0 while count < 10 or config['clean']: try: os.kill(pid, 0) except OSError: if not quiet: print("buildbot process %d is dead" % pid) return 0 time.sleep(1) count += 1 if not quiet: print("never saw process go away") return 1 buildbot-3.4.0/master/buildbot/scripts/trycmd.py000066400000000000000000000014711413250514000217620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members def trycmd(config): from buildbot.clients import tryclient t = tryclient.Try(config) t.run() return 0 buildbot-3.4.0/master/buildbot/scripts/tryserver.py000066400000000000000000000027111413250514000225230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import sys import time from hashlib import md5 from buildbot.util import unicode2bytes def tryserver(config): jobdir = os.path.expanduser(config["jobdir"]) job = sys.stdin.read() # now do a 'safecat'-style write to jobdir/tmp, then move atomically to # jobdir/new . Rather than come up with a unique name randomly, I'm just # going to MD5 the contents and prepend a timestamp. 
timestring = "%d" % time.time() m = md5() job = unicode2bytes(job) m.update(job) jobhash = m.hexdigest() fn = "{}-{}".format(timestring, jobhash) tmpfile = os.path.join(jobdir, "tmp", fn) newfile = os.path.join(jobdir, "new", fn) with open(tmpfile, "wb") as f: f.write(job) os.rename(tmpfile, newfile) return 0 buildbot-3.4.0/master/buildbot/scripts/upgrade_master.py000066400000000000000000000120441413250514000234600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import signal import sys import traceback from twisted.internet import defer from twisted.python import util from buildbot import monkeypatches from buildbot.db import connector from buildbot.master import BuildMaster from buildbot.scripts import base from buildbot.util import in_reactor from buildbot.util import stripUrlPassword def installFile(config, target, source, overwrite=False): with open(source, "rt") as f: new_contents = f.read() if os.path.exists(target): with open(target, "rt") as f: old_contents = f.read() if old_contents != new_contents: if overwrite: if not config['quiet']: print("{} has old/modified contents".format(target)) print(" overwriting it with new contents") with open(target, "wt") as f: f.write(new_contents) else: if not config['quiet']: print("{} has old/modified contents".format(target)) print(" writing new contents to {}.new".format(target)) with open(target + ".new", "wt") as f: f.write(new_contents) # otherwise, it's up to date else: if not config['quiet']: print("creating {}".format(target)) with open(target, "wt") as f: f.write(new_contents) def upgradeFiles(config): if not config['quiet']: print("upgrading basedir") webdir = os.path.join(config['basedir'], "public_html") if os.path.exists(webdir): print("Notice: public_html is not used starting from Buildbot 0.9.0") print(" consider using third party HTTP server for serving " "static files") installFile(config, os.path.join(config['basedir'], "master.cfg.sample"), util.sibpath(__file__, "sample.cfg"), overwrite=True) @defer.inlineCallbacks def upgradeDatabase(config, master_cfg): if not config['quiet']: print("upgrading database ({})".format(stripUrlPassword(master_cfg.db['db_url']))) print("Warning: Stopping this process might cause data loss") def sighandler(signum, frame): msg = " ".join(""" WARNING: ignoring signal {}. This process should not be interrupted to avoid database corruption. 
If you really need to terminate it, use SIGKILL. """.split()) print(msg.format(signum)) prev_handlers = {} try: for signame in ("SIGTERM", "SIGINT", "SIGQUIT", "SIGHUP", "SIGUSR1", "SIGUSR2", "SIGBREAK"): if hasattr(signal, signame): signum = getattr(signal, signame) prev_handlers[signum] = signal.signal(signum, sighandler) master = BuildMaster(config['basedir']) master.config = master_cfg master.db.disownServiceParent() db = connector.DBConnector(basedir=config['basedir']) yield db.setServiceParent(master) yield db.setup(check_version=False, verbose=not config['quiet']) yield db.model.upgrade() yield db.masters.setAllMastersActiveLongTimeAgo() finally: # restore previous signal handlers for signum, handler in prev_handlers.items(): signal.signal(signum, handler) @in_reactor def upgradeMaster(config, _noMonkey=False): if not _noMonkey: # pragma: no cover monkeypatches.patch_all() if not base.checkBasedir(config): return defer.succeed(1) os.chdir(config['basedir']) try: configFile = base.getConfigFileFromTac(config['basedir']) except (SyntaxError, ImportError): print("Unable to load 'buildbot.tac' from '{}':".format(config['basedir']), file=sys.stderr) e = traceback.format_exc() print(e, file=sys.stderr) return defer.succeed(1) master_cfg = base.loadConfig(config, configFile) if not master_cfg: return defer.succeed(1) return _upgradeMaster(config, master_cfg) @defer.inlineCallbacks def _upgradeMaster(config, master_cfg): try: upgradeFiles(config) yield upgradeDatabase(config, master_cfg) except Exception: e = traceback.format_exc() print("problem while upgrading!:\n" + e, file=sys.stderr) return 1 else: if not config['quiet']: print("upgrade complete") return 0 buildbot-3.4.0/master/buildbot/scripts/user.py000066400000000000000000000032101413250514000214270ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.clients import usersclient from buildbot.process.users import users from buildbot.util import in_reactor @in_reactor @defer.inlineCallbacks def user(config): master = config.get('master') op = config.get('op') username = config.get('username') passwd = config.get('passwd') master, port = master.split(":") port = int(port) bb_username = config.get('bb_username') bb_password = config.get('bb_password') if bb_username or bb_password: bb_password = users.encrypt(bb_password) info = config.get('info') ids = config.get('ids') # find identifier if op == add if info and op == 'add': for user in info: user['identifier'] = sorted(user.values())[0] uc = usersclient.UsersClient(master, username, passwd, port) output = yield uc.send(op, bb_username, bb_password, ids, info) if output: print(output) return 0 buildbot-3.4.0/master/buildbot/scripts/windows_service.py000077500000000000000000000537011413250514000237000ustar00rootroot00000000000000# pylint: disable=import-outside-toplevel # # Runs the build-bot as a Windows service. # To use: # * Install and configure buildbot as per normal (ie, running # 'setup.py install' from the source directory). # # * Configure any number of build-bot directories (workers or masters), as # per the buildbot instructions. 
Test these directories normally by # using the (possibly modified) "buildbot.bat" file and ensure everything # is working as expected. # # * Install the buildbot service. Execute the command: # % buildbot_windows_service # To see installation options. You probably want to specify: # + --username and --password options to specify the user to run the # + --startup auto to have the service start at boot time. # # For example: # % buildbot_windows_service --user mark --password secret \ # --startup auto install # Alternatively, you could execute: # % buildbot_windows_service install # to install the service with default options, then use Control Panel # to configure it. # # * Start the service specifying the name of all buildbot directories as # service args. This can be done one of 2 ways: # - Execute the command: # % buildbot_windows_service start "dir_name1" "dir_name2" # or: # - Start Control Panel->Administrative Tools->Services # - Locate the previously installed buildbot service. # - Open the "properties" for the service. # - Enter the directory names into the "Start Parameters" textbox. The # directory names must be fully qualified, and surrounded in quotes if # they include spaces. # - Press the "Start"button. # Note that the service will automatically use the previously specified # directories if no arguments are specified. This means the directories # need only be specified when the directories to use have changed (and # therefore also the first time buildbot is configured) # # * The service should now be running. You should check the Windows # event log. If all goes well, you should see some information messages # telling you the buildbot has successfully started. # # * If you change the buildbot configuration, you must restart the service. # There is currently no way to ask a running buildbot to reload the # config. You can restart by executing: # % buildbot_windows_service restart # # Troubleshooting: # * Check the Windows event log for any errors. 
# * Check the "twistd.log" file in your buildbot directories - once each # bot has been started it just writes to this log as normal. # * Try executing: # % python buildbot_service.py debug # This will execute the buildbot service in "debug" mode, and allow you to # see all messages etc generated. If the service works in debug mode but # not as a real service, the error probably relates to the environment or # permissions of the user configured to run the service (debug mode runs as # the currently logged in user, not the service user) # * Ensure you have the latest pywin32 build available, at least version 206. # Written by Mark Hammond, 2006. import os import sys import threading import pywintypes import servicemanager import win32api import win32con import win32event import win32file import win32pipe import win32process import win32security import win32service import win32serviceutil import winerror # Are we running in a py2exe environment? is_frozen = hasattr(sys, "frozen") # Taken from the Zope service support - each "child" is run as a sub-process # (trying to run multiple twisted apps in the same process is likely to screw # stdout redirection etc). # Note that unlike the Zope service, we do *not* attempt to detect a failed # client and perform restarts - buildbot itself does a good job # at reconnecting, and Windows itself provides restart semantics should # everything go pear-shaped. # We execute a new thread that captures the tail of the output from our child # process. If the child fails, it is written to the event log. # This process is unconditional, and the output is never written to disk # (except obviously via the event log entry) # Size of the blocks we read from the child process's output. CHILDCAPTURE_BLOCK_SIZE = 80 # The number of BLOCKSIZE blocks we keep as process output. 
CHILDCAPTURE_MAX_BLOCKS = 200 class BBService(win32serviceutil.ServiceFramework): _svc_name_ = 'BuildBot' _svc_display_name_ = _svc_name_ _svc_description_ = 'Manages local buildbot workers and masters - ' \ 'see http://buildbot.net' def __init__(self, args): super().__init__(args) # Create an event which we will use to wait on. The "service stop" # request will set this event. # * We must make it inheritable so we can pass it to the child # process via the cmd-line # * Must be manual reset so each child process and our service # all get woken from a single set of the event. sa = win32security.SECURITY_ATTRIBUTES() sa.bInheritHandle = True self.hWaitStop = win32event.CreateEvent(sa, True, False, None) self.args = args self.dirs = None self.runner_prefix = None # Patch up the service messages file in a frozen exe. # (We use the py2exe option that magically bundles the .pyd files # into the .zip file - so servicemanager.pyd doesn't exist.) if is_frozen and servicemanager.RunningAsService(): msg_file = os.path.join(os.path.dirname(sys.executable), "buildbot.msg") if os.path.isfile(msg_file): servicemanager.Initialize("BuildBot", msg_file) else: self.warning("Strange - '{}' does not exist".format(msg_file)) def _checkConfig(self): # Locate our child process runner (but only when run from source) if not is_frozen: # Running from source python_exe = os.path.join(sys.prefix, "python.exe") if not os.path.isfile(python_exe): # for ppl who build Python itself from source. 
python_exe = os.path.join(sys.prefix, "PCBuild", "python.exe") if not os.path.isfile(python_exe): # virtualenv support python_exe = os.path.join(sys.prefix, "Scripts", "python.exe") if not os.path.isfile(python_exe): self.error("Can not find python.exe to spawn subprocess") return False me = __file__ if me.endswith(".pyc") or me.endswith(".pyo"): me = me[:-1] self.runner_prefix = '"{}" "{}"'.format(python_exe, me) else: # Running from a py2exe built executable - our child process is # us (but with the funky cmdline args!) self.runner_prefix = '"' + sys.executable + '"' # Now our arg processing - this may be better handled by a # twisted/buildbot style config file - but as of time of writing, # MarkH is clueless about such things! # Note that the "arguments" you type into Control Panel for the # service do *not* persist - they apply only when you click "start" # on the service. When started by Windows, args are never presented. # Thus, it is the responsibility of the service to persist any args. # so, when args are presented, we save them as a "custom option". If # they are not presented, we load them from the option. 
self.dirs = [] if len(self.args) > 1: dir_string = os.pathsep.join(self.args[1:]) save_dirs = True else: dir_string = win32serviceutil.GetServiceCustomOption(self, "directories") save_dirs = False if not dir_string: self.error("You must specify the buildbot directories as " "parameters to the service.\nStopping the service.") return False dirs = dir_string.split(os.pathsep) for d in dirs: d = os.path.abspath(d) sentinal = os.path.join(d, "buildbot.tac") if os.path.isfile(sentinal): self.dirs.append(d) else: msg = "Directory '{}' is not a buildbot dir - ignoring".format(d) self.warning(msg) if not self.dirs: self.error("No valid buildbot directories were specified.\n" "Stopping the service.") return False if save_dirs: dir_string = os.pathsep.join(self.dirs) win32serviceutil.SetServiceCustomOption(self, "directories", dir_string) return True def SvcStop(self): # Tell the SCM we are starting the stop process. self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # Set the stop event - the main loop takes care of termination. win32event.SetEvent(self.hWaitStop) # SvcStop only gets triggered when the user explicitly stops (or restarts) # the service. To shut the service down cleanly when Windows is shutting # down, we also need to hook SvcShutdown. SvcShutdown = SvcStop def SvcDoRun(self): if not self._checkConfig(): # stopped status set by caller. 
return self.logmsg(servicemanager.PYS_SERVICE_STARTED) child_infos = [] for bbdir in self.dirs: self.info("Starting BuildBot in directory '{}'".format(bbdir)) # hWaitStop is a Handle but the command needs the int associated # to that Handle hstop = int(self.hWaitStop) cmd = '{} --spawn {} start --nodaemon {}'.format(self.runner_prefix, hstop, bbdir) h, t, output = self.createProcess(cmd) child_infos.append((bbdir, h, t, output)) while child_infos: handles = [self.hWaitStop] + [i[1] for i in child_infos] rc = win32event.WaitForMultipleObjects(handles, 0, # bWaitAll win32event.INFINITE) if rc == win32event.WAIT_OBJECT_0: # user sent a stop service request break # A child process died. For now, just log the output # and forget the process. index = rc - win32event.WAIT_OBJECT_0 - 1 bbdir, dead_handle, _, output_blocks = \ child_infos[index] status = win32process.GetExitCodeProcess(dead_handle) output = "".join(output_blocks) if not output: output = ("The child process generated no output. " "Please check the twistd.log file in the " "indicated directory.") self.warning(("BuildBot for directory {} terminated with " "exit code {}.\n{}").format(repr(bbdir), status, output)) del child_infos[index] if not child_infos: self.warning("All BuildBot child processes have " "terminated. Service stopping.") # Either no child processes left, or stop event set. self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # The child processes should have also seen our stop signal # so wait for them to terminate. for bbdir, h, t, output in child_infos: for i in range(10): # 30 seconds to shutdown... self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) rc = win32event.WaitForSingleObject(h, 3000) if rc == win32event.WAIT_OBJECT_0: break # Process terminated - no need to try harder. 
if rc == win32event.WAIT_OBJECT_0: break self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # If necessary, kill it if win32process.GetExitCodeProcess(h) == win32con.STILL_ACTIVE: self.warning("BuildBot process at %r failed to terminate - " "killing it" % (bbdir, )) win32api.TerminateProcess(h, 3) self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # Wait for the redirect thread - it should have died as the remote # process terminated. # As we are shutting down, we do the join with a little more care, # reporting progress as we wait (even though we never will ) for i in range(5): t.join(1) self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) if not t.is_alive(): break else: self.warning("Redirect thread did not stop!") # All done. self.logmsg(servicemanager.PYS_SERVICE_STOPPED) # # Error reporting/logging functions. # def logmsg(self, event): # log a service event using servicemanager.LogMsg try: servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, event, (self._svc_name_, " ({})".format(self._svc_display_name_))) except win32api.error as details: # Failed to write a log entry - most likely problem is # that the event log is full. We don't want this to kill us try: print("FAILED to write INFO event", event, ":", details) except IOError: # No valid stdout! Ignore it. pass def _dolog(self, func, msg): try: func(msg) except win32api.error as details: # Failed to write a log entry - most likely problem is # that the event log is full. We don't want this to kill us try: print("FAILED to write event log entry:", details) print(msg) except IOError: pass def info(self, s): self._dolog(servicemanager.LogInfoMsg, s) def warning(self, s): self._dolog(servicemanager.LogWarningMsg, s) def error(self, s): self._dolog(servicemanager.LogErrorMsg, s) # Functions that spawn a child process, redirecting any output. 
# Although buildbot itself does this, it is very handy to debug issues # such as ImportErrors that happen before buildbot has redirected. def createProcess(self, cmd): hInputRead, hInputWriteTemp = self.newPipe() hOutReadTemp, hOutWrite = self.newPipe() pid = win32api.GetCurrentProcess() # This one is duplicated as inheritable. hErrWrite = win32api.DuplicateHandle(pid, hOutWrite, pid, 0, 1, win32con.DUPLICATE_SAME_ACCESS) # These are non-inheritable duplicates. hOutRead = self.dup(hOutReadTemp) hInputWrite = self.dup(hInputWriteTemp) # dup() closed hOutReadTemp, hInputWriteTemp si = win32process.STARTUPINFO() si.hStdInput = hInputRead si.hStdOutput = hOutWrite si.hStdError = hErrWrite si.dwFlags = win32process.STARTF_USESTDHANDLES | \ win32process.STARTF_USESHOWWINDOW si.wShowWindow = win32con.SW_HIDE # pass True to allow handles to be inherited. Inheritance is # problematic in general, but should work in the controlled # circumstances of a service process. create_flags = win32process.CREATE_NEW_CONSOLE # info is (hProcess, hThread, pid, tid) info = win32process.CreateProcess(None, cmd, None, None, True, create_flags, None, None, si) # (NOTE: these really aren't necessary for Python - they are closed # as soon as they are collected) hOutWrite.Close() hErrWrite.Close() hInputRead.Close() # We don't use stdin hInputWrite.Close() # start a thread collecting output blocks = [] t = threading.Thread(target=self.redirectCaptureThread, args=(hOutRead, blocks)) t.start() return info[0], t, blocks def redirectCaptureThread(self, handle, captured_blocks): # One of these running per child process we are watching. It # handles both stdout and stderr on a single handle. The read data is # never referenced until the thread dies - so no need for locks # around self.captured_blocks. 
# self.info("Redirect thread starting") while True: try: ec, data = win32file.ReadFile(handle, CHILDCAPTURE_BLOCK_SIZE) except pywintypes.error as err: # ERROR_BROKEN_PIPE means the child process closed the # handle - ie, it terminated. if err[0] != winerror.ERROR_BROKEN_PIPE: self.warning("Error reading output from process: {}".format(err)) break captured_blocks.append(data) del captured_blocks[CHILDCAPTURE_MAX_BLOCKS:] handle.Close() # self.info("Redirect capture thread terminating") def newPipe(self): sa = win32security.SECURITY_ATTRIBUTES() sa.bInheritHandle = True return win32pipe.CreatePipe(sa, 0) def dup(self, pipe): # create a duplicate handle that is not inherited, so that # it can be closed in the parent. close the original pipe in # the process. pid = win32api.GetCurrentProcess() dup = win32api.DuplicateHandle(pid, pipe, pid, 0, 0, win32con.DUPLICATE_SAME_ACCESS) pipe.Close() return dup # Service registration and startup def RegisterWithFirewall(exe_name, description): # Register our executable as an exception with Windows Firewall. # taken from http://msdn.microsoft.com/library/default.asp?url=\ # /library/en-us/ics/ics/wf_adding_an_application.asp from win32com.client import Dispatch # Scope NET_FW_SCOPE_ALL = 0 # IP Version - ANY is the only allowable setting for now NET_FW_IP_VERSION_ANY = 2 fwMgr = Dispatch("HNetCfg.FwMgr") # Get the current profile for the local firewall policy. profile = fwMgr.LocalPolicy.CurrentProfile app = Dispatch("HNetCfg.FwAuthorizedApplication") app.ProcessImageFileName = exe_name app.Name = description app.Scope = NET_FW_SCOPE_ALL # Use either Scope or RemoteAddresses, but not both # app.RemoteAddresses = "*" app.IpVersion = NET_FW_IP_VERSION_ANY app.Enabled = True # Use this line if you want to add the app, but disabled. # app.Enabled = False profile.AuthorizedApplications.Add(app) # A custom install function. 
def CustomInstall(opts): # Register this process with the Windows Firewall import pythoncom try: RegisterWithFirewall(sys.executable, "BuildBot") except pythoncom.com_error as why: print("FAILED to register with the Windows firewall") print(why) # Magic code to allow shutdown. Note that this code is executed in # the *child* process, by way of the service process executing us with # special cmdline args (which includes the service stop handle!) def _RunChild(runfn): del sys.argv[1] # The --spawn arg. # Create a new thread that just waits for the event to be signalled. t = threading.Thread(target=_WaitForShutdown, args=(int(sys.argv[1]), ) ) del sys.argv[1] # The stop handle # This child process will be sent a console handler notification as # users log off, or as the system shuts down. We want to ignore these # signals as the service parent is responsible for our shutdown. def ConsoleHandler(what): # We can ignore *everything* - ctrl+c will never be sent as this # process is never attached to a console the user can press the # key in! return True win32api.SetConsoleCtrlHandler(ConsoleHandler, True) t.setDaemon(True) # we don't want to wait for this to stop! t.start() if hasattr(sys, "frozen"): # py2exe sets this env vars that may screw our child process - reset del os.environ["PYTHONPATH"] # Start the buildbot/worker app runfn() print("Service child process terminating normally.") def _WaitForShutdown(h): win32event.WaitForSingleObject(h, win32event.INFINITE) print("Shutdown requested") from twisted.internet import reactor reactor.callLater(0, reactor.stop) def DetermineRunner(bbdir): '''Checks if the given directory is a worker or a master and returns the appropriate run function.''' tacfile = os.path.join(bbdir, 'buildbot.tac') if not os.path.exists(tacfile): # No tac-file - use master runner by default. 
import buildbot.scripts.runner return buildbot.scripts.runner.run with open(tacfile, 'r') as f: contents = f.read() try: if 'import Worker' in contents: import buildbot_worker.scripts.runner return buildbot_worker.scripts.runner.run except ImportError: # Not a worker. pass try: if 'import BuildSlave' in contents: import buildslave.scripts.runner return buildslave.scripts.runner.run except ImportError: # Not an old buildslave. pass # Treat as master by default. import buildbot.scripts.runner return buildbot.scripts.runner.run # This function is also called by the py2exe startup code. def HandleCommandLine(): if len(sys.argv) > 1 and sys.argv[1] == "--spawn": # Special command-line created by the service to execute the # child-process. # First arg is the handle to wait on # Fourth arg is the config directory to use for the buildbot/worker _RunChild(DetermineRunner(sys.argv[5])) else: win32serviceutil.HandleCommandLine(BBService, customOptionHandler=CustomInstall) if __name__ == '__main__': HandleCommandLine() buildbot-3.4.0/master/buildbot/secrets/000077500000000000000000000000001413250514000200645ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/secrets/__init__.py000066400000000000000000000013011413250514000221700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members buildbot-3.4.0/master/buildbot/secrets/manager.py000066400000000000000000000030201413250514000220430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ manage providers and handle secrets """ from twisted.internet import defer from buildbot.secrets.secret import SecretDetails from buildbot.util import service class SecretManager(service.BuildbotServiceManager): """ Secret manager """ name = 'secrets' config_attr = "secretsProviders" @defer.inlineCallbacks def get(self, secret, *args, **kwargs): """ get secrets from the provider defined in the secret using args and kwargs @secrets: secrets keys @type: string @return type: SecretDetails """ for provider in self.services: value = yield provider.get(secret) source_name = provider.__class__.__name__ if value is not None: return SecretDetails(source_name, secret, value) return None buildbot-3.4.0/master/buildbot/secrets/providers/000077500000000000000000000000001413250514000221015ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/secrets/providers/__init__.py000066400000000000000000000013011413250514000242050ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members buildbot-3.4.0/master/buildbot/secrets/providers/base.py000066400000000000000000000017731413250514000233750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ secret provider interface """ import abc from buildbot.util.service import BuildbotService class SecretProviderBase(BuildbotService): """ Secret provider base """ @abc.abstractmethod def get(self, *args, **kwargs): """ this should be an abstract method """ buildbot-3.4.0/master/buildbot/secrets/providers/file.py000066400000000000000000000062441413250514000234000ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ file based provider """ import os import stat from buildbot import config from buildbot.secrets.providers.base import SecretProviderBase class SecretInAFile(SecretProviderBase): """ secret is stored in a separate file under the given directory name """ name = "SecretInAFile" def checkFileIsReadOnly(self, dirname, secretfile): filepath = os.path.join(dirname, secretfile) obs_stat = stat.S_IMODE(os.stat(filepath).st_mode) if (obs_stat & 0o77) != 0 and os.name == "posix": config.error(("Permissions {} on file {} are too open." 
" It is required that your secret files are NOT" " accessible by others!").format(oct(obs_stat), secretfile)) def checkSecretDirectoryIsAvailableAndReadable(self, dirname, suffixes): if not os.access(dirname, os.F_OK): config.error("directory {} does not exists".format(dirname)) for secretfile in os.listdir(dirname): for suffix in suffixes: if secretfile.endswith(suffix): self.checkFileIsReadOnly(dirname, secretfile) def loadSecrets(self, dirname, suffixes, strip): secrets = {} for secretfile in os.listdir(dirname): secretvalue = None for suffix in suffixes: if secretfile.endswith(suffix): with open(os.path.join(dirname, secretfile)) as source: secretvalue = source.read() if suffix: secretfile = secretfile[:-len(suffix)] if strip: secretvalue = secretvalue.rstrip("\r\n") secrets[secretfile] = secretvalue return secrets def checkConfig(self, dirname, suffixes=None, strip=True): self._dirname = dirname if suffixes is None: suffixes = [""] self.checkSecretDirectoryIsAvailableAndReadable(dirname, suffixes=suffixes) def reconfigService(self, dirname, suffixes=None, strip=True): self._dirname = dirname self.secrets = {} if suffixes is None: suffixes = [""] self.secrets = self.loadSecrets(self._dirname, suffixes=suffixes, strip=strip) def get(self, entry): """ get the value from the file identified by 'entry' """ return self.secrets.get(entry) buildbot-3.4.0/master/buildbot/secrets/providers/passwordstore.py000066400000000000000000000046561413250514000254050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ password store based provider """ import os from pathlib import Path from twisted.internet import defer from buildbot import config from buildbot.secrets.providers.base import SecretProviderBase from buildbot.util import runprocess class SecretInPass(SecretProviderBase): """ secret is stored in a password store """ name = "SecretInPass" def checkPassIsInPath(self): if not any([(Path(p) / "pass").is_file() for p in os.environ["PATH"].split(":")]): config.error("pass does not exist in PATH") def checkPassDirectoryIsAvailableAndReadable(self, dirname): if not os.access(dirname, os.F_OK): config.error("directory {} does not exist".format(dirname)) def checkConfig(self, gpgPassphrase=None, dirname=None): self.checkPassIsInPath() if dirname: self.checkPassDirectoryIsAvailableAndReadable(dirname) def reconfigService(self, gpgPassphrase=None, dirname=None): self._env = {**os.environ} if gpgPassphrase: self._env["PASSWORD_STORE_GPG_OPTS"] = "--passphrase {}".format(gpgPassphrase) if dirname: self._env["PASSWORD_STORE_DIR"] = dirname @defer.inlineCallbacks def get(self, entry): """ get the value from pass identified by 'entry' """ try: rc, output = yield runprocess.run_process(self.master.reactor, ['pass', entry], env=self._env, collect_stderr=False, stderr_is_error=True) if rc != 0: return None return output.decode("utf-8", "ignore").splitlines()[0] except IOError: return None buildbot-3.4.0/master/buildbot/secrets/providers/vault.py000066400000000000000000000074061413250514000236150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ vault based providers """ from twisted.internet import defer from buildbot import config from buildbot.secrets.providers.base import SecretProviderBase from buildbot.util import httpclientservice from buildbot.warnings import warn_deprecated class HashiCorpVaultSecretProvider(SecretProviderBase): """ basic provider where each secret is stored in Vault KV secret engine """ name = 'SecretInVault' def checkConfig(self, vaultServer=None, vaultToken=None, secretsmount=None, apiVersion=1): warn_deprecated("3.4.0", "Use of HashiCorpVaultSecretProvider is deprecated and will be " "removed in future releases. 
Use HashiCorpVaultKvSecretProvider instead") if not isinstance(vaultServer, str): config.error("vaultServer must be a string while it is {}".format(type(vaultServer))) if not isinstance(vaultToken, str): config.error("vaultToken must be a string while it is {}".format(type(vaultToken))) if apiVersion not in [1, 2]: config.error("apiVersion {} is not supported".format(apiVersion)) @defer.inlineCallbacks def reconfigService(self, vaultServer=None, vaultToken=None, secretsmount=None, apiVersion=1): if secretsmount is None: self.secretsmount = "secret" else: self.secretsmount = secretsmount self.vaultServer = vaultServer self.vaultToken = vaultToken self.apiVersion = apiVersion if vaultServer.endswith('/'): vaultServer = vaultServer[:-1] self._http = yield httpclientservice.HTTPClientService.getService( self.master, self.vaultServer, headers={'X-Vault-Token': self.vaultToken}) @defer.inlineCallbacks def get(self, entry): """ get the value from vault secret backend """ parts = entry.rsplit('/', maxsplit=1) name = parts[0] if len(parts) > 1: key = parts[1] else: key = 'value' if self.apiVersion == 1: path = self.secretsmount + '/' + name else: path = self.secretsmount + '/data/' + name # note that the HTTP path contains v1 for both versions of the key-value # secret engine. Different versions of the key-value engine are # effectively separate secret engines in vault, with the same base HTTP # API, but with different paths within it. 
proj = yield self._http.get(f"/v1/{path}") code = yield proj.code if code != 200: raise KeyError(("The secret {} does not exist in Vault provider: request" " return code: {}.").format(entry, code)) json = yield proj.json() if self.apiVersion == 1: secrets = json.get('data', {}) else: secrets = json.get('data', {}).get('data', {}) try: return secrets[key] except KeyError as e: raise KeyError( "The secret {} does not exist in Vault provider: {}".format(entry, e)) from e buildbot-3.4.0/master/buildbot/secrets/providers/vault_hvac.py000066400000000000000000000160361413250514000246150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ HVAC based providers """ from twisted.internet import defer from twisted.internet import threads from buildbot import config from buildbot.secrets.providers.base import SecretProviderBase class VaultAuthenticator: """ base HVAC authenticator class """ def authenticate(self, client): pass class VaultAuthenticatorToken(VaultAuthenticator): """ HVAC authenticator for static token """ def __init__(self, token): self.token = token def authenticate(self, client): client.token = self.token class VaultAuthenticatorApprole(VaultAuthenticator): """ HVAC authenticator for Approle login method """ def __init__(self, roleId, secretId): self.roleId = roleId self.secretId = secretId def authenticate(self, client): client.auth.approle.login(role_id=self.roleId, secret_id=self.secretId) class HashiCorpVaultKvSecretProvider(SecretProviderBase): """ Basic provider where each secret is stored in Vault KV secret engine. In case more secret engines are going to be supported, each engine should have it's own class. 
""" name = 'SecretInVaultKv' def checkConfig(self, vault_server=None, authenticator=None, secrets_mount=None, api_version=2, path_delimiter='|', path_escape='\\'): try: import hvac [hvac] except ImportError: # pragma: no cover config.error(f"{self.__class__.__name__} needs the hvac package installed " + "(pip install hvac)") if not isinstance(vault_server, str): config.error("vault_server must be a string while it is {}".format(type(vault_server))) if not isinstance(path_delimiter, str) or len(path_delimiter) > 1: config.error("path_delimiter must be a single character") if not isinstance(path_escape, str) or len(path_escape) > 1: config.error("path_escape must be a single character") if not isinstance(authenticator, VaultAuthenticator): config.error("authenticator must be instance of VaultAuthenticator while it is {}" .format(type(authenticator))) if api_version not in [1, 2]: config.error("api_version {} is not supported".format(api_version)) def reconfigService(self, vault_server=None, authenticator=None, secrets_mount=None, api_version=2, path_delimiter='|', path_escape='\\'): try: import hvac except ImportError: # pragma: no cover config.error(f"{self.__class__.__name__} needs the hvac package installed " + "(pip install hvac)") if secrets_mount is None: secrets_mount = "secret" self.secrets_mount = secrets_mount self.path_delimiter = path_delimiter self.path_escape = path_escape self.authenticator = authenticator self.api_version = api_version if vault_server.endswith('/'): # pragma: no cover vault_server = vault_server[:-1] self.client = hvac.Client(vault_server) self.client.secrets.kv.default_kv_version = api_version return self def escaped_split(self, s): """ parse and split string, respecting escape characters """ ret = [] current = [] itr = iter(s) for ch in itr: if ch == self.path_escape: try: # skip the next character; it has been escaped and remove # escape character current.append(next(itr)) except StopIteration: # escape character on end of the 
string is safest to ignore, as buildbot for # each secret identifier tries all secret providers until value is found, # meaning we may end up parsing identifiers for other secret providers, where # our escape character may be valid on end of string pass elif ch == self.path_delimiter: # split! (add current to the list and reset it) ret.append(''.join(current)) current = [] else: current.append(ch) ret.append(''.join(current)) return ret def thd_hvac_wrap_read(self, path): if self.api_version == 1: return self.client.secrets.kv.v1.read_secret(path=path, mount_point=self.secrets_mount) else: return self.client.secrets.kv.v2.read_secret_version(path=path, mount_point=self.secrets_mount) def thd_hvac_get(self, path): """ query secret from Vault and try to re-authenticate in case Unauthorized exception when active token reaches its TTL """ # no need to "try" import, it was already handled by reconfigService() import hvac try: response = self.thd_hvac_wrap_read(path=path) except (hvac.exceptions.Unauthorized, hvac.exceptions.InvalidRequest): self.authenticator.authenticate(self.client) response = self.thd_hvac_wrap_read(path=path) return response @defer.inlineCallbacks def get(self, entry): """ get the value from vault secret backend """ parts = self.escaped_split(entry) if len(parts) == 1: raise KeyError("Vault secret specification must contain attribute name separated from " "path by '{}'".format(self.path_delimiter)) if len(parts) > 2: raise KeyError("Multiple separators ('{0}') found in vault path '{1}'. 
All occurences " "of '{0}' in path or attribute name must be escaped using '{2}'" .format(self.path_delimiter, entry, self.path_escape)) name = parts[0] key = parts[1] response = yield threads.deferToThread(self.thd_hvac_get, path=name) # in KVv2 we have extra "data" dictionary, as vault provides metadata as well if self.api_version == 2: response = response['data'] try: return response['data'][key] except KeyError as e: raise KeyError( "The secret {} does not exist in Vault provider: {}".format(entry, e)) from e buildbot-3.4.0/master/buildbot/secrets/secret.py000066400000000000000000000031331413250514000217230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members class SecretDetails: """ A SecretDetails object has secrets attributes: - source: provider where the secret was retrieved - key: secret key identifier - value: secret value """ def __init__(self, source, key, value): self._source = source self._value = value self._key = key @property def source(self): """ source of the secret """ return self._source @property def value(self): """ secret value """ return self._value @property def key(self): """ secret value """ return self._key def __str__(self): return '{} {}: {!r}'.format(self._source, self._key, self.value) def __eq__(self, other): return (self._source == other._source and self.key == other.key and self.value == other.value) buildbot-3.4.0/master/buildbot/spec/000077500000000000000000000000001413250514000173465ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/spec/api.raml000066400000000000000000000752431413250514000210070ustar00rootroot00000000000000#%RAML 1.0 title: Buildbot Web API version: v2 mediaType: application/json traits: bbget: responses: 200: body: application/json: type: <>[] 404: body: text/plain: example: "not found" bbpost: body: type: <> responses: 200: body: application/json: type: <> 404: body: text/plain: example: "not found" bbgetraw: responses: 200: headers: content-disposition: description: content disposition header allows browser to save data with proper filename example: attachment; filename=stdio body: text/html: description: "html data if the object is html" text/plain: description: "plain text data if the object is text" application/octet-stream: description: "binary data if binary is binary" types: build: !include types/build.raml builder: !include types/builder.raml buildrequest: !include types/buildrequest.raml buildset: !include types/buildset.raml build_data: !include types/build_data.raml worker: !include types/worker.raml change: !include types/change.raml changesource: !include types/changesource.raml forcescheduler: !include 
types/forcescheduler.raml identifier: !include types/identifier.raml log: !include types/log.raml logchunk: !include types/logchunk.raml master: !include types/master.raml rootlink: !include types/rootlink.raml scheduler: !include types/scheduler.raml sourcedproperties: !include types/sourcedproperties.raml sourcestamp: !include types/sourcestamp.raml patch: !include types/patch.raml spec: !include types/spec.raml step: !include types/step.raml test_result: !include types/test_result.raml test_result_set: !include types/test_result_set.raml /: get: is: - bbget: {bbtype: rootlink} /application.spec: get: is: - bbget: {bbtype: spec} /builders: description: This path selects all builders get: is: - bbget: {bbtype: builder} /{builderid_or_buildername}: uriParameters: builderid_or_buildername: type: number|identifier description: the ID or name of the builder description: This path selects a builder by builderid get: is: - bbget: {bbtype: builder} /forceschedulers: description: This path selects all force-schedulers for a given builder get: is: - bbget: {bbtype: forcescheduler} /buildrequests: description: This path selects all buildrequests for a given builder (can return lots of data!) get: is: - bbget: {bbtype: buildrequest} /builds: description: This path selects all builds for a builder (can return lots of data!) get: is: - bbget: {bbtype: build} /{build_number}: uriParameters: build_number: type: number description: the number of the build within the builder description: This path selects a specific build by builderid and buildnumber get: is: - bbget: {bbtype: build} /actions/stop: post: description: | stops one build. body: application/json: properties: reason: type: string required: false description: The reason why the build was stopped results: type: integer required: false description: optionally results value override (default CANCELLED) /actions/rebuild: post: description: | rebuilds one build. 
body: application/json: description: no parameter are needed /data: description: This path selects all build data set for the build get: is: - bbget: {bbtype: build_data} /{build_data_name}: uriParameters: build_data_name: type: string description: the name of build data description: This path selects a build data with specific name get: is: - bbget: {bbtype: build_data} /value: description: This path returns the value of build data. get: is: - bbgetraw: /changes: description: | This path selects all changes tested by a build get: is: - bbget: {bbtype: change} /properties: description: | This path selects all properties of a build get: is: - bbget: {bbtype: sourcedproperties} /steps: description: This path selects all steps for the given build get: is: - bbget: {bbtype: step} /{step_name}: uriParameters: step_name: type: identifier description: the slug name of the step description: This path selects a specific step for the given build get: is: - bbget: {bbtype: step} /logs: description: This path selects all logs for the given step. get: is: - bbget: {bbtype: log} /{log_slug}: uriParameters: log_slug: type: identifier description: the slug name of the log get: description: | This path selects a specific log in the given step. is: - bbget: {bbtype: log} /contents: get: description: | This path selects chunks from a specific log in the given step. is: - bbget: {bbtype: logchunk} /raw: get: description: | This endpoint allows to get the raw logs for downloading into a file. This endpoint does not provide paging capabilities. For stream log types, the type line header characters are dropped. 'text/plain' is used as the mime type except for html logs, where 'text/html' is used. The 'slug' is used as the filename for the resulting download. Some browsers are appending ``".txt"`` or ``".html"`` to this filename according to the mime-type. 
is: - bbgetraw: /{step_number}: uriParameters: step_number: type: number description: the number of the step description: This path selects a specific step given its step number get: is: - bbget: {bbtype: step} /logs: description: This path selects all log of a specific step get: is: - bbget: {bbtype: log} /{log_slug}: uriParameters: log_slug: type: identifier description: the slug name of the log description: This path selects one log of a specific step get: is: - bbget: {bbtype: log} /contents: get: description: | This path selects chunks from a specific log in the given step. is: - bbget: {bbtype: logchunk} /raw: get: description: | This path downloads the whole log is: - bbgetraw: /workers: description: | This path selects all workers configured for a given builder get: is: - bbget: {bbtype: worker} /{name}: description: | This path selects a worker by name filtered by given builderid uriParameters: name: type: identifier description: the name of the worker get: is: - bbget: {bbtype: worker} /{workerid}: description: | This path selects a worker by id filtered by given builderid uriParameters: workerid: type: number description: the id of the worker get: is: - bbget: {bbtype: worker} /actions/stop: post: description: | gracefully shutdown one worker. body: application/json: properties: reason: type: string required: false description: The reason why the worker was stopped /actions/kill: post: description: | forcefully shutdown one worker. body: application/json: properties: reason: type: string required: false description: The reason why the worker was stopped /actions/pause: post: description: | Pause one worker. The worker will stop taking new build. body: application/json: properties: reason: type: string required: false description: The reason why the worker was paused /actions/unpause: post: description: | Unpause one worker. The worker will re-start taking builds. 
body: application/json: properties: reason: type: string required: false description: The reason why the worker was un-paused /masters: description: | This path selects all masters supporting a given builder get: is: - bbget: {bbtype: master} /{masterid}: uriParameters: masterid: type: number description: the id of the master description: | This path selects a master by id filtered by given builderid get: is: - bbget: {bbtype: master} /test_result_sets: description: | This selects all test result sets that have been created for a particular builder get: is: - bbget: {bbtype: test_result_set} /test_code_paths: description: | This selects all test code paths that have been created for a particular builder get: is: - bbget: {bbtype: string} /test_names: description: | This selects all test names that have been created for a particular builder get: is: - bbget: {bbtype: string} /buildrequests: /{buildrequestid}: uriParameters: buildrequestid: type: number description: the id of the buildrequest get: is: - bbget: {bbtype: buildrequest} /builds: get: is: - bbget: {bbtype: build} /actions/cancel: post: description: | Cancel one buildrequest. If necessary, this will stop the builds generated by the buildrequest, including triggered builds. body: application/json: properties: reason: type: string required: false description: The reason why the buildrequest was cancelled get: is: - bbget: {bbtype: buildrequest} /builds: get: is: - bbget: {bbtype: build} /{buildid}: description: | This path selects a build by id uriParameters: buildid: type: number description: the id of the build get: is: - bbget: {bbtype: build} /actions/stop: post: description: | stops one build. body: application/json: properties: reason: type: string required: false description: The reason why the build was stopped /actions/rebuild: post: description: | rebuilds one build. 
body: application/json: description: no parameter are needed /changes: description: | This path selects all changes tested by a build get: is: - bbget: {bbtype: change} /properties: description: | This path selects all properties of a build get: is: - bbget: {bbtype: sourcedproperties} /data: description: This path selects all build data set for the build get: is: - bbget: {bbtype: build_data} /{build_data_name}: uriParameters: build_data_name: type: string description: the name of build data description: This path selects a build data with specific name get: is: - bbget: {bbtype: build_data} /value: description: This path returns the value of build data. get: is: - bbgetraw: /steps: description: | This path selects all steps of a build get: is: - bbget: {bbtype: step} /{step_number_or_name}: uriParameters: step_number_or_name: type: identifier|number description: the name or number of the step description: | This path selects one step of a build get: is: - bbget: {bbtype: step} /logs: description: | This path selects all logs of a step of a build get: is: - bbget: {bbtype: log} /{log_slug}: uriParameters: log_slug: type: identifier description: the slug name of the log description: This path selects one log of a a specific step get: is: - bbget: {bbtype: log} /contents: get: description: | This path selects chunks from a specific log in the given step. 
is: - bbget: {bbtype: logchunk} /raw: get: description: | This path downloads the whole log is: - bbgetraw: /test_result_sets: description: | This selects all test result sets that have been created for a particular step get: is: - bbget: {bbtype: test_result_set} /test_result_sets: description: | This selects all test result sets that have been created for a particular build get: is: - bbget: {bbtype: test_result_set} /buildsets: description: This path selects all buildsets get: is: - bbget: {bbtype: buildset} /{bsid}: description: This path selects a buildset by id uriParameters: bsid: type: identifier description: the id of the buildset get: is: - bbget: {bbtype: buildset} /properties: description: | This path selects all properties of a buildset. Buildset properties are part of the initial properties of a build. get: is: - bbget: {bbtype: sourcedproperties} /workers: description: This path selects all workers get: is: - bbget: {bbtype: worker} /{name_or_id}: description: This path selects a worker by name or id uriParameters: name_or_id: type: number|identifier description: the name or id of a worker get: is: - bbget: {bbtype: worker} /changes: description: | This path selects **all** changes. On a reasonably loaded master, this can quickly return a very large result, taking minutes to process. 
A specific query configuration is optimized which allows to get the recent changes: ``order:-changeid&limit=`` get: is: - bbget: {bbtype: change} /{changeid}: description: This path selects one change by id uriParameters: changeid: type: number description: the id of a change get: is: - bbget: {bbtype: change} /changesources: description: | This path selects all changesource get: is: - bbget: {bbtype: changesource} /{changesourceid}: uriParameters: changesourceid: type: number description: the id of a changesource description: | This path selects one changesource given its id get: is: - bbget: {bbtype: changesource} /forceschedulers: description: | This path selects all forceschedulers get: is: - bbget: {bbtype: forcescheduler} /{schedulername}: description: | This path selects all changesource uriParameters: schedulername: type: identifier description: the name of a scheduler get: is: - bbget: {bbtype: forcescheduler} /actions/force: post: description: | Triggers the forcescheduler body: application/json: properties: owner: type: string required: false description: The user who wants to create the buildrequest '[]': description: content of the forcescheduler parameter is dependent on the configuration of the forcescheduler /logs/{logid}: uriParameters: logid: type: number description: the id of the log description: This path selects one log get: is: - bbget: {bbtype: log} /contents: get: description: | This path selects chunks from a specific log is: - bbget: {bbtype: logchunk} /raw: get: description: | This path downloads the whole log is: - bbgetraw: /masters: description: This path selects all masters get: is: - bbget: {bbtype: master} /{masterid}: description: This path selects one master given its id uriParameters: masterid: type: number description: the id of the master get: is: - bbget: {bbtype: master} /builders: description: This path selects all builders of a given master get: is: - bbget: {bbtype: builder} /{builderid}: description: This path selects 
one builder by id of a given master uriParameters: builderid: type: number description: the id of the builder get: is: - bbget: {bbtype: builder} /workers: description: This path selects all workers for a given builder and a given master get: is: - bbget: {bbtype: worker} /{name}: description: This path selects one worker by name for a given builder and a given master uriParameters: name: type: identifier description: the name of the worker get: is: - bbget: {bbtype: worker} /{workerid}: description: This path selects one worker by name for a given builder and a given master uriParameters: workerid: type: number description: the id of the worker get: is: - bbget: {bbtype: worker} /workers: description: This path selects all workers for a given master get: is: - bbget: {bbtype: worker} /{name}: description: This path selects one worker by name for a given master uriParameters: name: type: identifier description: the name of the worker get: is: - bbget: {bbtype: worker} /{workerid}: description: This path selects one worker by id for a given master uriParameters: workerid: type: number description: the id of the worker get: is: - bbget: {bbtype: worker} /changesources: description: This path selects all changesources for a given master get: is: - bbget: {bbtype: changesource} /{changesourceid}: description: This path selects one changesource by id for a given master get: is: - bbget: {bbtype: changesource} /schedulers: description: This path selects all schedulers for a given master get: is: - bbget: {bbtype: scheduler} /{schedulerid}: description: This path selects one scheduler by id for a given master uriParameters: schedulerid: type: number description: the id of the scheduler get: is: - bbget: {bbtype: scheduler} /schedulers: description: This path selects all schedulers get: is: - bbget: {bbtype: scheduler} /{schedulerid}: uriParameters: schedulerid: type: number description: the id of the scheduler description: This path selects one scheduler by id get: is: - 
bbget: {bbtype: scheduler} /sourcestamps: description: This path selects all sourcestamps (can return lots of data!) get: is: - bbget: {bbtype: sourcestamp} /{ssid}: description: This path selects one sourcestamp by id uriParameters: ssid: type: number description: the id of the sourcestamp get: is: - bbget: {bbtype: sourcestamp} /changes: description: This path selects all changes associated to one sourcestamp get: is: - bbget: {bbtype: change} /steps: /{stepid}: description: This path selects one step by id uriParameters: stepid: type: number description: the id of the step /logs: description: This path selects all logs for the given step get: is: - bbget: {bbtype: log} /{log_slug}: uriParameters: log_slug: type: identifier description: the slug name of the log get: description: | This path selects a specific log in the given step. is: - bbget: {bbtype: log} /contents: get: description: | This path selects chunks from a specific log in the given step. is: - bbget: {bbtype: logchunk} /raw: get: description: | This path downloads the whole log is: - bbgetraw: /test_result_sets: description: | This selects all test result sets that have been created for a particular step get: is: - bbget: {bbtype: test_result_set} /test_result_sets: /{test_result_setid}: description: Selects a test result set by id uriParameters: test_result_setid: type: number description: the id of the test result set get: description: This path selects specific test result set. 
is: - bbget: {bbtype: test_result_set} /results: description: This path selects all test results for the given test result set get: is: - bbget: {bbtype: test_result} /raw_results: description: This path selects the raw data for the test result set, if available get: is: - bbget: {bbtype: test_raw_result} buildbot-3.4.0/master/buildbot/spec/indent.py000066400000000000000000000014021413250514000211760ustar00rootroot00000000000000#!/usr/bin/python import re import sys spaces = re.compile("^ +") for fn in sys.argv[1:]: lines = [] with open(fn, 'r') as f: for line in f: lines.append(line) def getIndent(i): res = spaces.match(lines[i]) if res is None: return 0 return len(res.group(0)) def IndentBlock(i, numspaces): initIndent = getIndent(i) while i < len(lines) and initIndent <= getIndent(i): lines[i] = " " * numspaces + lines[i] i += 1 for i, line in enumerate(lines): missingIndent = 4 - (getIndent(i) % 4) if missingIndent != 4: IndentBlock(i, missingIndent) with open(fn, 'w') as f: for line in lines: f.write(line) buildbot-3.4.0/master/buildbot/spec/types/000077500000000000000000000000001413250514000205125ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/spec/types/build.raml000066400000000000000000000075071413250514000224770ustar00rootroot00000000000000#%RAML 1.0 DataType displayName: build description: | This resource type describes completed and in-progress builds. Much of the contextual data for a build is associated with the build request, and through it the buildset. .. note:: The properties field of a build is only filled out if the `properties filterspec` is set. That means the ``property`` filter allows one to request properties through the Builds DATA API like so: * api/v2/builds?property=* (returns all properties) * api/v2/builds?property=propKey1&property=propKey2 (returns the properties that match the given keys) * api/v2/builds?property=propKey1&property=propKey2&limit=30 (filters combination) .. 
important:: When combined with the ``field`` filter, for someone to get the build properties, they should ensure the **properties** field is set: * api/v2/builds?field=buildid&field=properties&property=workername&property=user Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.builds.Build .. py:method:: newBuild(builderid, buildrequestid, workerid) :param integer builderid: id of the builder performing this build :param integer buildrequestid: id of the build request being built :param integer workerid: id of the worker on which this build is performed :returns: (buildid, number) via Deferred Create a new build resource and return its ID. The state strings for the new build will be set to 'starting'. .. py:method:: setBuildStateString(buildid, state_string) :param integer buildid: the build to modify :param unicode state_string: new state string for this build Replace the existing state strings for a build with a new list. .. py:method:: finishBuild(buildid, results) :param integer buildid: the build to modify :param integer results: the build's results Mark the build as finished at the current time, with the given results. properties: buildid: description: the unique ID of this build type: integer number: description: the number of this build (sequential for a given builder) type: integer builderid: description: id of the builder for this build type: integer buildrequestid: description: build request for which this build was performed, or None if no such request exists type: integer workerid: description: the worker this build ran on type: integer masterid: description: the master this build ran on type: integer started_at: description: time at which this build started type: date complete: description: | true if this build is complete. Note that this is a calculated field (from complete_at != None). Ordering by this field is not optimized by the database layer. 
type: boolean complete_at?: description: time at which this build was complete, or None if it's still running type: date properties?: description: a dictionary of properties attached to build. type: sourcedproperties results?: description: the results of the build (see :ref:`Build-Result-Codes`), or None if not complete type: integer state_string: description: a string giving detail on the state of the build. type: string type: object example: builderid: 10 buildid: 100 buildrequestid: 13 workerid: 20 complete: false complete_at: null masterid: 824 number: 1 results: null started_at: 1451001600 state_string: created properties: {} buildbot-3.4.0/master/buildbot/spec/types/build_data.raml000066400000000000000000000031651413250514000234640ustar00rootroot00000000000000#%RAML 1.0 DataType displayName: build_data description: | This resource represents a key-value data pair associated to a build. A build can have any number of key-value pairs. The data is intended to be used for temporary purposes, until the build and all actions associated to it (such as reporters) are finished. The value is a binary of potentially large size. There are two sets of APIs. One returns the properties of the key-value data pairs, such as key name and value length. Another returns the actual value as binary data. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.build_data.BuildData .. py:method:: setBuildData(buildid, name, value, source) :param integer buildid: build id to attach data to :param unicode name: the name of the data :param bytestr value: the value of the data as ``bytes`` :param unicode source: a string identifying the source of the data :returns: Deferred Adds or replaces build data attached to the build. 
properties: buildid: description: id of the build the build data is attached to type: integer name: description: the name of the build data type: string length: description: the number of bytes in the build data type: integer source: description: a string identifying the source of the data type: string type: object example: buildid: 31 name: "stored_data_name" length: 10 source: "Step XYZ" buildbot-3.4.0/master/buildbot/spec/types/builder.raml000066400000000000000000000022111413250514000230110ustar00rootroot00000000000000#%RAML 1.0 DataType description: | This resource type describes a builder. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.builders.Builder .. py:method:: updateBuilderList(masterid, builderNames) :param integer masterid: this master's master ID :param list builderNames: list of names of currently-configured builders (unicode strings) :returns: Deferred Record the given builders as the currently-configured set of builders on this master. Masters should call this every time the list of configured builders changes. properties: builderid: description: the ID of this builder type: integer description?: description: The description for that builder type: string masterids[]: description: the ID of the masters this builder is running on type: integer name: description: builder name type: identifier tags[]: description: list of tags for this builder type: string type: object buildbot-3.4.0/master/buildbot/spec/types/buildrequest.raml000066400000000000000000000064361413250514000241100ustar00rootroot00000000000000#%RAML 1.0 DataType description: | This resource type describes completed and in-progress buildrequests. Much of the contextual data for a buildrequest is associated with the buildset that contains this buildrequest. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. 
py:class:: buildbot.data.buildrequests.BuildRequest .. py:method:: claimBuildRequests(brids, claimed_at=None) :param list(integer) brids: list of buildrequest id to claim :param datetime claimed_at: date and time when the buildrequest is claimed :returns: (boolean) whether claim succeeded or not Claim a list of buildrequests .. py:method:: unclaimBuildRequests(brids) :param list(integer) brids: list of buildrequest id to unclaim Unclaim a list of buildrequests .. py:method:: completeBuildRequests(brids, results, complete_at=None) :param list(integer) brids: list of buildrequest id to complete :param integer results: the results of the buildrequest (see :ref:`Build-Result-Codes`) :param datetime complete_at: date and time when the buildrequest is completed Complete a list of buildrequest with the ``results`` status properties: buildrequestid: description: the unique ID of this buildrequest type: integer builderid: description: the id of the builder linked to this buildrequest type: integer buildsetid: description: the id of the buildset that contains this buildrequest type: integer claimed: description: | True if this buildrequest has been claimed. Note that this is a calculated field (from claimed_at != None). Ordering by this field is not optimized by the database layer. type: boolean claimed_at?: description: | time at which this build has last been claimed. None if this buildrequest has never been claimed or has been unclaimed type: date claimed_by_masterid?: description: | the id of the master that claimed this buildrequest. None if this buildrequest has never been claimed or has been unclaimed type: integer complete: description: true if this buildrequest is complete type: boolean complete_at?: description: | time at which this buildrequest was completed, or None if it's still running type: date priority: description: the priority of this buildrequest type: integer properties?: description: a dictionary of properties corresponding to buildrequest. 
type: sourcedproperties results?: description: | the results of this buildrequest (see :ref:`Build-Result-Codes`), or None if not complete type: integer submitted_at: description: time at which this buildrequest was submitted type: date waited_for: description: | True if the entity that triggered this buildrequest is waiting for it to complete. Should be used by an (unimplemented so far) clean shutdown to only start br that are waited_for. type: boolean type: object buildbot-3.4.0/master/buildbot/spec/types/buildset.raml000066400000000000000000000073551413250514000232140ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A buildset gathers build requests that were scheduled at the same time, and which share a source stamp, properties, and so on. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.buildsets.Buildset .. py:method:: addBuildset(scheduler=None, sourcestamps=[], reason='', properties={}, builderids=[], external_idstring=None, parent_buildid=None, parent_relationship=None) :param string scheduler: the name of the scheduler creating this buildset :param list sourcestamps: sourcestamps for the new buildset; see below :param unicode reason: the reason for this build :param properties: properties to set on this buildset :type properties: dictionary with unicode keys and (source, property value) values :param list builderids: names of the builders for which build requests should be created :param unicode external_idstring: arbitrary identifier to recognize this buildset later :param int parent_buildid: optional build id that is the parent for this buildset :param unicode parent_relationship: relationship identifier for the parent, this is a configured relationship between the parent build, and the childs buildsets :returns: (buildset id, dictionary mapping builder ids to build request ids) via Deferred .. 
warning: The ``scheduler`` parameter will be replaced with a ``schedulerid`` parameter in future releases. Create a new buildset and corresponding buildrequests based on the given parameters. This is the low-level interface for scheduling builds. Each sourcestamp in the list of sourcestamps can be given either as an integer, assumed to be a sourcestamp ID, or a dictionary of keyword arguments to be passed to :py:meth:`~buildbot.db.sourcestamps.SourceStampsConnectorComponent.findSourceStampId`. .. py:method:: maybeBuildsetComplete(bsid) :param integer bsid: id of the buildset that may be complete :returns: Deferred This method should be called when a build request is finished. It checks the given buildset to see if all of its buildrequests are finished. If so, it updates the status of the buildset and sends the appropriate messages. properties: bsid: description: the ID of this buildset type: integer complete: description: true if all of the build requests in this buildset are complete type: boolean complete_at?: description: the time this buildset was completed, or None if not complete type: integer external_idstring?: description: | an identifier that external applications can use to identify a submitted buildset; can be None type: string parent_buildid?: description: optional build id that is the parent for this buildset type: integer parent_relationship?: description: | relationship identifier for the parent, this is a configured relationship between the parent build, and the childs buildsets type: string reason: description: the reason this buildset was scheduled type: string results?: description: the results of the buildset (see :ref:`Build-Result-Codes`), or None if not complete type: integer sourcestamps[]: description: | the sourcestamps for this buildset; each element is a valid :bb:rtype:`sourcestamp` entity type: sourcestamp submitted_at: description: the time this buildset was submitted type: integer type: object 
buildbot-3.4.0/master/buildbot/spec/types/change.raml000066400000000000000000000074271413250514000226260ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A change resource represents a change to the source code monitored by Buildbot. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.changes.Change .. py:method:: addChange(files=None, comments=None, author=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties={}, repository='', codebase=None, project='', src=None) :param files: a list of filenames that were changed :type files: list of unicode strings :param unicode comments: user comments on the change :param unicode author: the author of this change :param unicode revision: the revision identifier for this change :param integer when_timestamp: when this change occurred (seconds since the epoch), or the current time if None :param unicode branch: the branch on which this change took place :param unicode category: category for this change :param string revlink: link to a web view of this revision :param properties: properties to set on this change. Note that the property source is *not* included in this dictionary. :type properties: dictionary with unicode keys and simple values (JSON-able). :param unicode repository: the repository in which this change took place :param unicode project: the project this change is a part of :param unicode src: source of the change (vcs or other) :returns: The ID of the new change, via Deferred Add a new change to Buildbot. This method is the interface between change sources and the rest of Buildbot. All parameters should be passed as keyword arguments. All parameters labeled 'unicode' must be unicode strings and not bytestrings. Filenames in ``files``, and property names, must also be unicode strings. This is tested by the fake implementation. 
properties: changeid: description: the ID of this change type: integer author: description: | the author of the change in "name", "name " or just "email" (with @) format type: string branch?: description: | branch on which the change took place, or none for the "default branch", whatever that might mean type: string category?: description: user-defined category of this change, or none type: string codebase: description: codebase in this repository type: string comments: description: user comments for this change (aka commit) type: string files[]: description: list of source-code filenames changed type: string parent_changeids[]: description: | The ID of the parents. The data api allows for several parents, but the core Buildbot does not yet support it type: integer project: description: user-defined project to which this change corresponds type: string properties: description: user-specified properties for this change, represented as an object mapping keys to tuple (value, source) type: sourcedproperties repository: description: repository where this change occurred type: string revision?: description: revision for this change, or none if unknown type: string revlink?: description: link to a web view of this change type: string sourcestamp: description: the sourcestamp resource for this change type: sourcestamp when_timestamp: description: time of the change type: integer type: object buildbot-3.4.0/master/buildbot/spec/types/changesource.raml000066400000000000000000000025331413250514000240400ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A changesource generates change objects, for example in response to an update in some repository. A particular changesource (by name) runs on at most one master at a time. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.changesources.ChangeSource .. 
py:method:: findChangeSourceId(name) :param string name: changesource name :returns: changesource ID via Deferred Get the ID for the given changesource name, inventing one if necessary. .. py:method:: trySetChangeSourceMaster(changesourceid, masterid) :param integer changesourceid: changesource ID to try to claim :param integer masterid: this master's master ID :returns: ``True`` or ``False``, via Deferred Try to claim the given scheduler for the given master and return ``True`` if the scheduler is to be activated on that master. properties: changesourceid: description: the ID of this changesource type: integer master?: description: the master on which this worker is running, or None if it is inactive type: master name: description: name of this changesource type: string type: object buildbot-3.4.0/master/buildbot/spec/types/forcescheduler.raml000066400000000000000000000015751413250514000243740ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A forcescheduler initiates builds, via a formular in the web UI. At the moment, forceschedulers must be defined on all the masters where a web ui is configured. A particular forcescheduler runs on the master where the web request was sent. .. note:: This datatype and associated endpoints will be deprecated when :bug:`2673` will be resolved. 
properties: all_fields[]: description: '' type: object builder_names[]: description: names of the builders that this scheduler can trigger type: identifier button_name: description: label of the button to use in the UI type: string label: description: label of this scheduler to be displayed in the UI type: string name: description: name of this scheduler type: identifier type: object buildbot-3.4.0/master/buildbot/spec/types/identifier.raml000066400000000000000000000001051413250514000235050ustar00rootroot00000000000000#%RAML 1.0 DataType pattern: ^[a-zA-Z_-][a-zA-Z0-9_-]*$ type: string buildbot-3.4.0/master/buildbot/spec/types/log.raml000066400000000000000000000062021413250514000221500ustar00rootroot00000000000000#%RAML 1.0 DataType displayName: log description: | A log represents a stream of textual output from a step. The actual output is encoded as a sequence of :bb:rtype:`logchunk` resources. In-progress logs append logchunks as new data is added to the end, and event subscription allows a client to "follow" the log. Each log has a "slug" which is unique within the step, and which can be used in paths. The slug is generated by :py:meth:`~buildbot.data.logs.Log.addLog` based on the name, using :py:func:`~buildbot.util.identifiers.forceIdentifier` and :py:func:`~buildbot.util.identifiers.incrementIdentifier` to guarantee uniqueness. .. todo:: .. bb:event:: build.$buildid.step.$number.log.$logid.newlog The log has just started. Logs are started when they are created, so this also indicates the creation of a new log. .. bb:event:: build.$buildid.step.$number.log.$logid.complete The log is complete. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.logs.Log .. 
py:method:: addLog(stepid, name, type) :param integer stepid: stepid containing this log :param string name: name for the log :raises KeyError: if a log by the given name already exists :returns: logid via Deferred Create a new log and return its ID. The name need not be unique. This method will generate a unique slug based on the name. .. py:method:: appendLog(logid, content): :param integer logid: the log to which content should be appended :param unicode content: the content to append Append the given content to the given log. The content must end with a newline. All newlines in the content should be UNIX-style (``\n``). .. py:method:: finishLog(logid) :param integer logid: the log to finish Mark the log as complete. .. py:method:: compressLog(logid) :param integer logid: the log to compress Compress the given log, after it is finished. This operation may take some time. properties: complete: description: true if this log is complete and will not generate additional logchunks type: boolean logid: description: the unique ID of this log type: integer name: description: the name of this log (e.g., ``err.html``) type: string num_lines: description: total number of line of this log type: integer slug: description: the "slug", suitable for use in a URL, of this log (e.g., ``err_html``) type: identifier stepid: description: id of the step containing this log type: integer type: description: log type, identified by a single ASCII letter; see :bb:rtype:`logchunk` for details type: identifier type: object example: 'logid': 60 'name': 'stdio' 'slug': 'stdio' 'stepid': 50 'complete': false 'num_lines': 0 'type': 's' buildbot-3.4.0/master/buildbot/spec/types/logchunk.raml000066400000000000000000000053731413250514000232110ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A logchunk represents a contiguous sequence of lines in a logfile. Logs are not individually addressable in the data API; instead, they must be requested by line number range. 
In a strict REST sense, many logchunk resources will contain the same line. The chunk contents is represented as a single unicode string. This string is the concatenation of each newline terminated-line. Each log has a type, as identified by the "type" field of the corresponding :bb:rtype:`log`. While all logs are sequences of unicode lines, the type gives additional information of interpreting the contents. The defined types are: * ``t`` -- text, a simple sequence of lines of text * ``s`` -- stdio, like text but with each line tagged with a stream * ``h`` -- HTML, represented as plain text * ``d`` -- Deleted, logchunks for this log have been deleted by the Janitor In the stream type, each line is prefixed by a character giving the stream type for that line. The types are ``i`` for input, ``o`` for stdout, ``e`` for stderr, and ``h`` for header. The first three correspond to normal UNIX standard streams, while the header stream contains metadata produced by Buildbot itself. The ``offset`` and ``limit`` parameters can be used to select the desired lines. These are specified as query parameters via the REST interface, or as arguments to the :py:meth:`~buildbot.data.connector.DataConnector.get` method in Python. The result will begin with line ``offset`` (so the resulting ``firstline`` will be equal to the given ``offset``), and will contain up to ``limit`` lines. Following example will get the first 100 lines of a log:: from buildbot.data import resultspec first_100_lines = yield self.master.data.get(("logs", log['logid'], "contents"), resultSpec=resultspec.ResultSpec(limit=100)) Following example will get the last 100 lines of a log:: from buildbot.data import resultspec last_100_lines = yield self.master.data.get(("logs", log['logid'], "contents"), resultSpec=resultspec.ResultSpec(offset=log['num_lines']-100)) .. note:: There is no event for a new chunk. Instead, the log resource is updated when new chunks are added, with the new number of lines. 
Consumers can then request those lines, if desired. Update Methods -------------- Log chunks are updated via :bb:rtype:`log`. properties: content: description: content of the chunk type: string firstline: description: zero-based line number of the first line in this chunk type: integer logid: description: the ID of log containing this chunk type: integer type: object buildbot-3.4.0/master/buildbot/spec/types/master.raml000066400000000000000000000035241413250514000226660ustar00rootroot00000000000000#%RAML 1.0 DataType description: | This resource type describes buildmasters in the buildmaster cluster. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.masters.Master .. py:method:: masterActive(name, masterid) :param unicode name: the name of this master (generally ``hostname:basedir``) :param integer masterid: this master's master ID :returns: Deferred Mark this master as still active. This method should be called at startup and at least once per minute. The master ID is acquired directly from the database early in the master startup process. .. py:method:: expireMasters() :returns: Deferred Scan the database for masters that have not checked in for ten minutes. This method should be called about once per minute. .. py:method:: masterStopped(name, masterid) :param unicode name: the name of this master :param integer masterid: this master's master ID :returns: Deferred Mark this master as inactive. Masters should call this method before completing an expected shutdown, and on startup. This method will take care of deactivating or removing configuration resources like builders and schedulers as well as marking lost builds and build requests for retry. 
properties: active: description: true if the master is active type: boolean last_active: description: time this master was last marked active type: date masterid: description: the ID of this master type: integer name: description: master name (in the form "hostname:basedir") type: string type: object buildbot-3.4.0/master/buildbot/spec/types/patch.raml000066400000000000000000000016721413250514000224740ustar00rootroot00000000000000#%RAML 1.0 DataType description: | This resource type describes a patch. Patches have unique IDs, but only appear embedded in sourcestamps, so those IDs are not especially useful. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.patches.Patch (no update methods) properties: patchid: description: the unique ID of this patch type: integer body: description: patch body as a binary string type: string level: description: patch level - the number of directory names to strip from filenames in the patch type: integer subdir: description: subdirectory in which patch should be applied type: string author?: description: patch author, or None type: string comment?: description: patch comment, or None type: string buildbot-3.4.0/master/buildbot/spec/types/rootlink.raml000066400000000000000000000001441413250514000232270ustar00rootroot00000000000000#%RAML 1.0 DataType properties: name: description: '' type: string type: object buildbot-3.4.0/master/buildbot/spec/types/scheduler.raml000066400000000000000000000026231413250514000233500ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A scheduler initiates builds, often in response to changes from change sources. A particular scheduler (by name) runs on at most one master at a time. .. note:: This data type and associated endpoints is planned to be merged with forcescheduler data type when :bug:`2673` will be resolved. 
Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.schedulers.Scheduler .. py:method:: findSchedulerId(name) :param string name: scheduler name :returns: scheduler ID via Deferred Get the ID for the given scheduler name, inventing one if necessary. .. py:method:: trySetSchedulerMaster(schedulerid, masterid) :param integer schedulerid: scheduler ID to try to claim :param integer masterid: this master's master ID :returns: ``True`` or ``False``, via Deferred Try to claim the given scheduler for the given master and return ``True`` if the scheduler is to be activated on that master. properties: master?: description: the master on which this scheduler is running, or None if it is inactive type: master name: description: name of this scheduler type: string schedulerid: description: the ID of this scheduler type: integer type: object buildbot-3.4.0/master/buildbot/spec/types/sourcedproperties.raml000066400000000000000000000034461413250514000251570ustar00rootroot00000000000000#%RAML 1.0 DataType description: | User-specified properties for this change, represented as an object mapping keys to (value, source) tuples Properties are present in several data resources, but have a separate endpoints, because they can represent a large dataset. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.properties.Properties .. py:method:: setBuildProperty(buildid, name, value, source) :param integer buildid: build ID :param unicode name: name of the property to set :param value: value of the property :type value: Any JSON-able type is accepted (lists, dicts, strings and numbers) :param unicode source: source of the property to set Set a build property. If no property with that name exists in that build, a new property will be created. .. 
py:method:: setBuildProperties(buildid, props) :param integer buildid: build ID :param IProperties props: name of the property to set Synchronize build properties with the db. This sends only one event in the end of the sync, and only if properties changed. The event contains only the updated properties, for network efficiency reasons. properties: '[]': description: | Each key of this map is the name of a defined property. The value consists of a (value, source) tuple properties: 1: type: string description: source of the property 2: type: integer | string | object | array | boolean description: value of the property type: object type: object buildbot-3.4.0/master/buildbot/spec/types/sourcestamp.raml000066400000000000000000000035131413250514000237360ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A sourcestamp represents a particular version of the source code. Absolute sourcestamps specify this completely, while relative sourcestamps (with revision = None) specify the latest source at the current time. Source stamps can also have patches; such stamps describe the underlying revision with the given patch applied. Note that depending on the underlying version-control system, the same revision may describe different code in different branches (e.g., SVN) or may be independent of the branch (e.g., Git). The ``created_at`` timestamp can be used to indicate the first time a sourcestamp was seen by Buildbot. This provides a reasonable default ordering for sourcestamps when more reliable information is not available. properties: ssid: description: | the ID of this sourcestamp .. note:: For legacy reasons, the abbreviated name ``ssid`` is used instead of canonical ``sourcestampid``. This might change in the future (:bug:`3509`). 
type: integer branch?: description: code branch, or none for the "default branch", whatever that might mean type: string codebase: description: revision for this sourcestamp, or none if unknown type: string created_at: description: the timestamp when this sourcestamp was created type: date patch?: description: the patch for this sourcestamp, or none type: patch project: description: user-defined project to which this sourcestamp corresponds type: string repository: description: repository where this sourcestamp occurred type: string revision?: description: revision for this sourcestamp, or none if unknown type: string type: object buildbot-3.4.0/master/buildbot/spec/types/spec.raml000066400000000000000000000004201413250514000223150ustar00rootroot00000000000000#%RAML 1.0 DataType properties: path: description: '' type: string plural: description: '' type: string type: description: '' type: string type_spec: description: '' type: object type: object buildbot-3.4.0/master/buildbot/spec/types/step.raml000066400000000000000000000063331413250514000223470ustar00rootroot00000000000000#%RAML 1.0 DataType description: | This resource type describes a step in a build. Steps have unique IDs, but are most commonly accessed by name in the context of their containing builds. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.steps.Step .. py:method:: newStep(buildid, name) :param integer buildid: buildid containing this step :param name: name for the step :type name: 50-character :ref:`identifier ` :returns: (stepid, number, name) via Deferred Create a new step and return its ID, number, and name. Note that the name may be different from the requested name if that name was already in use. The state strings for the new step will be set to 'pending'. .. py:method:: startStep(stepid) :param integer stepid: the step to modify Start the step. .. 
py:method:: setStepStateString(stepid, state_string) :param integer stepid: the step to modify :param unicode state_string: new state strings for this step Replace the existing state string for a step with a new list. .. py:method:: addStepURL(stepid, name, url): :param integer stepid: the step to modify :param string name: the url name :param string url: the actual url :returns: None via deferred Add a new url to a step. The new url is added to the list of urls. .. py:method:: finishStep(stepid, results, hidden) :param integer stepid: the step to modify :param integer results: the step's results :param boolean hidden: true if the step should not be displayed Mark the step as finished at the current time, with the given results. properties: stepid: description: the unique ID of this step type: integer buildid: description: ID of the build containing this step type: integer complete: description: true if this step is complete type: boolean complete_at?: description: time at which this step was complete, or None if it's still running type: date hidden: description: true if the step should not be displayed type: boolean name: description: the step name, unique within the build type: identifier number: description: the number of this step (sequential within the build) type: integer results?: description: the results of the step (see :ref:`Build-Result-Codes`), or None if not complete type: integer started_at?: description: time at which this step started, or None if it hasn't started yet type: date state_string: description: | a string giving detail on the state of the build. 
The first is usually one word or phrase; the remainder are sized for one-line display type: string urls[]: description: a list of URLs associated with this step properties: name: string url: string type: object buildbot-3.4.0/master/buildbot/spec/types/test_result.raml000066400000000000000000000054041413250514000237470ustar00rootroot00000000000000#%RAML 1.0 DataType displayName: test_result description: | This resource represents a test result. Test results that are produced by a single test run are grouped by a relation to a test result set. Single test result set may represent thousands of test results. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.test_result_sets.TestResult .. py:method:: addTestResults(builderid, test_result_setid, result_values) :param integer builderid: The ID of the builder corresponding to the test result set :param integer test_result_setid: The ID of the test result set for which to add results :param integer result_values: A list of dictionaries that define the test results Creates one or more new test results. This is a batch-based method as large number of test results are usually associated to a single test result set. The dictionaries in ``result_value`` may have the following keys: - ``value`` (required): A string containing the value of the test result - ``test_name`` (optional): A string containing the name of the test - ``test_code_path`` (optional): A string containing the path of the test - ``line`` (optional): An integer containing the line within the source file corresponding to the test - ``duration_ns`` (optional): An integer defining the duration of the test in nanoseconds At least one of ``test_name`` and ``test_code_path`` must be specified. The function returns nothing. 
properties: test_resultid: description: the unique ID of this test result type: integer builderid: description: id of the builder for this test result type: integer test_result_setid: description: id of the test result set that the test result belongs to type: integer test_name?: description: the name of the test, if any type: string test_code_path?: description: the code path associated to test, if any type: string line?: description: the number of the line in the code path that produced this result, if any type: string duration_ns?: description: the number of nanoseconds it took to perform the test, if available type: integer value: description: the value of the test type: string type: object example: test_resultid: 1042 builderid: 14 test_result_setid: 412 test_name: 'test.perf.buildbot.api.123' test_code_path: 'master/buildbot/spec/types/test_result.raml' duration_ns: 120410 line: 123 value: '31.1382' buildbot-3.4.0/master/buildbot/spec/types/test_result_set.raml000066400000000000000000000146341413250514000246270ustar00rootroot00000000000000#%RAML 1.0 DataType displayName: test_result_set description: | This resource represents a test result set. A test result set consists of a number of related test results. These test results need to be related in that they represent the same type of data and are produced by a single step. In reasonably tested codebases the number of test results in a test result set will approach several or even tens of thousands. There may be a long delay between the creation of the test result set and full creation of the corresponding test results. This is tracked by the ``complete`` property. If it's ``true``, then the full set of test results have been committed to the database. The ``test_result_unparsed_set`` object tracks test result sets that have not been parsed yet. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.test_result_sets.TestResultSet .. 
py:method:: addTestResultSet(builderid, buildid, stepid, description, category, value_unit) :param integer builderid: The ID of the builder for which the test result set is to be created :param integer buildid: The ID of the build for which the test result set is to be created :param integer stepid: The ID of the step for which the test result set is to be created :param description: Description of the test result set :param category: The category of the test result set :param value_unit: Defines the unit of the values stored in the test results Creates a new test result set. Returns the ID of the new test result set. .. py:method:: completeTestResultSet(test_result_setid, tests_passed=None, tests_failed=None): :param integer test_result_setid: The ID of the test result set to complete :param integer tests_passed: The number of passed tests, if known :param integer tests_failed: The number of failed tests, if known Marks a test result set as complete. The total number of passed and failed tests may be passed to have this information cached as part of a test result set so that expensive re-computations don't need to be performed. properties: test_result_setid: description: the unique ID of this test result set type: integer builderid: description: id of the builder for this test result set type: integer buildid: description: id of the build for this test result set type: integer stepid: description: id of the step for this test result set type: integer description: description: | Free-form description of the source of the test data type: string category: description: | The category of the test result set. This describes what data the test results contain. Any value is allowed. The following standard categories are defined: - ``pass_fail``: The test result set contains results that can indicate success or failure of specific test. The values of test results contain success or failure values. 
- ``pass_only``: The test result set contains results that can only indicate success of specific test. This is used in cases when failed tests are not reported. - ``fail_only``: The test result set contains results that can only indicate failure of specific test. This is used in tests when passed tests are not reported. - ``code_issue``: The test result set contains issues within the code reported by various tooling. This is effectively a subset of ``fail_only``. - ``performance``: The test result set contains performance results. The values of test results contain some kind of performance metric such as time per operation or the number of operations completed in a time period. - ``binary_size``: The test result set contains evaluation of binary size. The values of test results contain a binary size metric. - ``memory_use``: The test result set contains evaluation of dynamic memory use. The values of test results contain a memory use metric. type: string value_unit: description: | Describes the unit of the values stored within the test results. Any value is allowed. The following standard units are defined: - ``ps``: Picoseconds - ``ns``: Nanoseconds - ``us``: Microseconds - ``ms``: Milliseconds - ``s``: Seconds - ``boolean``: A boolean value (0 or 1) - ``B``: Bytes - ``KB``: Kilobytes (1000-based) - ``KiB``: Kibibytes (1024-based) - ``MB``: Megabytes (1000-based) - ``MiB``: Mebibytes (1024-based) - ``GB``: Gigabytes (1000-based) - ``GiB``: Gibibytes (1024-based) - ``TB``: Gigabytes (1000-based) - ``TiB``: Gibibytes (1024-based) - ``message``: Arbitrary string message Note that the value of the test result is always stored as string. type: string tests_passed?: description: | The number of passed tests in cases when the pass or fail criteria depends only on how that single test runs. For example, performance tests that track regressions across multiple tests do not have the number of passed tests defined. 
type: integer tests_failed?: description: | The number of failed tests in cases when the pass or fail criteria depends only on how that single test runs. For example, performance tests that track regressions across multiple tests do not have the number of failed tests defined. type: integer complete: description: | ``true`` if all test results associated with test result set have been generated. Once set to ``true`` this property will never be set back to ``false`` type: boolean type: object example: test_result_setid: 412 builderid: 14 buildid: 31 stepid: 3 description: "Performance test via BenchmarkDotNet" category: "performance" value_unit: "ms" complete: True buildbot-3.4.0/master/buildbot/spec/types/worker.raml000066400000000000000000000031161413250514000227010ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A worker resource represents a worker to the source code monitored by Buildbot. The contents of the ``connected_to`` and ``configured_on`` attributes are sensitive to the context of the request. If a builder or master is specified in the path, then only the corresponding connections and configurations are included in the result. properties: workerid: description: the ID of this worker type: integer configured_on[]: description: list of builders on masters this worker is configured on properties: builderid: integer masterid: integer connected_to[]: description: list of masters this worker is attached to properties: masterid: integer name: description: the name of the worker type: string paused: description: the worker is paused if it is connected but doesn't accept new builds type: bool graceful: description: the worker is graceful if it doesn't accept new builds, and will shutdown when builds are finished type: bool workerinfo: description: | information about the worker The worker information can be any JSON-able object. 
In practice, it contains the following keys, based on information provided by the worker: * ``admin`` (the admin information) * ``host`` (the name of the host) * ``access_uri`` (the access URI) * ``version`` (the version on the worker) type: object type: object buildbot-3.4.0/master/buildbot/statistics/000077500000000000000000000000001413250514000206065ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/statistics/__init__.py000066400000000000000000000035011413250514000227160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.statistics.capture import CaptureBuildDuration from buildbot.statistics.capture import CaptureBuildDurationAllBuilders from buildbot.statistics.capture import CaptureBuildEndTime from buildbot.statistics.capture import CaptureBuildEndTimeAllBuilders from buildbot.statistics.capture import CaptureBuildStartTime from buildbot.statistics.capture import CaptureBuildStartTimeAllBuilders from buildbot.statistics.capture import CaptureData from buildbot.statistics.capture import CaptureDataAllBuilders from buildbot.statistics.capture import CaptureProperty from buildbot.statistics.capture import CapturePropertyAllBuilders from buildbot.statistics.stats_service import StatsService from buildbot.statistics.storage_backends.influxdb_client import InfluxStorageService __all__ = [ 'CaptureBuildDuration', 'CaptureBuildDurationAllBuilders', 'CaptureBuildEndTime', 'CaptureBuildEndTimeAllBuilders', 'CaptureBuildStartTime', 'CaptureBuildStartTimeAllBuilders', 'CaptureData', 'CaptureDataAllBuilders', 'CaptureProperty', 'CapturePropertyAllBuilders', 'InfluxStorageService', 'StatsService' ] buildbot-3.4.0/master/buildbot/statistics/capture.py000066400000000000000000000306671413250514000226370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import abc import re from twisted.internet import defer from twisted.internet import threads from buildbot import config from buildbot.errors import CaptureCallbackError class Capture: """ Base class for all Capture* classes. """ __metaclass__ = abc.ABCMeta def __init__(self, routingKey, callback): self.routingKey = routingKey self._callback = callback # parent service and buildmaster to be set when StatsService # initialized self.parent_svcs = [] self.master = None def _defaultContext(self, msg, builder_name): return { "builder_name": builder_name, "build_number": str(msg['number']) } @abc.abstractmethod def consume(self, routingKey, msg): pass @defer.inlineCallbacks def _store(self, post_data, series_name, context): for svc in self.parent_svcs: yield threads.deferToThread(svc.thd_postStatsValue, post_data, series_name, context) class CapturePropertyBase(Capture): """ A base class for CaptureProperty* classes. """ def __init__(self, property_name, callback=None, regex=False): self._property_name = property_name self._regex = regex routingKey = ("builders", None, "builds", None, "finished") def default_callback(props, property_name): # index: 0 - prop_value, 1 - prop_source return props[property_name][0] if not callback: callback = default_callback super().__init__(routingKey, callback) @defer.inlineCallbacks def consume(self, routingKey, msg): """ Consumer for this (CaptureProperty) class. Gets the properties from data api and send them to the storage backends. 
""" builder_info = yield self.master.data.get(("builders", msg['builderid'])) if self._builder_name_matches(builder_info): properties = yield self.master.data.get(("builds", msg['buildid'], "properties")) if self._regex: filtered_prop_names = [ pn for pn in properties if re.match(self._property_name, pn)] else: filtered_prop_names = [self._property_name] for pn in filtered_prop_names: try: ret_val = self._callback(properties, pn) except KeyError as e: raise CaptureCallbackError(("CaptureProperty failed." " The property {} not found for build number {} on" " builder {}.").format(pn, msg['number'], builder_info['name'])) from e context = self._defaultContext(msg, builder_info['name']) series_name = "{}-{}".format(builder_info['name'], pn) post_data = { "name": pn, "value": ret_val } yield self._store(post_data, series_name, context) else: yield defer.succeed(None) @abc.abstractmethod def _builder_name_matches(self, builder_info): pass class CaptureProperty(CapturePropertyBase): """ Convenience wrapper for getting statistics for filtering. Filters out build properties specifies in the config file. """ def __init__(self, builder_name, property_name, callback=None, regex=False): self._builder_name = builder_name super().__init__(property_name, callback, regex) def _builder_name_matches(self, builder_info): return self._builder_name == builder_info['name'] class CapturePropertyAllBuilders(CapturePropertyBase): """ Capture class for filtering out build properties for all builds. """ def _builder_name_matches(self, builder_info): # Since we need to match all builders, we simply return True here. return True class CaptureBuildTimes(Capture): """ Capture methods for capturing build start times. 
""" def __init__(self, builder_name, callback, time_type): self._builder_name = builder_name routingKey = ("builders", None, "builds", None, "finished") self._time_type = time_type super().__init__(routingKey, callback) @defer.inlineCallbacks def consume(self, routingKey, msg): """ Consumer for CaptureBuildStartTime. Gets the build start time. """ builder_info = yield self.master.data.get(("builders", msg['builderid'])) if self._builder_name_matches(builder_info): try: ret_val = self._callback(*self._retValParams(msg)) except Exception as e: # catching generic exceptions is okay here since we propagate # it raise CaptureCallbackError("{} Exception raised: {} with message: {}".format( self._err_msg(msg, builder_info['name']), type(e).__name__, str(e))) from e context = self._defaultContext(msg, builder_info['name']) post_data = { self._time_type: ret_val } series_name = "{}-build-times".format(builder_info['name']) yield self._store(post_data, series_name, context) else: yield defer.succeed(None) def _err_msg(self, build_data, builder_name): msg = "{} failed on build {} on builder {}.".format(self.__class__.__name__, build_data['number'], builder_name) return msg @abc.abstractmethod def _retValParams(self, msg): pass @abc.abstractmethod def _builder_name_matches(self, builder_info): pass class CaptureBuildStartTime(CaptureBuildTimes): """ Capture methods for capturing build start times. """ def __init__(self, builder_name, callback=None): def default_callback(start_time): return start_time.isoformat() if not callback: callback = default_callback super().__init__(builder_name, callback, "start-time") def _retValParams(self, msg): return [msg['started_at']] def _builder_name_matches(self, builder_info): return self._builder_name == builder_info['name'] class CaptureBuildStartTimeAllBuilders(CaptureBuildStartTime): """ Capture methods for capturing build start times for all builders. 
""" def __init__(self, callback=None): super().__init__(None, callback) def _builder_name_matches(self, builder_info): # Match all builders so simply return True return True class CaptureBuildEndTime(CaptureBuildTimes): """ Capture methods for capturing build end times. """ def __init__(self, builder_name, callback=None): def default_callback(end_time): return end_time.isoformat() if not callback: callback = default_callback super().__init__(builder_name, callback, "end-time") def _retValParams(self, msg): return [msg['complete_at']] def _builder_name_matches(self, builder_info): return self._builder_name == builder_info['name'] class CaptureBuildEndTimeAllBuilders(CaptureBuildEndTime): """ Capture methods for capturing build end times on all builders. """ def __init__(self, callback=None): super().__init__(None, callback) def _builder_name_matches(self, builder_info): # Match all builders so simply return True return True class CaptureBuildDuration(CaptureBuildTimes): """ Capture methods for capturing build start times. """ def __init__(self, builder_name, report_in='seconds', callback=None): if report_in not in ['seconds', 'minutes', 'hours']: config.error(("Error during initialization of class {}." 
" `report_in` parameter must be one of 'seconds', 'minutes' or 'hours'" ).format(self.__class__.__name__)) def default_callback(start_time, end_time): divisor = 1 # it's a closure if report_in == 'minutes': divisor = 60 elif report_in == 'hours': divisor = 60 * 60 duration = end_time - start_time # cannot use duration.total_seconds() on Python 2.6 duration = ((duration.microseconds + (duration.seconds + duration.days * 24 * 3600) * 1e6) / 1e6) return duration / divisor if not callback: callback = default_callback super().__init__(builder_name, callback, "duration") def _retValParams(self, msg): return [msg['started_at'], msg['complete_at']] def _builder_name_matches(self, builder_info): return self._builder_name == builder_info['name'] class CaptureBuildDurationAllBuilders(CaptureBuildDuration): """ Capture methods for capturing build durations on all builders. """ def __init__(self, report_in='seconds', callback=None): super().__init__(None, report_in, callback) def _builder_name_matches(self, builder_info): # Match all builders so simply return True return True class CaptureDataBase(Capture): """ Base class for CaptureData methods. """ def __init__(self, data_name, callback=None): self._data_name = data_name def identity(x): return x if not callback: callback = identity # this is the routing key which is used to register consumers on to mq layer # this following key created in StatsService.yieldMetricsValue and used # here routingKey = ("stats-yieldMetricsValue", "stats-yield-data") super().__init__(routingKey, callback) @defer.inlineCallbacks def consume(self, routingKey, msg): """ Consumer for this (CaptureData) class. Gets the data sent from yieldMetricsValue and sends it to the storage backends. 
""" build_data = msg['build_data'] builder_info = yield self.master.data.get(("builders", build_data['builderid'])) if self._builder_name_matches(builder_info) and self._data_name == msg['data_name']: try: ret_val = self._callback(msg['post_data']) except Exception as e: raise CaptureCallbackError(("CaptureData failed for build {} of builder {}." " Exception generated: {} with message {}" ).format(build_data['number'], builder_info['name'], type(e).__name__, str(e))) from e post_data = ret_val series_name = '{}-{}'.format(builder_info['name'], self._data_name) context = self._defaultContext(build_data, builder_info['name']) yield self._store(post_data, series_name, context) @abc.abstractmethod def _builder_name_matches(self, builder_info): pass class CaptureData(CaptureDataBase): """ Capture methods for arbitrary data that may not be stored in the Buildbot database. """ def __init__(self, data_name, builder_name, callback=None): self._builder_name = builder_name super().__init__(data_name, callback) def _builder_name_matches(self, builder_info): return self._builder_name == builder_info['name'] class CaptureDataAllBuilders(CaptureDataBase): """ Capture methods for arbitrary data that may not be stored in the Buildbot database. """ def _builder_name_matches(self, builder_info): return True buildbot-3.4.0/master/buildbot/statistics/stats_service.py000066400000000000000000000066771413250514000240560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.statistics.storage_backends.base import StatsStorageBase from buildbot.util import service class StatsService(service.BuildbotService): """ A middleware for passing on statistics data to all storage backends. """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.consumers = [] def checkConfig(self, storage_backends): for wfb in storage_backends: if not isinstance(wfb, StatsStorageBase): raise TypeError("Invalid type of stats storage service {0!r}. " "Should be of type StatsStorageBase, " "is: {0!r}".format(type(StatsStorageBase))) @defer.inlineCallbacks def reconfigService(self, storage_backends): log.msg( "Reconfiguring StatsService with config: {0!r}".format(storage_backends)) self.checkConfig(storage_backends) self.registeredStorageServices = [] for svc in storage_backends: self.registeredStorageServices.append(svc) yield self.removeConsumers() yield self.registerConsumers() @defer.inlineCallbacks def registerConsumers(self): self.consumers = [] for svc in self.registeredStorageServices: for cap in svc.captures: cap.parent_svcs.append(svc) cap.master = self.master consumer = yield self.master.mq.startConsuming(cap.consume, cap.routingKey) self.consumers.append(consumer) @defer.inlineCallbacks def stopService(self): yield super().stopService() yield self.removeConsumers() @defer.inlineCallbacks def removeConsumers(self): for consumer in self.consumers: yield consumer.stopConsuming() self.consumers = [] @defer.inlineCallbacks def yieldMetricsValue(self, data_name, post_data, buildid): """ A method to allow posting data that is not generated and stored as build-data in the database. 
This method generates the `stats-yield-data` event to the mq layer which is then consumed in self.postData. @params data_name: (str) The unique name for identifying this data. post_data: (dict) A dictionary of key-value pairs that'll be sent for storage. buildid: The buildid of the current Build. """ build_data = yield self.master.data.get(('builds', buildid)) routingKey = ("stats-yieldMetricsValue", "stats-yield-data") msg = { 'data_name': data_name, 'post_data': post_data, 'build_data': build_data } self.master.mq.produce(routingKey, msg) buildbot-3.4.0/master/buildbot/statistics/storage_backends/000077500000000000000000000000001413250514000241045ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/statistics/storage_backends/__init__.py000066400000000000000000000013011413250514000262100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members buildbot-3.4.0/master/buildbot/statistics/storage_backends/base.py000066400000000000000000000017451413250514000253770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import abc class StatsStorageBase: """ Base class for sub service responsible for passing on stats data to a storage backend """ __metaclass__ = abc.ABCMeta @abc.abstractmethod def thd_postStatsValue(self, post_data, series_name, context=None): pass buildbot-3.4.0/master/buildbot/statistics/storage_backends/influxdb_client.py000066400000000000000000000041351413250514000276320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.python import log from buildbot import config from buildbot.statistics.storage_backends.base import StatsStorageBase try: from influxdb import InfluxDBClient except ImportError: InfluxDBClient = None class InfluxStorageService(StatsStorageBase): """ Delegates data to InfluxDB """ def __init__(self, url, port, user, password, db, captures, name="InfluxStorageService"): if not InfluxDBClient: config.error("Python client for InfluxDB not installed.") return self.url = url self.port = port self.user = user self.password = password self.db = db self.name = name self.captures = captures self.client = InfluxDBClient(self.url, self.port, self.user, self.password, self.db) self._inited = True def thd_postStatsValue(self, post_data, series_name, context=None): if not self._inited: log.err("Service {0} not initialized".format(self.name)) return data = { 'measurement': series_name, 'fields': post_data } log.msg("Sending data to InfluxDB") log.msg("post_data: {0!r}".format(post_data)) if context: log.msg("context: {0!r}".format(context)) data['tags'] = context self.client.write_points([data]) buildbot-3.4.0/master/buildbot/steps/000077500000000000000000000000001413250514000175525ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/steps/__init__.py000066400000000000000000000000001413250514000216510ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/steps/cmake.py000066400000000000000000000051701413250514000212070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import config from buildbot.interfaces import IRenderable from buildbot.process.buildstep import BuildStep from buildbot.process.buildstep import ShellMixin class CMake(ShellMixin, BuildStep): DEFAULT_CMAKE = 'cmake' name = 'cmake' description = ['running', 'cmake'] descriptionDone = ['cmake'] renderables = ( 'cmake', 'definitions', 'generator', 'options', 'path' ) haltOnFailure = True def __init__(self, path=None, generator=None, definitions=None, options=None, cmake=DEFAULT_CMAKE, **kwargs): self.path = path self.generator = generator if not (definitions is None or isinstance(definitions, dict) or IRenderable.providedBy(definitions)): config.error('definitions must be a dictionary or implement IRenderable') self.definitions = definitions if not (options is None or isinstance(options, (list, tuple)) or IRenderable.providedBy(options)): config.error('options must be a list, a tuple or implement IRenderable') self.options = options self.cmake = cmake kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) @defer.inlineCallbacks def run(self): """ run CMake """ command = [self.cmake] if self.generator: command.extend([ '-G', self.generator ]) if self.path: command.append(self.path) if self.definitions is not None: for item in self.definitions.items(): command.append('-D%s=%s' % item) if self.options is not None: command.extend(self.options) cmd = yield self.makeRemoteShellCommand(command=command) yield self.runCommand(cmd) return cmd.results() buildbot-3.4.0/master/buildbot/steps/cppcheck.py000066400000000000000000000073761413250514000217210ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.internet import defer from buildbot.process import logobserver from buildbot.process.buildstep import BuildStep from buildbot.process.buildstep import ShellMixin from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS class Cppcheck(ShellMixin, BuildStep): # Highly inspired from the Pylint step. 
name = "cppcheck" description = ["running", "cppcheck"] descriptionDone = ["cppcheck"] flunkingIssues = ('error',) MESSAGES = ( 'error', 'warning', 'style', 'performance', 'portability', 'information') renderables = ('binary', 'source', 'extra_args') def __init__(self, *args, **kwargs): for name, default in [('binary', 'cppcheck'), ('source', ['.']), ('enable', []), ('inconclusive', False), ('extra_args', [])]: setattr(self, name, kwargs.pop(name, default)) kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(*args, **kwargs) self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self._log_consumer)) self.counts = {} summaries = self.summaries = {} for m in self.MESSAGES: self.counts[m] = 0 summaries[m] = [] def _log_consumer(self): line_re = re.compile( r'(?:\[.+\]: )?\((?P{})\) .+'.format('|'.join(self.MESSAGES))) while True: stream, line = yield m = line_re.match(line) if m is not None: msgsev = m.group('severity') self.summaries[msgsev].append(line) self.counts[msgsev] += 1 @defer.inlineCallbacks def run(self): command = [self.binary] command.extend(self.source) if self.enable: command.append('--enable={}'.format(','.join(self.enable))) if self.inconclusive: command.append('--inconclusive') command.extend(self.extra_args) cmd = yield self.makeRemoteShellCommand(command=command) yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() self.descriptionDone = self.descriptionDone[:] for msg in self.MESSAGES: self.setProperty('cppcheck-{}'.format(msg), self.counts[msg], 'Cppcheck') if not self.counts[msg]: continue self.descriptionDone.append("{}={}".format(msg, self.counts[msg])) yield self.addCompleteLog(msg, '\n'.join(self.summaries[msg])) self.setProperty('cppcheck-total', sum(self.counts.values()), 'Cppcheck') yield self.updateSummary() if cmd.results() != SUCCESS: return cmd.results() for msg in self.flunkingIssues: if self.counts[msg] != 0: return FAILURE if sum(self.counts.values()) > 
0: return WARNINGS return SUCCESS buildbot-3.4.0/master/buildbot/steps/download_secret_to_worker.py000066400000000000000000000053271413250514000254020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import stat from twisted.internet import defer from buildbot.process.buildstep import FAILURE from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import BuildStep from buildbot.process.results import worst_status from buildbot.steps.worker import CompositeStepMixin class DownloadSecretsToWorker(BuildStep, CompositeStepMixin): renderables = ['secret_to_be_populated'] def __init__(self, populated_secret_list, **kwargs): super().__init__(**kwargs) self.secret_to_be_populated = populated_secret_list @defer.inlineCallbacks def runPopulateSecrets(self): result = SUCCESS for path, secretvalue in self.secret_to_be_populated: if not isinstance(path, str): raise ValueError("Secret path {} is not a string".format(path)) self.secret_to_be_interpolated = secretvalue res = yield self.downloadFileContentToWorker(path, self.secret_to_be_interpolated, mode=stat.S_IRUSR | stat.S_IWUSR) result = worst_status(result, res) return result @defer.inlineCallbacks def run(self): res = yield self.runPopulateSecrets() return res class RemoveWorkerFileSecret(BuildStep, CompositeStepMixin): renderables 
= ['secret_to_be_populated'] def __init__(self, populated_secret_list, logEnviron=False, **kwargs): super().__init__(**kwargs) self.logEnviron = logEnviron self.secret_to_be_populated = populated_secret_list @defer.inlineCallbacks def runRemoveWorkerFileSecret(self): all_results = [] for path, _ in self.secret_to_be_populated: res = yield self.runRmFile(path, abandonOnFailure=False) all_results.append(res) if FAILURE in all_results: result = FAILURE else: result = SUCCESS return result @defer.inlineCallbacks def run(self): res = yield self.runRemoveWorkerFileSecret() return res buildbot-3.4.0/master/buildbot/steps/gitdiffinfo.py000066400000000000000000000070431413250514000224200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json from twisted.internet import defer from buildbot import config from buildbot.process import buildstep from buildbot.process import logobserver from buildbot.process import results class GitDiffInfo(buildstep.ShellMixin, buildstep.BuildStep): name = 'GitDiffInfo' description = 'running GitDiffInfo' descriptionDone = 'GitDiffInfo' def __init__(self, compareToRef='master', dataName='diffinfo-master', **kwargs): try: from unidiff import PatchSet [PatchSet] # silence pylint except ImportError: config.error('unidiff package must be installed in order to use GitDiffInfo') kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) self._compare_to_ref = compareToRef self._data_name = dataName self._observer = logobserver.BufferLogObserver() def _convert_hunk(self, hunk): # TODO: build an intermediate class that would handle serialization. We want to output # as few data as possible, even if the json is not human-readable return { 'ss': hunk.source_start, 'sl': hunk.source_length, 'ts': hunk.target_start, 'tl': hunk.target_length, } def _convert_file(self, file): return { 'source_file': file.source_file, 'target_file': file.target_file, 'is_binary': file.is_binary_file, 'is_rename': file.is_rename, 'hunks': [self._convert_hunk(hunk) for hunk in file] } def _convert_patchset(self, patchset): return [self._convert_file(file) for file in patchset] @defer.inlineCallbacks def run(self): command = ['git', 'merge-base', 'HEAD', self._compare_to_ref] cmd = yield self.makeRemoteShellCommand(command=command, stdioLogName='stdio-merge-base', collectStdout=True) yield self.runCommand(cmd) log = yield self.getLog("stdio-merge-base") log.finish() if cmd.results() != results.SUCCESS: return cmd.results() commit = cmd.stdout.strip() self.setProperty('diffinfo-merge-base-commit', commit, 'GitDiffInfo') self.addLogObserver('stdio-diff', self._observer) command = ['git', 'diff', '--no-prefix', '-U0', commit, 
'HEAD'] cmd = yield self.makeRemoteShellCommand(command=command, stdioLogName='stdio-diff') yield self.runCommand(cmd) if cmd.results() != results.SUCCESS: return cmd.results() from unidiff import PatchSet patchset = PatchSet(self._observer.getStdout(), metadata_only=True) data = json.dumps(self._convert_patchset(patchset)).encode('utf-8') yield self.setBuildData(self._data_name, data, 'GitDiffInfo') return cmd.results() buildbot-3.4.0/master/buildbot/steps/http.py000066400000000000000000000151701413250514000211070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from buildbot import config from buildbot.process.buildstep import FAILURE from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import BuildStep # use the 'requests' lib: https://requests.readthedocs.io/en/master/ try: import txrequests import requests except ImportError: txrequests = None # This step uses a global Session object, which encapsulates a thread pool as # well as state such as cookies and authentication. This state may pose # problems for users, where one step may get a cookie that is subsequently used # by another step in a different build. 
_session = None def getSession(): global _session if _session is None: _session = txrequests.Session() reactor.addSystemEventTrigger("before", "shutdown", closeSession) return _session def setSession(session): global _session _session = session def closeSession(): global _session if _session is not None: _session.close() _session = None def _headerSet(headers): return frozenset(map(lambda x: x.casefold(), headers)) class HTTPStep(BuildStep): name = 'HTTPStep' description = 'Requesting' descriptionDone = 'Requested' requestsParams = ["params", "data", "json", "headers", "cookies", "files", "auth", "timeout", "allow_redirects", "proxies", "hooks", "stream", "verify", "cert"] renderables = requestsParams + ["method", "url"] session = None def __init__(self, url, method, hide_request_headers=None, hide_response_headers=None, **kwargs): if txrequests is None: config.error( "Need to install txrequest to use this step:\n\n pip install txrequests") if method not in ('POST', 'GET', 'PUT', 'DELETE', 'HEAD', 'OPTIONS'): config.error("Wrong method given: '{}' is not known".format(method)) self.method = method self.url = url self.hide_request_headers = _headerSet(hide_request_headers or []) self.hide_response_headers = _headerSet(hide_response_headers or []) for param in self.requestsParams: setattr(self, param, kwargs.pop(param, None)) super().__init__(**kwargs) @defer.inlineCallbacks def run(self): # create a new session if it doesn't exist self.session = getSession() requestkwargs = { 'method': self.method, 'url': self.url } for param in self.requestsParams: value = getattr(self, param, None) if value is not None: requestkwargs[param] = value log = yield self.addLog('log') # known methods already tested in __init__ yield log.addHeader('Performing {} request to {}\n'.format(self.method, self.url)) if self.params: yield log.addHeader('Parameters:\n') params = sorted(self.params.items(), key=lambda x: x[0]) requestkwargs['params'] = params for k, v in params: yield 
log.addHeader('\t{}: {}\n'.format(k, v)) data = requestkwargs.get("data", None) if data: yield log.addHeader('Data:\n') if isinstance(data, dict): for k, v in data.items(): yield log.addHeader('\t{}: {}\n'.format(k, v)) else: yield log.addHeader('\t{}\n'.format(data)) try: r = yield self.session.request(**requestkwargs) except requests.exceptions.ConnectionError as e: yield log.addStderr('An exception occurred while performing the request: {}'.format(e)) return FAILURE if r.history: yield log.addStdout('\nRedirected %d times:\n\n' % len(r.history)) for rr in r.history: yield self.log_response(log, rr) yield log.addStdout('=' * 60 + '\n') yield self.log_response(log, r) yield log.finish() self.descriptionDone = ["Status code: %d" % r.status_code] if (r.status_code < 400): return SUCCESS else: return FAILURE @defer.inlineCallbacks def log_response(self, log, response): yield log.addHeader('Request Headers:\n') for k, v in response.request.headers.items(): if k.casefold() in self.hide_request_headers: v = '' yield log.addHeader('\t{}: {}\n'.format(k, v)) yield log.addStdout('URL: {}\n'.format(response.url)) if response.status_code == requests.codes.ok: yield log.addStdout('Status: {}\n'.format(response.status_code)) else: yield log.addStderr('Status: {}\n'.format(response.status_code)) yield log.addHeader('Response Headers:\n') for k, v in response.headers.items(): if k.casefold() in self.hide_response_headers: v = '' yield log.addHeader('\t{}: {}\n'.format(k, v)) yield log.addStdout(' ------ Content ------\n{}'.format(response.text)) content_log = yield self.addLog('content') yield content_log.addStdout(response.text) class POST(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, method='POST', **kwargs) class GET(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, method='GET', **kwargs) class PUT(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, method='PUT', **kwargs) class DELETE(HTTPStep): def __init__(self, url, 
**kwargs): super().__init__(url, method='DELETE', **kwargs) class HEAD(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, method='HEAD', **kwargs) class OPTIONS(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, method='OPTIONS', **kwargs) buildbot-3.4.0/master/buildbot/steps/master.py000066400000000000000000000207301413250514000214210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import pprint import re from twisted.internet import defer from twisted.internet import error from twisted.internet import reactor from twisted.internet.protocol import ProcessProtocol from twisted.python import runtime from buildbot.process.buildstep import FAILURE from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import BuildStep from buildbot.util import deferwaiter class MasterShellCommand(BuildStep): """ Run a shell command locally - on the buildmaster. The shell command COMMAND is specified just as for a RemoteShellCommand. Note that extra logfiles are not supported. 
""" name = 'MasterShellCommand' description = 'Running' descriptionDone = 'Ran' descriptionSuffix = None renderables = ['command', 'env'] haltOnFailure = True flunkOnFailure = True def __init__(self, command, **kwargs): self.env = kwargs.pop('env', None) self.usePTY = kwargs.pop('usePTY', 0) self.interruptSignal = kwargs.pop('interruptSignal', 'KILL') self.logEnviron = kwargs.pop('logEnviron', True) super().__init__(**kwargs) self.command = command self.masterWorkdir = self.workdir self._deferwaiter = deferwaiter.DeferWaiter() self._status_object = None class LocalPP(ProcessProtocol): def __init__(self, step): self.step = step self._finish_d = defer.Deferred() self.step._deferwaiter.add(self._finish_d) def outReceived(self, data): self.step._deferwaiter.add(self.step.stdio_log.addStdout(data)) def errReceived(self, data): self.step._deferwaiter.add(self.step.stdio_log.addStderr(data)) def processEnded(self, status_object): if status_object.value.exitCode is not None: msg = "exit status {}\n".format(status_object.value.exitCode) self.step._deferwaiter.add(self.step.stdio_log.addHeader(msg)) if status_object.value.signal is not None: msg = "signal {}\n".format(status_object.value.signal) self.step._deferwaiter.add(self.step.stdio_log.addHeader(msg)) self.step._status_object = status_object self._finish_d.callback(None) @defer.inlineCallbacks def run(self): # render properties command = self.command # set up argv if isinstance(command, (str, bytes)): if runtime.platformType == 'win32': # allow %COMSPEC% to have args argv = os.environ['COMSPEC'].split() if '/c' not in argv: argv += ['/c'] argv += [command] else: # for posix, use /bin/sh. 
for other non-posix, well, doesn't # hurt to try argv = ['/bin/sh', '-c', command] else: if runtime.platformType == 'win32': # allow %COMSPEC% to have args argv = os.environ['COMSPEC'].split() if '/c' not in argv: argv += ['/c'] argv += list(command) else: argv = command self.stdio_log = yield self.addLog("stdio") if isinstance(command, (str, bytes)): yield self.stdio_log.addHeader(command.strip() + "\n\n") else: yield self.stdio_log.addHeader(" ".join(command) + "\n\n") yield self.stdio_log.addHeader("** RUNNING ON BUILDMASTER **\n") yield self.stdio_log.addHeader(" in dir {}\n".format(os.getcwd())) yield self.stdio_log.addHeader(" argv: {}\n".format(argv)) if self.env is None: env = os.environ else: assert isinstance(self.env, dict) env = self.env for key, v in self.env.items(): if isinstance(v, list): # Need to do os.pathsep translation. We could either do that # by replacing all incoming ':'s with os.pathsep, or by # accepting lists. I like lists better. # If it's not a string, treat it as a sequence to be # turned in to a string. self.env[key] = os.pathsep.join(self.env[key]) # do substitution on variable values matching pattern: ${name} p = re.compile(r'\${([0-9a-zA-Z_]*)}') def subst(match): return os.environ.get(match.group(1), "") newenv = {} for key, v in env.items(): if v is not None: if not isinstance(v, (str, bytes)): raise RuntimeError(("'env' values must be strings or " "lists; key '{}' is incorrect").format(key)) newenv[key] = p.sub(subst, env[key]) env = newenv if self.logEnviron: yield self.stdio_log.addHeader(" env: %r\n" % (env,)) # TODO add a timeout? 
self.process = reactor.spawnProcess(self.LocalPP(self), argv[0], argv, path=self.masterWorkdir, usePTY=self.usePTY, env=env) # self._deferwaiter will yield only after LocalPP finishes yield self._deferwaiter.wait() status_value = self._status_object.value if status_value.signal is not None: self.descriptionDone = ["killed ({})".format(status_value.signal)] return FAILURE elif status_value.exitCode != 0: self.descriptionDone = ["failed ({})".format(status_value.exitCode)] return FAILURE else: return SUCCESS def interrupt(self, reason): try: self.process.signalProcess(self.interruptSignal) except KeyError: # Process not started yet pass except error.ProcessExitedAlready: pass super().interrupt(reason) class SetProperty(BuildStep): name = 'SetProperty' description = ['Setting'] descriptionDone = ['Set'] renderables = ['property', 'value'] def __init__(self, property, value, **kwargs): super().__init__(**kwargs) self.property = property self.value = value def run(self): properties = self.build.getProperties() properties.setProperty( self.property, self.value, self.name, runtime=True) return defer.succeed(SUCCESS) class SetProperties(BuildStep): name = 'SetProperties' description = ['Setting Properties..'] descriptionDone = ['Properties Set'] renderables = ['properties'] def __init__(self, properties=None, **kwargs): super().__init__(**kwargs) self.properties = properties def run(self): if self.properties is None: return defer.succeed(SUCCESS) for k, v in self.properties.items(): self.setProperty(k, v, self.name, runtime=True) return defer.succeed(SUCCESS) class Assert(BuildStep): name = 'Assert' description = ['Checking..'] descriptionDone = ["checked"] renderables = ['check'] def __init__(self, check, **kwargs): super().__init__(**kwargs) self.check = check self.descriptionDone = ["checked {}".format(repr(self.check))] def run(self): if self.check: return defer.succeed(SUCCESS) return defer.succeed(FAILURE) class LogRenderable(BuildStep): name = 'LogRenderable' 
description = ['Logging'] descriptionDone = ['Logged'] renderables = ['content'] def __init__(self, content, **kwargs): super().__init__(**kwargs) self.content = content @defer.inlineCallbacks def run(self): content = pprint.pformat(self.content) yield self.addCompleteLog(name='Output', text=content) return SUCCESS buildbot-3.4.0/master/buildbot/steps/maxq.py000066400000000000000000000045601413250514000210770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import config from buildbot.process import buildstep from buildbot.process import logobserver from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS class MaxQObserver(logobserver.LogLineObserver): def __init__(self): super().__init__() self.failures = 0 def outLineReceived(self, line): if line.startswith('TEST FAILURE:'): self.failures += 1 class MaxQ(buildstep.ShellMixin, buildstep.BuildStep): flunkOnFailure = True name = "maxq" binary = 'run_maxq.py' failures = 0 def __init__(self, testdir=None, **kwargs): if not testdir: config.error("please pass testdir") self.testdir = testdir kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) self.observer = MaxQObserver() self.addLogObserver('stdio', self.observer) @defer.inlineCallbacks def run(self): command = [self.binary] command.append(self.testdir) cmd = yield self.makeRemoteShellCommand(command=command) yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() self.failures = self.observer.failures # treat a nonzero exit status as a failure, if no other failures are # detected if not self.failures and cmd.didFail(): self.failures = 1 if self.failures: return FAILURE return SUCCESS def getResultSummary(self): if self.failures: return {'step': "{} maxq failures".format(self.failures)} return {'step': 'success'} buildbot-3.4.0/master/buildbot/steps/mswin.py000066400000000000000000000071351413250514000212670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.process.buildstep import BuildStep from buildbot.process.buildstep import ShellMixin from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS class Robocopy(ShellMixin, BuildStep): """ Robocopy build step. This is just a wrapper around the standard shell command that will handle arguments and return codes accordingly for Robocopy. """ renderables = [ 'custom_opts', 'destination', 'exclude_dirs', 'exclude_files', 'files', 'source' ] # Robocopy exit flags (they are combined to make up the exit code) # See: http://ss64.com/nt/robocopy-exit.html return_flags = { FAILURE: [8, 16], WARNINGS: [2, 4], SUCCESS: [0, 1] } def __init__(self, source, destination, exclude=None, exclude_files=None, **kwargs): self.source = source self.destination = destination self.files = kwargs.pop('files', None) self.recursive = kwargs.pop('recursive', False) self.mirror = kwargs.pop('mirror', False) self.move = kwargs.pop('move', False) self.exclude_files = exclude_files if exclude and not exclude_files: self.exclude_files = exclude self.exclude_dirs = kwargs.pop('exclude_dirs', None) self.custom_opts = kwargs.pop('custom_opts', None) self.verbose = kwargs.pop('verbose', False) super().__init__(**kwargs) @defer.inlineCallbacks def run(self): command = ['robocopy', self.source, self.destination] if self.files: command += self.files if self.recursive: command.append('/E') if self.mirror: command.append('/MIR') if self.move: command.append('/MOVE') if self.exclude_files: command.append('/XF') 
command += self.exclude_files if self.exclude_dirs: command.append('/XD') command += self.exclude_dirs if self.verbose: command += ['/V', '/TS', '/FP'] if self.custom_opts: command += self.custom_opts command += ['/TEE', '/NP'] cmd = yield self.makeRemoteShellCommand(command=command) yield self.runCommand(cmd) # If we have a "clean" return code, it's good. # Otherwise, look for errors first, warnings second. if cmd.rc == 0 or cmd.rc == 1: return SUCCESS for result in [FAILURE, WARNINGS]: for flag in self.return_flags[result]: if (cmd.rc & flag) == flag: return result log.msg("Unknown return code for Robocopy: {}".format(cmd.rc)) return EXCEPTION buildbot-3.4.0/master/buildbot/steps/package/000077500000000000000000000000001413250514000211455ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/steps/package/__init__.py000066400000000000000000000014771413250514000232670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Steve 'Ashcrow' Milner """ Steps specific to package formats. 
""" buildbot-3.4.0/master/buildbot/steps/package/deb/000077500000000000000000000000001413250514000216775ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/steps/package/deb/__init__.py000066400000000000000000000000001413250514000237760ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/steps/package/deb/lintian.py000066400000000000000000000055051413250514000237140ustar00rootroot00000000000000# This program is free software; you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members # Portions Copyright Marius Rieder """ Steps and objects related to lintian """ from twisted.internet import defer from buildbot import config from buildbot.process import buildstep from buildbot.process import logobserver from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps.package import util as pkgutil class MaxQObserver(logobserver.LogLineObserver): def __init__(self): super().__init__() self.failures = 0 def outLineReceived(self, line): if line.startswith('TEST FAILURE:'): self.failures += 1 class DebLintian(buildstep.ShellMixin, buildstep.BuildStep): name = "lintian" description = "Lintian running" descriptionDone = "Lintian" fileloc = None suppressTags = [] flunkOnFailure = False warnOnFailure = True def __init__(self, fileloc=None, suppressTags=None, **kwargs): kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) if fileloc: self.fileloc = fileloc if suppressTags: self.suppressTags = suppressTags if not self.fileloc: config.error("You must specify a fileloc") self.command = ["lintian", "-v", self.fileloc] if self.suppressTags: for tag in self.suppressTags: self.command += ['--suppress-tags', tag] self.obs = pkgutil.WEObserver() self.addLogObserver('stdio', self.obs) @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() warnings = self.obs.warnings errors = self.obs.errors if warnings: yield self.addCompleteLog('%d Warnings' % len(warnings), "\n".join(warnings)) if errors: yield self.addCompleteLog('%d Errors' % len(errors), "\n".join(errors)) if cmd.rc != 0 or errors: return FAILURE if warnings: return WARNINGS return SUCCESS buildbot-3.4.0/master/buildbot/steps/package/deb/pbuilder.py000066400000000000000000000166021413250514000240640ustar00rootroot00000000000000# This program is free 
software; you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Marius Rieder """ Steps and objects related to pbuilder """ import re import stat import time from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.process import logobserver from buildbot.process import remotecommand from buildbot.process import results from buildbot.steps.shell import WarningCountingShellCommand class DebPbuilder(WarningCountingShellCommand): """Build a debian package with pbuilder inside of a chroot.""" name = "pbuilder" haltOnFailure = 1 flunkOnFailure = 1 description = ["building"] descriptionDone = ["built"] warningPattern = r".*(warning[: ]|\sW: ).*" architecture = None distribution = 'stable' basetgz = None _default_basetgz = "/var/cache/pbuilder/{distribution}-{architecture}-buildbot.tgz" mirror = "http://cdn.debian.net/debian/" othermirror = "" extrapackages = [] keyring = None components = None maxAge = 60 * 60 * 24 * 7 pbuilder = '/usr/sbin/pbuilder' baseOption = '--basetgz' renderables = ['architecture', 'distribution', 'basetgz', 'mirror', 'othermirror', 'extrapackages', 'keyring', 'components'] def __init__(self, architecture=None, distribution=None, basetgz=None, mirror=None, othermirror=None, extrapackages=None, keyring=None, components=None, **kwargs): super().__init__(**kwargs) if architecture: 
self.architecture = architecture if distribution: self.distribution = distribution if mirror: self.mirror = mirror if othermirror: self.othermirror = "|".join(othermirror) if extrapackages: self.extrapackages = extrapackages if keyring: self.keyring = keyring if components: self.components = components if basetgz: self.basetgz = basetgz if not self.distribution: config.error("You must specify a distribution.") self.suppressions.append( (None, re.compile(r"\.pbuilderrc does not exist"), None, None)) self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) @defer.inlineCallbacks def run(self): if self.basetgz is None: self.basetgz = self._default_basetgz kwargs = {} if self.architecture: kwargs['architecture'] = self.architecture else: kwargs['architecture'] = 'local' kwargs['distribution'] = self.distribution self.basetgz = self.basetgz.format(**kwargs) self.command = ['pdebuild', '--buildresult', '.', '--pbuilder', self.pbuilder] if self.architecture: self.command += ['--architecture', self.architecture] self.command += ['--', '--buildresult', '.', self.baseOption, self.basetgz] if self.extrapackages: self.command += ['--extrapackages', " ".join(self.extrapackages)] res = yield self.checkBasetgz() if res != results.SUCCESS: return res res = yield super().run() return res @defer.inlineCallbacks def checkBasetgz(self): cmd = remotecommand.RemoteCommand('stat', {'file': self.basetgz}) yield self.runCommand(cmd) if cmd.rc != 0: log.msg("basetgz not found, initializing it.") command = ['sudo', self.pbuilder, '--create', self.baseOption, self.basetgz, '--distribution', self.distribution, '--mirror', self.mirror] if self.othermirror: command += ['--othermirror', self.othermirror] if self.architecture: command += ['--architecture', self.architecture] if self.extrapackages: command += ['--extrapackages', " ".join(self.extrapackages)] if self.keyring: command += ['--debootstrapopts', "--keyring={}".format(self.keyring)] if self.components: command 
+= ['--components', self.components] cmd = remotecommand.RemoteShellCommand(self.workdir, command) stdio_log = yield self.addLog("pbuilder") cmd.useLog(stdio_log, True, "stdio") self.description = ["PBuilder", "create."] yield self.updateSummary() yield self.runCommand(cmd) if cmd.rc != 0: log.msg("Failure when running {}.".format(cmd)) return results.FAILURE return results.SUCCESS s = cmd.updates["stat"][-1] # basetgz will be a file when running in pbuilder # and a directory in case of cowbuilder if stat.S_ISREG(s[stat.ST_MODE]) or stat.S_ISDIR(s[stat.ST_MODE]): log.msg("{} found.".format(self.basetgz)) age = time.time() - s[stat.ST_MTIME] if age >= self.maxAge: log.msg("basetgz outdated, updating") command = ['sudo', self.pbuilder, '--update', self.baseOption, self.basetgz] cmd = remotecommand.RemoteShellCommand(self.workdir, command) stdio_log = yield self.addLog("pbuilder") cmd.useLog(stdio_log, True, "stdio") yield self.runCommand(cmd) if cmd.rc != 0: log.msg("Failure when running {}.".format(cmd)) return results.FAILURE return results.SUCCESS log.msg("{} is not a file or a directory.".format(self.basetgz)) return results.FAILURE def logConsumer(self): r = re.compile(r"dpkg-genchanges >\.\./(.+\.changes)") while True: stream, line = yield mo = r.search(line) if mo: self.setProperty("deb-changes", mo.group(1), "DebPbuilder") class DebCowbuilder(DebPbuilder): """Build a debian package with cowbuilder inside of a chroot.""" name = "cowbuilder" _default_basetgz = "/var/cache/pbuilder/{distribution}-{architecture}-buildbot.cow/" pbuilder = '/usr/sbin/cowbuilder' baseOption = '--basepath' class UbuPbuilder(DebPbuilder): """Build a Ubuntu package with pbuilder inside of a chroot.""" distribution = None mirror = "http://archive.ubuntu.com/ubuntu/" components = "main universe" class UbuCowbuilder(DebCowbuilder): """Build a Ubuntu package with cowbuilder inside of a chroot.""" distribution = None mirror = "http://archive.ubuntu.com/ubuntu/" components = "main universe" 
buildbot-3.4.0/master/buildbot/steps/package/rpm/000077500000000000000000000000001413250514000217435ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/steps/package/rpm/__init__.py000066400000000000000000000021441413250514000240550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Steve 'Ashcrow' Milner """ Steps specific to the rpm format. """ from buildbot.steps.package.rpm.mock import MockBuildSRPM from buildbot.steps.package.rpm.mock import MockRebuild from buildbot.steps.package.rpm.rpmbuild import RpmBuild from buildbot.steps.package.rpm.rpmlint import RpmLint __all__ = ['RpmBuild', 'RpmLint', 'MockBuildSRPM', 'MockRebuild'] buildbot-3.4.0/master/buildbot/steps/package/rpm/mock.py000066400000000000000000000120251413250514000232460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Marius Rieder import re from twisted.internet import defer from buildbot import config from buildbot.process import buildstep from buildbot.process import logobserver class MockStateObserver(logobserver.LogLineObserver): _line_re = re.compile(r'^.*State Changed: (.*)$') def outLineReceived(self, line): m = self._line_re.search(line.strip()) if m: state = m.group(1) if not state == 'end': self.step.descriptionSuffix = ["[{}]".format(m.group(1))] else: self.step.descriptionSuffix = None class Mock(buildstep.ShellMixin, buildstep.CommandMixin, buildstep.BuildStep): """Add the mock logfiles and clean them if they already exist. Add support for the root and resultdir parameter of mock.""" name = "mock" renderables = ["root", "resultdir"] haltOnFailure = 1 flunkOnFailure = 1 mock_logfiles = ['build.log', 'root.log', 'state.log'] root = None resultdir = None def __init__(self, root=None, resultdir=None, **kwargs): kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) if root: self.root = root if resultdir: self.resultdir = resultdir if not self.root: config.error("You must specify a mock root") self.command = ['mock', '--root', self.root] if self.resultdir: self.command += ['--resultdir', self.resultdir] @defer.inlineCallbacks def run(self): # Try to remove the old mock logs first. 
if self.resultdir: for lname in self.mock_logfiles: self.logfiles[lname] = self.build.path_module.join(self.resultdir, lname) else: for lname in self.mock_logfiles: self.logfiles[lname] = lname self.addLogObserver('state.log', MockStateObserver()) yield self.runRmdir([self.build.path_module.join('build', self.logfiles[l]) for l in self.mock_logfiles]) cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) return cmd.results() class MockBuildSRPM(Mock): """Build a srpm within a mock. Requires a spec file and a sources dir.""" name = "mockbuildsrpm" description = ["mock buildsrpm"] descriptionDone = ["mock buildsrpm"] spec = None sources = '.' def __init__(self, spec=None, sources=None, **kwargs): """ Creates the MockBuildSRPM object. @type spec: str @param spec: the path of the specfiles. @type sources: str @param sources: the path of the sources dir. @type kwargs: dict @param kwargs: All further keyword arguments. """ super().__init__(**kwargs) if spec: self.spec = spec if sources: self.sources = sources if not self.spec: config.error("You must specify a spec file") if not self.sources: config.error("You must specify a sources dir") self.command += ['--buildsrpm', '--spec', self.spec, '--sources', self.sources] self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) def logConsumer(self): r = re.compile(r"Wrote: .*/([^/]*.src.rpm)") while True: stream, line = yield m = r.search(line) if m: self.setProperty("srpm", m.group(1), 'MockBuildSRPM') class MockRebuild(Mock): """Rebuild a srpm within a mock. Requires a srpm file.""" name = "mock" description = ["mock rebuilding srpm"] descriptionDone = ["mock rebuild srpm"] srpm = None def __init__(self, srpm=None, **kwargs): """ Creates the MockRebuildRPM object. @type srpm: str @param srpm: the path of the srpm file. @type kwargs: dict @param kwargs: All further keyword arguments. 
""" super().__init__(**kwargs) if srpm: self.srpm = srpm if not self.srpm: config.error("You must specify a srpm") self.command += ['--rebuild', self.srpm] buildbot-3.4.0/master/buildbot/steps/package/rpm/rpmbuild.py000066400000000000000000000116651413250514000241440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Dan Radez # Portions Copyright Steve 'Ashcrow' Milner import os from twisted.internet import defer from buildbot import config from buildbot.process import buildstep from buildbot.process import logobserver class RpmBuild(buildstep.ShellMixin, buildstep.BuildStep): """ RpmBuild build step. 
""" renderables = ['dist'] name = "rpmbuilder" haltOnFailure = 1 flunkOnFailure = 1 description = ["RPMBUILD"] descriptionDone = ["RPMBUILD"] def __init__(self, specfile=None, topdir='`pwd`', builddir='`pwd`', rpmdir='`pwd`', sourcedir='`pwd`', specdir='`pwd`', srcrpmdir='`pwd`', dist='.el6', define=None, autoRelease=False, vcsRevision=False, **kwargs): kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) self.dist = dist self.base_rpmbuild = ( ('rpmbuild --define "_topdir {}" --define "_builddir {}"' ' --define "_rpmdir {}" --define "_sourcedir {}"' ' --define "_specdir {}" --define "_srcrpmdir {}"').format(topdir, builddir, rpmdir, sourcedir, specdir, srcrpmdir)) if define is None: define = {} for k, v in define.items(): self.base_rpmbuild += " --define \"{} {}\"".format(k, v) self.specfile = specfile self.autoRelease = autoRelease self.vcsRevision = vcsRevision if not self.specfile: config.error("You must specify a specfile") self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) @defer.inlineCallbacks def run(self): rpm_extras_dict = {} rpm_extras_dict['dist'] = self.dist if self.autoRelease: relfile = '{}.release'.format(os.path.basename(self.specfile).split('.')[0]) try: with open(relfile, 'r') as rfile: rel = int(rfile.readline().strip()) except (IOError, TypeError, ValueError): rel = 0 rpm_extras_dict['_release'] = rel with open(relfile, 'w') as rfile: rfile.write(str(rel + 1)) if self.vcsRevision: revision = self.getProperty('got_revision') # only do this in the case where there's a single codebase if revision and not isinstance(revision, dict): rpm_extras_dict['_revision'] = revision self.rpmbuild = self.base_rpmbuild # The unit tests expect a certain order, so we sort the dict to keep # format the same every time for k, v in sorted(rpm_extras_dict.items()): self.rpmbuild = '{0} --define "{1} {2}"'.format( self.rpmbuild, k, v) command = '{} -ba {}'.format(self.rpmbuild, self.specfile) cmd 
= yield self.makeRemoteShellCommand(command=command) yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() yield self.addCompleteLog('RPM Command Log', "\n".join(self.rpmcmdlog)) if self.rpmerrors: yield self.addCompleteLog('RPM Errors', "\n".join(self.rpmerrors)) return cmd.results() def logConsumer(self): rpm_prefixes = ['Provides:', 'Requires(', 'Requires:', 'Checking for unpackaged', 'Wrote:', 'Executing(%', '+ ', 'Processing files:'] rpm_err_pfx = [' ', 'RPM build errors:', 'error: '] self.rpmcmdlog = [] self.rpmerrors = [] while True: stream, line = yield for pfx in rpm_prefixes: if line.startswith(pfx): self.rpmcmdlog.append(line) break for err in rpm_err_pfx: if line.startswith(err): self.rpmerrors.append(line) break buildbot-3.4.0/master/buildbot/steps/package/rpm/rpmlint.py000066400000000000000000000046101413250514000240030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Steve 'Ashcrow' Milner """ Steps and objects related to rpmlint. """ from twisted.internet import defer from buildbot.steps.package import util as pkgutil from buildbot.steps.shell import Test class RpmLint(Test): """ Rpmlint build step. 
""" name = "rpmlint" description = ["Checking for RPM/SPEC issues"] descriptionDone = ["Finished checking RPM/SPEC issues"] fileloc = '.' config = None def __init__(self, fileloc=None, config=None, **kwargs): """ Create the Rpmlint object. @type fileloc: str @param fileloc: Location glob of the specs or rpms. @type config: str @param config: path to the rpmlint user config. @type kwargs: dict @param fileloc: all other keyword arguments. """ super().__init__(**kwargs) if fileloc: self.fileloc = fileloc if config: self.config = config self.command = ["rpmlint", "-i"] if self.config: self.command += ['-f', self.config] self.command.append(self.fileloc) self.obs = pkgutil.WEObserver() self.addLogObserver('stdio', self.obs) @defer.inlineCallbacks def createSummary(self): """ Create nice summary logs. @param log: log to create summary off of. """ warnings = self.obs.warnings errors = [] if warnings: yield self.addCompleteLog('%d Warnings' % len(warnings), "\n".join(warnings)) if errors: yield self.addCompleteLog('%d Errors' % len(errors), "\n".join(errors)) buildbot-3.4.0/master/buildbot/steps/package/util.py000066400000000000000000000021561413250514000225000ustar00rootroot00000000000000# This program is free software; you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members # Portions Copyright Marius Rieder from buildbot.process import logobserver class WEObserver(logobserver.LogLineObserver): def __init__(self): super().__init__() self.warnings = [] self.errors = [] def outLineReceived(self, line): if line.startswith('W: '): self.warnings.append(line) elif line.startswith('E: '): self.errors.append(line) buildbot-3.4.0/master/buildbot/steps/python.py000066400000000000000000000354401413250514000214530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re from twisted.internet import defer from buildbot import config from buildbot.process import buildstep from buildbot.process import logobserver from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results class BuildEPYDoc(buildstep.ShellMixin, buildstep.BuildStep): name = "epydoc" command = ["make", "epydocs"] description = "building epydocs" descriptionDone = "epydoc" def __init__(self, **kwargs): kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self._log_consumer)) def _log_consumer(self): self.import_errors = 0 self.warnings = 0 self.errors = 0 while True: stream, line = yield if line.startswith("Error importing "): self.import_errors += 1 if line.find("Warning: ") != -1: self.warnings += 1 if line.find("Error: ") != -1: self.errors += 1 def getResultSummary(self): summary = ' '.join(self.descriptionDone) if self.import_errors: summary += " ierr={}".format(self.import_errors) if self.warnings: summary += " warn={}".format(self.warnings) if self.errors: summary += " err={}".format(self.errors) if self.results != SUCCESS: summary += ' ({})'.format(Results[self.results]) return {'step': summary} @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() if cmd.didFail(): return FAILURE if self.warnings or self.errors: return WARNINGS return SUCCESS class PyFlakes(buildstep.ShellMixin, buildstep.BuildStep): name = "pyflakes" command = ["make", "pyflakes"] description = "running pyflakes" descriptionDone = "pyflakes" flunkOnFailure = False # any pyflakes lines like this cause FAILURE _flunkingIssues = ("undefined",) _MESSAGES = ("unused", "undefined", "redefs", "import*", "misc") def __init__(self, *args, 
**kwargs): # PyFlakes return 1 for both warnings and errors. We # categorize this initially as WARNINGS so that # evaluateCommand below can inspect the results more closely. kwargs['decodeRC'] = {0: SUCCESS, 1: WARNINGS} kwargs = self.setupShellMixin(kwargs) super().__init__(*args, **kwargs) self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self._log_consumer)) counts = self.counts = {} summaries = self.summaries = {} for m in self._MESSAGES: counts[m] = 0 summaries[m] = [] # we need a separate variable for syntax errors self._hasSyntaxError = False def _log_consumer(self): counts = self.counts summaries = self.summaries first = True while True: stream, line = yield if stream == 'h': continue # the first few lines might contain echoed commands from a 'make # pyflakes' step, so don't count these as warnings. Stop ignoring # the initial lines as soon as we see one with a colon. if first: if ':' in line: # there's the colon, this is the first real line first = False # fall through and parse the line else: # skip this line, keep skipping non-colon lines continue if line.find("imported but unused") != -1: m = "unused" elif line.find("*' used; unable to detect undefined names") != -1: m = "import*" elif line.find("undefined name") != -1: m = "undefined" elif line.find("redefinition of unused") != -1: m = "redefs" elif line.find("invalid syntax") != -1: self._hasSyntaxError = True # we can do this, because if a syntax error occurs # the output will only contain the info about it, nothing else m = "misc" else: m = "misc" summaries[m].append(line) counts[m] += 1 def getResultSummary(self): summary = ' '.join(self.descriptionDone) for m in self._MESSAGES: if self.counts[m]: summary += " {}={}".format(m, self.counts[m]) if self.results != SUCCESS: summary += ' ({})'.format(Results[self.results]) return {'step': summary} @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) stdio_log = yield 
self.getLog('stdio') yield stdio_log.finish() # we log 'misc' as syntax-error if self._hasSyntaxError: yield self.addCompleteLog("syntax-error", "\n".join(self.summaries['misc'])) else: for m in self._MESSAGES: if self.counts[m]: yield self.addCompleteLog(m, "\n".join(self.summaries[m])) self.setProperty("pyflakes-{}".format(m), self.counts[m], "pyflakes") self.setProperty("pyflakes-total", sum(self.counts.values()), "pyflakes") if cmd.didFail() or self._hasSyntaxError: return FAILURE for m in self._flunkingIssues: if m in self.counts and self.counts[m] > 0: return FAILURE if sum(self.counts.values()) > 0: return WARNINGS return SUCCESS class PyLint(buildstep.ShellMixin, buildstep.BuildStep): '''A command that knows about pylint output. It is a good idea to add --output-format=parseable to your command, since it includes the filename in the message. ''' name = "pylint" description = "running pylint" descriptionDone = "pylint" # pylint's return codes (see pylint(1) for details) # 1 - 16 will be bit-ORed RC_OK = 0 RC_FATAL = 1 RC_ERROR = 2 RC_WARNING = 4 RC_REFACTOR = 8 RC_CONVENTION = 16 RC_USAGE = 32 # Using the default text output, the message format is : # MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE # with --output-format=parseable it is: (the outer brackets are literal) # FILE_NAME:LINE_NUM: [MESSAGE_TYPE[, OBJECT]] MESSAGE # message type consists of the type char and 4 digits # The message types: _MESSAGES = { 'C': "convention", # for programming standard violation 'R': "refactor", # for bad code smell 'W': "warning", # for python specific problems 'E': "error", # for much probably bugs in the code 'F': "fatal", # error prevented pylint from further processing. 
'I': "info", } _flunkingIssues = ("F", "E") # msg categories that cause FAILURE _msgtypes_re_str = '(?P[{}])'.format(''.join(list(_MESSAGES))) _default_line_re = re.compile(r'^{}(\d+)?: *\d+(, *\d+)?:.+'.format(_msgtypes_re_str)) _default_2_0_0_line_re = \ re.compile(r'^(?P[^:]+):(?P\d+):\d+: *{}(\d+)?:.+'.format(_msgtypes_re_str)) _parseable_line_re = re.compile( r'(?P[^:]+):(?P\d+): \[{}(\d+)?(\([a-z-]+\))?[,\]] .+'.format(_msgtypes_re_str)) def __init__(self, store_results=True, **kwargs): kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) self._store_results = store_results self.counts = {} self.summaries = {} self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self._log_consumer)) # returns (message type, path, line) tuple if line has been matched, or None otherwise def _match_line(self, line): m = self._default_2_0_0_line_re.match(line) if m: try: line_int = int(m.group('line')) except ValueError: line_int = None return (m.group('errtype'), m.group('path'), line_int) m = self._parseable_line_re.match(line) if m: try: line_int = int(m.group('line')) except ValueError: line_int = None return (m.group('errtype'), m.group('path'), line_int) m = self._default_line_re.match(line) if m: return (m.group('errtype'), None, None) return None def _log_consumer(self): for m in self._MESSAGES: self.counts[m] = 0 self.summaries[m] = [] while True: stream, line = yield if stream == 'h': continue ret = self._match_line(line) if not ret: continue msgtype, path, line_number = ret assert msgtype in self._MESSAGES self.summaries[msgtype].append(line) self.counts[msgtype] += 1 if self._store_results and path is not None: self.addTestResult(self._result_setid, line, test_name=None, test_code_path=path, line=line_number) def getResultSummary(self): summary = ' '.join(self.descriptionDone) for msg, fullmsg in sorted(self._MESSAGES.items()): if self.counts[msg]: summary += " {}={}".format(fullmsg, self.counts[msg]) if self.results != SUCCESS: summary += ' 
({})'.format(Results[self.results]) return {'step': summary} @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() for msg, fullmsg in sorted(self._MESSAGES.items()): if self.counts[msg]: yield self.addCompleteLog(fullmsg, "\n".join(self.summaries[msg])) self.setProperty("pylint-{}".format(fullmsg), self.counts[msg], 'Pylint') self.setProperty("pylint-total", sum(self.counts.values()), 'Pylint') if cmd.rc & (self.RC_FATAL | self.RC_ERROR | self.RC_USAGE): return FAILURE for msg in self._flunkingIssues: if msg in self.counts and self.counts[msg] > 0: return FAILURE if sum(self.counts.values()) > 0: return WARNINGS return SUCCESS @defer.inlineCallbacks def addTestResultSets(self): if not self._store_results: return self._result_setid = yield self.addTestResultSet('Pylint warnings', 'code_issue', 'message') class Sphinx(buildstep.ShellMixin, buildstep.BuildStep): ''' A Step to build sphinx documentation ''' name = "sphinx" description = "running sphinx" descriptionDone = "sphinx" haltOnFailure = True def __init__(self, sphinx_sourcedir='.', sphinx_builddir=None, sphinx_builder=None, sphinx='sphinx-build', tags=None, defines=None, strict_warnings=False, mode='incremental', **kwargs): if tags is None: tags = [] if defines is None: defines = {} if sphinx_builddir is None: # Who the heck is not interested in the built doc ? 
config.error("Sphinx argument sphinx_builddir is required") if mode not in ('incremental', 'full'): config.error("Sphinx argument mode has to be 'incremental' or" + "'full' is required") self.success = False kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) # build the command command = [sphinx] if sphinx_builder is not None: command.extend(['-b', sphinx_builder]) for tag in tags: command.extend(['-t', tag]) for key in sorted(defines): if defines[key] is None: command.extend(['-D', key]) elif isinstance(defines[key], bool): command.extend(['-D', '{}={}'.format(key, defines[key] and 1 or 0)]) else: command.extend(['-D', '{}={}'.format(key, defines[key])]) if mode == 'full': command.extend(['-E']) # Don't use a saved environment if strict_warnings: command.extend(['-W']) # Convert warnings to errors command.extend([sphinx_sourcedir, sphinx_builddir]) self.command = command self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self._log_consumer)) _msgs = ('WARNING', 'ERROR', 'SEVERE') def _log_consumer(self): self.warnings = [] next_is_warning = False while True: stream, line = yield if line.startswith('build succeeded') or \ line.startswith('no targets are out of date.'): self.success = True elif line.startswith('Warning, treated as error:'): next_is_warning = True else: if next_is_warning: self.warnings.append(line) next_is_warning = False else: for msg in self._msgs: if msg in line: self.warnings.append(line) def getResultSummary(self): summary = '{} {} warnings'.format(self.name, len(self.warnings)) if self.results != SUCCESS: summary += ' ({})'.format(Results[self.results]) return {'step': summary} @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() if self.warnings: yield self.addCompleteLog('warnings', "\n".join(self.warnings)) self.setStatistic('warnings', len(self.warnings)) if self.success: if not self.warnings: 
return SUCCESS return WARNINGS return FAILURE buildbot-3.4.0/master/buildbot/steps/python_twisted.py000066400000000000000000000373631413250514000232240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ BuildSteps that are specific to the Twisted source tree """ import re from twisted.internet import defer from twisted.python import log from buildbot import util from buildbot.process import buildstep from buildbot.process import logobserver from buildbot.process.results import FAILURE from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import shell class HLint(buildstep.ShellMixin, buildstep.BuildStep): """I run a 'lint' checker over a set of .xhtml files. Any deviations from recommended style is flagged and put in the output log. 
This step looks at .changes in the parent Build to extract a list of Lore XHTML files to check.""" name = "hlint" description = "running hlint" descriptionDone = "hlint" warnOnWarnings = True warnOnFailure = True # TODO: track time, but not output warnings = 0 def __init__(self, python=None, **kwargs): kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) self.python = python self.warningLines = [] self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) @defer.inlineCallbacks def run(self): # create the command html_files = set() for f in self.build.allFiles(): if f.endswith(".xhtml") and not f.startswith("sandbox/"): html_files.add(f) # remove duplicates hlintTargets = sorted(list(html_files)) if not hlintTargets: return SKIPPED self.hlintFiles = hlintTargets command = [] if self.python: command.append(self.python) command += ["bin/lore", "-p", "--output", "lint"] + self.hlintFiles cmd = yield self.makeRemoteShellCommand(command=command) yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() yield self.addCompleteLog('warnings', '\n'.join(self.warningLines)) yield self.addCompleteLog("files", "\n".join(self.hlintFiles) + "\n") # warnings are in stdout, rc is always 0, unless the tools break if cmd.didFail(): return FAILURE self.descriptionDone = "{} hlin{}".format(self.warnings, self.warnings == 1 and 't' or 'ts') if self.warnings: return WARNINGS return SUCCESS def logConsumer(self): while True: stream, line = yield if ':' in line: self.warnings += 1 self.warningLines.append(line) class TrialTestCaseCounter(logobserver.LogLineObserver): _line_re = re.compile(r'^(?:Doctest: )?([\w\.]+) \.\.\. 
\[([^\]]+)\]$') def __init__(self): super().__init__() self.numTests = 0 self.finished = False self.counts = {'total': None, 'failures': 0, 'errors': 0, 'skips': 0, 'expectedFailures': 0, 'unexpectedSuccesses': 0, } def outLineReceived(self, line): # different versions of Twisted emit different per-test lines with # the bwverbose reporter. # 2.0.0: testSlave (buildbot.test.test_runner.Create) ... [OK] # 2.1.0: buildbot.test.test_runner.Create.testSlave ... [OK] # 2.4.0: buildbot.test.test_runner.Create.testSlave ... [OK] # Let's just handle the most recent version, since it's the easiest. # Note that doctests create lines line this: # Doctest: viff.field.GF ... [OK] if line.startswith("=" * 40): self.finished = True if not self.finished: m = self._line_re.search(line.strip()) if m: testname, result = m.groups() self.numTests += 1 self.step.setProgress('tests', self.numTests) out = re.search(r'Ran (\d+) tests', line) if out: self.counts['total'] = int(out.group(1)) if (line.startswith("OK") or line.startswith("FAILED ") or line.startswith("PASSED")): # the extra space on FAILED_ is to distinguish the overall # status from an individual test which failed. 
The lack of a # space on the OK is because it may be printed without any # additional text (if there are no skips,etc) out = re.search(r'failures=(\d+)', line) if out: self.counts['failures'] = int(out.group(1)) out = re.search(r'errors=(\d+)', line) if out: self.counts['errors'] = int(out.group(1)) out = re.search(r'skips=(\d+)', line) if out: self.counts['skips'] = int(out.group(1)) out = re.search(r'expectedFailures=(\d+)', line) if out: self.counts['expectedFailures'] = int(out.group(1)) out = re.search(r'unexpectedSuccesses=(\d+)', line) if out: self.counts['unexpectedSuccesses'] = int(out.group(1)) # successes= is a Twisted-2.0 addition, and is not currently used out = re.search(r'successes=(\d+)', line) if out: self.counts['successes'] = int(out.group(1)) UNSPECIFIED = () # since None is a valid choice class Trial(buildstep.ShellMixin, buildstep.BuildStep): """ There are some class attributes which may be usefully overridden by subclasses. 'trialMode' and 'trialArgs' can influence the trial command line. """ name = "trial" progressMetrics = ('output', 'tests', 'test.log') # note: the slash only works on unix workers, of course, but we have # no way to know what the worker uses as a separator. # TODO: figure out something clever. 
logfiles = {"test.log": "_trial_temp/test.log"} # we use test.log to track Progress at the end of __init__() renderables = ['tests', 'jobs'] flunkOnFailure = True python = None trial = "trial" trialMode = ["--reporter=bwverbose"] # requires Twisted-2.1.0 or newer # for Twisted-2.0.0 or 1.3.0, use ["-o"] instead trialArgs = [] jobs = None testpath = UNSPECIFIED # required (but can be None) testChanges = False # TODO: needs better name recurse = False reactor = None randomly = False tests = None # required description = 'testing' descriptionDone = 'tests' def __init__(self, reactor=UNSPECIFIED, python=None, trial=None, testpath=UNSPECIFIED, tests=None, testChanges=None, recurse=None, randomly=None, trialMode=None, trialArgs=None, jobs=None, **kwargs): kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) if python: self.python = python if self.python is not None: if isinstance(self.python, str): self.python = [self.python] for s in self.python: if " " in s: # this is not strictly an error, but I suspect more # people will accidentally try to use python="python2.3 # -Wall" than will use embedded spaces in a python flag log.msg("python= component '%s' has spaces") log.msg("To add -Wall, use python=['python', '-Wall']") why = "python= value has spaces, probably an error" raise ValueError(why) if trial: self.trial = trial if " " in self.trial: raise ValueError("trial= value has spaces") if trialMode is not None: self.trialMode = trialMode if trialArgs is not None: self.trialArgs = trialArgs if jobs is not None: self.jobs = jobs if testpath is not UNSPECIFIED: self.testpath = testpath if self.testpath is UNSPECIFIED: raise ValueError("You must specify testpath= (it can be None)") assert isinstance(self.testpath, str) or self.testpath is None if reactor is not UNSPECIFIED: self.reactor = reactor if tests is not None: self.tests = tests if isinstance(self.tests, str): self.tests = [self.tests] if testChanges is not None: self.testChanges 
= testChanges # self.recurse = True # not sure this is necessary if not self.testChanges and self.tests is None: raise ValueError("Must either set testChanges= or provide tests=") if recurse is not None: self.recurse = recurse if randomly is not None: self.randomly = randomly if self.reactor: self.description = "testing ({})".format(self.reactor) # this counter will feed Progress along the 'test cases' metric self.observer = TrialTestCaseCounter() self.addLogObserver('stdio', self.observer) # this observer consumes multiple lines in a go, so it can't be easily # handled in TrialTestCaseCounter. self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) self.problems = [] self.warnings = {} # text used before commandComplete runs self.text = 'running' def setup_python_path(self): if self.testpath is None: return # this bit produces a list, which can be used by buildbot_worker.runprocess.RunProcess ppath = self.env.get('PYTHONPATH', self.testpath) if isinstance(ppath, str): ppath = [ppath] if self.testpath not in ppath: ppath.insert(0, self.testpath) self.env['PYTHONPATH'] = ppath @defer.inlineCallbacks def run(self): # choose progressMetrics and logfiles based on whether trial is being # run with multiple workers or not. output_observer = logobserver.OutputProgressObserver('test.log') # build up most of the command, then stash it until start() command = [] if self.python: command.extend(self.python) command.append(self.trial) command.extend(self.trialMode) if self.recurse: command.append("--recurse") if self.reactor: command.append("--reactor={}".format(self.reactor)) if self.randomly: command.append("--random=0") command.extend(self.trialArgs) if self.jobs is not None: self.jobs = int(self.jobs) command.append("--jobs=%d" % self.jobs) # using -j/--jobs flag produces more than one test log. 
self.logfiles = {} for i in range(self.jobs): self.logfiles['test.%d.log' % i] = '_trial_temp/%d/test.log' % i self.logfiles['err.%d.log' % i] = '_trial_temp/%d/err.log' % i self.logfiles['out.%d.log' % i] = '_trial_temp/%d/out.log' % i self.addLogObserver('test.%d.log' % i, output_observer) else: # this one just measures bytes of output in _trial_temp/test.log self.addLogObserver('test.log', output_observer) # now that self.build.allFiles() is nailed down, finish building the # command if self.testChanges: for f in self.build.allFiles(): if f.endswith(".py"): command.append("--testmodule={}".format(f)) else: command.extend(self.tests) self.setup_python_path() cmd = yield self.makeRemoteShellCommand(command=command) yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() # figure out all status, then let the various hook functions return # different pieces of it problems = '\n'.join(self.problems) warnings = self.warnings if problems: yield self.addCompleteLog("problems", problems) if warnings: lines = sorted(warnings.keys()) yield self.addCompleteLog("warnings", "".join(lines)) return self.build_results(cmd) def build_results(self, cmd): counts = self.observer.counts total = counts['total'] failures = counts['failures'] errors = counts['errors'] parsed = (total is not None) desc_parts = [] if not cmd.didFail(): if parsed: results = SUCCESS if total: desc_parts += [str(total), total == 1 and "test" or "tests", "passed"] else: desc_parts += ["no tests", "run"] else: results = FAILURE desc_parts += ["testlog", "unparseable"] else: # something failed results = FAILURE if parsed: desc_parts += ["tests"] if failures: desc_parts += [str(failures), failures == 1 and "failure" or "failures"] if errors: desc_parts += [str(errors), errors == 1 and "error" or "errors"] else: desc_parts += ["tests", "failed"] if counts['skips']: desc_parts += [str(counts['skips']), counts['skips'] == 1 and "skip" or "skips"] if counts['expectedFailures']: 
desc_parts += [str(counts['expectedFailures']), "todo" if counts['expectedFailures'] == 1 else "todos"] if self.reactor: desc_parts.append(self.rtext('({})')) self.descriptionDone = util.join_list(desc_parts) return results def rtext(self, fmt='{}'): if self.reactor: rtext = fmt.format(self.reactor) return rtext.replace("reactor", "") return "" def logConsumer(self): while True: stream, line = yield if line.find(" exceptions.DeprecationWarning: ") != -1: # no source warning = line # TODO: consider stripping basedir prefix here self.warnings[warning] = self.warnings.get(warning, 0) + 1 elif (line.find(" DeprecationWarning: ") != -1 or line.find(" UserWarning: ") != -1): # next line is the source warning = line + "\n" + (yield)[1] + "\n" self.warnings[warning] = self.warnings.get(warning, 0) + 1 elif line.find("Warning: ") != -1: warning = line self.warnings[warning] = self.warnings.get(warning, 0) + 1 if line.find("=" * 60) == 0 or line.find("-" * 60) == 0: # read to EOF while True: self.problems.append(line) stream, line = yield class RemovePYCs(shell.ShellCommand): name = "remove_pyc" command = ['find', '.', '-name', "'*.pyc'", '-exec', 'rm', '{}', ';'] description = "removing .pyc files" descriptionDone = "remove .pycs" buildbot-3.4.0/master/buildbot/steps/shell.py000066400000000000000000000511101413250514000212310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.internet import defer from twisted.python.deprecate import deprecatedModuleAttribute from twisted.python.versions import Version from buildbot import config from buildbot.process import buildstep from buildbot.process import logobserver # for existing configurations that import WithProperties from here. We like # to move this class around just to keep our readers guessing. from buildbot.process.properties import WithProperties from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.process.results import worst_status from buildbot.steps.worker import CompositeStepMixin from buildbot.util import join_list _hush_pyflakes = [ WithProperties, ] del _hush_pyflakes class TreeSize(buildstep.ShellMixin, buildstep.BuildStep): name = "treesize" command = ["du", "-s", "-k", "."] description = ["measuring", "tree", "size"] def __init__(self, **kwargs): kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) self.observer = logobserver.BufferLogObserver(wantStdout=True, wantStderr=True) self.addLogObserver('stdio', self.observer) @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() out = self.observer.getStdout() m = re.search(r'^(\d+)', out) kib = None if m: kib = int(m.group(1)) self.setProperty("tree-size-KiB", kib, "treesize") self.descriptionDone = "treesize {} KiB".format(kib) else: self.descriptionDone = "treesize unknown" if cmd.didFail(): return FAILURE if kib is None: return WARNINGS # not sure how 'du' could fail, but whatever return 
SUCCESS class SetPropertyFromCommand(buildstep.ShellMixin, buildstep.BuildStep): name = "setproperty" renderables = ['property'] def __init__(self, property=None, extract_fn=None, strip=True, includeStdout=True, includeStderr=False, **kwargs): kwargs = self.setupShellMixin(kwargs) self.property = property self.extract_fn = extract_fn self.strip = strip self.includeStdout = includeStdout self.includeStderr = includeStderr if not ((property is not None) ^ (extract_fn is not None)): config.error( "Exactly one of property and extract_fn must be set") super().__init__(**kwargs) if self.extract_fn: self.includeStderr = True self.observer = logobserver.BufferLogObserver( wantStdout=self.includeStdout, wantStderr=self.includeStderr) self.addLogObserver('stdio', self.observer) @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() property_changes = {} if self.property: if cmd.didFail(): return FAILURE result = self.observer.getStdout() if self.strip: result = result.strip() propname = self.property self.setProperty(propname, result, "SetPropertyFromCommand Step") property_changes[propname] = result else: new_props = self.extract_fn(cmd.rc, self.observer.getStdout(), self.observer.getStderr()) for k, v in new_props.items(): self.setProperty(k, v, "SetPropertyFromCommand Step") property_changes = new_props props_set = ["{}: {}".format(k, repr(v)) for k, v in sorted(property_changes.items())] yield self.addCompleteLog('property changes', "\n".join(props_set)) if len(property_changes) > 1: self.descriptionDone = '{} properties set'.format(len(property_changes)) elif len(property_changes) == 1: self.descriptionDone = 'property \'{}\' set'.format(list(property_changes)[0]) if cmd.didFail(): return FAILURE return SUCCESS SetProperty = SetPropertyFromCommand deprecatedModuleAttribute(Version("Buildbot", 0, 8, 8), "It has been renamed to SetPropertyFromCommand", 
"buildbot.steps.shell", "SetProperty") class ShellCommand(buildstep.ShellMixin, buildstep.BuildStep): name = 'shell' def __init__(self, **kwargs): if self.__class__ is ShellCommand: if 'command' not in kwargs: config.error("ShellCommand's `command' argument is not specified") # check validity of arguments being passed to RemoteShellCommand valid_rsc_args = [ 'command', 'env', 'want_stdout', 'want_stderr', 'timeout', 'maxTime', 'sigtermTime', 'logfiles', 'lazylogfiles', 'usePTY', 'logEnviron', 'collectStdout', 'collectStderr', 'interruptSignal', 'initialStdin', 'decodeRC', 'stdioLogName', 'workdir', ] + buildstep.BuildStep.parms invalid_args = [] for arg in kwargs: if arg not in valid_rsc_args: invalid_args.append(arg) if invalid_args: config.error("Invalid argument(s) passed to ShellCommand: " + ', '.join(invalid_args)) kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) return cmd.results() class Configure(ShellCommand): name = "configure" haltOnFailure = 1 flunkOnFailure = 1 description = "configuring" descriptionDone = "configure" command = ["./configure"] class WarningCountingShellCommand(buildstep.ShellMixin, CompositeStepMixin, buildstep.BuildStep): renderables = [ 'suppressionFile', 'suppressionList', 'warningPattern', 'directoryEnterPattern', 'directoryLeavePattern', 'maxWarnCount', ] warnCount = 0 warningPattern = '(?i).*warning[: ].*' # The defaults work for GNU Make. 
directoryEnterPattern = ("make.*: Entering directory " "[\u2019\"`'](.*)[\u2019'`\"]") directoryLeavePattern = "make.*: Leaving directory" suppressionFile = None commentEmptyLineRe = re.compile(r"^\s*(#.*)?$") suppressionLineRe = re.compile( r"^\s*(.+?)\s*:\s*(.+?)\s*(?:[:]\s*([0-9]+)(?:-([0-9]+))?\s*)?$") def __init__(self, warningPattern=None, warningExtractor=None, maxWarnCount=None, directoryEnterPattern=None, directoryLeavePattern=None, suppressionFile=None, suppressionList=None, **kwargs): # See if we've been given a regular expression to use to match # warnings. If not, use a default that assumes any line with "warning" # present is a warning. This may lead to false positives in some cases. if warningPattern: self.warningPattern = warningPattern if directoryEnterPattern: self.directoryEnterPattern = directoryEnterPattern if directoryLeavePattern: self.directoryLeavePattern = directoryLeavePattern if suppressionFile: self.suppressionFile = suppressionFile # self.suppressions is already taken, so use something else self.suppressionList = suppressionList if warningExtractor: self.warningExtractor = warningExtractor else: self.warningExtractor = WarningCountingShellCommand.warnExtractWholeLine self.maxWarnCount = maxWarnCount if self.__class__ is WarningCountingShellCommand and not kwargs.get('command'): # WarningCountingShellCommand class is directly instantiated. # Explicitly check that command is set to prevent runtime error # later. config.error("WarningCountingShellCommand's 'command' argument is not specified") kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) self.suppressions = [] self.directoryStack = [] self.warnCount = 0 self.loggedWarnings = [] self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.warningLogConsumer)) def addSuppression(self, suppressionList): """ This method can be used to add patters of warnings that should not be counted. It takes a single argument, a list of patterns. 
Each pattern is a 4-tuple (FILE-RE, WARN-RE, START, END). FILE-RE is a regular expression (string or compiled regexp), or None. If None, the pattern matches all files, else only files matching the regexp. If directoryEnterPattern is specified in the class constructor, matching is against the full path name, eg. src/main.c. WARN-RE is similarly a regular expression matched against the text of the warning, or None to match all warnings. START and END form an inclusive line number range to match against. If START is None, there is no lower bound, similarly if END is none there is no upper bound.""" for fileRe, warnRe, start, end in suppressionList: if fileRe is not None and isinstance(fileRe, str): fileRe = re.compile(fileRe) if warnRe is not None and isinstance(warnRe, str): warnRe = re.compile(warnRe) self.suppressions.append((fileRe, warnRe, start, end)) def warnExtractWholeLine(self, line, match): """ Extract warning text as the whole line. No file names or line numbers.""" return (None, None, line) def warnExtractFromRegexpGroups(self, line, match): """ Extract file name, line number, and warning text as groups (1,2,3) of warningPattern match.""" file = match.group(1) lineNo = match.group(2) if lineNo is not None: lineNo = int(lineNo) text = match.group(3) return (file, lineNo, text) def warningLogConsumer(self): # Now compile a regular expression from whichever warning pattern we're # using wre = self.warningPattern if isinstance(wre, str): wre = re.compile(wre) directoryEnterRe = self.directoryEnterPattern if (directoryEnterRe is not None and isinstance(directoryEnterRe, str)): directoryEnterRe = re.compile(directoryEnterRe) directoryLeaveRe = self.directoryLeavePattern if (directoryLeaveRe is not None and isinstance(directoryLeaveRe, str)): directoryLeaveRe = re.compile(directoryLeaveRe) # Check if each line in the output from this command matched our # warnings regular expressions. 
If did, bump the warnings count and # add the line to the collection of lines with warnings self.loggedWarnings = [] while True: stream, line = yield if directoryEnterRe: match = directoryEnterRe.search(line) if match: self.directoryStack.append(match.group(1)) continue if (directoryLeaveRe and self.directoryStack and directoryLeaveRe.search(line)): self.directoryStack.pop() continue match = wre.match(line) if match: self.maybeAddWarning(self.loggedWarnings, line, match) def maybeAddWarning(self, warnings, line, match): if self.suppressions: (file, lineNo, text) = self.warningExtractor(self, line, match) lineNo = lineNo and int(lineNo) if file is not None and file != "" and self.directoryStack: currentDirectory = '/'.join(self.directoryStack) if currentDirectory is not None and currentDirectory != "": file = "{}/{}".format(currentDirectory, file) # Skip adding the warning if any suppression matches. for fileRe, warnRe, start, end in self.suppressions: if not (file is None or fileRe is None or fileRe.match(file)): continue if not (warnRe is None or warnRe.search(text)): continue if ((start is not None and end is not None) and not (lineNo is not None and start <= lineNo <= end)): continue return warnings.append(line) self.warnCount += 1 @defer.inlineCallbacks def setup_suppression(self): if self.suppressionList is not None: self.addSuppression(self.suppressionList) if self.suppressionFile is not None: data = yield self.getFileContentFromWorker(self.suppressionFile, abandonOnFailure=True) lines = data.split("\n") list = [] for line in lines: if self.commentEmptyLineRe.match(line): continue match = self.suppressionLineRe.match(line) if (match): file, test, start, end = match.groups() if (end is not None): end = int(end) if (start is not None): start = int(start) if end is None: end = start list.append((file, test, start, end)) self.addSuppression(list) @defer.inlineCallbacks def run(self): yield self.setup_suppression() cmd = yield self.makeRemoteShellCommand() yield 
self.runCommand(cmd) yield self.finish_logs() yield self.createSummary() return self.evaluateCommand(cmd) @defer.inlineCallbacks def finish_logs(self): stdio_log = yield self.getLog('stdio') yield stdio_log.finish() @defer.inlineCallbacks def createSummary(self): """ Match log lines against warningPattern. Warnings are collected into another log for this step, and the build-wide 'warnings-count' is updated.""" # If there were any warnings, make the log if lines with warnings # available if self.warnCount: yield self.addCompleteLog("warnings (%d)" % self.warnCount, "\n".join(self.loggedWarnings) + "\n") warnings_stat = self.getStatistic('warnings', 0) self.setStatistic('warnings', warnings_stat + self.warnCount) old_count = self.getProperty("warnings-count", 0) self.setProperty( "warnings-count", old_count + self.warnCount, "WarningCountingShellCommand") def evaluateCommand(self, cmd): result = cmd.results() if (self.maxWarnCount is not None and self.warnCount > self.maxWarnCount): result = worst_status(result, FAILURE) elif self.warnCount: result = worst_status(result, WARNINGS) return result class Compile(WarningCountingShellCommand): name = "compile" haltOnFailure = 1 flunkOnFailure = 1 description = ["compiling"] descriptionDone = ["compile"] command = ["make", "all"] class Test(WarningCountingShellCommand): name = "test" warnOnFailure = 1 description = ["testing"] descriptionDone = ["test"] command = ["make", "test"] def setTestResults(self, total=0, failed=0, passed=0, warnings=0): """ Called by subclasses to set the relevant statistics; this actually adds to any statistics already present """ total += self.getStatistic('tests-total', 0) self.setStatistic('tests-total', total) failed += self.getStatistic('tests-failed', 0) self.setStatistic('tests-failed', failed) warnings += self.getStatistic('tests-warnings', 0) self.setStatistic('tests-warnings', warnings) passed += self.getStatistic('tests-passed', 0) self.setStatistic('tests-passed', passed) def 
getResultSummary(self): description = [] if self.hasStatistic('tests-total'): total = self.getStatistic("tests-total", 0) failed = self.getStatistic("tests-failed", 0) passed = self.getStatistic("tests-passed", 0) warnings = self.getStatistic("tests-warnings", 0) if not total: total = failed + passed + warnings if total: description += [str(total), 'tests'] if passed: description += [str(passed), 'passed'] if warnings: description += [str(warnings), 'warnings'] if failed: description += [str(failed), 'failed'] if description: summary = join_list(description) if self.results != SUCCESS: summary += ' ({})'.format(Results[self.results]) return {'step': summary} return super().getResultSummary() class PerlModuleTestObserver(logobserver.LogLineObserver): def __init__(self, warningPattern): super().__init__() if warningPattern: self.warningPattern = re.compile(warningPattern) else: self.warningPattern = None self.rc = SUCCESS self.total = 0 self.failed = 0 self.warnings = 0 self.newStyle = False self.complete = False failedRe = re.compile(r"Tests: \d+ Failed: (\d+)\)") testsRe = re.compile(r"Files=\d+, Tests=(\d+)") oldFailureCountsRe = re.compile(r"(\d+)/(\d+) subtests failed") oldSuccessCountsRe = re.compile(r"Files=\d+, Tests=(\d+),") def outLineReceived(self, line): if self.warningPattern.match(line): self.warnings += 1 if self.newStyle: if line.startswith('Result: FAIL'): self.rc = FAILURE mo = self.failedRe.search(line) if mo: self.failed += int(mo.group(1)) if self.failed: self.rc = FAILURE mo = self.testsRe.search(line) if mo: self.total = int(mo.group(1)) else: if line.startswith('Test Summary Report'): self.newStyle = True mo = self.oldFailureCountsRe.search(line) if mo: self.failed = int(mo.group(1)) self.total = int(mo.group(2)) self.rc = FAILURE mo = self.oldSuccessCountsRe.search(line) if mo: self.total = int(mo.group(1)) class PerlModuleTest(Test): command = ["prove", "--lib", "lib", "-r", "t"] total = 0 def __init__(self, *args, **kwargs): 
super().__init__(*args, **kwargs) self.observer = PerlModuleTestObserver( warningPattern=self.warningPattern) self.addLogObserver('stdio', self.observer) def evaluateCommand(self, cmd): if self.observer.total: passed = self.observer.total - self.observer.failed self.setTestResults( total=self.observer.total, failed=self.observer.failed, passed=passed, warnings=self.observer.warnings) rc = self.observer.rc if rc == SUCCESS and self.observer.warnings: rc = WARNINGS return rc buildbot-3.4.0/master/buildbot/steps/shellsequence.py000066400000000000000000000120211413250514000227600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import copy from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.process import buildstep from buildbot.process import results from buildbot.warnings import warn_deprecated class ShellArg(results.ResultComputingConfigMixin): publicAttributes = ( results.ResultComputingConfigMixin.resultConfig + ["command", "logname"]) def __init__(self, command=None, logname=None, logfile=None, **kwargs): name = self.__class__.__name__ if command is None: config.error(("the 'command' parameter of {} " "must not be None").format(name)) self.command = command self.logname = logname if logfile is not None: warn_deprecated('2.10.0', "{}: logfile is deprecated, use logname") if self.logname is not None: config.error(("{}: the 'logfile' parameter must not be specified when 'logname' " + "is set").format(name)) self.logname = logfile for k, v in kwargs.items(): if k not in self.resultConfig: config.error(("the parameter '{}' is not " "handled by ShellArg").format(k)) setattr(self, k, v) # we don't validate anything yet as we can have renderables. 
def validateAttributes(self): # only make the check if we have a list if not isinstance(self.command, (str, list)): config.error(("{} is an invalid command, " "it must be a string or a list").format(self.command)) if isinstance(self.command, list): if not all([isinstance(x, str) for x in self.command]): config.error("{} must only have strings in it".format(self.command)) runConfParams = [(p_attr, getattr(self, p_attr)) for p_attr in self.resultConfig] not_bool = [(p_attr, p_val) for (p_attr, p_val) in runConfParams if not isinstance(p_val, bool)] if not_bool: config.error("%r must be booleans" % (not_bool,)) @defer.inlineCallbacks def getRenderingFor(self, build): rv = copy.copy(self) for p_attr in self.publicAttributes: res = yield build.render(getattr(self, p_attr)) setattr(rv, p_attr, res) return rv class ShellSequence(buildstep.ShellMixin, buildstep.BuildStep): last_command = None renderables = ['commands'] def __init__(self, commands=None, **kwargs): self.commands = commands kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) def shouldRunTheCommand(self, cmd): return bool(cmd) def getFinalState(self): return self.describe(True) @defer.inlineCallbacks def runShellSequence(self, commands): terminate = False if commands is None: log.msg("After rendering, ShellSequence `commands` is None") return results.EXCEPTION overall_result = results.SUCCESS for arg in commands: if not isinstance(arg, ShellArg): log.msg("After rendering, ShellSequence `commands` list " "contains something that is not a ShellArg") return results.EXCEPTION try: arg.validateAttributes() except config.ConfigErrors as e: log.msg("After rendering, ShellSequence `commands` is invalid: {}".format(e)) return results.EXCEPTION # handle the command from the arg command = arg.command if not self.shouldRunTheCommand(command): continue # keep the command around so we can describe it self.last_command = command cmd = yield self.makeRemoteShellCommand(command=command, 
stdioLogName=arg.logname) yield self.runCommand(cmd) overall_result, terminate = results.computeResultAndTermination( arg, cmd.results(), overall_result) if terminate: break return overall_result def run(self): return self.runShellSequence(self.commands) buildbot-3.4.0/master/buildbot/steps/source/000077500000000000000000000000001413250514000210525ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/steps/source/__init__.py000066400000000000000000000014131413250514000231620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.steps.source.base import Source _hush_pyflakes = [Source] buildbot-3.4.0/master/buildbot/steps/source/base.py000066400000000000000000000303561413250514000223450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.process import buildstep from buildbot.process import properties from buildbot.process import remotecommand from buildbot.process.results import FAILURE from buildbot.steps.worker import CompositeStepMixin from buildbot.util import bytes2unicode class Source(buildstep.BuildStep, CompositeStepMixin): """This is a base class to generate a source tree in the worker. Each version control system has a specialized subclass, and is expected to override __init__ and implement computeSourceRevision() and run_vc(). The class as a whole builds up the self.args dictionary, then starts a RemoteCommand with those arguments. """ renderables = ['description', 'descriptionDone', 'descriptionSuffix', 'workdir', 'env'] description = None # set this to a list of short strings to override descriptionDone = None # alternate description when the step is complete descriptionSuffix = None # extra information to append to suffix # if the checkout fails, there's no point in doing anything else haltOnFailure = True flunkOnFailure = True branch = None # the default branch, should be set in __init__ def __init__(self, workdir=None, mode='update', alwaysUseLatest=False, timeout=20 * 60, retry=None, env=None, logEnviron=True, description=None, descriptionDone=None, descriptionSuffix=None, codebase='', **kwargs): """ @type workdir: string @param workdir: local directory (relative to the Builder's root) where the tree should be placed @type alwaysUseLatest: boolean @param alwaysUseLatest: whether to always update to the most recent available sources for this build. 
Normally the Source step asks its Build for a list of all Changes that are supposed to go into the build, then computes a 'source stamp' (revision number or timestamp) that will cause exactly that set of changes to be present in the checked out tree. This is turned into, e.g., 'cvs update -D timestamp', or 'svn update -r revnum'. If alwaysUseLatest=True, bypass this computation and always update to the latest available sources for each build. The source stamp helps avoid a race condition in which someone commits a change after the master has decided to start a build but before the worker finishes checking out the sources. At best this results in a build which contains more changes than the buildmaster thinks it has (possibly resulting in the wrong person taking the blame for any problems that result), at worst is can result in an incoherent set of sources (splitting a non-atomic commit) which may not build at all. @type logEnviron: boolean @param logEnviron: If this option is true (the default), then the step's logfile will describe the environment variables on the worker. In situations where the environment is not relevant and is long, it may be easier to set logEnviron=False. @type codebase: string @param codebase: Specifies which changes in a build are processed by the step. The default codebase value is ''. The codebase must correspond to a codebase assigned by the codebaseGenerator. If no codebaseGenerator is defined in the master then codebase doesn't need to be set, the default value will then match all changes. 
""" descriptions_for_mode = { "clobber": "checkout", "export": "exporting"} descriptionDones_for_mode = { "clobber": "checkout", "export": "export"} if not description: description = [descriptions_for_mode.get(mode, "updating")] if not descriptionDone: descriptionDone = [descriptionDones_for_mode.get(mode, "update")] if not descriptionSuffix and codebase: descriptionSuffix = [codebase] super().__init__(description=description, descriptionDone=descriptionDone, descriptionSuffix=descriptionSuffix, **kwargs) # This will get added to args later, after properties are rendered self.workdir = workdir self.sourcestamp = None self.codebase = codebase if self.codebase: self.name = properties.Interpolate( "%(kw:name)s-%(kw:codebase)s", name=self.name, codebase=self.codebase) self.alwaysUseLatest = alwaysUseLatest self.logEnviron = logEnviron self.env = env self.timeout = timeout self.retry = retry def _hasAttrGroupMember(self, attrGroup, attr): """ The hasattr equivalent for attribute groups: returns whether the given member is in the attribute group. """ method_name = '{}_{}'.format(attrGroup, attr) return hasattr(self, method_name) def _getAttrGroupMember(self, attrGroup, attr): """ The getattr equivalent for attribute groups: gets and returns the attribute group member. """ method_name = '{}_{}'.format(attrGroup, attr) return getattr(self, method_name) def _listAttrGroupMembers(self, attrGroup): """ Returns a list of all members in the attribute group. """ from inspect import getmembers, ismethod methods = getmembers(self, ismethod) group_prefix = attrGroup + '_' group_len = len(group_prefix) group_members = [method[0][group_len:] for method in methods if method[0].startswith(group_prefix)] return group_members def updateSourceProperty(self, name, value, source=''): """ Update a property, indexing the property by codebase if codebase is not ''. Source steps should generally use this instead of setProperty. 
""" # pick a decent source name if source == '': source = self.__class__.__name__ if self.codebase != '': assert not isinstance(self.getProperty(name, None), str), \ "Sourcestep {} has a codebase, other sourcesteps don't".format(self.name) property_dict = self.getProperty(name, {}) property_dict[self.codebase] = value super().setProperty(name, property_dict, source) else: assert not isinstance(self.getProperty(name, None), dict), \ "Sourcestep {} does not have a codebase, other sourcesteps do".format(self.name) super().setProperty(name, value, source) def computeSourceRevision(self, changes): """Each subclass must implement this method to do something more precise than -rHEAD every time. For version control systems that use repository-wide change numbers (SVN, P4), this can simply take the maximum such number from all the changes involved in this build. For systems that do not (CVS), it needs to create a timestamp based upon the latest Change, the Build's treeStableTimer, and an optional self.checkoutDelay value.""" return None @defer.inlineCallbacks def applyPatch(self, patch): patch_command = ['patch', '-p{}'.format(patch[0]), '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff'] cmd = remotecommand.RemoteShellCommand(self.workdir, patch_command, env=self.env, logEnviron=self.logEnviron) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.didFail(): raise buildstep.BuildStepFailed() return cmd.rc @defer.inlineCallbacks def patch(self, patch): diff = patch[1] root = None if len(patch) >= 3: root = patch[2] if root: workdir_root = self.build.path_module.join(self.workdir, root) workdir_root_abspath = self.build.path_module.abspath(workdir_root) workdir_abspath = self.build.path_module.abspath(self.workdir) if workdir_root_abspath.startswith(workdir_abspath): self.workdir = workdir_root yield self.downloadFileContentToWorker('.buildbot-diff', diff) yield self.downloadFileContentToWorker('.buildbot-patched', 'patched\n') yield 
self.applyPatch(patch) cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.build.path_module.join(self.workdir, ".buildbot-diff"), 'logEnviron': self.logEnviron}) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.didFail(): raise buildstep.BuildStepFailed() return cmd.rc def sourcedirIsPatched(self): d = self.pathExists( self.build.path_module.join(self.workdir, '.buildbot-patched')) return d @defer.inlineCallbacks def run(self): if getattr(self, 'startVC', None) is not None: msg = 'Old-style source steps are no longer supported. Please convert your custom ' \ 'source step to new style (replace startVC with run_vc and convert all used ' \ 'old style APIs to new style). Please consider contributing the source step to ' \ 'upstream BuildBot so that such migrations can be avoided in the future.' raise NotImplementedError(msg) if not self.alwaysUseLatest: # what source stamp would this step like to use? s = self.build.getSourceStamp(self.codebase) self.sourcestamp = s if self.sourcestamp: # if branch is None, then use the Step's "default" branch branch = s.branch or self.branch # if revision is None, use the latest sources (-rHEAD) revision = s.revision if not revision: revision = self.computeSourceRevision(s.changes) # the revision property is currently None, so set it to something # more interesting if revision is not None: self.updateSourceProperty('revision', str(revision)) # if patch is None, then do not patch the tree after checkout # 'patch' is None or a tuple of (patchlevel, diff, root) # root is optional. 
patch = s.patch if patch: yield self.addCompleteLog("patch", bytes2unicode(patch[1], errors='ignore')) else: log.msg("No sourcestamp found in build for codebase '{}'".format(self.codebase)) self.descriptionDone = "Codebase {} not in build".format(self.codebase) yield self.addCompleteLog("log", "No sourcestamp found in build for codebase '{}'".format( self.codebase)) return FAILURE else: revision = None branch = self.branch patch = None res = yield self.run_vc(branch, revision, patch) return res buildbot-3.4.0/master/buildbot/steps/source/bzr.py000066400000000000000000000212261413250514000222240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.interfaces import WorkerSetupError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process import results from buildbot.steps.source.base import Source class Bzr(Source): name = 'bzr' renderables = ['repourl', 'baseURL'] def __init__(self, repourl=None, baseURL=None, mode='incremental', method=None, defaultBranch=None, **kwargs): self.repourl = repourl self.baseURL = baseURL self.branch = defaultBranch self.mode = mode self.method = method super().__init__(**kwargs) if repourl and baseURL: raise ValueError("you must provide exactly one of repourl and" " baseURL") if repourl is None and baseURL is None: raise ValueError("you must provide at least one of repourl and" " baseURL") if baseURL is not None and defaultBranch is None: raise ValueError("you must provide defaultBranch with baseURL") if not self._hasAttrGroupMember('mode', self.mode): raise ValueError("mode {} is not one of {}".format(self.mode, self._listAttrGroupMembers('mode'))) if self.mode == 'full': assert self.method in ['clean', 'fresh', 'clobber', 'copy', None] @defer.inlineCallbacks def run_vc(self, branch, revision, patch): if branch: self.branch = branch self.revision = revision self.method = self._getMethod() self.stdio_log = yield self.addLogForRemoteCommands("stdio") if self.repourl is None: self.repourl = os.path.join(self.baseURL, self.branch) installed = yield self.checkBzr() if not installed: raise WorkerSetupError("bzr is not installed on worker") patched = yield self.sourcedirIsPatched() if patched: yield self._dovccmd(['clean-tree', '--ignored', '--force']) yield self._getAttrGroupMember('mode', self.mode)() if patch: yield self.patch(patch) yield self.parseGotRevision() return results.SUCCESS @defer.inlineCallbacks def mode_incremental(self): updatable = yield 
self._sourcedirIsUpdatable() if updatable: command = ['update'] if self.revision: command.extend(['-r', self.revision]) yield self._dovccmd(command) else: yield self._doFull() @defer.inlineCallbacks def mode_full(self): if self.method == 'clobber': yield self.clobber() return elif self.method == 'copy': self.workdir = 'source' yield self.copy() return updatable = self._sourcedirIsUpdatable() if not updatable: log.msg("No bzr repo present, making full checkout") yield self._doFull() elif self.method == 'clean': yield self.clean() elif self.method == 'fresh': yield self.fresh() else: raise ValueError("Unknown method, check your configuration") @defer.inlineCallbacks def _clobber(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.rc != 0: raise RuntimeError("Failed to delete directory") @defer.inlineCallbacks def clobber(self): yield self._clobber() yield self._doFull() @defer.inlineCallbacks def copy(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': 'build', 'logEnviron': self.logEnviron, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) yield self.mode_incremental() cmd = remotecommand.RemoteCommand('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': self.logEnviron, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) def clean(self): d = self._dovccmd(['clean-tree', '--ignored', '--force']) command = ['update'] if self.revision: command.extend(['-r', self.revision]) d.addCallback(lambda _: self._dovccmd(command)) return d def fresh(self): d = self._dovccmd(['clean-tree', '--force']) command = ['update'] if self.revision: command.extend(['-r', self.revision]) d.addCallback(lambda _: self._dovccmd(command)) return d @defer.inlineCallbacks def _doFull(self): command = ['checkout', self.repourl, '.'] if self.revision: command.extend(['-r', self.revision]) if self.retry: abandonOnFailure = (self.retry[1] 
<= 0) else: abandonOnFailure = True res = yield self._dovccmd(command, abandonOnFailure=abandonOnFailure) if self.retry: if self.stopped or res == 0: return res delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self._clobber()) df.addCallback(lambda _: self._doFull()) reactor.callLater(delay, df.callback, None) res = yield df return res def _sourcedirIsUpdatable(self): return self.pathExists(self.build.path_module.join(self.workdir, '.bzr')) def computeSourceRevision(self, changes): if not changes: return None lastChange = max([int(c.revision) for c in changes]) return lastChange def _dovccmd(self, command, abandonOnFailure=True, collectStdout=False): cmd = remotecommand.RemoteShellCommand(self.workdir, ['bzr'] + command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def evaluateCommand(_): if abandonOnFailure and cmd.didFail(): log.msg("Source step failed while running command {}".format(cmd)) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout return cmd.rc return d def checkBzr(self): d = self._dovccmd(['--version']) @d.addCallback def check(res): return res == 0 return d def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' return None @defer.inlineCallbacks def parseGotRevision(self): stdout = yield self._dovccmd(["version-info", "--custom", "--template='{revno}"], collectStdout=True) revision = stdout.strip("'") try: int(revision) except ValueError as e: log.msg("Invalid revision number") raise buildstep.BuildStepFailed() from e log.msg("Got Git revision {}".format(revision)) 
self.updateSourceProperty('got_revision', revision) buildbot-3.4.0/master/buildbot/steps/source/cvs.py000066400000000000000000000302321413250514000222170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re import time from email.utils import formatdate from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.interfaces import WorkerSetupError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process import results from buildbot.process.remotetransfer import StringFileWriter from buildbot.steps.source.base import Source class CVS(Source): name = "cvs" renderables = ["cvsroot"] def __init__(self, cvsroot=None, cvsmodule='', mode='incremental', method=None, branch=None, global_options=None, extra_options=None, login=None, **kwargs): self.cvsroot = cvsroot self.cvsmodule = cvsmodule self.branch = branch if global_options is None: global_options = [] self.global_options = global_options if extra_options is None: extra_options = [] self.extra_options = extra_options self.login = login self.mode = mode self.method = method self.srcdir = 'source' if not self._hasAttrGroupMember('mode', self.mode): raise ValueError("mode {} is not one of {}".format(self.mode, 
self._listAttrGroupMembers('mode'))) super().__init__(**kwargs) @defer.inlineCallbacks def run_vc(self, branch, revision, patch): self.branch = branch self.revision = revision self.stdio_log = yield self.addLogForRemoteCommands("stdio") self.method = self._getMethod() installed = yield self.checkCvs() if not installed: raise WorkerSetupError("CVS is not installed on worker") yield self.checkLogin() patched = yield self.sourcedirIsPatched() if patched: yield self.purge(False) yield self._getAttrGroupMember('mode', self.mode)() if patch: yield self.patch(patch) yield self.parseGotRevision() return results.SUCCESS @defer.inlineCallbacks def mode_incremental(self): updatable = yield self._sourcedirIsUpdatable() if updatable: rv = yield self.doUpdate() else: rv = yield self.clobber() return rv @defer.inlineCallbacks def mode_full(self): if self.method == 'clobber': rv = yield self.clobber() return rv elif self.method == 'copy': rv = yield self.copy() return rv updatable = yield self._sourcedirIsUpdatable() if not updatable: log.msg("CVS repo not present, making full checkout") rv = yield self.doCheckout(self.workdir) elif self.method == 'clean': rv = yield self.clean() elif self.method == 'fresh': rv = yield self.fresh() else: raise ValueError("Unknown method, check your configuration") return rv @defer.inlineCallbacks def _clobber(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout}) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.rc: raise RuntimeError("Failed to delete directory") @defer.inlineCallbacks def clobber(self): yield self._clobber() res = yield self.doCheckout(self.workdir) return res @defer.inlineCallbacks def fresh(self, ): yield self.purge(True) res = yield self.doUpdate() return res @defer.inlineCallbacks def clean(self, ): yield self.purge(False) res = yield self.doUpdate() return res @defer.inlineCallbacks def copy(self): cmd = 
remotecommand.RemoteCommand('rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout}) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) old_workdir = self.workdir self.workdir = self.srcdir yield self.mode_incremental() cmd = remotecommand.RemoteCommand('cpdir', { 'fromdir': self.srcdir, 'todir': old_workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout}) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) self.workdir = old_workdir return results.SUCCESS @defer.inlineCallbacks def purge(self, ignore_ignores): command = ['cvsdiscard'] if ignore_ignores: command += ['--ignore'] cmd = remotecommand.RemoteShellCommand(self.workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.didFail(): raise buildstep.BuildStepFailed() @defer.inlineCallbacks def doCheckout(self, dir): command = ['-d', self.cvsroot, '-z3', 'checkout', '-d', dir] command = self.global_options + command + self.extra_options if self.branch: command += ['-r', self.branch] if self.revision: command += ['-D', self.revision] command += [self.cvsmodule] if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True res = yield self._dovccmd(command, '', abandonOnFailure=abandonOnFailure) if self.retry: if self.stopped or res == 0: return res delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self._clobber()) df.addCallback(lambda _: self.doCheckout(self.workdir)) reactor.callLater(delay, df.callback, None) res = yield df return res @defer.inlineCallbacks def doUpdate(self): command = ['-z3', 'update', '-dP'] branch = self.branch # special case. 
'cvs update -r HEAD -D today' gives no files; see #2351 if branch == 'HEAD' and self.revision: branch = None if branch: command += ['-r', self.branch] if self.revision: command += ['-D', self.revision] res = yield self._dovccmd(command) return res @defer.inlineCallbacks def checkLogin(self): if self.login: yield self._dovccmd(['-d', self.cvsroot, 'login'], initialStdin=self.login + "\n") @defer.inlineCallbacks def _dovccmd(self, command, workdir=None, abandonOnFailure=True, initialStdin=None): if workdir is None: workdir = self.workdir if not command: raise ValueError("No command specified") cmd = remotecommand.RemoteShellCommand(workdir, ['cvs'] + command, env=self.env, timeout=self.timeout, logEnviron=self.logEnviron, initialStdin=initialStdin) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.rc != 0 and abandonOnFailure: log.msg("Source step failed while running command {}".format(cmd)) raise buildstep.BuildStepFailed() return cmd.rc def _cvsEntriesContainStickyDates(self, entries): for line in entries.splitlines(): if line == 'D': # the last line contains just a single 'D' pass elif line.split('/')[-1].startswith('D'): # fields are separated by slashes, the last field contains the tag or date # sticky dates start with 'D' return True return False # no sticky dates @defer.inlineCallbacks def _sourcedirIsUpdatable(self): myFileWriter = StringFileWriter() args = { 'workdir': self.build.path_module.join(self.workdir, 'CVS'), 'writer': myFileWriter, 'maxsize': None, 'blocksize': 32 * 1024, } def uploadFileArgs(source): full_args = dict(args) if self.workerVersionIsOlderThan('uploadFile', '3.0'): full_args['slavesrc'] = source else: full_args['workersrc'] = source return full_args cmd = remotecommand.RemoteCommand('uploadFile', uploadFileArgs('Root'), ignore_updates=True) yield self.runCommand(cmd) if cmd.rc is not None and cmd.rc != 0: return False # on Windows, the cvsroot may not contain the password, so compare to # both cvsroot_without_pw = 
re.sub("(:pserver:[^:]*):[^@]*(@.*)", r"\1\2", self.cvsroot) if myFileWriter.buffer.strip() not in (self.cvsroot, cvsroot_without_pw): return False myFileWriter.buffer = "" cmd = remotecommand.RemoteCommand('uploadFile', uploadFileArgs('Repository'), ignore_updates=True) yield self.runCommand(cmd) if cmd.rc is not None and cmd.rc != 0: return False if myFileWriter.buffer.strip() != self.cvsmodule: return False # if there are sticky dates (from an earlier build with revision), # we can't update (unless we remove those tags with cvs update -A) myFileWriter.buffer = "" cmd = remotecommand.RemoteCommand('uploadFile', uploadFileArgs('Entries'), ignore_updates=True) yield self.runCommand(cmd) if cmd.rc is not None and cmd.rc != 0: return False if self._cvsEntriesContainStickyDates(myFileWriter.buffer): return False return True def parseGotRevision(self): revision = time.strftime("%Y-%m-%d %H:%M:%S +0000", time.gmtime()) self.updateSourceProperty('got_revision', revision) @defer.inlineCallbacks def checkCvs(self): res = yield self._dovccmd(['--version']) return res == 0 def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' return None def computeSourceRevision(self, changes): if not changes: return None lastChange = max([c.when for c in changes]) lastSubmit = max([br.submittedAt for br in self.build.requests]) when = (lastChange + lastSubmit) / 2 return formatdate(when) buildbot-3.4.0/master/buildbot/steps/source/darcs.py000066400000000000000000000170171413250514000225260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Source step code for darcs """ from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.config import ConfigErrors from buildbot.interfaces import WorkerSetupError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process import results from buildbot.process.results import SUCCESS from buildbot.steps.source.base import Source class Darcs(Source): """ Class for Darcs with all smarts """ name = 'darcs' renderables = ['repourl'] possible_methods = ('clobber', 'copy') def __init__(self, repourl=None, mode='incremental', method=None, **kwargs): self.repourl = repourl self.method = method self.mode = mode super().__init__(**kwargs) errors = [] if not self._hasAttrGroupMember('mode', self.mode): errors.append("mode {} is not one of {}".format(self.mode, self._listAttrGroupMembers('mode'))) if self.mode == 'incremental' and self.method: errors.append("Incremental mode does not require method") if self.mode == 'full': if self.method is None: self.method = 'copy' elif self.method not in self.possible_methods: errors.append("Invalid method for mode == {}".format(self.mode)) if repourl is None: errors.append("you must provide repourl") if errors: raise ConfigErrors(errors) @defer.inlineCallbacks def run_vc(self, branch, revision, patch): self.revision = revision self.stdio_log = yield self.addLogForRemoteCommands("stdio") installed = yield self.checkDarcs() if not installed: raise 
WorkerSetupError("Darcs is not installed on worker") patched = yield self.sourcedirIsPatched() if patched: yield self.copy() yield self._getAttrGroupMember('mode', self.mode)() if patch: yield self.patch(patch) yield self.parseGotRevision() return results.SUCCESS @defer.inlineCallbacks def checkDarcs(self): cmd = remotecommand.RemoteShellCommand(self.workdir, ['darcs', '--version'], env=self.env, logEnviron=self.logEnviron, timeout=self.timeout) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) return cmd.rc == 0 @defer.inlineCallbacks def mode_full(self): if self.method == 'clobber': yield self.clobber() return elif self.method == 'copy': yield self.copy() return @defer.inlineCallbacks def mode_incremental(self): updatable = yield self._sourcedirIsUpdatable() if not updatable: yield self._checkout() else: command = ['darcs', 'pull', '--all', '--verbose'] yield self._dovccmd(command) @defer.inlineCallbacks def copy(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) self.workdir = 'source' yield self.mode_incremental() cmd = remotecommand.RemoteCommand('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) self.workdir = 'build' @defer.inlineCallbacks def clobber(self): yield self.runRmdir(self.workdir) yield self._checkout() @defer.inlineCallbacks def _clone(self, abandonOnFailure=False): command = ['darcs', 'get', '--verbose', '--lazy', '--repo-name', self.workdir] if self.revision: yield self.downloadFileContentToWorker('.darcs-context', self.revision) command.append('--context') command.append('.darcs-context') command.append(self.repourl) yield self._dovccmd(command, abandonOnFailure=abandonOnFailure, wkdir='.') @defer.inlineCallbacks def _checkout(self): if self.retry: abandonOnFailure = 
(self.retry[1] <= 0) else: abandonOnFailure = True res = yield self._clone(abandonOnFailure) if self.retry: if self.stopped or res == 0: return res delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self.runRmdir(self.workdir)) df.addCallback(lambda _: self._checkout()) reactor.callLater(delay, df.callback, None) res = yield df return res @defer.inlineCallbacks def parseGotRevision(self): revision = yield self._dovccmd(['darcs', 'changes', '--max-count=1'], collectStdout=True) self.updateSourceProperty('got_revision', revision) @defer.inlineCallbacks def _dovccmd(self, command, collectStdout=False, initialStdin=None, decodeRC=None, abandonOnFailure=True, wkdir=None): if not command: raise ValueError("No command specified") if decodeRC is None: decodeRC = {0: SUCCESS} workdir = wkdir or self.workdir cmd = remotecommand.RemoteShellCommand(workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout, initialStdin=initialStdin, decodeRC=decodeRC) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if abandonOnFailure and cmd.didFail(): log.msg("Source step failed while running command {}".format(cmd)) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout return cmd.rc def _sourcedirIsUpdatable(self): return self.pathExists(self.build.path_module.join(self.workdir, '_darcs')) buildbot-3.4.0/master/buildbot/steps/source/gerrit.py000066400000000000000000000036661413250514000227330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.steps.source.git import Git class Gerrit(Git): def run_vc(self, branch, revision, patch): gerrit_branch = None changed_project = self.build.getProperty('event.change.project') if (not self.sourcestamp or (self.sourcestamp.project != changed_project)): # If we don't have a sourcestamp, or the project is wrong, this # isn't the repo that's changed. Drop through and check out the # head of the given branch pass elif self.build.hasProperty("event.patchSet.ref"): gerrit_branch = self.build.getProperty("event.patchSet.ref") self.updateSourceProperty("gerrit_branch", gerrit_branch) else: try: change = self.build.getProperty("gerrit_change", '').split('/') if len(change) == 2: gerrit_branch = "refs/changes/%2.2d/%d/%d" \ % (int(change[0]) % 100, int(change[0]), int(change[1])) self.updateSourceProperty("gerrit_branch", gerrit_branch) except Exception: pass branch = gerrit_branch or branch return super().run_vc(branch, revision, patch) buildbot-3.4.0/master/buildbot/steps/source/git.py000066400000000000000000000643641413250514000222240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot import config as bbconfig from buildbot.interfaces import WorkerSetupError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.steps.source.base import Source from buildbot.steps.worker import CompositeStepMixin from buildbot.util.git import RC_SUCCESS from buildbot.util.git import GitStepMixin GIT_HASH_LENGTH = 40 def isTrueOrIsExactlyZero(v): # nonzero values are true... if v: return True # ... and True for the number zero, but we have to # explicitly guard against v==False, since # isinstance(False, int) is surprisingly True if isinstance(v, int) and v is not False: return True # all other false-ish values are false return False git_describe_flags = [ # on or off ('all', lambda v: ['--all'] if v else None), ('always', lambda v: ['--always'] if v else None), ('contains', lambda v: ['--contains'] if v else None), ('debug', lambda v: ['--debug'] if v else None), ('long', lambda v: ['--long'] if v else None), ('exact-match', lambda v: ['--exact-match'] if v else None), ('tags', lambda v: ['--tags'] if v else None), # string parameter ('match', lambda v: ['--match', v] if v else None), # numeric parameter ('abbrev', lambda v: ['--abbrev={}'.format(v)] if isTrueOrIsExactlyZero(v) else None), ('candidates', lambda v: ['--candidates={}'.format(v)] if isTrueOrIsExactlyZero(v) else None), # optional string parameter ('dirty', lambda v: ['--dirty'] if (v is True or v == '') else None), ('dirty', lambda v: ['--dirty={}'.format(v)] if (v and v is not True) else None), ] class Git(Source, GitStepMixin): name = 'git' 
renderables = ["repourl", "reference", "branch", "codebase", "mode", "method", "origin"] def __init__(self, repourl=None, branch='HEAD', mode='incremental', method=None, reference=None, submodules=False, remoteSubmodules=False, shallow=False, filters=None, progress=True, retryFetch=False, clobberOnFailure=False, getDescription=False, config=None, origin=None, sshPrivateKey=None, sshHostKey=None, sshKnownHosts=None, **kwargs): if not getDescription and not isinstance(getDescription, dict): getDescription = False self.branch = branch self.method = method self.repourl = repourl self.reference = reference self.retryFetch = retryFetch self.submodules = submodules self.remoteSubmodules = remoteSubmodules self.shallow = shallow self.filters = filters self.clobberOnFailure = clobberOnFailure self.mode = mode self.prog = progress self.getDescription = getDescription self.sshPrivateKey = sshPrivateKey self.sshHostKey = sshHostKey self.sshKnownHosts = sshKnownHosts self.config = config self.srcdir = 'source' self.origin = origin super().__init__(**kwargs) self.setupGitStep() if isinstance(self.mode, str): if not self._hasAttrGroupMember('mode', self.mode): bbconfig.error("Git: mode must be {}".format( ' or '.join(self._listAttrGroupMembers('mode')))) if isinstance(self.method, str): if self.mode == 'full' and \ self.method not in ['clean', 'fresh', 'clobber', 'copy', None]: bbconfig.error("Git: invalid method for mode 'full'.") if self.shallow and (self.mode != 'full' or self.method != 'clobber'): bbconfig.error( "Git: shallow only possible with mode 'full' and method 'clobber'.") if not isinstance(self.getDescription, (bool, dict)): bbconfig.error("Git: getDescription must be a boolean or a dict.") @defer.inlineCallbacks def run_vc(self, branch, revision, patch): self.branch = branch or 'HEAD' self.revision = revision self.method = self._getMethod() self.stdio_log = yield self.addLogForRemoteCommands("stdio") try: gitInstalled = yield self.checkFeatureSupport() if not 
gitInstalled: raise WorkerSetupError("git is not installed on worker") patched = yield self.sourcedirIsPatched() if patched: yield self._dovccmd(['clean', '-f', '-f', '-d', '-x']) yield self._downloadSshPrivateKeyIfNeeded() yield self._getAttrGroupMember('mode', self.mode)() if patch: yield self.patch(patch) yield self.parseGotRevision() res = yield self.parseCommitDescription() yield self._removeSshPrivateKeyIfNeeded() return res except Exception: yield self._removeSshPrivateKeyIfNeeded() raise @defer.inlineCallbacks def mode_full(self): if self.method == 'clobber': yield self.clobber() return elif self.method == 'copy': yield self.copy() return action = yield self._sourcedirIsUpdatable() if action == "clobber": yield self.clobber() return elif action == "clone": log.msg("No git repo present, making full clone") yield self._fullCloneOrFallback() elif self.method == 'clean': yield self.clean() elif self.method == 'fresh': yield self.fresh() else: raise ValueError("Unknown method, check your configuration") @defer.inlineCallbacks def mode_incremental(self): action = yield self._sourcedirIsUpdatable() # if not updatable, do a full checkout if action == "clobber": yield self.clobber() return elif action == "clone": log.msg("No git repo present, making full clone") yield self._fullCloneOrFallback() return yield self._fetchOrFallback() yield self._syncSubmodule(None) yield self._updateSubmodule(None) @defer.inlineCallbacks def clean(self): clean_command = ['clean', '-f', '-f', '-d'] rc = yield self._dovccmd(clean_command) if rc != RC_SUCCESS: raise buildstep.BuildStepFailed rc = yield self._fetchOrFallback() if rc != RC_SUCCESS: raise buildstep.BuildStepFailed rc = yield self._syncSubmodule() if rc != RC_SUCCESS: raise buildstep.BuildStepFailed rc = yield self._updateSubmodule() if rc != RC_SUCCESS: raise buildstep.BuildStepFailed rc = yield self._cleanSubmodule() if rc != RC_SUCCESS: raise buildstep.BuildStepFailed if self.submodules: rc = yield 
self._dovccmd(clean_command) if rc != RC_SUCCESS: raise buildstep.BuildStepFailed return RC_SUCCESS @defer.inlineCallbacks def clobber(self): yield self._doClobber() res = yield self._fullClone(shallowClone=self.shallow) if res != RC_SUCCESS: raise buildstep.BuildStepFailed @defer.inlineCallbacks def fresh(self): clean_command = ['clean', '-f', '-f', '-d', '-x'] res = yield self._dovccmd(clean_command, abandonOnFailure=False) if res == RC_SUCCESS: yield self._fetchOrFallback() else: yield self._doClobber() yield self._fullCloneOrFallback() yield self._syncSubmodule() yield self._updateSubmodule() yield self._cleanSubmodule() if self.submodules: yield self._dovccmd(clean_command) @defer.inlineCallbacks def copy(self): yield self.runRmdir(self.workdir, abandonOnFailure=False, timeout=self.timeout) old_workdir = self.workdir self.workdir = self.srcdir try: yield self.mode_incremental() cmd = remotecommand.RemoteCommand('cpdir', {'fromdir': self.srcdir, 'todir': old_workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.didFail(): raise buildstep.BuildStepFailed() return RC_SUCCESS finally: self.workdir = old_workdir @defer.inlineCallbacks def parseGotRevision(self, _=None): stdout = yield self._dovccmd(['rev-parse', 'HEAD'], collectStdout=True) revision = stdout.strip() if len(revision) != GIT_HASH_LENGTH: raise buildstep.BuildStepFailed() log.msg("Got Git revision {}".format(revision)) self.updateSourceProperty('got_revision', revision) return RC_SUCCESS @defer.inlineCallbacks def parseCommitDescription(self, _=None): # dict() should not return here if isinstance(self.getDescription, bool) and not self.getDescription: return RC_SUCCESS cmd = ['describe'] if isinstance(self.getDescription, dict): for opt, arg in git_describe_flags: opt = self.getDescription.get(opt, None) arg = arg(opt) if arg: cmd.extend(arg) # 'git describe' takes a commitish as an argument for all options # *except* 
--dirty if not any(arg.startswith('--dirty') for arg in cmd): cmd.append('HEAD') try: stdout = yield self._dovccmd(cmd, collectStdout=True) desc = stdout.strip() self.updateSourceProperty('commit-description', desc) except Exception: pass return RC_SUCCESS def _getSshDataWorkDir(self): if self.method == 'copy' and self.mode == 'full': return self.srcdir return self.workdir @defer.inlineCallbacks def _fetch(self, _): fetch_required = True # If the revision already exists in the repo, we don't need to fetch. if self.revision: rc = yield self._dovccmd(['cat-file', '-e', self.revision], abandonOnFailure=False) if rc == RC_SUCCESS: fetch_required = False if fetch_required: command = ['fetch', '-f', '-t', self.repourl, self.branch] # If the 'progress' option is set, tell git fetch to output # progress information to the log. This can solve issues with # long fetches killed due to lack of output, but only works # with Git 1.7.2 or later. if self.prog: if self.supportsProgress: command.append('--progress') else: log.msg("Git versions < 1.7.2 don't support progress") yield self._dovccmd(command) if self.revision: rev = self.revision else: rev = 'FETCH_HEAD' command = ['checkout', '-f', rev] abandonOnFailure = not self.retryFetch and not self.clobberOnFailure res = yield self._dovccmd(command, abandonOnFailure) # Rename the branch if needed. 
if res == RC_SUCCESS and self.branch != 'HEAD': # Ignore errors yield self._dovccmd(['checkout', '-B', self.branch], abandonOnFailure=False) return res @defer.inlineCallbacks def _fetchOrFallback(self, _=None): """ Handles fallbacks for failure of fetch, wrapper for self._fetch """ res = yield self._fetch(None) if res == RC_SUCCESS: return res elif self.retryFetch: yield self._fetch(None) elif self.clobberOnFailure: yield self.clobber() else: raise buildstep.BuildStepFailed() return None @defer.inlineCallbacks def _clone(self, shallowClone): """Retry if clone failed""" command = ['clone'] switchToBranch = self.branch != 'HEAD' if self.supportsBranch and self.branch != 'HEAD': if self.branch.startswith('refs/'): # we can't choose this branch from 'git clone' directly; we # must do so after the clone command += ['--no-checkout'] else: switchToBranch = False command += ['--branch', self.branch] if shallowClone: command += ['--depth', str(int(shallowClone))] if self.reference: command += ['--reference', self.reference] if self.origin: command += ['--origin', self.origin] if self.filters: if self.supportsFilters: for filter in self.filters: command += ['--filter', filter] else: log.msg("Git versions < 2.27.0 don't support filters on clone") command += [self.repourl, '.'] if self.prog: if self.supportsProgress: command.append('--progress') else: log.msg("Git versions < 1.7.2 don't support progress") if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True # If it's a shallow clone abort build step res = yield self._dovccmd(command, abandonOnFailure=(abandonOnFailure and shallowClone)) if switchToBranch: res = yield self._fetch(None) done = self.stopped or res == RC_SUCCESS # or shallow clone?? 
if self.retry and not done: delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self._doClobber()) df.addCallback(lambda _: self._clone(shallowClone)) reactor.callLater(delay, df.callback, None) res = yield df return res @defer.inlineCallbacks def _fullClone(self, shallowClone=False): """Perform full clone and checkout to the revision if specified In the case of shallow clones if any of the step fail abort whole build step. """ res = yield self._clone(shallowClone) if res != RC_SUCCESS: return res # If revision specified checkout that revision if self.revision: res = yield self._dovccmd(['checkout', '-f', self.revision], shallowClone) # init and update submodules, recursively. If there's not recursion # it will not do it. if self.submodules: cmdArgs = ["submodule", "update", "--init", "--recursive"] if self.remoteSubmodules: cmdArgs.append("--remote") res = yield self._dovccmd(cmdArgs, shallowClone) return res @defer.inlineCallbacks def _fullCloneOrFallback(self): """Wrapper for _fullClone(). In the case of failure, if clobberOnFailure is set to True remove the build directory and try a full clone again. 
""" res = yield self._fullClone() if res != RC_SUCCESS: if not self.clobberOnFailure: raise buildstep.BuildStepFailed() res = yield self.clobber() return res @defer.inlineCallbacks def _doClobber(self): """Remove the work directory""" rc = yield self.runRmdir(self.workdir, timeout=self.timeout) if rc != RC_SUCCESS: raise RuntimeError("Failed to delete directory") return rc def computeSourceRevision(self, changes): if not changes: return None return changes[-1].revision @defer.inlineCallbacks def _syncSubmodule(self, _=None): rc = RC_SUCCESS if self.submodules: rc = yield self._dovccmd(['submodule', 'sync']) return rc @defer.inlineCallbacks def _updateSubmodule(self, _=None): rc = RC_SUCCESS if self.submodules: vccmd = ['submodule', 'update', '--init', '--recursive'] if self.supportsSubmoduleForce: vccmd.extend(['--force']) if self.supportsSubmoduleCheckout: vccmd.extend(["--checkout"]) if self.remoteSubmodules: vccmd.extend(["--remote"]) rc = yield self._dovccmd(vccmd) return rc @defer.inlineCallbacks def _cleanSubmodule(self, _=None): rc = RC_SUCCESS if self.submodules: subcommand = 'git clean -f -f -d' if self.mode == 'full' and self.method == 'fresh': subcommand += ' -x' command = ['submodule', 'foreach', '--recursive', subcommand] rc = yield self._dovccmd(command) return rc def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' return None @defer.inlineCallbacks def applyPatch(self, patch): yield self._dovccmd(['update-index', '--refresh']) res = yield self._dovccmd(['apply', '--index', '-p', str(patch[0])], initialStdin=patch[1]) return res @defer.inlineCallbacks def _sourcedirIsUpdatable(self): if self.workerVersionIsOlderThan('listdir', '2.16'): git_path = self.build.path_module.join(self.workdir, '.git') exists = yield self.pathExists(git_path) if exists: return "update" return "clone" cmd = 
remotecommand.RemoteCommand('listdir', {'dir': self.workdir}) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if 'files' not in cmd.updates: # no files - directory doesn't exist return "clone" files = cmd.updates['files'][0] if '.git' in files: return "update" elif files: return "clobber" else: return "clone" class GitPush(buildstep.BuildStep, GitStepMixin, CompositeStepMixin): description = None descriptionDone = None descriptionSuffix = None name = 'gitpush' renderables = ['repourl', 'branch'] def __init__(self, workdir=None, repourl=None, branch=None, force=False, env=None, timeout=20 * 60, logEnviron=True, sshPrivateKey=None, sshHostKey=None, sshKnownHosts=None, config=None, **kwargs): self.workdir = workdir self.repourl = repourl self.branch = branch self.force = force self.env = env self.timeout = timeout self.logEnviron = logEnviron self.sshPrivateKey = sshPrivateKey self.sshHostKey = sshHostKey self.sshKnownHosts = sshKnownHosts self.config = config super().__init__(**kwargs) self.setupGitStep() if not self.branch: bbconfig.error('GitPush: must provide branch') def _getSshDataWorkDir(self): return self.workdir @defer.inlineCallbacks def run(self): self.stdio_log = yield self.addLog("stdio") try: gitInstalled = yield self.checkFeatureSupport() if not gitInstalled: raise WorkerSetupError("git is not installed on worker") yield self._downloadSshPrivateKeyIfNeeded() ret = yield self._doPush() yield self._removeSshPrivateKeyIfNeeded() return ret except Exception as e: yield self._removeSshPrivateKeyIfNeeded() raise e @defer.inlineCallbacks def _doPush(self): cmd = ['push', self.repourl, self.branch] if self.force: cmd.append('--force') ret = yield self._dovccmd(cmd) return ret class GitTag(buildstep.BuildStep, GitStepMixin, CompositeStepMixin): description = None descriptionDone = None descriptionSuffix = None name = 'gittag' renderables = ['repourl', 'tagName', 'messages'] def __init__(self, workdir=None, tagName=None, annotated=False, 
messages=None, force=False, env=None, timeout=20 * 60, logEnviron=True, config=None, **kwargs): self.workdir = workdir self.tagName = tagName self.annotated = annotated self.messages = messages self.force = force self.env = env self.timeout = timeout self.logEnviron = logEnviron self.config = config # These attributes are required for GitStepMixin but not useful to tag self.repourl = " " self.sshHostKey = None self.sshPrivateKey = None self.sshKnownHosts = None super().__init__(**kwargs) self.setupGitStep() if not self.tagName: bbconfig.error('GitTag: must provide tagName') if self.annotated and not self.messages: bbconfig.error('GitTag: must provide messages in case of annotated tag') if not self.annotated and self.messages: bbconfig.error('GitTag: messages are required only in case of annotated tag') if self.messages and not isinstance(self.messages, list): bbconfig.error('GitTag: messages should be a list') @defer.inlineCallbacks def run(self): self.stdio_log = yield self.addLog("stdio") gitInstalled = yield self.checkFeatureSupport() if not gitInstalled: raise WorkerSetupError("git is not installed on worker") ret = yield self._doTag() return ret @defer.inlineCallbacks def _doTag(self): cmd = ['tag'] if self.annotated: cmd.append('-a') cmd.append(self.tagName) for msg in self.messages: cmd.extend(['-m', msg]) else: cmd.append(self.tagName) if self.force: cmd.append('--force') ret = yield self._dovccmd(cmd) return ret class GitCommit(buildstep.BuildStep, GitStepMixin, CompositeStepMixin): description = None descriptionDone = None descriptionSuffix = None name = 'gitcommit' renderables = ['paths', 'messages'] def __init__(self, workdir=None, paths=None, messages=None, env=None, timeout=20 * 60, logEnviron=True, emptyCommits='disallow', config=None, **kwargs): self.workdir = workdir self.messages = messages self.paths = paths self.env = env self.timeout = timeout self.logEnviron = logEnviron self.config = config self.emptyCommits = emptyCommits # The repourl, 
sshPrivateKey and sshHostKey attributes are required by # GitStepMixin, but aren't needed by git add and commit operations self.repourl = " " self.sshPrivateKey = None self.sshHostKey = None self.sshKnownHosts = None super().__init__(**kwargs) self.setupGitStep() if not self.messages: bbconfig.error('GitCommit: must provide messages') if not isinstance(self.messages, list): bbconfig.error('GitCommit: messages must be a list') if not self.paths: bbconfig.error('GitCommit: must provide paths') if not isinstance(self.paths, list): bbconfig.error('GitCommit: paths must be a list') if self.emptyCommits not in ('disallow', 'create-empty-commit', 'ignore'): bbconfig.error('GitCommit: emptyCommits must be one of "disallow", ' '"create-empty-commit" and "ignore"') @defer.inlineCallbacks def run(self): self.stdio_log = yield self.addLog("stdio") gitInstalled = yield self.checkFeatureSupport() if not gitInstalled: raise WorkerSetupError("git is not installed on worker") yield self._checkDetachedHead() yield self._doAdd() yield self._doCommit() return RC_SUCCESS @defer.inlineCallbacks def _checkDetachedHead(self): cmd = ['symbolic-ref', 'HEAD'] rc = yield self._dovccmd(cmd, abandonOnFailure=False) if rc != RC_SUCCESS: yield self.stdio_log.addStderr("You are in detached HEAD") raise buildstep.BuildStepFailed @defer.inlineCallbacks def _checkHasSomethingToCommit(self): cmd = ['status', '--porcelain=v1'] stdout = yield self._dovccmd(cmd, collectStdout=True) for line in stdout.splitlines(False): if line[0] in 'MADRCU': return True return False @defer.inlineCallbacks def _doCommit(self): if self.emptyCommits == 'ignore': has_commit = yield self._checkHasSomethingToCommit() if not has_commit: return 0 cmd = ['commit'] for message in self.messages: cmd.extend(['-m', message]) if self.emptyCommits == 'create-empty-commit': cmd.extend(['--allow-empty']) ret = yield self._dovccmd(cmd) return ret @defer.inlineCallbacks def _doAdd(self): cmd = ['add'] cmd.extend(self.paths) ret = yield 
self._dovccmd(cmd) return ret buildbot-3.4.0/master/buildbot/steps/source/github.py000066400000000000000000000017501413250514000227110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.steps.source.git import Git class GitHub(Git): def run_vc(self, branch, revision, patch): # ignore the revision if the branch ends with /merge if branch.endswith("/merge"): revision = None return super().run_vc(branch, revision, patch) buildbot-3.4.0/master/buildbot/steps/source/gitlab.py000066400000000000000000000041461413250514000226730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.python import log from buildbot.steps.source.git import Git class GitLab(Git): """ Source step that knows how to handle merge requests from the GitLab change source """ def run_vc(self, branch, revision, patch): # If this is a merge request: if self.build.hasProperty("target_branch"): target_repourl = self.build.getProperty("target_git_ssh_url", None) if self.repourl != target_repourl: log.msg(("GitLab.run_vc: note: GitLab step for merge requests" " should probably have repourl='{}' instead of '{}'?" ).format(target_repourl, self.repourl)) # This step is (probably) configured to fetch the target # branch of a merge (because it is impractical for users to # configure one builder for each of the infinite number of # possible source branches for merge requests). # Point instead to the source being proposed for merge. branch = self.build.getProperty("source_branch", None) # FIXME: layering violation, should not be modifying self here? self.repourl = self.build.getProperty("source_git_ssh_url", None) # The revision is unlikely to exist in the repo already, # so tell Git to not check. revision = None return super().run_vc(branch, revision, patch) buildbot-3.4.0/master/buildbot/steps/source/mercurial.py000066400000000000000000000321031413250514000234060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Source step code for mercurial """ from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.config import ConfigErrors from buildbot.interfaces import WorkerSetupError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process import results from buildbot.process.results import SUCCESS from buildbot.steps.source.base import Source class Mercurial(Source): """ Class for Mercurial with all the smarts """ name = "hg" renderables = ["repourl"] possible_methods = (None, 'clean', 'fresh', 'clobber') possible_branchTypes = ('inrepo', 'dirname') def __init__(self, repourl=None, mode='incremental', method=None, defaultBranch=None, branchType='dirname', clobberOnBranchChange=True, **kwargs): """ @type repourl: string @param repourl: the URL which points at the Mercurial repository. if 'dirname' branches are enabled, this is the base URL to which a branch name will be appended. It should probably end in a slash. @param defaultBranch: if branches are enabled, this is the branch to use if the Build does not specify one explicitly. For 'dirname' branches, It will simply be appended to C{repourl} and the result handed to the 'hg update' command. For 'inrepo' branches, this specifies the named revision to which the tree will update after a clone. @param branchType: either 'dirname' or 'inrepo' depending on whether the branch name should be appended to the C{repourl} or the branch is a mercurial named branch and can be found within the C{repourl} @param clobberOnBranchChange: boolean, defaults to True. If set and using inrepos branches, clobber the tree at each branch change. Otherwise, just update to the branch. 
""" self.repourl = repourl self.defaultBranch = self.branch = defaultBranch self.branchType = branchType self.method = method self.clobberOnBranchChange = clobberOnBranchChange self.mode = mode super().__init__(**kwargs) errors = [] if not self._hasAttrGroupMember('mode', self.mode): errors.append("mode {} is not one of {}".format(self.mode, self._listAttrGroupMembers('mode'))) if self.method not in self.possible_methods: errors.append("method {} is not one of {}".format(self.method, self.possible_methods)) if self.branchType not in self.possible_branchTypes: errors.append("branchType {} is not one of {}".format(self.branchType, self.possible_branchTypes)) if repourl is None: errors.append("you must provide a repourl") if errors: raise ConfigErrors(errors) @defer.inlineCallbacks def run_vc(self, branch, revision, patch): self.revision = revision self.method = self._getMethod() self.stdio_log = yield self.addLogForRemoteCommands("stdio") installed = yield self.checkHg() if not installed: raise WorkerSetupError("Mercurial is not installed on worker") # FIXME: this does not do anything yield self.sourcedirIsPatched() if self.branchType == 'dirname': self.repourl = self.repourl + (branch or '') self.branch = self.defaultBranch self.update_branch = branch elif self.branchType == 'inrepo': self.update_branch = (branch or 'default') yield self._getAttrGroupMember('mode', self.mode)() if patch: yield self.patch(patch) yield self.parseGotRevision() return results.SUCCESS @defer.inlineCallbacks def mode_full(self): if self.method == 'clobber': yield self.clobber() return updatable = yield self._sourcedirIsUpdatable() if not updatable: yield self._clone() yield self._update() elif self.method == 'clean': yield self.clean() elif self.method == 'fresh': yield self.fresh() else: raise ValueError("Unknown method, check your configuration") @defer.inlineCallbacks def mode_incremental(self): if self.method is not None: raise ValueError(self.method) updatable = yield 
self._sourcedirIsUpdatable() if updatable: yield self._dovccmd(self.getHgPullCommand()) else: yield self._clone() yield self._checkBranchChange() @defer.inlineCallbacks def clean(self): command = ['--config', 'extensions.purge=', 'purge'] yield self._dovccmd(command) yield self._pullUpdate() @defer.inlineCallbacks def _clobber(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron}) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) @defer.inlineCallbacks def clobber(self): yield self._clobber() yield self._clone() yield self._update() @defer.inlineCallbacks def fresh(self): command = ['--config', 'extensions.purge=', 'purge', '--all'] yield self._dovccmd(command) yield self._pullUpdate() @defer.inlineCallbacks def parseGotRevision(self): stdout = yield self._dovccmd(['parents', '--template', '{node}\\n'], collectStdout=True) revision = stdout.strip() if len(revision) != 40: raise ValueError("Incorrect revision id") log.msg("Got Mercurial revision {}".format(revision)) self.updateSourceProperty('got_revision', revision) @defer.inlineCallbacks def _checkBranchChange(self): current_branch = yield self._getCurrentBranch() msg = "Working dir is on in-repo branch '{}' and build needs '{}'.".format(current_branch, self.update_branch) if current_branch != self.update_branch and self.clobberOnBranchChange: msg += ' Clobbering.' log.msg(msg) yield self.clobber() return msg += ' Updating.' 
log.msg(msg) yield self._removeAddedFilesAndUpdate(None) def getHgPullCommand(self): command = ['pull', self.repourl] if self.revision: command.extend(['--rev', self.revision]) elif self.branchType == 'inrepo': command.extend(['--rev', self.update_branch]) return command @defer.inlineCallbacks def _pullUpdate(self): command = self.getHgPullCommand() yield self._dovccmd(command) yield self._checkBranchChange() @defer.inlineCallbacks def _dovccmd(self, command, collectStdout=False, initialStdin=None, decodeRC=None, abandonOnFailure=True): if not command: raise ValueError("No command specified") if decodeRC is None: decodeRC = {0: SUCCESS} cmd = remotecommand.RemoteShellCommand(self.workdir, ['hg', '--verbose'] + command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout, initialStdin=initialStdin, decodeRC=decodeRC) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if abandonOnFailure and cmd.didFail(): log.msg("Source step failed while running command {}".format(cmd)) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout return cmd.rc def computeSourceRevision(self, changes): if not changes: return None # without knowing the revision ancestry graph, we can't sort the # changes at all. So for now, assume they were given to us in sorted # order, and just pay attention to the last one. See ticket #103 for # more details. 
if len(changes) > 1: log.msg("Mercurial.computeSourceRevision: warning: " "there are %d changes here, assuming the last one is " "the most recent" % len(changes)) return changes[-1].revision @defer.inlineCallbacks def _getCurrentBranch(self): if self.branchType == 'dirname': return self.branch stdout = yield self._dovccmd(['identify', '--branch'], collectStdout=True) return stdout.strip() def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' return None def _sourcedirIsUpdatable(self): return self.pathExists(self.build.path_module.join(self.workdir, '.hg')) @defer.inlineCallbacks def _removeAddedFilesAndUpdate(self, _): command = ['locate', 'set:added()'] stdout = yield self._dovccmd(command, collectStdout=True, decodeRC={0: SUCCESS, 1: SUCCESS}) files = [] for filename in stdout.splitlines(): filename = self.workdir + '/' + filename files.append(filename) if files: if self.workerVersionIsOlderThan('rmdir', '2.14'): yield self.removeFiles(files) else: cmd = remotecommand.RemoteCommand('rmdir', {'dir': files, 'logEnviron': self.logEnviron, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) yield self._update() @defer.inlineCallbacks def removeFiles(self, files): for filename in files: cmd = remotecommand.RemoteCommand('rmdir', {'dir': filename, 'logEnviron': self.logEnviron, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.rc != 0: return cmd.rc return 0 @defer.inlineCallbacks def _update(self): command = ['update', '--clean'] if self.revision: command += ['--rev', self.revision] elif self.branchType == 'inrepo': command += ['--rev', self.update_branch] yield self._dovccmd(command) def _clone(self): if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True d = self._dovccmd(['clone', '--noupdate', self.repourl, '.'], abandonOnFailure=abandonOnFailure) 
def _retry(res): if self.stopped or res == 0: return res delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self._clobber()) df.addCallback(lambda _: self._clone()) reactor.callLater(delay, df.callback, None) return df return res if self.retry: d.addCallback(_retry) return d def checkHg(self): d = self._dovccmd(['--version']) @d.addCallback def check(res): return res == 0 return d def applyPatch(self, patch): d = self._dovccmd(['import', '--no-commit', '-p', str(patch[0]), '-'], initialStdin=patch[1]) return d buildbot-3.4.0/master/buildbot/steps/source/mtn.py000066400000000000000000000307311413250514000222260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ Source step code for Monotone """ from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.config import ConfigErrors from buildbot.interfaces import WorkerSetupError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process.results import SUCCESS from buildbot.steps.source.base import Source class Monotone(Source): """ Class for Monotone with all smarts """ name = 'monotone' renderables = ['repourl'] possible_methods = ('clobber', 'copy', 'fresh', 'clean') def __init__(self, repourl=None, branch=None, progress=False, mode='incremental', method=None, **kwargs): self.repourl = repourl self.method = method self.mode = mode self.branch = branch self.sourcedata = "{}?{}".format(self.repourl, self.branch) self.database = 'db.mtn' self.progress = progress super().__init__(**kwargs) errors = [] if not self._hasAttrGroupMember('mode', self.mode): errors.append("mode {} is not one of {}".format(self.mode, self._listAttrGroupMembers('mode'))) if self.mode == 'incremental' and self.method: errors.append("Incremental mode does not require method") if self.mode == 'full': if self.method is None: self.method = 'copy' elif self.method not in self.possible_methods: errors.append("Invalid method for mode == {}".format(self.mode)) if repourl is None: errors.append("you must provide repourl") if branch is None: errors.append("you must provide branch") if errors: raise ConfigErrors(errors) @defer.inlineCallbacks def run_vc(self, branch, revision, patch): self.revision = revision self.stdio_log = yield self.addLogForRemoteCommands("stdio") try: monotoneInstalled = yield self.checkMonotone() if not monotoneInstalled: raise WorkerSetupError("Monotone is not installed on worker") yield self._checkDb() yield self._retryPull() # If we're not throwing away the workdir, check if it's # somehow patched or modified and revert. 
if self.mode != 'full' or self.method not in ('clobber', 'copy'): patched = yield self.sourcedirIsPatched() if patched: yield self.clean() # Call a mode specific method fn = self._getAttrGroupMember('mode', self.mode) yield fn() if patch: yield self.patch(patch) yield self.parseGotRevision() return SUCCESS finally: pass # FIXME: remove this try:raise block @defer.inlineCallbacks def mode_full(self): if self.method == 'clobber': yield self.clobber() return elif self.method == 'copy': yield self.copy() return updatable = yield self._sourcedirIsUpdatable() if not updatable: yield self.clobber() elif self.method == 'clean': yield self.clean() yield self._update() elif self.method == 'fresh': yield self.clean(False) yield self._update() else: raise ValueError("Unknown method, check your configuration") @defer.inlineCallbacks def mode_incremental(self): updatable = yield self._sourcedirIsUpdatable() if not updatable: yield self.clobber() else: yield self._update() @defer.inlineCallbacks def clobber(self): yield self.runRmdir(self.workdir) yield self._checkout() @defer.inlineCallbacks def copy(self): cmd = remotecommand.RemoteCommand('rmdir', { 'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) self.workdir = 'source' yield self.mode_incremental() cmd = remotecommand.RemoteCommand('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) self.workdir = 'build' return 0 @defer.inlineCallbacks def checkMonotone(self): cmd = remotecommand.RemoteShellCommand(self.workdir, ['mtn', '--version'], env=self.env, logEnviron=self.logEnviron, timeout=self.timeout) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) return cmd.rc == 0 @defer.inlineCallbacks def clean(self, ignore_ignored=True): files = [] commands = [['mtn', 'ls', 'unknown']] if not ignore_ignored: 
commands.append(['mtn', 'ls', 'ignored']) for cmd in commands: stdout = yield self._dovccmd(cmd, workdir=self.workdir, collectStdout=True) if not stdout: continue for filename in stdout.strip().split('\n'): filename = self.workdir + '/' + str(filename) files.append(filename) if not files: rc = 0 else: if self.workerVersionIsOlderThan('rmdir', '2.14'): rc = yield self.removeFiles(files) else: rc = yield self.runRmdir(files, abandonOnFailure=False) if rc != 0: log.msg("Failed removing files") raise buildstep.BuildStepFailed() @defer.inlineCallbacks def removeFiles(self, files): for filename in files: res = yield self.runRmdir(filename, abandonOnFailure=False) if res: return res return 0 def _checkout(self, abandonOnFailure=False): command = ['mtn', 'checkout', self.workdir, '--db', self.database] if self.revision: command.extend(['--revision', self.revision]) command.extend(['--branch', self.branch]) return self._dovccmd(command, workdir='.', abandonOnFailure=abandonOnFailure) def _update(self, abandonOnFailure=False): command = ['mtn', 'update'] if self.revision: command.extend(['--revision', self.revision]) else: command.extend(['--revision', 'h:' + self.branch]) command.extend(['--branch', self.branch]) return self._dovccmd(command, workdir=self.workdir, abandonOnFailure=abandonOnFailure) def _pull(self, abandonOnFailure=False): command = ['mtn', 'pull', self.sourcedata, '--db', self.database] if self.progress: command.extend(['--ticker=dot']) else: command.extend(['--ticker=none']) d = self._dovccmd(command, workdir='.', abandonOnFailure=abandonOnFailure) return d @defer.inlineCallbacks def _retryPull(self): if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True res = yield self._pull(abandonOnFailure) if self.retry: delay, repeats = self.retry if self.stopped or res == 0 or repeats <= 0: return res else: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = 
defer.Deferred() df.addCallback(lambda _: self._retryPull()) reactor.callLater(delay, df.callback, None) yield df return None @defer.inlineCallbacks def parseGotRevision(self): stdout = yield self._dovccmd(['mtn', 'automate', 'select', 'w:'], workdir=self.workdir, collectStdout=True) revision = stdout.strip() if len(revision) != 40: raise buildstep.BuildStepFailed() log.msg("Got Monotone revision {}".format(revision)) self.updateSourceProperty('got_revision', revision) return 0 @defer.inlineCallbacks def _dovccmd(self, command, workdir, collectStdout=False, initialStdin=None, decodeRC=None, abandonOnFailure=True): if not command: raise ValueError("No command specified") if decodeRC is None: decodeRC = {0: SUCCESS} cmd = remotecommand.RemoteShellCommand(workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout, initialStdin=initialStdin, decodeRC=decodeRC) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if abandonOnFailure and cmd.didFail(): log.msg("Source step failed while running command {}".format(cmd)) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout else: return cmd.rc @defer.inlineCallbacks def _checkDb(self): db_exists = yield self.pathExists(self.database) db_needs_init = False if db_exists: stdout = yield self._dovccmd( ['mtn', 'db', 'info', '--db', self.database], workdir='.', collectStdout=True) if stdout.find("migration needed") >= 0: log.msg("Older format database found, migrating it") yield self._dovccmd(['mtn', 'db', 'migrate', '--db', self.database], workdir='.') elif stdout.find("too new, cannot use") >= 0 or \ stdout.find("database has no tables") >= 0: # The database is of a newer format which the worker's # mtn version can not handle. Drop it and pull again # with that monotone version installed on the # worker. Do the same if it's an empty file. 
yield self.runRmdir(self.database) db_needs_init = True elif stdout.find("not a monotone database") >= 0: # There exists a database file, but it's not a valid # monotone database. Do not delete it, but fail with # an error. raise buildstep.BuildStepFailed() else: log.msg("Database exists and compatible") else: db_needs_init = True log.msg("Database does not exist") if db_needs_init: command = ['mtn', 'db', 'init', '--db', self.database] yield self._dovccmd(command, workdir='.') @defer.inlineCallbacks def _sourcedirIsUpdatable(self): workdir_path = self.build.path_module.join(self.workdir, '_MTN') workdir_exists = yield self.pathExists(workdir_path) if not workdir_exists: log.msg("Workdir does not exist, falling back to a fresh clone") return workdir_exists buildbot-3.4.0/master/buildbot/steps/source/p4.py000066400000000000000000000373601413250514000217600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # Portions Copyright 2013 Bad Dog Consulting import re from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot import interfaces from buildbot.interfaces import WorkerSetupError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process import results from buildbot.process.properties import Interpolate from buildbot.steps.source import Source # Notes: # see # http://perforce.com/perforce/doc.current/manuals/cmdref/o.gopts.html#1040647 # for getting p4 command to output marshalled python dictionaries as output # for commands. # Perhaps switch to using 'p4 -G' : From URL above: # -G Causes all output (and batch input for form commands with -i) to be # formatted as marshalled Python dictionary objects. This is most often used # when scripting. class P4(Source): """Perform Perforce checkout/update operations.""" name = 'p4' renderables = ['mode', 'p4base', 'p4client', 'p4viewspec', 'p4branch', 'p4passwd'] possible_modes = ('incremental', 'full') def __init__(self, mode='incremental', method=None, p4base=None, p4branch=None, p4port=None, p4user=None, p4passwd=None, p4extra_views=(), p4line_end='local', p4viewspec=None, p4viewspec_suffix='...', p4client=Interpolate( 'buildbot_%(prop:workername)s_%(prop:buildername)s'), p4client_spec_options='allwrite rmdir', p4extra_args=None, p4bin='p4', use_tickets=False, stream=False, debug=False, **kwargs): self.method = method self.mode = mode self.p4branch = p4branch self.p4bin = p4bin self.p4base = p4base self.p4port = p4port self.p4user = p4user self.p4passwd = p4passwd self.p4extra_views = p4extra_views self.p4viewspec = p4viewspec self.p4viewspec_suffix = p4viewspec_suffix self.p4line_end = p4line_end self.p4client = p4client self.p4client_spec_options = p4client_spec_options self.p4extra_args = p4extra_args self.use_tickets = use_tickets self.stream = stream self.debug = debug 
super().__init__(**kwargs) if self.mode not in self.possible_modes and \ not interfaces.IRenderable.providedBy(self.mode): config.error("mode {} is not an IRenderable, or one of {}".format(self.mode, self.possible_modes)) if not p4viewspec and p4base is None: config.error("You must provide p4base or p4viewspec") if p4viewspec and (p4base or p4branch or p4extra_views): config.error( "Either provide p4viewspec or p4base and p4branch (and optionally p4extra_views)") if p4viewspec and isinstance(p4viewspec, str): config.error( "p4viewspec must not be a string, and should be a sequence of 2 element sequences") if not interfaces.IRenderable.providedBy(p4base) and p4base and not p4base.startswith('/'): config.error('p4base should start with // [p4base = {}]'.format(p4base)) if not interfaces.IRenderable.providedBy(p4base) and p4base and p4base.endswith('/'): config.error('p4base should not end with a trailing / [p4base = {}]'.format(p4base)) if not interfaces.IRenderable.providedBy(p4branch) and p4branch and p4branch.endswith('/'): config.error('p4branch should not end with a trailing / [p4branch = {}]'.format( p4branch)) if stream: if (p4extra_views or p4viewspec): config.error('You can\'t use p4extra_views not p4viewspec with stream') if not p4base or not p4branch: config.error('You must specify both p4base and p4branch when using stream') if not interfaces.IRenderable.providedBy(p4base) and " " in p4base: config.error('p4base must not contain any whitespace') if not interfaces.IRenderable.providedBy(p4branch) and " " in p4branch: config.error('p4branch must not contain any whitespace') if self.p4client_spec_options is None: self.p4client_spec_options = '' @defer.inlineCallbacks def run_vc(self, branch, revision, patch): if self.debug: log.msg('in run_vc') self.revision = revision self.method = self._getMethod() self.stdio_log = yield self.addLogForRemoteCommands("stdio") installed = yield self.checkP4() if not installed: raise WorkerSetupError("p4 is not installed on 
worker") # Try to obfuscate the password when used as an argument to commands. if self.p4passwd is not None: if not self.workerVersionIsOlderThan('shell', '2.16'): self.p4passwd_arg = ('obfuscated', self.p4passwd, 'XXXXXX') else: self.p4passwd_arg = self.p4passwd log.msg("Worker does not understand obfuscation; " "p4 password will be logged") if self.use_tickets and self.p4passwd: yield self._acquireTicket() yield self._getAttrGroupMember('mode', self.mode)() yield self.parseGotRevision() return results.SUCCESS @defer.inlineCallbacks def mode_full(self): if self.debug: log.msg("P4:full()..") # First we need to create the client yield self._createClientSpec() # Then p4 sync #none yield self._dovccmd(['sync', '#none']) # Then remove directory. yield self.runRmdir(self.workdir) # Then we need to sync the client if self.revision: if self.debug: log.msg("P4: full() sync command based on :base:%s changeset:%d", self._getP4BaseForLog(), int(self.revision)) yield self._dovccmd(['sync', '{}...@{}'.format(self._getP4BaseForCommand(), int(self.revision))], collectStdout=True) else: if self.debug: log.msg("P4: full() sync command based on :base:%s no revision", self._getP4BaseForLog()) yield self._dovccmd(['sync'], collectStdout=True) if self.debug: log.msg("P4: full() sync done.") @defer.inlineCallbacks def mode_incremental(self): if self.debug: log.msg("P4:incremental()") # First we need to create the client yield self._createClientSpec() # and plan to do a checkout command = ['sync', ] if self.revision: command.extend(['{}...@{}'.format(self._getP4BaseForCommand(), int(self.revision))]) if self.debug: log.msg( "P4:incremental() command:%s revision:%s", command, self.revision) yield self._dovccmd(command) def _getP4BaseForLog(self): return self.p4base or '' def _getP4BaseForCommand(self): return self.p4base or '' def _buildVCCommand(self, doCommand): assert doCommand, "No command specified" command = [self.p4bin, ] if self.p4port: command.extend(['-p', self.p4port]) if 
self.p4user: command.extend(['-u', self.p4user]) if not self.use_tickets and self.p4passwd: command.extend(['-P', self.p4passwd_arg]) if self.p4client: command.extend(['-c', self.p4client]) # Only add the extra arguments for the `sync` command. if doCommand[0] == 'sync' and self.p4extra_args: command.extend(self.p4extra_args) command.extend(doCommand) return command @defer.inlineCallbacks def _dovccmd(self, command, collectStdout=False, initialStdin=None): command = self._buildVCCommand(command) if self.debug: log.msg("P4:_dovccmd():workdir->{}".format(self.workdir)) cmd = remotecommand.RemoteShellCommand(self.workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout, initialStdin=initialStdin,) cmd.useLog(self.stdio_log, False) if self.debug: log.msg("Starting p4 command : p4 {}".format(" ".join(command))) yield self.runCommand(cmd) if cmd.rc != 0: if self.debug: log.msg("P4:_dovccmd():Source step failed while running command {}".format(cmd)) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout return cmd.rc def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' return None @defer.inlineCallbacks def _createClientSpec(self): builddir = self.getProperty('builddir') if self.debug: log.msg("P4:_createClientSpec() builddir:{}".format(builddir)) log.msg("P4:_createClientSpec() SELF.workdir:{}".format(self.workdir)) prop_dict = self.getProperties().asDict() prop_dict['p4client'] = self.p4client client_spec = '' client_spec += "Client: {}\n\n".format(self.p4client) client_spec += "Owner: {}\n\n".format(self.p4user) client_spec += "Description:\n\tCreated by {}\n\n".format(self.p4user) client_spec += "Root:\t{}\n\n".format(self.build.path_module.normpath( self.build.path_module.join(builddir, self.workdir))) client_spec += 
"Options:\t{}\n\n".format(self.p4client_spec_options) if self.p4line_end: client_spec += "LineEnd:\t{}\n\n".format(self.p4line_end) else: client_spec += "LineEnd:\tlocal\n\n" # Perforce generates the view for stream-associated workspaces if self.stream: client_spec += "Stream:\t{}/{}\n".format(self.p4base, self.p4branch) else: # Setup a view client_spec += "View:\n" def has_whitespace(*args): return any([re.search(r'\s', i) for i in args if i is not None]) if self.p4viewspec: # uses only p4viewspec array of tuples to build view # If the user specifies a viewspec via an array of tuples then # Ignore any specified p4base,p4branch, and/or p4extra_views suffix = self.p4viewspec_suffix or '' for k, v in self.p4viewspec: if self.debug: log.msg('P4:_createClientSpec():key:{} value:{}'.format(k, v)) qa = '"' if has_whitespace(k, suffix) else '' qb = '"' if has_whitespace(self.p4client, v, suffix) else '' client_spec += '\t{}{}{}{} {}//{}/{}{}{}\n'.format(qa, k, suffix, qa, qb, self.p4client, v, suffix, qb) else: # Uses p4base, p4branch, p4extra_views qa = '"' if has_whitespace(self.p4base, self.p4branch) else '' client_spec += "\t{}{}".format(qa, self.p4base) if self.p4branch: client_spec += "/{}".format(self.p4branch) client_spec += "/...{} ".format(qa) qb = '"' if has_whitespace(self.p4client) else '' client_spec += "{}//{}/...{}\n".format(qb, self.p4client, qb) if self.p4extra_views: for k, v in self.p4extra_views: qa = '"' if has_whitespace(k) else '' qb = '"' if has_whitespace(k, self.p4client, v) else '' client_spec += "\t{}{}/...{} {}//{}/{}/...{}\n".format(qa, k, qa, qb, self.p4client, v, qb) if self.debug: log.msg(client_spec) stdout = yield self._dovccmd(['client', '-i'], collectStdout=True, initialStdin=client_spec) mo = re.search(r'Client (\S+) (.+)$', stdout, re.M) return mo and (mo.group(2) == 'saved.' 
or mo.group(2) == 'not changed.') @defer.inlineCallbacks def _acquireTicket(self): if self.debug: log.msg("P4:acquireTicket()") # TODO: check first if the ticket is still valid? initialStdin = self.p4passwd + "\n" yield self._dovccmd(['login'], initialStdin=initialStdin) @defer.inlineCallbacks def parseGotRevision(self): command = self._buildVCCommand(['changes', '-m1', '#have']) cmd = remotecommand.RemoteShellCommand(self.workdir, command, env=self.env, timeout=self.timeout, logEnviron=self.logEnviron, collectStdout=True) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) stdout = cmd.stdout.strip() # Example output from p4 changes -m1 #have # Change 212798 on 2012/04/13 by user@user-unix-bldng2 'change to # pickup build' revision = stdout.split()[1] try: int(revision) except ValueError as e: msg = (("p4.parseGotRevision unable to parse output " "of 'p4 changes -m1 \"#have\"': '{}'").format(stdout)) log.msg(msg) raise buildstep.BuildStepFailed() from e if self.debug: log.msg("Got p4 revision {}".format(revision)) self.updateSourceProperty('got_revision', revision) @defer.inlineCallbacks def purge(self, ignore_ignores): """Delete everything that shown up on status.""" command = ['sync', '#none'] if ignore_ignores: command.append('--no-ignore') yield self._dovccmd(command, collectStdout=True) # FIXME: do the following comments need addressing? 
# add deferred to rm tree # then add defer to sync to revision @defer.inlineCallbacks def checkP4(self): cmd = remotecommand.RemoteShellCommand(self.workdir, [self.p4bin, '-V'], env=self.env, logEnviron=self.logEnviron) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) return cmd.rc == 0 def computeSourceRevision(self, changes): if not changes or None in [c.revision for c in changes]: return None lastChange = max([int(c.revision) for c in changes]) return lastChange buildbot-3.4.0/master/buildbot/steps/source/repo.py000066400000000000000000000464671413250514000224120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re import textwrap from twisted.internet import defer from twisted.internet import reactor from zope.interface import implementer from buildbot import util from buildbot.interfaces import IRenderable from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process import results from buildbot.steps.source.base import Source @implementer(IRenderable) class RepoDownloadsFromProperties(util.ComparableMixin): parse_download_re = (re.compile(r"repo download ([^ ]+) ([0-9]+/[0-9]+)"), re.compile(r"([^ ]+) ([0-9]+/[0-9]+)"), re.compile(r"([^ ]+)/([0-9]+/[0-9]+)"), ) compare_attrs = ('names',) def __init__(self, names): self.names = names def getRenderingFor(self, props): downloads = [] for propName in self.names: s = props.getProperty(propName) if s is not None: downloads.extend(self.parseDownloadProperty(s)) return downloads def parseDownloadProperty(self, s): """ lets try to be nice in the format we want can support several instances of "repo download proj number/patch" (direct copy paste from gerrit web site) or several instances of "proj number/patch" (simpler version) This feature allows integrator to build with several pending interdependent changes. 
returns list of repo downloads sent to the worker """ if s is None: return [] ret = [] for cur_re in self.parse_download_re: res = cur_re.search(s) while res: ret.append("{} {}".format(res.group(1), res.group(2))) s = s[:res.start(0)] + s[res.end(0):] res = cur_re.search(s) return ret @implementer(IRenderable) class RepoDownloadsFromChangeSource(util.ComparableMixin): compare_attrs = ('codebase',) def __init__(self, codebase=None): self.codebase = codebase def getRenderingFor(self, props): downloads = [] if self.codebase is None: changes = props.getBuild().allChanges() else: changes = props.getBuild().getSourceStamp(self.codebase).changes for change in changes: if ("event.type" in change.properties and change.properties["event.type"] == "patchset-created"): downloads.append("{} {}/{}".format(change.properties["event.change.project"], change.properties["event.change.number"], change.properties["event.patchSet.number"])) return downloads class Repo(Source): """ Class for Repo with all the smarts """ name = 'repo' renderables = ["manifestURL", "manifestBranch", "manifestFile", "tarball", "jobs", "syncAllBranches", "updateTarballAge", "manifestOverrideUrl", "repoDownloads", "depth", "submodules"] ref_not_found_re = re.compile(r"fatal: Couldn't find remote ref") cherry_pick_error_re = re.compile(r"|".join([r"Automatic cherry-pick failed", r"error: " r"fatal: " r"possibly due to conflict resolution."])) re_change = re.compile(r".* refs/changes/\d\d/(\d+)/(\d+) -> FETCH_HEAD$") re_head = re.compile(r"^HEAD is now at ([0-9a-f]+)...") # number of retries, if we detect mirror desynchronization mirror_sync_retry = 10 # wait 1min between retries (thus default total retry time is 10min) mirror_sync_sleep = 60 def __init__(self, manifestURL=None, manifestBranch="master", manifestFile="default.xml", tarball=None, jobs=None, syncAllBranches=False, updateTarballAge=7 * 24.0 * 3600.0, manifestOverrideUrl=None, repoDownloads=None, depth=0, submodules=False, syncQuietly=False, 
**kwargs): """ @type manifestURL: string @param manifestURL: The URL which points at the repo manifests repository. @type manifestBranch: string @param manifestBranch: The manifest branch to check out by default. @type manifestFile: string @param manifestFile: The manifest to use for sync. @type syncAllBranches: bool. @param syncAllBranches: true, then we must slowly synchronize all branches. @type updateTarballAge: float @param updateTarballAge: renderable to determine the update tarball policy, given properties Returns: max age of tarball in seconds, or None, if we want to skip tarball update @type manifestOverrideUrl: string @param manifestOverrideUrl: optional http URL for overriding the manifest usually coming from Property setup by a ForceScheduler @type repoDownloads: list of strings @param repoDownloads: optional repo download to perform after the repo sync @type depth: integer @param depth: optional depth parameter to repo init. If specified, create a shallow clone with given depth. @type submodules: string @param submodules: optional submodules parameter to repo init. @type syncQuietly: bool. @param syncQuietly: true, then suppress verbose output from repo sync. """ self.manifestURL = manifestURL self.manifestBranch = manifestBranch self.manifestFile = manifestFile self.tarball = tarball self.jobs = jobs self.syncAllBranches = syncAllBranches self.updateTarballAge = updateTarballAge self.manifestOverrideUrl = manifestOverrideUrl if repoDownloads is None: repoDownloads = [] self.repoDownloads = repoDownloads self.depth = depth self.submodules = submodules self.syncQuietly = syncQuietly super().__init__(**kwargs) assert self.manifestURL is not None def computeSourceRevision(self, changes): if not changes: return None return changes[-1].revision def filterManifestPatches(self): """ Patches to manifest projects are a bit special. repo does not support a way to download them automatically, so we need to implement the boilerplate manually. 
This code separates the manifest patches from the other patches, and generates commands to import those manifest patches. """ manifest_unrelated_downloads = [] manifest_related_downloads = [] for download in self.repoDownloads: project, ch_ps = download.split(" ")[-2:] if (self.manifestURL.endswith("/" + project) or self.manifestURL.endswith("/" + project + ".git")): ch, ps = map(int, ch_ps.split("/")) branch = "refs/changes/%02d/%d/%d" % (ch % 100, ch, ps) manifest_related_downloads.append( ["git", "fetch", self.manifestURL, branch]) manifest_related_downloads.append( ["git", "cherry-pick", "FETCH_HEAD"]) else: manifest_unrelated_downloads.append(download) self.repoDownloads = manifest_unrelated_downloads self.manifestDownloads = manifest_related_downloads def _repoCmd(self, command, abandonOnFailure=True, **kwargs): return self._Cmd(["repo"] + command, abandonOnFailure=abandonOnFailure, **kwargs) @defer.inlineCallbacks def _Cmd(self, command, abandonOnFailure=True, workdir=None, **kwargs): if workdir is None: workdir = self.workdir cmd = remotecommand.RemoteShellCommand(workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, **kwargs) self.lastCommand = cmd # does not make sense to logEnviron for each command (just for first) self.logEnviron = False cmd.useLog(self.stdio_log, False) yield self.stdio_log.addHeader("Starting command: {}\n".format(" ".join(command))) self.description = ' '.join(command[:2]) # FIXME: enable when new style step is switched on yield self.updateSummary() yield self.runCommand(cmd) if abandonOnFailure and cmd.didFail(): self.descriptionDone = "repo failed at: {}".format(" ".join(command[:2])) msg = "Source step failed while running command {}\n".format(cmd) yield self.stdio_log.addStderr(msg) raise buildstep.BuildStepFailed() return cmd.rc def repoDir(self): return self.build.path_module.join(self.workdir, ".repo") def sourcedirIsUpdateable(self): return self.pathExists(self.repoDir()) def run_vc(self, branch, 
revision, patch): return self.doStartVC() @defer.inlineCallbacks def doStartVC(self): self.stdio_log = yield self.addLogForRemoteCommands("stdio") self.filterManifestPatches() if self.repoDownloads: yield self.stdio_log.addHeader("will download:\nrepo download {}\n".format( "\nrepo download ".join(self.repoDownloads))) self.willRetryInCaseOfFailure = True try: yield self.doRepoSync() except buildstep.BuildStepFailed as e: if not self.willRetryInCaseOfFailure: raise yield self.stdio_log.addStderr("got issue at first try:\n" + str(e) + "\nRetry after clobber...") yield self.doRepoSync(forceClobber=True) yield self.maybeUpdateTarball() # starting from here, clobbering will not help yield self.doRepoDownloads() return results.SUCCESS @defer.inlineCallbacks def doClobberStart(self): yield self.runRmdir(self.workdir) yield self.runMkdir(self.workdir) yield self.maybeExtractTarball() @defer.inlineCallbacks def doRepoSync(self, forceClobber=False): updatable = yield self.sourcedirIsUpdateable() if not updatable or forceClobber: # no need to re-clobber in case of failure self.willRetryInCaseOfFailure = False yield self.doClobberStart() yield self.doCleanup() command = ['init', '-u', self.manifestURL, '-b', self.manifestBranch, '-m', self.manifestFile, '--depth', str(self.depth)] if self.submodules: command.append('--submodules') yield self._repoCmd(command) if self.manifestOverrideUrl: msg = "overriding manifest with {}\n".format(self.manifestOverrideUrl) yield self.stdio_log.addHeader(msg) local_path = self.build.path_module.join(self.workdir, self.manifestOverrideUrl) local_file = yield self.pathExists(local_path) if local_file: yield self._Cmd(["cp", "-f", self.manifestOverrideUrl, "manifest_override.xml"]) else: yield self._Cmd(["wget", self.manifestOverrideUrl, "-O", "manifest_override.xml"]) yield self._Cmd(["ln", "-sf", "../manifest_override.xml", "manifest.xml"], workdir=self.build.path_module.join(self.workdir, ".repo")) for command in self.manifestDownloads: yield 
self._Cmd(command, workdir=self.build.path_module.join(self.workdir, ".repo", "manifests")) command = ['sync', '--force-sync'] if self.jobs: command.append('-j' + str(self.jobs)) if not self.syncAllBranches: command.append('-c') if self.syncQuietly: command.append('-q') self.description = "repo sync" # FIXME: enable when new style step is used: yield self.updateSummary() yield self.stdio_log.addHeader("synching manifest {} from branch {} from {}\n".format( self.manifestFile, self.manifestBranch, self.manifestURL)) yield self._repoCmd(command) command = ['manifest', '-r', '-o', 'manifest-original.xml'] yield self._repoCmd(command) # check whether msg matches one of the # compiled regexps in self.re_error_messages def _findErrorMessages(self, error_re): for logname in ['stderr', 'stdout']: if not hasattr(self.lastCommand, logname): continue msg = getattr(self.lastCommand, logname) if not (re.search(error_re, msg) is None): return True return False def _sleep(self, delay): d = defer.Deferred() reactor.callLater(delay, d.callback, 1) return d @defer.inlineCallbacks def doRepoDownloads(self): self.repo_downloaded = "" for download in self.repoDownloads: command = ['download'] + download.split(' ') yield self.stdio_log.addHeader("downloading changeset {}\n".format(download)) retry = self.mirror_sync_retry + 1 while retry > 0: yield self._repoCmd(command, abandonOnFailure=False, collectStdout=True, collectStderr=True) if not self._findErrorMessages(self.ref_not_found_re): break retry -= 1 yield self.stdio_log.addStderr("failed downloading changeset {}\n".format(download)) yield self.stdio_log.addHeader("wait one minute for mirror sync\n") yield self._sleep(self.mirror_sync_sleep) if retry == 0: self.descriptionDone = "repo: change {} does not exist".format(download) raise buildstep.BuildStepFailed() if self.lastCommand.didFail() or self._findErrorMessages(self.cherry_pick_error_re): # cherry pick error! 
We create a diff with status current workdir # in stdout, which reveals the merge errors and exit command = ['forall', '-c', 'git', 'diff', 'HEAD'] yield self._repoCmd(command, abandonOnFailure=False) self.descriptionDone = "download failed: {}".format(download) raise buildstep.BuildStepFailed() if hasattr(self.lastCommand, 'stderr'): lines = self.lastCommand.stderr.split("\n") match1 = match2 = False for line in lines: if not match1: match1 = self.re_change.match(line) if not match2: match2 = self.re_head.match(line) if match1 and match2: self.repo_downloaded += "{}/{} {} ".format(match1.group(1), match1.group(2), match2.group(1)) self.setProperty("repo_downloaded", self.repo_downloaded, "Source") def computeTarballOptions(self): # Keep in mind that the compression part of tarball generation # can be non negligible tar = ['tar'] if self.tarball.endswith("pigz"): tar.append('-I') tar.append('pigz') elif self.tarball.endswith("gz"): tar.append('-z') elif self.tarball.endswith("bz2") or self.tarball.endswith("bz"): tar.append('-j') elif self.tarball.endswith("lzma"): tar.append('--lzma') elif self.tarball.endswith("lzop"): tar.append('--lzop') return tar @defer.inlineCallbacks def maybeExtractTarball(self): if self.tarball: tar = self.computeTarballOptions() + ['-xvf', self.tarball] res = yield self._Cmd(tar, abandonOnFailure=False) if res: # error with tarball.. 
erase repo dir and tarball yield self._Cmd(["rm", "-f", self.tarball], abandonOnFailure=False) yield self.runRmdir(self.repoDir(), abandonOnFailure=False) @defer.inlineCallbacks def maybeUpdateTarball(self): if not self.tarball or self.updateTarballAge is None: return # tarball path is absolute, so we cannot use worker's stat command # stat -c%Y gives mtime in second since epoch res = yield self._Cmd(["stat", "-c%Y", self.tarball], collectStdout=True, abandonOnFailure=False) if not res: tarball_mtime = int(self.lastCommand.stdout) yield self._Cmd(["stat", "-c%Y", "."], collectStdout=True) now_mtime = int(self.lastCommand.stdout) age = now_mtime - tarball_mtime if res or age > self.updateTarballAge: tar = self.computeTarballOptions() + \ ['-cvf', self.tarball, ".repo"] res = yield self._Cmd(tar, abandonOnFailure=False) if res: # error with tarball.. erase tarball, but don't fail yield self._Cmd(["rm", "-f", self.tarball], abandonOnFailure=False) # a simple shell script to gather all cleanup tweaks... # doing them one by one just complicate the stuff # and mess up the stdio log def _getCleanupCommand(self): """also used by tests for expectations""" return textwrap.dedent("""\ set -v if [ -d .repo/manifests ] then # repo just refuse to run if manifest is messed up # so ensure we are in a known state cd .repo/manifests rm -f .git/index.lock git fetch origin git reset --hard remotes/origin/%(manifestBranch)s git config branch.default.merge %(manifestBranch)s cd .. ln -sf manifests/%(manifestFile)s manifest.xml cd .. 
fi repo forall -c rm -f .git/index.lock repo forall -c git clean -f -d -x 2>/dev/null repo forall -c git reset --hard HEAD 2>/dev/null rm -f %(workdir)s/.repo/project.list """) % dict(manifestBranch=self.manifestBranch, manifestFile=self.manifestFile, workdir=self.workdir) def doCleanup(self): command = self._getCleanupCommand() return self._Cmd(["bash", "-c", command], abandonOnFailure=False) buildbot-3.4.0/master/buildbot/steps/source/svn.py000066400000000000000000000417261413250514000222440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re import xml.dom.minidom import xml.parsers.expat from urllib.parse import quote as urlquote from urllib.parse import unquote as urlunquote from urllib.parse import urlparse from urllib.parse import urlunparse from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.config import ConfigErrors from buildbot.interfaces import WorkerSetupError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.steps.source.base import Source class SVN(Source): """I perform Subversion checkout/update operations.""" name = 'svn' renderables = ['repourl', 'password'] possible_methods = ('clean', 'fresh', 'clobber', 'copy', 'export', None) def __init__(self, repourl=None, mode='incremental', method=None, username=None, password=None, extra_args=None, keep_on_purge=None, depth=None, preferLastChangedRev=False, **kwargs): self.repourl = repourl self.username = username self.password = password self.extra_args = extra_args self.keep_on_purge = keep_on_purge or [] self.depth = depth self.method = method self.mode = mode self.preferLastChangedRev = preferLastChangedRev super().__init__(**kwargs) errors = [] if not self._hasAttrGroupMember('mode', self.mode): errors.append("mode {} is not one of {}".format(self.mode, self._listAttrGroupMembers('mode'))) if self.method not in self.possible_methods: errors.append("method {} is not one of {}".format(self.method, self.possible_methods)) if repourl is None: errors.append("you must provide repourl") if errors: raise ConfigErrors(errors) @defer.inlineCallbacks def run_vc(self, branch, revision, patch): self.revision = revision self.method = self._getMethod() self.stdio_log = yield self.addLogForRemoteCommands("stdio") # if the version is new enough, and the password is set, then obfuscate # it if self.password is not None: if not self.workerVersionIsOlderThan('shell', '2.16'): self.password = 
('obfuscated', self.password, 'XXXXXX') else: log.msg("Worker does not understand obfuscation; " "svn password will be logged") installed = yield self.checkSvn() if not installed: raise WorkerSetupError("SVN is not installed on worker") patched = yield self.sourcedirIsPatched() if patched: yield self.purge(False) yield self._getAttrGroupMember('mode', self.mode)() if patch: yield self.patch(patch) res = yield self.parseGotRevision() return res @defer.inlineCallbacks def mode_full(self): if self.method == 'clobber': yield self.clobber() return elif self.method in ['copy', 'export']: yield self.copy() return updatable = yield self._sourcedirIsUpdatable() if not updatable: # blow away the old (un-updatable) directory and checkout yield self.clobber() elif self.method == 'clean': yield self.clean() elif self.method == 'fresh': yield self.fresh() @defer.inlineCallbacks def mode_incremental(self): updatable = yield self._sourcedirIsUpdatable() if not updatable: # blow away the old (un-updatable) directory and checkout yield self.clobber() else: # otherwise, do an update command = ['update'] if self.revision: command.extend(['--revision', str(self.revision)]) yield self._dovccmd(command) @defer.inlineCallbacks def clobber(self): yield self.runRmdir(self.workdir, timeout=self.timeout) yield self._checkout() @defer.inlineCallbacks def fresh(self): yield self.purge(True) cmd = ['update'] if self.revision: cmd.extend(['--revision', str(self.revision)]) yield self._dovccmd(cmd) @defer.inlineCallbacks def clean(self): yield self.purge(False) cmd = ['update'] if self.revision: cmd.extend(['--revision', str(self.revision)]) yield self._dovccmd(cmd) @defer.inlineCallbacks def copy(self): yield self.runRmdir(self.workdir, timeout=self.timeout) checkout_dir = 'source' if self.codebase: checkout_dir = self.build.path_module.join( checkout_dir, self.codebase) # temporarily set workdir = checkout_dir and do an incremental checkout try: old_workdir = self.workdir self.workdir = 
checkout_dir yield self.mode_incremental() finally: self.workdir = old_workdir self.workdir = old_workdir # if we're copying, copy; otherwise, export from source to build if self.method == 'copy': cmd = remotecommand.RemoteCommand('cpdir', {'fromdir': checkout_dir, 'todir': self.workdir, 'logEnviron': self.logEnviron}) else: export_cmd = ['svn', 'export'] if self.revision: export_cmd.extend(["--revision", str(self.revision)]) if self.username: export_cmd.extend(['--username', self.username]) if self.password is not None: export_cmd.extend(['--password', self.password]) if self.extra_args: export_cmd.extend(self.extra_args) export_cmd.extend([checkout_dir, self.workdir]) cmd = remotecommand.RemoteShellCommand('', export_cmd, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.didFail(): raise buildstep.BuildStepFailed() @defer.inlineCallbacks def _dovccmd(self, command, collectStdout=False, collectStderr=False, abandonOnFailure=True): assert command, "No command specified" command.extend(['--non-interactive', '--no-auth-cache']) if self.username: command.extend(['--username', self.username]) if self.password is not None: command.extend(['--password', self.password]) if self.depth: command.extend(['--depth', self.depth]) if self.extra_args: command.extend(self.extra_args) cmd = remotecommand.RemoteShellCommand(self.workdir, ['svn'] + command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout, collectStderr=collectStderr) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.didFail() and abandonOnFailure: log.msg("Source step failed while running command {}".format(cmd)) raise buildstep.BuildStepFailed() if collectStdout and collectStderr: return (cmd.stdout, cmd.stderr) elif collectStdout: return cmd.stdout elif collectStderr: return cmd.stderr return cmd.rc def _getMethod(self): if self.method is not None and self.mode != 
'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' return None @defer.inlineCallbacks def _sourcedirIsUpdatable(self): # first, perform a stat to ensure that this is really an svn directory res = yield self.pathExists(self.build.path_module.join(self.workdir, '.svn')) if not res: return False # then run 'svn info --xml' to check that the URL matches our repourl stdout, stderr = yield self._dovccmd(['info', '--xml'], collectStdout=True, collectStderr=True, abandonOnFailure=False) # svn: E155037: Previous operation has not finished; run 'cleanup' if # it was interrupted if 'E155037:' in stderr: return False try: stdout_xml = xml.dom.minidom.parseString(stdout) extractedurl = stdout_xml.getElementsByTagName( 'url')[0].firstChild.nodeValue except xml.parsers.expat.ExpatError as e: yield self.stdio_log.addHeader("Corrupted xml, aborting step") raise buildstep.BuildStepFailed() from e return extractedurl == self.svnUriCanonicalize(self.repourl) @defer.inlineCallbacks def parseGotRevision(self): # if this was a full/export, then we need to check svnversion in the # *source* directory, not the build directory svnversion_dir = self.workdir if self.mode == 'full' and self.method == 'export': svnversion_dir = 'source' cmd = remotecommand.RemoteShellCommand(svnversion_dir, ['svn', 'info', '--xml'], env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=True) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) stdout = cmd.stdout try: stdout_xml = xml.dom.minidom.parseString(stdout) except xml.parsers.expat.ExpatError as e: yield self.stdio_log.addHeader("Corrupted xml, aborting step") raise buildstep.BuildStepFailed() from e revision = None if self.preferLastChangedRev: try: revision = stdout_xml.getElementsByTagName( 'commit')[0].attributes['revision'].value except (KeyError, IndexError): msg = ("SVN.parseGotRevision unable to detect Last Changed Rev 
in" " output of svn info") log.msg(msg) # fall through and try to get 'Revision' instead if revision is None: try: revision = stdout_xml.getElementsByTagName( 'entry')[0].attributes['revision'].value except (KeyError, IndexError) as e: msg = ("SVN.parseGotRevision unable to detect revision in" " output of svn info") log.msg(msg) raise buildstep.BuildStepFailed() from e yield self.stdio_log.addHeader("Got SVN revision {}".format(revision)) self.updateSourceProperty('got_revision', revision) return cmd.rc @defer.inlineCallbacks def purge(self, ignore_ignores): """Delete everything that shown up on status.""" command = ['status', '--xml'] if ignore_ignores: command.append('--no-ignore') stdout = yield self._dovccmd(command, collectStdout=True) files = [] for filename in self.getUnversionedFiles(stdout, self.keep_on_purge): filename = self.build.path_module.join(self.workdir, filename) files.append(filename) if files: if self.workerVersionIsOlderThan('rmdir', '2.14'): rc = yield self.removeFiles(files) else: rc = yield self.runRmdir(files, abandonOnFailure=False, timeout=self.timeout) if rc != 0: log.msg("Failed removing files") raise buildstep.BuildStepFailed() @staticmethod def getUnversionedFiles(xmlStr, keep_on_purge): try: result_xml = xml.dom.minidom.parseString(xmlStr) except xml.parsers.expat.ExpatError as e: log.err("Corrupted xml, aborting step") raise buildstep.BuildStepFailed() from e for entry in result_xml.getElementsByTagName('entry'): (wc_status,) = entry.getElementsByTagName('wc-status') if wc_status.getAttribute('item') == 'external': continue if wc_status.getAttribute('item') == 'missing': continue filename = entry.getAttribute('path') if filename in keep_on_purge or filename == '': continue yield filename @defer.inlineCallbacks def removeFiles(self, files): for filename in files: res = yield self.runRmdir(filename, abandonOnFailure=False, timeout=self.timeout) if res: return res return 0 @defer.inlineCallbacks def checkSvn(self): cmd = 
remotecommand.RemoteShellCommand(self.workdir, ['svn', '--version'], env=self.env, logEnviron=self.logEnviron, timeout=self.timeout) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) return cmd.rc == 0 def computeSourceRevision(self, changes): if not changes or None in [c.revision for c in changes]: return None lastChange = max([int(c.revision) for c in changes]) return lastChange @staticmethod def svnUriCanonicalize(uri): collapse = re.compile(r'([^/]+/\.\./?|/\./|//|/\.$|/\.\.$|^/\.\.)') server_authority = re.compile(r'^(?:([^@]+)@)?([^:]+)(?::(.+))?$') default_port = {'http': '80', 'https': '443', 'svn': '3690'} relative_schemes = ['http', 'https', 'svn'] def quote(uri): return urlquote(uri, "!$&'()*+,-./:=@_~", encoding="latin-1") if not uri or uri == '/': return uri (scheme, authority, path, parameters, query, fragment) = urlparse(uri) scheme = scheme.lower() if authority: mo = server_authority.match(authority) if not mo: return uri # give up userinfo, host, port = mo.groups() if host[-1] == '.': host = host[:-1] authority = host.lower() if userinfo: authority = "{}@{}".format(userinfo, authority) if port and port != default_port.get(scheme, None): authority = "{}:{}".format(authority, port) if scheme in relative_schemes: last_path = path while True: path = collapse.sub('/', path, 1) if last_path == path: break last_path = path path = quote(urlunquote(path)) canonical_uri = urlunparse( (scheme, authority, path, parameters, query, fragment)) if canonical_uri == '/': return canonical_uri elif canonical_uri[-1] == '/' and canonical_uri[-2] != '/': return canonical_uri[:-1] return canonical_uri @defer.inlineCallbacks def _checkout(self): checkout_cmd = ['checkout', self.repourl, '.'] if self.revision: checkout_cmd.extend(["--revision", str(self.revision)]) if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True res = yield self._dovccmd(checkout_cmd, abandonOnFailure=abandonOnFailure) if self.retry: if self.stopped or res == 
0: return delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self.runRmdir(self.workdir, timeout=self.timeout)) df.addCallback(lambda _: self._checkout()) reactor.callLater(delay, df.callback, None) yield df buildbot-3.4.0/master/buildbot/steps/subunit.py000066400000000000000000000132131413250514000216150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import io from unittest import TestResult from twisted.internet import defer from buildbot.process import buildstep from buildbot.process import logobserver from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import Results class SubunitLogObserver(logobserver.LogLineObserver, TestResult): """Observe a log that may contain subunit output. This class extends TestResult to receive the callbacks from the subunit parser in the most direct fashion. 
""" def __init__(self): super().__init__() try: from subunit import TestProtocolServer, PROGRESS_CUR, PROGRESS_SET from subunit import PROGRESS_PUSH, PROGRESS_POP except ImportError as e: raise ImportError("subunit is not importable, but is required for " "SubunitLogObserver support.") from e self.PROGRESS_CUR = PROGRESS_CUR self.PROGRESS_SET = PROGRESS_SET self.PROGRESS_PUSH = PROGRESS_PUSH self.PROGRESS_POP = PROGRESS_POP self.warningio = io.BytesIO() self.protocol = TestProtocolServer(self, self.warningio) self.skips = [] self.seen_tags = set() # don't yet know what tags does in subunit def outLineReceived(self, line): # Impedance mismatch: subunit wants lines, observers get lines-no\n # Note that observers get already decoded lines whereas protocol wants bytes self.protocol.lineReceived(line.encode('utf-8') + b'\n') def errLineReceived(self, line): # Same note as in outLineReceived applies self.protocol.lineReceived(line.encode('utf-8') + b'\n') def stopTest(self, test): super().stopTest(test) self.step.setProgress('tests', self.testsRun) def addSkip(self, test, detail): if hasattr(TestResult, 'addSkip'): super().addSkip(test, detail) else: self.skips.append((test, detail)) def addError(self, test, err): super().addError(test, err) self.issue(test, err) def addFailure(self, test, err): super().addFailure(test, err) self.issue(test, err) def issue(self, test, err): """An issue - failing, erroring etc test.""" self.step.setProgress('tests failed', len(self.failures) + len(self.errors)) def tags(self, new_tags, gone_tags): """Accumulate the seen tags.""" self.seen_tags.update(new_tags) class SubunitShellCommand(buildstep.ShellMixin, buildstep.BuildStep): name = 'shell' """A ShellCommand that sniffs subunit output. 
""" def __init__(self, failureOnNoTests=False, *args, **kwargs): kwargs = self.setupShellMixin(kwargs) super().__init__(*args, **kwargs) self.failureOnNoTests = failureOnNoTests self._observer = SubunitLogObserver() self.addLogObserver('stdio', self._observer) self.progressMetrics = self.progressMetrics + ('tests', 'tests failed') @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) stdio_log = yield self.getLog('stdio') yield stdio_log.finish() problems = "" for test, err in self._observer.errors + self._observer.failures: problems += "{}\n{}".format(test.id(), err) if problems: yield self.addCompleteLog("problems", problems) warnings = self._observer.warningio.getvalue() if warnings: yield self.addCompleteLog("warnings", warnings) failures = len(self._observer.failures) errors = len(self._observer.errors) total = self._observer.testsRun if cmd.didFail(): return FAILURE if failures + errors > 0: return FAILURE if not total and self.failureOnNoTests: return FAILURE return SUCCESS def getResultSummary(self): failures = len(self._observer.failures) errors = len(self._observer.errors) skips = len(self._observer.skips) total = self._observer.testsRun count = failures + errors summary = self.name if not count: if total: summary += " {} {} passed".format(total, total == 1 and "test" or "tests") else: summary += " no tests run" else: summary += " Total {} test(s)".format(total) if failures: summary += " {} {}".format(failures, failures == 1 and "failure" or "failures") if errors: summary += " {} {}".format(errors, errors == 1 and "error" or "errors") if skips: summary += " {} {}".format(skips, skips == 1 and "skip" or "skips") # TODO: expectedFailures/unexpectedSuccesses if self.results != SUCCESS: summary += ' ({})'.format(Results[self.results]) return {'step': summary} buildbot-3.4.0/master/buildbot/steps/transfer.py000066400000000000000000000466621413250514000217660ustar00rootroot00000000000000# This file is part of 
Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import os import stat from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.interfaces import WorkerSetupError from buildbot.process import remotecommand from buildbot.process import remotetransfer from buildbot.process.buildstep import FAILURE from buildbot.process.buildstep import SKIPPED from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import BuildStep from buildbot.steps.worker import CompositeStepMixin from buildbot.util import flatten def makeStatusRemoteCommand(step, remote_command, args): self = remotecommand.RemoteCommand( remote_command, args, decodeRC={None: SUCCESS, 0: SUCCESS}) self.useLog(step.stdio_log) return self class _TransferBuildStep(BuildStep): """ Base class for FileUpload and FileDownload to factor out common functionality. """ renderables = ['workdir'] haltOnFailure = True flunkOnFailure = True def __init__(self, workdir=None, **buildstep_kwargs): super().__init__(**buildstep_kwargs) self.workdir = workdir @defer.inlineCallbacks def runTransferCommand(self, cmd, writer=None): # Run a transfer step, add a callback to extract the command status, # add an error handler that cancels the writer. 
self.cmd = cmd try: yield self.runCommand(cmd) return cmd.results() finally: if writer: writer.cancel() @defer.inlineCallbacks def interrupt(self, reason): yield self.addCompleteLog('interrupt', str(reason)) if self.cmd: yield self.cmd.interrupt(reason) return None class FileUpload(_TransferBuildStep): name = 'upload' renderables = [ 'masterdest', 'url', 'urlText', 'workersrc', ] def __init__(self, workersrc=None, masterdest=None, workdir=None, maxsize=None, blocksize=256 * 1024, mode=None, keepstamp=False, url=None, urlText=None, **buildstep_kwargs): # Emulate that first two arguments are positional. if workersrc is None or masterdest is None: raise TypeError("__init__() takes at least 3 arguments") super().__init__(workdir=workdir, **buildstep_kwargs) self.workersrc = workersrc self.masterdest = masterdest self.maxsize = maxsize self.blocksize = blocksize if not isinstance(mode, (int, type(None))): config.error( 'mode must be an integer or None') self.mode = mode self.keepstamp = keepstamp self.url = url self.urlText = urlText @defer.inlineCallbacks def run(self): self.checkWorkerHasCommand("uploadFile") self.stdio_log = yield self.addLog("stdio") source = self.workersrc masterdest = self.masterdest # we rely upon the fact that the buildmaster runs chdir'ed into its # basedir to make sure that relative paths in masterdest are expanded # properly. TODO: maybe pass the master's basedir all the way down # into the BuildStep so we can do this better. 
masterdest = os.path.expanduser(masterdest) log.msg("FileUpload started, from worker %r to master %r" % (source, masterdest)) if self.description is None: self.description = ['uploading {}'.format(os.path.basename(source))] if self.descriptionDone is None: self.descriptionDone = self.description if self.url is not None: urlText = self.urlText if urlText is None: urlText = os.path.basename(masterdest) yield self.addURL(urlText, self.url) # we use maxsize to limit the amount of data on both sides fileWriter = remotetransfer.FileWriter( masterdest, self.maxsize, self.mode) if self.keepstamp and self.workerVersionIsOlderThan("uploadFile", "2.13"): m = (("This worker ({}) does not support preserving timestamps. " "Please upgrade the worker.").format(self.build.workername)) raise WorkerSetupError(m) # default arguments args = { 'workdir': self.workdir, 'writer': fileWriter, 'maxsize': self.maxsize, 'blocksize': self.blocksize, 'keepstamp': self.keepstamp, } if self.workerVersionIsOlderThan('uploadFile', '3.0'): args['slavesrc'] = source else: args['workersrc'] = source cmd = makeStatusRemoteCommand(self, 'uploadFile', args) res = yield self.runTransferCommand(cmd, fileWriter) log.msg("File '{}' upload finished with results {}".format( os.path.basename(self.workersrc), str(res))) return res class DirectoryUpload(_TransferBuildStep): name = 'upload' renderables = [ 'workersrc', 'masterdest', 'url', 'urlText' ] def __init__(self, workersrc=None, masterdest=None, workdir=None, maxsize=None, blocksize=16 * 1024, compress=None, url=None, urlText=None, **buildstep_kwargs ): # Emulate that first two arguments are positional. 
if workersrc is None or masterdest is None: raise TypeError("__init__() takes at least 3 arguments") super().__init__(workdir=workdir, **buildstep_kwargs) self.workersrc = workersrc self.masterdest = masterdest self.maxsize = maxsize self.blocksize = blocksize if compress not in (None, 'gz', 'bz2'): config.error( "'compress' must be one of None, 'gz', or 'bz2'") self.compress = compress self.url = url self.urlText = urlText @defer.inlineCallbacks def run(self): self.checkWorkerHasCommand("uploadDirectory") self.stdio_log = yield self.addLog("stdio") source = self.workersrc masterdest = self.masterdest # we rely upon the fact that the buildmaster runs chdir'ed into its # basedir to make sure that relative paths in masterdest are expanded # properly. TODO: maybe pass the master's basedir all the way down # into the BuildStep so we can do this better. masterdest = os.path.expanduser(masterdest) log.msg("DirectoryUpload started, from worker {} to master {}".format(repr(source), repr(masterdest))) self.descriptionDone = "uploading {}".format(os.path.basename(source)) if self.url is not None: urlText = self.urlText if urlText is None: urlText = os.path.basename(os.path.normpath(masterdest)) yield self.addURL(urlText, self.url) # we use maxsize to limit the amount of data on both sides dirWriter = remotetransfer.DirectoryWriter( masterdest, self.maxsize, self.compress, 0o600) # default arguments args = { 'workdir': self.workdir, 'writer': dirWriter, 'maxsize': self.maxsize, 'blocksize': self.blocksize, 'compress': self.compress } if self.workerVersionIsOlderThan('uploadDirectory', '3.0'): args['slavesrc'] = source else: args['workersrc'] = source cmd = makeStatusRemoteCommand(self, 'uploadDirectory', args) res = yield self.runTransferCommand(cmd, dirWriter) return res class MultipleFileUpload(_TransferBuildStep, CompositeStepMixin): name = 'upload' logEnviron = False renderables = [ 'workersrcs', 'masterdest', 'url', 'urlText' ] def __init__(self, workersrcs=None, 
masterdest=None, workdir=None, maxsize=None, blocksize=16 * 1024, glob=False, mode=None, compress=None, keepstamp=False, url=None, urlText=None, **buildstep_kwargs): # Emulate that first two arguments are positional. if workersrcs is None or masterdest is None: raise TypeError("__init__() takes at least 3 arguments") super().__init__(workdir=workdir, **buildstep_kwargs) self.workersrcs = workersrcs self.masterdest = masterdest self.maxsize = maxsize self.blocksize = blocksize if not isinstance(mode, (int, type(None))): config.error( 'mode must be an integer or None') self.mode = mode if compress not in (None, 'gz', 'bz2'): config.error( "'compress' must be one of None, 'gz', or 'bz2'") self.compress = compress self.glob = glob self.keepstamp = keepstamp self.url = url self.urlText = urlText def uploadFile(self, source, masterdest): fileWriter = remotetransfer.FileWriter( masterdest, self.maxsize, self.mode) args = { 'workdir': self.workdir, 'writer': fileWriter, 'maxsize': self.maxsize, 'blocksize': self.blocksize, 'keepstamp': self.keepstamp, } if self.workerVersionIsOlderThan('uploadFile', '3.0'): args['slavesrc'] = source else: args['workersrc'] = source cmd = makeStatusRemoteCommand(self, 'uploadFile', args) return self.runTransferCommand(cmd, fileWriter) def uploadDirectory(self, source, masterdest): dirWriter = remotetransfer.DirectoryWriter( masterdest, self.maxsize, self.compress, 0o600) args = { 'workdir': self.workdir, 'writer': dirWriter, 'maxsize': self.maxsize, 'blocksize': self.blocksize, 'compress': self.compress } if self.workerVersionIsOlderThan('uploadDirectory', '3.0'): args['slavesrc'] = source else: args['workersrc'] = source cmd = makeStatusRemoteCommand(self, 'uploadDirectory', args) return self.runTransferCommand(cmd, dirWriter) @defer.inlineCallbacks def startUpload(self, source, destdir): masterdest = os.path.join(destdir, os.path.basename(source)) args = { 'file': source, 'workdir': self.workdir } cmd = makeStatusRemoteCommand(self, 
'stat', args) yield self.runCommand(cmd) if cmd.rc != 0: msg = 'File {}/{} not available at worker'.format(self.workdir, source) yield self.addCompleteLog('stderr', msg) return FAILURE s = cmd.updates['stat'][-1] if stat.S_ISDIR(s[stat.ST_MODE]): result = yield self.uploadDirectory(source, masterdest) elif stat.S_ISREG(s[stat.ST_MODE]): result = yield self.uploadFile(source, masterdest) else: msg = '{} is neither a regular file, nor a directory'.format(source) yield self.addCompleteLog('stderr', msg) return FAILURE yield self.uploadDone(result, source, masterdest) return result def uploadDone(self, result, source, masterdest): pass @defer.inlineCallbacks def allUploadsDone(self, result, sources, masterdest): if self.url is not None: urlText = self.urlText if urlText is None: urlText = os.path.basename(os.path.normpath(masterdest)) yield self.addURL(urlText, self.url) @defer.inlineCallbacks def run(self): self.checkWorkerHasCommand("uploadDirectory") self.checkWorkerHasCommand("uploadFile") self.checkWorkerHasCommand("stat") self.stdio_log = yield self.addLog("stdio") masterdest = os.path.expanduser(self.masterdest) sources = self.workersrcs if isinstance(self.workersrcs, list) else [self.workersrcs] if self.keepstamp and self.workerVersionIsOlderThan("uploadFile", "2.13"): m = (("This worker ({}) does not support preserving timestamps. 
" "Please upgrade the worker.").format(self.build.workername)) raise WorkerSetupError(m) if not sources: return SKIPPED if self.glob: results = yield defer.gatherResults([ self.runGlob(os.path.join(self.workdir, source), abandonOnFailure=False) for source in sources ]) sources = [self.workerPathToMasterPath(p) for p in flatten(results)] log.msg("MultipleFileUpload started, from worker {!r} to master {!r}".format(sources, masterdest)) self.descriptionDone = ['uploading', str(len(sources)), 'file' if len(sources) == 1 else 'files'] if not sources: result = SKIPPED else: result = SUCCESS for source in sources: result_single = yield self.startUpload(source, masterdest) if result_single == FAILURE: result = FAILURE break yield self.allUploadsDone(result, sources, masterdest) return result class FileDownload(_TransferBuildStep): name = 'download' renderables = ['mastersrc', 'workerdest'] def __init__(self, mastersrc, workerdest=None, workdir=None, maxsize=None, blocksize=16 * 1024, mode=None, **buildstep_kwargs): # Emulate that first two arguments are positional. 
if workerdest is None: raise TypeError("__init__() takes at least 3 arguments") super().__init__(workdir=workdir, **buildstep_kwargs) self.mastersrc = mastersrc self.workerdest = workerdest self.maxsize = maxsize self.blocksize = blocksize if not isinstance(mode, (int, type(None))): config.error( 'mode must be an integer or None') self.mode = mode @defer.inlineCallbacks def run(self): self.checkWorkerHasCommand("downloadFile") self.stdio_log = yield self.addLog("stdio") # we are currently in the buildmaster's basedir, so any non-absolute # paths will be interpreted relative to that source = os.path.expanduser(self.mastersrc) workerdest = self.workerdest log.msg("FileDownload started, from master %r to worker %r" % (source, workerdest)) self.descriptionDone = ["downloading to", os.path.basename(workerdest)] # setup structures for reading the file try: fp = open(source, 'rb') except IOError: # if file does not exist, bail out with an error yield self.addCompleteLog('stderr', 'File {!r} not available at master'.format(source)) return FAILURE fileReader = remotetransfer.FileReader(fp) # default arguments args = { 'maxsize': self.maxsize, 'reader': fileReader, 'blocksize': self.blocksize, 'workdir': self.workdir, 'mode': self.mode, } if self.workerVersionIsOlderThan('downloadFile', '3.0'): args['slavedest'] = workerdest else: args['workerdest'] = workerdest cmd = makeStatusRemoteCommand(self, 'downloadFile', args) res = yield self.runTransferCommand(cmd) return res class StringDownload(_TransferBuildStep): name = 'string_download' renderables = ['workerdest', 's'] def __init__(self, s, workerdest=None, workdir=None, maxsize=None, blocksize=16 * 1024, mode=None, **buildstep_kwargs): # Emulate that first two arguments are positional. 
if workerdest is None: raise TypeError("__init__() takes at least 3 arguments") super().__init__(workdir=workdir, **buildstep_kwargs) self.s = s self.workerdest = workerdest self.maxsize = maxsize self.blocksize = blocksize if not isinstance(mode, (int, type(None))): config.error("StringDownload step's mode must be an integer or None, got '{}'".format( mode)) self.mode = mode @defer.inlineCallbacks def run(self): # we use 'downloadFile' remote command on the worker self.checkWorkerHasCommand("downloadFile") self.stdio_log = yield self.addLog("stdio") # we are currently in the buildmaster's basedir, so any non-absolute # paths will be interpreted relative to that workerdest = self.workerdest log.msg("StringDownload started, from master to worker %r" % workerdest) self.descriptionDone = ["downloading to", os.path.basename(workerdest)] # setup structures for reading the file fileReader = remotetransfer.StringFileReader(self.s) # default arguments args = { 'maxsize': self.maxsize, 'reader': fileReader, 'blocksize': self.blocksize, 'workdir': self.workdir, 'mode': self.mode, } if self.workerVersionIsOlderThan('downloadFile', '3.0'): args['slavedest'] = workerdest else: args['workerdest'] = workerdest cmd = makeStatusRemoteCommand(self, 'downloadFile', args) res = yield self.runTransferCommand(cmd) return res class JSONStringDownload(StringDownload): name = "json_download" def __init__(self, o, workerdest=None, **buildstep_kwargs): # Emulate that first two arguments are positional. if workerdest is None: raise TypeError("__init__() takes at least 3 arguments") if 's' in buildstep_kwargs: del buildstep_kwargs['s'] s = json.dumps(o) super().__init__(s=s, workerdest=workerdest, **buildstep_kwargs) class JSONPropertiesDownload(StringDownload): name = "json_properties_download" def __init__(self, workerdest=None, **buildstep_kwargs): # Emulate that first two arguments are positional. 
if workerdest is None: raise TypeError("__init__() takes at least 2 arguments") if 's' in buildstep_kwargs: del buildstep_kwargs['s'] super().__init__(s=None, workerdest=workerdest, **buildstep_kwargs) @defer.inlineCallbacks def run(self): properties = self.build.getProperties() props = {} for key, value, source in properties.asList(): props[key] = value self.s = json.dumps(dict( properties=props, sourcestamps=[ss.asDict() for ss in self.build.getAllSourceStamps()], ), ) res = yield super().run() return res buildbot-3.4.0/master/buildbot/steps/trigger.py000066400000000000000000000356051413250514000216000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.interfaces import IRenderable from buildbot.interfaces import ITriggerableScheduler from buildbot.process.buildstep import CANCELLED from buildbot.process.buildstep import EXCEPTION from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import BuildStep from buildbot.process.properties import Properties from buildbot.process.properties import Property from buildbot.process.results import ALL_RESULTS from buildbot.process.results import statusToString from buildbot.process.results import worst_status from buildbot.reporters.utils import getURLForBuild from buildbot.reporters.utils import getURLForBuildrequest class Trigger(BuildStep): name = "trigger" renderables = [ 'alwaysUseLatest', 'parent_relationship', 'schedulerNames', 'set_properties', 'sourceStamps', 'updateSourceStamp', 'waitForFinish' ] flunkOnFailure = True def __init__(self, schedulerNames=None, sourceStamp=None, sourceStamps=None, updateSourceStamp=None, alwaysUseLatest=False, waitForFinish=False, set_properties=None, copy_properties=None, parent_relationship="Triggered from", unimportantSchedulerNames=None, **kwargs): if schedulerNames is None: schedulerNames = [] if unimportantSchedulerNames is None: unimportantSchedulerNames = [] if not schedulerNames: config.error( "You must specify a scheduler to trigger") if (sourceStamp or sourceStamps) and (updateSourceStamp is not None): config.error( "You can't specify both sourceStamps and updateSourceStamp") if (sourceStamp or sourceStamps) and alwaysUseLatest: config.error( "You can't specify both sourceStamps and alwaysUseLatest") if alwaysUseLatest and (updateSourceStamp is not None): config.error( "You can't specify both alwaysUseLatest and updateSourceStamp" ) def hasRenderable(l): for s in l: if IRenderable.providedBy(s): return True return False if not hasRenderable(schedulerNames) and 
not hasRenderable(unimportantSchedulerNames): if not set(schedulerNames).issuperset(set(unimportantSchedulerNames)): config.error( "unimportantSchedulerNames must be a subset of schedulerNames" ) self.schedulerNames = schedulerNames self.unimportantSchedulerNames = unimportantSchedulerNames self.sourceStamps = sourceStamps or [] if sourceStamp: self.sourceStamps.append(sourceStamp) if updateSourceStamp is not None: self.updateSourceStamp = updateSourceStamp else: self.updateSourceStamp = not (alwaysUseLatest or self.sourceStamps) self.alwaysUseLatest = alwaysUseLatest self.waitForFinish = waitForFinish if set_properties is None: set_properties = {} if copy_properties is None: copy_properties = [] properties = {} properties.update(set_properties) for i in copy_properties: properties[i] = Property(i) self.set_properties = properties self.parent_relationship = parent_relationship self.running = False self.ended = False self.brids = [] self.triggeredNames = None self.waitForFinishDeferred = None self._result_list = [] super().__init__(**kwargs) def interrupt(self, reason): # We cancel the buildrequests, as the data api handles # both cases: # - build started: stop is sent, # - build not created yet: related buildrequests are set to CANCELLED. # Note that there is an identified race condition though (more details # are available at buildbot.data.buildrequests). 
for brid in self.brids: self.master.data.control("cancel", {'reason': 'parent build was interrupted'}, ("buildrequests", brid)) if self.running and not self.ended: self.ended = True # if we are interrupted because of a connection lost, we interrupt synchronously if self.build.conn is None and self.waitForFinishDeferred is not None: self.waitForFinishDeferred.cancel() # Create the properties that are used for the trigger def createTriggerProperties(self, properties): # make a new properties object from a dict rendered by the old # properties object trigger_properties = Properties() trigger_properties.update(properties, "Trigger") return trigger_properties def getSchedulerByName(self, name): # we use the fact that scheduler_manager is a multiservice, with schedulers as childs # this allow to quickly find schedulers instance by name schedulers = self.master.scheduler_manager.namedServices if name not in schedulers: raise ValueError("unknown triggered scheduler: %r" % (name,)) sch = schedulers[name] if not ITriggerableScheduler.providedBy(sch): raise ValueError( "triggered scheduler is not ITriggerableScheduler: %r" % (name,)) return sch # This customization endpoint allows users to dynamically select which # scheduler and properties to trigger def getSchedulersAndProperties(self): return [{ 'sched_name': sched, 'props_to_set': self.set_properties, 'unimportant': sched in self.unimportantSchedulerNames} for sched in self.schedulerNames] def prepareSourcestampListForTrigger(self): if self.sourceStamps: ss_for_trigger = {} for ss in self.sourceStamps: codebase = ss.get('codebase', '') assert codebase not in ss_for_trigger, "codebase specified multiple times" ss_for_trigger[codebase] = ss trigger_values = [ss_for_trigger[k] for k in sorted(ss_for_trigger.keys())] return trigger_values if self.alwaysUseLatest: return [] # start with the sourcestamps from current build ss_for_trigger = {} objs_from_build = self.build.getAllSourceStamps() for ss in objs_from_build: 
ss_for_trigger[ss.codebase] = ss.asDict() # overrule revision in sourcestamps with got revision if self.updateSourceStamp: got = self.getAllGotRevisions() for codebase in ss_for_trigger: if codebase in got: ss_for_trigger[codebase]['revision'] = got[codebase] trigger_values = [ss_for_trigger[k] for k in sorted(ss_for_trigger.keys())] return trigger_values def getAllGotRevisions(self): all_got_revisions = self.getProperty('got_revision', {}) # For backwards compatibility all_got_revisions is a string if codebases # are not used. Convert to the default internal type (dict) if not isinstance(all_got_revisions, dict): all_got_revisions = {'': all_got_revisions} return all_got_revisions @defer.inlineCallbacks def worstStatus(self, overall_results, rclist, unimportant_brids): for was_cb, results in rclist: if isinstance(results, tuple): results, brids_dict = results # brids_dict.values() represents the list of brids kicked by a certain scheduler. # We want to ignore the result of ANY brid that was kicked off # by an UNimportant scheduler. 
if set(unimportant_brids).issuperset(set(brids_dict.values())): continue if not was_cb: yield self.addLogWithFailure(results) results = EXCEPTION overall_results = worst_status(overall_results, results) return overall_results @defer.inlineCallbacks def addBuildUrls(self, rclist): brids = {} for was_cb, results in rclist: if isinstance(results, tuple): results, brids = results builderNames = {} if was_cb: # errors were already logged in worstStatus for builderid, br in brids.items(): builds = yield self.master.db.builds.getBuilds(buildrequestid=br) for build in builds: builderid = build['builderid'] # When virtual builders are used, the builderid used for triggering # is not the same as the one that the build actually got if builderid not in builderNames: builderDict = yield self.master.data.get(("builders", builderid)) builderNames[builderid] = builderDict["name"] num = build['number'] url = getURLForBuild(self.master, builderid, num) yield self.addURL("{}: {} #{}".format(statusToString(build["results"]), builderNames[builderid], num), url) @defer.inlineCallbacks def _add_results(self, brid): @defer.inlineCallbacks def _is_buildrequest_complete(brid): buildrequest = yield self.master.db.buildrequests.getBuildRequest(brid) return buildrequest['complete'] event = ('buildrequests', str(brid), 'complete') yield self.master.mq.waitUntilEvent(event, lambda: _is_buildrequest_complete(brid)) builds = yield self.master.db.builds.getBuilds(buildrequestid=brid) for build in builds: self._result_list.append(build["results"]) self.updateSummary() @defer.inlineCallbacks def run(self): schedulers_and_props = yield self.getSchedulersAndProperties() schedulers_and_props_list = [] # To be back compatible we need to differ between old and new style # schedulers_and_props can either consist of 2 elements tuple or # dictionary for element in schedulers_and_props: if isinstance(element, dict): schedulers_and_props_list = schedulers_and_props break # Old-style back compatibility: Convert 
tuple to dict and make # it important d = { 'sched_name': element[0], 'props_to_set': element[1], 'unimportant': False } schedulers_and_props_list.append(d) # post process the schedulernames, and raw properties # we do this out of the loop, as this can result in errors schedulers_and_props = [( self.getSchedulerByName(entry_dict['sched_name']), self.createTriggerProperties(entry_dict['props_to_set']), entry_dict['unimportant']) for entry_dict in schedulers_and_props_list] ss_for_trigger = self.prepareSourcestampListForTrigger() dl = [] triggeredNames = [] results = SUCCESS self.running = True unimportant_brids = [] for sch, props_to_set, unimportant in schedulers_and_props: idsDeferred, resultsDeferred = sch.trigger( waited_for=self.waitForFinish, sourcestamps=ss_for_trigger, set_props=props_to_set, parent_buildid=self.build.buildid, parent_relationship=self.parent_relationship ) # we are not in a hurry of starting all in parallel and managing # the deferred lists, just let the db writes be serial. 
brids = {} try: bsid, brids = yield idsDeferred except Exception as e: yield self.addLogWithException(e) results = EXCEPTION if unimportant: unimportant_brids.extend(brids.values()) self.brids.extend(brids.values()) for brid in brids.values(): # put the url to the brids, so that we can have the status from # the beginning url = getURLForBuildrequest(self.master, brid) yield self.addURL("{} #{}".format(sch.name, brid), url) # No yield since we let this happen as the builds complete self._add_results(brid) dl.append(resultsDeferred) triggeredNames.append(sch.name) if self.ended: return CANCELLED self.triggeredNames = triggeredNames if self.waitForFinish: self.waitForFinishDeferred = defer.DeferredList(dl, consumeErrors=1) try: rclist = yield self.waitForFinishDeferred except defer.CancelledError: pass # we were interrupted, don't bother update status if self.ended: return CANCELLED yield self.addBuildUrls(rclist) results = yield self.worstStatus(results, rclist, unimportant_brids) else: # do something to handle errors for d in dl: d.addErrback(log.err, '(ignored) while invoking Triggerable schedulers:') return results def getResultSummary(self): if self.ended: return {'step': 'interrupted'} return {'step': self.getCurrentSummary()['step']} if self.triggeredNames else {} def getCurrentSummary(self): if not self.triggeredNames: return {'step': 'running'} summary = "" if self._result_list: for status in ALL_RESULTS: count = self._result_list.count(status) if count: summary = summary + ", {} {}".format(self._result_list.count(status), statusToString(status, count)) return {'step': 'triggered {}{}'.format(', '.join(self.triggeredNames), summary)} buildbot-3.4.0/master/buildbot/steps/vstudio.py000066400000000000000000000425371413250514000216340ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Visual studio steps import re from twisted.internet import defer from buildbot import config from buildbot.process import buildstep from buildbot.process import results from buildbot.process.logobserver import LogLineObserver class MSLogLineObserver(LogLineObserver): stdoutDelimiter = "\r\n" stderrDelimiter = "\r\n" _re_delimiter = re.compile(r'^(\d+>)?-{5}.+-{5}$') _re_file = re.compile(r'^(\d+>)?[^ ]+\.(cpp|c)$') _re_warning = re.compile(r' ?: warning [A-Z]+[0-9]+:') _re_error = re.compile(r' ?error ([A-Z]+[0-9]+)?\s?: ') nbFiles = 0 nbProjects = 0 nbWarnings = 0 nbErrors = 0 logwarnings = None logerrors = None def __init__(self, logwarnings, logerrors, **kwargs): super().__init__(**kwargs) self.logwarnings = logwarnings self.logerrors = logerrors def outLineReceived(self, line): if self._re_delimiter.search(line): self.nbProjects += 1 self.logwarnings.addStdout("{}\n".format(line)) self.logerrors.addStdout("{}\n".format(line)) self.step.setProgress('projects', self.nbProjects) elif self._re_file.search(line): self.nbFiles += 1 self.step.setProgress('files', self.nbFiles) elif self._re_warning.search(line): self.nbWarnings += 1 self.logwarnings.addStdout("{}\n".format(line)) self.step.setProgress('warnings', self.nbWarnings) elif self._re_error.search("{}\n".format(line)): # error has no progress 
indication self.nbErrors += 1 self.logerrors.addStderr("{}\n".format(line)) class VisualStudio(buildstep.ShellMixin, buildstep.BuildStep): # an *abstract* base class, which will not itself work as a buildstep name = "compile" description = "compiling" descriptionDone = "compile" progressMetrics = (buildstep.BuildStep.progressMetrics + ('projects', 'files', 'warnings',)) logobserver = None installdir = None default_installdir = None # One of build, clean or rebuild mode = "rebuild" projectfile = None config = None useenv = False project = None PATH = [] INCLUDE = [] LIB = [] renderables = ['projectfile', 'config', 'project', 'mode'] def __init__(self, installdir=None, mode="rebuild", projectfile=None, config='release', useenv=False, project=None, INCLUDE=None, LIB=None, PATH=None, **kwargs): if INCLUDE is None: INCLUDE = [] if LIB is None: LIB = [] if PATH is None: PATH = [] self.installdir = installdir self.mode = mode self.projectfile = projectfile self.config = config self.useenv = useenv self.project = project if INCLUDE: self.INCLUDE = INCLUDE self.useenv = True if LIB: self.LIB = LIB self.useenv = True if PATH: self.PATH = PATH kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) def add_env_path(self, name, value): """ concat a path for this name """ try: oldval = self.env[name] if not oldval.endswith(';'): oldval = oldval + ';' except KeyError: oldval = "" if not value.endswith(';'): value = value + ';' self.env[name] = oldval + value @defer.inlineCallbacks def setup_log_files(self): logwarnings = yield self.addLog("warnings") logerrors = yield self.addLog("errors") self.logobserver = MSLogLineObserver(logwarnings, logerrors) yield self.addLogObserver('stdio', self.logobserver) def setupEnvironment(self): if self.env is None: self.env = {} # setup the custom one, those one goes first for path in self.PATH: self.add_env_path("PATH", path) for path in self.INCLUDE: self.add_env_path("INCLUDE", path) for path in self.LIB: 
self.add_env_path("LIB", path) if not self.installdir: self.installdir = self.default_installdir def evaluate_result(self, cmd): self.setStatistic('projects', self.logobserver.nbProjects) self.setStatistic('files', self.logobserver.nbFiles) self.setStatistic('warnings', self.logobserver.nbWarnings) self.setStatistic('errors', self.logobserver.nbErrors) if cmd.didFail(): return results.FAILURE if self.logobserver.nbErrors > 0: return results.FAILURE if self.logobserver.nbWarnings > 0: return results.WARNINGS return results.SUCCESS @defer.inlineCallbacks def run(self): self.setupEnvironment() yield self.setup_log_files() cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) yield self.finish_logs() self.results = self.evaluate_result(cmd) return self.results def getResultSummary(self): description = 'compile {} projects {} files'.format(self.logobserver.nbProjects, self.logobserver.nbFiles) if self.logobserver.nbWarnings > 0: description += ' {} warnings'.format(self.logobserver.nbWarnings) if self.logobserver.nbErrors > 0: description += ' {} errors'.format(self.logobserver.nbErrors) if self.results != results.SUCCESS: description += ' ({})'.format(results.Results[self.results]) return {'step': description} @defer.inlineCallbacks def finish_logs(self): log = yield self.getLog("warnings") log.finish() log = yield self.getLog("errors") log.finish() class VC6(VisualStudio): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio' def setupEnvironment(self): super().setupEnvironment() # Root of Visual Developer Studio Common files. 
VSCommonDir = self.installdir + '\\Common' MSVCDir = self.installdir + '\\VC98' MSDevDir = VSCommonDir + '\\msdev98' self.add_env_path("PATH", MSDevDir + '\\BIN') self.add_env_path("PATH", MSVCDir + '\\BIN') self.add_env_path("PATH", VSCommonDir + '\\TOOLS\\WINNT') self.add_env_path("PATH", VSCommonDir + '\\TOOLS') self.add_env_path("INCLUDE", MSVCDir + '\\INCLUDE') self.add_env_path("INCLUDE", MSVCDir + '\\ATL\\INCLUDE') self.add_env_path("INCLUDE", MSVCDir + '\\MFC\\INCLUDE') self.add_env_path("LIB", MSVCDir + '\\LIB') self.add_env_path("LIB", MSVCDir + '\\MFC\\LIB') @defer.inlineCallbacks def run(self): command = [ "msdev", self.projectfile, "/MAKE" ] if self.project is not None: command.append(self.project + " - " + self.config) else: command.append("ALL - " + self.config) if self.mode == "rebuild": command.append("/REBUILD") elif self.mode == "clean": command.append("/CLEAN") else: command.append("/BUILD") if self.useenv: command.append("/USEENV") self.command = command res = yield super().run() return res class VC7(VisualStudio): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio .NET 2003' def setupEnvironment(self): super().setupEnvironment() VSInstallDir = self.installdir + '\\Common7\\IDE' VCInstallDir = self.installdir MSVCDir = self.installdir + '\\VC7' self.add_env_path("PATH", VSInstallDir) self.add_env_path("PATH", MSVCDir + '\\BIN') self.add_env_path("PATH", VCInstallDir + '\\Common7\\Tools') self.add_env_path("PATH", VCInstallDir + '\\Common7\\Tools\\bin') self.add_env_path("INCLUDE", MSVCDir + '\\INCLUDE') self.add_env_path("INCLUDE", MSVCDir + '\\ATLMFC\\INCLUDE') self.add_env_path("INCLUDE", MSVCDir + '\\PlatformSDK\\include') self.add_env_path("INCLUDE", VCInstallDir + '\\SDK\\v1.1\\include') self.add_env_path("LIB", MSVCDir + '\\LIB') self.add_env_path("LIB", MSVCDir + '\\ATLMFC\\LIB') self.add_env_path("LIB", MSVCDir + '\\PlatformSDK\\lib') self.add_env_path("LIB", VCInstallDir + '\\SDK\\v1.1\\lib') @defer.inlineCallbacks def 
run(self): command = [ "devenv.com", self.projectfile ] if self.mode == "rebuild": command.append("/Rebuild") elif self.mode == "clean": command.append("/Clean") else: command.append("/Build") command.append(self.config) if self.useenv: command.append("/UseEnv") if self.project is not None: command.append("/Project") command.append(self.project) self.command = command res = yield super().run() return res # alias VC7 as VS2003 VS2003 = VC7 class VC8(VC7): # Our ones arch = None default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 8' renderables = ['arch'] def __init__(self, arch="x86", **kwargs): self.arch = arch # always upcall ! super().__init__(**kwargs) def setupEnvironment(self): # Do not use super() here. We want to override VC7.setupEnvironment(). VisualStudio.setupEnvironment(self) VSInstallDir = self.installdir VCInstallDir = self.installdir + '\\VC' self.add_env_path("PATH", VSInstallDir + '\\Common7\\IDE') if self.arch == "x64": self.add_env_path("PATH", VCInstallDir + '\\BIN\\x86_amd64') self.add_env_path("PATH", VCInstallDir + '\\BIN') self.add_env_path("PATH", VSInstallDir + '\\Common7\\Tools') self.add_env_path("PATH", VSInstallDir + '\\Common7\\Tools\\bin') self.add_env_path("PATH", VCInstallDir + '\\PlatformSDK\\bin') self.add_env_path("PATH", VSInstallDir + '\\SDK\\v2.0\\bin') self.add_env_path("PATH", VCInstallDir + '\\VCPackages') self.add_env_path("PATH", r'${PATH}') self.add_env_path("INCLUDE", VCInstallDir + '\\INCLUDE') self.add_env_path("INCLUDE", VCInstallDir + '\\ATLMFC\\include') self.add_env_path("INCLUDE", VCInstallDir + '\\PlatformSDK\\include') archsuffix = '' if self.arch == "x64": archsuffix = '\\amd64' self.add_env_path("LIB", VCInstallDir + '\\LIB' + archsuffix) self.add_env_path("LIB", VCInstallDir + '\\ATLMFC\\LIB' + archsuffix) self.add_env_path("LIB", VCInstallDir + '\\PlatformSDK\\lib' + archsuffix) self.add_env_path("LIB", VSInstallDir + '\\SDK\\v2.0\\lib' + archsuffix) # alias VC8 as VS2005 VS2005 = VC8 class 
VCExpress9(VC8): @defer.inlineCallbacks def run(self): command = [ "vcexpress", self.projectfile ] if self.mode == "rebuild": command.append("/Rebuild") elif self.mode == "clean": command.append("/Clean") else: command.append("/Build") command.append(self.config) if self.useenv: command.append("/UseEnv") if self.project is not None: command.append("/Project") command.append(self.project) self.command = command # Do not use super() here. We want to override VC7.start(). res = yield VisualStudio.run(self) return res # Add first support for VC9 (Same as VC8, with a different installdir) class VC9(VC8): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 9.0' VS2008 = VC9 # VC10 doesn't look like it needs extra stuff. class VC10(VC9): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 10.0' VS2010 = VC10 # VC11 doesn't look like it needs extra stuff. class VC11(VC10): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 11.0' VS2012 = VC11 # VC12 doesn't look like it needs extra stuff. class VC12(VC11): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 12.0' VS2013 = VC12 # VC14 doesn't look like it needs extra stuff. 
class VC14(VC12): default_installdir = 'C:\\Program Files (x86)\\Microsoft Visual Studio 14.0' VS2015 = VC14 class VC141(VC14): default_installdir = r"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community" VS2017 = VC141 def _msbuild_format_defines_parameter(defines): if defines is None or len(defines) == 0: return "" return ' /p:DefineConstants="{}"'.format(";".join(defines)) def _msbuild_format_target_parameter(mode, project): modestring = None if mode == "clean": modestring = 'Clean' elif mode == "build": modestring = 'Build' elif mode == "rebuild": modestring = 'Rebuild' parameter = "" if project is not None: if modestring == "Rebuild" or modestring is None: parameter = ' /t:"%s"' % (project) else: parameter = ' /t:"%s:%s"' % (project, modestring) elif modestring is not None: parameter = ' /t:%s' % (modestring) return parameter class MsBuild4(VisualStudio): platform = None defines = None vcenv_bat = r"${VS110COMNTOOLS}..\..\VC\vcvarsall.bat" renderables = ['platform'] description = 'building' def __init__(self, platform, defines=None, **kwargs): self.platform = platform self.defines = defines super().__init__(**kwargs) def setupEnvironment(self): super().setupEnvironment() self.env['VCENV_BAT'] = self.vcenv_bat def describe_project(self, done=False): project = self.project if project is None: project = 'solution' return '{} for {}|{}'.format(project, self.config, self.platform) def getCurrentSummary(self): return {'step': 'building ' + self.describe_project()} def getResultSummary(self): return {'step': 'built ' + self.describe_project()} @defer.inlineCallbacks def run(self): if self.platform is None: config.error('platform is mandatory. 
Please specify a string such as "Win32"') yield self.updateSummary() command = (('"%VCENV_BAT%" x86 && msbuild "{}" /p:Configuration="{}" /p:Platform="{}" ' '/maxcpucount').format(self.projectfile, self.config, self.platform)) command += _msbuild_format_target_parameter(self.mode, self.project) command += _msbuild_format_defines_parameter(self.defines) self.command = command res = yield super().run() return res MsBuild = MsBuild4 class MsBuild12(MsBuild4): vcenv_bat = r"${VS120COMNTOOLS}..\..\VC\vcvarsall.bat" class MsBuild14(MsBuild4): vcenv_bat = r"${VS140COMNTOOLS}..\..\VC\vcvarsall.bat" class MsBuild141(VisualStudio): platform = None defines = None vcenv_bat = r"\VC\Auxiliary\Build\vcvarsall.bat" renderables = ['platform'] def __init__(self, platform, defines=None, **kwargs): self.platform = platform self.defines = defines super().__init__(**kwargs) def setupEnvironment(self): super().setupEnvironment() self.env['VCENV_BAT'] = self.vcenv_bat self.add_env_path("PATH", 'C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\') self.add_env_path("PATH", r'${PATH}') def describe_project(self, done=False): project = self.project if project is None: project = 'solution' return '{} for {}|{}'.format(project, self.config, self.platform) @defer.inlineCallbacks def run(self): if self.platform is None: config.error( 'platform is mandatory. 
Please specify a string such as "Win32"') self.description = 'building ' + self.describe_project() self.descriptionDone = 'built ' + self.describe_project() yield self.updateSummary() command = (('FOR /F "tokens=*" %%I in (\'vswhere.exe -property installationPath\') ' ' do "%%I\\%VCENV_BAT%" x86 && msbuild "{}" /p:Configuration="{}" ' '/p:Platform="{}" /maxcpucount').format(self.projectfile, self.config, self.platform)) command += _msbuild_format_target_parameter(self.mode, self.project) command += _msbuild_format_defines_parameter(self.defines) self.command = command res = yield super().run() return res buildbot-3.4.0/master/buildbot/steps/worker.py000066400000000000000000000251541413250514000214440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import stat from twisted.internet import defer from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS class WorkerBuildStep(buildstep.BuildStep): pass class SetPropertiesFromEnv(WorkerBuildStep): """ Sets properties from environment variables on the worker. 
Note this is transferred when the worker first connects """ name = 'SetPropertiesFromEnv' description = ['Setting'] descriptionDone = ['Set'] def __init__(self, variables, source="WorkerEnvironment", **kwargs): super().__init__(**kwargs) self.variables = variables self.source = source @defer.inlineCallbacks def run(self): # on Windows, environment variables are case-insensitive, but we have # a case-sensitive dictionary in worker_environ. Fortunately, that # dictionary is also folded to uppercase, so we can simply fold the # variable names to uppercase to duplicate the case-insensitivity. fold_to_uppercase = (self.worker.worker_system == 'win32') properties = self.build.getProperties() environ = self.worker.worker_environ variables = self.variables log = [] if isinstance(variables, str): variables = [self.variables] for variable in variables: key = variable if fold_to_uppercase: key = variable.upper() value = environ.get(key, None) if value: # note that the property is not uppercased properties.setProperty(variable, value, self.source, runtime=True) log.append("{} = {}".format(variable, repr(value))) yield self.addCompleteLog("properties", "\n".join(log)) return SUCCESS class FileExists(WorkerBuildStep): """ Check for the existence of a file on the worker. """ name = 'FileExists' renderables = ['file'] haltOnFailure = True flunkOnFailure = True def __init__(self, file, **kwargs): super().__init__(**kwargs) self.file = file @defer.inlineCallbacks def run(self): self.checkWorkerHasCommand('stat') cmd = remotecommand.RemoteCommand('stat', {'file': self.file}) yield self.runCommand(cmd) if cmd.didFail(): self.descriptionDone = ["File not found."] return FAILURE s = cmd.updates["stat"][-1] if stat.S_ISREG(s[stat.ST_MODE]): self.descriptionDone = ["File found."] return SUCCESS else: self.descriptionDone = ["Not a file."] return FAILURE class CopyDirectory(WorkerBuildStep): """ Copy a directory tree on the worker. 
""" name = 'CopyDirectory' description = ['Copying'] descriptionDone = ['Copied'] renderables = ['src', 'dest'] haltOnFailure = True flunkOnFailure = True def __init__(self, src, dest, timeout=120, maxTime=None, **kwargs): super().__init__(**kwargs) self.src = src self.dest = dest self.timeout = timeout self.maxTime = maxTime @defer.inlineCallbacks def run(self): self.checkWorkerHasCommand('cpdir') args = {'fromdir': self.src, 'todir': self.dest} args['timeout'] = self.timeout if self.maxTime: args['maxTime'] = self.maxTime cmd = remotecommand.RemoteCommand('cpdir', args) yield self.runCommand(cmd) if cmd.didFail(): self.descriptionDone = ["Copying", self.src, "to", self.dest, "failed."] return FAILURE self.descriptionDone = ["Copied", self.src, "to", self.dest] return SUCCESS class RemoveDirectory(WorkerBuildStep): """ Remove a directory tree on the worker. """ name = 'RemoveDirectory' description = ['Deleting'] descriptionDone = ['Deleted'] renderables = ['dir'] haltOnFailure = True flunkOnFailure = True def __init__(self, dir, **kwargs): super().__init__(**kwargs) self.dir = dir @defer.inlineCallbacks def run(self): self.checkWorkerHasCommand('rmdir') cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.dir}) yield self.runCommand(cmd) if cmd.didFail(): self.descriptionDone = ["Delete failed."] return FAILURE return SUCCESS class MakeDirectory(WorkerBuildStep): """ Create a directory on the worker. 
""" name = 'MakeDirectory' description = ['Creating'] descriptionDone = ['Created'] renderables = ['dir'] haltOnFailure = True flunkOnFailure = True def __init__(self, dir, **kwargs): super().__init__(**kwargs) self.dir = dir @defer.inlineCallbacks def run(self): self.checkWorkerHasCommand('mkdir') cmd = remotecommand.RemoteCommand('mkdir', {'dir': self.dir}) yield self.runCommand(cmd) if cmd.didFail(): self.descriptionDone = ["Create failed."] return FAILURE return SUCCESS class CompositeStepMixin(): def workerPathToMasterPath(self, path): return os.path.join(*self.worker.path_module.split(path)) @defer.inlineCallbacks def addLogForRemoteCommands(self, logname): """This method must be called by user classes composite steps could create several logs, this mixin functions will write to the last one. """ self.rc_log = yield self.addLog(logname) return self.rc_log def runRemoteCommand(self, cmd, args, abandonOnFailure=True, evaluateCommand=lambda cmd: cmd.didFail()): """generic RemoteCommand boilerplate""" cmd = remotecommand.RemoteCommand(cmd, args) if hasattr(self, "rc_log"): cmd.useLog(self.rc_log, False) d = self.runCommand(cmd) def commandComplete(cmd): if abandonOnFailure and cmd.didFail(): raise buildstep.BuildStepFailed() return evaluateCommand(cmd) d.addCallback(lambda res: commandComplete(cmd)) return d def runRmdir(self, dir, timeout=None, **kwargs): """ remove a directory from the worker """ cmd_args = {'dir': dir, 'logEnviron': self.logEnviron} if timeout: cmd_args['timeout'] = timeout return self.runRemoteCommand('rmdir', cmd_args, **kwargs) def runRmFile(self, path, timeout=None, **kwargs): """ remove a file from the worker """ cmd_args = {'path': path, 'logEnviron': self.logEnviron} if timeout: cmd_args['timeout'] = timeout if self.workerVersionIsOlderThan('rmfile', '3.1'): cmd_args['dir'] = os.path.abspath(path) return self.runRemoteCommand('rmdir', cmd_args, **kwargs) return self.runRemoteCommand('rmfile', cmd_args, **kwargs) def pathExists(self, 
path): """ test whether path exists""" def commandComplete(cmd): return not cmd.didFail() return self.runRemoteCommand('stat', {'file': path, 'logEnviron': self.logEnviron, }, abandonOnFailure=False, evaluateCommand=commandComplete) def runMkdir(self, _dir, **kwargs): """ create a directory and its parents""" return self.runRemoteCommand('mkdir', {'dir': _dir, 'logEnviron': self.logEnviron, }, **kwargs) def runGlob(self, path, **kwargs): """ find files matching a shell-style pattern""" def commandComplete(cmd): return cmd.updates['files'][-1] return self.runRemoteCommand('glob', {'path': path, 'logEnviron': self.logEnviron, }, evaluateCommand=commandComplete, **kwargs) def getFileContentFromWorker(self, filename, abandonOnFailure=False): self.checkWorkerHasCommand("uploadFile") fileWriter = remotetransfer.StringFileWriter() # default arguments args = { 'workdir': self.workdir, 'writer': fileWriter, 'maxsize': None, 'blocksize': 32 * 1024, } if self.workerVersionIsOlderThan('uploadFile', '3.0'): args['slavesrc'] = filename else: args['workersrc'] = filename def commandComplete(cmd): if cmd.didFail(): return None return fileWriter.buffer return self.runRemoteCommand('uploadFile', args, abandonOnFailure=abandonOnFailure, evaluateCommand=commandComplete) def downloadFileContentToWorker(self, workerdest, strfile, abandonOnFailure=False, mode=None, workdir=None): if workdir is None: workdir = self.workdir self.checkWorkerHasCommand("downloadFile") fileReader = remotetransfer.StringFileReader(strfile) # default arguments args = { 'workdir': workdir, 'maxsize': None, 'mode': mode, 'reader': fileReader, 'blocksize': 32 * 1024, } if self.workerVersionIsOlderThan('downloadFile', '3.0'): args['slavedest'] = workerdest else: args['workerdest'] = workerdest def commandComplete(cmd): if cmd.didFail(): return None return fileReader return self.runRemoteCommand('downloadFile', args, abandonOnFailure=abandonOnFailure, evaluateCommand=commandComplete) 
buildbot-3.4.0/master/buildbot/test/000077500000000000000000000000001413250514000173735ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/__init__.py000066400000000000000000000142641413250514000215130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import sys import warnings from pkg_resources import parse_version import setuptools # force import setuptools before any other distutils imports from buildbot import monkeypatches from buildbot.test.util.warnings import assertProducesWarning # noqa pylint: disable=wrong-import-position from buildbot.test.util.warnings import assertProducesWarnings # noqa pylint: disable=wrong-import-position from buildbot.warnings import DeprecatedApiWarning # noqa pylint: disable=wrong-import-position # import mock so we bail out early if it's not installed try: import mock [mock] except ImportError as e: raise ImportError("\nBuildbot tests require the 'mock' module; " "try 'pip install mock'") from e # apply the same patches the buildmaster does when it starts monkeypatches.patch_all(for_tests=True) # enable deprecation warnings warnings.filterwarnings('always', category=DeprecationWarning) if parse_version(mock.__version__) < parse_version("0.8"): raise ImportError("\nBuildbot tests require mock version 0.8.0 or " "higher; try 'pip install 
-U mock'") [setuptools] # force use for pylint # This is where we load deprecated module-level APIs to ignore warning produced by importing them. # After the deprecated API has been removed, leave at least one instance of the import in a # commented state as reference. # with assertProducesWarnings(DeprecatedApiWarning, # messages_patterns=[ # r" buildbot\.status\.base has been deprecated", # ]): # import buildbot.status.base as _ # noqa # All deprecated modules should be loaded, consider future warnings in tests as errors. # In order to not pollute the test outputs, # warnings in tests shall be forcefully tested with assertProducesWarning, # or shutdown using the warning module warnings.filterwarnings('error') # if buildbot_worker is installed in pip install -e mode, then the docker directory will # match "import docker", and produce a warning. # We just suppress this warning instead of doing silly workaround. warnings.filterwarnings('ignore', "Not importing directory.*docker': missing __init__.py", category=ImportWarning) # FIXME: needs to be sorted out (#3666) warnings.filterwarnings('ignore', "1300 Invalid utf8 character string") # twisted.compat.execfile is using 'U' https://twistedmatrix.com/trac/ticket/9023 warnings.filterwarnings('ignore', "'U' mode is deprecated", DeprecationWarning) # twisted.python.filepath and trial are using bytes file paths when # the "native" file path (Unicode) should be used on Windows. warnings.filterwarnings('ignore', "The Windows bytes API has been " "deprecated, use Unicode filenames instead") # moto warning v1.0.0 warnings.filterwarnings('ignore', "Flags not at the start of the expression") warnings.filterwarnings('ignore', r"object\(\) takes no parameters") # this warning happens sometimes on python3.4 warnings.filterwarnings('ignore', r"The value of convert_charrefs will become True in 3.5") # Twisted 18.4+ adds a deprecation warning and still use the deprecated API in its own code! 
warnings.filterwarnings('ignore', ".*getClientIP was deprecated.*", DeprecationWarning) # Python 3.7 adds a deprecation warning formatargspec. # The signature api that replaces it is not available in 2.7 warnings.filterwarnings('ignore', ".*`formatargspec` is deprecated.*", DeprecationWarning) # Python 3.7 adds a deprecation importing ABCs from collection. # Such imports are made in dependencies (e.g moto, werzeug, pyparsing) warnings.filterwarnings('ignore', ".*Using or importing the ABCs from 'collections'.*", DeprecationWarning) # more 3.7 warning from moto warnings.filterwarnings('ignore', r".*Use 'list\(elem\)' or iteration over elem instead.*", DeprecationWarning) # ignore ResourceWarnings for unclosed sockets for the pg8000 driver on Python 3+ (tech debt: #4508) if sys.version_info[0] >= 3 and "pg8000" in os.getenv("BUILDBOT_TEST_DB_URL", ""): warnings.filterwarnings('ignore', ".*unclosed .*socket", ResourceWarning) # ignore ResourceWarnings when connecting to a HashiCorp vault via hvac in integration tests warnings.filterwarnings('ignore', r".*unclosed .*socket.*raddr=.*, 8200[^\d]", ResourceWarning) # Python 3.5-3.8 shows this warning warnings.filterwarnings('ignore', ".*the imp module is deprecated in favour of importlib*") # Python 3.3-3.7 show this warning and in invoked from autobahn warnings.filterwarnings('ignore', ".*time.clock has been deprecated in Python 3.3.*") # ignore an attrs API warning for APIs used in dependencies warnings.filterwarnings('ignore', ".*The usage of `cmp` is deprecated and will be removed " "on or after.*", DeprecationWarning) # ignore a warning emitted by pkg_resources when importing certain namespace packages warnings.filterwarnings('ignore', ".*Not importing directory .*/zope: missing __init__", category=ImportWarning) warnings.filterwarnings('ignore', ".*Not importing directory .*/sphinxcontrib: missing __init__", category=ImportWarning) # ignore warnings from importing lib2to3 via buildbot_pkg -> # 
setuptools.command.build_py -> setuptools.lib2to3_ex -> lib2to3 # https://github.com/pypa/setuptools/issues/2086 warnings.filterwarnings('ignore', ".*lib2to3 package is deprecated", category=PendingDeprecationWarning) buildbot-3.4.0/master/buildbot/test/fake/000077500000000000000000000000001413250514000203015ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/fake/__init__.py000066400000000000000000000000001413250514000224000ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/fake/botmaster.py000066400000000000000000000033161413250514000226560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process import botmaster from buildbot.util import service class FakeBotMaster(service.AsyncMultiService, botmaster.LockRetrieverMixin): def __init__(self): super().__init__() self.setName("fake-botmaster") self.builders = {} # dictionary mapping worker names to builders self.buildsStartedForWorkers = [] self.delayShutdown = False def getBuildersForWorker(self, workername): return self.builders.get(workername, []) def maybeStartBuildsForWorker(self, workername): self.buildsStartedForWorkers.append(workername) def maybeStartBuildsForAllBuilders(self): self.buildsStartedForWorkers += self.builders.keys() def workerLost(self, bot): pass def cleanShutdown(self, quickMode=False, stopReactor=True): self.shuttingDown = True if self.delayShutdown: self.shutdownDeferred = defer.Deferred() return self.shutdownDeferred return None buildbot-3.4.0/master/buildbot/test/fake/bworkermanager.py000066400000000000000000000052131413250514000236620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.util import service class FakeWorkerManager(service.AsyncMultiService): def __init__(self): super().__init__() self.setName('workers') # WorkerRegistration instances keyed by worker name self.registrations = {} # connection objects keyed by worker name self.connections = {} # self.workers contains a ready Worker instance for each # potential worker, i.e. all the ones listed in the config file. # If the worker is connected, self.workers[workername].worker will # contain a RemoteReference to their Bot instance. If it is not # connected, that attribute will hold None. self.workers = {} # maps workername to Worker def register(self, worker): workerName = worker.workername reg = FakeWorkerRegistration(worker) self.registrations[workerName] = reg return defer.succeed(reg) def _unregister(self, registration): del self.registrations[registration.worker.workername] def getWorkerByName(self, workerName): return self.registrations[workerName].worker def newConnection(self, conn, workerName): assert workerName not in self.connections self.connections[workerName] = conn conn.info = {} def remove(): del self.connections[workerName] return defer.succeed(True) class FakeWorkerRegistration: def __init__(self, worker): self.updates = [] self.unregistered = False self.worker = worker def getPBPort(self): return 1234 def unregister(self): assert not self.unregistered, "called twice" self.unregistered = True return defer.succeed(None) def update(self, worker_config, global_config): if worker_config.workername not in self.updates: self.updates.append(worker_config.workername) return defer.succeed(None) buildbot-3.4.0/master/buildbot/test/fake/change.py000066400000000000000000000021741413250514000221040ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.process.properties import Properties from buildbot.test.fake.state import State class Change(State): project = '' repository = '' branch = '' category = '' codebase = '' properties = {} def __init__(self, **kw): super().__init__(**kw) # change.properties is a IProperties props = Properties() props.update(self.properties, "test") self.properties = props buildbot-3.4.0/master/buildbot/test/fake/docker.py000066400000000000000000000070501413250514000221240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members version = "1.10.6" class Client: latest = None containerCreated = False def __init__(self, base_url): Client.latest = self self.call_args_create_container = [] self.call_args_create_host_config = [] self.called_class_name = None self._images = [ {'RepoTags': ['busybox:latest', 'worker:latest', 'tester:latest']}] self._pullable = ['alpine:latest', 'tester:latest'] self._pullCount = 0 self._containers = {} if Client.containerCreated: self.create_container("some-default-image") def images(self): return self._images def start(self, container): pass def stop(self, id): pass def wait(self, id): return 0 def build(self, fileobj, tag): if fileobj.read() == b'BUG': pass else: logs = [] for line in logs: yield line self._images.append({'RepoTags': [tag + ':latest']}) def pull(self, image, *args, **kwargs): if image in self._pullable: self._pullCount += 1 self._images.append({'RepoTags': [image]}) def containers(self, filters=None, *args, **kwargs): if filters is not None: if 'existing' in filters.get('name', ''): self.create_container( image='busybox:latest', name="buildbot-existing-87de7e" ) self.create_container( image='busybox:latest', name="buildbot-existing-87de7ef" ) return [ c for c in self._containers.values() if c['name'].startswith(filters['name']) ] return self._containers.values() def create_host_config(self, *args, **kwargs): self.call_args_create_host_config.append(kwargs) def create_container(self, image, *args, **kwargs): self.call_args_create_container.append(kwargs) self.called_class_name = self.__class__.__name__ name = kwargs.get('name', None) if 'buggy' in image: raise Exception('we could not create this container') for c in self._containers.values(): if c['name'] == name: raise Exception('cannot create with same name') ret = { 'Id': '8a61192da2b3bb2d922875585e29b74ec0dc4e0117fcbf84c962204e97564cd7', 'Warnings': None } self._containers[ret['Id']] = { 'started': False, 'image': image, 'Id': ret['Id'], 'name': name, # 
docker does not return this 'Names': [name] # this what docker returns } return ret def remove_container(self, id, **kwargs): del self._containers[id] class APIClient(Client): pass buildbot-3.4.0/master/buildbot/test/fake/endpoint.py000066400000000000000000000145071413250514000225020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # This is a static resource type and set of endpoints used as common data by # tests. 
from twisted.internet import defer

from buildbot.data import base
from buildbot.data import types

# Static fixture data shared by the data/REST/GraphQL tests.  Keys are the
# resource ids; values are the corresponding resource dicts.
testData = {
    13: {'testid': 13, 'info': 'ok', 'success': True, 'tags': []},
    14: {'testid': 14, 'info': 'failed', 'success': False, 'tags': []},
    15: {'testid': 15, 'info': 'warned', 'success': True, 'tags': ['a', 'b', ]},
    16: {'testid': 16, 'info': 'skipped', 'success': True, 'tags': ['a']},
    17: {'testid': 17, 'info': 'ignored', 'success': True, 'tags': []},
    18: {'testid': 18, 'info': 'unexp', 'success': False, 'tags': []},
    19: {'testid': 19, 'info': 'todo', 'success': True, 'tags': []},
    20: {'testid': 20, 'info': 'error', 'success': False, 'tags': []},
}

# steps are sub-resources of tests, linked through 'testid'
stepData = {
    13: {'stepid': 13, 'testid': 13, 'info': 'ok'},
    14: {'stepid': 14, 'testid': 13, 'info': 'failed'},
    15: {'stepid': 15, 'testid': 14, 'info': 'failed'},
}


class TestsEndpoint(base.Endpoint):
    # collection endpoint listing every test resource
    isCollection = True
    pathPatterns = """
        /tests
        /test
    """
    rootLinkName = 'tests'

    def get(self, resultSpec, kwargs):
        # results are sorted by ID for test stability
        return defer.succeed(sorted(testData.values(), key=lambda v: v['testid']))


class RawTestsEndpoint(base.Endpoint):
    # "raw" endpoint: returns a file-download style payload rather than a
    # resource dict
    isCollection = False
    isRaw = True
    pathPatterns = "/rawtest"

    def get(self, resultSpec, kwargs):
        return defer.succeed({
            "filename": "test.txt",
            "mime-type": "text/test",
            'raw': 'value'
        })


class FailEndpoint(base.Endpoint):
    # endpoint whose get() always errbacks, for error-path tests
    isCollection = False
    pathPatterns = "/test/fail"

    def get(self, resultSpec, kwargs):
        return defer.fail(RuntimeError('oh noes'))


class TestEndpoint(base.Endpoint):
    # single-test endpoint, also exercised by control() tests
    isCollection = False
    pathPatterns = """
        /tests/n:testid
        /test/n:testid
    """

    def get(self, resultSpec, kwargs):
        if kwargs['testid'] == 0:
            # testid 0 simulates a missing resource (404)
            return None
        return defer.succeed(testData[kwargs['testid']])

    def control(self, action, args, kwargs):
        # the "fail" action errbacks; any other action echoes its inputs back
        if action == "fail":
            return defer.fail(RuntimeError("oh noes"))
        return defer.succeed({'action': action, 'args': args, 'kwargs': kwargs})


class StepsEndpoint(base.Endpoint):
    # collection of the steps belonging to one test
    isCollection = True
    pathPatterns = "/tests/n:testid/steps"

    def get(self, resultSpec, kwargs):
        data = [step for step in stepData.values() if step['testid'] == kwargs['testid']]
        # results are sorted by ID for test stability
        return defer.succeed(sorted(data, key=lambda v: v['stepid']))


class StepEndpoint(base.Endpoint):
    isCollection = False
    pathPatterns = "/tests/n:testid/steps/n:stepid"

    def get(self, resultSpec, kwargs):
        if kwargs['testid'] == 0:
            return None
        # NOTE(review): this returns testData keyed by testid, not stepData
        # keyed by stepid as the path pattern suggests — looks like a
        # copy-paste from TestEndpoint.  Left as-is; confirm against the
        # tests that consume this endpoint before changing it.
        return defer.succeed(testData[kwargs['testid']])


class Step(base.ResourceType):
    name = "step"
    plural = "steps"
    endpoints = [StepsEndpoint, StepEndpoint]
    keyField = "stepid"

    class EntityType(types.Entity):
        stepid = types.Integer()
        testid = types.Integer()
        info = types.String()
    entityType = EntityType(name, 'Step')


class Test(base.ResourceType):
    name = "test"
    plural = "tests"
    endpoints = [TestsEndpoint, TestEndpoint, FailEndpoint, RawTestsEndpoint]
    keyField = "testid"
    subresources = ["Step"]

    class EntityType(types.Entity):
        testid = types.Integer()
        info = types.String()
        success = types.Boolean()
        tags = types.List(of=types.String())
    entityType = EntityType(name, 'Test')


# GraphQL schema equivalent of the resource types above.  Doubled braces
# survive .format() below as literal braces; {queries} is substituted into
# both Query and Subscription.
graphql_schema = """
# custom scalar types for buildbot data model
scalar Date   # stored as utc unix timestamp
scalar Binary  # arbitrary data stored as base85
scalar JSON  # arbitrary json stored as string, mainly used for properties values
type Query {{
  {queries}
}}
type Subscription {{
  {queries}
}}
type Test {{
  testid: Int!
  info: String!
  success: Boolean!
  tags: [String]!
  steps(info: String, info__contains: String, info__eq: String, info__ge: String, info__gt: String, info__in: [String], info__le: String, info__lt: String, info__ne: String, info__notin: [String], stepid: Int, stepid__contains: Int, stepid__eq: Int, stepid__ge: Int, stepid__gt: Int, stepid__in: [Int], stepid__le: Int, stepid__lt: Int, stepid__ne: Int, stepid__notin: [Int], testid: Int, testid__contains: Int, testid__eq: Int, testid__ge: Int, testid__gt: Int, testid__in: [Int], testid__le: Int, testid__lt: Int, testid__ne: Int, testid__notin: [Int], order: String, limit: Int, offset: Int): [Step]!
  step(stepid: Int): Step
}}
type Step {{
  stepid: Int!
  testid: Int!
  info: String!
}}
""".format(queries="""
  tests(info: String, info__contains: String, info__eq: String, info__ge: String, info__gt: String, info__in: [String], info__le: String, info__lt: String, info__ne: String, info__notin: [String], success: Boolean, success__contains: Boolean, success__eq: Boolean, success__ge: Boolean, success__gt: Boolean, success__le: Boolean, success__lt: Boolean, success__ne: Boolean, tags: String, tags__contains: String, tags__eq: String, tags__ge: String, tags__gt: String, tags__in: [String], tags__le: String, tags__lt: String, tags__ne: String, tags__notin: [String], testid: Int, testid__contains: Int, testid__eq: Int, testid__ge: Int, testid__gt: Int, testid__in: [Int], testid__le: Int, testid__lt: Int, testid__ne: Int, testid__notin: [Int], order: String, limit: Int, offset: Int): [Test]!
  test(testid: Int): Test""")
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import posixpath

import mock

from buildbot import config
from buildbot.process import factory
from buildbot.process import properties
from buildbot.process import workerforbuilder
from buildbot.test.fake import fakemaster
from buildbot.worker import base


class FakeWorkerStatus(properties.PropertiesMixin):
    # minimal stand-in for a worker status: a name plus a Properties bag
    # pre-seeded with one "test" property

    def __init__(self, name):
        self.name = name
        self.info = properties.Properties()
        self.info.setProperty("test", "test", "Worker")


class FakeBuild(properties.PropertiesMixin):
    # Stand-in for process.build.Build with fixed ids (buildid=92, number=13)
    # and a mocked worker-for-builder, sufficient for step/property tests.

    def __init__(self, props=None, master=None):
        self.builder = fakemaster.FakeBuilder(master)
        self.workerforbuilder = mock.Mock(
            spec=workerforbuilder.WorkerForBuilder)
        self.workerforbuilder.worker = mock.Mock(spec=base.Worker)
        self.workerforbuilder.worker.info = properties.Properties()
        self.workerforbuilder.worker.workername = 'workername'
        self.builder.config = config.BuilderConfig(
            name='bldr', workernames=['a'], factory=factory.BuildFactory())
        self.path_module = posixpath
        self.buildid = 92
        self.number = 13
        self.workdir = 'build'
        self.locks = []
        # {codebase: sourcestamp}; tests populate this directly
        self.sources = {}
        if props is None:
            props = properties.Properties()
        props.build = self
        self.properties = props
        self.master = None
        self.config_version = 0

    def getProperties(self):
        return self.properties

    def getSourceStamp(self, codebase):
        if codebase in self.sources:
            return self.sources[codebase]
        return None

    def getAllSourceStamps(self):
        return list(self.sources.values())

    def allChanges(self):
        # generator over the changes of every source stamp
        for s in self.sources.values():
            for c in s.changes:
                yield c

    def allFiles(self):
        files = []
        for c in self.allChanges():
            for f in c.files:
                files.append(f)
        return files

    def getBuilder(self):
        return self.builder

    def getWorkerInfo(self):
        return self.workerforbuilder.worker.info

    def setUniqueStepName(self, step):
        pass


class FakeBuildForRendering:
    # renderer stand-in: prefixes strings with "rendered:" and recurses into
    # lists and tuples; anything else passes through unchanged

    def render(self, r):
        if isinstance(r, str):
            return "rendered:" + r
        if isinstance(r, list):
            return list(self.render(i) for i in r)
        if isinstance(r, tuple):
            return tuple(self.render(i) for i in r)
        return r
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import json

from twisted.internet import defer
from twisted.python import failure

from buildbot.data import connector
from buildbot.data import resultspec
from buildbot.db.buildrequests import AlreadyClaimedError
from buildbot.test.util import validation
from buildbot.util import service


class FakeUpdates(service.AsyncService):

    # unlike "real" update methods, all of the fake methods are here in a
    # single class.

    def __init__(self, testcase):
        self.testcase = testcase

        # test cases should assert the values here:
        self.changesAdded = []  # Changes are numbered starting at 1.
        # { name : id }; users can add changesources here
        self.changesourceIds = {}
        self.buildsetsAdded = []  # Buildsets are numbered starting at 1
        self.maybeBuildsetCompleteCalls = 0
        self.masterStateChanges = []  # dictionaries
        self.schedulerIds = {}  # { name : id }; users can add schedulers here
        self.builderIds = {}  # { name : id }; users can add schedulers here
        self.schedulerMasters = {}  # { schedulerid : masterid }
        self.changesourceMasters = {}  # { changesourceid : masterid }
        self.workerIds = {}  # { name : id }; users can add workers here
        # { logid : {'finished': .., 'name': .., 'type': .., 'content': [ .. ]} }
        self.logs = {}
        self.claimedBuildRequests = set([])
        self.stepStateString = {}  # { stepid : string }
        self.stepUrls = {}  # { stepid : [(name,url)] }
        self.properties = []
        self.missingWorkers = []

    # extra assertions

    def assertProperties(self, sourced, properties):
        # verify a properties dict: keys are strings, values are JSON-able,
        # and (when sourced=True) each value is a (value, source) 2-tuple
        self.testcase.assertIsInstance(properties, dict)
        for k, v in properties.items():
            self.testcase.assertIsInstance(k, str)
            if sourced:
                self.testcase.assertIsInstance(v, tuple)
                self.testcase.assertEqual(len(v), 2)
                propval, propsrc = v
                self.testcase.assertIsInstance(propsrc, str)
            else:
                propval = v
            try:
                json.dumps(propval)
            except (TypeError, ValueError):
                self.testcase.fail("value for {} is not JSON-able".format(k))

    # update methods

    def addChange(self, files=None, comments=None, author=None, committer=None,
                  revision=None, when_timestamp=None, branch=None,
                  category=None, revlink='', properties=None, repository='',
                  codebase=None, project='', src=None):
        if properties is None:
            properties = {}

        # double-check args, types, etc.
        if files is not None:
            self.testcase.assertIsInstance(files, list)
            # NOTE(review): map() is lazy on Python 3, so these per-file
            # assertions are never actually executed — confirm and wrap in
            # list() or a loop if they should run.
            map(lambda f: self.testcase.assertIsInstance(f, str), files)
        self.testcase.assertIsInstance(comments, (type(None), str))
        self.testcase.assertIsInstance(author, (type(None), str))
        self.testcase.assertIsInstance(committer, (type(None), str))
        self.testcase.assertIsInstance(revision, (type(None), str))
        self.testcase.assertIsInstance(when_timestamp, (type(None), int))
        self.testcase.assertIsInstance(branch, (type(None), str))
        if callable(category):
            # a callable category is resolved against a "pre-change" built by
            # the master's configured preChangeGenerator
            pre_change = self.master.config.preChangeGenerator(author=author,
                                                               committer=committer,
                                                               files=files,
                                                               comments=comments,
                                                               revision=revision,
                                                               when_timestamp=when_timestamp,
                                                               branch=branch,
                                                               revlink=revlink,
                                                               properties=properties,
                                                               repository=repository,
                                                               project=project)
            category = category(pre_change)
        self.testcase.assertIsInstance(category, (type(None), str))
        self.testcase.assertIsInstance(revlink, (type(None), str))
        self.assertProperties(sourced=False, properties=properties)
        self.testcase.assertIsInstance(repository, str)
        self.testcase.assertIsInstance(codebase, (type(None), str))
        self.testcase.assertIsInstance(project, str)
        self.testcase.assertIsInstance(src, (type(None), str))

        # use locals() to ensure we get all of the args and don't forget if
        # more are added
        self.changesAdded.append(locals())
        self.changesAdded[-1].pop('self')
        return defer.succeed(len(self.changesAdded))

    def masterActive(self, name, masterid):
        self.testcase.assertIsInstance(name, str)
        self.testcase.assertIsInstance(masterid, int)
        if masterid:
            self.testcase.assertEqual(masterid, 1)
        self.thisMasterActive = True
        return defer.succeed(None)

    def masterStopped(self, name, masterid):
        self.testcase.assertIsInstance(name, str)
        self.testcase.assertEqual(masterid, 1)
        self.thisMasterActive = False
        return defer.succeed(None)

    def expireMasters(self, forceHouseKeeping=False):
        return defer.succeed(None)

    @defer.inlineCallbacks
    def addBuildset(self, waited_for, scheduler=None, sourcestamps=None, reason='',
                    properties=None, builderids=None, external_idstring=None,
                    parent_buildid=None, parent_relationship=None):
        if sourcestamps is None:
            sourcestamps = []
        if properties is None:
            properties = {}
        if builderids is None:
            builderids = []
        # assert types
        self.testcase.assertIsInstance(scheduler, str)
        self.testcase.assertIsInstance(sourcestamps, list)
        for ss in sourcestamps:
            if not isinstance(ss, int) and not isinstance(ss, dict):
                self.testcase.fail("{} ({}) is not an integer or a dictionary".format(
                    ss, type(ss)))
            del ss  # since we use locals(), below
        self.testcase.assertIsInstance(reason, str)
        self.assertProperties(sourced=True, properties=properties)
        self.testcase.assertIsInstance(builderids, list)
        self.testcase.assertIsInstance(external_idstring, (type(None), str))

        self.buildsetsAdded.append(locals())
        self.buildsetsAdded[-1].pop('self')

        # call through to the db layer, since many scheduler tests expect to
        # find the buildset in the db later - TODO fix this!
        bsid, brids = yield self.master.db.buildsets.addBuildset(
            sourcestamps=sourcestamps, reason=reason, properties=properties,
            builderids=builderids, waited_for=waited_for,
            external_idstring=external_idstring, parent_buildid=parent_buildid,
            parent_relationship=parent_relationship)
        return (bsid, brids)

    def maybeBuildsetComplete(self, bsid):
        # just count the calls; tests assert on the counter
        self.maybeBuildsetCompleteCalls += 1
        return defer.succeed(None)

    @defer.inlineCallbacks
    def claimBuildRequests(self, brids, claimed_at=None):
        validation.verifyType(self.testcase, 'brids', brids,
                              validation.ListValidator(validation.IntValidator()))
        validation.verifyType(self.testcase, 'claimed_at', claimed_at,
                              validation.NoneOk(validation.DateTimeValidator()))
        if not brids:
            return True
        try:
            yield self.master.db.buildrequests.claimBuildRequests(
                brids=brids, claimed_at=claimed_at)
        except AlreadyClaimedError:
            return False
        self.claimedBuildRequests.update(set(brids))
        return True

    @defer.inlineCallbacks
    def unclaimBuildRequests(self, brids):
        validation.verifyType(self.testcase, 'brids', brids,
                              validation.ListValidator(validation.IntValidator()))
        self.claimedBuildRequests.difference_update(set(brids))
        if brids:
            yield self.master.db.buildrequests.unclaimBuildRequests(brids)

    def completeBuildRequests(self, brids, results, complete_at=None):
        validation.verifyType(self.testcase, 'brids', brids,
                              validation.ListValidator(validation.IntValidator()))
        validation.verifyType(self.testcase, 'results', results,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'complete_at', complete_at,
                              validation.NoneOk(validation.DateTimeValidator()))
        return defer.succeed(True)

    def rebuildBuildrequest(self, buildrequest):
        return defer.succeed(None)

    def updateBuilderList(self, masterid, builderNames):
        self.testcase.assertEqual(masterid, self.master.masterid)
        for n in builderNames:
            self.testcase.assertIsInstance(n, str)
        self.builderNames = builderNames
        return defer.succeed(None)

    @defer.inlineCallbacks
    def updateBuilderInfo(self, builderid, description, tags):
        yield self.master.db.builders.updateBuilderInfo(builderid, description, tags)

    def masterDeactivated(self, masterid):
        return defer.succeed(None)

    def findSchedulerId(self, name):
        return self.master.db.schedulers.findSchedulerId(name)

    def forget_about_it(self, name):
        # NOTE(review): this carries what looks like the old in-memory
        # implementation of findSchedulerId (which now delegates to the db
        # above); confirm whether any test still calls it.
        validation.verifyType(self.testcase, 'scheduler name', name,
                              validation.StringValidator())
        if name not in self.schedulerIds:
            self.schedulerIds[name] = max(
                [0] + list(self.schedulerIds.values())) + 1
        return defer.succeed(self.schedulerIds[name])

    def findChangeSourceId(self, name):
        validation.verifyType(self.testcase, 'changesource name', name,
                              validation.StringValidator())
        if name not in self.changesourceIds:
            self.changesourceIds[name] = max(
                [0] + list(self.changesourceIds.values())) + 1
        return defer.succeed(self.changesourceIds[name])

    def findBuilderId(self, name):
        validation.verifyType(self.testcase, 'builder name', name,
                              validation.StringValidator())
        return self.master.db.builders.findBuilderId(name)

    def trySetSchedulerMaster(self, schedulerid, masterid):
        currentMasterid = self.schedulerMasters.get(schedulerid)
        # a stored Exception simulates a db failure
        if isinstance(currentMasterid, Exception):
            return defer.fail(failure.Failure(
                currentMasterid))
        if currentMasterid and masterid is not None:
            return defer.succeed(False)
        self.schedulerMasters[schedulerid] = masterid
        return defer.succeed(True)

    def trySetChangeSourceMaster(self, changesourceid, masterid):
        currentMasterid = self.changesourceMasters.get(changesourceid)
        # a stored Exception simulates a db failure
        if isinstance(currentMasterid, Exception):
            return defer.fail(failure.Failure(
                currentMasterid))
        if currentMasterid and masterid is not None:
            return defer.succeed(False)
        self.changesourceMasters[changesourceid] = masterid
        return defer.succeed(True)

    def addBuild(self, builderid, buildrequestid, workerid):
        validation.verifyType(self.testcase, 'builderid', builderid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'buildrequestid', buildrequestid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'workerid', workerid,
                              validation.IntValidator())
        return defer.succeed((10, 1))

    def generateNewBuildEvent(self, buildid):
        validation.verifyType(self.testcase, 'buildid', buildid,
                              validation.IntValidator())
        return defer.succeed(None)

    def setBuildStateString(self, buildid, state_string):
        validation.verifyType(self.testcase, 'buildid', buildid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'state_string', state_string,
                              validation.StringValidator())
        return defer.succeed(None)

    def finishBuild(self, buildid, results):
        validation.verifyType(self.testcase, 'buildid', buildid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'results', results,
                              validation.IntValidator())
        return defer.succeed(None)

    def setBuildProperty(self, buildid, name, value, source):
        validation.verifyType(self.testcase, 'buildid', buildid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'name', name,
                              validation.StringValidator())
        try:
            json.dumps(value)
        except (TypeError, ValueError):
            self.testcase.fail("Value for {} is not JSON-able".format(name))
        validation.verifyType(self.testcase, 'source', source,
                              validation.StringValidator())
        return defer.succeed(None)

    @defer.inlineCallbacks
    def setBuildProperties(self, buildid, properties):
        for k, v, s in properties.getProperties().asList():
            self.properties.append((buildid, k, v, s))
            yield self.setBuildProperty(buildid, k, v, s)

    def addStep(self, buildid, name):
        validation.verifyType(self.testcase, 'buildid', buildid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'name', name,
                              validation.IdentifierValidator(50))
        return defer.succeed((10, 1, name))

    def addStepURL(self, stepid, name, url):
        validation.verifyType(self.testcase, 'stepid', stepid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'name', name,
                              validation.StringValidator())
        validation.verifyType(self.testcase, 'url', url,
                              validation.StringValidator())
        self.stepUrls.setdefault(stepid, []).append((name, url))
        return defer.succeed(None)

    def startStep(self, stepid):
        validation.verifyType(self.testcase, 'stepid', stepid,
                              validation.IntValidator())
        return defer.succeed(None)

    def setStepStateString(self, stepid, state_string):
        validation.verifyType(self.testcase, 'stepid', stepid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'state_string', state_string,
                              validation.StringValidator())
        self.stepStateString[stepid] = state_string
        return defer.succeed(None)

    def finishStep(self, stepid, results, hidden):
        validation.verifyType(self.testcase, 'stepid', stepid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'results', results,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'hidden', hidden,
                              validation.BooleanValidator())
        return defer.succeed(None)

    def addLog(self, stepid, name, type):
        validation.verifyType(self.testcase, 'stepid', stepid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'name', name,
                              validation.StringValidator())
        validation.verifyType(self.testcase, 'type', type,
                              validation.IdentifierValidator(1))
        logid = max([0] + list(self.logs)) + 1
        self.logs[logid] = dict(
            name=name, type=type, content=[], finished=False)
        return defer.succeed(logid)

    def finishLog(self, logid):
        validation.verifyType(self.testcase, 'logid', logid,
                              validation.IntValidator())
        self.logs[logid]['finished'] = True
        return defer.succeed(None)

    def compressLog(self, logid):
        validation.verifyType(self.testcase, 'logid', logid,
                              validation.IntValidator())
        return defer.succeed(None)

    def appendLog(self, logid, content):
        validation.verifyType(self.testcase, 'logid', logid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'content', content,
                              validation.StringValidator())
        # each appended chunk must be newline-terminated
        self.testcase.assertEqual(content[-1], '\n')
        self.logs[logid]['content'].append(content)
        return defer.succeed(None)

    def findWorkerId(self, name):
        validation.verifyType(self.testcase, 'worker name', name,
                              validation.IdentifierValidator(50))
        # this needs to actually get inserted into the db (fake or real) since
        # getWorker will get called later
        return self.master.db.workers.findWorkerId(name)

    def workerConnected(self, workerid, masterid, workerinfo):
        return self.master.db.workers.workerConnected(
            workerid=workerid, masterid=masterid, workerinfo=workerinfo)

    def workerConfigured(self, workerid, masterid, builderids):
        return self.master.db.workers.workerConfigured(
            workerid=workerid, masterid=masterid, builderids=builderids)

    def workerDisconnected(self, workerid, masterid):
        return self.master.db.workers.workerDisconnected(
            workerid=workerid, masterid=masterid)

    def deconfigureAllWorkersForMaster(self, masterid):
        return self.master.db.workers.deconfigureAllWorkersForMaster(
            masterid=masterid)

    def workerMissing(self, workerid, masterid, last_connection, notify):
        self.missingWorkers.append((workerid, masterid, last_connection, notify))

    def schedulerEnable(self, schedulerid, v):
        return self.master.db.schedulers.enable(schedulerid, v)

    def setWorkerState(self, workerid, paused, graceful):
        return self.master.db.workers.setWorkerState(
            workerid=workerid, paused=paused, graceful=graceful)

    # methods form BuildData resource

    @defer.inlineCallbacks
    def setBuildData(self, buildid, name, value, source):
        validation.verifyType(self.testcase, 'buildid', buildid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'name', name,
                              validation.StringValidator())
        validation.verifyType(self.testcase, 'value', value,
                              validation.BinaryValidator())
        validation.verifyType(self.testcase, 'source', source,
                              validation.StringValidator())
        yield self.master.db.build_data.setBuildData(buildid, name, value, source)

    # methods from TestResultSet resource

    @defer.inlineCallbacks
    def addTestResultSet(self, builderid, buildid, stepid, description, category,
                         value_unit):
        validation.verifyType(self.testcase, 'builderid', builderid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'buildid', buildid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'stepid', stepid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'description', description,
                              validation.StringValidator())
        validation.verifyType(self.testcase, 'category', category,
                              validation.StringValidator())
        validation.verifyType(self.testcase, 'value_unit', value_unit,
                              validation.StringValidator())
        test_result_setid = \
            yield self.master.db.test_result_sets.addTestResultSet(
                builderid, buildid, stepid, description, category, value_unit)
        return test_result_setid

    @defer.inlineCallbacks
    def completeTestResultSet(self, test_result_setid, tests_passed=None,
                              tests_failed=None):
        validation.verifyType(self.testcase, 'test_result_setid', test_result_setid,
                              validation.IntValidator())
        validation.verifyType(self.testcase, 'tests_passed', tests_passed,
                              validation.NoneOk(validation.IntValidator()))
        validation.verifyType(self.testcase, 'tests_failed', tests_failed,
                              validation.NoneOk(validation.IntValidator()))
        yield self.master.db.test_result_sets.completeTestResultSet(
            test_result_setid, tests_passed, tests_failed)

    # methods from TestResult resource

    @defer.inlineCallbacks
    def addTestResults(self, builderid, test_result_setid, result_values):
        yield self.master.db.test_results.addTestResults(builderid,
                                                         test_result_setid,
                                                         result_values)


class FakeDataConnector(service.AsyncMultiService):
    # FakeDataConnector delegates to the real DataConnector so it can get all
    # of the proper getter and consumer behavior; it overrides all of the
    # relevant updates with fake methods, though.

    def __init__(self, master, testcase):
        super().__init__()
        self.setServiceParent(master)
        self.updates = FakeUpdates(testcase)
        self.updates.setServiceParent(self)

        # get and control are delegated to a real connector,
        # after some additional assertions
        self.realConnector = connector.DataConnector()
        self.realConnector.setServiceParent(self)
        self.rtypes = self.realConnector.rtypes
        self.plural_rtypes = self.realConnector.plural_rtypes

    def _scanModule(self, mod):
        return self.realConnector._scanModule(mod)

    def getEndpoint(self, path):
        if not isinstance(path, tuple):
            raise TypeError('path must be a tuple')
        return self.realConnector.getEndpoint(path)

    def getResourceType(self, name):
        return getattr(self.rtypes, name)

    def get(self, path, filters=None, fields=None, order=None, limit=None,
            offset=None):
        if not isinstance(path, tuple):
            raise TypeError('path must be a tuple')
        return self.realConnector.get(path, filters=filters, fields=fields,
                                      order=order, limit=limit, offset=offset)

    def get_with_resultspec(self, path, rspec):
        if not isinstance(path, tuple):
            raise TypeError('path must be a tuple')
        if not isinstance(rspec, resultspec.ResultSpec):
            raise TypeError('rspec must be ResultSpec')
        return self.realConnector.get_with_resultspec(path, rspec)

    def control(self, action, args, path):
        if not isinstance(path, tuple):
            raise TypeError('path must be a tuple')
        return self.realConnector.control(action, args, path)

    def resultspec_from_jsonapi(self, args, entityType, is_collection):
        return self.realConnector.resultspec_from_jsonapi(
            args, entityType, is_collection)
    def getResourceTypeForGraphQlType(self, type):
        return self.realConnector.getResourceTypeForGraphQlType(type)
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import weakref

import mock

from twisted.internet import defer
from twisted.internet import reactor

from buildbot import config
from buildbot.data.graphql import GraphQLConnector
from buildbot.test import fakedb
from buildbot.test.fake import bworkermanager
from buildbot.test.fake import endpoint
from buildbot.test.fake import fakedata
from buildbot.test.fake import fakemq
from buildbot.test.fake import pbmanager
from buildbot.test.fake.botmaster import FakeBotMaster
from buildbot.test.fake.machine import FakeMachineManager
from buildbot.util import service


class FakeCache:

    """Emulate an L{AsyncLRUCache}, but without any real caching.  This
    I{does} do the weakref part, to catch un-weakref-able objects."""

    def __init__(self, name, miss_fn):
        self.name = name
        self.miss_fn = miss_fn

    def get(self, key, **kwargs):
        # always a miss: call miss_fn, but take a weakref to the result to
        # catch un-weakref-able objects early
        d = self.miss_fn(key, **kwargs)

        @d.addCallback
        def mkref(x):
            if x is not None:
                weakref.ref(x)
            return x
        return d

    def put(self, key, val):
        pass


class FakeCaches:

    def get_cache(self, name, miss_fn):
        return FakeCache(name, miss_fn)


class FakeBuilder:
    # minimal builder stand-in; only carries master/botmaster and a name

    def __init__(self, master=None, buildername="Builder"):
        if master:
            self.master = master
            self.botmaster = master.botmaster
        self.name = buildername


class FakeLogRotation:
    rotateLength = 42
    maxRotatedFiles = 42


class FakeMaster(service.MasterService):

    """
    Create a fake Master instance: a Mock with some convenience
    implementations:

    - Non-caching implementation for C{self.caches}
    """

    def __init__(self, reactor, master_id=fakedb.FakeBuildRequestsComponent.MASTER_ID):
        super().__init__()
        self._master_id = master_id
        self.reactor = reactor
        self.objectids = {}
        self.config = config.MasterConfig()
        self.caches = FakeCaches()
        self.pbmanager = pbmanager.FakePBManager()
        self.initLock = defer.DeferredLock()
        self.basedir = 'basedir'
        self.botmaster = FakeBotMaster()
        self.botmaster.setServiceParent(self)
        self.name = 'fake:/master'
        self.masterid = master_id
        self.workers = bworkermanager.FakeWorkerManager()
        self.workers.setServiceParent(self)
        self.machine_manager = FakeMachineManager()
        self.machine_manager.setServiceParent(self)
        self.log_rotation = FakeLogRotation()
        self.db = mock.Mock()
        self.next_objectid = 0
        self.config_version = 0

        def getObjectId(sched_name, class_name):
            # hand out sequential object ids, memoized per (name, class)
            k = (sched_name, class_name)
            try:
                rv = self.objectids[k]
            except KeyError:
                rv = self.objectids[k] = self.next_objectid
                self.next_objectid += 1
            return defer.succeed(rv)
        self.db.state.getObjectId = getObjectId

    def getObjectId(self):
        return defer.succeed(self._master_id)

    def subscribeToBuildRequests(self, callback):
        pass

# Leave this alias, in case we want to add more behavior later


def make_master(testcase, wantMq=False, wantDb=False, wantData=False,
                wantRealReactor=False, wantGraphql=False, url=None, **kwargs):
    # Factory for a FakeMaster wired with the requested fake subsystems.
    # wantData implies wantMq and wantDb; wantGraphql additionally scans the
    # fake endpoint module and enables graphql debug config.
    if wantRealReactor:
        _reactor = reactor
    else:
        assert testcase is not None, "need testcase for fake reactor"
        # The test case must inherit from TestReactorMixin and setup it.
        _reactor = testcase.reactor
    master = FakeMaster(_reactor, **kwargs)
    if url:
        master.buildbotURL = url
    if wantData:
        wantMq = wantDb = True
    if wantMq:
        assert testcase is not None, "need testcase for wantMq"
        master.mq = fakemq.FakeMQConnector(testcase)
        master.mq.setServiceParent(master)
    if wantDb:
        assert testcase is not None, "need testcase for wantDb"
        master.db = fakedb.FakeDBConnector(testcase)
        master.db.setServiceParent(master)
    if wantData:
        master.data = fakedata.FakeDataConnector(master, testcase)
    if wantGraphql:
        master.graphql = GraphQLConnector()
        master.graphql.setServiceParent(master)
        master.graphql.data = master.data.realConnector
        master.data._scanModule(endpoint)
        master.config.www = {'graphql': {"debug": True}}
        try:
            master.graphql.reconfigServiceWithBuildbotConfig(master.config)
        except ImportError:
            # graphql dependencies are optional; silently skip configuration
            pass
    return master
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.mq import base from buildbot.test.util import validation from buildbot.util import deferwaiter from buildbot.util import service from buildbot.util import tuplematch class FakeMQConnector(service.AsyncMultiService, base.MQBase): # a fake connector that doesn't actually bridge messages from production to # consumption, and thus doesn't do any topic handling or persistence # note that this *does* verify all messages sent and received, unless this # is set to false: verifyMessages = True def __init__(self, testcase): super().__init__() self.testcase = testcase self.setup_called = False self.productions = [] self.qrefs = [] self._deferwaiter = deferwaiter.DeferWaiter() @defer.inlineCallbacks def stopService(self): yield self._deferwaiter.wait() yield super().stopService() def setup(self): self.setup_called = True return defer.succeed(None) def produce(self, routingKey, data): self.testcase.assertIsInstance(routingKey, tuple) # XXX this is incompatible with the new scheme of sending multiple messages, # since the message type is no longer encoded by the first element of the # routing key # if self.verifyMessages: # validation.verifyMessage(self.testcase, routingKey, data) if any(not isinstance(k, str) for k in routingKey): raise AssertionError("{} is not all str".format(routingKey)) self.productions.append((routingKey, data)) # note - no consumers are called: IT'S A FAKE def callConsumer(self, routingKey, msg): if self.verifyMessages: validation.verifyMessage(self.testcase, routingKey, msg) matched = False for q in self.qrefs: if tuplematch.matchTuple(routingKey, q.filter): matched = True self._deferwaiter.add(q.callback(routingKey, msg)) if not matched: raise AssertionError("no consumer found") def startConsuming(self, callback, filter, persistent_name=None): if any(not isinstance(k, str) and k is not None for k in filter): raise AssertionError("{} is not a filter".format(filter)) qref 
= FakeQueueRef() qref.qrefs = self.qrefs qref.callback = callback qref.filter = filter qref.persistent_name = persistent_name self.qrefs.append(qref) return defer.succeed(qref) def clearProductions(self): "Clear out the cached productions" self.productions = [] def assertProductions(self, exp, orderMatters=True): """Assert that the given messages have been produced, then flush the list of produced messages. If C{orderMatters} is false, then the messages are sorted first; use this in cases where the messages must all be produced, but the order is not specified. """ if orderMatters: self.testcase.assertEqual(self.productions, exp) else: self.testcase.assertEqual(sorted(self.productions), sorted(exp)) self.productions = [] class FakeQueueRef: def stopConsuming(self): if self in self.qrefs: self.qrefs.remove(self) buildbot-3.4.0/master/buildbot/test/fake/fakeprotocol.py000066400000000000000000000053351413250514000233510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.worker.protocols import base class FakeTrivialConnection(base.Connection): info = {} def __init__(self): super().__init__("Fake") def loseConnection(self): self.notifyDisconnected() def remoteSetBuilderList(self, builders): return defer.succeed(None) class FakeConnection(base.Connection): def __init__(self, worker): super().__init__(worker.workername) self._connected = True self.remoteCalls = [] self.builders = {} # { name : isBusy } # users of the fake can add to this as desired self.info = { 'worker_commands': [], 'version': '0.9.0', 'basedir': '/w', 'system': 'nt', } def loseConnection(self): self.notifyDisconnected() def remotePrint(self, message): self.remoteCalls.append(('remotePrint', message)) return defer.succeed(None) def remoteGetWorkerInfo(self): self.remoteCalls.append(('remoteGetWorkerInfo',)) return defer.succeed(self.info) def remoteSetBuilderList(self, builders): self.remoteCalls.append(('remoteSetBuilderList', builders[:])) self.builders = dict((b, False) for b in builders) return defer.succeed(None) def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args): self.remoteCalls.append(('remoteStartCommand', remoteCommand, builderName, commandId, commandName, args)) return defer.succeed(None) def remoteShutdown(self): self.remoteCalls.append(('remoteShutdown',)) return defer.succeed(None) def remoteStartBuild(self, builderName): self.remoteCalls.append(('remoteStartBuild', builderName)) return defer.succeed(None) def remoteInterruptCommand(self, builderName, commandId, why): self.remoteCalls.append( ('remoteInterruptCommand', builderName, commandId, why)) return defer.succeed(None) buildbot-3.4.0/master/buildbot/test/fake/fakestats.py000066400000000000000000000040171413250514000226420ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.process import buildstep from buildbot.process.results import SUCCESS from buildbot.statistics import capture from buildbot.statistics.storage_backends.base import StatsStorageBase class FakeStatsStorageService(StatsStorageBase): """ Fake Storage service used in unit tests """ def __init__(self, stats=None, name='FakeStatsStorageService'): self.stored_data = [] if not stats: self.stats = [capture.CaptureProperty("TestBuilder", 'test')] else: self.stats = stats self.name = name self.captures = [] def thd_postStatsValue(self, post_data, series_name, context=None): if not context: context = {} self.stored_data.append((post_data, series_name, context)) class FakeBuildStep(buildstep.BuildStep): """ A fake build step to be used for testing. """ def doSomething(self): self.setProperty("test", 10, "test") def start(self): self.doSomething() return SUCCESS class FakeInfluxDBClient: """ Fake Influx module for testing on systems that don't have influxdb installed. """ def __init__(self, *args, **kwargs): self.points = [] def write_points(self, points): self.points.extend(points) buildbot-3.4.0/master/buildbot/test/fake/httpclientservice.py000066400000000000000000000150201413250514000244100ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json as jsonmodule import mock from twisted.internet import defer from zope.interface import implementer from buildbot.interfaces import IHttpResponse from buildbot.util import httpclientservice from buildbot.util import service from buildbot.util import toJson from buildbot.util import unicode2bytes from buildbot.util.logger import Logger log = Logger() @implementer(IHttpResponse) class ResponseWrapper: def __init__(self, code, content, url=None): self._content = content self._code = code self._url = url def content(self): content = unicode2bytes(self._content) return defer.succeed(content) def json(self): return defer.succeed(jsonmodule.loads(self._content)) @property def code(self): return self._code @property def url(self): return self._url class HTTPClientService(service.SharedService): """ HTTPClientService is a SharedService class that fakes http requests for buildbot http service testing. This class is named the same as the real HTTPClientService so that it could replace the real class in tests. If a test creates this class earlier than the real one, fake is going to be used until the master is destroyed. Whenever a master wants to create real HTTPClientService, it will find an existing fake service with the same name and use it instead. 
""" quiet = False def __init__(self, base_url, auth=None, headers=None, debug=None, verify=None, skipEncoding=None): assert not base_url.endswith("/"), "baseurl should not end with /" super().__init__() self._base_url = base_url self._auth = auth self._headers = headers self._session = None self._expected = [] def updateHeaders(self, headers): if self._headers is None: self._headers = {} self._headers.update(headers) @classmethod @defer.inlineCallbacks def getService(cls, master, case, *args, **kwargs): def assertNotCalled(self, *_args, **_kwargs): case.fail(("HTTPClientService called with *{!r}, **{!r} " "while should be called *{!r} **{!r}").format( _args, _kwargs, args, kwargs)) case.patch(httpclientservice.HTTPClientService, "__init__", assertNotCalled) service = yield super().getService(master, *args, **kwargs) service.case = case case.addCleanup(service.assertNoOutstanding) return service # tests should ensure this has been called checkAvailable = mock.Mock() def expect(self, method, ep, params=None, headers=None, data=None, json=None, code=200, content=None, content_json=None, files=None): if content is not None and content_json is not None: return ValueError("content and content_json cannot be both specified") if content_json is not None: content = jsonmodule.dumps(content_json, default=toJson) self._expected.append(dict( method=method, ep=ep, params=params, headers=headers, data=data, json=json, code=code, content=content, files=files)) return None def assertNoOutstanding(self): self.case.assertEqual(0, len(self._expected), "expected more http requests:\n {!r}".format(self._expected)) def _doRequest(self, method, ep, params=None, headers=None, data=None, json=None, files=None, timeout=None): assert ep == "" or ep.startswith("/"), "ep should start with /: " + ep if not self.quiet: log.debug("{method} {ep} {params!r} <- {data!r}", method=method, ep=ep, params=params, data=data or json) if json is not None: # ensure that the json is really jsonable 
jsonmodule.dumps(json, default=toJson) if files is not None: files = dict((k, v.read()) for (k, v) in files.items()) if not self._expected: raise AssertionError( "Not expecting a request, while we got: " "method={!r}, ep={!r}, params={!r}, headers={!r}, " "data={!r}, json={!r}, files={!r}".format( method, ep, params, headers, data, json, files)) expect = self._expected.pop(0) # pylint: disable=too-many-boolean-expressions if (expect['method'] != method or expect['ep'] != ep or expect['params'] != params or expect['headers'] != headers or expect['data'] != data or expect['json'] != json or expect['files'] != files): raise AssertionError( "expecting:\n" "method={!r}, ep={!r}, params={!r}, headers={!r}, " "data={!r}, json={!r}, files={!r}\n" "got :\n" "method={!r}, ep={!r}, params={!r}, headers={!r}, " "data={!r}, json={!r}, files={!r}".format( expect['method'], expect['ep'], expect['params'], expect['headers'], expect['data'], expect['json'], expect['files'], method, ep, params, headers, data, json, files, )) if not self.quiet: log.debug("{method} {ep} -> {code} {content!r}", method=method, ep=ep, code=expect['code'], content=expect['content']) return defer.succeed(ResponseWrapper(expect['code'], expect['content'])) # lets be nice to the auto completers, and don't generate that code def get(self, ep, **kwargs): return self._doRequest('get', ep, **kwargs) def put(self, ep, **kwargs): return self._doRequest('put', ep, **kwargs) def delete(self, ep, **kwargs): return self._doRequest('delete', ep, **kwargs) def post(self, ep, **kwargs): return self._doRequest('post', ep, **kwargs) buildbot-3.4.0/master/buildbot/test/fake/kube.py000066400000000000000000000046611413250514000216100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy import time from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.util.kubeclientservice import KubeError class KubeClientService(fakehttpclientservice.HTTPClientService): def __init__(self, kube_config=None, *args, **kwargs): c = kube_config.getConfig() super().__init__(c['master_url'], *args, **kwargs) self.namespace = c['namespace'] self.addService(kube_config) self.pods = {} def createPod(self, namespace, spec): if 'metadata' not in spec: raise KubeError({ 'message': 'Pod "" is invalid: metadata.name: ' 'Required value: name or generateName is required' }) name = spec['metadata']['name'] pod = { 'kind': 'Pod', 'metadata': copy.copy(spec['metadata']), 'spec': copy.deepcopy(spec['spec']) } self.pods[namespace + '/' + name] = pod return pod def deletePod(self, namespace, name, graceperiod=0): if namespace + '/' + name not in self.pods: raise KubeError({ 'message': 'Pod not found', 'reason': 'NotFound'}) spec = self.pods[namespace + '/' + name] del self.pods[namespace + '/' + name] spec['metadata']['deletionTimestamp'] = time.ctime(time.time()) return spec def waitForPodDeletion(self, namespace, name, timeout): if namespace + '/' + name in self.pods: raise TimeoutError("Did not see pod {name} terminate after {timeout}s".format( name=name, timeout=timeout )) return { 'kind': 'Status', 'reason': 'NotFound' } buildbot-3.4.0/master/buildbot/test/fake/latent.py000066400000000000000000000165331413250514000221520ustar00rootroot00000000000000# 
This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import enum from twisted.internet import defer from twisted.python.filepath import FilePath from twisted.trial.unittest import SkipTest from buildbot.test.fake.worker import SeverWorkerConnectionMixin from buildbot.test.fake.worker import disconnect_master_side_worker from buildbot.worker import AbstractLatentWorker try: from buildbot_worker.bot import LocalWorker as RemoteWorker from buildbot_worker.base import BotBase except ImportError: RemoteWorker = None class States(enum.Enum): STOPPED = 0 STARTING = 1 STARTED = 2 STOPPING = 3 class LatentController(SeverWorkerConnectionMixin): """ A controller for ``ControllableLatentWorker``. https://glyph.twistedmatrix.com/2015/05/separate-your-fakes-and-your-inspectors.html Note that by default workers will connect automatically if True is passed to start_instance(). Also by default workers will disconnect automatically just as stop_instance() is executed. 
""" def __init__(self, case, name, kind=None, build_wait_timeout=600, starts_without_substantiate=None, **kwargs): self.case = case self.build_wait_timeout = build_wait_timeout self.worker = ControllableLatentWorker(name, self, **kwargs) self.remote_worker = None if starts_without_substantiate is not None: self.worker.starts_without_substantiate = \ starts_without_substantiate self.state = States.STOPPED self.auto_stop_flag = False self.auto_start_flag = False self.auto_connect_worker = True self.auto_disconnect_worker = True self.kind = kind self._started_kind = None self._started_kind_deferred = None @property def starting(self): return self.state == States.STARTING @property def started(self): return self.state == States.STARTED @property def stopping(self): return self.state == States.STOPPING @property def stopped(self): return self.state == States.STOPPED def auto_start(self, result): self.auto_start_flag = result if self.auto_start_flag and self.state == States.STARTING: self.start_instance(True) @defer.inlineCallbacks def start_instance(self, result): yield self.do_start_instance(result) d, self._start_deferred = self._start_deferred, None d.callback(result) @defer.inlineCallbacks def do_start_instance(self, result): assert self.state == States.STARTING self.state = States.STARTED if self.auto_connect_worker and result is True: yield self.connect_worker() @defer.inlineCallbacks def auto_stop(self, result): self.auto_stop_flag = result if self.auto_stop_flag and self.state == States.STOPPING: yield self.stop_instance(True) @defer.inlineCallbacks def stop_instance(self, result): yield self.do_stop_instance() d, self._stop_deferred = self._stop_deferred, None d.callback(result) @defer.inlineCallbacks def do_stop_instance(self): assert self.state == States.STOPPING self.state = States.STOPPED self._started_kind = None if self.auto_disconnect_worker: yield self.disconnect_worker() @defer.inlineCallbacks def connect_worker(self): if self.remote_worker is not 
None: return if RemoteWorker is None: raise SkipTest("buildbot-worker package is not installed") workdir = FilePath(self.case.mktemp()) workdir.createDirectory() self.remote_worker = RemoteWorker(self.worker.name, workdir.path, False) yield self.remote_worker.setServiceParent(self.worker) @defer.inlineCallbacks def disconnect_worker(self): yield super().disconnect_worker() if self.remote_worker is None: return self.remote_worker, worker = None, self.remote_worker disconnect_master_side_worker(self.worker) yield worker.disownServiceParent() def setup_kind(self, build): if build: self._started_kind_deferred = build.render(self.kind) else: self._started_kind_deferred = self.kind @defer.inlineCallbacks def get_started_kind(self): if self._started_kind_deferred: self._started_kind = yield self._started_kind_deferred self._started_kind_deferred = None return self._started_kind def patchBot(self, case, remoteMethod, patch): case.patch(BotBase, remoteMethod, patch) class ControllableLatentWorker(AbstractLatentWorker): """ A latent worker that can be controlled by tests. 
""" builds_may_be_incompatible = True def __init__(self, name, controller, **kwargs): self._controller = controller self._random_password_id = 0 AbstractLatentWorker.__init__(self, name, None, **kwargs) def checkConfig(self, name, _, **kwargs): AbstractLatentWorker.checkConfig( self, name, None, build_wait_timeout=self._controller.build_wait_timeout, **kwargs) def reconfigService(self, name, _, **kwargs): return super().reconfigService(name, self.getRandomPass(), build_wait_timeout=self._controller.build_wait_timeout, **kwargs) def _generate_random_password(self): self._random_password_id += 1 return 'password_{}'.format(self._random_password_id) @defer.inlineCallbacks def isCompatibleWithBuild(self, build_props): if self._controller.state == States.STOPPED: return True requested_kind = yield build_props.render((self._controller.kind)) curr_kind = yield self._controller.get_started_kind() return requested_kind == curr_kind def start_instance(self, build): self._controller.setup_kind(build) assert self._controller.state == States.STOPPED self._controller.state = States.STARTING if self._controller.auto_start_flag: self._controller.do_start_instance(True) return defer.succeed(True) self._controller._start_deferred = defer.Deferred() return self._controller._start_deferred @defer.inlineCallbacks def stop_instance(self, fast): assert self._controller.state == States.STARTED self._controller.state = States.STOPPING if self._controller.auto_stop_flag: yield self._controller.do_stop_instance() return True self._controller._stop_deferred = defer.Deferred() return (yield self._controller._stop_deferred) buildbot-3.4.0/master/buildbot/test/fake/libvirt.py000066400000000000000000000046111413250514000223300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members class Domain: def __init__(self, name, conn, libvirt_id): self.conn = conn self._name = name self.running = False self.libvirt_id = libvirt_id self.metadata = {} def ID(self): return self.libvirt_id def name(self): return self._name def create(self): self.running = True def shutdown(self): self.running = False def destroy(self): self.running = False del self.conn[self._name] def setMetadata(self, type, metadata, key, uri, flags): self.metadata[key] = (type, uri, metadata, flags) class Connection: def __init__(self, uri): self.uri = uri self.domains = {} self._next_libvirt_id = 1 def createXML(self, xml, flags): # FIXME: This should really parse the name out of the xml, i guess d = self.fake_add("instance", self._next_libvirt_id) self._next_libvirt_id += 1 d.running = True return d def listDomainsID(self): return list(self.domains) def lookupByName(self, name): return self.domains.get(name, None) def lookupByID(self, ID): for d in self.domains.values(): if d.ID == ID: return d return None def fake_add(self, name, libvirt_id): d = Domain(name, self, libvirt_id) self.domains[name] = d return d def fake_add_domain(self, name, d): self.domains[name] = d def registerCloseCallback(self, c, c2): pass def open(uri): raise NotImplementedError('this must be patched in tests') VIR_DOMAIN_AFFECT_CONFIG = 2 VIR_DOMAIN_METADATA_ELEMENT = 2 class libvirtError(Exception): pass 
buildbot-3.4.0/master/buildbot/test/fake/logfile.py000066400000000000000000000065631413250514000223060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import util from buildbot.util import lineboundaries class FakeLogFile: def __init__(self, name): self.name = name self.header = '' self.stdout = '' self.stderr = '' self.lbfs = {} self.finished = False self._finish_waiters = [] self._had_errors = False self.subPoint = util.subscription.SubscriptionPoint("%r log" % (name,)) def getName(self): return self.name def subscribe(self, callback): return self.subPoint.subscribe(callback) def _getLbf(self, stream, meth): try: return self.lbfs[stream] except KeyError: def wholeLines(lines): self.subPoint.deliver(stream, lines) assert not self.finished lbf = self.lbfs[stream] = \ lineboundaries.LineBoundaryFinder(wholeLines) return lbf def addHeader(self, text): if not isinstance(text, str): text = text.decode('utf-8') self.header += text self._getLbf('h', 'headerReceived').append(text) return defer.succeed(None) def addStdout(self, text): if not isinstance(text, str): text = text.decode('utf-8') self.stdout += text self._getLbf('o', 'outReceived').append(text) return defer.succeed(None) def addStderr(self, text): if not isinstance(text, str): text = 
text.decode('utf-8') self.stderr += text self._getLbf('e', 'errReceived').append(text) return defer.succeed(None) def isFinished(self): return self.finished def waitUntilFinished(self): d = defer.Deferred() if self.finished: d.succeed(None) else: self._finish_waiters.append(d) return d def flushFakeLogfile(self): for lbf in self.lbfs.values(): lbf.flush() def had_errors(self): return self._had_errors @defer.inlineCallbacks def finish(self): assert not self.finished self.flushFakeLogfile() self.finished = True # notify subscribers *after* finishing the log self.subPoint.deliver(None, None) yield self.subPoint.waitForDeliveriesToFinish() self._had_errors = len(self.subPoint.pop_exceptions()) > 0 # notify those waiting for finish for d in self._finish_waiters: d.callback(None) def fakeData(self, header='', stdout='', stderr=''): if header: self.header += header if stdout: self.stdout += stdout if stderr: self.stderr += stderr buildbot-3.4.0/master/buildbot/test/fake/machine.py000066400000000000000000000047151413250514000222660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.machine.latent import AbstractLatentMachine from buildbot.machine.latent import States as MachineStates from buildbot.util import service class FakeMachineManager(service.AsyncMultiService): name = 'MachineManager' @property def machines(self): return self.namedServices def getMachineByName(self, name): if name in self.machines: return self.machines[name] return None class LatentMachineController: """ A controller for ``ControllableLatentMachine`` """ def __init__(self, name, **kwargs): self.machine = ControllableLatentMachine(name, self, **kwargs) self._start_deferred = None self._stop_deferred = None def start_machine(self, result): assert self.machine.state == MachineStates.STARTING d, self._start_deferred = self._start_deferred, None if isinstance(result, Exception): d.errback(result) else: d.callback(result) def stop_machine(self, result=True): assert self.machine.state == MachineStates.STOPPING d, self._stop_deferred = self._stop_deferred, None if isinstance(result, Exception): d.errback(result) else: d.callback(result) class ControllableLatentMachine(AbstractLatentMachine): """ A latent machine that can be controlled by tests """ def __init__(self, name, controller, **kwargs): self._controller = controller super().__init__(name, **kwargs) def start_machine(self): d = defer.Deferred() self._controller._start_deferred = d return d def stop_machine(self): d = defer.Deferred() self._controller._stop_deferred = d return d buildbot-3.4.0/master/buildbot/test/fake/openstack.py000066400000000000000000000140111413250514000226370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright 2013 Cray Inc. import uuid ACTIVE = 'ACTIVE' BUILD = 'BUILD' DELETED = 'DELETED' ERROR = 'ERROR' UNKNOWN = 'UNKNOWN' TEST_UUIDS = { 'image': '28a65eb4-f354-4420-97dc-253b826547f7', 'volume': '65fbb9f1-c4d5-40a8-a233-ad47c52bb837', 'snapshot': 'ab89152d-3c26-4d30-9ae5-65b705f874b7', 'flavor': '853774a1-459f-4f1f-907e-c96f62472531', } class FakeNovaClient(): region_name = "" # Parts used from novaclient class Client(): def __init__(self, version, session): self.glance = ItemManager() self.glance._add_items([Image(TEST_UUIDS['image'], 'CirrOS 0.3.4', 13287936)]) self.volumes = ItemManager() self.volumes._add_items([Volume(TEST_UUIDS['volume'], 'CirrOS 0.3.4', 4)]) self.volume_snapshots = ItemManager() self.volume_snapshots._add_items([Snapshot(TEST_UUIDS['snapshot'], 'CirrOS 0.3.4', 2)]) self.flavors = ItemManager() self.flavors._add_items([Flavor(TEST_UUIDS['flavor'], 'm1.small', 0)]) self.servers = Servers() self.session = session self.client = FakeNovaClient() class ItemManager(): def __init__(self): self._items = {} def _add_items(self, new_items): for item in new_items: self._items[item.id] = item def list(self): return self._items.values() def get(self, uuid): if uuid in self._items: return self._items[uuid] else: raise NotFound # This exists because Image needs an attribute that isn't supported by # namedtuple. And once the base code is there might as well have Volume and # Snapshot use it too. 
class Item(): def __init__(self, id, name, size): self.id = id self.name = name self.size = size class Image(Item): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) setattr(self, 'OS-EXT-IMG-SIZE:size', self.size) class Flavor(Item): pass class Volume(Item): pass class Snapshot(Item): pass class Servers(): fail_to_get = False fail_to_start = False gets_until_active = 3 gets_until_disappears = 1 instances = {} def create(self, *boot_args, **boot_kwargs): instance_id = uuid.uuid4() instance = Instance(instance_id, self, boot_args, boot_kwargs) self.instances[instance_id] = instance return instance def get(self, instance_id): if instance_id not in self.instances: raise NotFound inst = self.instances[instance_id] if not self.fail_to_get or inst.gets < self.gets_until_disappears: if not inst.status.startswith('BUILD'): return inst inst.gets += 1 if inst.gets >= self.gets_until_active: if not self.fail_to_start: inst.status = ACTIVE else: inst.status = ERROR return inst else: raise NotFound def delete(self, instance_id): if instance_id in self.instances: del self.instances[instance_id] def findall(self, **kwargs): name = kwargs.get('name', None) if name: return list(filter(lambda item: item.name == name, self.instances.values())) return [] def find(self, **kwargs): result = self.findall(**kwargs) if len(result) > 0: raise NoUniqueMatch if len(result) == 0: raise NotFound return result[0] # This is returned by Servers.create(). class Instance(): def __init__(self, id, servers, boot_args, boot_kwargs): self.id = id self.servers = servers self.boot_args = boot_args self.boot_kwargs = boot_kwargs self.gets = 0 self.status = 'BUILD(networking)' self.metadata = boot_kwargs.get('meta', {}) try: self.name = boot_args[0] except IndexError: self.name = 'name' def delete(self): self.servers.delete(self.id) # Parts used from novaclient.exceptions. class NotFound(Exception): pass class NoUniqueMatch(Exception): pass # Parts used from keystoneauth1. 
def get_plugin_loader(plugin_type): if plugin_type == 'password': return PasswordLoader() if plugin_type == 'token': return TokenLoader() raise ValueError("plugin_type '{}' is not supported".format(plugin_type)) class PasswordLoader(): def load_from_options(self, **kwargs): return PasswordAuth(**kwargs) class TokenLoader(): def load_from_options(self, **kwargs): return TokenAuth(**kwargs) class PasswordAuth(): def __init__(self, auth_url, password, project_name, username, user_domain_name=None, project_domain_name=None): self.auth_url = auth_url self.password = password self.project_name = project_name self.username = username self.user_domain_name = user_domain_name self.project_domain_name = project_domain_name class TokenAuth(): def __init__(self, auth_url, token): self.auth_url = auth_url self.token = token self.project_name = 'tenant' self.username = 'testuser' self.user_domain_name = 'token' self.project_domain_name = 'token' class Session(): def __init__(self, auth): self.auth = auth buildbot-3.4.0/master/buildbot/test/fake/pbmanager.py000066400000000000000000000035461413250514000226170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.util import service class FakePBManager(service.AsyncMultiService): def __init__(self): super().__init__() self.setName("fake-pbmanager") self._registrations = [] self._unregistrations = [] def register(self, portstr, username, password, pfactory): if (portstr, username) not in self._registrations: reg = FakeRegistration(self, portstr, username) self._registrations.append((portstr, username, password)) return defer.succeed(reg) else: raise KeyError("username '{}' is already registered on port {}".format(username, portstr)) def _unregister(self, portstr, username): self._unregistrations.append((portstr, username)) return defer.succeed(None) class FakeRegistration: def __init__(self, pbmanager, portstr, username): self._portstr = portstr self._username = username self._pbmanager = pbmanager def unregister(self): self._pbmanager._unregister(self._portstr, self._username) buildbot-3.4.0/master/buildbot/test/fake/private_tempdir.py000066400000000000000000000025661413250514000240620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os class FakePrivateTemporaryDirectory: def __init__(self, suffix=None, prefix=None, dir=None, mode=0o700): dir = dir or '/' prefix = prefix or '' suffix = suffix or '' self.name = os.path.join(dir, prefix + '@@@' + suffix) self.mode = mode def __enter__(self): return self.name def __exit__(self, exc, value, tb): pass def cleanup(self): pass class MockPrivateTemporaryDirectory: def __init__(self): self.dirs = [] def __call__(self, *args, **kwargs): ret = FakePrivateTemporaryDirectory(*args, **kwargs) self.dirs.append((ret.name, ret.mode)) return ret buildbot-3.4.0/master/buildbot/test/fake/reactor.py000066400000000000000000000136151413250514000223200ustar00rootroot00000000000000# Copyright Buildbot Team Members # Portions copyright 2015-2016 ClusterHQ Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
from twisted.internet import defer from twisted.internet import reactor from twisted.internet.base import _ThreePhaseEvent from twisted.internet.interfaces import IReactorCore from twisted.internet.interfaces import IReactorThreads from twisted.internet.task import Clock from twisted.python import log from twisted.python.failure import Failure from zope.interface import implementer # The code here is based on the implementations in # https://twistedmatrix.com/trac/ticket/8295 # https://twistedmatrix.com/trac/ticket/8296 @implementer(IReactorCore) class CoreReactor: """ Partial implementation of ``IReactorCore``. """ def __init__(self): super().__init__() self._triggers = {} def addSystemEventTrigger(self, phase, eventType, f, *args, **kw): event = self._triggers.setdefault(eventType, _ThreePhaseEvent()) return eventType, event.addTrigger(phase, f, *args, **kw) def removeSystemEventTrigger(self, triggerID): eventType, handle = triggerID event = self._triggers.setdefault(eventType, _ThreePhaseEvent()) event.removeTrigger(handle) def fireSystemEvent(self, eventType): event = self._triggers.get(eventType) if event is not None: event.fireEvent() def callWhenRunning(self, f, *args, **kwargs): f(*args, **kwargs) class NonThreadPool: """ A stand-in for ``twisted.python.threadpool.ThreadPool`` so that the majority of the test suite does not need to use multithreading. This implementation takes the function call which is meant to run in a thread pool and runs it synchronously in the calling thread. :ivar int calls: The number of calls which have been dispatched to this object. """ calls = 0 def __init__(self, **kwargs): pass def callInThreadWithCallback(self, onResult, func, *args, **kw): self.calls += 1 try: result = func(*args, **kw) except: # noqa pylint: disable=bare-except # We catch *everything* here, since normally this code would be # running in a thread, where there is nothing that will catch # error. 
onResult(False, Failure()) else: onResult(True, result) def start(self): pass def stop(self): pass @implementer(IReactorThreads) class NonReactor: """ A partial implementation of ``IReactorThreads`` which fits into the execution model defined by ``NonThreadPool``. """ def callFromThread(self, f, *args, **kwargs): f(*args, **kwargs) def getThreadPool(self): return NonThreadPool() class TestReactor(NonReactor, CoreReactor, Clock): def __init__(self): super().__init__() # whether there are calls that should run right now self._pendingCurrentCalls = False self.stop_called = False def _executeCurrentDelayedCalls(self): while self.getDelayedCalls(): first = sorted(self.getDelayedCalls(), key=lambda a: a.getTime())[0] if first.getTime() > self.seconds(): break self.advance(0) self._pendingCurrentCalls = False @defer.inlineCallbacks def _catchPrintExceptions(self, what, *a, **kw): try: r = what(*a, **kw) if isinstance(r, defer.Deferred): yield r except Exception as e: log.msg('Unhandled exception from deferred when doing ' 'TestReactor.advance()', e) raise def callLater(self, when, what, *a, **kw): # Buildbot often uses callLater(0, ...) to defer execution of certain # code to the next iteration of the reactor. This means that often # there are pending callbacks registered to the reactor that might # block other code from proceeding unless the test reactor has an # iteration. To avoid deadlocks in tests we give the real reactor a # chance to advance the test reactor whenever we detect that there # are callbacks that should run in the next iteration of the test # reactor. # # Additionally, we wrap all calls with a function that prints any # unhandled exceptions if when <= 0 and not self._pendingCurrentCalls: reactor.callLater(0, self._executeCurrentDelayedCalls) return super().callLater(when, self._catchPrintExceptions, what, *a, **kw) def stop(self): # first fire pending calls until the current time. 
Note that the real # reactor only advances until the current time in the case of shutdown. self.advance(0) # then, fire the shutdown event self.fireSystemEvent('shutdown') self.stop_called = True buildbot-3.4.0/master/buildbot/test/fake/remotecommand.py000066400000000000000000000326611413250514000235150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import functools from twisted.internet import defer from twisted.python import failure from buildbot.process.results import CANCELLED from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS class FakeRemoteCommand: # callers should set this to the running TestCase instance testcase = None active = False interrupted = False _waiting_for_interrupt = False def __init__(self, remote_command, args, ignore_updates=False, collectStdout=False, collectStderr=False, decodeRC=None, stdioLogName='stdio'): if decodeRC is None: decodeRC = {0: SUCCESS} # copy the args and set a few defaults self.remote_command = remote_command self.args = args.copy() self.logs = {} self._log_close_when_finished = {} self.delayedLogs = {} self.rc = -999 self.collectStdout = collectStdout self.collectStderr = collectStderr self.updates = {} self.decodeRC = decodeRC self.stdioLogName = stdioLogName if collectStdout: self.stdout = '' if collectStderr: 
self.stderr = '' @defer.inlineCallbacks def run(self, step, conn, builder_name): if self._waiting_for_interrupt: yield step.interrupt('interrupt reason') if not self.interrupted: raise RuntimeError("Interrupted step, but command was not interrupted") # delegate back to the test case cmd = yield self.testcase._remotecommand_run(self, step, conn, builder_name) for name, log_ in self.logs.items(): if self._log_close_when_finished[name]: log_.finish() return cmd def useLog(self, log_, closeWhenFinished=False, logfileName=None): if not logfileName: logfileName = log_.getName() assert logfileName not in self.logs assert logfileName not in self.delayedLogs self.logs[logfileName] = log_ self._log_close_when_finished[logfileName] = closeWhenFinished def useLogDelayed(self, logfileName, activateCallBack, closeWhenFinished=False): assert logfileName not in self.logs assert logfileName not in self.delayedLogs self.delayedLogs[logfileName] = (activateCallBack, closeWhenFinished) def addStdout(self, data): if self.collectStdout: self.stdout += data if self.stdioLogName is not None and self.stdioLogName in self.logs: self.logs[self.stdioLogName].addStdout(data) def addStderr(self, data): if self.collectStderr: self.stderr += data if self.stdioLogName is not None and self.stdioLogName in self.logs: self.logs[self.stdioLogName].addStderr(data) def addHeader(self, data): if self.stdioLogName is not None and self.stdioLogName in self.logs: self.logs[self.stdioLogName].addHeader(data) @defer.inlineCallbacks def addToLog(self, logname, data): # Activate delayed logs on first data. 
if logname in self.delayedLogs: (activate_callback, close_when_finished) = self.delayedLogs[logname] del self.delayedLogs[logname] loog = yield activate_callback(self) self.logs[logname] = loog self._log_close_when_finished[logname] = close_when_finished if logname in self.logs: self.logs[logname].addStdout(data) else: raise Exception("{}.addToLog: no such log {}".format(self, logname)) def interrupt(self, why): if not self._waiting_for_interrupt: raise RuntimeError("Got interrupt, but FakeRemoteCommand was not expecting it") self._waiting_for_interrupt = False self.interrupted = True def results(self): if self.interrupted: return CANCELLED if self.rc in self.decodeRC: return self.decodeRC[self.rc] return FAILURE def didFail(self): return self.results() == FAILURE def set_run_interrupt(self): self._waiting_for_interrupt = True def __repr__(self): return "FakeRemoteCommand(" + repr(self.remote_command) + "," + repr(self.args) + ")" class FakeRemoteShellCommand(FakeRemoteCommand): def __init__(self, workdir, command, env=None, want_stdout=1, want_stderr=1, timeout=20 * 60, maxTime=None, sigtermTime=None, logfiles=None, usePTY=None, logEnviron=True, collectStdout=False, collectStderr=False, interruptSignal=None, initialStdin=None, decodeRC=None, stdioLogName='stdio'): if logfiles is None: logfiles = {} if decodeRC is None: decodeRC = {0: SUCCESS} args = dict(workdir=workdir, command=command, env=env or {}, want_stdout=want_stdout, want_stderr=want_stderr, initial_stdin=initialStdin, timeout=timeout, maxTime=maxTime, logfiles=logfiles, usePTY=usePTY, logEnviron=logEnviron) if interruptSignal is not None and interruptSignal != 'KILL': args['interruptSignal'] = interruptSignal super().__init__("shell", args, collectStdout=collectStdout, collectStderr=collectStderr, decodeRC=decodeRC, stdioLogName=stdioLogName) class ExpectRemoteRef: """ Define an expected RemoteReference in the args to an L{Expect} class """ def __init__(self, rrclass): self.rrclass = rrclass def 
__eq__(self, other): return isinstance(other, self.rrclass) class Expect: """ Define an expected L{RemoteCommand}, with the same arguments Extra behaviors of the remote command can be added to the instance, using class methods. Use L{Expect.log} to add a logfile, L{Expect.update} to add an arbitrary update, or add an integer to specify the return code (rc), or add a Failure instance to raise an exception. Additionally, use L{Expect.behavior}, passing a callable that will be invoked with the real command and can do what it likes: def custom_behavior(command): ... Expect('somecommand', { args='foo' }) + Expect.behavior(custom_behavior), ... Expect('somecommand', { args='foo' }) + Expect.log('stdio', stdout='foo!') + Expect.log('config.log', stdout='some info') + Expect.update('status', 'running') + 0, # (specifies the rc) ... """ def __init__(self, remote_command, args, interrupted=False): """ Expect a command named C{remote_command}, with args C{args}. """ self.remote_command = remote_command self.args = args self.result = None self.interrupted = interrupted self.behaviors = [] @classmethod def behavior(cls, callable): """ Add an arbitrary behavior that is expected of this command. C{callable} will be invoked with the real command as an argument, and can do what it wishes. It will be invoked with maybeDeferred, in case the operation is asynchronous. """ return ('callable', callable) @classmethod def log(self, name, **streams): return ('log', name, streams) @classmethod def update(self, name, value): return ('update', name, value) def __add__(self, other): # special-case adding an integer (return code) or failure (error) if isinstance(other, int): self.behaviors.append(('rc', other)) elif isinstance(other, failure.Failure): self.behaviors.append(('err', other)) else: self.behaviors.append(other) return self def runBehavior(self, behavior, args, command): """ Implement the given behavior. Returns a Deferred. 
""" if behavior == 'rc': command.rc = args[0] d = defer.succeed(None) for log in command.logs.values(): if hasattr(log, 'unwrap'): # We're handling an old style log that was # used in an old style step. We handle the necessary # stuff to make the make sync/async log hack work. d.addCallback( functools.partial(lambda log, _: log.unwrap(), log)) d.addCallback(lambda l: l.flushFakeLogfile()) return d elif behavior == 'err': return defer.fail(args[0]) elif behavior == 'update': command.updates.setdefault(args[0], []).append(args[1]) elif behavior == 'log': name, streams = args for stream in streams: if stream not in ['header', 'stdout', 'stderr']: raise Exception('Log stream {} is not recognized'.format(stream)) if name == command.stdioLogName: if 'header' in streams: command.addHeader(streams['header']) if 'stdout' in streams: command.addStdout(streams['stdout']) if 'stderr' in streams: command.addStderr(streams['stderr']) else: if 'header' in streams or 'stderr' in streams: raise Exception('Non stdio streams only support stdout') return command.addToLog(name, streams['stdout']) elif behavior == 'callable': return defer.maybeDeferred(lambda: args[0](command)) else: return defer.fail(failure.Failure(AssertionError('invalid behavior {}'.format( behavior)))) return defer.succeed(None) @defer.inlineCallbacks def runBehaviors(self, command): """ Run all expected behaviors for this command """ for behavior in self.behaviors: yield self.runBehavior(behavior[0], behavior[1:], command) def expectationPassed(self, exp): """ Some expectations need to be able to distinguish pass/fail of nested expectations. This will get invoked once for every nested exception and once for self unless anything fails. Failures are passed to raiseExpectationFailure for handling. @param exp: The nested exception that passed or self. """ def raiseExpectationFailure(self, exp, failure): """ Some expectations may wish to suppress failure. The default expectation does not. 
This will get invoked if the expectations fails on a command. @param exp: the expectation that failed. this could be self or a nested exception """ raise failure def shouldAssertCommandEqualExpectation(self): """ Whether or not we should validate that the current command matches the expectation. Some expectations may not have a way to match a command. """ return True def shouldRunBehaviors(self): """ Whether or not, once the command matches the expectation, the behaviors should be run for this step. """ return True def shouldKeepMatchingAfter(self, command): """ Expectations are by default not kept matching multiple commands. Return True if you want to re-use a command for multiple commands. """ return False def nestedExpectations(self): """ Any sub-expectations that should be validated. """ return [] def __repr__(self): return "Expect(" + repr(self.remote_command) + ")" class ExpectShell(Expect): """ Define an expected L{RemoteShellCommand}, with the same arguments Any non-default arguments must be specified explicitly (e.g., usePTY). 
""" def __init__(self, workdir, command, env=None, want_stdout=1, want_stderr=1, initialStdin=None, timeout=20 * 60, maxTime=None, logfiles=None, usePTY=None, logEnviron=True, interruptSignal=None): if env is None: env = {} if logfiles is None: logfiles = {} args = dict(workdir=workdir, command=command, env=env, want_stdout=want_stdout, want_stderr=want_stderr, initial_stdin=initialStdin, timeout=timeout, maxTime=maxTime, logfiles=logfiles, usePTY=usePTY, logEnviron=logEnviron) if interruptSignal is not None: args['interruptSignal'] = interruptSignal super().__init__("shell", args) def __repr__(self): return "ExpectShell(" + repr(self.remote_command) + repr(self.args['command']) + ")" buildbot-3.4.0/master/buildbot/test/fake/secrets.py000066400000000000000000000006241413250514000223250ustar00rootroot00000000000000 from buildbot.secrets.providers.base import SecretProviderBase class FakeSecretStorage(SecretProviderBase): name = "SecretsInFake" def reconfigService(self, secretdict=None): if secretdict is None: secretdict = {} self.allsecrets = secretdict def get(self, key): if key in self.allsecrets: return self.allsecrets[key] return None buildbot-3.4.0/master/buildbot/test/fake/state.py000066400000000000000000000017761413250514000220060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members class State: """ A simple class you can use to keep track of state throughout a test. Just assign whatever you want to its attributes. Its constructor provides a shortcut to setting initial values for attributes """ def __init__(self, **kwargs): self.__dict__.update(kwargs) buildbot-3.4.0/master/buildbot/test/fake/step.py000066400000000000000000000042171413250514000216320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process.buildstep import BuildStep from buildbot.process.results import CANCELLED class BuildStepController: """ A controller for ``ControllableBuildStep``. https://glyph.twistedmatrix.com/2015/05/separate-your-fakes-and-your-inspectors.html """ def __init__(self, **kwargs): self.step = ControllableBuildStep(self, **kwargs) self.running = False self.auto_finish_results = None def finish_step(self, result): assert self.running self.running = False d, self._run_deferred = self._run_deferred, None d.callback(result) def auto_finish_step(self, result): self.auto_finish_results = result if self.running: self.finish_step(result) class ControllableBuildStep(BuildStep): """ A latent worker that can be controlled by tests. 
""" name = "controllableStep" def __init__(self, controller, **kwargs): super().__init__(**kwargs) self._controller = controller def run(self): if self._controller.auto_finish_results is not None: return defer.succeed(self._controller.auto_finish_results) assert not self._controller.running self._controller.running = True self._controller._run_deferred = defer.Deferred() return self._controller._run_deferred def interrupt(self, reason): self._controller.finish_step(CANCELLED) buildbot-3.4.0/master/buildbot/test/fake/web.py000066400000000000000000000066711413250514000214420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from io import BytesIO from mock import Mock from twisted.internet import defer from twisted.web import server from buildbot.test.fake import fakemaster def fakeMasterForHooks(testcase): # testcase must derive from TestReactorMixin and setUpTestReactor() # must be called before calling this function. master = fakemaster.make_master(testcase, wantData=True) master.www = Mock() return master class FakeRequest(Mock): """ A fake Twisted Web Request object, including some pointers to the buildmaster and an addChange method on that master which will append its arguments to self.addedChanges. 
""" written = b'' finished = False redirected_to = None failure = None def __init__(self, args=None, content=b''): super().__init__() if args is None: args = {} self.args = args self.content = BytesIO(content) self.site = Mock() self.site.buildbot_service = Mock() self.uri = b'/' self.prepath = [] self.method = b'GET' self.received_headers = {} self.deferred = defer.Deferred() def getHeader(self, key): return self.received_headers.get(key) def write(self, data): self.written = self.written + data def redirect(self, url): self.redirected_to = url def finish(self): self.finished = True self.deferred.callback(None) def processingFailed(self, f): self.deferred.errback(f) # work around http://code.google.com/p/mock/issues/detail?id=105 def _get_child_mock(self, **kw): return Mock(**kw) # cribed from twisted.web.test._util._render def test_render(self, resource): for arg in self.args: if not isinstance(arg, bytes): raise ValueError("self.args: {!r}, contains " "values which are not bytes".format(self.args)) if self.uri and not isinstance(self.uri, bytes): raise ValueError("self.uri: {!r} is {}, not bytes".format( self.uri, type(self.uri))) if self.method and not isinstance(self.method, bytes): raise ValueError("self.method: {!r} is {}, not bytes".format( self.method, type(self.method))) result = resource.render(self) if isinstance(result, bytes): self.write(result) self.finish() return self.deferred elif isinstance(result, str): raise ValueError("{!r} should return bytes, not {}: {!r}".format( resource.render, type(result), result)) elif result is server.NOT_DONE_YET: return self.deferred else: raise ValueError("Unexpected return value: {!r}".format(result)) buildbot-3.4.0/master/buildbot/test/fake/worker.py000066400000000000000000000136251413250514000221730ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.python.filepath import FilePath from twisted.spread import pb from twisted.trial.unittest import SkipTest from buildbot.process import properties from buildbot.test.fake import fakeprotocol from buildbot.worker import Worker try: from buildbot_worker.bot import LocalWorker as RemoteWorker except ImportError: RemoteWorker = None class FakeWorker: workername = 'test' def __init__(self, master): self.master = master self.conn = fakeprotocol.FakeConnection(self) self.info = properties.Properties() self.properties = properties.Properties() self.defaultProperties = properties.Properties() self.workerid = 383 def acquireLocks(self): return True def releaseLocks(self): pass def attached(self, conn): self.worker_system = 'posix' self.path_module = os.path self.workerid = 1234 self.worker_basedir = '/wrk' return defer.succeed(None) def detached(self): pass def addWorkerForBuilder(self, wfb): pass def removeWorkerForBuilder(self, wfb): pass def buildFinished(self, wfb): pass def canStartBuild(self): pass def putInQuarantine(self): pass def resetQuarantine(self): pass @defer.inlineCallbacks def disconnect_master_side_worker(worker): # Force disconnection because the LocalWorker does not disconnect itself. 
Note that # the worker may have already been disconnected by something else (e.g. if it's not # responding). We need to call detached() explicitly because the order in which # disconnection subscriptions are invoked is unspecified. if worker.conn is not None: worker._detached_sub.unsubscribe() conn = worker.conn yield worker.detached() conn.loseConnection() yield worker.waitForCompleteShutdown() class SeverWorkerConnectionMixin: _connection_severed = False _severed_deferreds = None def disconnect_worker(self): if not self._connection_severed: return if self._severed_deferreds is not None: for d in self._severed_deferreds: d.errback(pb.PBConnectionLost('lost connection')) self._connection_severed = False def sever_connection(self): # stubs the worker connection so that it appears that the TCP connection # has been severed in a way that no response is ever received, but # messages don't fail immediately. All callback will be called when # disconnect_worker is called self._connection_severed = True def register_deferred(): d = defer.Deferred() if self._severed_deferreds is None: self._severed_deferreds = [] self._severed_deferreds.append(d) return d def remotePrint(message): return register_deferred() self.worker.conn.remotePrint = remotePrint def remoteGetWorkerInfo(): return register_deferred() self.worker.conn.remoteGetWorkerInfo = remoteGetWorkerInfo def remoteSetBuilderList(builders): return register_deferred() self.worker.conn.remoteSetBuilderList = remoteSetBuilderList def remoteStartCommand(remoteCommand, builderName, commandId, commandName, args): return register_deferred() self.worker.conn.remoteStartCommand = remoteStartCommand def remoteShutdown(): return register_deferred() self.worker.conn.remoteShutdown = remoteShutdown def remoteStartBuild(builderName): return register_deferred() self.worker.conn.remoteStartBuild = remoteStartBuild def remoteInterruptCommand(builderName, commandId, why): return register_deferred() 
self.worker.conn.remoteInterruptCommand = remoteInterruptCommand class WorkerController(SeverWorkerConnectionMixin): """ A controller for a ``Worker``. https://glyph.twistedmatrix.com/2015/05/separate-your-fakes-and-your-inspectors.html """ def __init__(self, case, name, build_wait_timeout=600, worker_class=None, **kwargs): if worker_class is None: worker_class = Worker self.case = case self.build_wait_timeout = build_wait_timeout self.worker = worker_class(name, self, **kwargs) self.remote_worker = None @defer.inlineCallbacks def connect_worker(self): if self.remote_worker is not None: return if RemoteWorker is None: raise SkipTest("buildbot-worker package is not installed") workdir = FilePath(self.case.mktemp()) workdir.createDirectory() self.remote_worker = RemoteWorker(self.worker.name, workdir.path, False) yield self.remote_worker.setServiceParent(self.worker) @defer.inlineCallbacks def disconnect_worker(self): yield super().disconnect_worker() if self.remote_worker is None: return self.remote_worker, worker = None, self.remote_worker disconnect_master_side_worker(self.worker) yield worker.disownServiceParent() buildbot-3.4.0/master/buildbot/test/fakedb/000077500000000000000000000000001413250514000206075ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/fakedb/__init__.py000066400000000000000000000103721413250514000227230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ A complete re-implementation of the database connector components, but without using a database. These classes should pass the same tests as are applied to the real connector components. """ from .base import FakeDBComponent from .build_data import BuildData from .build_data import FakeBuildDataComponent from .builders import Builder from .builders import BuilderMaster from .builders import BuildersTags from .builders import FakeBuildersComponent from .buildrequests import BuildRequest from .buildrequests import BuildRequestClaim from .buildrequests import FakeBuildRequestsComponent from .builds import Build from .builds import BuildProperty from .builds import FakeBuildsComponent from .buildsets import Buildset from .buildsets import BuildsetProperty from .buildsets import BuildsetSourceStamp from .buildsets import FakeBuildsetsComponent from .changes import Change from .changes import ChangeFile from .changes import ChangeProperty from .changes import ChangeUser from .changes import FakeChangesComponent from .changesources import ChangeSource from .changesources import ChangeSourceMaster from .changesources import FakeChangeSourcesComponent from .connector import FakeDBConnector from .logs import FakeLogsComponent from .logs import Log from .logs import LogChunk from .masters import FakeMastersComponent from .masters import Master from .schedulers import FakeSchedulersComponent from .schedulers import Scheduler from .schedulers import SchedulerChange from .schedulers import SchedulerMaster from .sourcestamps import FakeSourceStampsComponent from .sourcestamps import Patch from .sourcestamps import SourceStamp from .state import FakeStateComponent from .state import Object from .state import ObjectState from .steps 
import FakeStepsComponent from .steps import Step from .tags import FakeTagsComponent from .tags import Tag from .test_result_sets import FakeTestResultSetsComponent from .test_result_sets import TestResultSet from .test_results import FakeTestResultsComponent from .test_results import TestCodePath from .test_results import TestName from .test_results import TestResult from .users import FakeUsersComponent from .users import User from .users import UserInfo from .workers import ConfiguredWorker from .workers import ConnectedWorker from .workers import FakeWorkersComponent from .workers import Worker __all__ = [ 'Build', 'BuildData', 'BuildProperty', 'BuildRequest', 'BuildRequestClaim', 'Builder', 'BuilderMaster', 'BuildersTags', 'Buildset', 'BuildsetProperty', 'BuildsetSourceStamp', 'Change', 'ChangeFile', 'ChangeProperty', 'ChangeSource', 'ChangeSourceMaster', 'ChangeUser', 'ConfiguredWorker', 'ConnectedWorker', 'FakeBuildRequestsComponent', 'FakeBuildersComponent', 'FakeBuildsComponent', 'FakeBuildsetsComponent', 'FakeBuildDataComponent', 'FakeChangeSourcesComponent', 'FakeChangesComponent', 'FakeDBComponent', 'FakeDBConnector', 'FakeLogsComponent', 'FakeMastersComponent', 'FakeSchedulersComponent', 'FakeSourceStampsComponent', 'FakeStateComponent', 'FakeStepsComponent', 'FakeTagsComponent', 'FakeTestResultSetsComponent', 'FakeTestResultsComponent', 'FakeUsersComponent', 'FakeWorkersComponent', 'Log', 'LogChunk', 'Master', 'Object', 'ObjectState', 'Patch', 'Scheduler', 'SchedulerChange', 'SchedulerMaster', 'SourceStamp', 'Step', 'Tag', 'TestCodePath', 'TestName', 'TestResultSet', 'TestResult', 'User', 'UserInfo', 'Worker', ] buildbot-3.4.0/master/buildbot/test/fakedb/base.py000066400000000000000000000040331413250514000220730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.data import resultspec class FakeDBComponent: data2db = {} def __init__(self, db, testcase): self.db = db self.t = testcase self.reactor = testcase.reactor self.setUp() def mapFilter(self, f, fieldMapping): field = fieldMapping[f.field].split(".")[-1] return resultspec.Filter(field, f.op, f.values) def mapOrder(self, o, fieldMapping): if o.startswith('-'): reverse, o = o[0], o[1:] else: reverse = "" o = fieldMapping[o].split(".")[-1] return reverse + o def applyResultSpec(self, data, rs): def applicable(field): if field.startswith('-'): field = field[1:] return field in rs.fieldMapping filters = [self.mapFilter(f, rs.fieldMapping) for f in rs.filters if applicable(f.field)] order = [] offset = limit = None if rs.order: order = [self.mapOrder(o, rs.fieldMapping) for o in rs.order if applicable(o)] if len(filters) == len(rs.filters) and rs.order is not None and len(order) == len(rs.order): offset, limit = rs.offset, rs.limit rs = resultspec.ResultSpec( filters=filters, order=order, limit=limit, offset=offset) return rs.apply(data) buildbot-3.4.0/master/buildbot/test/fakedb/build_data.py000066400000000000000000000100101413250514000232410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row class BuildData(Row): table = 'build_data' id_column = 'id' foreignKeys = ('buildid',) required_columns = ('buildid', 'name', 'value', 'length', 'source') binary_columns = ('value',) def __init__(self, id=None, buildid=None, name=None, value=None, source=None): super().__init__(id=id, buildid=buildid, name=name, value=value, source=source, length=len(value)) class FakeBuildDataComponent(FakeDBComponent): def setUp(self): self.build_data = {} def insertTestData(self, rows): for row in rows: if isinstance(row, BuildData): self.build_data[row.id] = row.values.copy() def _get_build_data_row(self, buildid, name): for row in self.build_data.values(): if row['buildid'] == buildid and row['name'] == name: return row return None def setBuildData(self, buildid, name, value, source): assert isinstance(value, bytes) row = self._get_build_data_row(buildid, name) if row is not None: row['value'] = value row['length'] = len(value) row['source'] = source return id = Row.nextId() self.build_data[id] = { 'id': id, 'buildid': buildid, 'name': name, 'value': value, 'length': len(value), 'source': source } # returns a Deferred def getBuildData(self, buildid, name): row = self._get_build_data_row(buildid, name) if row is not None: return defer.succeed(self._row2dict(row)) return defer.succeed(None) # returns a Deferred def getBuildDataNoValue(self, buildid, name): row = 
self._get_build_data_row(buildid, name) if row is not None: return defer.succeed(self._row2dict_novalue(row)) return defer.succeed(None) # returns a Deferred def getAllBuildDataNoValues(self, buildid): ret = [] for row in self.build_data.values(): if row['buildid'] != buildid: continue ret.append(self._row2dict_novalue(row)) return defer.succeed(ret) # returns a Deferred def deleteOldBuildData(self, older_than_timestamp): buildids_to_keep = [] for build_dict in self.db.builds.builds.values(): if build_dict['complete_at'] is None or \ build_dict['complete_at'] >= older_than_timestamp: buildids_to_keep.append(build_dict['id']) count_before = len(self.build_data) build_dataids_to_remove = [] for build_datadict in self.build_data.values(): if build_datadict['buildid'] not in buildids_to_keep: build_dataids_to_remove.append(build_datadict['id']) for id in build_dataids_to_remove: self.build_data.pop(id) count_after = len(self.build_data) return defer.succeed(count_before - count_after) def _row2dict(self, row): ret = row.copy() del ret['id'] return ret def _row2dict_novalue(self, row): ret = row.copy() del ret['id'] ret['value'] = None return ret buildbot-3.4.0/master/buildbot/test/fakedb/builders.py000066400000000000000000000123031413250514000227710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row class Builder(Row): table = "builders" id_column = 'id' hashedColumns = [('name_hash', ('name',))] def __init__(self, id=None, name='some:builder', name_hash=None, description=None): super().__init__(id=id, name=name, name_hash=name_hash, description=description) class BuilderMaster(Row): table = "builder_masters" id_column = 'id' required_columns = ('builderid', 'masterid') def __init__(self, id=None, builderid=None, masterid=None): super().__init__(id=id, builderid=builderid, masterid=masterid) class BuildersTags(Row): table = "builders_tags" foreignKeys = ('builderid', 'tagid') required_columns = ('builderid', 'tagid', ) id_column = 'id' def __init__(self, id=None, builderid=None, tagid=None): super().__init__(id=id, builderid=builderid, tagid=tagid) class FakeBuildersComponent(FakeDBComponent): def setUp(self): self.builders = {} self.builder_masters = {} self.builders_tags = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Builder): self.builders[row.id] = dict( id=row.id, name=row.name, description=row.description) if isinstance(row, BuilderMaster): self.builder_masters[row.id] = \ (row.builderid, row.masterid) if isinstance(row, BuildersTags): assert row.builderid in self.builders self.builders_tags.setdefault(row.builderid, []).append(row.tagid) def findBuilderId(self, name, autoCreate=True): for m in self.builders.values(): if m['name'] == name: return defer.succeed(m['id']) if not autoCreate: return defer.succeed(None) id = len(self.builders) + 1 self.builders[id] = dict( id=id, name=name, description=None, tags=[]) return defer.succeed(id) def addBuilderMaster(self, builderid=None, masterid=None): if (builderid, masterid) not in list(self.builder_masters.values()): self.insertTestData([ BuilderMaster(builderid=builderid, masterid=masterid), ]) return defer.succeed(None) 
def removeBuilderMaster(self, builderid=None, masterid=None): for id, tup in self.builder_masters.items(): if tup == (builderid, masterid): del self.builder_masters[id] break return defer.succeed(None) def getBuilder(self, builderid): if builderid in self.builders: masterids = [bm[1] for bm in self.builder_masters.values() if bm[0] == builderid] bldr = self.builders[builderid].copy() bldr['masterids'] = sorted(masterids) return defer.succeed(self._row2dict(bldr)) return defer.succeed(None) def getBuilders(self, masterid=None): rv = [] for builderid, bldr in self.builders.items(): masterids = [bm[1] for bm in self.builder_masters.values() if bm[0] == builderid] bldr = bldr.copy() bldr['masterids'] = sorted(masterids) rv.append(self._row2dict(bldr)) if masterid is not None: rv = [bd for bd in rv if masterid in bd['masterids']] return defer.succeed(rv) def addTestBuilder(self, builderid, name=None): if name is None: name = "SomeBuilder-%d" % builderid self.db.insertTestData([ Builder(id=builderid, name=name), ]) @defer.inlineCallbacks def updateBuilderInfo(self, builderid, description, tags): if builderid in self.builders: tags = tags if tags else [] self.builders[builderid]['description'] = description # add tags tagids = [] for tag in tags: if not isinstance(tag, type(1)): tag = yield self.db.tags.findTagId(tag) tagids.append(tag) self.builders_tags[builderid] = tagids def _row2dict(self, row): row = row.copy() row['tags'] = [self.db.tags.tags[tagid]['name'] for tagid in self.builders_tags.get(row['id'], [])] return row buildbot-3.4.0/master/buildbot/test/fakedb/buildrequests.py000066400000000000000000000164171413250514000240650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.db import buildrequests from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row from buildbot.util import datetime2epoch class BuildRequest(Row): table = "buildrequests" foreignKeys = ('buildsetid',) id_column = 'id' required_columns = ('buildsetid',) def __init__(self, id=None, buildsetid=None, builderid=None, buildername=None, priority=0, complete=0, results=-1, submitted_at=12345678, complete_at=None, waited_for=0): super().__init__(id=id, buildsetid=buildsetid, builderid=builderid, buildername=buildername, priority=priority, complete=complete, results=results, submitted_at=submitted_at, complete_at=complete_at, waited_for=waited_for) class BuildRequestClaim(Row): table = "buildrequest_claims" foreignKeys = ('brid', 'masterid') required_columns = ('brid', 'masterid', 'claimed_at') def __init__(self, brid=None, masterid=None, claimed_at=None): super().__init__(brid=brid, masterid=masterid, claimed_at=claimed_at) class FakeBuildRequestsComponent(FakeDBComponent): # for use in determining "my" requests MASTER_ID = 824 def setUp(self): self.reqs = {} self.claims = {} def insertTestData(self, rows): for row in rows: if isinstance(row, BuildRequest): self.reqs[row.id] = row if isinstance(row, BuildRequestClaim): self.claims[row.brid] = row # component methods @defer.inlineCallbacks def getBuildRequest(self, brid): row = self.reqs.get(brid) if row: claim_row = self.claims.get(brid, None) if claim_row: 
row.claimed_at = claim_row.claimed_at row.claimed = True row.masterid = claim_row.masterid row.claimed_by_masterid = claim_row.masterid else: row.claimed_at = None builder = yield self.db.builders.getBuilder(row.builderid) row.buildername = builder["name"] return self._brdictFromRow(row) else: return None @defer.inlineCallbacks def getBuildRequests(self, builderid=None, complete=None, claimed=None, bsid=None, branch=None, repository=None, resultSpec=None): rv = [] for br in self.reqs.values(): if builderid and br.builderid != builderid: continue if complete is not None: if complete and not br.complete: continue if not complete and br.complete: continue claim_row = self.claims.get(br.id) if claim_row: br.claimed_at = claim_row.claimed_at br.claimed = True br.masterid = claim_row.masterid br.claimed_by_masterid = claim_row.masterid else: br.claimed_at = None if claimed is not None: if isinstance(claimed, bool): if claimed: if not claim_row: continue else: if br.complete or claim_row: continue else: if not claim_row or claim_row.masterid != claimed: continue if bsid is not None: if br.buildsetid != bsid: continue if branch or repository: buildset = yield self.db.buildsets.getBuildset(br.buildsetid) sourcestamps = [] for ssid in buildset['sourcestamps']: sourcestamps.append((yield self.db.sourcestamps.getSourceStamp(ssid))) if branch and not any(branch == s['branch'] for s in sourcestamps): continue if repository and not any(repository == s['repository'] for s in sourcestamps): continue builder = yield self.db.builders.getBuilder(br.builderid) br.buildername = builder["name"] rv.append(self._brdictFromRow(br)) if resultSpec is not None: rv = self.applyResultSpec(rv, resultSpec) return rv def claimBuildRequests(self, brids, claimed_at=None): for brid in brids: if brid not in self.reqs or brid in self.claims: raise buildrequests.AlreadyClaimedError if claimed_at is not None: claimed_at = datetime2epoch(claimed_at) else: claimed_at = int(self.reactor.seconds()) # now that 
we've thrown any necessary exceptions, get started for brid in brids: self.claims[brid] = BuildRequestClaim(brid=brid, masterid=self.MASTER_ID, claimed_at=claimed_at) return defer.succeed(None) def unclaimBuildRequests(self, brids): for brid in brids: if brid in self.claims and self.claims[brid].masterid == self.db.master.masterid: self.claims.pop(brid) def completeBuildRequests(self, brids, results, complete_at=None): if complete_at is not None: complete_at = datetime2epoch(complete_at) else: complete_at = int(self.reactor.seconds()) for brid in brids: if brid not in self.reqs or self.reqs[brid].complete == 1: raise buildrequests.NotClaimedError for brid in brids: self.reqs[brid].complete = 1 self.reqs[brid].results = results self.reqs[brid].complete_at = complete_at return defer.succeed(None) def _brdictFromRow(self, row): return buildrequests.BuildRequestsConnectorComponent._brdictFromRow(row, self.MASTER_ID) # fake methods def fakeClaimBuildRequest(self, brid, claimed_at=None, masterid=None): if masterid is None: masterid = self.MASTER_ID self.claims[brid] = BuildRequestClaim(brid=brid, masterid=masterid, claimed_at=self.reactor.seconds()) def fakeUnclaimBuildRequest(self, brid): del self.claims[brid] # assertions def assertMyClaims(self, claimed_brids): self.t.assertEqual( [id for (id, brc) in self.claims.items() if brc.masterid == self.MASTER_ID], claimed_brids) buildbot-3.4.0/master/buildbot/test/fakedb/builds.py000066400000000000000000000172121413250514000224460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row from buildbot.test.util import validation from buildbot.util import epoch2datetime class Build(Row): table = "builds" id_column = 'id' foreignKeys = ('buildrequestid', 'masterid', 'workerid', 'builderid') required_columns = ('buildrequestid', 'masterid', 'workerid') def __init__(self, id=None, number=29, buildrequestid=None, builderid=None, workerid=-1, masterid=None, started_at=1304262222, complete_at=None, state_string="test", results=None): super().__init__(id=id, number=number, buildrequestid=buildrequestid, builderid=builderid, workerid=workerid, masterid=masterid, started_at=started_at, complete_at=complete_at, state_string=state_string, results=results) class BuildProperty(Row): table = "build_properties" foreignKeys = ('buildid',) required_columns = ('buildid',) def __init__(self, buildid=None, name='prop', value=42, source='fakedb'): super().__init__(buildid=buildid, name=name, value=value, source=source) class FakeBuildsComponent(FakeDBComponent): def setUp(self): self.builds = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Build): build = self.builds[row.id] = row.values.copy() build['properties'] = {} for row in rows: if isinstance(row, BuildProperty): assert row.buildid in self.builds self.builds[row.buildid]['properties'][ row.name] = (row.value, row.source) # component methods def _newId(self): id = 100 while id in self.builds: id += 1 return id def _row2dict(self, row): return dict( id=row['id'], number=row['number'], buildrequestid=row['buildrequestid'], builderid=row['builderid'], masterid=row['masterid'], workerid=row['workerid'], 
started_at=epoch2datetime(row['started_at']), complete_at=epoch2datetime(row['complete_at']), state_string=row['state_string'], results=row['results']) def getBuild(self, buildid): row = self.builds.get(buildid) if not row: return defer.succeed(None) return defer.succeed(self._row2dict(row)) def getBuildByNumber(self, builderid, number): for row in self.builds.values(): if row['builderid'] == builderid and row['number'] == number: return defer.succeed(self._row2dict(row)) return defer.succeed(None) def getBuilds(self, builderid=None, buildrequestid=None, workerid=None, complete=None, resultSpec=None): ret = [] for (id, row) in self.builds.items(): if builderid is not None and row['builderid'] != builderid: continue if buildrequestid is not None and row['buildrequestid'] != buildrequestid: continue if workerid is not None and row['workerid'] != workerid: continue if complete is not None and complete != (row['complete_at'] is not None): continue ret.append(self._row2dict(row)) if resultSpec is not None: ret = self.applyResultSpec(ret, resultSpec) return defer.succeed(ret) def addBuild(self, builderid, buildrequestid, workerid, masterid, state_string): validation.verifyType(self.t, 'state_string', state_string, validation.StringValidator()) id = self._newId() number = max([0] + [r['number'] for r in self.builds.values() if r['builderid'] == builderid]) + 1 self.builds[id] = dict(id=id, number=number, buildrequestid=buildrequestid, builderid=builderid, workerid=workerid, masterid=masterid, state_string=state_string, started_at=self.reactor.seconds(), complete_at=None, results=None) return defer.succeed((id, number)) def setBuildStateString(self, buildid, state_string): validation.verifyType(self.t, 'state_string', state_string, validation.StringValidator()) b = self.builds.get(buildid) if b: b['state_string'] = state_string return defer.succeed(None) def finishBuild(self, buildid, results): now = self.reactor.seconds() b = self.builds.get(buildid) if b: 
b['complete_at'] = now b['results'] = results return defer.succeed(None) def getBuildProperties(self, bid, resultSpec=None): if bid in self.builds: ret = [{"name": k, "source": v[1], "value": v[0]} for k, v in self.builds[bid]['properties'].items()] if resultSpec is not None: ret = self.applyResultSpec(ret, resultSpec) ret = {v['name']: (v['value'], v['source']) for v in ret} return defer.succeed(ret) def setBuildProperty(self, bid, name, value, source): assert bid in self.builds self.builds[bid]['properties'][name] = (value, source) return defer.succeed(None) @defer.inlineCallbacks def getBuildsForChange(self, changeid): change = yield self.db.changes.getChange(changeid) bsets = yield self.db.buildsets.getBuildsets() breqs = yield self.db.buildrequests.getBuildRequests() builds = yield self.db.builds.getBuilds() results = [] for bset in bsets: for ssid in bset['sourcestamps']: if change['sourcestampid'] == ssid: bset['changeid'] = changeid results.append({'buildsetid': bset['bsid']}) for breq in breqs: for result in results: if result['buildsetid'] == breq['buildsetid']: result['buildrequestid'] = breq['buildrequestid'] for build in builds: for result in results: if result['buildrequestid'] == build['buildrequestid']: result['id'] = build['id'] result['number'] = build['number'] result['builderid'] = build['builderid'] result['workerid'] = build['workerid'] result['masterid'] = build['masterid'] result['started_at'] = epoch2datetime(1304262222) result['complete_at'] = build['complete_at'] result['state_string'] = build['state_string'] result['results'] = build['results'] for result in results: del result['buildsetid'] return results buildbot-3.4.0/master/buildbot/test/fakedb/buildsets.py000066400000000000000000000224731413250514000231670ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json from twisted.internet import defer from buildbot.db import buildsets from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.buildrequests import BuildRequest from buildbot.test.fakedb.row import Row from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class Buildset(Row): table = "buildsets" id_column = 'id' def __init__(self, id=None, external_idstring='extid', reason='because', submitted_at=12345678, complete=0, complete_at=None, results=-1, parent_buildid=None, parent_relationship=None): super().__init__(id=id, external_idstring=external_idstring, reason=reason, submitted_at=submitted_at, complete=complete, complete_at=complete_at, results=results, parent_buildid=parent_buildid, parent_relationship=parent_relationship) class BuildsetProperty(Row): table = "buildset_properties" foreignKeys = ('buildsetid',) required_columns = ('buildsetid', ) def __init__(self, buildsetid=None, property_name='prop', property_value='[22, "fakedb"]'): super().__init__(buildsetid=buildsetid, property_name=property_name, property_value=property_value) class BuildsetSourceStamp(Row): table = "buildset_sourcestamps" foreignKeys = ('buildsetid', 'sourcestampid') required_columns = ('buildsetid', 'sourcestampid', ) id_column = 'id' def __init__(self, id=None, 
buildsetid=None, sourcestampid=None): super().__init__(id=id, buildsetid=buildsetid, sourcestampid=sourcestampid) class FakeBuildsetsComponent(FakeDBComponent): def setUp(self): self.buildsets = {} self.completed_bsids = set() self.buildset_sourcestamps = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Buildset): bs = self.buildsets[row.id] = row.values.copy() bs['properties'] = {} for row in rows: if isinstance(row, BuildsetProperty): assert row.buildsetid in self.buildsets n = row.property_name v, src = tuple(json.loads(row.property_value)) self.buildsets[row.buildsetid]['properties'][n] = (v, src) for row in rows: if isinstance(row, BuildsetSourceStamp): assert row.buildsetid in self.buildsets self.buildset_sourcestamps.setdefault(row.buildsetid, []).append(row.sourcestampid) # component methods def _newBsid(self): bsid = 200 while bsid in self.buildsets: bsid += 1 return bsid @defer.inlineCallbacks def addBuildset(self, sourcestamps, reason, properties, builderids, waited_for, external_idstring=None, submitted_at=None, parent_buildid=None, parent_relationship=None): # We've gotten this wrong a couple times. 
assert isinstance( waited_for, bool), 'waited_for should be boolean: %r' % waited_for # calculate submitted at if submitted_at is not None: submitted_at = datetime2epoch(submitted_at) else: submitted_at = int(self.reactor.seconds()) bsid = self._newBsid() br_rows = [] for builderid in builderids: br_rows.append( BuildRequest(buildsetid=bsid, builderid=builderid, waited_for=waited_for, submitted_at=submitted_at)) self.db.buildrequests.insertTestData(br_rows) # make up a row and keep its dictionary, with the properties tacked on bsrow = Buildset(id=bsid, reason=reason, external_idstring=external_idstring, submitted_at=submitted_at, parent_buildid=parent_buildid, parent_relationship=parent_relationship) self.buildsets[bsid] = bsrow.values.copy() self.buildsets[bsid]['properties'] = properties # add sourcestamps ssids = [] for ss in sourcestamps: if not isinstance(ss, type(1)): ss = yield self.db.sourcestamps.findSourceStampId(**ss) ssids.append(ss) self.buildset_sourcestamps[bsid] = ssids return (bsid, {br.builderid: br.id for br in br_rows}) def completeBuildset(self, bsid, results, complete_at=None): if bsid not in self.buildsets or self.buildsets[bsid]['complete']: raise buildsets.AlreadyCompleteError() if complete_at is not None: complete_at = datetime2epoch(complete_at) else: complete_at = int(self.reactor.seconds()) self.buildsets[bsid]['results'] = results self.buildsets[bsid]['complete'] = 1 self.buildsets[bsid]['complete_at'] = complete_at return defer.succeed(None) def getBuildset(self, bsid): if bsid not in self.buildsets: return defer.succeed(None) row = self.buildsets[bsid] return defer.succeed(self._row2dict(row)) def getBuildsets(self, complete=None, resultSpec=None): rv = [] for bs in self.buildsets.values(): if complete is not None: if complete and bs['complete']: rv.append(bs) elif not complete and not bs['complete']: rv.append(bs) else: rv.append(bs) if resultSpec is not None: rv = self.applyResultSpec(rv, resultSpec) rv = [self._row2dict(bs) for bs 
in rv] return defer.succeed(rv) @defer.inlineCallbacks def getRecentBuildsets(self, count=None, branch=None, repository=None, complete=None): if not count: return [] rv = [] for bs in (yield self.getBuildsets(complete=complete)): if branch or repository: ok = True if not bs['sourcestamps']: # no sourcestamps -> no match ok = False for ssid in bs['sourcestamps']: ss = yield self.db.sourcestamps.getSourceStamp(ssid) if branch and ss['branch'] != branch: ok = False if repository and ss['repository'] != repository: ok = False else: ok = True if ok: rv.append(bs) rv.sort(key=lambda bs: -bs['bsid']) return list(reversed(rv[:count])) def _row2dict(self, row): row = row.copy() row['complete_at'] = epoch2datetime(row['complete_at']) row['submitted_at'] = epoch2datetime(row['submitted_at']) row['complete'] = bool(row['complete']) row['bsid'] = row['id'] row['sourcestamps'] = self.buildset_sourcestamps.get(row['id'], []) del row['id'] del row['properties'] return row def getBuildsetProperties(self, key, no_cache=False): if key in self.buildsets: return defer.succeed( self.buildsets[key]['properties']) return defer.succeed({}) # fake methods def fakeBuildsetCompletion(self, bsid, result): assert bsid in self.buildsets self.buildsets[bsid]['results'] = result self.completed_bsids.add(bsid) # assertions def assertBuildsetCompletion(self, bsid, complete): """Assert that the completion state of buildset BSID is COMPLETE""" actual = self.buildsets[bsid]['complete'] self.t.assertTrue( (actual and complete) or (not actual and not complete)) def assertBuildset(self, bsid=None, expected_buildset=None): """Assert that the given buildset looks as expected; the ssid parameter of the buildset is omitted. Properties are converted with asList and sorted. 
Attributes complete, complete_at, submitted_at, results, and parent_* are ignored if not specified.""" self.t.assertIn(bsid, self.buildsets) buildset = self.buildsets[bsid].copy() del buildset['id'] # clear out some columns if the caller doesn't care columns = [ 'complete', 'complete_at', 'submitted_at', 'results', 'parent_buildid', 'parent_relationship' ] for col in columns: if col not in expected_buildset: del buildset[col] if buildset['properties']: buildset['properties'] = sorted(buildset['properties'].items()) self.t.assertEqual(buildset, expected_buildset) return bsid buildbot-3.4.0/master/buildbot/test/fakedb/changes.py000066400000000000000000000220351413250514000225730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import copy import json from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class Change(Row): table = "changes" lists = ('files', 'uids') dicts = ('properties',) id_column = 'changeid' def __init__(self, changeid=None, author='frank', committer='steve', comments='test change', branch='master', revision='abcd', revlink='http://vc/abcd', when_timestamp=1200000, category='cat', repository='repo', codebase='', project='proj', sourcestampid=92, parent_changeids=None): super().__init__(changeid=changeid, author=author, committer=committer, comments=comments, branch=branch, revision=revision, revlink=revlink, when_timestamp=when_timestamp, category=category, repository=repository, codebase=codebase, project=project, sourcestampid=sourcestampid, parent_changeids=parent_changeids) class ChangeFile(Row): table = "change_files" foreignKeys = ('changeid',) required_columns = ('changeid',) def __init__(self, changeid=None, filename=None): super().__init__(changeid=changeid, filename=filename) class ChangeProperty(Row): table = "change_properties" foreignKeys = ('changeid',) required_columns = ('changeid',) def __init__(self, changeid=None, property_name=None, property_value=None): super().__init__(changeid=changeid, property_name=property_name, property_value=property_value) class ChangeUser(Row): table = "change_users" foreignKeys = ('changeid',) required_columns = ('changeid',) def __init__(self, changeid=None, uid=None): super().__init__(changeid=changeid, uid=uid) class FakeChangesComponent(FakeDBComponent): def setUp(self): self.changes = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Change): # copy this since we'll be modifying it (e.g., adding files) ch = self.changes[row.changeid] = copy.deepcopy(row.values) ch['files'] = [] ch['properties'] = {} 
ch['uids'] = [] elif isinstance(row, ChangeFile): ch = self.changes[row.changeid] ch['files'].append(row.filename) elif isinstance(row, ChangeProperty): ch = self.changes[row.changeid] n, vs = row.property_name, row.property_value v, s = json.loads(vs) ch['properties'][n] = (v, s) elif isinstance(row, ChangeUser): ch = self.changes[row.changeid] ch['uids'].append(row.uid) # component methods @defer.inlineCallbacks def addChange(self, author=None, committer=None, files=None, comments=None, is_dir=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase='', project='', uid=None): if properties is None: properties = {} if self.changes: changeid = max(list(self.changes)) + 1 else: changeid = 500 ssid = yield self.db.sourcestamps.findSourceStampId( revision=revision, branch=branch, repository=repository, codebase=codebase, project=project) parent_changeids = yield self.getParentChangeIds(branch, repository, project, codebase) self.changes[changeid] = ch = dict( changeid=changeid, parent_changeids=parent_changeids, author=author, committer=committer, comments=comments, revision=revision, when_timestamp=datetime2epoch(when_timestamp), branch=branch, category=category, revlink=revlink, repository=repository, project=project, codebase=codebase, uids=[], files=files, properties=properties, sourcestampid=ssid) if uid: ch['uids'].append(uid) return changeid def getLatestChangeid(self): if self.changes: return defer.succeed(max(list(self.changes))) return defer.succeed(None) def getParentChangeIds(self, branch, repository, project, codebase): if self.changes: for changeid, change in self.changes.items(): if (change['branch'] == branch and change['repository'] == repository and change['project'] == project and change['codebase'] == codebase): return defer.succeed([change['changeid']]) return defer.succeed([]) def getChange(self, key, no_cache=False): try: row = self.changes[key] except KeyError: return 
defer.succeed(None) return defer.succeed(self._chdict(row)) def getChangeUids(self, changeid): try: ch_uids = self.changes[changeid]['uids'] except KeyError: ch_uids = [] return defer.succeed(ch_uids) def getChanges(self, resultSpec=None): if resultSpec is not None and resultSpec.limit is not None: ids = sorted(self.changes.keys()) chdicts = [self._chdict(self.changes[id]) for id in ids[-resultSpec.limit:]] return defer.succeed(chdicts) chdicts = [self._chdict(v) for v in self.changes.values()] return defer.succeed(chdicts) def getChangesCount(self): return defer.succeed(len(self.changes)) def getChangesForBuild(self, buildid): # the algorithm is too complicated to be worth faked, better patch it # ad-hoc raise NotImplementedError( "Please patch in tests to return appropriate results") def getChangeFromSSid(self, ssid): chdicts = [self._chdict(v) for v in self.changes.values() if v['sourcestampid'] == ssid] if chdicts: return defer.succeed(chdicts[0]) return defer.succeed(None) def _chdict(self, row): chdict = row.copy() del chdict['uids'] if chdict['parent_changeids'] is None: chdict['parent_changeids'] = [] chdict['when_timestamp'] = epoch2datetime(chdict['when_timestamp']) return chdict # assertions def assertChange(self, changeid, row): row_only = self.changes[changeid].copy() del row_only['files'] del row_only['properties'] del row_only['uids'] if not row_only['parent_changeids']: # Convert [] to None # None is the value stored in the DB. # We need this kind of conversion, because for the moment we only support # 1 parent for a change. 
# When we will support multiple parent for change, then we will have a # table parent_changes with at least 2 col: "changeid", "parent_changeid" # And the col 'parent_changeids' of the table changes will be # dropped row_only['parent_changeids'] = None self.t.assertEqual(row_only, row.values) def assertChangeUsers(self, changeid, expectedUids): self.t.assertEqual(self.changes[changeid]['uids'], expectedUids) # fake methods def fakeAddChangeInstance(self, change): if not hasattr(change, 'number') or not change.number: if self.changes: changeid = max(list(self.changes)) + 1 else: changeid = 500 else: changeid = change.number # make a row from the change row = dict( changeid=changeid, author=change.who, files=change.files, comments=change.comments, revision=change.revision, when_timestamp=change.when, branch=change.branch, category=change.category, revlink=change.revlink, properties=change.properties, repository=change.repository, codebase=change.codebase, project=change.project, uids=[]) self.changes[changeid] = row buildbot-3.4.0/master/buildbot/test/fakedb/changesources.py000066400000000000000000000107241413250514000240160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#
# Copyright Buildbot Team Members

from twisted.internet import defer

from buildbot.db import changesources
from buildbot.test.fakedb.base import FakeDBComponent
from buildbot.test.fakedb.row import Row


class ChangeSource(Row):
    # Test-data row for the 'changesources' table.  The name_hash column is
    # auto-computed from 'name' (see Row.hashedColumns).
    table = "changesources"

    id_column = 'id'
    hashedColumns = [('name_hash', ('name',))]

    def __init__(self, id=None, name='csname', name_hash=None):
        super().__init__(id=id, name=name, name_hash=name_hash)


class ChangeSourceMaster(Row):
    # Test-data row linking a change source to the master that claimed it.
    table = "changesource_masters"

    foreignKeys = ('changesourceid', 'masterid')
    required_columns = ('changesourceid', 'masterid')

    def __init__(self, changesourceid=None, masterid=None):
        super().__init__(changesourceid=changesourceid, masterid=masterid)


class FakeChangeSourcesComponent(FakeDBComponent):
    """In-memory stand-in for the changesources DB component.

    State is held in plain dicts:
      - changesources: {changesourceid: name}
      - changesource_masters: {changesourceid: masterid or None}
    """

    def setUp(self):
        self.changesources = {}
        self.changesource_masters = {}
        # NOTE(review): 'states' is initialized but never read or written in
        # this component — presumably kept for parity with other fakes; confirm
        # before removing.
        self.states = {}

    def insertTestData(self, rows):
        # Accepts a mixed list of Row instances; ignores rows of other types.
        for row in rows:
            if isinstance(row, ChangeSource):
                self.changesources[row.id] = row.name
            if isinstance(row, ChangeSourceMaster):
                self.changesource_masters[row.changesourceid] = row.masterid

    # component methods

    def findChangeSourceId(self, name):
        # Return the id of an existing change source with this name, or
        # allocate a new id (max existing + 1, or 1 when empty).
        for cs_id, cs_name in self.changesources.items():
            if cs_name == name:
                return defer.succeed(cs_id)
        new_id = (max(self.changesources) + 1) if self.changesources else 1
        self.changesources[new_id] = name
        return defer.succeed(new_id)

    def getChangeSource(self, changesourceid):
        # Returns a Deferred firing a dict for a known id.
        # NOTE(review): for an unknown id this returns a bare None rather than
        # defer.succeed(None) — inconsistent with the real DB API; callers in
        # this file only iterate over known ids, so it is not hit here.
        if changesourceid in self.changesources:
            rv = dict(
                id=changesourceid,
                name=self.changesources[changesourceid],
                masterid=None)
            # only set masterid if the relevant changesource master exists and
            # is active
            rv['masterid'] = self.changesource_masters.get(changesourceid)
            return defer.succeed(rv)
        return None

    def getChangeSources(self, active=None, masterid=None):
        # Gather every change source dict, then filter by masterid and/or
        # active state.  active semantics: True -> only claimed sources,
        # False -> only unclaimed, None -> no filtering.
        d = defer.DeferredList([
            self.getChangeSource(id) for id in self.changesources
        ])

        @d.addCallback
        def filter(results):
            # filter off the DeferredList results (we know it's good)
            results = [r[1] for r in results]
            # filter for masterid
            if masterid is not None:
                results = [r for r in results
                           if r['masterid'] == masterid]
            # filter for active or inactive if necessary
            if active:
                results = [r for r in results if r['masterid'] is not None]
            elif active is not None:
                results = [r for r in results if r['masterid'] is None]
            return results
        return d

    def setChangeSourceMaster(self, changesourceid, masterid):
        # Claim (or release, with masterid=None) a change source.  Fails with
        # ChangeSourceAlreadyClaimedError when a *different* master holds it.
        current_masterid = self.changesource_masters.get(changesourceid)
        if current_masterid and masterid is not None and current_masterid != masterid:
            return defer.fail(changesources.ChangeSourceAlreadyClaimedError())
        self.changesource_masters[changesourceid] = masterid
        return defer.succeed(None)

    # fake methods

    def fakeChangeSource(self, name, changesourceid):
        # Directly install a change source without allocating an id.
        self.changesources[changesourceid] = name

    def fakeChangeSourceMaster(self, changesourceid, masterid):
        # Directly set or remove the master claim; masterid=None deletes the
        # entry (and raises KeyError if absent — intentional in tests).
        if masterid is not None:
            self.changesource_masters[changesourceid] = masterid
        else:
            del self.changesource_masters[changesourceid]

    # assertions

    def assertChangeSourceMaster(self, changesourceid, masterid):
        # Assert against the owning testcase (self.t) that the claim matches.
        self.t.assertEqual(self.changesource_masters.get(changesourceid),
                           masterid)
buildbot-3.4.0/master/buildbot/test/fakedb/connector.py000066400000000000000000000117211413250514000231550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.fakedb.build_data import FakeBuildDataComponent from buildbot.test.fakedb.builders import FakeBuildersComponent from buildbot.test.fakedb.buildrequests import FakeBuildRequestsComponent from buildbot.test.fakedb.builds import FakeBuildsComponent from buildbot.test.fakedb.buildsets import FakeBuildsetsComponent from buildbot.test.fakedb.changes import FakeChangesComponent from buildbot.test.fakedb.changesources import FakeChangeSourcesComponent from buildbot.test.fakedb.logs import FakeLogsComponent from buildbot.test.fakedb.masters import FakeMastersComponent from buildbot.test.fakedb.row import Row from buildbot.test.fakedb.schedulers import FakeSchedulersComponent from buildbot.test.fakedb.sourcestamps import FakeSourceStampsComponent from buildbot.test.fakedb.state import FakeStateComponent from buildbot.test.fakedb.steps import FakeStepsComponent from buildbot.test.fakedb.tags import FakeTagsComponent from buildbot.test.fakedb.test_result_sets import FakeTestResultSetsComponent from buildbot.test.fakedb.test_results import FakeTestResultsComponent from buildbot.test.fakedb.users import FakeUsersComponent from buildbot.test.fakedb.workers import FakeWorkersComponent from buildbot.util import service class FakeDBConnector(service.AsyncMultiService): """ A stand-in for C{master.db} that operates without an actual database backend. This also implements a test-data interface similar to the L{buildbot.test.util.db.RealDatabaseMixin.insertTestData} method. The child classes implement various useful assertions and faking methods; see their documentation for more. 
""" def __init__(self, testcase): super().__init__() # reset the id generator, for stable id's Row._next_id = 1000 self.t = testcase self.checkForeignKeys = False self._components = [] self.changes = comp = FakeChangesComponent(self, testcase) self._components.append(comp) self.changesources = comp = FakeChangeSourcesComponent(self, testcase) self._components.append(comp) self.schedulers = comp = FakeSchedulersComponent(self, testcase) self._components.append(comp) self.sourcestamps = comp = FakeSourceStampsComponent(self, testcase) self._components.append(comp) self.buildsets = comp = FakeBuildsetsComponent(self, testcase) self._components.append(comp) self.workers = comp = FakeWorkersComponent(self, testcase) self._components.append(comp) self.state = comp = FakeStateComponent(self, testcase) self._components.append(comp) self.buildrequests = comp = FakeBuildRequestsComponent(self, testcase) self._components.append(comp) self.builds = comp = FakeBuildsComponent(self, testcase) self._components.append(comp) self.build_data = comp = FakeBuildDataComponent(self, testcase) self._components.append(comp) self.steps = comp = FakeStepsComponent(self, testcase) self._components.append(comp) self.logs = comp = FakeLogsComponent(self, testcase) self._components.append(comp) self.users = comp = FakeUsersComponent(self, testcase) self._components.append(comp) self.masters = comp = FakeMastersComponent(self, testcase) self._components.append(comp) self.builders = comp = FakeBuildersComponent(self, testcase) self._components.append(comp) self.tags = comp = FakeTagsComponent(self, testcase) self._components.append(comp) self.test_results = comp = FakeTestResultsComponent(self, testcase) self._components.append(comp) self.test_result_sets = comp = FakeTestResultSetsComponent(self, testcase) self._components.append(comp) def setup(self): self.is_setup = True return defer.succeed(None) def insertTestData(self, rows): """Insert a list of Row instances into the database; this method 
can be called synchronously or asynchronously (it completes immediately) """ for row in rows: if self.checkForeignKeys: row.checkForeignKeys(self, self.t) for comp in self._components: comp.insertTestData([row]) return defer.succeed(None) buildbot-3.4.0/master/buildbot/test/fakedb/logs.py000066400000000000000000000116631413250514000221340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row from buildbot.test.util import validation class Log(Row): table = "logs" id_column = 'id' required_columns = ('stepid', ) def __init__(self, id=None, name='log29', slug='log29', stepid=None, complete=0, num_lines=0, type='s'): super().__init__(id=id, name=name, slug=slug, stepid=stepid, complete=complete, num_lines=num_lines, type=type) class LogChunk(Row): table = "logchunks" required_columns = ('logid', ) # 'content' column is sa.LargeBinary, it's bytestring. 
binary_columns = ('content',) def __init__(self, logid=None, first_line=0, last_line=0, content='', compressed=0): super().__init__(logid=logid, first_line=first_line, last_line=last_line, content=content, compressed=compressed) class FakeLogsComponent(FakeDBComponent): def setUp(self): self.logs = {} self.log_lines = {} # { logid : [ lines ] } def insertTestData(self, rows): for row in rows: if isinstance(row, Log): self.logs[row.id] = row.values.copy() for row in rows: if isinstance(row, LogChunk): lines = self.log_lines.setdefault(row.logid, []) # make sure there are enough slots in the list if len(lines) < row.last_line + 1: lines.append([None] * (row.last_line + 1 - len(lines))) row_lines = row.content.decode('utf-8').split('\n') lines[row.first_line:row.last_line + 1] = row_lines # component methods def _newId(self): id = 100 while id in self.logs: id += 1 return id def _row2dict(self, row): return dict( id=row['id'], stepid=row['stepid'], name=row['name'], slug=row['slug'], complete=bool(row['complete']), num_lines=row['num_lines'], type=row['type']) def getLog(self, logid): row = self.logs.get(logid) if not row: return defer.succeed(None) return defer.succeed(self._row2dict(row)) def getLogBySlug(self, stepid, slug): row = None for row in self.logs.values(): if row['slug'] == slug and row['stepid'] == stepid: break else: return defer.succeed(None) return defer.succeed(self._row2dict(row)) def getLogs(self, stepid=None): return defer.succeed([ self._row2dict(row) for row in self.logs.values() if row['stepid'] == stepid]) def getLogLines(self, logid, first_line, last_line): if logid not in self.logs or first_line > last_line: return defer.succeed('') lines = self.log_lines.get(logid, []) rv = lines[first_line:last_line + 1] return defer.succeed('\n'.join(rv) + '\n' if rv else '') def addLog(self, stepid, name, slug, type): id = self._newId() self.logs[id] = dict(id=id, stepid=stepid, name=name, slug=slug, type=type, complete=0, num_lines=0) self.log_lines[id] 
= [] return defer.succeed(id) def appendLog(self, logid, content): validation.verifyType(self.t, 'logid', logid, validation.IntValidator()) validation.verifyType(self.t, 'content', content, validation.StringValidator()) self.t.assertEqual(content[-1], '\n') content = content[:-1].split('\n') lines = self.log_lines[logid] lines.extend(content) num_lines = self.logs[logid]['num_lines'] = len(lines) return defer.succeed((num_lines - len(content), num_lines - 1)) def finishLog(self, logid): if id in self.logs: self.logs['id'].complete = 1 return defer.succeed(None) def compressLog(self, logid, force=False): return defer.succeed(None) def deleteOldLogChunks(self, older_than_timestamp): # not implemented self._deleted = older_than_timestamp return defer.succeed(1) buildbot-3.4.0/master/buildbot/test/fakedb/masters.py000066400000000000000000000057331413250514000226470ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row from buildbot.util import epoch2datetime class Master(Row): table = "masters" id_column = 'id' hashedColumns = [('name_hash', ('name',))] def __init__(self, id=None, name='some:master', name_hash=None, active=1, last_active=9998999): super().__init__(id=id, name=name, name_hash=name_hash, active=active, last_active=last_active) class FakeMastersComponent(FakeDBComponent): data2db = {"masterid": "id", "link": "id"} def setUp(self): self.masters = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Master): self.masters[row.id] = dict( id=row.id, name=row.name, active=bool(row.active), last_active=epoch2datetime(row.last_active)) def findMasterId(self, name): for m in self.masters.values(): if m['name'] == name: return defer.succeed(m['id']) id = len(self.masters) + 1 self.masters[id] = dict( id=id, name=name, active=False, last_active=epoch2datetime(self.reactor.seconds())) return defer.succeed(id) def setMasterState(self, masterid, active): if masterid in self.masters: was_active = self.masters[masterid]['active'] self.masters[masterid]['active'] = active if active: self.masters[masterid]['last_active'] = \ epoch2datetime(self.reactor.seconds()) return defer.succeed(bool(was_active) != bool(active)) else: return defer.succeed(False) def getMaster(self, masterid): if masterid in self.masters: return defer.succeed(self.masters[masterid]) return defer.succeed(None) def getMasters(self): return defer.succeed(sorted(self.masters.values(), key=lambda x: x['id'])) # test helpers def markMasterInactive(self, masterid): if masterid in self.masters: self.masters[masterid]['active'] = False return defer.succeed(None) buildbot-3.4.0/master/buildbot/test/fakedb/row.py000066400000000000000000000142661413250514000220010ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import hashlib from twisted.internet import defer from buildbot.util import unicode2bytes class Row: """ Parent class for row classes, which are used to specify test data for database-related tests. @cvar table: the table name @cvar id_column: specify a column that should be assigned an auto-incremented id. Auto-assigned id's begin at 1000, so any explicitly specified ID's should be less than 1000. @cvar required_columns: a tuple of columns that must be given in the constructor @cvar hashedColumns: a tuple of hash column and source columns designating a hash to work around MySQL's inability to do indexing. @ivar values: the values to be inserted into this row """ id_column = () required_columns = () lists = () dicts = () hashedColumns = [] foreignKeys = [] # Columns that content is represented as sa.Binary-like type in DB model. # They value is bytestring (in contrast to text-like columns, which are # unicode). 
binary_columns = () _next_id = None def __init__(self, **kwargs): if self.__init__.__func__ is Row.__init__: raise Exception('Row.__init__ must be overridden to supply default values for columns') self.values = kwargs.copy() if self.id_column: if self.values[self.id_column] is None: self.values[self.id_column] = self.nextId() for col in self.required_columns: assert col in kwargs, "{} not specified: {}".format(col, kwargs) for col in self.lists: setattr(self, col, []) for col in self.dicts: setattr(self, col, {}) # cast to unicode for k, v in self.values.items(): if isinstance(v, str): self.values[k] = str(v) # Binary columns stores either (compressed) binary data or encoded # with utf-8 unicode string. We assume that Row constructor receives # only unicode strings and encode them to utf-8 here. # At this moment there is only one such column: logchunks.contents, # which stores either utf-8 encoded string, or gzip-compressed # utf-8 encoded string. for col in self.binary_columns: self.values[col] = unicode2bytes(self.values[col]) # calculate any necessary hashes for hash_col, src_cols in self.hashedColumns: self.values[hash_col] = self.hashColumns( *(self.values[c] for c in src_cols)) # make the values appear as attributes self.__dict__.update(self.values) def __eq__(self, other): if self.__class__ != other.__class__: return False return self.values == other.values def __ne__(self, other): if self.__class__ != other.__class__: return True return self.values != other.values def __lt__(self, other): if self.__class__ != other.__class__: raise TypeError("Cannot compare {} and {}".format( self.__class__, other.__class__)) return self.values < other.values def __le__(self, other): if self.__class__ != other.__class__: raise TypeError("Cannot compare {} and {}".format( self.__class__, other.__class__)) return self.values <= other.values def __gt__(self, other): if self.__class__ != other.__class__: raise TypeError("Cannot compare {} and {}".format( self.__class__, 
other.__class__)) return self.values > other.values def __ge__(self, other): if self.__class__ != other.__class__: raise TypeError("Cannot compare {} and {}".format( self.__class__, other.__class__)) return self.values >= other.values def __repr__(self): return '{}(**{})'.format(self.__class__.__name__, repr(self.values)) @staticmethod def nextId(): id = Row._next_id if Row._next_id is not None else 1 Row._next_id = id + 1 return id def hashColumns(self, *args): # copied from master/buildbot/db/base.py def encode(x): if x is None: return b'\xf5' elif isinstance(x, str): return x.encode('utf-8') return str(x).encode('utf-8') return hashlib.sha1(b'\0'.join(map(encode, args))).hexdigest() @defer.inlineCallbacks def checkForeignKeys(self, db, t): accessors = dict( buildsetid=db.buildsets.getBuildset, workerid=db.workers.getWorker, builderid=db.builders.getBuilder, buildid=db.builds.getBuild, changesourceid=db.changesources.getChangeSource, changeid=db.changes.getChange, buildrequestid=db.buildrequests.getBuildRequest, sourcestampid=db.sourcestamps.getSourceStamp, schedulerid=db.schedulers.getScheduler, brid=db.buildrequests.getBuildRequest, stepid=db.steps.getStep, masterid=db.masters.getMaster) for foreign_key in self.foreignKeys: if foreign_key in accessors: key = getattr(self, foreign_key) if key is not None: val = yield accessors[foreign_key](key) t.assertTrue(val is not None, "foreign key {}:{} does not exit".format(foreign_key, repr(key))) else: raise ValueError( "warning, unsupported foreign key", foreign_key, self.table) buildbot-3.4.0/master/buildbot/test/fakedb/schedulers.py000066400000000000000000000174341413250514000233330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.db import schedulers from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row class Scheduler(Row): table = "schedulers" id_column = 'id' hashedColumns = [('name_hash', ('name',))] def __init__(self, id=None, name='schname', name_hash=None, enabled=1): super().__init__(id=id, name=name, name_hash=name_hash, enabled=enabled) class SchedulerMaster(Row): table = "scheduler_masters" defaults = dict( schedulerid=None, masterid=None, ) foreignKeys = ('schedulerid', 'masterid') required_columns = ('schedulerid', 'masterid') def __init__(self, schedulerid=None, masterid=None): super().__init__(schedulerid=schedulerid, masterid=masterid) class SchedulerChange(Row): table = "scheduler_changes" defaults = dict( schedulerid=None, changeid=None, important=1, ) foreignKeys = ('schedulerid', 'changeid') required_columns = ('schedulerid', 'changeid') def __init__(self, schedulerid=None, changeid=None, important=1): super().__init__(schedulerid=schedulerid, changeid=changeid, important=important) class FakeSchedulersComponent(FakeDBComponent): def setUp(self): self.schedulers = {} self.scheduler_masters = {} self.states = {} self.classifications = {} self.enabled = {} def insertTestData(self, rows): for row in rows: if isinstance(row, SchedulerChange): cls = self.classifications.setdefault(row.schedulerid, {}) cls[row.changeid] = row.important if isinstance(row, Scheduler): self.schedulers[row.id] = row.name 
self.enabled[row.id] = True if isinstance(row, SchedulerMaster): self.scheduler_masters[row.schedulerid] = row.masterid # component methods def classifyChanges(self, schedulerid, classifications): self.classifications.setdefault( schedulerid, {}).update(classifications) return defer.succeed(None) def flushChangeClassifications(self, schedulerid, less_than=None): if less_than is not None: classifications = self.classifications.setdefault(schedulerid, {}) for changeid in list(classifications): if changeid < less_than: del classifications[changeid] else: self.classifications[schedulerid] = {} return defer.succeed(None) def getChangeClassifications(self, schedulerid, branch=-1, repository=-1, project=-1, codebase=-1): classifications = self.classifications.setdefault(schedulerid, {}) sentinel = dict(branch=object(), repository=object(), project=object(), codebase=object()) if branch != -1: # filter out the classifications for the requested branch classifications = dict( (k, v) for (k, v) in classifications.items() if self.db.changes.changes.get(k, sentinel)['branch'] == branch) if repository != -1: # filter out the classifications for the requested branch classifications = dict( (k, v) for (k, v) in classifications.items() if self.db.changes.changes.get(k, sentinel)['repository'] == repository) if project != -1: # filter out the classifications for the requested branch classifications = dict( (k, v) for (k, v) in classifications.items() if self.db.changes.changes.get(k, sentinel)['project'] == project) if codebase != -1: # filter out the classifications for the requested branch classifications = dict( (k, v) for (k, v) in classifications.items() if self.db.changes.changes.get(k, sentinel)['codebase'] == codebase) return defer.succeed(classifications) def findSchedulerId(self, name): for sch_id, sch_name in self.schedulers.items(): if sch_name == name: return defer.succeed(sch_id) new_id = (max(self.schedulers) + 1) if self.schedulers else 1 self.schedulers[new_id] = 
name return defer.succeed(new_id) def getScheduler(self, schedulerid): if schedulerid in self.schedulers: rv = dict( id=schedulerid, name=self.schedulers[schedulerid], enabled=self.enabled.get(schedulerid, True), masterid=None) # only set masterid if the relevant scheduler master exists and # is active rv['masterid'] = self.scheduler_masters.get(schedulerid) return defer.succeed(rv) return None def getSchedulers(self, active=None, masterid=None): d = defer.DeferredList([ self.getScheduler(id) for id in self.schedulers ]) @d.addCallback def filter(results): # filter off the DeferredList results (we know it's good) results = [r[1] for r in results] # filter for masterid if masterid is not None: results = [r for r in results if r['masterid'] == masterid] # filter for active or inactive if necessary if active: results = [r for r in results if r['masterid'] is not None] elif active is not None: results = [r for r in results if r['masterid'] is None] return results return d def setSchedulerMaster(self, schedulerid, masterid): current_masterid = self.scheduler_masters.get(schedulerid) if current_masterid and masterid is not None and current_masterid != masterid: return defer.fail(schedulers.SchedulerAlreadyClaimedError()) self.scheduler_masters[schedulerid] = masterid return defer.succeed(None) # fake methods def fakeClassifications(self, schedulerid, classifications): """Set the set of classifications for a scheduler""" self.classifications[schedulerid] = classifications def fakeScheduler(self, name, schedulerid): self.schedulers[schedulerid] = name def fakeSchedulerMaster(self, schedulerid, masterid): if masterid is not None: self.scheduler_masters[schedulerid] = masterid else: del self.scheduler_masters[schedulerid] # assertions def assertClassifications(self, schedulerid, classifications): self.t.assertEqual( self.classifications.get(schedulerid, {}), classifications) def assertSchedulerMaster(self, schedulerid, masterid): 
self.t.assertEqual(self.scheduler_masters.get(schedulerid), masterid) def enable(self, schedulerid, v): assert schedulerid in self.schedulers self.enabled[schedulerid] = v return defer.succeed((('control', 'schedulers', schedulerid, 'enable'), {'enabled': v})) buildbot-3.4.0/master/buildbot/test/fakedb/sourcestamps.py000066400000000000000000000142531413250514000237160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import base64 from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row from buildbot.util import epoch2datetime class Patch(Row): table = "patches" id_column = 'id' def __init__(self, id=None, patchlevel=0, patch_base64='aGVsbG8sIHdvcmxk', # 'hello, world', patch_author=None, patch_comment=None, subdir=None): super().__init__(id=id, patchlevel=patchlevel, patch_base64=patch_base64, patch_author=patch_author, patch_comment=patch_comment, subdir=subdir) class SourceStamp(Row): table = "sourcestamps" id_column = 'id' hashedColumns = [('ss_hash', ('branch', 'revision', 'repository', 'project', 'codebase', 'patchid',))] def __init__(self, id=None, branch='master', revision='abcd', patchid=None, repository='repo', codebase='', project='proj', created_at=89834834, ss_hash=None): super().__init__(id=id, branch=branch, revision=revision, patchid=patchid, repository=repository, codebase=codebase, project=project, created_at=created_at, ss_hash=ss_hash) class FakeSourceStampsComponent(FakeDBComponent): def setUp(self): self.sourcestamps = {} self.patches = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Patch): self.patches[row.id] = dict( patch_level=row.patchlevel, patch_body=base64.b64decode(row.patch_base64), patch_author=row.patch_author, patch_comment=row.patch_comment, patch_subdir=row.subdir) for row in rows: if isinstance(row, SourceStamp): ss = self.sourcestamps[row.id] = row.values.copy() ss['created_at'] = epoch2datetime(ss['created_at']) del ss['ss_hash'] del ss['id'] # component methods def findSourceStampId(self, branch=None, revision=None, repository=None, project=None, codebase=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None): d = self.findOrCreateId( branch, revision, repository, project, codebase, patch_body, patch_level, patch_author, patch_comment, patch_subdir) 
d.addCallback(lambda pair: pair[0]) return d def findOrCreateId(self, branch=None, revision=None, repository=None, project=None, codebase=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None): assert codebase is not None, "codebase cannot be None" assert project is not None, "project cannot be None" assert repository is not None, "repository cannot be None" if patch_body: patchid = len(self.patches) + 1 while patchid in self.patches: patchid += 1 self.patches[patchid] = dict( patch_level=patch_level, patch_body=patch_body, patch_subdir=patch_subdir, patch_author=patch_author, patch_comment=patch_comment ) else: patchid = None new_ssdict = dict(branch=branch, revision=revision, codebase=codebase, patchid=patchid, repository=repository, project=project, created_at=epoch2datetime(self.reactor.seconds())) for id, ssdict in self.sourcestamps.items(): keys = ['branch', 'revision', 'repository', 'codebase', 'project', 'patchid'] if [ssdict[k] for k in keys] == [new_ssdict[k] for k in keys]: return defer.succeed((id, True)) id = len(self.sourcestamps) + 100 while id in self.sourcestamps: id += 1 self.sourcestamps[id] = new_ssdict return defer.succeed((id, False)) def getSourceStamp(self, key, no_cache=False): return defer.succeed(self._getSourceStamp_sync(key)) def getSourceStamps(self): return defer.succeed([ self._getSourceStamp_sync(ssid) for ssid in self.sourcestamps ]) def _getSourceStamp_sync(self, ssid): if ssid in self.sourcestamps: ssdict = self.sourcestamps[ssid].copy() ssdict['ssid'] = ssid patchid = ssdict['patchid'] if patchid: ssdict.update(self.patches[patchid]) ssdict['patchid'] = patchid else: ssdict['patch_body'] = None ssdict['patch_level'] = None ssdict['patch_subdir'] = None ssdict['patch_author'] = None ssdict['patch_comment'] = None return ssdict else: return None @defer.inlineCallbacks def getSourceStampsForBuild(self, buildid): build = yield self.db.builds.getBuild(buildid) breq = yield 
self.db.buildrequests.getBuildRequest(build['buildrequestid']) bset = yield self.db.buildsets.getBuildset(breq['buildsetid']) results = [] for ssid in bset['sourcestamps']: results.append((yield self.getSourceStamp(ssid))) return results buildbot-3.4.0/master/buildbot/test/fakedb/state.py000066400000000000000000000101271413250514000223020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row from buildbot.util import bytes2unicode class Object(Row): table = "objects" id_column = 'id' def __init__(self, id=None, name='nam', class_name='cls'): super().__init__(id=id, name=name, class_name=class_name) class ObjectState(Row): table = "object_state" required_columns = ('objectid', ) def __init__(self, objectid=None, name='nam', value_json='{}'): super().__init__(objectid=objectid, name=name, value_json=value_json) class FakeStateComponent(FakeDBComponent): def setUp(self): self.objects = {} self.states = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Object): self.objects[(row.name, row.class_name)] = row.id self.states[row.id] = {} for row in rows: if isinstance(row, ObjectState): assert row.objectid in list(self.objects.values()) self.states[row.objectid][row.name] = row.value_json # component methods def _newId(self): id = 100 while id in self.states: id += 1 return id def getObjectId(self, name, class_name): try: id = self.objects[(name, class_name)] except KeyError: # invent a new id and add it id = self.objects[(name, class_name)] = self._newId() self.states[id] = {} return defer.succeed(id) def getState(self, objectid, name, default=object): try: json_value = self.states[objectid][name] except KeyError: if default is not object: return defer.succeed(default) raise return defer.succeed(json.loads(json_value)) def setState(self, objectid, name, value): self.states[objectid][name] = json.dumps(value) return defer.succeed(None) def atomicCreateState(self, objectid, name, thd_create_callback): value = thd_create_callback() self.states[objectid][name] = json.dumps(bytes2unicode(value)) return defer.succeed(value) # fake methods def set_fake_state(self, object, **kwargs): state_key = (object.name, object.__class__.__name__) if state_key in self.objects: id = 
self.objects[state_key] else: id = self.objects[state_key] = self._newId() self.states[id] = dict((k, json.dumps(v)) for k, v in kwargs.items()) return id # assertions def assertState(self, objectid, missing_keys=None, **kwargs): if missing_keys is None: missing_keys = [] state = self.states[objectid] for k in missing_keys: self.t.assertFalse(k in state, "{} in {}".format(k, state)) for k, v in kwargs.items(): self.t.assertIn(k, state) self.t.assertEqual(json.loads(state[k]), v, "state is %r" % (state,)) def assertStateByClass(self, name, class_name, **kwargs): objectid = self.objects[(name, class_name)] state = self.states[objectid] for k, v in kwargs.items(): self.t.assertIn(k, state) self.t.assertEqual(json.loads(state[k]), v, "state is %r" % (state,)) buildbot-3.4.0/master/buildbot/test/fakedb/steps.py000066400000000000000000000137751413250514000223340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row from buildbot.test.util import validation from buildbot.util import epoch2datetime class Step(Row): table = "steps" id_column = 'id' foreignKeys = ('buildid',) required_columns = ('buildid', ) def __init__(self, id=None, number=29, name='step29', buildid=None, started_at=1304262222, complete_at=None, state_string='', results=None, urls_json='[]', hidden=0): super().__init__(id=id, number=number, name=name, buildid=buildid, started_at=started_at, complete_at=complete_at, state_string=state_string, results=results, urls_json=urls_json, hidden=hidden) class FakeStepsComponent(FakeDBComponent): def setUp(self): self.steps = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Step): self.steps[row.id] = row.values.copy() # component methods def _newId(self): id = 100 while id in self.steps: id += 1 return id def _row2dict(self, row): return dict( id=row['id'], buildid=row['buildid'], number=row['number'], name=row['name'], started_at=epoch2datetime(row['started_at']), complete_at=epoch2datetime(row['complete_at']), state_string=row['state_string'], results=row['results'], urls=json.loads(row['urls_json']), hidden=bool(row['hidden'])) def getStep(self, stepid=None, buildid=None, number=None, name=None): if stepid is not None: row = self.steps.get(stepid) if not row: return defer.succeed(None) return defer.succeed(self._row2dict(row)) else: if number is None and name is None: return defer.fail(RuntimeError("specify both name and number")) for row in self.steps.values(): if row['buildid'] != buildid: continue if number is not None and row['number'] != number: continue if name is not None and row['name'] != name: continue return defer.succeed(self._row2dict(row)) return defer.succeed(None) def getSteps(self, buildid): ret = [] for row in self.steps.values(): if row['buildid'] != 
buildid: continue ret.append(self._row2dict(row)) ret.sort(key=lambda r: r['number']) return defer.succeed(ret) def addStep(self, buildid, name, state_string): validation.verifyType(self.t, 'state_string', state_string, validation.StringValidator()) validation.verifyType(self.t, 'name', name, validation.IdentifierValidator(50)) # get a unique name and number build_steps = [r for r in self.steps.values() if r['buildid'] == buildid] if build_steps: number = max([r['number'] for r in build_steps]) + 1 names = {r['name'] for r in build_steps} if name in names: i = 1 while '{}_{}'.format(name, i) in names: i += 1 name = '{}_{}'.format(name, i) else: number = 0 id = self._newId() self.steps[id] = { 'id': id, 'buildid': buildid, 'number': number, 'name': name, 'started_at': None, 'complete_at': None, 'results': None, 'state_string': state_string, 'urls_json': '[]', 'hidden': False} return defer.succeed((id, number, name)) def startStep(self, stepid): b = self.steps.get(stepid) if b: b['started_at'] = self.reactor.seconds() return defer.succeed(None) def setStepStateString(self, stepid, state_string): validation.verifyType(self.t, 'state_string', state_string, validation.StringValidator()) b = self.steps.get(stepid) if b: b['state_string'] = state_string return defer.succeed(None) def addURL(self, stepid, name, url, _racehook=None): validation.verifyType(self.t, 'stepid', stepid, validation.IntValidator()) validation.verifyType(self.t, 'name', name, validation.IdentifierValidator(50)) validation.verifyType(self.t, 'url', url, validation.StringValidator()) b = self.steps.get(stepid) if b: urls = json.loads(b['urls_json']) url_item = dict(name=name, url=url) if url_item not in urls: urls.append(url_item) b['urls_json'] = json.dumps(urls) return defer.succeed(None) def finishStep(self, stepid, results, hidden): now = self.reactor.seconds() b = self.steps.get(stepid) if b: b['complete_at'] = now b['results'] = results b['hidden'] = bool(hidden) return defer.succeed(None) 
buildbot-3.4.0/master/buildbot/test/fakedb/tags.py000066400000000000000000000031701413250514000221200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row class Tag(Row): table = "tags" id_column = 'id' hashedColumns = [('name_hash', ('name',))] def __init__(self, id=None, name='some:tag', name_hash=None): super().__init__(id=id, name=name, name_hash=name_hash) class FakeTagsComponent(FakeDBComponent): def setUp(self): self.tags = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Tag): self.tags[row.id] = dict( id=row.id, name=row.name) def findTagId(self, name): for m in self.tags.values(): if m['name'] == name: return defer.succeed(m['id']) id = len(self.tags) + 1 self.tags[id] = dict( id=id, name=name) return defer.succeed(id) buildbot-3.4.0/master/buildbot/test/fakedb/test_result_sets.py000066400000000000000000000104731413250514000246010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.db.test_result_sets import TestResultSetAlreadyCompleted from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row class TestResultSet(Row): table = 'test_result_sets' id_column = 'id' foreignKeys = ('builderid', 'buildid', 'stepid') required_columns = ('builderid', 'buildid', 'stepid', 'category', 'value_unit', 'complete') def __init__(self, id=None, builderid=None, buildid=None, stepid=None, description=None, category=None, value_unit=None, tests_passed=None, tests_failed=None, complete=None): super().__init__(id=id, builderid=builderid, buildid=buildid, stepid=stepid, description=description, category=category, value_unit=value_unit, tests_passed=tests_passed, tests_failed=tests_failed, complete=complete) class FakeTestResultSetsComponent(FakeDBComponent): def setUp(self): self.result_sets = {} def insertTestData(self, rows): for row in rows: if isinstance(row, TestResultSet): self.result_sets[row.id] = row.values.copy() def addTestResultSet(self, builderid, buildid, stepid, description, category, value_unit): id = Row.nextId() self.result_sets[id] = { 'id': id, 'builderid': builderid, 'buildid': buildid, 'stepid': stepid, 'description': description, 'category': category, 'value_unit': value_unit, 'tests_failed': None, 'tests_passed': None, 'complete': False } return defer.succeed(id) def _row2dict(self, row): row = row.copy() row['complete'] = bool(row['complete']) return row # returns a 
Deferred def getTestResultSet(self, test_result_setid): if test_result_setid not in self.result_sets: return defer.succeed(None) return defer.succeed(self._row2dict(self.result_sets[test_result_setid])) # returns a Deferred def getTestResultSets(self, builderid, buildid=None, stepid=None, complete=None, result_spec=None): ret = [] for id, row in self.result_sets.items(): if row['builderid'] != builderid: continue if buildid is not None and row['buildid'] != buildid: continue if stepid is not None and row['stepid'] != stepid: continue if complete is not None and row['complete'] != complete: continue ret.append(self._row2dict(row)) if result_spec is not None: ret = self.applyResultSpec(ret, result_spec) return defer.succeed(ret) # returns a Deferred def completeTestResultSet(self, test_result_setid, tests_passed=None, tests_failed=None): if test_result_setid not in self.result_sets: raise TestResultSetAlreadyCompleted(('Test result set {} is already completed ' 'or does not exist').format(test_result_setid)) row = self.result_sets[test_result_setid] if row['complete'] != 0: raise TestResultSetAlreadyCompleted(('Test result set {} is already completed ' 'or does not exist').format(test_result_setid)) row['complete'] = 1 if tests_passed is not None: row['tests_passed'] = tests_passed if tests_failed is not None: row['tests_failed'] = tests_failed return defer.succeed(None) buildbot-3.4.0/master/buildbot/test/fakedb/test_results.py000066400000000000000000000204131413250514000237210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row class TestName(Row): table = 'test_names' id_column = 'id' foreignKeys = ('builderid',) required_columns = ('builderid', 'name') def __init__(self, id=None, builderid=None, name='nam'): super().__init__(id=id, builderid=builderid, name=name) class TestCodePath(Row): table = 'test_code_paths' id_column = 'id' foreignKeys = ('builderid',) required_columns = ('builderid', 'path') def __init__(self, id=None, builderid=None, path='path/to/file'): super().__init__(id=id, builderid=builderid, path=path) class TestResult(Row): table = 'test_results' id_column = 'id' foreignKeys = ('builderid', 'test_result_setid', 'test_nameid', 'test_code_pathid') required_columns = ('builderid', 'test_result_setid', 'value') def __init__(self, id=None, builderid=None, test_result_setid=None, test_nameid=None, test_code_pathid=None, line=None, duration_ns=None, value=None): super().__init__(id=id, builderid=builderid, test_result_setid=test_result_setid, test_nameid=test_nameid, test_code_pathid=test_code_pathid, line=line, duration_ns=duration_ns, value=value) class FakeTestResultsComponent(FakeDBComponent): def setUp(self): self.results = {} self.code_paths = {} self.names = {} def insertTestData(self, rows): for row in rows: if isinstance(row, TestName): self.names[row.id] = row.values.copy() for row in rows: if isinstance(row, TestCodePath): self.code_paths[row.id] = row.values.copy() for row in rows: if isinstance(row, TestResult): if row.test_nameid is not None: assert row.test_nameid in self.names if row.test_code_pathid is not None: assert row.test_code_pathid in self.code_paths self.results[row.id] = 
row.values.copy() def _add_code_paths(self, builderid, paths): path_to_id = {} for path in sorted(paths): id = self._get_code_path_id(builderid, path) if id is not None: path_to_id[path] = id continue id = Row.nextId() self.code_paths[id] = { 'builderid': builderid, 'path': path } path_to_id[path] = id return path_to_id def _get_code_path_id(self, builderid, path): for id, path_dict in self.code_paths.items(): if path_dict['builderid'] == builderid and path_dict['path'] == path: return id return None def _add_names(self, builderid, names): name_to_id = {} for name in sorted(names): id = self._get_name_id(builderid, name) if id is not None: name_to_id[name] = id continue id = Row.nextId() self.names[id] = { 'builderid': builderid, 'name': name } name_to_id[name] = id return name_to_id def _get_name_id(self, builderid, name): for id, name_dict in self.names.items(): if name_dict['builderid'] == builderid and name_dict['name'] == name: return id return None @defer.inlineCallbacks def addTestResults(self, builderid, test_result_setid, result_values): insert_code_paths = set() insert_names = set() for result_value in result_values: if 'value' not in result_value: raise KeyError('Each of result_values must contain \'value\' key') if 'test_name' not in result_value and 'test_code_path' not in result_value: raise KeyError('Each of result_values must contain at least one of ' '\'test_name\' or \'test_code_path\' keys') if 'test_name' in result_value: insert_names.add(result_value['test_name']) if 'test_code_path' in result_value: insert_code_paths.add(result_value['test_code_path']) code_path_to_id = yield self._add_code_paths(builderid, insert_code_paths) name_to_id = yield self._add_names(builderid, insert_names) for result_value in result_values: insert_value = { 'value': result_value['value'], 'builderid': builderid, 'test_result_setid': test_result_setid, 'test_nameid': None, 'test_code_pathid': None, 'duration_ns': None, 'line': None, } if 'test_name' in result_value: 
insert_value['test_nameid'] = name_to_id[result_value['test_name']] if 'test_code_path' in result_value: insert_value['test_code_pathid'] = code_path_to_id[result_value['test_code_path']] if 'line' in result_value: insert_value['line'] = result_value['line'] if 'duration_ns' in result_value: insert_value['duration_ns'] = result_value['duration_ns'] self.results[Row.nextId()] = insert_value # returns a Deferred def getTestNames(self, builderid, name_prefix=None, result_spec=None): ret = [] for id, row in sorted(self.names.items()): if row['builderid'] != builderid: continue if name_prefix is not None and not row['name'].startswith(name_prefix): continue ret.append(row['name']) if result_spec is not None: ret = self.applyResultSpec(ret, result_spec) return defer.succeed(ret) # returns a Deferred def getTestCodePaths(self, builderid, path_prefix=None, result_spec=None): ret = [] for id, row in sorted(self.code_paths.items()): if row['builderid'] != builderid: continue if path_prefix is not None and not row['path'].startswith(path_prefix): continue ret.append(row['path']) if result_spec is not None: ret = self.applyResultSpec(ret, result_spec) return defer.succeed(ret) def _fill_extra_data(self, id, row): row = row.copy() row['id'] = id if row['test_nameid'] is not None: row['test_name'] = self.names[row['test_nameid']]['name'] else: row['test_name'] = None del row['test_nameid'] if row['test_code_pathid'] is not None: row['test_code_path'] = self.code_paths[row['test_code_pathid']]['path'] else: row['test_code_path'] = None del row['test_code_pathid'] return row # returns a Deferred def getTestResult(self, test_resultid): if test_resultid not in self.results: return defer.succeed(None) return defer.succeed(self._fill_extra_data(test_resultid, self.results[test_resultid])) # returns a Deferred def getTestResults(self, builderid, test_result_setid, result_spec=None): ret = [] for id, row in sorted(self.results.items()): if row['builderid'] != builderid: continue if 
row['test_result_setid'] != test_result_setid: continue ret.append(self._fill_extra_data(id, row)) if result_spec is not None: ret = self.applyResultSpec(ret, result_spec) return defer.succeed(ret) buildbot-3.4.0/master/buildbot/test/fakedb/users.py000066400000000000000000000123241413250514000223240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row class User(Row): table = "users" id_column = 'uid' def __init__(self, uid=None, identifier='soap', bb_username=None, bb_password=None): super().__init__(uid=uid, identifier=identifier, bb_username=bb_username, bb_password=bb_password) class UserInfo(Row): table = "users_info" foreignKeys = ('uid',) required_columns = ('uid', ) def __init__(self, uid=None, attr_type='git', attr_data='Tyler Durden '): super().__init__(uid=uid, attr_type=attr_type, attr_data=attr_data) class FakeUsersComponent(FakeDBComponent): def setUp(self): self.users = {} self.users_info = {} self.id_num = 0 def insertTestData(self, rows): for row in rows: if isinstance(row, User): self.users[row.uid] = dict(identifier=row.identifier, bb_username=row.bb_username, bb_password=row.bb_password) if isinstance(row, UserInfo): assert row.uid in self.users if 
row.uid not in self.users_info: self.users_info[row.uid] = [dict(attr_type=row.attr_type, attr_data=row.attr_data)] else: self.users_info[row.uid].append( dict(attr_type=row.attr_type, attr_data=row.attr_data)) def _user2dict(self, uid): usdict = None if uid in self.users: usdict = self.users[uid] if uid in self.users_info: infos = self.users_info[uid] for attr in infos: usdict[attr['attr_type']] = attr['attr_data'] usdict['uid'] = uid return usdict def nextId(self): self.id_num += 1 return self.id_num # component methods def findUserByAttr(self, identifier, attr_type, attr_data): for uid in self.users_info: attrs = self.users_info[uid] for attr in attrs: if (attr_type == attr['attr_type'] and attr_data == attr['attr_data']): return defer.succeed(uid) uid = self.nextId() self.db.insertTestData([User(uid=uid, identifier=identifier)]) self.db.insertTestData([UserInfo(uid=uid, attr_type=attr_type, attr_data=attr_data)]) return defer.succeed(uid) def getUser(self, uid): usdict = None if uid in self.users: usdict = self._user2dict(uid) return defer.succeed(usdict) def getUserByUsername(self, username): usdict = None for uid in self.users: user = self.users[uid] if user['bb_username'] == username: usdict = self._user2dict(uid) return defer.succeed(usdict) def updateUser(self, uid=None, identifier=None, bb_username=None, bb_password=None, attr_type=None, attr_data=None): assert uid is not None if identifier is not None: self.users[uid]['identifier'] = identifier if bb_username is not None: assert bb_password is not None try: user = self.users[uid] user['bb_username'] = bb_username user['bb_password'] = bb_password except KeyError: pass if attr_type is not None: assert attr_data is not None try: infos = self.users_info[uid] for attr in infos: if attr_type == attr['attr_type']: attr['attr_data'] = attr_data break else: infos.append(dict(attr_type=attr_type, attr_data=attr_data)) except KeyError: pass return defer.succeed(None) def removeUser(self, uid): if uid in 
self.users: self.users.pop(uid) self.users_info.pop(uid) return defer.succeed(None) def identifierToUid(self, identifier): for uid in self.users: if identifier == self.users[uid]['identifier']: return defer.succeed(uid) return defer.succeed(None) buildbot-3.4.0/master/buildbot/test/fakedb/workers.py000066400000000000000000000216671413250514000226710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json from twisted.internet import defer from buildbot.test.fakedb.base import FakeDBComponent from buildbot.test.fakedb.row import Row from buildbot.test.util import validation class Worker(Row): table = "workers" id_column = 'id' required_columns = ('name', ) def __init__(self, id=None, name='some:worker', info={"a": "b"}, paused=0, graceful=0): super().__init__(id=id, name=name, info=info, paused=paused, graceful=graceful) class ConnectedWorker(Row): table = "connected_workers" id_column = 'id' required_columns = ('masterid', 'workerid') def __init__(self, id=None, masterid=None, workerid=None): super().__init__(id=id, masterid=masterid, workerid=workerid) class ConfiguredWorker(Row): table = "configured_workers" id_column = 'id' required_columns = ('buildermasterid', 'workerid') def __init__(self, id=None, buildermasterid=None, workerid=None): super().__init__(id=id, buildermasterid=buildermasterid, workerid=workerid) class FakeWorkersComponent(FakeDBComponent): def setUp(self): self.workers = {} self.configured = {} self.connected = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Worker): self.workers[row.id] = dict( id=row.id, name=row.name, paused=row.paused, graceful=row.graceful, info=row.info) elif isinstance(row, ConfiguredWorker): row.id = row.buildermasterid * 10000 + row.workerid self.configured[row.id] = dict( buildermasterid=row.buildermasterid, workerid=row.workerid) elif isinstance(row, ConnectedWorker): self.connected[row.id] = dict( masterid=row.masterid, workerid=row.workerid) def findWorkerId(self, name): validation.verifyType(self.t, 'name', name, validation.IdentifierValidator(50)) for m in self.workers.values(): if m['name'] == name: return defer.succeed(m['id']) id = len(self.workers) + 1 self.workers[id] = dict( id=id, name=name, info={}) return defer.succeed(id) def _getWorkerByName(self, name): for worker in self.workers.values(): if worker['name'] == name: return worker 
return None def getWorker(self, workerid=None, name=None, masterid=None, builderid=None): # get the id and the worker if workerid is None: for worker in self.workers.values(): if worker['name'] == name: workerid = worker['id'] break else: worker = None else: worker = self.workers.get(workerid) if not worker: return defer.succeed(None) # now get the connection status per builder_master, filtered # by builderid and masterid return defer.succeed(self._mkdict(worker, builderid, masterid)) def getWorkers(self, masterid=None, builderid=None, paused=None, graceful=None): if masterid is not None or builderid is not None: builder_masters = self.db.builders.builder_masters workers = [] for worker in self.workers.values(): configured = [cfg for cfg in self.configured.values() if cfg['workerid'] == worker['id']] pairs = [builder_masters[cfg['buildermasterid']] for cfg in configured] if builderid is not None and masterid is not None: if (builderid, masterid) not in pairs: continue if builderid is not None: if not any(builderid == p[0] for p in pairs): continue if masterid is not None: if not any((masterid == p[1]) for p in pairs): continue workers.append(worker) else: workers = list(self.workers.values()) if paused is not None: workers = [w for w in workers if w['paused'] == paused] if graceful is not None: workers = [w for w in workers if w['graceful'] == graceful] return defer.succeed([ self._mkdict(worker, builderid, masterid) for worker in workers]) def workerConnected(self, workerid, masterid, workerinfo): worker = self.workers.get(workerid) # test serialization json.dumps(workerinfo) if worker is not None: worker['info'] = workerinfo new_conn = dict(masterid=masterid, workerid=workerid) if new_conn not in self.connected.values(): conn_id = max([0] + list(self.connected)) + 1 self.connected[conn_id] = new_conn return defer.succeed(None) def deconfigureAllWorkersForMaster(self, masterid): buildermasterids = [_id for _id, (builderid, mid) in 
self.db.builders.builder_masters.items() if mid == masterid] for k, v in list(self.configured.items()): if v['buildermasterid'] in buildermasterids: del self.configured[k] def workerConfigured(self, workerid, masterid, builderids): buildermasterids = [_id for _id, (builderid, mid) in self.db.builders.builder_masters.items() if mid == masterid and builderid in builderids] if len(buildermasterids) != len(builderids): raise ValueError(("Some builders are not configured for this master: " "builders: {}, master: {} buildermaster:{}" ).format(builderids, masterid, self.db.builders.builder_masters)) allbuildermasterids = [_id for _id, (builderid, mid) in self.db.builders.builder_masters.items() if mid == masterid] for k, v in list(self.configured.items()): if v['buildermasterid'] in allbuildermasterids and v['workerid'] == workerid: del self.configured[k] self.insertTestData([ConfiguredWorker(workerid=workerid, buildermasterid=buildermasterid) for buildermasterid in buildermasterids]) return defer.succeed(None) def workerDisconnected(self, workerid, masterid): del_conn = dict(masterid=masterid, workerid=workerid) for id, conn in self.connected.items(): if conn == del_conn: del self.connected[id] break return defer.succeed(None) def setWorkerState(self, workerid, paused, graceful): worker = self.workers.get(workerid) if worker is not None: worker['paused'] = int(paused) worker['graceful'] = int(graceful) def _configuredOn(self, workerid, builderid=None, masterid=None): cfg = [] for cs in self.configured.values(): if cs['workerid'] != workerid: continue bid, mid = self.db.builders.builder_masters[cs['buildermasterid']] if builderid is not None and bid != builderid: continue if masterid is not None and mid != masterid: continue cfg.append({'builderid': bid, 'masterid': mid}) return cfg def _connectedTo(self, workerid, masterid=None): conns = [] for cs in self.connected.values(): if cs['workerid'] != workerid: continue if masterid is not None and cs['masterid'] != masterid: 
continue conns.append(cs['masterid']) return conns def _mkdict(self, w, builderid, masterid): return { 'id': w['id'], 'workerinfo': w['info'], 'name': w['name'], 'paused': bool(w.get('paused')), 'graceful': bool(w.get('graceful')), 'configured_on': self._configuredOn(w['id'], builderid, masterid), 'connected_to': self._connectedTo(w['id'], masterid), } buildbot-3.4.0/master/buildbot/test/fuzz/000077500000000000000000000000001413250514000203715ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/fuzz/__init__.py000066400000000000000000000000001413250514000224700ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/fuzz/test_lru.py000066400000000000000000000065171413250514000226150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import random from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.test.util import fuzz from buildbot.util import lru # construct weakref-able objects for particular keys def short(k): return set([k.upper() * 3]) def long(k): return set([k.upper() * 6]) def deferUntilLater(secs, result=None): d = defer.Deferred() reactor.callLater(secs, d.callback, result) return d class LRUCacheFuzzer(fuzz.FuzzTestCase): FUZZ_TIME = 60 def setUp(self): lru.inv_failed = False def tearDown(self): self.assertFalse(lru.inv_failed, "invariant failed; see logs") if hasattr(self, 'lru'): log.msg("hits: %d; misses: %d; refhits: %d" % (self.lru.hits, self.lru.misses, self.lru.refhits)) # tests @defer.inlineCallbacks def do_fuzz(self, endTime): lru.inv_failed = False def delayed_miss_fn(key): return deferUntilLater(random.uniform(0.001, 0.002), set([key + 1000])) self.lru = lru.AsyncLRUCache(delayed_miss_fn, 50) keys = list(range(250)) errors = [] # bail out early in the event of an error results = [] # keep references to (most) results # fire off as many requests as we can in one second, with lots of # overlap. 
while not errors and reactor.seconds() < endTime: key = random.choice(keys) d = self.lru.get(key) def check(result, key): self.assertEqual(result, set([key + 1000])) if random.uniform(0, 1.0) < 0.9: results.append(result) results[:-100] = [] d.addCallback(check, key) @d.addErrback def eb(f): errors.append(f) return f # unhandled error -> in the logs # give the reactor some time to process pending events if random.uniform(0, 1.0) < 0.5: yield deferUntilLater(0) # now wait until all of the pending calls have cleared, noting that # this method will be counted as one delayed call, in the current # implementation while len(reactor.getDelayedCalls()) > 1: # give the reactor some time to process pending events yield deferUntilLater(0.001) self.assertFalse(lru.inv_failed, "invariant failed; see logs") log.msg("hits: %d; misses: %d; refhits: %d" % (self.lru.hits, self.lru.misses, self.lru.refhits)) buildbot-3.4.0/master/buildbot/test/integration/000077500000000000000000000000001413250514000217165ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/integration/README.txt000066400000000000000000000003331413250514000234130ustar00rootroot00000000000000"Integration" tests are tests that exercise a significant chunk of the Buildbot code, and thus do not really count as unit tests. When debugging, get the unit tests working first, *then* work on the integration tests. buildbot-3.4.0/master/buildbot/test/integration/__init__.py000066400000000000000000000000001413250514000240150ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/integration/interop/000077500000000000000000000000001413250514000233765ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/integration/interop/__init__.py000066400000000000000000000015071413250514000255120ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """Tests in this module are meant to be used for interoperability between different version of worker vs master (e.g py2 vs py3) """ buildbot-3.4.0/master/buildbot/test/integration/interop/test_commandmixin.py000066400000000000000000000062051413250514000274750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process import results from buildbot.process.buildstep import BuildStep from buildbot.process.buildstep import CommandMixin from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # and makes sure the command mixin is working. class CommandMixinMaster(RunMasterBase): @defer.inlineCallbacks def test_commandmixin(self): yield self.setupConfig(masterConfig()) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) self.assertEqual(build['results'], results.SUCCESS) class CommandMixinMasterPB(CommandMixinMaster): proto = "pb" class TestCommandMixinStep(BuildStep, CommandMixin): @defer.inlineCallbacks def run(self): contents = yield self.runGlob('*') if contents != []: return results.FAILURE hasPath = yield self.pathExists('composite_mixin_test') if hasPath: return results.FAILURE yield self.runMkdir('composite_mixin_test') hasPath = yield self.pathExists('composite_mixin_test') if not hasPath: return results.FAILURE contents = yield self.runGlob('*') if not contents[0].endswith('composite_mixin_test'): return results.FAILURE yield self.runRmdir('composite_mixin_test') hasPath = yield self.pathExists('composite_mixin_test') if hasPath: return results.FAILURE return results.SUCCESS # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"])] f = BuildFactory() f.addStep(TestCommandMixinStep()) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c 
buildbot-3.4.0/master/buildbot/test/integration/interop/test_compositestepmixin.py000066400000000000000000000064371413250514000307640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process import results from buildbot.process.buildstep import BuildStep from buildbot.steps.worker import CompositeStepMixin from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # and makes sure the composite step mixin is working. 
class CompositeStepMixinMaster(RunMasterBase): @defer.inlineCallbacks def test_compositemixin(self): yield self.setupConfig(masterConfig()) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) self.assertEqual(build['results'], results.SUCCESS) class CompositeStepMixinMasterPb(CompositeStepMixinMaster): proto = "pb" class TestCompositeMixinStep(BuildStep, CompositeStepMixin): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.logEnviron = False @defer.inlineCallbacks def run(self): contents = yield self.runGlob('*') if contents != []: return results.FAILURE hasPath = yield self.pathExists('composite_mixin_test') if hasPath: return results.FAILURE yield self.runMkdir('composite_mixin_test') hasPath = yield self.pathExists('composite_mixin_test') if not hasPath: return results.FAILURE contents = yield self.runGlob('*') if not contents[0].endswith('composite_mixin_test'): return results.FAILURE yield self.runRmdir('composite_mixin_test') hasPath = yield self.pathExists('composite_mixin_test') if hasPath: return results.FAILURE return results.SUCCESS # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"])] f = BuildFactory() f.addStep(TestCompositeMixinStep()) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/interop/test_integration_secrets.py000066400000000000000000000112351413250514000310640ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from parameterized import parameterized from twisted.internet import defer from buildbot.process.properties import Interpolate from buildbot.reporters.http import HttpStatusPush from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util.integration import RunMasterBase class FakeSecretReporter(HttpStatusPush): def sendMessage(self, reports): assert self.auth == ('user', 'myhttppasswd') self.reported = True class SecretsConfig(RunMasterBase): # Note that the secret name must be long enough so that it does not crash with random directory # or file names in the build dictionary. 
@parameterized.expand([ ('with_interpolation', True), ('plain_command', False), ]) @defer.inlineCallbacks def test_secret(self, name, use_interpolation): c = masterConfig(use_interpolation) yield self.setupConfig(c) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) # check the command line res = yield self.checkBuildStepLogExist(build, "echo ") # also check the secrets are replaced in argv yield self.checkBuildStepLogExist(build, "argv:.*echo.*", regex=True) # also check that the correct value goes to the command if os.name == "posix" and use_interpolation: res &= yield self.checkBuildStepLogExist(build, "The password was there") self.assertTrue(res) # at this point, build contains all the log and steps info that is in the db # we check that our secret is not in there! self.assertNotIn("secretvalue", repr(build)) self.assertTrue(c['services'][0].reported) @parameterized.expand([ ('with_interpolation', True), ('plain_command', False), ]) @defer.inlineCallbacks def test_secretReconfig(self, name, use_interpolation): c = masterConfig(use_interpolation) yield self.setupConfig(c) c['secretsProviders'] = [FakeSecretStorage( secretdict={"foo": "different_value", "something": "more"})] yield self.master.reconfig() build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "echo ") self.assertTrue(res) # at this point, build contains all the log and steps info that is in the db # we check that our secret is not in there! 
self.assertNotIn("different_value", repr(build)) class SecretsConfigPB(SecretsConfig): proto = "pb" # master configuration def masterConfig(use_interpolation): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers, steps, util c['services'] = [FakeSecretReporter('http://example.com/hook', auth=('user', Interpolate('%(secret:httppasswd)s')))] c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] c['secretsProviders'] = [FakeSecretStorage(secretdict={"foo": "secretvalue", "something": "more", 'httppasswd': 'myhttppasswd'})] f = BuildFactory() if use_interpolation: if os.name == "posix": # on posix we can also check whether the password was passed to the command command = Interpolate('echo %(secret:foo)s | ' + 'sed "s/secretvalue/The password was there/"') else: command = Interpolate('echo %(secret:foo)s') else: command = ['echo', util.Secret('foo')] f.addStep(steps.ShellCommand(command=command)) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/interop/test_interruptcommand.py000066400000000000000000000044041413250514000304040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process.results import CANCELLED from buildbot.test.util.decorators import flaky from buildbot.test.util.integration import RunMasterBase from buildbot.util import asyncSleep class InterruptCommand(RunMasterBase): """Make sure we can interrupt a command""" @flaky(bugNumber=4404, onPlatform='win32') @defer.inlineCallbacks def test_interrupt(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True) self.assertEqual(build['steps'][-1]['results'], CANCELLED) class InterruptCommandPb(InterruptCommand): proto = "pb" # master configuration def masterConfig(): c = {} from buildbot.plugins import schedulers, steps, util class SleepAndInterrupt(steps.ShellSequence): @defer.inlineCallbacks def run(self): if self.worker.worker_system == "nt": sleep = "waitfor SomethingThatIsNeverHappening /t 100 >nul 2>&1" else: sleep = ["sleep", "100"] d = self.runShellSequence([util.ShellArg(sleep)]) yield asyncSleep(1) self.interrupt("just testing") res = yield d return res c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = util.BuildFactory() f.addStep(SleepAndInterrupt()) c['builders'] = [ util.BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/interop/test_setpropertyfromcommand.py000066400000000000000000000046461413250514000316440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.internet import task from buildbot.test.util.integration import RunMasterBase # This integration test helps reproduce http://trac.buildbot.net/ticket/3024 # we make sure that we can reconfigure the master while build is running class SetPropertyFromCommand(RunMasterBase): @defer.inlineCallbacks def test_setProp(self): yield self.setupConfig(masterConfig()) oldNewLog = self.master.data.updates.addLog @defer.inlineCallbacks def newLog(*arg, **kw): # Simulate db delay. We usually don't test race conditions # with delays, but in integrations test, that would be pretty # tricky yield task.deferLater(reactor, .1, lambda: None) res = yield oldNewLog(*arg, **kw) return res self.master.data.updates.addLog = newLog build = yield self.doForceBuild(wantProperties=True) self.assertEqual( build['properties']['test'], ('foo', 'SetPropertyFromCommand Step')) class SetPropertyFromCommandPB(SetPropertyFromCommand): proto = "pb" # master configuration num_reconfig = 0 def masterConfig(): global num_reconfig num_reconfig += 1 c = {} from buildbot.plugins import schedulers, steps, util c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = util.BuildFactory() f.addStep(steps.SetPropertyFromCommand( property="test", command=["echo", "foo"])) c['builders'] = [ util.BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/interop/test_transfer.py000066400000000000000000000174511413250514000266430ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import shutil from twisted.internet import defer from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.test.util.decorators import flaky from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment # and make sure the transfer steps are working # When new protocols are added, make sure you update this test to exercise # your proto implementation class TransferStepsMasterPb(RunMasterBase): proto = "pb" def readMasterDirContents(self, top): contents = {} for root, dirs, files in os.walk(top): for name in files: fn = os.path.join(root, name) with open(fn) as f: contents[fn] = f.read() return contents def get_config_single_step(self, step): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.FileUpload(workersrc="dir/noexist_path", masterdest="master_dest")) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f) ] return c def get_non_existing_file_upload_config(self): from buildbot.plugins import steps step = steps.FileUpload(workersrc="dir/noexist_path", 
masterdest="master_dest") return self.get_config_single_step(step) def get_non_existing_directory_upload_config(self): from buildbot.plugins import steps step = steps.DirectoryUpload(workersrc="dir/noexist_path", masterdest="master_dest") return self.get_config_single_step(step) def get_non_existing_multiple_file_upload_config(self): from buildbot.plugins import steps step = steps.MultipleFileUpload(workersrcs=["dir/noexist_path"], masterdest="master_dest") return self.get_config_single_step(step) @flaky(bugNumber=4407, onPlatform='win32') @defer.inlineCallbacks def test_transfer(self): yield self.setupConfig(masterConfig(bigfilename=self.mktemp())) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], SUCCESS) dirContents = self.readMasterDirContents("dir") self.assertEqual( dirContents, {os.path.join('dir', 'file1.txt'): 'filecontent', os.path.join('dir', 'file2.txt'): 'filecontent2', os.path.join('dir', 'file3.txt'): 'filecontent2'}) # cleanup our mess (worker is cleaned up by parent class) shutil.rmtree("dir") os.unlink("master.txt") @defer.inlineCallbacks def test_globTransfer(self): yield self.setupConfig(masterGlobConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], SUCCESS) dirContents = self.readMasterDirContents("dest") self.assertEqual(dirContents, { os.path.join('dest', 'file1.txt'): 'filecontent', os.path.join('dest', 'notafile1.txt'): 'filecontent2', os.path.join('dest', 'only1.txt'): 'filecontent2' }) # cleanup shutil.rmtree("dest") @defer.inlineCallbacks def test_no_exist_file_upload(self): yield self.setupConfig(self.get_non_existing_file_upload_config()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], FAILURE) res = yield self.checkBuildStepLogExist(build, "Cannot open file") self.assertTrue(res) @defer.inlineCallbacks def test_no_exist_directory_upload(self): yield 
self.setupConfig(self.get_non_existing_directory_upload_config()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], FAILURE) res = yield self.checkBuildStepLogExist(build, "Cannot open file") self.assertTrue(res) @defer.inlineCallbacks def test_no_exist_multiple_file_upload(self): yield self.setupConfig(self.get_non_existing_multiple_file_upload_config()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], FAILURE) res = yield self.checkBuildStepLogExist(build, "Cannot open file") self.assertTrue(res) class TransferStepsMasterNull(TransferStepsMasterPb): proto = "null" # master configuration def masterConfig(bigfilename): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() # do a bunch of transfer to exercise the protocol f.addStep(steps.StringDownload("filecontent", workerdest="dir/file1.txt")) f.addStep(steps.StringDownload("filecontent2", workerdest="dir/file2.txt")) # create 8 MB file with open(bigfilename, 'w') as o: buf = "xxxxxxxx" * 1024 for i in range(1000): o.write(buf) f.addStep( steps.FileDownload( mastersrc=bigfilename, workerdest="bigfile.txt")) f.addStep( steps.FileUpload(workersrc="dir/file2.txt", masterdest="master.txt")) f.addStep( steps.FileDownload(mastersrc="master.txt", workerdest="dir/file3.txt")) f.addStep(steps.DirectoryUpload(workersrc="dir", masterdest="dir")) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f) ] return c def masterGlobConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers from buildbot.steps.worker import CompositeStepMixin class CustomStep(steps.BuildStep, CompositeStepMixin): @defer.inlineCallbacks def 
run(self): content = yield self.getFileContentFromWorker( "dir/file1.txt", abandonOnFailure=True) assert content == "filecontent" return SUCCESS c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"]) ] f = BuildFactory() f.addStep(steps.StringDownload("filecontent", workerdest="dir/file1.txt")) f.addStep( steps.StringDownload( "filecontent2", workerdest="dir/notafile1.txt")) f.addStep(steps.StringDownload("filecontent2", workerdest="dir/only1.txt")) f.addStep( steps.MultipleFileUpload( workersrcs=["dir/file*.txt", "dir/not*.txt", "dir/only?.txt"], masterdest="dest/", glob=True)) f.addStep(CustomStep()) c['builders'] = [ BuilderConfig( name="testy", workernames=["local1"], factory=f) ] return c buildbot-3.4.0/master/buildbot/test/integration/interop/test_worker_reconnect.py000066400000000000000000000042601413250514000303620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process.buildstep import BuildStep from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase class DisconnectingStep(BuildStep): disconnection_list = [] def run(self): self.disconnection_list.append(self) if len(self.disconnection_list) < 2: self.worker.disconnect() return SUCCESS class WorkerReconnect(RunMasterBase): """integration test for testing worker disconnection and reconnection""" proto = "pb" @defer.inlineCallbacks def test_eventually_reconnect(self): DisconnectingStep.disconnection_list = [] yield self.setupConfig(masterConfig()) build = yield self.doForceBuild() self.assertEqual(build['buildid'], 2) self.assertEqual(len(DisconnectingStep.disconnection_list), 2) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"]), schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(DisconnectingStep()) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/pki/000077500000000000000000000000001413250514000225015ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/integration/pki/127.0.0.1.crt000066400000000000000000000126661413250514000241720ustar00rootroot00000000000000Certificate: Data: Version: 3 (0x2) Serial Number: 1 (0x1) Signature Algorithm: sha256WithRSAEncryption Issuer: C=ZZ, ST=QA, L=Nowhere, O=Buildbot, OU=Development Team, CN=Buildbot CA/name=EasyRSA/emailAddress=buildbot@integration.test Validity Not Before: Sep 2 12:10:17 2016 GMT Not After : Aug 31 12:10:17 2026 GMT Subject: C=ZZ, ST=QA, L=Nowhere, O=Buildbot, OU=Development Team, 
CN=127.0.0.1/name=EasyRSA/emailAddress=buildbot@integration.test Subject Public Key Info: Public Key Algorithm: rsaEncryption Public-Key: (2048 bit) Modulus: 00:9e:e5:6e:8d:83:89:6e:3c:45:7a:37:2d:cf:dc: a4:37:38:30:b8:58:cb:50:b7:78:d2:f5:11:e4:e4: 3b:de:3f:02:f0:b5:4b:2a:f4:4e:e5:cc:f0:e7:cf: 43:a4:36:5a:22:6b:89:3e:aa:c4:ef:2c:75:3a:cc: 43:e4:8d:d5:99:4e:1f:08:a6:3d:36:2d:72:80:10: 7b:52:20:44:9a:c7:ee:6b:45:2f:41:cd:0e:3e:dd: 59:01:eb:bb:11:2c:cb:e4:34:bd:63:d9:73:84:90: 36:d9:9b:1b:1b:4f:d0:15:12:89:df:bd:a6:3c:cf: 7e:5b:f5:0b:4d:e1:18:47:1f:7c:58:e4:2a:ae:17: fa:c1:13:64:6f:06:78:32:92:8f:83:78:b0:5d:a4: 8b:7f:a5:8d:d5:c8:87:b1:37:28:17:7a:34:d5:83: 29:8c:e8:d1:1d:a2:df:4d:c5:94:22:4d:0e:75:92: 20:bb:8b:b4:08:85:fb:17:1e:8b:f3:86:b5:b5:5c: 63:9f:fa:3e:e7:52:7c:b6:c6:2a:a3:79:37:44:e0: fc:cd:0b:a1:fc:3c:42:fe:ee:a1:11:b1:c0:a4:17: fb:77:5f:89:ae:7c:55:37:0e:75:8e:93:a8:3a:c3: 34:1b:24:2f:39:87:2c:ee:f0:70:7e:d4:70:0d:db: 29:af Exponent: 65537 (0x10001) X509v3 extensions: X509v3 Basic Constraints: CA:FALSE Netscape Comment: Easy-RSA Generated Certificate X509v3 Subject Key Identifier: 18:6E:2E:76:45:FE:0D:4D:66:76:B6:4D:97:AE:DD:87:27:F0:42:A2 X509v3 Authority Key Identifier: keyid:FB:03:F2:3E:31:9D:6C:14:52:7D:8E:29:18:92:7E:75:43:7C:09:F9 DirName:/C=ZZ/ST=QA/L=Nowhere/O=Buildbot/OU=Development Team/CN=Buildbot CA/name=EasyRSA/emailAddress=buildbot@integration.test serial:B1:2A:2E:B0:BF:9B:5C:37 X509v3 Extended Key Usage: TLS Web Client Authentication X509v3 Key Usage: Digital Signature X509v3 Subject Alternative Name: DNS:127.0.0.1 Signature Algorithm: sha256WithRSAEncryption 58:d9:74:e7:ce:32:aa:b1:a7:dc:06:23:c6:bd:76:b4:3b:7b: 01:ec:82:61:b7:80:7e:ba:c9:ca:a0:48:40:ef:3e:ca:1c:55: 0d:64:3f:80:8c:01:5f:c0:2e:a3:b6:bd:ec:67:29:d6:cf:3e: f4:d2:b9:3b:70:84:95:d8:6d:81:dd:dc:07:6a:15:0c:48:ea: dd:b8:93:55:6f:3f:0d:6f:95:57:d3:dc:e4:a1:60:fd:d4:1b: 33:eb:b1:95:14:c0:65:c7:aa:95:f3:a6:0b:8b:73:fa:77:33: 61:68:e8:fd:cd:f5:1a:a4:c4:6b:78:5d:f6:3b:23:be:f4:92: 
88:dc:42:d5:cb:04:96:0b:e5:a7:61:ad:1a:68:ef:8f:38:1f: cf:a0:de:5a:aa:27:e2:fb:98:de:eb:76:1b:a4:0c:2c:7b:8f: 38:14:21:28:f2:cb:c6:78:9f:43:c7:f6:9e:e9:49:54:fa:ff: 36:67:ee:69:2b:d2:3b:2d:08:25:7c:5f:f5:49:0a:23:c1:e3: 8b:4b:09:a5:15:95:60:02:9f:91:bf:64:9c:a8:99:9a:7a:bf: 7a:45:58:c2:0d:b1:da:f0:73:96:0e:9d:fd:f6:a3:02:8f:dc: fe:77:40:16:64:23:57:7f:87:d5:5b:8e:5a:3d:f1:2a:29:e2: c4:ea:d7:43 -----BEGIN CERTIFICATE----- MIIFTjCCBDagAwIBAgIBATANBgkqhkiG9w0BAQsFADCBrDELMAkGA1UEBhMCWlox CzAJBgNVBAgTAlFBMRAwDgYDVQQHEwdOb3doZXJlMREwDwYDVQQKEwhCdWlsZGJv dDEZMBcGA1UECxMQRGV2ZWxvcG1lbnQgVGVhbTEUMBIGA1UEAxMLQnVpbGRib3Qg Q0ExEDAOBgNVBCkTB0Vhc3lSU0ExKDAmBgkqhkiG9w0BCQEWGWJ1aWxkYm90QGlu dGVncmF0aW9uLnRlc3QwHhcNMTYwOTAyMTIxMDE3WhcNMjYwODMxMTIxMDE3WjCB qjELMAkGA1UEBhMCWloxCzAJBgNVBAgTAlFBMRAwDgYDVQQHEwdOb3doZXJlMREw DwYDVQQKEwhCdWlsZGJvdDEZMBcGA1UECxMQRGV2ZWxvcG1lbnQgVGVhbTESMBAG A1UEAxMJMTI3LjAuMC4xMRAwDgYDVQQpEwdFYXN5UlNBMSgwJgYJKoZIhvcNAQkB FhlidWlsZGJvdEBpbnRlZ3JhdGlvbi50ZXN0MIIBIjANBgkqhkiG9w0BAQEFAAOC AQ8AMIIBCgKCAQEAnuVujYOJbjxFejctz9ykNzgwuFjLULd40vUR5OQ73j8C8LVL KvRO5czw589DpDZaImuJPqrE7yx1OsxD5I3VmU4fCKY9Ni1ygBB7UiBEmsfua0Uv Qc0OPt1ZAeu7ESzL5DS9Y9lzhJA22ZsbG0/QFRKJ372mPM9+W/ULTeEYRx98WOQq rhf6wRNkbwZ4MpKPg3iwXaSLf6WN1ciHsTcoF3o01YMpjOjRHaLfTcWUIk0OdZIg u4u0CIX7Fx6L84a1tVxjn/o+51J8tsYqo3k3ROD8zQuh/DxC/u6hEbHApBf7d1+J rnxVNw51jpOoOsM0GyQvOYcs7vBwftRwDdsprwIDAQABo4IBeTCCAXUwCQYDVR0T BAIwADAtBglghkgBhvhCAQ0EIBYeRWFzeS1SU0EgR2VuZXJhdGVkIENlcnRpZmlj YXRlMB0GA1UdDgQWBBQYbi52Rf4NTWZ2tk2Xrt2HJ/BCojCB4QYDVR0jBIHZMIHW gBT7A/I+MZ1sFFJ9jikYkn51Q3wJ+aGBsqSBrzCBrDELMAkGA1UEBhMCWloxCzAJ BgNVBAgTAlFBMRAwDgYDVQQHEwdOb3doZXJlMREwDwYDVQQKEwhCdWlsZGJvdDEZ MBcGA1UECxMQRGV2ZWxvcG1lbnQgVGVhbTEUMBIGA1UEAxMLQnVpbGRib3QgQ0Ex EDAOBgNVBCkTB0Vhc3lSU0ExKDAmBgkqhkiG9w0BCQEWGWJ1aWxkYm90QGludGVn cmF0aW9uLnRlc3SCCQCxKi6wv5tcNzATBgNVHSUEDDAKBggrBgEFBQcDAjALBgNV HQ8EBAMCB4AwFAYDVR0RBA0wC4IJMTI3LjAuMC4xMA0GCSqGSIb3DQEBCwUAA4IB AQBY2XTnzjKqsafcBiPGvXa0O3sB7IJht4B+usnKoEhA7z7KHFUNZD+AjAFfwC6j 
tr3sZynWzz700rk7cISV2G2B3dwHahUMSOrduJNVbz8Nb5VX09zkoWD91Bsz67GV FMBlx6qV86YLi3P6dzNhaOj9zfUapMRreF32OyO+9JKI3ELVywSWC+WnYa0aaO+P OB/PoN5aqifi+5je63YbpAwse484FCEo8svGeJ9Dx/ae6UlU+v82Z+5pK9I7LQgl fF/1SQojweOLSwmlFZVgAp+Rv2ScqJmaer96RVjCDbHa8HOWDp399qMCj9z+d0AW ZCNXf4fVW45aPfEqKeLE6tdD -----END CERTIFICATE----- buildbot-3.4.0/master/buildbot/test/integration/pki/127.0.0.1.key000066400000000000000000000032501413250514000241570ustar00rootroot00000000000000-----BEGIN PRIVATE KEY----- MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCe5W6Ng4luPEV6 Ny3P3KQ3ODC4WMtQt3jS9RHk5DvePwLwtUsq9E7lzPDnz0OkNloia4k+qsTvLHU6 zEPkjdWZTh8Ipj02LXKAEHtSIESax+5rRS9BzQ4+3VkB67sRLMvkNL1j2XOEkDbZ mxsbT9AVEonfvaY8z35b9QtN4RhHH3xY5CquF/rBE2RvBngyko+DeLBdpIt/pY3V yIexNygXejTVgymM6NEdot9NxZQiTQ51kiC7i7QIhfsXHovzhrW1XGOf+j7nUny2 xiqjeTdE4PzNC6H8PEL+7qERscCkF/t3X4mufFU3DnWOk6g6wzQbJC85hyzu8HB+ 1HAN2ymvAgMBAAECggEADV5sYMeyZm33woKl/hkoT+UQZFJEOPRW3Bj2enWhe999 VddLDcAkaz1E/5v2qvhPuRmnIHipvR3Wdy38gFxWnmFuRwIFoGtOeOvqFEzWuNcd fjUB9t1T14I0HO9Ce/1y4i51yNLg30Rq+QAN1cxvS3aV1xdTx0YF8aK6YsEPk9w2 XbBz7ujSRFE3/37uMboUNVLbHflu0l8UauJBbGm4Z4l1VALf1r38j2q3qEbJ9T+m uKu1SvjyNeqTb/wZPPYfBBe8TEHiilh+mif1fUN0F8Q/kQnSDgpTy1r94sfvuvwU E/+GR/h8Eawv6dW5TfoRMP80vMiA6vgSWf0tcQtwYQKBgQDOODGFoyjstpzTZ8P+ o2oFi/spwzxhtiMvA1dmb9xzmywDCJI7A6UmKn51wdFgA05O7RINF+daNL0JHzmK 0NBiNrChOIYKxNEBzjT0AC35ktUoEWt3EoWyuiDtqshkZE0ZbUaCGwI8R6zv+CMN yL5hB99V5sla43F8kblXlXAcqQKBgQDFQMmY6IrnLMAXuiW0Tj2G7ifM8sdlIVqv VCDjqWQjY21CuYZvQBphQD67rUSa0QPzDXK1FOnYEcZ31qyVvxvo+w+nGu7rmswX VpLmuegaKxzatW7PpLRXvC2P2WjTboHZlgxLqXKqzWJgcd87p6AF6QBtbT2CZnKK uBuWF59ylwKBgExJVWiOdzE6TMGX/VVRtoLeycclRk0PR+y4W7i1YOQTXzCwFwhl zM5VofqF/KJ8Fpfz812MnjUslVGuj2be4He0q5q4gj2xmXAFjGlHN7q/qsLrzsl5 vKPlXxEMwJ0CzdK+LfsraqRKD5umO7F5tZPHicMJYSuSQAVJEztnONYhAoGBAJNg sr+Cj7Xl46hWtrEe/C2CZ05j3sMaxqzVCLXQ7DbcpNgD0gPxO0SKQdTrwqSBopfI 5nmRpJ6BuW30gYJpBatvWeSa5QQ35mFRl/S31kknCSoIAUE3aF9dBBXEdOP0XyR5 TbqCYmBnkCdLLWVe+tsvmdgolJqHfPFUWZgtEj8FAoGAE6BFO+Lt9LSauo+JLycj 
PDWWnYP4ZhhI7loIT+n7Jw7eDPigLkYmUe7h3XhyQwz+mCPR4tbDUo9vw22KGLNW kUIrOSbYNIXFM3ZsbLIXhRqUNlhWuqfo/IidJv59iFsnsK0liBa2KjxPMIE2rn77 kkBS4k2hMfpgdtp4IkGCThY= -----END PRIVATE KEY----- buildbot-3.4.0/master/buildbot/test/integration/pki/ca/000077500000000000000000000000001413250514000230645ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/integration/pki/ca/ca.crt000066400000000000000000000033611413250514000241640ustar00rootroot00000000000000-----BEGIN CERTIFICATE----- MIIE9DCCA9ygAwIBAgIJALEqLrC/m1w3MA0GCSqGSIb3DQEBCwUAMIGsMQswCQYD VQQGEwJaWjELMAkGA1UECBMCUUExEDAOBgNVBAcTB05vd2hlcmUxETAPBgNVBAoT CEJ1aWxkYm90MRkwFwYDVQQLExBEZXZlbG9wbWVudCBUZWFtMRQwEgYDVQQDEwtC dWlsZGJvdCBDQTEQMA4GA1UEKRMHRWFzeVJTQTEoMCYGCSqGSIb3DQEJARYZYnVp bGRib3RAaW50ZWdyYXRpb24udGVzdDAeFw0xNjA5MDIxMjA5NTJaFw0yNjA4MzEx MjA5NTJaMIGsMQswCQYDVQQGEwJaWjELMAkGA1UECBMCUUExEDAOBgNVBAcTB05v d2hlcmUxETAPBgNVBAoTCEJ1aWxkYm90MRkwFwYDVQQLExBEZXZlbG9wbWVudCBU ZWFtMRQwEgYDVQQDEwtCdWlsZGJvdCBDQTEQMA4GA1UEKRMHRWFzeVJTQTEoMCYG CSqGSIb3DQEJARYZYnVpbGRib3RAaW50ZWdyYXRpb24udGVzdDCCASIwDQYJKoZI hvcNAQEBBQADggEPADCCAQoCggEBALJZcC9j4XYBi1fYT/fibY2FRWn6Qh74b1Pg I7iIde6Sf3DPdh/ogYvZAT+cIlkZdo4v326d0EkuYKcywDvho8UeET6sIYhuHPDW lRl1Ret6ylxpbEfxFNvMoEGNhYAP0C6QS2eWEP9LkV2lCuMQtWWzdedjk+efqBjR Gozaim0lr/5lx7bnVx0oRLAgbI5/9Ukbopansfr+Cp9CpFpbNPGZSmELzC3FPKXK 5tycj8WEqlywlha2/VRnCZfYefB3aAuQqQilLh+QHyhn6hzc26+n5B0l8QvrMkOX atKdznMLzJWGxS7UwmDKcsolcMAW+82BZ8nUCBPF3U5PkTLO540CAwEAAaOCARUw ggERMB0GA1UdDgQWBBT7A/I+MZ1sFFJ9jikYkn51Q3wJ+TCB4QYDVR0jBIHZMIHW gBT7A/I+MZ1sFFJ9jikYkn51Q3wJ+aGBsqSBrzCBrDELMAkGA1UEBhMCWloxCzAJ BgNVBAgTAlFBMRAwDgYDVQQHEwdOb3doZXJlMREwDwYDVQQKEwhCdWlsZGJvdDEZ MBcGA1UECxMQRGV2ZWxvcG1lbnQgVGVhbTEUMBIGA1UEAxMLQnVpbGRib3QgQ0Ex EDAOBgNVBCkTB0Vhc3lSU0ExKDAmBgkqhkiG9w0BCQEWGWJ1aWxkYm90QGludGVn cmF0aW9uLnRlc3SCCQCxKi6wv5tcNzAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEB CwUAA4IBAQCJGJVMAmwZRK/mRqm9E0e3s4YGmYT2jwX5IX17XljEy+1cS4huuZW2 33CFpslkT1MN/r8IIZWilxT/lTujHyt4eERGjE1oRVKU8rlTH8WUjFzPIVu7nkte 
09abqynAoec8aQukg79NRCY1l/E2/WzfnUt3yTgKPfZmzoiN0K+hH4gVlWtrizPA LaGwoslYYTA6jHNEeMm8OQLNf17OTmAa7EpeIgVpLRCieI9S3JIG4WYU8fVkeuiU cB439SdixU4cecVjNfFDpq6JM8N6+DQoYOSNRt9Dy0ioGyx5D4lWoIQ+BmXQENal gw+XLyejeNTNgLOxf9pbNYMJqxhkTkoE -----END CERTIFICATE----- buildbot-3.4.0/master/buildbot/test/integration/test_URLs.py000066400000000000000000000042731413250514000241620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import runtime from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment # and make sure the UrlForBuild renderable is working class UrlForBuildMaster(RunMasterBase): proto = "null" @defer.inlineCallbacks def test_url(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], SUCCESS) if runtime.platformType == 'win32': command = "echo http://localhost:8080/#builders/1/builds/1" else: command = "echo 'http://localhost:8080/#builders/1/builds/1'" self.assertIn(command, build['steps'][1]['logs'][0]['contents']['content']) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers, util c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() # do a bunch of transfer to exercise the protocol f.addStep(steps.ShellCommand(command=["echo", util.URLForBuild])) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f) ] return c buildbot-3.4.0/master/buildbot/test/integration/test_configs.py000066400000000000000000000067021413250514000247640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.python import util from twisted.trial import unittest from buildbot import config from buildbot.scripts import runner from buildbot.test.util import dirs from buildbot.test.util.warnings import assertNotProducesWarnings from buildbot.warnings import DeprecatedApiWarning class RealConfigs(dirs.DirsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('basedir') self.basedir = os.path.abspath('basedir') self.filename = os.path.abspath("test.cfg") def tearDown(self): self.tearDownDirs() def test_sample_config(self): filename = util.sibpath(runner.__file__, 'sample.cfg') with assertNotProducesWarnings(DeprecatedApiWarning): config.FileLoader(self.basedir, filename).loadConfig() def test_0_9_0b5_api_renamed_config(self): with open(self.filename, "w") as f: f.write(sample_0_9_0b5_api_renamed) with assertNotProducesWarnings(DeprecatedApiWarning): config.FileLoader(self.basedir, self.filename).loadConfig() # sample.cfg from various versions, with comments stripped. Adjustments made # for compatibility are marked with comments # Template for master configuration just after worker renaming. 
sample_0_9_0b5_api_renamed = """\ from buildbot.plugins import * c = BuildmasterConfig = {} c['workers'] = [worker.Worker("example-worker", "pass")] c['protocols'] = {'pb': {'port': 9989}} c['change_source'] = [] c['change_source'].append(changes.GitPoller( 'https://github.com/buildbot/hello-world.git', workdir='gitpoller-workdir', branch='master', pollinterval=300)) c['schedulers'] = [] c['schedulers'].append(schedulers.SingleBranchScheduler( name="all", change_filter=util.ChangeFilter(branch='master'), treeStableTimer=None, builderNames=["runtests"])) c['schedulers'].append(schedulers.ForceScheduler( name="force", builderNames=["runtests"])) factory = util.BuildFactory() factory.addStep(steps.Git(repourl='https://github.com/buildbot/hello-world.git', mode='incremental')) factory.addStep(steps.ShellCommand(command=["trial", "hello"], env={"PYTHONPATH": "."})) c['builders'] = [] c['builders'].append( util.BuilderConfig(name="runtests", workernames=["example-worker"], factory=factory)) c['title'] = "Pyflakes" c['titleURL'] = "https://launchpad.net/pyflakes" c['buildbotURL'] = "http://localhost:8010/" c['www'] = dict(port=8010, plugins=dict(waterfall_view={}, console_view={})) c['db'] = { 'db_url' : "sqlite:///state.sqlite", } """ # noqa pylint: disable=line-too-long buildbot-3.4.0/master/buildbot/test/integration/test_custom_buildstep.py000066400000000000000000000114761413250514000267250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import error from buildbot.config import BuilderConfig from buildbot.process import buildstep from buildbot.process import logobserver from buildbot.process import results from buildbot.process.factory import BuildFactory from buildbot.test.util.integration import RunFakeMasterTestCase class TestLogObserver(logobserver.LogObserver): def __init__(self): self.observed = [] def outReceived(self, txt): self.observed.append(txt) class Latin1ProducingCustomBuildStep(buildstep.BuildStep): @defer.inlineCallbacks def run(self): _log = yield self.addLog('xx') output_str = '\N{CENT SIGN}' yield _log.addStdout(output_str) yield _log.finish() return results.SUCCESS class BuildStepWithFailingLogObserver(buildstep.BuildStep): @defer.inlineCallbacks def run(self): self.addLogObserver('xx', logobserver.LineConsumerLogObserver(self.log_consumer)) _log = yield self.addLog('xx') yield _log.addStdout('line1\nline2\n') yield _log.finish() return results.SUCCESS def log_consumer(self): _, _ = yield raise RuntimeError('fail') class FailingCustomStep(buildstep.BuildStep): flunkOnFailure = True def __init__(self, exception=buildstep.BuildStepFailed, *args, **kwargs): super().__init__(*args, **kwargs) self.exception = exception @defer.inlineCallbacks def run(self): yield defer.succeed(None) raise self.exception() class RunSteps(RunFakeMasterTestCase): @defer.inlineCallbacks def create_config_for_step(self, step): config_dict = { 'builders': [ BuilderConfig(name="builder", workernames=["worker1"], factory=BuildFactory([step]) ), ], 'workers': [self.createLocalWorker('worker1')], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 
'multiMaster': True, } yield self.setup_master(config_dict) builder_id = yield self.master.data.updates.findBuilderId('builder') return builder_id @defer.inlineCallbacks def test_step_raising_buildstepfailed_in_start(self): builder_id = yield self.create_config_for_step(FailingCustomStep()) yield self.do_test_build(builder_id) yield self.assertBuildResults(1, results.FAILURE) @defer.inlineCallbacks def test_step_raising_exception_in_start(self): builder_id = yield self.create_config_for_step(FailingCustomStep(exception=ValueError)) yield self.do_test_build(builder_id) yield self.assertBuildResults(1, results.EXCEPTION) self.assertEqual(len(self.flushLoggedErrors(ValueError)), 1) @defer.inlineCallbacks def test_step_raising_connectionlost_in_start(self): ''' Check whether we can recover from raising ConnectionLost from a step if the worker did not actually disconnect ''' step = FailingCustomStep(exception=error.ConnectionLost) builder_id = yield self.create_config_for_step(step) yield self.do_test_build(builder_id) yield self.assertBuildResults(1, results.EXCEPTION) test_step_raising_connectionlost_in_start.skip = "Results in infinite loop" @defer.inlineCallbacks def test_step_raising_in_log_observer(self): step = BuildStepWithFailingLogObserver() builder_id = yield self.create_config_for_step(step) yield self.do_test_build(builder_id) yield self.assertBuildResults(1, results.EXCEPTION) yield self.assertStepStateString(2, "finished (exception)") self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_Latin1ProducingCustomBuildStep(self): step = Latin1ProducingCustomBuildStep(logEncoding='latin-1') builder_id = yield self.create_config_for_step(step) yield self.do_test_build(builder_id) yield self.assertLogs(1, { 'xx': 'o\N{CENT SIGN}\n', }) buildbot-3.4.0/master/buildbot/test/integration/test_customservices.py000066400000000000000000000102061413250514000264040ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.util.integration import RunFakeMasterTestCase # This integration test creates a master and worker environment, # with one builder and a custom step # The custom step is using a CustomService, in order to calculate its result # we make sure that we can reconfigure the master while build is running class CustomServiceMaster(RunFakeMasterTestCase): def setUp(self): super().setUp() self.num_reconfig = 0 def create_master_config(self): self.num_reconfig += 1 from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.steps.shell import ShellCommand from buildbot.util.service import BuildbotService class MyShellCommand(ShellCommand): def getResultSummary(self): service = self.master.service_manager.namedServices['myService'] return dict(step="num reconfig: %d" % (service.num_reconfig,)) class MyService(BuildbotService): name = "myService" def reconfigService(self, num_reconfig): self.num_reconfig = num_reconfig return defer.succeed(None) config_dict = { 'builders': [ BuilderConfig(name="builder", workernames=["worker1"], factory=BuildFactory([MyShellCommand(command='echo hei')])), ], 'workers': [self.createLocalWorker('worker1')], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 
'multiMaster': True, 'db_url': 'sqlite://', # we need to make sure reconfiguration uses the same URL 'services': [MyService(num_reconfig=self.num_reconfig)] } if self.num_reconfig == 3: config_dict['services'].append(MyService(name="myService2", num_reconfig=self.num_reconfig)) return config_dict @defer.inlineCallbacks def test_custom_service(self): yield self.setup_master(self.create_master_config()) yield self.do_test_build_by_name('builder') self.assertStepStateString(1, 'worker worker1 ready') self.assertStepStateString(2, 'num reconfig: 1') myService = self.master.service_manager.namedServices['myService'] self.assertEqual(myService.num_reconfig, 1) self.assertTrue(myService.running) # We do several reconfig, and make sure the service # are reconfigured as expected yield self.reconfig_master(self.create_master_config()) yield self.do_test_build_by_name('builder') self.assertEqual(myService.num_reconfig, 2) self.assertStepStateString(1, 'worker worker1 ready') self.assertStepStateString(2, 'num reconfig: 1') yield self.reconfig_master(self.create_master_config()) myService2 = self.master.service_manager.namedServices['myService2'] self.assertTrue(myService2.running) self.assertEqual(myService2.num_reconfig, 3) self.assertEqual(myService.num_reconfig, 3) yield self.reconfig_master(self.create_master_config()) # second service removed self.assertNotIn('myService2', self.master.service_manager.namedServices) self.assertFalse(myService2.running) self.assertEqual(myService2.num_reconfig, 3) self.assertEqual(myService.num_reconfig, 4) buildbot-3.4.0/master/buildbot/test/integration/test_graphql.py000066400000000000000000000312161413250514000247700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import os from twisted.internet import defer from twisted.trial import unittest from buildbot.data import connector as dataconnector from buildbot.data.graphql import GraphQLConnector from buildbot.mq import connector as mqconnector from buildbot.process.results import SUCCESS from buildbot.schedulers.forcesched import ForceScheduler from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util import toJson try: from ruamel.yaml import YAML except ImportError: YAML = None try: import graphql as graphql_core except ImportError: graphql_core = None class GraphQL(unittest.TestCase, TestReactorMixin): if not graphql_core: skip = "graphql-core is required for GraphQL integration tests" master = None def load_yaml(self, f): if YAML is None: # for running the test ruamel is not needed (to avoid a build dependency for distros) import yaml return yaml.safe_load(f) self.yaml = YAML() self.yaml.default_flow_style = False # default is round-trip return self.yaml.load(f) def save_yaml(self, data, f): if YAML is None: raise ImportError("please install ruamel.yaml for test regeneration") self.yaml.dump(data, f) @defer.inlineCallbacks def setUp(self): self.setUpTestReactor(use_asyncio=True) master = fakemaster.make_master(self) master.db = fakedb.FakeDBConnector(self) yield master.db.setServiceParent(master) master.config.mq = {'type': "simple"} master.mq = mqconnector.MQConnector() yield 
master.mq.setServiceParent(master) yield master.mq.setup() master.data = dataconnector.DataConnector() yield master.data.setServiceParent(master) master.graphql = GraphQLConnector() yield master.graphql.setServiceParent(master) master.config.www = {'graphql': {"debug": True}} master.graphql.reconfigServiceWithBuildbotConfig(master.config) self.master = master scheds = [ForceScheduler( name="force", builderNames=["runtests0", "runtests1", "runtests2", "slowruntests"])] self.master.allSchedulers = lambda: scheds yield self.master.startService() yield self.insert_initial_data() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() def insert_initial_data(self): self.master.db.insertTestData([ fakedb.Master(id=1), fakedb.Worker(id=1, name='example-worker'), fakedb.Scheduler(id=1, name='custom', enabled=1), fakedb.Scheduler(id=2, name='all', enabled=2), fakedb.Scheduler(id=3, name='force', enabled=3), fakedb.SchedulerMaster(schedulerid=1, masterid=1), fakedb.SchedulerMaster(schedulerid=2, masterid=1), fakedb.SchedulerMaster(schedulerid=3, masterid=1), fakedb.Builder(id=1, name='runtests1'), fakedb.Builder(id=2, name='runtests2'), fakedb.Builder(id=3, name='runtests3'), fakedb.BuilderMaster(id=1, builderid=1, masterid=1), fakedb.BuilderMaster(id=2, builderid=2, masterid=1), fakedb.BuilderMaster(id=3, builderid=3, masterid=1), fakedb.Tag(id=1, name='tag1'), fakedb.Tag(id=2, name='tag12'), fakedb.Tag(id=3, name='tag23'), fakedb.BuildersTags(id=1, builderid=1, tagid=1), fakedb.BuildersTags(id=2, builderid=1, tagid=2), fakedb.BuildersTags(id=3, builderid=2, tagid=2), fakedb.BuildersTags(id=4, builderid=2, tagid=3), fakedb.BuildersTags(id=5, builderid=3, tagid=3), fakedb.Buildset(id=1, results=SUCCESS, reason="Force reason 1", submitted_at=100000, complete_at=100110, complete=1), fakedb.Buildset(id=2, results=SUCCESS, reason="Force reason 2", submitted_at=100200, complete_at=100330, complete=1), fakedb.Buildset(id=3, results=SUCCESS, reason="Force reason 
3", submitted_at=100400, complete_at=100550, complete=1), fakedb.BuildsetProperty(buildsetid=1, property_name='scheduler', property_value='["custom", "Scheduler"]'), fakedb.BuildsetProperty(buildsetid=2, property_name='scheduler', property_value='["all", "Scheduler"]'), fakedb.BuildsetProperty(buildsetid=3, property_name='scheduler', property_value='["force", "Scheduler"]'), fakedb.BuildsetProperty(buildsetid=3, property_name='owner', property_value='["some@example.com", "Force Build Form"]'), fakedb.SourceStamp(id=1, branch='master', revision='1234abcd'), fakedb.Change(changeid=1, branch='master', revision='1234abcd', sourcestampid=1), fakedb.ChangeProperty(changeid=1, property_name="owner", property_value='["me@example.com", "change"]'), fakedb.ChangeProperty(changeid=1, property_name="other_prop", property_value='["value", "change"]'), fakedb.BuildsetSourceStamp(id=1, buildsetid=1, sourcestampid=1), fakedb.BuildsetSourceStamp(id=2, buildsetid=2, sourcestampid=1), fakedb.BuildsetSourceStamp(id=3, buildsetid=3, sourcestampid=1), fakedb.BuildRequest(id=1, buildsetid=1, builderid=1, results=SUCCESS, submitted_at=100001, complete_at=100109, complete=1), fakedb.BuildRequest(id=2, buildsetid=2, builderid=1, results=SUCCESS, submitted_at=100201, complete_at=100329, complete=1), fakedb.BuildRequest(id=3, buildsetid=3, builderid=2, results=SUCCESS, submitted_at=100401, complete_at=100549, complete=1), fakedb.Build(id=1, number=1, buildrequestid=1, builderid=1, workerid=1, masterid=1001, started_at=100002, complete_at=100108, state_string='build successful', results=SUCCESS), fakedb.Build(id=2, number=2, buildrequestid=2, builderid=1, workerid=1, masterid=1001, started_at=100202, complete_at=100328, state_string='build successful', results=SUCCESS), fakedb.Build(id=3, number=1, buildrequestid=3, builderid=2, workerid=1, masterid=1001, started_at=100402, complete_at=100548, state_string='build successful', results=SUCCESS), fakedb.BuildProperty(buildid=3, name='reason', 
value='"force build"', source="Force Build Form"), fakedb.BuildProperty(buildid=3, name='owner', value='"some@example.com"', source="Force Build Form"), fakedb.BuildProperty(buildid=3, name='scheduler', value='"force"', source="Scheduler"), fakedb.BuildProperty(buildid=3, name='buildername', value='"runtests3"', source="Builder"), fakedb.BuildProperty(buildid=3, name='workername', value='"example-worker"', source="Worker"), fakedb.Step(id=1, number=1, name='step1', buildid=1, started_at=100010, complete_at=100019, state_string='step1 done'), fakedb.Step(id=2, number=2, name='step2', buildid=1, started_at=100020, complete_at=100029, state_string='step2 done'), fakedb.Step(id=3, number=3, name='step3', buildid=1, started_at=100030, complete_at=100039, state_string='step3 done'), fakedb.Step(id=11, number=1, name='step1', buildid=2, started_at=100210, complete_at=100219, state_string='step1 done'), fakedb.Step(id=12, number=2, name='step2', buildid=2, started_at=100220, complete_at=100229, state_string='step2 done'), fakedb.Step(id=13, number=3, name='step3', buildid=2, started_at=100230, complete_at=100239, state_string='step3 done'), fakedb.Step(id=21, number=1, name='step1', buildid=3, started_at=100410, complete_at=100419, state_string='step1 done'), fakedb.Step(id=22, number=2, name='step2', buildid=3, started_at=100420, complete_at=100429, state_string='step2 done'), fakedb.Step(id=23, number=3, name='step3', buildid=3, started_at=100430, complete_at=100439, state_string='step3 done'), fakedb.Log(id=1, name='stdio', slug='stdio', stepid=1, complete=1, num_lines=10), fakedb.Log(id=2, name='stdio', slug='stdio', stepid=2, complete=1, num_lines=20), fakedb.Log(id=3, name='stdio', slug='stdio', stepid=3, complete=1, num_lines=30), fakedb.Log(id=11, name='stdio', slug='stdio', stepid=11, complete=1, num_lines=30), fakedb.Log(id=12, name='stdio', slug='stdio', stepid=12, complete=1, num_lines=40), fakedb.Log(id=13, name='stdio', slug='stdio', stepid=13, complete=1, 
num_lines=50), fakedb.Log(id=21, name='stdio', slug='stdio', stepid=21, complete=1, num_lines=50), fakedb.Log(id=22, name='stdio', slug='stdio', stepid=22, complete=1, num_lines=60), fakedb.Log(id=23, name='stdio', slug='stdio', stepid=23, complete=1, num_lines=70), fakedb.LogChunk(logid=1, first_line=0, last_line=2, content='o line1\no line2\n'), fakedb.LogChunk(logid=1, first_line=2, last_line=3, content='o line3\n'), fakedb.LogChunk(logid=2, first_line=0, last_line=4, content='o line1\no line2\no line3\no line4\n'), ]) @defer.inlineCallbacks def test_examples_from_yaml(self): """This test takes input from yaml file containing queries to execute and expected results. In order to ease writing of tests, if the expected key is not found, it is automatically generated, so developer only has to review results Full regen can still be done with regen local variable just below """ regen = False need_save = False fn = os.path.join(os.path.dirname(__file__), "test_graphql_queries.yaml") with open(fn) as f: data = self.load_yaml(f) focussed_data = [test for test in data if test.get('focus')] if not focussed_data: focussed_data = data for test in focussed_data: query = test['query'] result = yield self.master.graphql.query( query ) self.assertIsNone(result.errors) if 'expected' not in test or regen: need_save = True test['expected'] = result.data else: # remove ruamel metadata before compare (it is needed for round-trip regen, # but confuses the comparison) result_data = json.loads(json.dumps(result.data, default=toJson)) expected = json.loads(json.dumps(test['expected'], default=toJson)) self.assertEqual( result_data, expected, f"for {query}") if need_save: with open(fn, 'w') as f: self.save_yaml(data, f) @defer.inlineCallbacks def test_buildrequests_builds(self): data = yield self.master.graphql.query( "{buildrequests{buildrequestid, builds{number, buildrequestid}}}" ) self.assertEqual(data.errors, None) for br in data.data["buildrequests"]: for build in br["builds"]: 
self.assertEqual(build["buildrequestid"], br["buildrequestid"]) buildbot-3.4.0/master/buildbot/test/integration/test_graphql_queries.yaml000066400000000000000000000071361413250514000270430ustar00rootroot00000000000000# to test a single testcase, add "focus: true" inside it, e.g: - # focus: true query: | {masters{name, builders(name:"runtests1"){name, builds(limit:1){number}}}} expected: masters: - name: some:master builders: - name: runtests1 builds: - number: 1 - query: | {masters{name, builders(name:"runtests1"){name, builds(limit:10){number}}}} expected: masters: - name: some:master builders: - name: runtests1 builds: - number: 1 - number: 2 - query: | {builders(name:"runtests2"){name, builds(limit:1){number}}} expected: builders: - name: runtests2 builds: - number: 1 - query: | {builders(name:"runtests3"){name, builds(limit:1){number}}} expected: builders: - name: runtests3 builds: [] - query: | {buildrequests(limit:1){buildrequestid, builds(limit:1){number}}} expected: buildrequests: - buildrequestid: 1 builds: - number: 1 - query: | {changes{author, builds(limit:1){number}}} expected: changes: - author: frank builds: - number: 1 - query: | {workers{name, builds(limit:1){number}}} expected: workers: - name: example-worker builds: - number: 1 - query: | {builds(limit:1){number, steps(limit:1){name}}} expected: builds: - number: 1 steps: - name: step1 - query: | { builds(limit:1){ number steps(limit:1,offset:1){ name logs { name num_lines } } } } expected: builds: - number: 1 steps: - name: step2 logs: - name: stdio num_lines: 20 - query: | { build(buildid:1){ step(stepid:1){ log(logid:1){ logchunks(offset:1, limit:2){ content } } } } } expected: build: step: log: logchunks: content: "o line2\no line3\n" - query: | { logchunks(logid: 1, offset:1, limit:2){ content } } expected: logchunks: content: "o line2\no line3\n" - query: | { sourcestamp(ssid: 1){ created_at } } expected: sourcestamp: created_at: 89834834 - query: | { sourcestamps { changes {changeid} } } 
expected: sourcestamps: - changes: - changeid: 1 - query: | {builders(limit:1){name, forceschedulers{name}}} expected: builders: - name: runtests1 forceschedulers: - name: force - query: |- {builders(limit:1){name, schedulers{name}}} expected: builders: - name: runtests1 schedulers: - name: custom - name: all - name: force - query: | {builders(builderid:2){name, builderid, buildrequests{buildrequestid, builderid}}} expected: builders: - name: runtests2 builderid: 2 buildrequests: - buildrequestid: 3 builderid: 2 - query: | {builds(buildid:3){_properties(name__in: ["reason", "owner"]){name, value, source }}} expected: builds: - _properties: - name: reason value: '"\"force build\""' source: Force Build Form - name: owner value: '"\"some@example.com\""' source: Force Build Form - query: | {buildsets(bsid:3){_properties(name__in: ["reason", "owner"]){name, value }}} expected: buildsets: - _properties: - name: owner value: '"some@example.com"' buildbot-3.4.0/master/buildbot/test/integration/test_integration_force_with_patch.py000066400000000000000000000063251413250514000312500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps.source.base import Source from buildbot.test.util.decorators import skipUnlessPlatformIs from buildbot.test.util.integration import RunMasterBase # a simple patch which adds a Makefile PATCH = b"""diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..8a5cf80 --- /dev/null +++ b/Makefile @@ -0,0 +1,2 @@ +all: +\techo OK """ class MySource(Source): """A source class which only applies the patch""" @defer.inlineCallbacks def run_vc(self, branch, revision, patch): self.stdio_log = yield self.addLogForRemoteCommands("stdio") if patch: yield self.patch(patch) return SUCCESS class ShellMaster(RunMasterBase): @skipUnlessPlatformIs("posix") # make is not installed on windows @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True, forceParams={'foo_patch_body': PATCH}) self.assertEqual(build['buildid'], 1) # if makefile was not properly created, we would have a failure self.assertEqual(build['results'], SUCCESS) @defer.inlineCallbacks def test_shell_no_patch(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) # if no patch, the source step is happy, but the make step cannot find makefile self.assertEqual(build['steps'][1]['results'], SUCCESS) self.assertEqual(build['steps'][2]['results'], FAILURE) self.assertEqual(build['results'], FAILURE) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers, util c['schedulers'] = [ schedulers.ForceScheduler( name="force", codebases=[util.CodebaseParameter( "foo", patch=util.PatchParameter())], builderNames=["testy"])] f = 
BuildFactory() f.addStep(MySource(codebase='foo')) # if the patch was applied correctly, then make will work! f.addStep(steps.ShellCommand(command=["make"])) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/test_integration_mastershell.py000066400000000000000000000041001413250514000302500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builders and a shellcommand step # meant to be a template for integration steps class ShellMaster(RunMasterBase): @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.MasterShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/test_integration_scheduler_reconfigure.py000066400000000000000000000054161413250514000323060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.plugins import schedulers from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builders and a shellcommand step # meant to be a template for integration steps class ShellMaster(RunMasterBase): @defer.inlineCallbacks def test_shell(self): cfg = masterConfig() yield self.setupConfig(cfg) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) # switch the configuration of the scheduler, and make sure the correct builder is run cfg['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched1", builderNames=["testy2"]), schedulers.ForceScheduler( name="sched2", builderNames=["testy1"]) ] yield self.master.reconfig() build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) builder = yield self.master.data.get(('builders', build['builderid'])) self.assertEqual(builder['name'], 'testy2') # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched1", builderNames=["testy1"]), schedulers.ForceScheduler( name="sched2", builderNames=["testy2"]) ] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name=name, workernames=["local1"], factory=f) for name in ['testy1', 'testy2'] ] return c buildbot-3.4.0/master/buildbot/test/integration/test_integration_secrets_with_vault.py000066400000000000000000000116141413250514000316530ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import base64 import subprocess from unittest.case import SkipTest from twisted.internet import defer from buildbot.process.properties import Interpolate from buildbot.secrets.providers.vault import HashiCorpVaultSecretProvider from buildbot.steps.shell import ShellCommand from buildbot.test.util.integration import RunMasterBase from buildbot.test.util.warnings import assertProducesWarning from buildbot.warnings import DeprecatedApiWarning # This integration test creates a master and worker environment, # with one builders and a shellcommand step class SecretsConfig(RunMasterBase): def setUp(self): try: subprocess.check_call(['docker', 'pull', 'vault']) subprocess.check_call(['docker', 'run', '-d', '-e', 'SKIP_SETCAP=yes', '-e', 'VAULT_DEV_ROOT_TOKEN_ID=my_vaulttoken', '-e', 'VAULT_TOKEN=my_vaulttoken', '--name=vault_for_buildbot', '-p', '8200:8200', 'vault']) self.addCleanup(self.remove_container) subprocess.check_call(['docker', 'exec', '-e', 'VAULT_ADDR=http://127.0.0.1:8200/', 'vault_for_buildbot', 'vault', 'kv', 'put', 'secret/key', 'value=word']) subprocess.check_call(['docker', 'exec', '-e', 'VAULT_ADDR=http://127.0.0.1:8200/', 'vault_for_buildbot', 'vault', 'kv', 'put', 'secret/anykey', 'anyvalue=anyword']) subprocess.check_call(['docker', 'exec', '-e', 'VAULT_ADDR=http://127.0.0.1:8200/', 
'vault_for_buildbot', 'vault', 'kv', 'put', 'secret/key1/key2', 'id=val']) except (FileNotFoundError, subprocess.CalledProcessError): raise SkipTest("Vault integration needs docker environment to be setup") def remove_container(self): subprocess.call(['docker', 'rm', '-f', 'vault_for_buildbot']) @defer.inlineCallbacks def do_secret_test(self, secret_specifier, expected_obfuscation, expected_value): with assertProducesWarning(DeprecatedApiWarning): yield self.setupConfig(masterConfig(secret_specifier=secret_specifier)) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) patterns = [ "echo {}".format(expected_obfuscation), base64.b64encode((expected_value + "\n").encode('utf-8')).decode('utf-8'), ] res = yield self.checkBuildStepLogExist(build, patterns) self.assertTrue(res) @defer.inlineCallbacks def test_key(self): yield self.do_secret_test('%(secret:key)s', '', 'word') @defer.inlineCallbacks def test_key_value(self): yield self.do_secret_test('%(secret:key/value)s', '', 'word') @defer.inlineCallbacks def test_any_key(self): yield self.do_secret_test('%(secret:anykey/anyvalue)s', '', 'anyword') @defer.inlineCallbacks def test_nested_key(self): yield self.do_secret_test('%(secret:key1/key2/id)s', '', 'val') def masterConfig(secret_specifier): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] # note that as of December 2018, the vault docker image default to kv # version 2 to be enabled by default c['secretsProviders'] = [HashiCorpVaultSecretProvider( vaultToken='my_vaulttoken', vaultServer="http://localhost:8200", apiVersion=2 )] f = BuildFactory() f.addStep(ShellCommand(command=Interpolate('echo {} | base64'.format(secret_specifier)))) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c 
buildbot-3.4.0/master/buildbot/test/integration/test_integration_secrets_with_vault_hvac.py000066400000000000000000000115511413250514000326540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import base64 import subprocess import time from unittest.case import SkipTest from twisted.internet import defer from buildbot.process.properties import Interpolate from buildbot.secrets.providers.vault_hvac import HashiCorpVaultKvSecretProvider from buildbot.secrets.providers.vault_hvac import VaultAuthenticatorToken from buildbot.steps.shell import ShellCommand from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builders and a shellcommand step class TestVaultHvac(RunMasterBase): def setUp(self): try: subprocess.check_call(['docker', 'pull', 'vault']) subprocess.check_call(['docker', 'run', '-d', '-e', 'SKIP_SETCAP=yes', '-e', 'VAULT_DEV_ROOT_TOKEN_ID=my_vaulttoken', '-e', 'VAULT_TOKEN=my_vaulttoken', '--name=vault_for_buildbot', '-p', '8200:8200', 'vault']) time.sleep(1) # the container needs a little time to setup itself self.addCleanup(self.remove_container) subprocess.check_call(['docker', 'exec', '-e', 'VAULT_ADDR=http://127.0.0.1:8200/', 'vault_for_buildbot', 'vault', 'kv', 'put', 'secret/key', 'value=word']) 
subprocess.check_call(['docker', 'exec', '-e', 'VAULT_ADDR=http://127.0.0.1:8200/', 'vault_for_buildbot', 'vault', 'kv', 'put', 'secret/anykey', 'anyvalue=anyword']) subprocess.check_call(['docker', 'exec', '-e', 'VAULT_ADDR=http://127.0.0.1:8200/', 'vault_for_buildbot', 'vault', 'kv', 'put', 'secret/key1/key2', 'id=val']) except (FileNotFoundError, subprocess.CalledProcessError): raise SkipTest("Vault integration needs docker environment to be setup") def remove_container(self): subprocess.call(['docker', 'rm', '-f', 'vault_for_buildbot']) @defer.inlineCallbacks def do_secret_test(self, secret_specifier, expected_obfuscation, expected_value): yield self.setupConfig(master_config(secret_specifier=secret_specifier)) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) patterns = [ "echo {}".format(expected_obfuscation), base64.b64encode((expected_value + "\n").encode('utf-8')).decode('utf-8'), ] res = yield self.checkBuildStepLogExist(build, patterns) self.assertTrue(res) @defer.inlineCallbacks def test_key(self): yield self.do_secret_test('%(secret:key|value)s', '', 'word') @defer.inlineCallbacks def test_key_any_value(self): yield self.do_secret_test('%(secret:anykey|anyvalue)s', '', 'anyword') @defer.inlineCallbacks def test_nested_key(self): yield self.do_secret_test('%(secret:key1/key2|id)s', '', 'val') def master_config(secret_specifier): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.ForceScheduler(name="force", builderNames=["testy"]) ] # note that as of August 2021, the vault docker image default to kv # version 2 to be enabled by default c['secretsProviders'] = [ HashiCorpVaultKvSecretProvider(authenticator=VaultAuthenticatorToken('my_vaulttoken'), vault_server="http://localhost:8200", secrets_mount="secret") ] f = BuildFactory() f.addStep(ShellCommand(command=Interpolate('echo {} | 
base64'.format(secret_specifier)))) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f) ] return c buildbot-3.4.0/master/buildbot/test/integration/test_integration_template.py000066400000000000000000000046501413250514000275520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builder and a shellcommand step # meant to be a template for integration steps class ShellMaster(RunMasterBase): @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) # if you don't need change, you can just remove this change, and useChange parameter change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True, wantProperties=True) self.assertEqual(build['buildid'], 1) self.assertEqual(build['properties']['owners'], (['me@foo.com'], 'Build')) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers 
c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"]), schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] c['www'] = {'graphql': True} return c buildbot-3.4.0/master/buildbot/test/integration/test_integration_with_secrets.py000066400000000000000000000050551413250514000304420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process.properties import Interpolate from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util.integration import RunMasterBase class SecretsConfig(RunMasterBase): @defer.inlineCallbacks def test_secret(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "") self.assertTrue(res) @defer.inlineCallbacks def test_withsecrets(self): yield self.setupConfig(masterConfig(use_with=True)) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "") self.assertTrue(res) # master configuration def masterConfig(use_with=False): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers, steps c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] c['secretsProviders'] = [FakeSecretStorage( secretdict={"foo": "bar", "something": "more"})] f = BuildFactory() if use_with: secrets_list = [("pathA", Interpolate('%(secret:something)s'))] with f.withSecrets(secrets_list): f.addStep(steps.ShellCommand(command=Interpolate('echo %(secret:foo)s'))) else: f.addSteps([steps.ShellCommand(command=Interpolate('echo %(secret:foo)s'))], withSecrets=[("pathA", Interpolate('%(secret:something)s'))]) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/test_locks.py000066400000000000000000000423731413250514000244530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from buildbot.config import BuilderConfig from buildbot.plugins import util from buildbot.process.factory import BuildFactory from buildbot.process.results import SUCCESS from buildbot.test.fake.step import BuildStepController from buildbot.test.util.integration import RunFakeMasterTestCase from buildbot.util.eventual import flushEventualQueue class Tests(RunFakeMasterTestCase): @defer.inlineCallbacks def create_single_worker_two_builder_lock_config(self, lock_cls, mode): stepcontrollers = [BuildStepController(), BuildStepController()] lock = lock_cls("lock1", maxCount=1) config_dict = { 'builders': [ BuilderConfig(name='builder1', workernames=['worker1'], factory=BuildFactory([stepcontrollers[0].step]), locks=[lock.access(mode)]), BuilderConfig(name='builder2', workernames=['worker1'], factory=BuildFactory([stepcontrollers[1].step]), locks=[lock.access(mode)]), ], 'workers': [ self.createLocalWorker('worker1'), ], 'protocols': {'null': {}}, 'multiMaster': True, } yield self.setup_master(config_dict) builder_ids = [ (yield self.master.data.updates.findBuilderId('builder1')), (yield self.master.data.updates.findBuilderId('builder2')), ] return stepcontrollers, builder_ids @defer.inlineCallbacks def create_single_worker_two_builder_step_lock_config(self, lock_cls, mode): lock = lock_cls("lock1", maxCount=1) stepcontrollers = [BuildStepController(locks=[lock.access(mode)]), 
BuildStepController(locks=[lock.access(mode)])] config_dict = { 'builders': [ BuilderConfig(name='builder1', workernames=['worker1'], factory=BuildFactory([stepcontrollers[0].step])), BuilderConfig(name='builder2', workernames=['worker1'], factory=BuildFactory([stepcontrollers[1].step])), ], 'workers': [ self.createLocalWorker('worker1'), ], 'protocols': {'null': {}}, 'multiMaster': True, } yield self.setup_master(config_dict) builder_ids = [ (yield self.master.data.updates.findBuilderId('builder1')), (yield self.master.data.updates.findBuilderId('builder2')), ] return stepcontrollers, builder_ids @defer.inlineCallbacks def create_two_worker_two_builder_lock_config(self, mode): stepcontrollers = [BuildStepController(), BuildStepController()] master_lock = util.MasterLock("lock1", maxCount=1) config_dict = { 'builders': [ BuilderConfig(name='builder1', workernames=['worker1'], factory=BuildFactory([stepcontrollers[0].step]), locks=[master_lock.access(mode)]), BuilderConfig(name='builder2', workernames=['worker2'], factory=BuildFactory([stepcontrollers[1].step]), locks=[master_lock.access(mode)]), ], 'workers': [ self.createLocalWorker('worker1'), self.createLocalWorker('worker2'), ], 'protocols': {'null': {}}, 'multiMaster': True, } yield self.setup_master(config_dict) builder_ids = [ (yield self.master.data.updates.findBuilderId('builder1')), (yield self.master.data.updates.findBuilderId('builder2')), ] return stepcontrollers, builder_ids @defer.inlineCallbacks def assert_two_builds_created_one_after_another(self, stepcontrollers, builder_ids): # start two builds and verify that a second build starts after the # first is finished yield self.create_build_request([builder_ids[0]]) yield self.create_build_request([builder_ids[1]]) builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), 1) self.assertEqual(builds[0]['results'], None) self.assertEqual(builds[0]['builderid'], builder_ids[0]) stepcontrollers[0].finish_step(SUCCESS) # execute 
Build.releaseLocks which is called eventually yield flushEventualQueue() builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), 2) self.assertEqual(builds[0]['results'], SUCCESS) self.assertEqual(builds[1]['results'], None) self.assertEqual(builds[1]['builderid'], builder_ids[1]) stepcontrollers[1].finish_step(SUCCESS) builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), 2) self.assertEqual(builds[0]['results'], SUCCESS) self.assertEqual(builds[1]['results'], SUCCESS) @defer.inlineCallbacks def assert_two_steps_created_one_after_another(self, stepcontrollers, builder_ids): # start two builds and verify that a second build starts after the # first is finished yield self.create_build_request([builder_ids[0]]) yield self.create_build_request([builder_ids[1]]) builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), 2) self.assertEqual(builds[0]['results'], None) self.assertEqual(builds[0]['builderid'], builder_ids[0]) self.assertEqual(builds[1]['results'], None) self.assertEqual(builds[1]['builderid'], builder_ids[1]) self.assertTrue(stepcontrollers[0].running) self.assertFalse(stepcontrollers[1].running) stepcontrollers[0].finish_step(SUCCESS) yield flushEventualQueue() self.assertFalse(stepcontrollers[0].running) self.assertTrue(stepcontrollers[1].running) builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), 2) self.assertEqual(builds[0]['results'], SUCCESS) self.assertEqual(builds[1]['results'], None) stepcontrollers[1].finish_step(SUCCESS) yield flushEventualQueue() builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), 2) self.assertEqual(builds[0]['results'], SUCCESS) self.assertEqual(builds[1]['results'], SUCCESS) @parameterized.expand([ (util.MasterLock, 'counting'), (util.MasterLock, 'exclusive'), (util.WorkerLock, 'counting'), (util.WorkerLock, 'exclusive'), ]) @defer.inlineCallbacks def test_builder_lock_prevents_concurrent_builds(self, 
lock_cls, mode): ''' Tests whether a builder lock works at all in preventing a build when the lock is taken. ''' stepcontrollers, builder_ids = \ yield self.create_single_worker_two_builder_lock_config(lock_cls, mode) yield self.assert_two_builds_created_one_after_another(stepcontrollers, builder_ids) @parameterized.expand([ (util.MasterLock, 'counting'), (util.MasterLock, 'exclusive'), (util.WorkerLock, 'counting'), (util.WorkerLock, 'exclusive'), ]) @defer.inlineCallbacks def test_step_lock_prevents_concurrent_builds(self, lock_cls, mode): ''' Tests whether a builder lock works at all in preventing a build when the lock is taken. ''' stepcontrollers, builder_ids = \ yield self.create_single_worker_two_builder_step_lock_config( lock_cls, mode) yield self.assert_two_steps_created_one_after_another(stepcontrollers, builder_ids) @parameterized.expand(['counting', 'exclusive']) @defer.inlineCallbacks def test_builder_lock_release_wakes_builds_for_another_builder(self, mode): """ If a builder locks a master lock then the build request distributor must retry running any buildrequests that might have been not scheduled due to unavailability of that lock when the lock becomes available. 
""" stepcontrollers, builder_ids = \ yield self.create_two_worker_two_builder_lock_config(mode) yield self.assert_two_builds_created_one_after_another(stepcontrollers, builder_ids) class TestReconfig(RunFakeMasterTestCase): def create_stepcontrollers(self, count, lock, mode): stepcontrollers = [] for i in range(count): locks = [lock.access(mode)] if lock is not None else [] stepcontrollers.append(BuildStepController(locks=locks)) return stepcontrollers def update_builder_config(self, config_dict, stepcontrollers, lock, mode): config_dict['builders'] = [] for i, stepcontroller in enumerate(stepcontrollers): locks = [lock.access(mode)] if lock is not None else [] b = BuilderConfig(name='builder{}'.format(i), workernames=['worker1'], factory=BuildFactory([stepcontroller.step]), locks=locks) config_dict['builders'].append(b) @defer.inlineCallbacks def create_single_worker_n_builder_lock_config(self, builder_count, lock_cls, max_count, mode): stepcontrollers = self.create_stepcontrollers(builder_count, None, None) lock = lock_cls("lock1", maxCount=max_count) config_dict = { 'builders': [], 'workers': [ self.createLocalWorker('worker1'), ], 'protocols': {'null': {}}, 'multiMaster': True, } self.update_builder_config(config_dict, stepcontrollers, lock, mode) yield self.setup_master(config_dict) builder_ids = [] for i in range(builder_count): builder_ids.append(( yield self.master.data.updates.findBuilderId('builder{}'.format(i)))) return stepcontrollers, config_dict, lock, builder_ids @defer.inlineCallbacks def create_single_worker_n_builder_step_lock_config(self, builder_count, lock_cls, max_count, mode): lock = lock_cls("lock1", maxCount=max_count) stepcontrollers = self.create_stepcontrollers(builder_count, lock, mode) config_dict = { 'builders': [], 'workers': [ self.createLocalWorker('worker1'), ], 'protocols': {'null': {}}, 'multiMaster': True, } self.update_builder_config(config_dict, stepcontrollers, None, None) yield self.setup_master(config_dict) builder_ids = 
[] for i in range(builder_count): builder_ids.append(( yield self.master.data.updates.findBuilderId('builder{}'.format(i)))) return stepcontrollers, config_dict, lock, builder_ids @parameterized.expand([ (3, util.MasterLock, 'counting', 1, 2, 1, 2), (3, util.WorkerLock, 'counting', 1, 2, 1, 2), (3, util.MasterLock, 'counting', 2, 1, 2, 2), (3, util.WorkerLock, 'counting', 2, 1, 2, 2), (2, util.MasterLock, 'exclusive', 1, 2, 1, 1), (2, util.WorkerLock, 'exclusive', 1, 2, 1, 1), (2, util.MasterLock, 'exclusive', 2, 1, 1, 1), (2, util.WorkerLock, 'exclusive', 2, 1, 1, 1), ]) @defer.inlineCallbacks def test_changing_max_lock_count_does_not_break_builder_locks( self, builder_count, lock_cls, mode, max_count_before, max_count_after, allowed_builds_before, allowed_builds_after): ''' Check that Buildbot does not allow extra claims on a claimed lock after a reconfig that changed the maxCount of that lock. Some Buildbot versions created a completely separate real lock after each maxCount change, which allowed to e.g. take an exclusive lock twice. 
''' stepcontrollers, config_dict, lock, builder_ids = \ yield self.create_single_worker_n_builder_lock_config( builder_count, lock_cls, max_count_before, mode) # create a number of builds and check that the expected number of them # start for i in range(builder_count): yield self.create_build_request([builder_ids[i]]) builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), allowed_builds_before) # update the config and reconfig the master lock = lock_cls(lock.name, maxCount=max_count_after) self.update_builder_config(config_dict, stepcontrollers, lock, mode) yield self.master.reconfig() yield flushEventualQueue() # check that the number of running builds matches expectation builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), allowed_builds_after) # finish the steps and check that builds finished as expected for stepcontroller in stepcontrollers: stepcontroller.finish_step(SUCCESS) yield flushEventualQueue() builds = yield self.master.data.get(("builds",)) for b in builds[allowed_builds_after:]: self.assertEqual(b['results'], SUCCESS) @parameterized.expand([ (3, util.MasterLock, 'counting', 1, 2, 1, 2), (3, util.WorkerLock, 'counting', 1, 2, 1, 2), (3, util.MasterLock, 'counting', 2, 1, 2, 2), (3, util.WorkerLock, 'counting', 2, 1, 2, 2), (2, util.MasterLock, 'exclusive', 1, 2, 1, 1), (2, util.WorkerLock, 'exclusive', 1, 2, 1, 1), (2, util.MasterLock, 'exclusive', 2, 1, 1, 1), (2, util.WorkerLock, 'exclusive', 2, 1, 1, 1), ]) @defer.inlineCallbacks def test_changing_max_lock_count_does_not_break_step_locks( self, builder_count, lock_cls, mode, max_count_before, max_count_after, allowed_steps_before, allowed_steps_after): ''' Check that Buildbot does not allow extra claims on a claimed lock after a reconfig that changed the maxCount of that lock. Some Buildbot versions created a completely separate real lock after each maxCount change, which allowed to e.g. take an exclusive lock twice. 
''' stepcontrollers, config_dict, lock, builder_ids = \ yield self.create_single_worker_n_builder_step_lock_config( builder_count, lock_cls, max_count_before, mode) # create a number of builds and check that the expected number of them # start their steps for i in range(builder_count): yield self.create_build_request([builder_ids[i]]) builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), builder_count) self.assertEqual(sum(sc.running for sc in stepcontrollers), allowed_steps_before) # update the config and reconfig the master lock = lock_cls(lock.name, maxCount=max_count_after) new_stepcontrollers = \ self.create_stepcontrollers(builder_count, lock, mode) self.update_builder_config(config_dict, new_stepcontrollers, lock, mode) yield self.master.reconfig() yield flushEventualQueue() # check that all builds are still running builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), builder_count) # check that the expected number of steps has been started and that # none of the new steps has been started self.assertEqual(sum(sc.running for sc in stepcontrollers), allowed_steps_before) self.assertEqual(sum(sc.running for sc in new_stepcontrollers), 0) # finish the steps and check that builds finished as expected for stepcontroller in stepcontrollers: stepcontroller.finish_step(SUCCESS) yield flushEventualQueue() builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), builder_count) for b in builds: self.assertEqual(b['results'], SUCCESS) self.assertEqual(sum(sc.running for sc in stepcontrollers), 0) self.assertEqual(sum(sc.running for sc in new_stepcontrollers), 0) buildbot-3.4.0/master/buildbot/test/integration/test_log_finish.py000066400000000000000000000111631413250514000254520ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.plugins import steps from buildbot.process.results import EXCEPTION from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase class TestLog(RunMasterBase): # master configuration def masterConfig(self, step): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"])] f = BuildFactory() f.addStep(step) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c @defer.inlineCallbacks def test_shellcommand(self): class MyStep(steps.ShellCommand): def _newLog(obj, name, type, logid, logEncoding): r = steps.ShellCommand._newLog(obj, name, type, logid, logEncoding) self.curr_log = r return self.curr_log step = MyStep(command='echo hello') yield self.setupConfig(self.masterConfig(step)) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none") build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) self.assertEqual(build['results'], SUCCESS) 
self.assertTrue(self.curr_log.finished) @defer.inlineCallbacks def test_mastershellcommand(self): class MyStep(steps.MasterShellCommand): def _newLog(obj, name, type, logid, logEncoding): r = steps.MasterShellCommand._newLog(obj, name, type, logid, logEncoding) self.curr_log = r return self.curr_log step = MyStep(command='echo hello') yield self.setupConfig(self.masterConfig(step)) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none") build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) self.assertEqual(build['results'], SUCCESS) self.assertTrue(self.curr_log.finished) @defer.inlineCallbacks def test_mastershellcommand_issue(self): class MyStep(steps.MasterShellCommand): def _newLog(obj, name, type, logid, logEncoding): r = steps.MasterShellCommand._newLog(obj, name, type, logid, logEncoding) self.curr_log = r self.patch(r, "finish", lambda: defer.fail(RuntimeError('Could not finish'))) return self.curr_log step = MyStep(command='echo hello') yield self.setupConfig(self.masterConfig(step)) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none") build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) self.assertFalse(self.curr_log.finished) self.assertEqual(build['results'], EXCEPTION) errors = self.flushLoggedErrors() self.assertEqual(len(errors), 1) error = errors[0] self.assertEqual(error.getErrorMessage(), 'Could not finish') buildbot-3.4.0/master/buildbot/test/integration/test_master.py000066400000000000000000000062251413250514000246270ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.internet.task import deferLater from buildbot.changes.filter import ChangeFilter from buildbot.changes.pb import PBChangeSource from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.schedulers.basic import AnyBranchScheduler from buildbot.schedulers.forcesched import ForceScheduler from buildbot.steps.shell import ShellCommand from buildbot.test.util import www from buildbot.test.util.integration import RunMasterBase from buildbot.worker import Worker class RunMaster(RunMasterBase, www.RequiresWwwMixin): proto = 'pb' @defer.inlineCallbacks def do_test_master(self): yield self.setupConfig(BuildmasterConfig, startWorker=False) # hang out for a fraction of a second, to let startup processes run yield deferLater(reactor, 0.01, lambda: None) # run this test twice, to make sure the first time shut everything down # correctly; if this second test fails, but the first succeeds, then # something is not cleaning up correctly in stopService. def test_master1(self): return self.do_test_master() def test_master2(self): return self.do_test_master() # master configuration # Note that the *same* configuration objects are used for both runs of the # master. 
This is a more strenuous test than strictly required, since a master # will generally re-execute master.cfg on startup. However, it's good form and # will help to flush out any bugs that may otherwise be difficult to find. c = BuildmasterConfig = {} c['workers'] = [Worker("local1", "localpw")] c['protocols'] = {'pb': {'port': 'tcp:0'}} c['change_source'] = [] c['change_source'] = PBChangeSource() c['schedulers'] = [] c['schedulers'].append(AnyBranchScheduler(name="all", change_filter=ChangeFilter( project_re='^testy/'), treeStableTimer=1 * 60, builderNames=['testy', ])) c['schedulers'].append(ForceScheduler( name="force", builderNames=["testy"])) f1 = BuildFactory() f1.addStep(ShellCommand(command='echo hi')) c['builders'] = [] c['builders'].append( BuilderConfig(name="testy", workernames=["local1"], factory=f1)) c['title'] = "test" c['titleURL'] = "test" c['buildbotURL'] = "http://localhost:8010/" buildbot-3.4.0/master/buildbot/test/integration/test_notifier.py000066400000000000000000000151351413250514000251530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import base64 from twisted.internet import defer from buildbot.reporters.generators.build import BuildStatusGenerator from buildbot.reporters.generators.buildset import BuildSetStatusGenerator from buildbot.reporters.generators.worker import WorkerMissingGenerator from buildbot.reporters.mail import ESMTPSenderFactory from buildbot.reporters.mail import MailNotifier from buildbot.reporters.message import MessageFormatter from buildbot.reporters.message import MessageFormatterMissingWorker from buildbot.reporters.pushover import PushoverNotifier from buildbot.test.util.integration import RunMasterBase from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes # This integration test creates a master and worker environment, # with one builders and a shellcommand step, and a MailNotifier class NotifierMaster(RunMasterBase): if not ESMTPSenderFactory: skip = ("twisted-mail unavailable, " "see: https://twistedmatrix.com/trac/ticket/8770") @defer.inlineCallbacks def create_master_config(self, build_set_summary=False): self.mailDeferred = defer.Deferred() # patch MailNotifier.sendmail to know when the mail has been sent def sendMail(_, mail, recipients): self.mailDeferred.callback((mail.as_string(), recipients)) self.patch(MailNotifier, "sendMail", sendMail) self.notification = defer.Deferred() def sendNotification(_, params): self.notification.callback(params) self.patch(PushoverNotifier, "sendNotification", sendNotification) yield self.setupConfig(masterConfig(build_set_summary=build_set_summary)) @defer.inlineCallbacks def doTest(self, what): change = dict(branch="master", files=["foo.c"], author="author@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) mail, recipients = yield self.mailDeferred self.assertEqual(recipients, ["author@foo.com"]) 
self.assertIn("From: bot@foo.com", mail) self.assertIn("Subject: Buildbot success in Buildbot", mail) self.assertEncodedIn("The Buildbot has detected a passing build", mail) params = yield self.notification self.assertEqual(build['buildid'], 1) self.assertEqual(params, {'title': "Buildbot success in Buildbot on {}".format(what), 'message': "This is a message."}) def assertEncodedIn(self, text, mail): # python 2.6 default transfer in base64 for utf-8 if "base64" not in mail: self.assertIn(text, mail) else: # b64encode and remove '=' padding (hence [:-1]) encodedBytes = base64.b64encode(unicode2bytes(text)).rstrip(b"=") encodedText = bytes2unicode(encodedBytes) self.assertIn(encodedText, mail) @defer.inlineCallbacks def test_notifiy_for_build(self): yield self.create_master_config(build_set_summary=False) yield self.doTest('testy') @defer.inlineCallbacks def test_notifiy_for_buildset(self): yield self.create_master_config(build_set_summary=True) yield self.doTest('whole buildset') @defer.inlineCallbacks def test_missing_worker(self): yield self.create_master_config(build_set_summary=False) yield self.master.data.updates.workerMissing( workerid='local1', masterid=self.master.masterid, last_connection='long time ago', notify=['admin@worker.org'], ) mail, recipients = yield self.mailDeferred self.assertIn("From: bot@foo.com", mail) self.assertEqual(recipients, ['admin@worker.org']) self.assertIn("Subject: Buildbot worker local1 missing", mail) self.assertIn("disconnected at long time ago", mail) self.assertEncodedIn("worker named local1 went away", mail) params = yield self.notification self.assertEqual(params, {'title': "Buildbot worker local1 missing", 'message': b"No worker."}) # master configuration def masterConfig(build_set_summary): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers, reporters c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", 
builderNames=["testy"]) ] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f) ] formatter = MessageFormatter(template='This is a message.') formatter_worker = MessageFormatterMissingWorker(template='No worker.') if build_set_summary: generators_mail = [ BuildSetStatusGenerator(mode='all'), WorkerMissingGenerator(workers='all'), ] generators_pushover = [ BuildSetStatusGenerator(mode='all', message_formatter=formatter), WorkerMissingGenerator(workers=['local1'], message_formatter=formatter_worker), ] else: generators_mail = [ BuildStatusGenerator(mode='all'), WorkerMissingGenerator(workers='all'), ] generators_pushover = [ BuildStatusGenerator(mode='all', message_formatter=formatter), WorkerMissingGenerator(workers=['local1'], message_formatter=formatter_worker), ] c['services'] = [ reporters.MailNotifier("bot@foo.com", generators=generators_mail), reporters.PushoverNotifier('1234', 'abcd', generators=generators_pushover) ] return c buildbot-3.4.0/master/buildbot/test/integration/test_process_botmaster.py000066400000000000000000000044761413250514000271000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.process.workerforbuilder import PingException from buildbot.test.fake.worker import WorkerController from buildbot.test.util.integration import RunFakeMasterTestCase class Tests(RunFakeMasterTestCase): @defer.inlineCallbacks def do_terminates_ping_on_shutdown(self, quick_mode): """ During shutdown we want to terminate any outstanding pings. """ controller = WorkerController(self, 'local') config_dict = { 'builders': [ BuilderConfig(name="testy", workernames=['local'], factory=BuildFactory()), ], 'workers': [controller.worker], 'protocols': {'null': {}}, 'multiMaster': True, } yield self.setup_master(config_dict) builder_id = yield self.master.data.updates.findBuilderId('testy') yield controller.connect_worker() controller.sever_connection() yield self.create_build_request([builder_id]) # give time for any delayed actions to complete self.reactor.advance(1) yield self.master.botmaster.cleanShutdown(quickMode=quick_mode, stopReactor=False) self.flushLoggedErrors(PingException) def test_terminates_ping_on_shutdown_quick_mode(self): return self.do_terminates_ping_on_shutdown(quick_mode=True) def test_terminates_ping_on_shutdown_slow_mode(self): return self.do_terminates_ping_on_shutdown(quick_mode=False) buildbot-3.4.0/master/buildbot/test/integration/test_setup_entrypoints.py000066400000000000000000000265211413250514000271530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import importlib import inspect import os import pkg_resources import warnings import twisted from twisted.trial import unittest from twisted.trial.unittest import SkipTest from zope.interface.verify import verifyClass from buildbot.interfaces import IBuildStep from buildbot.interfaces import IChangeSource from buildbot.interfaces import IScheduler from buildbot.interfaces import IWorker from buildbot.plugins.db import get_plugins def get_python_module_contents(package_name): spec = importlib.util.find_spec(package_name) if spec is None or spec.origin is None: return set() pathname = os.path.dirname(spec.origin) result = set() with os.scandir(pathname) as dir_entries: for dir_entry in dir_entries: filename = dir_entry.name if filename.startswith('__'): continue next_package_name = '.'.join((package_name, filename.partition('.')[0])) if dir_entry.is_file() and filename.endswith('.py'): result.add(next_package_name) if dir_entry.is_dir(): result.add(next_package_name) result |= get_python_module_contents(next_package_name) return result # NOTE: when running this test locally, make sure to reinstall master after every change to pick up # new entry points. 
class TestSetupPyEntryPoints(unittest.TestCase): def test_changes(self): known_not_exported = { 'buildbot.changes.gerritchangesource.GerritChangeSourceBase', 'buildbot.changes.base.ReconfigurablePollingChangeSource', 'buildbot.changes.base.PollingChangeSource', 'buildbot.changes.base.ChangeSource', } self.verify_plugins_registered('changes', 'buildbot.changes', IChangeSource, known_not_exported) def test_schedulers(self): known_not_exported = { 'buildbot.schedulers.basic.BaseBasicScheduler', 'buildbot.schedulers.timed.Timed', 'buildbot.schedulers.trysched.TryBase', 'buildbot.schedulers.base.BaseScheduler', 'buildbot.schedulers.timed.NightlyBase', 'buildbot.schedulers.basic.Scheduler', } self.verify_plugins_registered('schedulers', 'buildbot.schedulers', IScheduler, known_not_exported) def test_steps(self): known_not_exported = { 'buildbot.steps.download_secret_to_worker.RemoveWorkerFileSecret', 'buildbot.steps.source.base.Source', 'buildbot.steps.download_secret_to_worker.DownloadSecretsToWorker', 'buildbot.steps.shell.SetProperty', 'buildbot.steps.worker.WorkerBuildStep', 'buildbot.steps.vstudio.VisualStudio', } self.verify_plugins_registered('steps', 'buildbot.steps', IBuildStep, known_not_exported) def test_util(self): # work around Twisted bug 9384. 
if pkg_resources.parse_version(twisted.__version__) < pkg_resources.parse_version("18.9.0"): raise SkipTest('manhole.py can not be imported on old twisted and new python') known_not_exported = { 'buildbot.util._notifier.Notifier', 'buildbot.util.backoff.ExponentialBackoffEngineAsync', 'buildbot.util.backoff.ExponentialBackoffEngineSync', 'buildbot.util.backoff.BackoffTimeoutExceededError', 'buildbot.util.backoff.ExponentialBackoffEngine', 'buildbot.util.bbcollections.KeyedSets', 'buildbot.util.codebase.AbsoluteSourceStampsMixin', 'buildbot.util.config.ConfiguredMixin', 'buildbot.util.croniter.croniter', 'buildbot.util.debounce.Debouncer', 'buildbot.util.deferwaiter.DeferWaiter', 'buildbot.util.deferwaiter.RepeatedActionHandler', 'buildbot.util.git.GitMixin', 'buildbot.util.git.GitStepMixin', 'buildbot.util.giturlparse.GitUrl', 'buildbot.util.httpclientservice.HTTPClientService', 'buildbot.util.httpclientservice.TxRequestsResponseWrapper', 'buildbot.util.kubeclientservice.KubeClientService', 'buildbot.util.kubeclientservice.KubeConfigLoaderBase', 'buildbot.util.kubeclientservice.KubeError', 'buildbot.util.latent.CompatibleLatentWorkerMixin', 'buildbot.util.lineboundaries.LineBoundaryFinder', 'buildbot.util.lru.AsyncLRUCache', 'buildbot.util.lru.LRUCache', 'buildbot.util.maildir.MaildirService', 'buildbot.util.maildir.NoSuchMaildir', 'buildbot.util.netstrings.NetstringParser', 'buildbot.util.netstrings.NullAddress', 'buildbot.util.netstrings.NullTransport', 'buildbot.util.pathmatch.Matcher', 'buildbot.util.poll.Poller', 'buildbot.util.private_tempdir.PrivateTemporaryDirectory', 'buildbot.util.protocol.LineBuffer', 'buildbot.util.protocol.LineProcessProtocol', 'buildbot.util.pullrequest.PullRequestMixin', 'buildbot.util.queue.ConnectableThreadQueue', 'buildbot.util.queue.UndoableQueue', 'buildbot.util.raml.RamlLoader', 'buildbot.util.raml.RamlSpec', 'buildbot.util.runprocess.RunProcessPP', 'buildbot.util.runprocess.RunProcess', 
'buildbot.util.sautils.InsertFromSelect', 'buildbot.util.service.AsyncMultiService', 'buildbot.util.service.AsyncService', 'buildbot.util.service.BuildbotService', 'buildbot.util.service.BuildbotServiceManager', 'buildbot.util.service.ClusteredBuildbotService', 'buildbot.util.service.MasterService', 'buildbot.util.service.ReconfigurableServiceMixin', 'buildbot.util.service.SharedService', 'buildbot.util.state.StateMixin', 'buildbot.util.subscription.Subscription', 'buildbot.util.subscription.SubscriptionPoint', 'buildbot.util.test_result_submitter.TestResultSubmitter', } self.verify_plugins_registered('util', 'buildbot.util', None, known_not_exported) def test_reporters(self): known_not_exported = { 'buildbot.reporters.base.ReporterBase', 'buildbot.reporters.generators.utils.BuildStatusGeneratorMixin', 'buildbot.reporters.gerrit.DEFAULT_REVIEW', 'buildbot.reporters.gerrit.DEFAULT_SUMMARY', 'buildbot.reporters.irc.IRCChannel', 'buildbot.reporters.irc.IRCContact', 'buildbot.reporters.irc.IrcStatusBot', 'buildbot.reporters.irc.IrcStatusFactory', 'buildbot.reporters.irc.UsageError', 'buildbot.reporters.mail.Domain', 'buildbot.reporters.message.MessageFormatterBase', 'buildbot.reporters.message.MessageFormatterBaseJinja', 'buildbot.reporters.telegram.TelegramChannel', 'buildbot.reporters.telegram.TelegramContact', 'buildbot.reporters.telegram.TelegramPollingBot', 'buildbot.reporters.telegram.TelegramStatusBot', 'buildbot.reporters.telegram.TelegramWebhookBot', 'buildbot.reporters.words.Channel', 'buildbot.reporters.words.Contact', 'buildbot.reporters.words.ForceOptions', 'buildbot.reporters.words.StatusBot', 'buildbot.reporters.words.ThrottledClientFactory', 'buildbot.reporters.words.UsageError', 'buildbot.reporters.words.WebhookResource', } self.verify_plugins_registered('reporters', 'buildbot.reporters', None, known_not_exported) def test_secrets(self): known_not_exported = { 'buildbot.secrets.manager.SecretManager', 
'buildbot.secrets.providers.base.SecretProviderBase', 'buildbot.secrets.secret.SecretDetails', 'buildbot.secrets.providers.vault_hvac.VaultAuthenticator', } self.verify_plugins_registered('secrets', 'buildbot.secrets', None, known_not_exported) def test_webhooks(self): # in the case of webhooks the entry points list modules, not classes, so # verify_plugins_registered won't work. For now let's ignore this edge case get_plugins('webhooks', None, load_now=True) def test_workers(self): known_not_exported = { 'buildbot.worker.upcloud.UpcloudLatentWorker', 'buildbot.worker.base.AbstractWorker', 'buildbot.worker.latent.AbstractLatentWorker', 'buildbot.worker.latent.LocalLatentWorker', 'buildbot.worker.marathon.MarathonLatentWorker', 'buildbot.worker.docker.DockerBaseWorker', } self.verify_plugins_registered('worker', 'buildbot.worker', IWorker, known_not_exported) def verify_plugins_registered(self, plugin_type, module_name, interface, known_not_exported=None): # This will verify whether we can load plugins, i.e. whether the entry points are valid. plugins = get_plugins(plugin_type, interface, load_now=True) # Now verify that are no unregistered plugins left. 
existing_classes = self.get_existing_classes(module_name, interface) exported_classes = {'{}.{}'.format(plugins._get_entry(name)._entry.module_name, name) for name in plugins.names} if known_not_exported is None: known_not_exported = set() not_exported_classes = existing_classes - exported_classes - known_not_exported self.assertEqual(not_exported_classes, set()) self.assertEqual(known_not_exported - existing_classes, set()) def class_provides_iface(self, interface, klass): try: verifyClass(interface, klass) return True except Exception: return False def get_existing_classes(self, module_name, interface): existing_modules = get_python_module_contents(module_name) existing_classes = set() with warnings.catch_warnings(): warnings.simplefilter("ignore") for existing_module in existing_modules: module = importlib.import_module(existing_module) for name, obj in inspect.getmembers(module): if name.startswith('_'): continue if inspect.isclass(obj) and obj.__module__ == existing_module: if interface is not None and not self.class_provides_iface(interface, obj): continue existing_classes.add('{}.{}'.format(existing_module, name)) return existing_classes buildbot-3.4.0/master/buildbot/test/integration/test_stop_build.py000066400000000000000000000047561413250514000255070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.util.integration import RunMasterBase class ShellMaster(RunMasterBase): @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) @defer.inlineCallbacks def newStepCallback(_, data): # when the sleep step start, we kill it if data['name'] == 'sleep': brs = yield self.master.data.get(('buildrequests',)) brid = brs[-1]['buildrequestid'] self.master.data.control( 'cancel', {'reason': 'cancelled by test'}, ('buildrequests', brid)) yield self.master.mq.startConsuming( newStepCallback, ('steps', None, 'new')) build = yield self.doForceBuild(wantSteps=True, wantLogs=True, wantProperties=True) self.assertEqual(build['buildid'], 1) # make sure the cancel reason is transferred all the way to the step log cancel_log = build['steps'][1]['logs'][-1] self.assertEqual(cancel_log['name'], 'cancelled') self.assertIn('cancelled by test', cancel_log['contents']['content']) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"]), schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.ShellCommand(command='sleep 100', name='sleep')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/test_stop_trigger.py000066400000000000000000000133001413250514000260340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sys import textwrap from twisted.internet import defer from twisted.internet import reactor from buildbot.config import BuilderConfig from buildbot.plugins import schedulers from buildbot.plugins import steps from buildbot.process.factory import BuildFactory from buildbot.process.results import CANCELLED from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with two builders and a trigger step linking them. the triggered build never ends # so that we can reliably stop it recursively # master configurations def setupTriggerConfiguration(triggeredFactory, nextBuild=None): c = {} c['schedulers'] = [ schedulers.Triggerable( name="trigsched", builderNames=["triggered"]), schedulers.AnyBranchScheduler( name="sched", builderNames=["main"])] f = BuildFactory() f.addStep(steps.Trigger(schedulerNames=['trigsched'], waitForFinish=True, updateSourceStamp=True)) f.addStep(steps.ShellCommand(command='echo world')) mainBuilder = BuilderConfig(name="main", workernames=["local1"], factory=f) triggeredBuilderKwargs = {'name': "triggered", 'workernames': ["local1"], 'factory': triggeredFactory} if nextBuild is not None: triggeredBuilderKwargs['nextBuild'] = nextBuild triggeredBuilder = BuilderConfig(**triggeredBuilderKwargs) c['builders'] = [mainBuilder, triggeredBuilder] return c def triggerRunsForever(): f2 = BuildFactory() # Infinite sleep command. if sys.platform == 'win32': # Ping localhost infinitely. 
# There are other options, however they either don't work in # non-interactive mode (e.g. 'pause'), or doesn't available on all # Windows versions (e.g. 'timeout' and 'choice' are available # starting from Windows 7). cmd = 'ping -t 127.0.0.1'.split() else: cmd = textwrap.dedent("""\ while : do echo "sleeping"; sleep 1; done """) f2.addStep(steps.ShellCommand(command=cmd)) return setupTriggerConfiguration(f2) def triggeredBuildIsNotCreated(): f2 = BuildFactory() f2.addStep(steps.ShellCommand(command="echo 'hello'")) def nextBuild(*args, **kwargs): return defer.succeed(None) return setupTriggerConfiguration(f2, nextBuild=nextBuild) class TriggeringMaster(RunMasterBase): timeout = 120 change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none") def assertBuildIsCancelled(self, b): self.assertTrue(b['complete']) self.assertEqual(b['results'], CANCELLED, repr(b)) @defer.inlineCallbacks def runTest(self, newBuildCallback, flushErrors=False): newConsumer = yield self.master.mq.startConsuming( newBuildCallback, ('builds', None, 'new')) build = yield self.doForceBuild(wantSteps=True, useChange=self.change, wantLogs=True) self.assertBuildIsCancelled(build) newConsumer.stopConsuming() builds = yield self.master.data.get(("builds",)) for b in builds: self.assertBuildIsCancelled(b) if flushErrors: self.flushLoggedErrors() @defer.inlineCallbacks def testTriggerRunsForever(self): yield self.setupConfig(triggerRunsForever()) self.higherBuild = None def newCallback(_, data): if self.higherBuild is None: self.higherBuild = data['buildid'] else: self.master.data.control( "stop", {}, ("builds", self.higherBuild)) self.higherBuild = None yield self.runTest(newCallback, flushErrors=True) @defer.inlineCallbacks def testTriggerRunsForeverAfterCmdStarted(self): yield self.setupConfig(triggerRunsForever()) self.higherBuild = None def newCallback(_, data): if self.higherBuild is None: self.higherBuild = 
data['buildid'] else: def f(): self.master.data.control( "stop", {}, ("builds", self.higherBuild)) self.higherBuild = None reactor.callLater(5.0, f) yield self.runTest(newCallback, flushErrors=True) @defer.inlineCallbacks def testTriggeredBuildIsNotCreated(self): yield self.setupConfig(triggeredBuildIsNotCreated()) def newCallback(_, data): self.master.data.control("stop", {}, ("builds", data['buildid'])) yield self.runTest(newCallback) buildbot-3.4.0/master/buildbot/test/integration/test_telegram_bot.py000066400000000000000000000227651413250514000260070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import mock from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from twisted.web import client from twisted.web.http_headers import Headers from twisted.web.iweb import IBodyProducer from zope.interface import implementer from buildbot.data import connector as dataconnector from buildbot.mq import connector as mqconnector from buildbot.reporters import telegram from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util import db from buildbot.test.util import www from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.www import auth from buildbot.www import authz from buildbot.www import service as wwwservice @implementer(IBodyProducer) class BytesProducer(object): def __init__(self, body): self.body = body self.length = len(body) def startProducing(self, consumer): consumer.write(self.body) return defer.succeed(None) def pauseProducing(self): pass def stopProducing(self): pass class TelegramBot(db.RealDatabaseWithConnectorMixin, www.RequiresWwwMixin, unittest.TestCase): master = None @defer.inlineCallbacks def get_http(self, bot_token): base_url = "https://api.telegram.org/telegram" + bot_token http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, base_url) # This is necessary as Telegram will make requests in the reconfig http.expect("post", "/getMe", content_json={'ok': 1, 'result': {'username': 'testbot'}}) if bot_token == 'poll': http.expect("post", "/deleteWebhook", content_json={'ok': 1}) else: http.expect("post", "/setWebhook", json={'url': bytes2unicode(self.bot_url)}, content_json={'ok': 1}) return http @defer.inlineCallbacks def setUp(self): table_names = [ 'objects', 'object_state', 'masters', 'workers', 'configured_workers', 'connected_workers', 'builder_masters', 
'builders' ] master = fakemaster.make_master(self, wantRealReactor=True) yield self.setUpRealDatabaseWithConnector(master, table_names=table_names, sqlite_memory=False) master.data = dataconnector.DataConnector() yield master.data.setServiceParent(master) master.config.mq = dict(type='simple') master.mq = mqconnector.MQConnector() yield master.mq.setServiceParent(master) yield master.mq.setup() yield master.mq.startService() master.config.www = dict( port='tcp:0:interface=127.0.0.1', debug=True, auth=auth.NoAuth(), authz=authz.Authz(), avatar_methods=[], logfileName='http.log') master.www = wwwservice.WWWService() yield master.www.setServiceParent(master) yield master.www.startService() yield master.www.reconfigServiceWithBuildbotConfig(master.config) session = mock.Mock() session.uid = "0" master.www.site.sessionFactory = mock.Mock(return_value=session) # now that we have a port, construct the real URL and insert it into # the config. The second reconfig isn't really required, but doesn't # hurt. 
self.url = 'http://127.0.0.1:%d/' % master.www.getPortnum() self.url = unicode2bytes(self.url) master.config.buildbotURL = self.url yield master.www.reconfigServiceWithBuildbotConfig(master.config) self.master = master self.agent = client.Agent(reactor) # create a telegram bot service tb = master.config.services['TelegramBot'] = telegram.TelegramBot( bot_token='12345:secret', useWebhook=True, chat_ids=[-123456], notify_events=['worker'] ) tb._get_http = self.get_http yield tb.setServiceParent(self.master) self.bot_url = self.url + b"telegram12345:secret" yield tb.startService() self.sent_messages = [] def send_message(chat, message, **kwargs): self.sent_messages.append((chat, message)) tb.bot.send_message = send_message @defer.inlineCallbacks def tearDown(self): if self.master: yield self.master.www.stopService() yield self.master.mq.stopService() yield self.tearDownRealDatabaseWithConnector() @defer.inlineCallbacks def testWebhook(self): payload = unicode2bytes(json.dumps({ "update_id": 12345, "message": { "message_id": 123, "from": { "id": 123456789, "first_name": "Alice", }, "chat": { "id": -12345678, "title": "Wonderlands", "type": "group" }, "date": 1566688888, "text": "/getid", } })) pg = yield self.agent.request(b'POST', self.bot_url, Headers({'Content-Type': ['application/json']}), BytesProducer(payload)) self.assertEqual(pg.code, 202, "did not get 202 response for '{}'".format(bytes2unicode(self.bot_url))) self.assertIn('123456789', self.sent_messages[0][1]) self.assertIn('-12345678', self.sent_messages[1][1]) @defer.inlineCallbacks def testReconfig(self): tb = self.master.config.services['TelegramBot'] yield tb.reconfigService( bot_token='12345:secret', useWebhook=True, chat_ids=[-123456], notify_events=['problem'] ) @defer.inlineCallbacks def testLoadState(self): tboid = yield self.master.db.state.getObjectId( 'testbot', 'buildbot.reporters.telegram.TelegramWebhookBot') yield self.insertTestData([ fakedb.ObjectState(objectid=tboid, name='notify_events', 
value_json='[[123456789, ["started", "finished"]]]'), fakedb.ObjectState(objectid=tboid, name='missing_workers', value_json='[[123456789, [12]]]'), ]) tb = self.master.config.services['TelegramBot'] yield tb.bot.loadState() c = tb.bot.getContact({'id': 123456789}, {'id': 123456789}) self.assertEquals(c.channel.notify_events, {'started', 'finished'}) self.assertEquals(c.channel.missing_workers, {12}) @defer.inlineCallbacks def testSaveState(self): tb = self.master.config.services['TelegramBot'] tboid = yield self.master.db.state.getObjectId( 'testbot', 'buildbot.reporters.telegram.TelegramWebhookBot') notify_events = yield self.master.db.state.getState(tboid, 'notify_events', ()) missing_workers = yield self.master.db.state.getState(tboid, 'missing_workers', ()) self.assertNotIn([99, ['cancelled']], notify_events) self.assertNotIn([99, [13]], missing_workers) tb.bot.getChannel(98) # this channel should not be saved c = tb.bot.getChannel(99) self.assertIn(98, tb.bot.channels) self.assertIn(99, tb.bot.channels) c.notify_events = {'cancelled'} c.missing_workers = {13} yield tb.bot.saveNotifyEvents() yield tb.bot.saveMissingWorkers() notify_events = yield self.master.db.state.getState(tboid, 'notify_events', ()) missing_workers = yield self.master.db.state.getState(tboid, 'missing_workers', ()) self.assertNotIn(98, (c for c, _ in notify_events)) self.assertIn([99, ['cancelled']], notify_events) self.assertIn([99, [13]], missing_workers) @defer.inlineCallbacks def testMissingWorker(self): yield self.insertTestData([fakedb.Worker(id=1, name='local1')]) tb = self.master.config.services['TelegramBot'] channel = tb.bot.getChannel(-123456) self.assertEquals(channel.notify_events, {'worker'}) yield self.master.data.updates.workerMissing( workerid=1, masterid=self.master.masterid, last_connection='long time ago', notify=['admin@worker.org'], ) self.assertEquals(self.sent_messages[0][1], "Worker `local1` is missing. 
It was seen last on long time ago.") yield self.master.data.updates.workerConnected( workerid=1, masterid=self.master.masterid, workerinfo={}, ) self.assertEquals(self.sent_messages[1][1], "Worker `local1` is back online.") buildbot-3.4.0/master/buildbot/test/integration/test_trigger.py000066400000000000000000000113421413250514000247730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from io import StringIO from twisted.internet import defer from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with two builders and a trigger step linking them expectedOutputRegex = \ r"""\*\*\* BUILD 1 \*\*\* ==> build successful \(success\) \*\*\* STEP worker_preparation \*\*\* ==> worker local1 ready \(success\) \*\*\* STEP shell \*\*\* ==> 'echo hello' \(success\) log:stdio \({loglines}\) \*\*\* STEP trigger \*\*\* ==> triggered trigsched, 1 success \(success\) url:trigsched #2 \(http://localhost:8080/#buildrequests/2\) url:success: build #1 \(http://localhost:8080/#builders/(1|2)/builds/1\) \*\*\* STEP shell_1 \*\*\* ==> 'echo world' \(success\) log:stdio \({loglines}\) \*\*\* BUILD 2 \*\*\* ==> build successful \(success\) \*\*\* STEP worker_preparation \*\*\* ==> worker local1 ready \(success\) \*\*\* STEP shell \*\*\* ==> 'echo ola' \(success\) log:stdio \({loglines}\) """ class TriggeringMaster(RunMasterBase): change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) @defer.inlineCallbacks def test_trigger(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, useChange=self.change, wantLogs=True) self.assertEqual( build['steps'][2]['state_string'], 'triggered trigsched, 1 success') builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), 2) dump = StringIO() for b in builds: yield self.printBuild(b, dump) # depending on the environment the number of lines is different between # test hosts loglines = builds[1]['steps'][1]['logs'][0]['num_lines'] self.assertRegex(dump.getvalue(), expectedOutputRegex.format(loglines=loglines)) @defer.inlineCallbacks def test_trigger_failure(self): yield self.setupConfig(masterConfig(addFailure=True)) build = yield self.doForceBuild(wantSteps=True, 
useChange=self.change, wantLogs=True) self.assertEqual( build['steps'][2]['state_string'], 'triggered trigsched, 2 successes, 1 failure') builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), 4) # master configuration def masterConfig(addFailure=False): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.Triggerable( name="trigsched", builderNames=["build"]), schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) f.addStep(steps.Trigger(schedulerNames=['trigsched'], waitForFinish=True, updateSourceStamp=True)) f.addStep(steps.ShellCommand(command='echo world')) f2 = BuildFactory() f2.addStep(steps.ShellCommand(command='echo ola')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f), BuilderConfig(name="build", workernames=["local1"], factory=f2)] if addFailure: f3 = BuildFactory() f3.addStep(steps.ShellCommand(command='false')) c['builders'].append(BuilderConfig(name="build2", workernames=["local1"], factory=f3)) c['builders'].append(BuilderConfig(name="build3", workernames=["local1"], factory=f2)) c['schedulers'][0] = schedulers.Triggerable(name="trigsched", builderNames=["build", "build2", "build3"]) return c buildbot-3.4.0/master/buildbot/test/integration/test_try_client.py000066400000000000000000000240241413250514000255050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from twisted.python.filepath import FilePath from buildbot import util from buildbot.clients import tryclient from buildbot.schedulers import trysched from buildbot.test.util import www from buildbot.test.util.integration import RunMasterBase # wait for some asynchronous result @defer.inlineCallbacks def waitFor(fn): while True: res = yield fn() if res: return res yield util.asyncSleep(.01) class Schedulers(RunMasterBase, www.RequiresWwwMixin): def setUp(self): self.master = None self.sch = None def spawnProcess(pp, executable, args, environ): tmpfile = os.path.join(self.jobdir, 'tmp', 'testy') newfile = os.path.join(self.jobdir, 'new', 'testy') with open(tmpfile, "w") as f: f.write(pp.job) os.rename(tmpfile, newfile) log.msg("wrote jobfile {}".format(newfile)) # get the scheduler to poll this directory now d = self.sch.watcher.poll() d.addErrback(log.err, 'while polling') @d.addCallback def finished(_): st = mock.Mock() st.value.signal = None st.value.exitCode = 0 pp.processEnded(st) self.patch(reactor, 'spawnProcess', spawnProcess) self.sourcestamp = tryclient.SourceStamp(branch='br', revision='rr', patch=(0, '++--')) def getSourceStamp(vctype, treetop, branch=None, repository=None): return defer.succeed(self.sourcestamp) self.patch(tryclient, 'getSourceStamp', getSourceStamp) self.output = [] # stub out printStatus, as it's timing-based and thus causes # occasional test failures. 
self.patch(tryclient.Try, 'printStatus', lambda _: None) def output(*msg): msg = ' '.join(map(str, msg)) log.msg("output: {}".format(msg)) self.output.append(msg) self.patch(tryclient, 'output', output) def setupJobdir(self): jobdir = FilePath(self.mktemp()) jobdir.createDirectory() self.jobdir = jobdir.path for sub in 'new', 'tmp', 'cur': jobdir.child(sub).createDirectory() return self.jobdir @defer.inlineCallbacks def startMaster(self, sch): extra_config = { 'schedulers': [sch], } self.sch = sch yield self.setupConfig(masterConfig(extra_config)) # wait until the scheduler is active yield waitFor(lambda: self.sch.active) # and, for Try_Userpass, until it's registered its port if isinstance(self.sch, trysched.Try_Userpass): def getSchedulerPort(): if not self.sch.registrations: return None self.serverPort = self.sch.registrations[0].getPort() log.msg("Scheduler registered at port %d" % self.serverPort) return True yield waitFor(getSchedulerPort) def runClient(self, config): self.clt = tryclient.Try(config) return self.clt.run_impl() @defer.inlineCallbacks def test_userpass_no_wait(self): yield self.startMaster( trysched.Try_Userpass('try', ['a'], 0, [('u', b'p')])) yield self.runClient({ 'connect': 'pb', 'master': '127.0.0.1:{}'.format(self.serverPort), 'username': 'u', 'passwd': b'p', }) self.assertEqual(self.output, [ "using 'pb' connect method", 'job created', 'Delivering job; comment= None', 'job has been delivered', 'not waiting for builds to finish' ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 1) @defer.inlineCallbacks def test_userpass_wait(self): yield self.startMaster( trysched.Try_Userpass('try', ['a'], 0, [('u', b'p')])) yield self.runClient({ 'connect': 'pb', 'master': '127.0.0.1:{}'.format(self.serverPort), 'username': 'u', 'passwd': b'p', 'wait': True, }) self.assertEqual(self.output, [ "using 'pb' connect method", 'job created', 'Delivering job; comment= None', 'job has been delivered', 'All Builds 
Complete', 'a: success (build successful)', ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 1) @defer.inlineCallbacks def test_userpass_wait_bytes(self): self.sourcestamp = tryclient.SourceStamp(branch=b'br', revision=b'rr', patch=(0, b'++--')) yield self.startMaster( trysched.Try_Userpass('try', ['a'], 0, [('u', b'p')])) yield self.runClient({ 'connect': 'pb', 'master': '127.0.0.1:{}'.format(self.serverPort), 'username': 'u', 'passwd': b'p', 'wait': True, }) self.assertEqual(self.output, [ "using 'pb' connect method", 'job created', 'Delivering job; comment= None', 'job has been delivered', 'All Builds Complete', 'a: success (build successful)', ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 1) @defer.inlineCallbacks def test_userpass_wait_dryrun(self): yield self.startMaster( trysched.Try_Userpass('try', ['a'], 0, [('u', b'p')])) yield self.runClient({ 'connect': 'pb', 'master': '127.0.0.1:{}'.format(self.serverPort), 'username': 'u', 'passwd': b'p', 'wait': True, 'dryrun': True, }) self.assertEqual(self.output, [ "using 'pb' connect method", 'job created', 'Job:\n' '\tRepository: \n' '\tProject: \n' '\tBranch: br\n' '\tRevision: rr\n' '\tBuilders: None\n' '++--', 'job has been delivered', 'All Builds Complete', ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 0) @defer.inlineCallbacks def test_userpass_list_builders(self): yield self.startMaster( trysched.Try_Userpass('try', ['a'], 0, [('u', b'p')])) yield self.runClient({ 'connect': 'pb', 'get-builder-names': True, 'master': '127.0.0.1:{}'.format(self.serverPort), 'username': 'u', 'passwd': b'p', }) self.assertEqual(self.output, [ "using 'pb' connect method", 'The following builders are available for the try scheduler: ', 'a' ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 0) @defer.inlineCallbacks def test_jobdir_no_wait(self): 
jobdir = self.setupJobdir() yield self.startMaster(trysched.Try_Jobdir('try', ['a'], jobdir)) yield self.runClient({ 'connect': 'ssh', 'master': '127.0.0.1', 'username': 'u', 'passwd': b'p', 'builders': 'a', # appears to be required for ssh }) self.assertEqual(self.output, [ "using 'ssh' connect method", 'job created', 'job has been delivered', 'not waiting for builds to finish' ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 1) @defer.inlineCallbacks def test_jobdir_wait(self): jobdir = self.setupJobdir() yield self.startMaster(trysched.Try_Jobdir('try', ['a'], jobdir)) yield self.runClient({ 'connect': 'ssh', 'wait': True, 'host': '127.0.0.1', 'username': 'u', 'passwd': b'p', 'builders': 'a', # appears to be required for ssh }) self.assertEqual(self.output, [ "using 'ssh' connect method", 'job created', 'job has been delivered', 'waiting for builds with ssh is not supported' ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 1) def masterConfig(extra_config): c = {} from buildbot.config import BuilderConfig from buildbot.process.buildstep import BuildStep from buildbot.process.factory import BuildFactory from buildbot.process import results class MyBuildStep(BuildStep): def run(self): return results.SUCCESS c['change_source'] = [] c['schedulers'] = [] # filled in above f1 = BuildFactory() f1.addStep(MyBuildStep(name='one')) f1.addStep(MyBuildStep(name='two')) c['builders'] = [ BuilderConfig(name="a", workernames=["local1"], factory=f1), ] c['title'] = "test" c['titleURL'] = "test" c['buildbotURL'] = "http://localhost:8010/" c['mq'] = {'debug': True} # test wants to influence the config, but we still return a new config # each time c.update(extra_config) return c buildbot-3.4.0/master/buildbot/test/integration/test_try_client_e2e.py000066400000000000000000000044151413250514000262420ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.internet import reactor from buildbot.test.util.integration import RunMasterBase # This integration test tests that the try command line works end2end class TryClientE2E(RunMasterBase): timeout = 15 @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) def trigger_callback(): port = self.master.pbmanager.dispatchers['tcp:0'].port.getHost().port def thd(): os.system(f"buildbot try --connect=pb --master=127.0.0.1:{port} -b testy " "--property=foo:bar --username=alice --passwd=pw1 --vc=none") reactor.callInThread(thd) build = yield self.doForceBuild(wantSteps=True, triggerCallback=trigger_callback, wantLogs=True, wantProperties=True) self.assertEqual(build['buildid'], 1) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.Try_Userpass(name="try", builderNames=["testy"], port='tcp:0', userpass=[("alice", "pw1")]) ] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c 
buildbot-3.4.0/master/buildbot/test/integration/test_upgrade.py000066400000000000000000000244141413250514000247630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import locale import os import pprint import shutil import sqlite3 import tarfile import sqlalchemy as sa from alembic.autogenerate import compare_metadata from alembic.migration import MigrationContext from sqlalchemy.exc import DatabaseError from twisted.internet import defer from twisted.python import util from twisted.trial import unittest from buildbot.db import connector from buildbot.db.model import UpgradeFromBefore0p9Error from buildbot.db.model import UpgradeFromBefore3p0Error from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util import querylog from buildbot.test.util.misc import TestReactorMixin class UpgradeTestMixin(db.RealDatabaseMixin, TestReactorMixin): """Supporting code to test upgrading from older versions by untarring a basedir tarball and then checking that the results are as expected.""" # class variables to set in subclasses # filename of the tarball (sibling to this file) source_tarball = None # set to true in subclasses to set up and use a real DB use_real_db = False # db URL to use, if not using a real db db_url = "sqlite:///state.sqlite" # these tests take a long 
time on platforms where sqlite is slow # (e.g., lion, see #2256) timeout = 1200 @defer.inlineCallbacks def setUpUpgradeTest(self): # set up the "real" db if desired if self.use_real_db: # note this changes self.db_url yield self.setUpRealDatabase(sqlite_memory=False) self.basedir = None if self.source_tarball: tarball = util.sibpath(__file__, self.source_tarball) if not os.path.exists(tarball): raise unittest.SkipTest("'{}' not found (normal when not building from Git)".format( tarball)) tf = tarfile.open(tarball) prefixes = set() for inf in tf: tf.extract(inf) prefixes.add(inf.name.split('/', 1)[0]) tf.close() # (note that tf.extractall isn't available in py2.4) # get the top-level dir from the tarball assert len(prefixes) == 1, "tarball has multiple top-level dirs!" self.basedir = prefixes.pop() else: if not os.path.exists("basedir"): os.makedirs("basedir") self.basedir = os.path.abspath("basedir") self.master = master = fakemaster.make_master(self) master.config.db['db_url'] = self.db_url self.db = connector.DBConnector(self.basedir) yield self.db.setServiceParent(master) yield self.db.setup(check_version=False) self._sql_log_handler = querylog.start_log_queries() @defer.inlineCallbacks def tearDownUpgradeTest(self): querylog.stop_log_queries(self._sql_log_handler) if self.use_real_db: yield self.tearDownRealDatabase() if self.basedir: shutil.rmtree(self.basedir) # save subclasses the trouble of calling our setUp and tearDown methods def setUp(self): self.setUpTestReactor() return self.setUpUpgradeTest() def tearDown(self): return self.tearDownUpgradeTest() @defer.inlineCallbacks def assertModelMatches(self): def comp(engine): # use compare_model_to_db, which gets everything but indexes with engine.connect() as conn: diff = compare_metadata(MigrationContext.configure(conn), self.db.model.metadata) if engine.dialect.name == 'mysql': # MySQL/MyISAM does not support foreign keys, which is expected. 
diff = [d for d in diff if d[0] != 'add_fk'] if diff: return diff # check indexes manually insp = sa.inspect(engine) # unique, name, column_names diff = [] for tbl in self.db.model.metadata.sorted_tables: exp = sorted([ dict(name=idx.name, unique=idx.unique and 1 or 0, column_names=sorted([c.name for c in idx.columns])) for idx in tbl.indexes], key=lambda x: x['name']) # include implied indexes on postgres and mysql if engine.dialect.name == 'mysql': implied = [idx for (tname, idx) in self.db.model.implied_indexes if tname == tbl.name] exp = sorted(exp + implied, key=lambda k: k["name"]) got = sorted(insp.get_indexes(tbl.name), key=lambda x: x['name']) if exp != got: got_names = {idx['name'] for idx in got} exp_names = {idx['name'] for idx in exp} got_info = dict((idx['name'], idx) for idx in got) exp_info = dict((idx['name'], idx) for idx in exp) for name in got_names - exp_names: diff.append("got unexpected index {} on table {}: {}".format(name, tbl.name, repr(got_info[name]))) for name in exp_names - got_names: diff.append("missing index {} on table {}".format(name, tbl.name)) for name in got_names & exp_names: gi = dict(name=name, unique=got_info[name]['unique'] and 1 or 0, column_names=sorted(got_info[name]['column_names'])) ei = exp_info[name] if gi != ei: diff.append(("index {} on table {} differs: got {}; exp {}" ).format(name, tbl.name, gi, ei)) if diff: return "\n".join(diff) return None try: diff = yield self.db.pool.do_with_engine(comp) except TypeError: # older sqlites cause failures in reflection, which manifest as a # TypeError. Reflection is only used for tests, so we can just skip # this test on such platforms. We still get the advantage of trying # the upgrade, at any rate. 
raise unittest.SkipTest("model comparison skipped: bugs in schema " "reflection on this sqlite version") if diff: self.fail("\n" + pprint.pformat(diff)) def gotError(self, e): if isinstance(e, (sqlite3.DatabaseError, DatabaseError)): if "file is encrypted or is not a database" in str(e): self.flushLoggedErrors(sqlite3.DatabaseError) self.flushLoggedErrors(DatabaseError) raise unittest.SkipTest(f"sqlite dump not readable on this machine {str(e)}") @defer.inlineCallbacks def do_test_upgrade(self, pre_callbacks=None): if pre_callbacks is None: pre_callbacks = [] for cb in pre_callbacks: yield cb try: yield self.db.model.upgrade() except Exception as e: self.gotError(e) yield self.db.pool.do(self.verify_thd) yield self.assertModelMatches() class UpgradeTestEmpty(UpgradeTestMixin, unittest.TestCase): use_real_db = True @defer.inlineCallbacks def test_emptydb_modelmatches(self): os_encoding = locale.getpreferredencoding() try: '\N{SNOWMAN}'.encode(os_encoding) except UnicodeEncodeError as e: # Default encoding of Windows console is 'cp1252' # which cannot encode the snowman. 
raise unittest.SkipTest("Cannot encode weird unicode " "on this platform with {}".format(os_encoding)) from e yield self.db.model.upgrade() yield self.assertModelMatches() class UpgradeTestV2_10_5(UpgradeTestMixin, unittest.TestCase): source_tarball = "v2.10.5.tgz" def test_upgrade(self): return self.do_test_upgrade() def verify_thd(self, conn): pass @defer.inlineCallbacks def test_got_invalid_sqlite_file(self): def upgrade(): return defer.fail(sqlite3.DatabaseError('file is encrypted or is not a database')) self.db.model.upgrade = upgrade with self.assertRaises(unittest.SkipTest): yield self.do_test_upgrade() @defer.inlineCallbacks def test_got_invalid_sqlite_file2(self): def upgrade(): return defer.fail(DatabaseError('file is encrypted or is not a database', None, None)) self.db.model.upgrade = upgrade with self.assertRaises(unittest.SkipTest): yield self.do_test_upgrade() class UpgradeTestV090b4(UpgradeTestMixin, unittest.TestCase): source_tarball = "v090b4.tgz" def gotError(self, e): self.flushLoggedErrors(UpgradeFromBefore3p0Error) def test_upgrade(self): return self.do_test_upgrade() def verify_thd(self, conn): r = conn.execute("select version from migrate_version limit 1") version = r.scalar() self.assertEqual(version, 44) def assertModelMatches(self): pass class UpgradeTestV087p1(UpgradeTestMixin, unittest.TestCase): source_tarball = "v087p1.tgz" def gotError(self, e): self.flushLoggedErrors(UpgradeFromBefore0p9Error) def verify_thd(self, conn): r = conn.execute("select version from migrate_version limit 1") version = r.scalar() self.assertEqual(version, 22) def assertModelMatches(self): pass def test_upgrade(self): return self.do_test_upgrade() buildbot-3.4.0/master/buildbot/test/integration/test_usePty.py000066400000000000000000000060551413250514000246260ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from pkg_resources import parse_version from twisted import __version__ as twistedVersion from twisted.internet import defer from buildbot.test.util.decorators import skipUnlessPlatformIs from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builder and a shellcommand step, which use usePTY class ShellMaster(RunMasterBase): @skipUnlessPlatformIs('posix') @defer.inlineCallbacks def test_usePTY(self): yield self.setupConfig(masterConfig(usePTY=True)) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "in a terminal", onlyStdout=True) self.assertTrue(res) # Twisted versions less than 17.1.0 would issue a warning: # "Argument strings and environment keys/values passed to reactor.spawnProcess # "should be str, not unicode." # This warning was unnecessary. Even in the old versions of Twisted, the # unicode arguments were encoded. 
This warning was removed in Twisted here: # # https://github.com/twisted/twisted/commit/23fa3cc05549251ea4118e4e03354d58df87eaaa if parse_version(twistedVersion) < parse_version("17.1.0"): self.flushWarnings() @skipUnlessPlatformIs('posix') @defer.inlineCallbacks def test_NOusePTY(self): yield self.setupConfig(masterConfig(usePTY=False)) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "not a terminal", onlyStdout=True) self.assertTrue(res) # master configuration def masterConfig(usePTY): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.ShellCommand( command='if [ -t 1 ] ; then echo in a terminal; else echo "not a terminal"; fi', usePTY=usePTY)) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/test_virtual_builder.py000066400000000000000000000045601413250514000265300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builder and a shellcommand step # meant to be a template for integration steps class ShellMaster(RunMasterBase): @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) builders = yield self.master.data.get(("builders",)) self.assertEqual(len(builders), 2) self.assertEqual(builders[1], { 'masterids': [], 'tags': ['virtual', '_virtual_'], 'description': 'I am a virtual builder', 'name': 'virtual_testy', 'builderid': 2}) self.assertEqual(build['builderid'], builders[1]['builderid']) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], properties={ 'virtual_builder_name': 'virtual_testy', 'virtual_builder_description': 'I am a virtual builder', 'virtual_builder_tags': ['virtual'], }, factory=f)] return c buildbot-3.4.0/master/buildbot/test/integration/test_worker.py000066400000000000000000000233571413250514000246520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from zope.interface import implementer from buildbot.config import BuilderConfig from buildbot.interfaces import IBuildStepFactory from buildbot.machine.base import Machine from buildbot.process.buildstep import BuildStep from buildbot.process.factory import BuildFactory from buildbot.process.properties import Interpolate from buildbot.process.results import CANCELLED from buildbot.process.results import RETRY from buildbot.test.fake.latent import LatentController from buildbot.test.fake.step import BuildStepController from buildbot.test.fake.worker import WorkerController from buildbot.test.util.integration import RunFakeMasterTestCase try: from buildbot_worker.bot import LocalWorker as RemoteWorker except ImportError: RemoteWorker = None @implementer(IBuildStepFactory) class StepController: def __init__(self, **kwargs): self.kwargs = kwargs self.steps = [] def buildStep(self): step_deferred = defer.Deferred() step = ControllableStep(step_deferred, **self.kwargs) self.steps.append((step, step_deferred)) return step class ControllableStep(BuildStep): def run(self): return self._step_deferred def __init__(self, step_deferred, **kwargs): super().__init__(**kwargs) self._step_deferred = step_deferred def interrupt(self, reason): self._step_deferred.callback(CANCELLED) class Tests(RunFakeMasterTestCase): @defer.inlineCallbacks def test_latent_max_builds(self): """ If max_builds is set, only one build is started on a latent worker at a time. 
""" controller = LatentController(self, 'local', max_builds=1) step_controller = StepController() config_dict = { 'builders': [ BuilderConfig(name="testy-1", workernames=["local"], factory=BuildFactory([step_controller]), ), BuilderConfig(name="testy-2", workernames=["local"], factory=BuildFactory([step_controller]), ), ], 'workers': [controller.worker], 'protocols': {'null': {}}, 'multiMaster': True, } yield self.setup_master(config_dict) builder_ids = [ (yield self.master.data.updates.findBuilderId('testy-1')), (yield self.master.data.updates.findBuilderId('testy-2')), ] started_builds = [] yield self.master.mq.startConsuming( lambda key, build: started_builds.append(build), ('builds', None, 'new')) # Trigger a buildrequest bsid, brids = yield self.master.data.updates.addBuildset( waited_for=False, builderids=builder_ids, sourcestamps=[ {'codebase': '', 'repository': '', 'branch': None, 'revision': None, 'project': ''}, ], ) # The worker fails to substantiate. controller.start_instance(True) yield controller.connect_worker() self.assertEqual(len(started_builds), 1) yield controller.auto_stop(True) @defer.inlineCallbacks def test_local_worker_max_builds(self): """ If max_builds is set, only one build is started on a worker at a time. 
""" step_controller = StepController() config_dict = { 'builders': [ BuilderConfig(name="testy-1", workernames=["local"], factory=BuildFactory([step_controller]), ), BuilderConfig(name="testy-2", workernames=["local"], factory=BuildFactory([step_controller]), ), ], 'workers': [self.createLocalWorker('local', max_builds=1)], 'protocols': {'null': {}}, 'multiMaster': True, } yield self.setup_master(config_dict) builder_ids = [ (yield self.master.data.updates.findBuilderId('testy-1')), (yield self.master.data.updates.findBuilderId('testy-2')), ] started_builds = [] yield self.master.mq.startConsuming( lambda key, build: started_builds.append(build), ('builds', None, 'new')) # Trigger a buildrequest bsid, brids = yield self.master.data.updates.addBuildset( waited_for=False, builderids=builder_ids, sourcestamps=[ {'codebase': '', 'repository': '', 'branch': None, 'revision': None, 'project': ''}, ], ) self.assertEqual(len(started_builds), 1) @defer.inlineCallbacks def test_worker_registered_to_machine(self): worker = self.createLocalWorker('worker1', machine_name='machine1') machine = Machine('machine1') config_dict = { 'builders': [ BuilderConfig(name="builder1", workernames=["worker1"], factory=BuildFactory(), ), ], 'workers': [worker], 'machines': [machine], 'protocols': {'null': {}}, 'multiMaster': True, } yield self.setup_master(config_dict) self.assertIs(worker.machine, machine) @defer.inlineCallbacks def test_worker_reconfigure_with_new_builder(self): """ Checks if we can successfully reconfigure if we add new builders to worker. """ config_dict = { 'builders': [ BuilderConfig(name="builder1", workernames=['local1'], factory=BuildFactory()), ], 'workers': [self.createLocalWorker('local1', max_builds=1)], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 
'multiMaster': True, } yield self.setup_master(config_dict) config_dict['builders'] += [ BuilderConfig(name="builder2", workernames=['local1'], factory=BuildFactory()), ] config_dict['workers'] = [self.createLocalWorker('local1', max_builds=2)] # reconfig should succeed yield self.reconfig_master(config_dict) @defer.inlineCallbacks def test_step_with_worker_build_props_during_worker_disconnect(self): """ We need to handle worker disconnection and steps with worker build properties gracefully """ controller = WorkerController(self, 'local') stepcontroller = BuildStepController() config_dict = { 'builders': [ BuilderConfig(name="builder", workernames=['local'], properties={'worker': Interpolate("%(worker:numcpus)s")}, factory=BuildFactory([stepcontroller.step])), ], 'workers': [controller.worker], 'protocols': {'null': {}}, 'multiMaster': True, } yield self.setup_master(config_dict) builder_id = yield self.master.data.updates.findBuilderId('builder') yield self.create_build_request([builder_id]) yield controller.connect_worker() self.reactor.advance(1) yield controller.disconnect_worker() self.reactor.advance(1) yield self.assertBuildResults(1, RETRY) @defer.inlineCallbacks def test_worker_os_release_info_roundtrip(self): """ Checks if we can successfully get information about the platform the worker is running on. This is very similar to test_worker_comm.TestWorkerComm.test_worker_info, except that we check details such as whether the information is passed in correct encoding. """ worker = self.createLocalWorker('local1') config_dict = { 'builders': [ BuilderConfig(name="builder1", workernames=['local1'], factory=BuildFactory()), ], 'workers': [worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 
'multiMaster': True, } yield self.setup_master(config_dict) props = worker.info from buildbot_worker.base import BotBase expected_props_dict = {} BotBase._read_os_release(BotBase.os_release_file, expected_props_dict) for key, value in expected_props_dict.items(): self.assertTrue(isinstance(key, str)) self.assertTrue(isinstance(value, str)) self.assertEqual(props.getProperty(key), value) if RemoteWorker is None: skip = "buildbot-worker package is not installed" buildbot-3.4.0/master/buildbot/test/integration/test_worker_comm.py000066400000000000000000000330541413250514000256600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from twisted.cred import credentials from twisted.internet import defer from twisted.internet import reactor from twisted.internet.endpoints import clientFromString from twisted.python import log from twisted.python import util from twisted.spread import pb from twisted.trial import unittest import buildbot from buildbot import config from buildbot import pbmanager from buildbot import worker from buildbot.process import botmaster from buildbot.process import builder from buildbot.process import factory from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util.eventual import eventually from buildbot.worker import manager as workermanager PKI_DIR = util.sibpath(__file__, 'pki') class FakeWorkerForBuilder(pb.Referenceable): """ Fake worker-side WorkerForBuilder object """ class FakeWorkerWorker(pb.Referenceable): """ Fake worker-side Worker object @ivar master_persp: remote perspective on the master """ def __init__(self, callWhenBuilderListSet): self.callWhenBuilderListSet = callWhenBuilderListSet self.master_persp = None self._detach_deferreds = [] self._detached = False def waitForDetach(self): if self._detached: return defer.succeed(None) d = defer.Deferred() self._detach_deferreds.append(d) return d def setMasterPerspective(self, persp): self.master_persp = persp # clear out master_persp on disconnect def clear_persp(): self.master_persp = None persp.broker.notifyOnDisconnect(clear_persp) def fire_deferreds(): self._detached = True self._detach_deferreds, deferreds = None, self._detach_deferreds for d in deferreds: d.callback(None) persp.broker.notifyOnDisconnect(fire_deferreds) def remote_print(self, message): log.msg("WORKER-SIDE: remote_print(%r)" % (message,)) def remote_getWorkerInfo(self): return { 'info': 'here', 'worker_commands': { 'x': 1, }, 'numcpus': 1, 'none': None, 'os_release': b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode(), 
b'\xe3\x83\xaa\xe3\x83\xaa\xe3\x83\xbc\xe3\x82\xb9\xe3' b'\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode(): b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode(), } def remote_getVersion(self): return buildbot.version def remote_getCommands(self): return {'x': 1} def remote_setBuilderList(self, builder_info): builder_names = [n for n, dir in builder_info] slbuilders = [FakeWorkerForBuilder() for n in builder_names] eventually(self.callWhenBuilderListSet) return dict(zip(builder_names, slbuilders)) class FakeBuilder(builder.Builder): def attached(self, worker, commands): return defer.succeed(None) def detached(self, worker): pass def getOldestRequestTime(self): return 0 def maybeStartBuild(self): return defer.succeed(None) class MyWorker(worker.Worker): def attached(self, conn): self.detach_d = defer.Deferred() return super().attached(conn) def detached(self): super().detached() self.detach_d, d = None, self.detach_d d.callback(None) class TestWorkerComm(unittest.TestCase, TestReactorMixin): """ Test handling of connections from workers as integrated with - Twisted Spread - real TCP connections. 
- PBManager @ivar master: fake build master @ivar pbamanger: L{PBManager} instance @ivar botmaster: L{BotMaster} instance @ivar worker: master-side L{Worker} instance @ivar workerworker: worker-side L{FakeWorkerWorker} instance @ivar port: TCP port to connect to @ivar server_connection_string: description string for the server endpoint @ivar client_connection_string_tpl: description string template for the client endpoint (expects to passed 'port') @ivar endpoint: endpoint controlling the outbound connection from worker to master """ @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) # set the worker port to a loopback address with unspecified # port self.pbmanager = self.master.pbmanager = pbmanager.PBManager() yield self.pbmanager.setServiceParent(self.master) # remove the fakeServiceParent from fake service hierarchy, and replace # by a real one yield self.master.workers.disownServiceParent() self.workers = self.master.workers = workermanager.WorkerManager( self.master) yield self.workers.setServiceParent(self.master) self.botmaster = botmaster.BotMaster() yield self.botmaster.setServiceParent(self.master) self.master.botmaster = self.botmaster self.master.data.updates.workerConfigured = lambda *a, **k: None yield self.master.startService() self.buildworker = None self.port = None self.workerworker = None self.endpoint = None self.broker = None self._detach_deferreds = [] # patch in our FakeBuilder for the regular Builder class self.patch(botmaster, 'Builder', FakeBuilder) self.server_connection_string = "tcp:0:interface=127.0.0.1" self.client_connection_string_tpl = "tcp:host=127.0.0.1:port={port}" def tearDown(self): if self.broker: del self.broker if self.endpoint: del self.endpoint deferreds = self._detach_deferreds + [ self.pbmanager.stopService(), self.botmaster.stopService(), self.workers.stopService(), ] # if the worker is still attached, wait for it to 
detach, too if self.buildworker and self.buildworker.detach_d: deferreds.append(self.buildworker.detach_d) return defer.gatherResults(deferreds) @defer.inlineCallbacks def addWorker(self, **kwargs): """ Create a master-side worker instance and add it to the BotMaster @param **kwargs: arguments to pass to the L{Worker} constructor. """ self.buildworker = MyWorker("testworker", "pw", **kwargs) # reconfig the master to get it set up new_config = self.master.config new_config.protocols = {"pb": {"port": self.server_connection_string}} new_config.workers = [self.buildworker] new_config.builders = [config.BuilderConfig( name='bldr', workername='testworker', factory=factory.BuildFactory())] yield self.botmaster.reconfigServiceWithBuildbotConfig(new_config) yield self.workers.reconfigServiceWithBuildbotConfig(new_config) # as part of the reconfig, the worker registered with the pbmanager, so # get the port it was assigned self.port = self.buildworker.registration.getPBPort() def connectWorker(self, waitForBuilderList=True): """ Connect a worker the master via PB @param waitForBuilderList: don't return until the setBuilderList has been called @returns: L{FakeWorkerWorker} and a Deferred that will fire when it is detached; via deferred """ factory = pb.PBClientFactory() creds = credentials.UsernamePassword(b"testworker", b"pw") setBuilderList_d = defer.Deferred() workerworker = FakeWorkerWorker( lambda: setBuilderList_d.callback(None)) login_d = factory.login(creds, workerworker) @login_d.addCallback def logged_in(persp): workerworker.setMasterPerspective(persp) # set up to hear when the worker side disconnects workerworker.detach_d = defer.Deferred() persp.broker.notifyOnDisconnect( lambda: workerworker.detach_d.callback(None)) self._detach_deferreds.append(workerworker.detach_d) return workerworker self.endpoint = clientFromString( reactor, self.client_connection_string_tpl.format(port=self.port)) connected_d = self.endpoint.connect(factory) dlist = [connected_d, login_d] 
if waitForBuilderList: dlist.append(setBuilderList_d) d = defer.DeferredList(dlist, consumeErrors=True, fireOnOneErrback=True) d.addCallback(lambda _: workerworker) return d def workerSideDisconnect(self, worker): """Disconnect from the worker side""" worker.master_persp.broker.transport.loseConnection() @defer.inlineCallbacks def test_connect_disconnect(self): """Test a single worker connecting and disconnecting.""" yield self.addWorker() # connect worker = yield self.connectWorker() # disconnect self.workerSideDisconnect(worker) # wait for the resulting detach yield worker.waitForDetach() @defer.inlineCallbacks def test_tls_connect_disconnect(self): """Test with TLS or SSL endpoint. According to the deprecation note for the SSL client endpoint, the TLS endpoint is supported from Twistd 16.0. TODO add certificate verification (also will require some conditionals on various versions, including PyOpenSSL, service_identity. The CA used to generate the testing cert is in ``PKI_DIR/ca`` """ def escape_colon(path): # on windows we can't have \ as it serves as the escape character for : return path.replace('\\', '/').replace(':', '\\:') self.server_connection_string = ( "ssl:port=0:certKey={pub}:privateKey={priv}:" + "interface=127.0.0.1").format( pub=escape_colon(os.path.join(PKI_DIR, '127.0.0.1.crt')), priv=escape_colon(os.path.join(PKI_DIR, '127.0.0.1.key'))) self.client_connection_string_tpl = "ssl:host=127.0.0.1:port={port}" yield self.addWorker() # connect worker = yield self.connectWorker() # disconnect self.workerSideDisconnect(worker) # wait for the resulting detach yield worker.waitForDetach() @defer.inlineCallbacks def test_worker_info(self): yield self.addWorker() worker = yield self.connectWorker() props = self.buildworker.info # check worker info passing self.assertEqual(props.getProperty("info"), "here") # check worker info passing with UTF-8 self.assertEqual(props.getProperty("os_release"), b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode()) 
self.assertEqual(props.getProperty(b'\xe3\x83\xaa\xe3\x83\xaa\xe3\x83\xbc\xe3\x82' b'\xb9\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode()), b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode()) self.assertEqual(props.getProperty("none"), None) self.assertEqual(props.getProperty("numcpus"), 1) self.workerSideDisconnect(worker) yield worker.waitForDetach() @defer.inlineCallbacks def _test_duplicate_worker(self): yield self.addWorker() # connect first worker worker1 = yield self.connectWorker() # connect second worker; this should fail try: yield self.connectWorker(waitForBuilderList=False) connect_failed = False except Exception: connect_failed = True self.assertTrue(connect_failed) # disconnect both and wait for that to percolate self.workerSideDisconnect(worker1) yield worker1.waitForDetach() # flush the exception logged for this on the master self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def _test_duplicate_worker_old_dead(self): yield self.addWorker() # connect first worker worker1 = yield self.connectWorker() # monkeypatch that worker to fail with PBConnectionLost when its # remote_print method is called def remote_print(message): worker1.master_persp.broker.transport.loseConnection() raise pb.PBConnectionLost("fake!") worker1.remote_print = remote_print # connect second worker; this should succeed, and the old worker # should be disconnected. worker2 = yield self.connectWorker() # disconnect both and wait for that to percolate self.workerSideDisconnect(worker2) yield worker1.waitForDetach() # flush the exception logged for this on the worker self.assertEqual(len(self.flushLoggedErrors(pb.PBConnectionLost)), 1) buildbot-3.4.0/master/buildbot/test/integration/test_worker_kubernetes.py000066400000000000000000000115141413250514000270710ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from unittest.case import SkipTest from twisted.internet import defer from buildbot.config import BuilderConfig from buildbot.plugins import schedulers from buildbot.plugins import steps from buildbot.process.factory import BuildFactory from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase from buildbot.util import kubeclientservice from buildbot.worker import kubernetes # This integration test creates a master and kubernetes worker environment, # It requires a kubernetes cluster up and running. It tries to get the config # like loading "~/.kube/config" files or environment variable. 
# You can use minikube to create a kubernetes environment for development: # # See https://github.com/kubernetes/minikube for full documentation # minikube start # [--vm-driver=kvm] # # export masterFQDN=$(ip route get $(minikube ip)| awk '{ print $5 }') # export KUBE_NAMESPACE=`kubectl config get-contexts \`kubectl config current-context\` # |tail -n1 |awk '{print $5}'` # useful commands: # - 'minikube dashboard' to display WebUI of the kubernetes cluster # - 'minikube ip' to display the IP of the kube-apimaster # - 'minikube ssh' to get a shell into the minikube VM # following environment variable can be used to stress concurrent worker startup NUM_CONCURRENT = int(os.environ.get("KUBE_TEST_NUM_CONCURRENT_BUILD", 1)) class KubernetesMaster(RunMasterBase): timeout = 200 def setUp(self): if "TEST_KUBERNETES" not in os.environ: raise SkipTest( "kubernetes integration tests only run when environment " "variable TEST_KUBERNETES is set") if 'masterFQDN' not in os.environ: raise SkipTest( "you need to export masterFQDN. You have example in the test file. 
" "Make sure that you're spawned worker can callback this IP") @defer.inlineCallbacks def test_trigger(self): yield self.setupConfig( masterConfig(num_concurrent=NUM_CONCURRENT), startWorker=False) yield self.doForceBuild() builds = yield self.master.data.get(("builds", )) # if there are some retry, there will be more builds self.assertEqual(len(builds), 1 + NUM_CONCURRENT) for b in builds: self.assertEqual(b['results'], SUCCESS) class KubernetesMasterTReq(KubernetesMaster): def setup(self): super().setUp() self.patch(kubernetes.KubeClientService, 'PREFER_TREQ', True) # master configuration def masterConfig(num_concurrent, extra_steps=None): if extra_steps is None: extra_steps = [] c = {} c['schedulers'] = [ schedulers.ForceScheduler(name="force", builderNames=["testy"]) ] triggereables = [] for i in range(num_concurrent): c['schedulers'].append( schedulers.Triggerable( name="trigsched" + str(i), builderNames=["build"])) triggereables.append("trigsched" + str(i)) f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) f.addStep( steps.Trigger( schedulerNames=triggereables, waitForFinish=True, updateSourceStamp=True)) f.addStep(steps.ShellCommand(command='echo world')) f2 = BuildFactory() f2.addStep(steps.ShellCommand(command='echo ola')) for step in extra_steps: f2.addStep(step) c['builders'] = [ BuilderConfig(name="testy", workernames=["kubernetes0"], factory=f), BuilderConfig( name="build", workernames=["kubernetes" + str(i) for i in range(num_concurrent)], factory=f2) ] masterFQDN = os.environ.get('masterFQDN') c['workers'] = [ kubernetes.KubeLatentWorker( 'kubernetes' + str(i), 'buildbot/buildbot-worker', kube_config=kubeclientservice.KubeCtlProxyConfigLoader( namespace=os.getenv("KUBE_NAMESPACE", "default")), masterFQDN=masterFQDN) for i in range(num_concurrent) ] # un comment for debugging what happens if things looks locked. 
# c['www'] = {'port': 8080} c['protocols'] = {"pb": {"port": "tcp:9989"}} return c buildbot-3.4.0/master/buildbot/test/integration/test_worker_latent.py000066400000000000000000001743331413250514000262220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from twisted.python.failure import Failure from twisted.spread import pb from buildbot.config import BuilderConfig from buildbot.config import MasterConfig from buildbot.interfaces import LatentWorkerCannotSubstantiate from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.interfaces import LatentWorkerSubstantiatiationCancelled from buildbot.machine.latent import States as MachineStates from buildbot.process.factory import BuildFactory from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.test.fake.latent import ControllableLatentWorker from buildbot.test.fake.latent import LatentController from buildbot.test.fake.machine import LatentMachineController from 
buildbot.test.fake.step import BuildStepController from buildbot.test.util.integration import RunFakeMasterTestCase from buildbot.test.util.misc import TimeoutableTestCase from buildbot.test.util.patch_delay import patchForDelay from buildbot.worker import manager from buildbot.worker.latent import States class TestException(Exception): """ An exception thrown in tests. """ class Latent(TimeoutableTestCase, RunFakeMasterTestCase): def tearDown(self): # Flush the errors logged by the master stop cancelling the builds. self.flushLoggedErrors(LatentWorkerSubstantiatiationCancelled) super().tearDown() @defer.inlineCallbacks def create_single_worker_config(self, controller_kwargs=None): if not controller_kwargs: controller_kwargs = {} controller = LatentController(self, 'local', **controller_kwargs) config_dict = { 'builders': [ BuilderConfig(name="testy", workernames=["local"], factory=BuildFactory(), ), ], 'workers': [controller.worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 'multiMaster': True, } yield self.setup_master(config_dict) builder_id = yield self.master.data.updates.findBuilderId('testy') return controller, builder_id @defer.inlineCallbacks def create_single_worker_config_with_step(self, controller_kwargs=None): if not controller_kwargs: controller_kwargs = {} controller = LatentController(self, 'local', **controller_kwargs) stepcontroller = BuildStepController() config_dict = { 'builders': [ BuilderConfig(name="testy", workernames=["local"], factory=BuildFactory([stepcontroller.step]), ), ], 'workers': [controller.worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 
'multiMaster': True, } yield self.setup_master(config_dict) builder_id = yield self.master.data.updates.findBuilderId('testy') return controller, stepcontroller, builder_id @defer.inlineCallbacks def create_single_worker_two_builder_config(self, controller_kwargs=None): if not controller_kwargs: controller_kwargs = {} controller = LatentController(self, 'local', **controller_kwargs) config_dict = { 'builders': [ BuilderConfig(name="testy-1", workernames=["local"], factory=BuildFactory(), ), BuilderConfig(name="testy-2", workernames=["local"], factory=BuildFactory(), ), ], 'workers': [controller.worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 'multiMaster': True, } yield self.setup_master(config_dict) builder_ids = [ (yield self.master.data.updates.findBuilderId('testy-1')), (yield self.master.data.updates.findBuilderId('testy-2')), ] return controller, builder_ids @defer.inlineCallbacks def reconfig_workers_remove_all(self): config_dict = { 'workers': [], 'multiMaster': True } config = MasterConfig.loadFromDict(config_dict, '') yield self.master.workers.reconfigServiceWithBuildbotConfig(config) def stop_first_build(self, results): stopped_d = defer.Deferred() def new_callback(_, data): if stopped_d.called: return # Stop the build buildid = data['buildid'] self.master.mq.produce(('control', 'builds', str(buildid), 'stop'), {'reason': 'no reason', 'results': results}) stopped_d.callback(None) consumed_d = self.master.mq.startConsuming(new_callback, ('builds', None, 'new')) return consumed_d, stopped_d @defer.inlineCallbacks def test_latent_workers_start_in_parallel(self): """ If there are two latent workers configured, and two build requests for them, both workers will start substantiating concurrently. 
""" controllers = [ LatentController(self, 'local1'), LatentController(self, 'local2'), ] config_dict = { 'builders': [ BuilderConfig(name="testy", workernames=["local1", "local2"], factory=BuildFactory()), ], 'workers': [controller.worker for controller in controllers], 'protocols': {'null': {}}, 'multiMaster': True, } yield self.setup_master(config_dict) builder_id = yield self.master.data.updates.findBuilderId('testy') # Request two builds. for i in range(2): yield self.create_build_request([builder_id]) # Check that both workers were requested to start. self.assertEqual(controllers[0].starting, True) self.assertEqual(controllers[1].starting, True) for controller in controllers: yield controller.start_instance(True) yield controller.auto_stop(True) @defer.inlineCallbacks def test_refused_substantiations_get_requeued(self): """ If a latent worker refuses to substantiate, the build request becomes unclaimed. """ controller, builder_id = yield self.create_single_worker_config() # Trigger a buildrequest bsid, brids = yield self.create_build_request([builder_id]) unclaimed_build_requests = [] yield self.master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) # Indicate that the worker can't start an instance. yield controller.start_instance(False) # When the substantiation fails, the buildrequest becomes unclaimed. self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) yield self.assertBuildResults(1, RETRY) yield controller.auto_stop(True) self.flushLoggedErrors(LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_failed_substantiations_get_requeued(self): """ If a latent worker fails to substantiate, the build request becomes unclaimed. 
""" controller, builder_id = yield self.create_single_worker_config() # Trigger a buildrequest bsid, brids = yield self.create_build_request([builder_id]) unclaimed_build_requests = [] yield self.master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) # The worker fails to substantiate. yield controller.start_instance( Failure(TestException("substantiation failed"))) # Flush the errors logged by the failure. self.flushLoggedErrors(TestException) # When the substantiation fails, the buildrequest becomes unclaimed. self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) yield self.assertBuildResults(1, RETRY) yield controller.auto_stop(True) @defer.inlineCallbacks def test_failed_substantiations_get_exception(self): """ If a latent worker fails to substantiate, the result is an exception. """ controller, builder_id = yield self.create_single_worker_config() # Trigger a buildrequest yield self.create_build_request([builder_id]) # The worker fails to substantiate. yield controller.start_instance( Failure(LatentWorkerCannotSubstantiate("substantiation failed"))) # Flush the errors logged by the failure. self.flushLoggedErrors(LatentWorkerCannotSubstantiate) # When the substantiation fails, the result is an exception. yield self.assertBuildResults(1, EXCEPTION) yield controller.auto_stop(True) @defer.inlineCallbacks def test_worker_accepts_builds_after_failure(self): """ If a latent worker fails to substantiate, the worker is still able to accept jobs. """ controller, builder_id = yield self.create_single_worker_config() yield controller.auto_stop(True) # Trigger a buildrequest bsid, brids = yield self.create_build_request([builder_id]) unclaimed_build_requests = [] yield self.master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) # The worker fails to substantiate. 
yield controller.start_instance( Failure(TestException("substantiation failed"))) # Flush the errors logged by the failure. self.flushLoggedErrors(TestException) # The retry logic should only trigger after a exponential backoff self.assertEqual(controller.starting, False) # advance the time to the point where we should retry self.reactor.advance(controller.worker.quarantine_initial_timeout) # If the worker started again after the failure, then the retry logic will have # already kicked in to start a new build on this (the only) worker. We check that # a new instance was requested, which indicates that the worker # accepted the build. self.assertEqual(controller.starting, True) # The worker fails to substantiate(again). yield controller.start_instance( Failure(TestException("substantiation failed"))) # Flush the errors logged by the failure. self.flushLoggedErrors(TestException) yield self.assertBuildResults(1, RETRY) # advance the time to the point where we should not retry self.reactor.advance(controller.worker.quarantine_initial_timeout) self.assertEqual(controller.starting, False) # advance the time to the point where we should retry self.reactor.advance(controller.worker.quarantine_initial_timeout) self.assertEqual(controller.starting, True) controller.auto_start(True) controller.auto_stop(True) @defer.inlineCallbacks def test_worker_multiple_substantiations_succeed(self): """ If multiple builders trigger try to substantiate a worker at the same time, if the substantiation succeeds then all of the builds proceed. """ controller, builder_ids = yield self.create_single_worker_two_builder_config() # Trigger a buildrequest bsid, brids = yield self.create_build_request(builder_ids) # The worker succeeds to substantiate. 
yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) yield self.assertBuildResults(2, SUCCESS) yield controller.auto_stop(True) @defer.inlineCallbacks def test_very_late_detached_after_substantiation(self): ''' A latent worker may detach at any time after stop_instance() call. Make sure it works at the most late detachment point, i.e. when we're substantiating again. ''' controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) yield self.create_build_request([builder_id]) self.assertTrue(controller.starting) controller.auto_disconnect_worker = False yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.reactor.advance(1) # stop the instance, but don't disconnect the worker up to until just # before we complete start_instance() self.assertTrue(controller.stopping) yield controller.stop_instance(True) self.assertTrue(controller.stopped) yield self.create_build_request([builder_id]) self.assertTrue(controller.starting) yield controller.disconnect_worker() yield controller.start_instance(True) yield self.assertBuildResults(2, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) yield controller.disconnect_worker() @defer.inlineCallbacks def test_substantiation_during_stop_instance(self): ''' If a latent worker detaches before stop_instance() completes and we start a build then it should start successfully without causing an erroneous cancellation of the substantiation request. 
''' controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) # Trigger a single buildrequest yield self.create_build_request([builder_id]) self.assertEqual(True, controller.starting) # start instance controller.auto_disconnect_worker = False yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.reactor.advance(1) self.assertTrue(controller.stopping) yield controller.disconnect_worker() # now create a buildrequest that will substantiate the build. It should # either not start at all until the instance finished substantiating, # or the substantiation request needs to be recorded and start # immediately after stop_instance completes. yield self.create_build_request([builder_id]) yield controller.stop_instance(True) yield controller.start_instance(True) yield self.assertBuildResults(2, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) yield controller.disconnect_worker() @defer.inlineCallbacks def test_substantiation_during_stop_instance_canStartBuild_race(self): ''' If build attempts substantiation after the latent worker detaches, but stop_instance() is not completed yet, then we should successfully complete substantiation without causing an erroneous cancellation. The above sequence of events was possible even if canStartBuild checked for a in-progress insubstantiation, as if the build is scheduled before insubstantiation, its start could be delayed until when stop_instance() is in progress. 
''' controller, builder_ids = yield self.create_single_worker_two_builder_config( controller_kwargs=dict(build_wait_timeout=1)) # Trigger a single buildrequest yield self.create_build_request([builder_ids[0]]) self.assertEqual(True, controller.starting) # start instance yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) with patchForDelay('buildbot.process.builder.Builder.maybeStartBuild') as delay: # create a build request which will result in a build, but it won't # attempt to substantiate until after stop_instance() is in progress yield self.create_build_request([builder_ids[1]]) self.assertEqual(len(delay), 1) self.reactor.advance(1) self.assertTrue(controller.stopping) delay.fire() yield controller.stop_instance(True) self.assertTrue(controller.starting) yield controller.start_instance(True) yield self.assertBuildResults(2, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) @defer.inlineCallbacks def test_insubstantiation_during_substantiation_refuses_substantiation(self): """ If a latent worker gets insubstantiation() during substantiation, then it should refuse to substantiate. """ controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) # insubstantiate during start_instance(). Note that failed substantiation is notified only # after the latent workers completes start-stop cycle. yield self.create_build_request([builder_id]) d = controller.worker.insubstantiate() yield controller.start_instance(False) yield controller.stop_instance(True) yield d yield self.assertBuildResults(1, RETRY) @defer.inlineCallbacks def test_stopservice_during_insubstantiation_completes(self): """ When stopService is called and a worker is insubstantiating, we should wait for this process to complete. 
""" controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) # Substantiate worker via a build yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.assertTrue(controller.started) # Wait until worker starts insubstantiation and then shutdown worker self.reactor.advance(1) self.assertTrue(controller.stopping) d = self.reconfig_workers_remove_all() self.assertFalse(d.called) yield controller.stop_instance(True) yield d @parameterized.expand([ ('with_substantiation_failure', False, False), ('without_worker_connecting', True, False), ('with_worker_connecting', True, True), ]) @defer.inlineCallbacks def test_stopservice_during_substantiation_completes(self, name, subst_success, worker_connects): """ When stopService is called and a worker is substantiating, we should wait for this process to complete. """ controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) controller.auto_connect_worker = worker_connects # Substantiate worker via a build yield self.create_build_request([builder_id]) self.assertTrue(controller.starting) d = self.reconfig_workers_remove_all() self.assertFalse(d.called) yield controller.start_instance(subst_success) # we should stop the instance immediately after it substantiates regardless of the result self.assertTrue(controller.stopping) yield controller.stop_instance(True) yield d @defer.inlineCallbacks def test_substantiation_is_cancelled_by_build_stop(self): """ Stopping a build during substantiation should cancel the substantiation itself. Otherwise we will be left with a substantiating worker without a corresponding build which means that master shutdown may not work correctly. 
""" controller, builder_id = yield self.create_single_worker_config() controller.auto_connect_worker = False controller.auto_stop(True) # Substantiate worker via a build yield self.create_build_request([builder_id]) yield controller.start_instance(True) self.master.mq.produce(('control', 'builds', '1', 'stop'), {'reason': 'no reason'}) self.reactor.advance(1) # force build to actually execute the stop instruction self.assertTrue(controller.stopped) @parameterized.expand([ ('after_start_instance_no_worker', False, False), ('after_start_instance_with_worker', True, False), ('before_start_instance_no_worker', False, True), ('before_start_instance_with_worker', True, True), ]) @defer.inlineCallbacks def test_reconfigservice_during_substantiation_clean_shutdown_after(self, name, worker_connects, before_start_service): """ When stopService is called and a worker is substantiating, we should wait for this process to complete. """ registered_workers = [] def registration_updates(reg, worker_config, global_config): registered_workers.append((worker_config.workername, worker_config.password)) self.patch(manager.WorkerRegistration, 'update', registration_updates) controller, builder_id = yield self.create_single_worker_config() controller.auto_connect_worker = worker_connects controller.auto_stop(True) # Substantiate worker via a build yield self.create_build_request([builder_id]) self.assertTrue(controller.starting) # change some unimportant property of the worker to force configuration self.master.config_loader.config_dict['workers'] = [ ControllableLatentWorker('local', controller, max_builds=3) ] if before_start_service: yield self.reconfig_master() yield controller.start_instance(True) else: yield controller.start_instance(True) yield self.reconfig_master() yield self.clean_master_shutdown(quick=True) self.assertEqual(registered_workers, [('local', 'password_1'), ('local', 'password_1')]) @defer.inlineCallbacks def 
test_substantiation_cancelled_by_insubstantiation_when_waiting_for_insubstantiation(self): """ We should cancel substantiation if we insubstantiate when that substantiation is waiting on current insubstantiation to finish """ controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) yield self.create_build_request([builder_id]) # put the worker into insubstantiation phase yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.reactor.advance(1) self.assertTrue(controller.stopping) # build should wait on the insubstantiation yield self.create_build_request([builder_id]) self.assertEqual(controller.worker.state, States.INSUBSTANTIATING_SUBSTANTIATING) # build should be requeued if we insubstantiate. d = controller.worker.insubstantiate() yield controller.stop_instance(True) yield d yield self.assertBuildResults(2, RETRY) @defer.inlineCallbacks def test_stalled_substantiation_then_timeout_get_requeued(self): """ If a latent worker substantiate, but not connect, and then be unsubstantiated, the build request becomes unclaimed. """ controller, builder_id = yield self.create_single_worker_config() # Trigger a buildrequest bsid, brids = yield self.create_build_request([builder_id]) unclaimed_build_requests = [] yield self.master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) # We never start the worker, rather timeout it. self.reactor.advance(controller.worker.missing_timeout) # Flush the errors logged by the failure. self.flushLoggedErrors(defer.TimeoutError) # When the substantiation fails, the buildrequest becomes unclaimed. 
self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) yield controller.start_instance(False) yield controller.auto_stop(True) @defer.inlineCallbacks def test_sever_connection_before_ping_then_timeout_get_requeued(self): """ If a latent worker connects, but its connection is severed without notification in the TCP layer, we successfully wait until TCP times out and requeue the build. """ controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) bsid, brids = yield self.create_build_request([builder_id]) # sever connection just before ping() with patchForDelay( 'buildbot.process.workerforbuilder.AbstractWorkerForBuilder.ping') as delay: yield controller.start_instance(True) controller.sever_connection() delay.fire() # lose connection after TCP times out self.reactor.advance(100) yield controller.disconnect_worker() yield self.assertBuildResults(1, RETRY) # the worker will be put into quarantine self.reactor.advance(controller.worker.quarantine_initial_timeout) yield controller.stop_instance(True) yield controller.start_instance(True) yield self.assertBuildResults(2, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) self.flushLoggedErrors(pb.PBConnectionLost) @defer.inlineCallbacks def test_failed_sendBuilderList_get_requeued(self): """ sendBuilderList can fail due to missing permissions on the workdir, the build request becomes unclaimed """ controller, builder_id = yield self.create_single_worker_config() # Trigger a buildrequest bsid, brids = yield self.create_build_request([builder_id]) unclaimed_build_requests = [] yield self.master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) logs = [] yield self.master.mq.startConsuming( lambda key, log: logs.append(log), ('logs', None, 'new')) # The worker succeed to substantiate def remote_setBuilderList(self, dirs): raise TestException("can't 
create dir") controller.patchBot(self, 'remote_setBuilderList', remote_setBuilderList) yield controller.start_instance(True) # Flush the errors logged by the failure. self.flushLoggedErrors(TestException) # When the substantiation fails, the buildrequest becomes unclaimed. self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) # should get 2 logs (html and txt) with proper information in there self.assertEqual(len(logs), 2) logs_by_name = {} for _log in logs: fulllog = yield self.master.data.get(("logs", str(_log['logid']), "raw")) logs_by_name[fulllog['filename']] = fulllog['raw'] for i in ["err_text", "err_html"]: self.assertIn("can't create dir", logs_by_name[i]) # make sure stacktrace is present in html self.assertIn("buildbot.test.integration.test_worker_latent.TestException", logs_by_name[i]) yield controller.auto_stop(True) @defer.inlineCallbacks def test_failed_ping_get_requeued(self): """ sendBuilderList can fail due to missing permissions on the workdir, the build request becomes unclaimed """ controller, builder_id = yield self.create_single_worker_config() # Trigger a buildrequest bsid, brids = yield self.create_build_request([builder_id]) unclaimed_build_requests = [] yield self.master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) logs = [] yield self.master.mq.startConsuming( lambda key, log: logs.append(log), ('logs', None, 'new')) # The worker succeed to substantiate def remote_print(self, msg): if msg == "ping": raise TestException("can't ping") controller.patchBot(self, 'remote_print', remote_print) yield controller.start_instance(True) # Flush the errors logged by the failure. self.flushLoggedErrors(TestException) # When the substantiation fails, the buildrequest becomes unclaimed. 
self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) # should get 2 logs (html and txt) with proper information in there self.assertEqual(len(logs), 2) logs_by_name = {} for _log in logs: fulllog = yield self.master.data.get(("logs", str(_log['logid']), "raw")) logs_by_name[fulllog['filename']] = fulllog['raw'] for i in ["err_text", "err_html"]: self.assertIn("can't ping", logs_by_name[i]) # make sure stacktrace is present in html self.assertIn("buildbot.test.integration.test_worker_latent.TestException", logs_by_name[i]) yield controller.auto_stop(True) @defer.inlineCallbacks def test_worker_close_connection_while_building(self): """ If the worker close connection in the middle of the build, the next build can start correctly """ controller, stepcontroller, builder_id = yield self.create_single_worker_config_with_step( controller_kwargs=dict(build_wait_timeout=0) ) # Request a build and disconnect midway controller.auto_disconnect_worker = False yield self.create_build_request([builder_id]) yield controller.auto_stop(True) self.assertTrue(controller.starting) yield controller.start_instance(True) yield self.assertBuildResults(1, None) yield controller.disconnect_worker() yield self.assertBuildResults(1, RETRY) # Now check that the build requeued and finished with success yield controller.start_instance(True) yield self.assertBuildResults(2, None) stepcontroller.finish_step(SUCCESS) yield self.assertBuildResults(2, SUCCESS) yield controller.disconnect_worker() @defer.inlineCallbacks def test_negative_build_timeout_reattach_substantiated(self): """ When build_wait_timeout is negative, we don't disconnect the worker from our side. We should still support accidental disconnections from worker side due to, e.g. network problems. 
""" controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=-1)) controller.auto_disconnect_worker = False controller.auto_connect_worker = False # Substantiate worker via a build yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield controller.connect_worker() yield self.assertBuildResults(1, SUCCESS) self.assertTrue(controller.started) # Now disconnect and reconnect worker and check whether we can still # build. This should not change the worker state from our side. yield controller.disconnect_worker() self.assertTrue(controller.started) yield controller.connect_worker() self.assertTrue(controller.started) yield self.create_build_request([builder_id]) yield self.assertBuildResults(1, SUCCESS) # The only way to stop worker with negative build timeout is to # insubstantiate explicitly yield controller.auto_stop(True) yield controller.worker.insubstantiate() yield controller.disconnect_worker() @defer.inlineCallbacks def test_sever_connection_while_building(self): """ If the connection to worker is severed without TCP notification in the middle of the build, the build is re-queued and successfully restarted. """ controller, stepcontroller, builder_id = yield self.create_single_worker_config_with_step( controller_kwargs=dict(build_wait_timeout=0)) # Request a build and disconnect midway yield self.create_build_request([builder_id]) yield controller.auto_stop(True) self.assertTrue(controller.starting) yield controller.start_instance(True) yield self.assertBuildResults(1, None) # sever connection and lose it after TCP times out controller.sever_connection() self.reactor.advance(100) yield controller.disconnect_worker() yield self.assertBuildResults(1, RETRY) # Request one build. 
yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield self.assertBuildResults(2, None) stepcontroller.finish_step(SUCCESS) yield self.assertBuildResults(2, SUCCESS) @defer.inlineCallbacks def test_sever_connection_during_insubstantiation(self): """ If latent worker connection is severed without notification in the TCP layer, we successfully wait until TCP times out, insubstantiate and can substantiate after that. """ controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) # sever connection just before insubstantiation and lose it after TCP # times out with patchForDelay('buildbot.worker.base.AbstractWorker.disconnect') as delay: self.reactor.advance(1) self.assertTrue(controller.stopping) controller.sever_connection() delay.fire() yield controller.stop_instance(True) self.reactor.advance(100) yield controller.disconnect_worker() # create new build request and verify it works yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) self.flushLoggedErrors(pb.PBConnectionLost) @defer.inlineCallbacks def test_sever_connection_during_insubstantiation_and_buildrequest(self): """ If latent worker connection is severed without notification in the TCP layer, we successfully wait until TCP times out, insubstantiate and can substantiate after that. 
In this the subsequent build request is created during insubstantiation """ controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) # sever connection just before insubstantiation and lose it after TCP # times out with patchForDelay('buildbot.worker.base.AbstractWorker.disconnect') as delay: self.reactor.advance(1) self.assertTrue(controller.stopping) yield self.create_build_request([builder_id]) controller.sever_connection() delay.fire() yield controller.stop_instance(True) self.reactor.advance(100) yield controller.disconnect_worker() # verify the previously created build successfully completes yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) self.flushLoggedErrors(pb.PBConnectionLost) @defer.inlineCallbacks def test_negative_build_timeout_reattach_insubstantiating(self): """ When build_wait_timeout is negative, we don't disconnect the worker from our side, but it can disconnect and reattach from worker side due to, e.g. network problems. """ controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=-1)) controller.auto_disconnect_worker = False controller.auto_connect_worker = False # Substantiate worker via a build yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield controller.connect_worker() yield self.assertBuildResults(1, SUCCESS) self.assertTrue(controller.started) # Now start insubstantiation and disconnect and reconnect the worker. # It should not change worker state from master side. 
d = controller.worker.insubstantiate() self.assertTrue(controller.stopping) yield controller.disconnect_worker() self.assertTrue(controller.stopping) yield controller.connect_worker() self.assertTrue(controller.stopping) yield controller.stop_instance(True) yield d self.assertTrue(controller.stopped) yield controller.disconnect_worker() # Now substantiate the worker and verify build succeeds yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield controller.connect_worker() yield self.assertBuildResults(1, SUCCESS) controller.auto_disconnect_worker = True yield controller.auto_stop(True) @defer.inlineCallbacks def test_negative_build_timeout_no_disconnect_insubstantiating(self): """ When build_wait_timeout is negative, we don't disconnect the worker from our side, so it should be possible to insubstantiate and substantiate it without problems if the worker does not disconnect either. """ controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=-1)) controller.auto_disconnect_worker = False controller.auto_connect_worker = False # Substantiate worker via a build yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield controller.connect_worker() yield self.assertBuildResults(1, SUCCESS) self.assertTrue(controller.started) # Insubstantiate worker without disconnecting it d = controller.worker.insubstantiate() self.assertTrue(controller.stopping) yield controller.stop_instance(True) yield d self.assertTrue(controller.stopped) # Now substantiate the worker without connecting it yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) controller.auto_disconnect_worker = True yield controller.auto_stop(True) @defer.inlineCallbacks def test_negative_build_timeout_insubstantiates_on_master_shutdown(self): """ When build_wait_timeout is negative, we should still insubstantiate when master 
shuts down. """ controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=-1)) # Substantiate worker via a build yield self.create_build_request([builder_id]) yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.assertTrue(controller.started) # Shutdown master d = self.master.stopService() yield controller.stop_instance(True) yield d @defer.inlineCallbacks def test_stop_instance_synchronous_exception(self): """ Throwing a synchronous exception from stop_instance should allow subsequent build to start. """ controller, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) controller.auto_stop(True) # patch stop_instance() to raise exception synchronously def raise_stop_instance(fast): raise TestException() real_stop_instance = controller.worker.stop_instance controller.worker.stop_instance = raise_stop_instance # create a build and wait for stop yield self.create_build_request([builder_id]) yield controller.start_instance(True) self.reactor.advance(1) yield self.assertBuildResults(1, SUCCESS) self.flushLoggedErrors(TestException) # unpatch stop_instance() and call it to cleanup state of fake worker controller controller.worker.stop_instance = real_stop_instance yield controller.worker.stop_instance(False) self.reactor.advance(1) # subsequent build should succeed yield self.create_build_request([builder_id]) yield controller.start_instance(True) self.reactor.advance(1) yield self.assertBuildResults(2, SUCCESS) @defer.inlineCallbacks def test_build_stop_with_cancelled_during_substantiation(self): """ If a build is stopping during latent worker substantiating, the build becomes cancelled """ controller, builder_id = yield self.create_single_worker_config() consumed_d, stopped_d = self.stop_first_build(CANCELLED) yield consumed_d # Trigger a buildrequest yield self.create_build_request([builder_id]) yield stopped_d # Indicate that the worker can't 
start an instance. yield controller.start_instance(False) yield self.assertBuildResults(1, CANCELLED) yield controller.auto_stop(True) self.flushLoggedErrors(LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_build_stop_with_retry_during_substantiation(self): """ If master is shutting down during latent worker substantiating, the build becomes retry. """ controller, builder_id = yield self.create_single_worker_config() consumed_d, stopped_d = self.stop_first_build(RETRY) yield consumed_d # Trigger a buildrequest _, brids = yield self.create_build_request([builder_id]) unclaimed_build_requests = [] yield self.master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) yield stopped_d # Indicate that the worker can't start an instance. yield controller.start_instance(False) yield self.assertBuildResults(1, RETRY) self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) yield controller.auto_stop(True) self.flushLoggedErrors(LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_rejects_build_on_instance_with_different_type_timeout_zero(self): """ If latent worker supports getting its instance type from properties that are rendered from build then the buildrequestdistributor must not schedule any builds on workers that are running different instance type than what these builds will require. """ controller, stepcontroller, builder_id = \ yield self.create_single_worker_config_with_step( controller_kwargs=dict( kind=Interpolate('%(prop:worker_kind)s'), build_wait_timeout=0 ) ) # create build request yield self.create_build_request([builder_id], properties=Properties(worker_kind='a')) # start the build and verify the kind of the worker. 
Note that the # buildmaster needs to restart the worker in order to change the worker # kind, so we allow it both to auto start and stop self.assertEqual(True, controller.starting) controller.auto_start(True) yield controller.auto_stop(True) self.assertEqual((yield controller.get_started_kind()), 'a') # before the other build finished, create another build request yield self.create_build_request([builder_id], properties=Properties(worker_kind='b')) stepcontroller.finish_step(SUCCESS) # give the botmaster chance to insubstantiate the worker and # maybe substantiate it for the pending build the builds on worker self.reactor.advance(0.1) # verify that the second build restarted with the expected instance # kind self.assertEqual((yield controller.get_started_kind()), 'b') stepcontroller.finish_step(SUCCESS) yield self.assertBuildResults(1, SUCCESS) yield self.assertBuildResults(2, SUCCESS) @defer.inlineCallbacks def test_rejects_build_on_instance_with_different_type_timeout_nonzero(self): """ If latent worker supports getting its instance type from properties that are rendered from build then the buildrequestdistributor must not schedule any builds on workers that are running different instance type than what these builds will require. """ controller, stepcontroller, builder_id = \ yield self.create_single_worker_config_with_step( controller_kwargs=dict( kind=Interpolate('%(prop:worker_kind)s'), build_wait_timeout=5 ) ) # create build request yield self.create_build_request([builder_id], properties=Properties(worker_kind='a')) # start the build and verify the kind of the worker. 
Note that the # buildmaster needs to restart the worker in order to change the worker # kind, so we allow it both to auto start and stop self.assertEqual(True, controller.starting) controller.auto_start(True) yield controller.auto_stop(True) self.assertEqual((yield controller.get_started_kind()), 'a') # before the other build finished, create another build request yield self.create_build_request([builder_id], properties=Properties(worker_kind='b')) stepcontroller.finish_step(SUCCESS) # give the botmaster chance to insubstantiate the worker and # maybe substantiate it for the pending build the builds on worker self.reactor.advance(0.1) # verify build has not started, even though the worker is waiting # for one self.assertIsNone((yield self.master.db.builds.getBuild(2))) self.assertTrue(controller.started) # wait until the latent worker times out, is insubstantiated, # is substantiated because of pending buildrequest and starts the build self.reactor.advance(6) self.assertIsNotNone((yield self.master.db.builds.getBuild(2))) # verify that the second build restarted with the expected instance # kind self.assertEqual((yield controller.get_started_kind()), 'b') stepcontroller.finish_step(SUCCESS) yield self.assertBuildResults(1, SUCCESS) yield self.assertBuildResults(2, SUCCESS) @defer.inlineCallbacks def test_supports_no_build_for_substantiation(self): """ Abstract latent worker should support being substantiated without a build and then insubstantiated. """ controller, _ = yield self.create_single_worker_config() controller.worker.substantiate(None, None) yield controller.start_instance(True) self.assertTrue(controller.started) d = controller.worker.insubstantiate() yield controller.stop_instance(True) yield d @defer.inlineCallbacks def test_supports_no_build_for_substantiation_accepts_build_later(self): """ Abstract latent worker should support being substantiated without a build and then accept a build request. 
""" controller, stepcontroller, builder_id = \ yield self.create_single_worker_config_with_step( controller_kwargs=dict(build_wait_timeout=1)) controller.worker.substantiate(None, None) yield controller.start_instance(True) self.assertTrue(controller.started) self.create_build_request([builder_id]) stepcontroller.finish_step(SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) class LatentWithLatentMachine(TimeoutableTestCase, RunFakeMasterTestCase): def tearDown(self): # Flush the errors logged by the master stop cancelling the builds. self.flushLoggedErrors(LatentWorkerSubstantiatiationCancelled) super().tearDown() @defer.inlineCallbacks def create_single_worker_config(self, build_wait_timeout=0): machine_controller = LatentMachineController( name='machine1', build_wait_timeout=build_wait_timeout) worker_controller = LatentController(self, 'worker1', machine_name='machine1') step_controller = BuildStepController() config_dict = { 'machines': [machine_controller.machine], 'builders': [ BuilderConfig(name="builder1", workernames=["worker1"], factory=BuildFactory([step_controller.step]), ), ], 'workers': [worker_controller.worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 
'multiMaster': True, } yield self.setup_master(config_dict) builder_id = yield self.master.data.updates.findBuilderId('builder1') return machine_controller, worker_controller, step_controller, builder_id @defer.inlineCallbacks def create_two_worker_config(self, build_wait_timeout=0, controller_kwargs=None): if not controller_kwargs: controller_kwargs = {} machine_controller = LatentMachineController( name='machine1', build_wait_timeout=build_wait_timeout) worker1_controller = LatentController(self, 'worker1', machine_name='machine1', **controller_kwargs) worker2_controller = LatentController(self, 'worker2', machine_name='machine1', **controller_kwargs) step1_controller = BuildStepController() step2_controller = BuildStepController() config_dict = { 'machines': [machine_controller.machine], 'builders': [ BuilderConfig(name="builder1", workernames=["worker1"], factory=BuildFactory([step1_controller.step]), ), BuilderConfig(name="builder2", workernames=["worker2"], factory=BuildFactory([step2_controller.step]), ), ], 'workers': [worker1_controller.worker, worker2_controller.worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 
'multiMaster': True, } yield self.setup_master(config_dict) builder1_id = yield self.master.data.updates.findBuilderId('builder1') builder2_id = yield self.master.data.updates.findBuilderId('builder2') return (machine_controller, [worker1_controller, worker2_controller], [step1_controller, step2_controller], [builder1_id, builder2_id]) @defer.inlineCallbacks def test_1worker_starts_and_stops_after_single_build_success(self): machine_controller, worker_controller, step_controller, builder_id = \ yield self.create_single_worker_config() worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.create_build_request([builder_id]) machine_controller.start_machine(True) self.assertTrue(worker_controller.started) step_controller.finish_step(SUCCESS) self.reactor.advance(0) # force deferred suspend call to be executed machine_controller.stop_machine() self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_1worker_starts_and_stops_after_single_build_failure(self): machine_controller, worker_controller, step_controller, builder_id = \ yield self.create_single_worker_config() worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.create_build_request([builder_id]) machine_controller.start_machine(True) self.assertTrue(worker_controller.started) step_controller.finish_step(FAILURE) self.reactor.advance(0) # force deferred stop call to be executed machine_controller.stop_machine() self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_1worker_stops_machine_after_timeout(self): machine_controller, worker_controller, step_controller, builder_id = \ yield self.create_single_worker_config(build_wait_timeout=5) worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.create_build_request([builder_id]) machine_controller.start_machine(True) self.reactor.advance(10.0) step_controller.finish_step(SUCCESS) 
self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) self.reactor.advance(4.9) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) # put clock 5s after step finish, machine should start suspending self.reactor.advance(0.1) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPING) machine_controller.stop_machine() self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_1worker_does_not_stop_machine_machine_after_timeout_during_build(self): machine_controller, worker_controller, step_controller, builder_id = \ yield self.create_single_worker_config(build_wait_timeout=5) worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.create_build_request([builder_id]) machine_controller.start_machine(True) self.reactor.advance(10.0) step_controller.finish_step(SUCCESS) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) # create build request while machine is still awake. 
It should not # suspend regardless of how much time passes self.reactor.advance(4.9) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) yield self.create_build_request([builder_id]) self.reactor.advance(5.1) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) step_controller.finish_step(SUCCESS) self.reactor.advance(4.9) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) # put clock 5s after step finish, machine should start suspending self.reactor.advance(0.1) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPING) machine_controller.stop_machine() self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_1worker_insubstantiated_after_start_failure(self): machine_controller, worker_controller, step_controller, builder_id = \ yield self.create_single_worker_config() worker_controller.auto_connect_worker = False worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.create_build_request([builder_id]) machine_controller.start_machine(False) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) self.assertEqual(worker_controller.started, False) @defer.inlineCallbacks def test_1worker_eats_exception_from_start_machine(self): machine_controller, worker_controller, step_controller, builder_id = \ yield self.create_single_worker_config() worker_controller.auto_connect_worker = False worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.create_build_request([builder_id]) class FakeError(Exception): pass machine_controller.start_machine(FakeError('start error')) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) self.assertEqual(worker_controller.started, False) self.flushLoggedErrors(FakeError) @defer.inlineCallbacks def test_1worker_eats_exception_from_stop_machine(self): machine_controller, worker_controller, step_controller, builder_id = \ yield 
self.create_single_worker_config() worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.create_build_request([builder_id]) machine_controller.start_machine(True) step_controller.finish_step(SUCCESS) self.reactor.advance(0) # force deferred suspend call to be executed class FakeError(Exception): pass machine_controller.stop_machine(FakeError('stop error')) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) self.flushLoggedErrors(FakeError) @defer.inlineCallbacks def test_2workers_build_substantiates_insubstantiates_both_workers(self): machine_controller, worker_controllers, step_controllers, builder_ids = \ yield self.create_two_worker_config( controller_kwargs=dict(starts_without_substantiate=True)) for wc in worker_controllers: wc.auto_start(True) wc.auto_stop(True) yield self.create_build_request([builder_ids[0]]) machine_controller.start_machine(True) for wc in worker_controllers: self.assertTrue(wc.started) step_controllers[0].finish_step(SUCCESS) self.reactor.advance(0) # force deferred suspend call to be executed machine_controller.stop_machine() for wc in worker_controllers: self.assertFalse(wc.started) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_2workers_two_builds_start_machine_concurrently(self): machine_controller, worker_controllers, step_controllers, builder_ids = \ yield self.create_two_worker_config() for wc in worker_controllers: wc.auto_start(True) wc.auto_stop(True) yield self.create_build_request([builder_ids[0]]) self.assertEqual(machine_controller.machine.state, MachineStates.STARTING) yield self.create_build_request([builder_ids[1]]) machine_controller.start_machine(True) for wc in worker_controllers: self.assertTrue(wc.started) step_controllers[0].finish_step(SUCCESS) step_controllers[1].finish_step(SUCCESS) self.reactor.advance(0) # force deferred suspend call to be executed machine_controller.stop_machine() for wc in 
worker_controllers: self.assertFalse(wc.started) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_2workers_insubstantiated_after_one_start_failure(self): machine_controller, worker_controllers, step_controllers, builder_ids = \ yield self.create_two_worker_config() for wc in worker_controllers: wc.auto_connect_worker = False wc.auto_start(True) wc.auto_stop(True) yield self.create_build_request([builder_ids[0]]) machine_controller.start_machine(False) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) for wc in worker_controllers: self.assertEqual(wc.started, False) buildbot-3.4.0/master/buildbot/test/integration/test_worker_marathon.py000066400000000000000000000113731413250514000265360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from unittest.case import SkipTest from twisted.internet import defer from buildbot.config import BuilderConfig from buildbot.plugins import schedulers from buildbot.plugins import steps from buildbot.process.factory import BuildFactory from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase from buildbot.worker.marathon import MarathonLatentWorker # This integration test creates a master and marathon worker environment, # It requires environment variable set to your marathon hosting. # you can use the mesos-compose to create a marathon environment for development: # git clone https://github.com/bobrik/mesos-compose.git # cd mesos-compose # make run # then set the environment variable to run the test: # export BBTEST_MARATHON_URL=http://localhost:8080 # following environment variable can be used to stress concurrent worker startup NUM_CONCURRENT = int(os.environ.get("MARATHON_TEST_NUM_CONCURRENT_BUILD", 1)) # if you run the stress test against a real mesos deployment, you want to also use https and basic # credentials export BBTEST_MARATHON_CREDS=login:passwd class MarathonMaster(RunMasterBase): def setUp(self): if "BBTEST_MARATHON_URL" not in os.environ: raise SkipTest( "marathon integration tests only run when environment variable BBTEST_MARATHON_URL" " is with url to Marathon api ") @defer.inlineCallbacks def test_trigger(self): yield self.setupConfig(masterConfig(num_concurrent=NUM_CONCURRENT), startWorker=False) yield self.doForceBuild() builds = yield self.master.data.get(("builds",)) # if there are some retry, there will be more builds self.assertEqual(len(builds), 1 + NUM_CONCURRENT) for b in builds: self.assertEqual(b['results'], SUCCESS) # master configuration def masterConfig(num_concurrent, extra_steps=None): if extra_steps is None: extra_steps = [] c = {} c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] triggereables = [] for 
i in range(num_concurrent): c['schedulers'].append( schedulers.Triggerable( name="trigsched" + str(i), builderNames=["build"])) triggereables.append("trigsched" + str(i)) f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) f.addStep(steps.Trigger(schedulerNames=triggereables, waitForFinish=True, updateSourceStamp=True)) f.addStep(steps.ShellCommand(command='echo world')) f2 = BuildFactory() f2.addStep(steps.ShellCommand(command='echo ola')) for step in extra_steps: f2.addStep(step) c['builders'] = [ BuilderConfig(name="testy", workernames=["marathon0"], factory=f), BuilderConfig(name="build", workernames=["marathon" + str(i) for i in range(num_concurrent)], factory=f2)] url = os.environ.get('BBTEST_MARATHON_URL') creds = os.environ.get('BBTEST_MARATHON_CREDS') if creds is not None: user, password = creds.split(":") else: user = password = None masterFQDN = os.environ.get('masterFQDN') marathon_extra_config = { } c['workers'] = [ MarathonLatentWorker('marathon' + str(i), url, user, password, 'buildbot/buildbot-worker:master', marathon_extra_config=marathon_extra_config, masterFQDN=masterFQDN) for i in range(num_concurrent) ] # un comment for debugging what happens if things looks locked. # c['www'] = {'port': 8080} # if the masterFQDN is forced (proxy case), then we use 9989 default port # else, we try to find a free port if masterFQDN is not None: c['protocols'] = {"pb": {"port": "tcp:9989"}} else: c['protocols'] = {"pb": {"port": "tcp:0"}} return c buildbot-3.4.0/master/buildbot/test/integration/test_worker_proxy.py000066400000000000000000000141101413250514000260760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import asyncio import multiprocessing import os import signal import socket from twisted.internet import defer from buildbot.test.util.integration import RunMasterBase from .interop import test_commandmixin from .interop import test_compositestepmixin from .interop import test_integration_secrets from .interop import test_interruptcommand from .interop import test_setpropertyfromcommand from .interop import test_transfer from .interop import test_worker_reconnect # This integration test puts HTTP proxy in between the master and worker. 
def get_log_path(): return f'test_worker_proxy_stdout_{os.getpid()}.txt' def write_to_log(msg, with_traceback=False): with open(get_log_path(), 'a') as outfile: outfile.write(msg) if with_traceback: import traceback traceback.print_exc(file=outfile) async def handle_client(local_reader, local_writer): async def pipe(reader, writer): try: while not reader.at_eof(): writer.write(await reader.read(2048)) except ConnectionResetError: pass finally: writer.close() try: request = await local_reader.read(2048) lines = request.split(b"\r\n") if not lines[0].startswith(b"CONNECT "): write_to_log(f"bad request {request.decode()}\n") local_writer.write(b"HTTP/1.1 407 Only CONNECT allowed\r\n\r\n") return host, port = lines[0].split(b" ")[1].split(b":") try: remote_reader, remote_writer = await asyncio.open_connection( host.decode(), int(port) ) except socket.gaierror: write_to_log(f"failed to relay to {host} {port}\n") local_writer.write(b"HTTP/1.1 404 Not Found\r\n\r\n") return write_to_log(f"relaying to {host} {port}\n") local_writer.write(b"HTTP/1.1 200 Connection established\r\n\r\n") pipe1 = pipe(local_reader, remote_writer) pipe2 = pipe(remote_reader, local_writer) await asyncio.gather(pipe1, pipe2) finally: local_writer.close() def run_proxy(queue): write_to_log("run_proxy\n") try: try: loop = asyncio.get_event_loop() except RuntimeError: # We can get RuntimeError due to current thread being not main thread on Python 3.8. # It's not clear why that happens, so work around it. 
loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) coro = asyncio.start_server(handle_client, host="127.0.0.1") server = loop.run_until_complete(coro) host, port = server.sockets[0].getsockname() queue.put(port) def signal_handler(sig, trace): raise KeyboardInterrupt signal.signal(signal.SIGTERM, signal_handler) write_to_log(f"Serving on {host}:{port}\n") try: write_to_log("Running forever\n") loop.run_forever() except KeyboardInterrupt: write_to_log("End\n") server.close() loop.run_until_complete(server.wait_closed()) loop.close() except BaseException as e: write_to_log(f"Exception Raised: {str(e)}\n", with_traceback=True) finally: queue.put(get_log_path()) class RunMasterBehindProxy(RunMasterBase): # we need slightly longer timeout for proxy related tests timeout = 30 debug = False def setUp(self): write_to_log("setUp\n") self.queue = multiprocessing.Queue() self.proxy_process = multiprocessing.Process(target=run_proxy, args=(self.queue,)) self.proxy_process.start() self.target_port = self.queue.get() write_to_log(f"got target_port {self.target_port}\n") def tearDown(self): write_to_log("tearDown\n") self.proxy_process.terminate() self.proxy_process.join() if self.debug: print("---- stdout ----") with open(get_log_path()) as file: print(file.read()) print("---- ------ ----") with open(self.queue.get()) as file: print(file.read()) print("---- ------ ----") os.unlink(get_log_path()) @defer.inlineCallbacks def setupConfig(self, config_dict, startWorker=True): proxy_connection_string = f"tcp:127.0.0.1:{self.target_port}" yield RunMasterBase.setupConfig(self, config_dict, startWorker, proxy_connection_string=proxy_connection_string) # Use interoperability test cases to test the HTTP proxy tunneling. 
class ProxyCommandMixinMasterPB(RunMasterBehindProxy, test_commandmixin.CommandMixinMasterPB): pass class ProxyCompositeStepMixinMasterPb(RunMasterBehindProxy, test_compositestepmixin.CompositeStepMixinMasterPb): pass class ProxyInterruptCommandPb(RunMasterBehindProxy, test_interruptcommand.InterruptCommandPb): pass class ProxySecretsConfigPB(RunMasterBehindProxy, test_integration_secrets.SecretsConfigPB): pass class ProxySetPropertyFromCommandPB(RunMasterBehindProxy, test_setpropertyfromcommand.SetPropertyFromCommandPB): pass class ProxyTransferStepsMasterPb(RunMasterBehindProxy, test_transfer.TransferStepsMasterPb): pass class ProxyWorkerReconnect(RunMasterBehindProxy, test_worker_reconnect.WorkerReconnect): pass buildbot-3.4.0/master/buildbot/test/integration/test_worker_upcloud.py000066400000000000000000000122731413250514000264000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from unittest.case import SkipTest from twisted.internet import defer from buildbot.config import BuilderConfig from buildbot.plugins import schedulers from buildbot.plugins import steps from buildbot.process.factory import BuildFactory from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase from buildbot.worker.upcloud import UpcloudLatentWorker # This integration test creates a master and upcloud worker environment. You # need to have upcloud account for this to work. Running this will cost money. # If you want to run this, # export BBTEST_UPCLOUD_CREDS=username:password # following environment variable can be used to stress concurrent worker startup NUM_CONCURRENT = int(os.environ.get("BUILDBOT_TEST_NUM_CONCURRENT_BUILD", 1)) class UpcloudMaster(RunMasterBase): # wait 5 minutes. timeout = 300 def setUp(self): if "BBTEST_UPCLOUD_CREDS" not in os.environ: raise SkipTest( "upcloud integration tests only run when environment variable BBTEST_UPCLOUD_CREDS" " is set to valid upcloud credentials ") @defer.inlineCallbacks def test_trigger(self): yield self.setupConfig(masterConfig(num_concurrent=1), startWorker=False) yield self.doForceBuild() builds = yield self.master.data.get(("builds",)) # if there are some retry, there will be more builds self.assertEqual(len(builds), 1 + NUM_CONCURRENT) for b in builds: self.assertEqual(b['results'], SUCCESS) # master configuration def masterConfig(num_concurrent, extra_steps=None): if extra_steps is None: extra_steps = [] c = {} c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] triggereables = [] for i in range(num_concurrent): c['schedulers'].append( schedulers.Triggerable( name="trigsched" + str(i), builderNames=["build"])) triggereables.append("trigsched" + str(i)) f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) f.addStep(steps.Trigger(schedulerNames=triggereables, 
waitForFinish=True, updateSourceStamp=True)) f.addStep(steps.ShellCommand(command='echo world')) f2 = BuildFactory() f2.addStep(steps.ShellCommand(command='echo ola')) for step in extra_steps: f2.addStep(step) c['builders'] = [ BuilderConfig(name="testy", workernames=["upcloud0"], factory=f), BuilderConfig(name="build", workernames=["upcloud" + str(i) for i in range(num_concurrent)], factory=f2)] creds = os.environ.get('BBTEST_UPCLOUD_CREDS') if creds is not None: user, password = creds.split(":") else: raise "Cannot run this test without credentials" masterFQDN = os.environ.get('masterFQDN', 'localhost') c['workers'] = [] for i in range(num_concurrent): upcloud_host_config = { "user_data": """ #!/usr/bin/env bash groupadd -g 999 buildbot useradd -u 999 -g buildbot -s /bin/bash -d /buildworker -m buildbot passwd -l buildbot apt update apt install -y git python3 python3-dev python3-pip sudo gnupg curl pip3 install buildbot-worker service_identity chown -R buildbot:buildbot /buildworker cat <> /etc/hosts 127.0.1.1 upcloud{} EOF cat </etc/sudoers.d/buildbot buidbot ALL=(ALL) NOPASSWD:ALL EOF sudo -H -u buildbot bash -c "buildbot-worker create-worker /buildworker {} upcloud{} pass" sudo -H -u buildbot bash -c "buildbot-worker start /buildworker" """.format(i, masterFQDN, i) } c['workers'].append(UpcloudLatentWorker('upcloud' + str(i), api_username=user, api_password=password, image='Debian GNU/Linux 9 (Stretch)', hostconfig=upcloud_host_config, masterFQDN=masterFQDN)) # un comment for debugging what happens if things looks locked. 
# c['www'] = {'port': 8080} # if the masterFQDN is forced (proxy case), then we use 9989 default port # else, we try to find a free port if masterFQDN is not None: c['protocols'] = {"pb": {"port": "tcp:9989"}} else: c['protocols'] = {"pb": {"port": "tcp:0"}} return c buildbot-3.4.0/master/buildbot/test/integration/test_worker_workerside.py000066400000000000000000000262741413250514000271110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import shutil import tempfile import time from twisted.cred.error import UnauthorizedLogin from twisted.internet import defer from twisted.internet import reactor from twisted.python import util from twisted.trial import unittest import buildbot_worker.bot from buildbot import config from buildbot import pbmanager from buildbot import worker from buildbot.process import botmaster from buildbot.process import builder from buildbot.process import factory from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.worker import manager as workermanager PKI_DIR = util.sibpath(__file__, 'pki') # listening on port 0 says to the kernel to choose any free port (race-free) # the environment variable is handy for repetitive test launching with # introspecting tools (tcpdump, wireshark...) 
DEFAULT_PORT = os.environ.get("BUILDBOT_TEST_DEFAULT_PORT", "0") class FakeBuilder(builder.Builder): def attached(self, worker, commands): return defer.succeed(None) def detached(self, worker): pass def getOldestRequestTime(self): return 0 def maybeStartBuild(self): return defer.succeed(None) class TestingWorker(buildbot_worker.bot.Worker): """Add more introspection and scheduling hooks to the real Worker class. @ivar tests_connected: a ``Deferred`` that's called back once the PB connection is operational (``gotPerspective``). Callbacks receive the ``Perspective`` object. @ivar tests_disconnected: a ``Deferred`` that's called back upon disconnections. yielding these in an inlineCallbacks has the effect to wait on the corresponding conditions, actually allowing the services to fulfill them. """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.tests_disconnected = defer.Deferred() self.tests_connected = defer.Deferred() self.tests_login_failed = defer.Deferred() self.master_perspective = None orig_got_persp = self.bf.gotPerspective orig_failed_get_persp = self.bf.failedToGetPerspective def gotPerspective(persp): orig_got_persp(persp) self.master_perspective = persp self.tests_connected.callback(persp) persp.broker.notifyOnDisconnect( lambda: self.tests_disconnected.callback(None)) def failedToGetPerspective(why, broker): orig_failed_get_persp(why, broker) self.tests_login_failed.callback((why, broker)) self.bf.gotPerspective = gotPerspective self.bf.failedToGetPerspective = failedToGetPerspective class TestWorkerConnection(unittest.TestCase, TestReactorMixin): """ Test handling of connections from real worker code This is meant primarily to test the worker itself. 
@ivar master: fake build master @ivar pbmanager: L{PBManager} instance @ivar botmaster: L{BotMaster} instance @ivar buildworker: L{worker.Worker} instance @ivar port: actual TCP port of the master PB service (fixed after call to ``addMasterSideWorker``) """ @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) # set the worker port to a loopback address with unspecified # port self.pbmanager = self.master.pbmanager = pbmanager.PBManager() yield self.pbmanager.setServiceParent(self.master) # remove the fakeServiceParent from fake service hierarchy, and replace # by a real one yield self.master.workers.disownServiceParent() self.workers = self.master.workers = workermanager.WorkerManager( self.master) yield self.workers.setServiceParent(self.master) self.botmaster = botmaster.BotMaster() yield self.botmaster.setServiceParent(self.master) self.master.botmaster = self.botmaster self.master.data.updates.workerConfigured = lambda *a, **k: None yield self.master.startService() self.buildworker = None self.port = None self.workerworker = None # patch in our FakeBuilder for the regular Builder class self.patch(botmaster, 'Builder', FakeBuilder) self.client_connection_string_tpl = r"tcp:host=127.0.0.1:port={port}" self.tmpdirs = set() @defer.inlineCallbacks def tearDown(self): for tmp in self.tmpdirs: if os.path.exists(tmp): shutil.rmtree(tmp) yield self.pbmanager.stopService() yield self.botmaster.stopService() yield self.workers.stopService() # if the worker is still attached, wait for it to detach, too if self.buildworker: yield self.buildworker.waitForCompleteShutdown() @defer.inlineCallbacks def addMasterSideWorker(self, connection_string=r"tcp:{port}:interface=127.0.0.1".format( port=DEFAULT_PORT), name="testworker", password="pw", update_port=True, **kwargs): """ Create a master-side worker instance and add it to the BotMaster @param **kwargs: arguments to pass to the 
L{Worker} constructor. """ self.buildworker = worker.Worker(name, password, **kwargs) # reconfig the master to get it set up new_config = self.master.config new_config.protocols = {"pb": {"port": connection_string}} new_config.workers = [self.buildworker] new_config.builders = [config.BuilderConfig( name='bldr', workername='testworker', factory=factory.BuildFactory())] yield self.botmaster.reconfigServiceWithBuildbotConfig(new_config) yield self.workers.reconfigServiceWithBuildbotConfig(new_config) if update_port: # as part of the reconfig, the worker registered with the # pbmanager, so get the port it was assigned self.port = self.buildworker.registration.getPBPort() def workerSideDisconnect(self, worker): """Disconnect from the worker side This seems a good way to simulate a broken connection. Returns a Deferred """ return worker.bf.disconnect() def addWorker(self, connection_string_tpl=r"tcp:host=127.0.0.1:port={port}", password="pw", name="testworker", keepalive=None): """Add a true Worker object to the services.""" wdir = tempfile.mkdtemp() self.tmpdirs.add(wdir) return TestingWorker(None, None, name, password, wdir, keepalive, connection_string=connection_string_tpl.format(port=self.port)) @defer.inlineCallbacks def test_connect_disconnect(self): yield self.addMasterSideWorker() def could_not_connect(): self.fail("Worker never got connected to master") timeout = reactor.callLater(10, could_not_connect) worker = self.addWorker() yield worker.startService() yield worker.tests_connected timeout.cancel() self.assertTrue('bldr' in worker.bot.builders) yield worker.stopService() yield worker.tests_disconnected @defer.inlineCallbacks def test_reconnect_network(self): yield self.addMasterSideWorker() def could_not_connect(): self.fail("Worker did not reconnect in time to master") worker = self.addWorker(r"tcp:host=127.0.0.1:port={port}") yield worker.startService() yield worker.tests_connected self.assertTrue('bldr' in worker.bot.builders) timeout = 
reactor.callLater(10, could_not_connect) yield self.workerSideDisconnect(worker) yield worker.tests_connected timeout.cancel() yield worker.stopService() yield worker.tests_disconnected @defer.inlineCallbacks def test_applicative_reconnection(self): """Test reconnection on PB errors. The worker starts with a password that the master does not accept at first, and then the master gets reconfigured to accept it. """ yield self.addMasterSideWorker() worker = self.addWorker(password="pw2") yield worker.startService() why, broker = yield worker.tests_login_failed self.assertEqual(1, len(self.flushLoggedErrors(UnauthorizedLogin))) def could_not_connect(): self.fail("Worker did not reconnect in time to master") # we have two reasons to call that again: # - we really need to instantiate a new one master-side worker, # just changing its password has it simply ignored # - we need to fix the port yield self.addMasterSideWorker( password='pw2', update_port=False, # don't know why, but it'd fail connection_string=r"tcp:{port}:interface=127.0.0.1".format(port=self.port)) timeout = reactor.callLater(10, could_not_connect) yield worker.tests_connected timeout.cancel() self.assertTrue('bldr' in worker.bot.builders) yield worker.stopService() yield worker.tests_disconnected @defer.inlineCallbacks def test_pb_keepalive(self): """Test applicative (PB) keepalives. This works by patching the master to callback a deferred on which the test waits. 
""" def perspective_keepalive(Connection_self): waiter = worker.keepalive_waiter if waiter is not None: waiter.callback(time.time()) worker.keepalive_waiter = None from buildbot.worker.protocols.pb import Connection self.patch(Connection, 'perspective_keepalive', perspective_keepalive) yield self.addMasterSideWorker() # short keepalive to make the test bearable to run worker = self.addWorker(keepalive=0.1) waiter = worker.keepalive_waiter = defer.Deferred() yield worker.startService() yield worker.tests_connected first = yield waiter yield worker.bf.currentKeepaliveWaiter waiter = worker.keepalive_waiter = defer.Deferred() second = yield waiter yield worker.bf.currentKeepaliveWaiter self.assertGreater(second, first) self.assertLess(second, first + 1) # seems safe enough yield worker.stopService() yield worker.tests_disconnected buildbot-3.4.0/master/buildbot/test/integration/test_www.py000066400000000000000000000143531413250514000241610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import mock from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from twisted.trial import unittest from twisted.web import client from buildbot.data import connector as dataconnector from buildbot.db import connector as dbconnector from buildbot.mq import connector as mqconnector from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util import www from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.www import auth from buildbot.www import authz from buildbot.www import service as wwwservice SOMETIME = 1348971992 OTHERTIME = 1008971992 class BodyReader(protocol.Protocol): # an IProtocol that reads the entire HTTP body and then calls back # with it def __init__(self, finishedDeferred): self.body = [] self.finishedDeferred = finishedDeferred def dataReceived(self, bytes): self.body.append(bytes) def connectionLost(self, reason): if reason.check(client.ResponseDone): self.finishedDeferred.callback(b''.join(self.body)) else: self.finishedDeferred.errback(reason) class Www(db.RealDatabaseMixin, www.RequiresWwwMixin, unittest.TestCase): master = None @defer.inlineCallbacks def setUp(self): # set up a full master serving HTTP yield self.setUpRealDatabase(table_names=['masters', 'objects', 'object_state'], sqlite_memory=False) master = fakemaster.FakeMaster(reactor) master.config.db = dict(db_url=self.db_url) master.db = dbconnector.DBConnector('basedir') yield master.db.setServiceParent(master) yield master.db.setup(check_version=False) master.config.mq = dict(type='simple') master.mq = mqconnector.MQConnector() yield master.mq.setServiceParent(master) yield master.mq.setup() master.data = dataconnector.DataConnector() yield master.data.setServiceParent(master) master.config.www = dict( port='tcp:0:interface=127.0.0.1', debug=True, auth=auth.NoAuth(), 
authz=authz.Authz(), avatar_methods=[], logfileName='http.log') master.www = wwwservice.WWWService() yield master.www.setServiceParent(master) yield master.www.startService() yield master.www.reconfigServiceWithBuildbotConfig(master.config) session = mock.Mock() session.uid = "0" master.www.site.sessionFactory = mock.Mock(return_value=session) # now that we have a port, construct the real URL and insert it into # the config. The second reconfig isn't really required, but doesn't # hurt. self.url = 'http://127.0.0.1:%d/' % master.www.getPortnum() self.url = unicode2bytes(self.url) master.config.buildbotURL = self.url yield master.www.reconfigServiceWithBuildbotConfig(master.config) self.master = master # build an HTTP agent, using an explicit connection pool if Twisted # supports it (Twisted 13.0.0 and up) if hasattr(client, 'HTTPConnectionPool'): self.pool = client.HTTPConnectionPool(reactor) self.agent = client.Agent(reactor, pool=self.pool) else: self.pool = None self.agent = client.Agent(reactor) @defer.inlineCallbacks def tearDown(self): if self.pool: yield self.pool.closeCachedConnections() if self.master: yield self.master.www.stopService() yield self.tearDownRealDatabase() @defer.inlineCallbacks def apiGet(self, url, expect200=True): pg = yield self.agent.request(b'GET', url) # this is kind of obscene, but protocols are like that d = defer.Deferred() bodyReader = BodyReader(d) pg.deliverBody(bodyReader) body = yield d # check this *after* reading the body, otherwise Trial will # complain that the response is half-read if expect200 and pg.code != 200: self.fail("did not get 200 response for '{}'".format(url)) return json.loads(bytes2unicode(body)) def link(self, suffix): return self.url + b'api/v2/' + suffix # tests # There's no need to be exhaustive here. The intent is to test that data # can get all the way from the DB to a real HTTP client, and a few # resources will be sufficient to demonstrate that. 
@defer.inlineCallbacks def test_masters(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=0, last_active=SOMETIME), fakedb.Master(id=8, name='other:master', active=1, last_active=OTHERTIME), ]) res = yield self.apiGet(self.link(b'masters')) self.assertEqual(res, { 'masters': [ {'active': False, 'masterid': 7, 'name': 'some:master', 'last_active': SOMETIME}, {'active': True, 'masterid': 8, 'name': 'other:master', 'last_active': OTHERTIME}, ], 'meta': { 'total': 2, }}) res = yield self.apiGet(self.link(b'masters/7')) self.assertEqual(res, { 'masters': [ {'active': False, 'masterid': 7, 'name': 'some:master', 'last_active': SOMETIME}, ], 'meta': { }}) buildbot-3.4.0/master/buildbot/test/integration/v087p1-README.txt000066400000000000000000000002641413250514000243610ustar00rootroot00000000000000-- Basic v0.8.7p1 tarball -- This tarball is the result of a couple of runs from a single incarnation of a master that was running Buildbot-0.8.7p1. Both builds were successful. buildbot-3.4.0/master/buildbot/test/integration/v087p1.tgz000066400000000000000000000410311413250514000234100ustar00rootroot00000000000000jHQ]\Sk_Ѡ8J0c( 6E)))[T DLl^ϗ}~s~Xmch- mF]}qG~F:zm}mo0b  r~?Xvb"D:tVkk7_Ol_oHӷu uXgAyx1d !8 T#2iV,;^*4 K•1\ c Te睖ϠrR91T. /='!1*7B$I@@v3.CaЩ >F#F2h*32&"Hn\U :c| \D47ޛuOEL E 0dF-*4GxEћ*^qjFit2A}Qp +E Dl vi,@'1(|8g1##hG$24 mW⥊Rœ8脡T@z03c6$Fp\D@ pӈdf6,o18 '62~΀%UhT b PJa`ZT$1e:rgG!ELP>d|8כRfƀQ0@mJ~߱F=cD'S@*/&4Hwa:U1T`OPUNǬy!ɀ5˫Z2U]QELj)c"UAIOz%LpF0cT}7:Έ! 
{!5TXldl2Ո*LRV&'S*I$ ) 4~p]I"@`Ӣn t\ksEQ'Śœyc@by>K Z)dֱ5h2jb`G{-qՎdLW&1>$@KQp$T&iiph0բSpѵ hoN!.~՗Y.#.TW0IIb;Q=2I,$[2.DO6Bu ssCFƺz8=3'1p{z0D+B O0(5J]H $R+t!F,?(jP2\~*APBT| }LSBu c!P`1' 9JHH9tȁ0"EaO8f0hS0 bԈ@j$8:* e1TgKY.wጉ&D*Gb.~%TƐxFƽ2O&S㘓9aV<'M5RS6CЩ D̵NMp1$'&X4Dr2u2)82eP)TE"F 5O^: RT )Hk|Af,%Bq*0XB(p"]Ed ່TH )X: %%吘$b$\d(Ь+y>5L- .|?k'i_h󿡁O*4W_s<b!HӲoQdeTԫKX"HQF9#K J)ٔ"d^H Ag%"vP1 hJGBWD~iwsv`vE Z.9M҅j-zHm4\|y^ŕw?zK;[^ߘH0p s7ikŠO>RSꡘ[%6j݂VSՉmi?2b2w-Mn^deԦOo$c1sazΙv4ĻmjV+1w\x_hyP5k}O&aNʽ })}AUl4x{`sJ;uT::'TEqe^T%sy{wZ?қ!E I ^Uՠ<Ϛ v R{Jw IrO?A犺xz7U{J WH[~Ob/RWr:+Wg1:J yv<%pPS^-w3gׂ6' -j@gh[Cmr[Suʲ' V)znmZyQNh۾W(H=S}C/={1wpl+>O2/*Y @Z+%YӃSJa{պHlh}O^e{9oيlz<d7љ3oaLpw-K~cb S`Zj<ܭ1[4nn53`3[FvuwZꔠTL"o̬H{A)Kݝ7UooL}j A M)9iW ~Dօͯ3&A8--rfB9S^Bfʼn]k׳"ܠjʟ+fSS u6ofUeցc!ΉzՕ?-wF)y뤟}ti7mUgkgk?[(}Rʑď=̐k:lXڻN}ڤɟ|߯4*na-;dN0EA}@1/0UUD}6HouG·#"m>BdxTd}{C8}ZaHDW껉 Lt՝el%'A;U@Tm T弮,˶Mn 6~ ta8T\-}s?E1 $~M%鮵a "w_lmzJXdY%VlP;+ʢ&ԡqivzu*{+\|1MM=67T?ς™Z>~⍮3t$6#iPq;.⡬bc ^V ֯<>YYǪXM%f-Xbsbka/{vUk^;[GGS w}n!j r|7[s:u=&%WZ-KiWr ^mE}Ѩ%Q;lito[B <^59{~79>N|%!jeg؆f7_S""_'pw#پre¢GjŅ:{`qX%0T轇75b*h[-Rx_M>~joQӝ} o}PZ0bFPigK;+mQ%޼㓂9w5Uw~ߨ8! 㭻x/oTѶj!w`X|]ՁkqR9ѧ msbpO84/X9ۍk7?.~:Jລ j9J{6H>q~= K#_\̿v%-d&WYxK_ukׯ%vsZ<}Ƶ}} V %nM>{_ۭWJ HNT".\77PT3 2{O:>9eAuZh([k+K߶{+7e' CydCxVcW:3 y{]+c\^6R8Jz QyOy39Jn MHSdg(I۫m2yc}N<А˸ZL?jsF% h|e\ Ň_v"Q/p8{&9Pύ.Zj-C.]ͱsC]|.RK&&Tò~ԁ\qsytq V$;UdV?ʸPO*##l6ryW\C e0yŊk"N ptѸ^D7F}ty1bwˀAW@8v%EgݶmةcnJ1yV:#PB%Wsr ڋ Ǒwy ̒EQ9F 8f bcK9687V6\,tՉ8#?A1CvubM‡|&^7?ccRvoz7N%Epr|*+]}ezUv'z1s[lH4ks+NﹷQ~tGovAzN9I곣m:şnx!V(wS\t+{-z^5naZ$_v&6X|5}їiNovIdd[~)3|`d^㠅@ `=%kkRv\slcJ\|p2p\KM_^q~i0͐n5BU;4@4ZJ﬿E;Eۙ>驛D.|St Z%n0Ts>nʦ7e<% Y=ߊr)-O;hM=J,%Ѵ"BIU~z,'|ɚc+FKў{r/{ڶ <޹NY% =*jWN\}~5M㚋%o+up[šgwV#y0&' e8gf,,vvR]J,%Wd3 |*B\wRuٴ~SNҹuZz{_8=RhmYAvsv]wŁ'E@%m7"F˾̍]~0Y73/}PO?u("#vAAן~ru/J%1\#a ^ eƸa=0#+e<67j26L")|e005U! 
GLfFf (fbTqD1bVT1QvQ̎I'%S&3I6TAsfl8Qe2Quz(8(>(1Fdŀ"Q$38d2fQ7`}:kAx(GfԳyV8OubE[K];z[yNy=nк.)-4ΕwU4N kkj ?XmHkM q(ZWqrΚ]>41QI>Z{[&W;lXUF'Op]I GjRtv5դ㬴fKަhIRG+W8Y-xŽþሏ*ڡUU'o`9٢!֎|Tvńe,=XfX8V7Bj̃)|=<{g~sՉO*k%58]sglK+59mYM`do+2-(;I|O,}>}YOsh NIHn7"e8cg3y. qUwig+~兿+^lp'=>-V}HZpMHꯔѭl4+.O!IKk٤ڮ\>k g/=P!ѮfWOk( О_k;`mIF W݇#O5˻h|+;jgş6M(JQ3{FIݩ3_p†xW+QjL堜DF܆KS ].ۚ^xuR+ؓzhl]dU0!(,G샴,JMc>~vǬ?Zem Qxͥ]ΝWwAEOI0F/緃-?jhzbYYay}3YەOD&+zudh6ꆫg e:4Wh?ec"hW_Kpu%EbPӼg/A&';ֶ.ͻz lsxJ ]6$õ2ջ<ۥ/qQZ24_Y^;y{S??m?P)SJDm)<$ 3c9AkQS̽/韛չ'&ÜGK 7Ys:*7Uv$H=v.)$gE0Z~YBX^ ܩHp@{Pg!'mk/r})yȌsSfU ܂B[`|DW0%sMM|s/3.3ryS'M4a8*yg9vn!S=8Ҵc0A5:Cň9~Rʹ Q uZע+zmOݵ)NbB h.g^ى}qhiQQOO?dDH4Ϸ; =xX4aƖ-u̾sU@_2[to6!-q [Dw8Y.SyQI]&O3olEDlxy1-&ơ* 'ۮƗ=ꁾbzU/y+WWSAy""WK"5(' C{ Qr94Ɠ7c =`Jlԡr7y=/n'iRLԭ3f߄!)N,KNj8kaB *Y6Kl>!|}˹++PtJQBRC1WRZ/H~&^dٌؑLo}tkIclX, sݒ >Kly{  %پFc0iBLңܽwL~]'?7ﯹ~G+bVI̔#xdIL0ktq)p/.͘ujecud U۬ B o|UZr Qu D8;4Wh,Gk>@{Ҟ îtUި$?QA|; FSm<_w.p@k↓&uZ=[3'otZV6vvl_mY0;wIܫ.w: 6 .\Nxa$#5϶L;+V9#q-hBTxj 劊\uzs3튞m7C.:.A 6ZNx|9O.szGՕ.k9/K<7O%Bٛ_FbonP\S8kb덎-,f'%NZ$&7vo'|~ΡEʎSu?gnWU ׌.)N{x%*(LCu\re_j+~W[qc׹'&1[bT_YxEU+¢FTt&<*7)\W s̀q+\p|z;=2C;TX8?rHkAq|^N1g"/} $e#2 I]^`,Ep:/tfߎDݖ8L%T\̮oev_g%41\ YoZ\dׯoL]Pc^bn`f!4=ofn,?Br?bP+aJ׶TpXN)W/_ns(ox)X1W,, ݇f%~0)Ons媖yמ&>[X@Zs,]YM3f(BFjqwT*sxr\n ).Uު0'X+B\X;fS؎|X`pCIŐہ&˱N)ӋsRߓ[v٠ׯk#_ %?;yRp]Ӂ7gy6i쿼bxg( *:?5m|&Ὥi{Bk2{9ERHBO]q`>I+)˴p4;'EX2eR{vȚrWr-Pۄ6<*<}kmgNCOD /,3v}xV_)7f*aeE-V<^]q}[P^ULwf2&ˮ/J㹓o"[*q_7De힮'I{حdö[Qfbz>G/~pPxy"i,66*$o,у<FXo$ d0wjAS{R8/Y׸HS2sϟ߄0^{`aԹjU|wY[XM)7&Yon>JMͲ |hyb kWBE7_ +YD.!5zCΛ[S+ݫiWkD%P~E+bK:} f#| Gu >ZFs#'>[t-ìjns7SG+E* :Q{d[e;g;ƅt&{z7Wz-k*\-sk[ dތm2r%ԓ$Ƶ٠~y*ZE։uo9Gt𝂈TӺ%^Yntx i~;Uf=[x*趴 W~)۴zB}V J@^YLh>qbA"%CI tt1M{Du9;s@Rwei*6\ sƊfڮ@e=В `fScA!|ԓG^/:}>{iRz7\]6lEi(Σ$uz,.TTǗ"ޔ)=]ۢ!|BUh23-r~}y;w^ XؼA_/ u;7: 8Tjy^ay~!U|.y4yUGxW_,4Oie({`sk4CFպNjp✣Mb b> {ĪbWFV۶.-nY'LR#^~KfddNѝDa uGtySŮuY{x.ee H a_Uf@,TkY%pqr#}-ߖ/r6:o  Bt* i?# =]&g"p B!@}@ I^u&MvLR y;~>ʶ TP0p"erE9l-}lQ>V..(e^L@9z}Pnu/ke`饬2_y$.p@Z*t8RK .:>c,SaP]C(1}M\H55hP0bctIF؄Ordw] =V$9- |l|#1 ˟Y(̛8jP9@gOX\ 
z\Ք H11!2C0 o\[(|7=\sRyV`ix mMcM#A:k'ټ ~ @>gt2 į p99 3j3BQ*#eNVg~HBAX9:;} + 8㬧) <4PPʀ㟮3Ap`hNTf.lTB969uܨ2@cT<֐PnJ u#PdaRL '4$B JgA1?b Cap%W T5jNd[¦0N9X'̛#Xbtqeg_y(!PaJ`4aXzcL=,ӬL9ܿ{b`&}iDj s[H㸹t[VYZ|>l|PDX$C}SJ &iO{l)mĸƅb{GED#?}e%a*yT0S}['<Ox NTie $8((¸9zblQHQc 0x'~b0N\b'U] Pt+g=) @`=a8 ^? =(/JfhiWRL&0F- 8ړQ CYcF,Y=oG@DqT2^ӛJ!Zt[C@mip+oaIi^Ml=w{&5Y◂>_? 0BC!'!5L?'Ghr"-%t7, .c}J7'69P”ͿGd%Ćѣ"k0#Fiጤoy,oT$Œes"~\_쒎T$)e&7.4GM[Y:d:) vtqaVD!*sMјz* 92;81%Re}rPCQ)k"'&七~ă$GјFCJ%m"8GuѨmAf0oHǹOïB"4P6)f]D'ˊYc1 B&N+S 2c_rqQ4$I9agZ )+ M2 ɱge  Me|Gԇ+1fTcj?`<(9xkٔ?a7$o-͍l>_8Aup" yՈtY\Hh ;G%#A1PdA#GNAbzidSJRUWPǫ`F#R]1*!m:ϟP$: Tɚ-0(>F4GR :,ts*϶aOCeg>@ @`6_ _ \@10^$UY`Pq8:ۇ N@)P jJC=ks~y% LJ/M o>6- 7Jx *ㅎC?!E]u2@?8d]9XWkA5qA!4 6 π\H#h*KR9&수uޙɉ?kIr!-'G☋TNp\ㅯH-Ņt׃5}v PSx@OOc`hc&4= i dL1r,.B:YQQVF>"L8~~ڼ~DBYЋ!LJҜ苙Lf" i& M"V~1{婪x`S$uU ~]__  ~a_8AupCB GZJX8l o78<8*ä {[IXPp)x?l$Ru_-˅ԐL&N0 1O,aSʟxDXBjF BH%$4y\醵׳ ӗjhPޫĠfJT-#L|:b)LLj4EN,;{4&i.p"r.2)(`dG}戓zƈ.A1T|9PGust#XG,A0 @Z=5pĿ9@ @` @?.į p낥 CHX/ ^ Y }1ks!L W1?`qd 犺H89"̞=`C?ïoösɟ%΅yqN"'gs&t8Syh8F#>@Ctq"wgC>#9h{O+SR.TmT.$ )[gD>OR>;#ǥD2#'TSY`/Fşf7Mc?K fVq/(W1RT^ۈTy/kȡF/N#81 ~8a?Xs7!|m9z^+R>;U4ɑ]5D:1%B,[,r6-fJL!Ȧ3鸔Hdl%?-Iʭnu܅xal hR,ʒV}Gf̉gf?>'z(=yfܼx'HEZ3s|s˒,>9;T,qzⱹn^ayaf 5JQ~]~S+lVɣO#;8wN N':";,v"!zۨgl"?T)EU+%.KjVEbNs.TWjeYŅǏCgbc1]92^Iuh˜BrYҔjE]Vjj_g^!@O9&O١%G/-rlǴܽf)ނAQ}6|FgX]bGQuZ3+J.j~ƐTДr`eRhCx^&O֨3dmA[XPk¶mh- 5"VWj k߬ᅩmVqZiﮠ.Ŷo.-[Psv&-ߠ+tOx~WۂRUXPK=Vu0u/Z;L[PIPnOFV*EH))ro'"{ IzZ/<=Ѡ]?ݾf*iC~/MV5UXe3|"Vx,]*RR,-ʥt\?Y?]@hП0  < >&Őo0^F?aS G@MDTsԖ jkL]jC+sSIOm͕KQm>p1a3UVM56{zhz;),jsVx/ݴFV*z蕘 E6ZݠbLz9 / SuF@ Cܭp)7ΝkRTY,C!`b H4ZwE>2vZU0ms3\Y@f/ã ~ ƣ>pۑ8flA~NgOkHK<~Oc[;3dC4rR&RY~.U vnd{wA+j$ҟĻ[pF?%3hMrG68 _fw;ps"?QJr!.KTr&VNcT19;q7E~l^$;L!R\g2R2OgBd!%bd8##xu?u;|V7>4[;yOxaT\Q*;;Wu.?"eJb]VIrY*+G/TvL;;]ɋXkh$W}VK/=Lr6R_&o $Al2;ORrF]!F1m=x, .0t}_qy7-t:&ieY5<*L3*h|Y.n*5j]h"W4z0#QY[긏!znA 9j:5?wI"ܡB)eGQs rB'zΈ'w&"Yʹj0AF׸զ=Rrm ɶ@)u ڄmBZpk?z:1{nԝvCCQuFE,xR}eW}B4/0 <  /?ϐ~]B}+/<"w~E"}Zދ @hi]}{+͠#ĺ?< Dc7#^^88  )et*$\.R~:/&D)*-Ϳ?f0_ca?2 
#vRSǣ^C}Ul2Olu{Y\~uVUZ_ulDQGު( 6k*{UuY+XOo#ro5 c 39wQ^ul\U30kit:g䷻9n67|}ꖭ[$ <`nw@xCgXۼ;=RmUsdXV+fc>OX8$bW*zRZY]6;1>N \dh q1៓>L|C ePs %Lz$cmB6[Dㅇ31E_^D@ε0j_ԥuǂamW?Ej 5g7 @8!ڡZGQIec_ZOXYʒx=Z_VI-0[`(=ISz?^`/0 bc}ou]Јpt7>E"<ʐ,>{#4p,QQ(EKaxToV8=. %-%VPf%Hh[}cPmB4W/ _=2f@밈Xٞf6`jy&7(IETT lm [[~ksui^a|U]?vL !w+hЮ=KAa/_WCp30("DzV>zJJEQN4Dtht/ߋo%+@h?BƐ5_);MBf5yі?:V #eya3.Ɛ|'x܍u?F`t /̠kQkWԨk" F>!Z ܒ]Ds^^86,VF iǀ:jgSq^omw30/웱Q;]c}ul,b @v6lZLAa@_^@81L34%4nR][ ڲTY7_G*3SבL5j6HOG[M5֑xguRX4֔\VTZ (+>+r%)+I+u.&/Uy֪DzB(%U6MZ\кX6=5Nz/g]Km>-'k`l-ܓھAGy!3l)9O Lc^]PL cBt4h%6'x`->W3TOmcӾܹZ'gnqd`8pkyF^`@x?Cüؿ˷F׍8/ [ˈ,Wo (RoP0Ϟ,Trs6kɠk.adZ>og+HIEr[[P5G;xa.O@QQ.5{u\f~vZ;!zrNZ&;I~&`],& ?#w:=, @xPZ  lxp_/6H`"=}e\Vy?ٗ_o,iuE*RY(cO/o3I4D-)a_I6ګZQ{ĕ;[F~?g G̩S_y~qO 2>~r:qӇyAg"pUk*uzD̨H+Δt,{'w6hjYZnXVE".=~p2m\(v8+*=$#wfLQ^P3ׇ됭m͗aӭ'Iޣhk J!HiwfdfB\< Q /hZ; 'Z FL3!fĒ#?R'<!~Spa8ՇXE3. /bgQ f7q-CBA\WcTrOhiZs1H@КMϙ=N1.zJ~Ę fBM7JP I*QG)7(ߜU|O;>{Ϛ>kGPͱxUU(݇Ǥӷ7̜0i/i~;Hf` pgz(8c?_BY`PO'՟8V~F̑񳔚?E#U`Xb):~sF̩95eOd|/#|BqF{6I_~c ހn[(Š'A:$`/[f4\Y @0Q TMTM糩)MTM\6a9l* /+A&|0 &<0„&\0L!@ NS)vEwڷVXr!9~RO{\f붟ाɝ:ì9ʜ$?7vƌwS+Ln(>t? m\)v>kjc,ޘ (p&L 6KbY\ʚ|yᶓGX`W1^!;,^UԀahFY'J|";Q#?/Gb|9 @C8F bv:tY23YE!87%?AAgwX_6i{,^Tftw>c|)><| ~gC#!xyb }K4-JV3iňS~ N| a7^Fgz  z'r]qN" *EltedHquv 7A΁lp$`! 
8k` m)0݇n?z qA&X9 dØ`.;&c1aXjLN%ƄcB ؉q(pn%s5œ]BX  9|Hsx6,gpcpde3[n6b3k Y6'{VPrAH D- &؁5`X 4"b@`7N] j&0\Ybȯ[>X[~~LHǦh(+3֟X gA0\ .%!ĕ$0][ DI.TJ fziմ Brq'3>ts%xS\dտ .qM@ eߺnt@B6Ëvnޫ$)Sff9sT*dJx ?M938&nJKxeeh274~ 6}soNG]_06)4l>.LGdmHJbeD lU手Cx/XGŰ*mέh.wdPv)iy3],}g*@!kW*>bL ޟԘlѲZWء%ڼ&Q)e)AKaR[|z(3l}D.4##yml$} ffY' .(yLu3jw?]mfy!7}=SWR.؛ԗo΍ҋm]&g[ݧCi0(m?3# ۧ4e~?jʪGUL`Z%w'[jQ," M8Fͽe(>tky`+gYX)f{E^kx=Mb5N>5HoTy]R?6(OVunyx=T͖MWWSWZ-y<|謧7QQd7@ IK0eIt7|4WҒ&0eǟX aqg?@?45u0(7_;T˯fw UP?t*ƒ-~AX.Hj|.LGc3_yyVOͤYFM;ďX|?.tjhU"Ыe|P4`ؾ.K(LugŃjr 6$y1=l|Bqw+LEv+."Vii_yO8vDLukn=/|1y[6!)/QLe6G&k&=~ɩ"u23_T d04v'  KTA}N]AWQ|PK} 2~hd*ӽo ĺc+ H- %tS_uL )ŏ iɮ i,}5aTvZ'neFK?LJ,i?O(8hR×v84amη:;r׌+U4ܽ]XTgP5nN"B%U]n*+o~š,xU_10[ۯ>,!Nq١a3Z*6KclMMg-ujjt] ̺G wTudXqGFSEar뭨=;8 [@S&c,oUͻ3^ '\ nYoy(5ƺ:nɜ+a?ԅ]tZ&IX;Q7]=75!J5GүKϻ!э_R\o__%XF'ɺL;+CLbAm/ Ujw}VRgz*ZyM~`]BIbHQ|+|^XQꎅA-fVmWI7Q/_)V̼k~˼G󼀍^=_EC(Mg?rԸVd{:v^% E֛!=/kn&,iSD,GS \]tulpSfԺR*W_^a;ii ڷӚ۾ v k|&9l=Xܿ5li)#Bkٮ7(Ӕ1{;Ǯ7B5<)v֚1Z(|k7Ȟ*oNMN[E^ ؼ:6V̋u׽|IS;}PS9Æ㫯KNH3>)K԰PzE&zWP0Π9T表C*7mPnÞ [n‰?WϨl Ċ[~ao%Ev".u3eeu@miúط ~4 ] - !tHww4 /}9>{X f+f*?ƅXxgs%"P9@"f-}LB~X^o{lo-bcHȚb&Q1cT^k o)-`w4izBiwYRHI*J/UE$jJ+ߐmH7$ 5̏w l 3y<쥂I;HTSk3Yh׏Wm٬2] iy*h_=3)w0}5TbbU :mɯ֜xl<+?$īnTJjپRH[`:tKO]'2] ה|s*.G_eFg11*`I{@A$,PKf!=(uxzXr7K\[lGzEV&ÜR/'3!CF)n=>^ _4Ay”m,R7(w-4+$Tn~6tMa+QA\iz6w -j 4XTh,]cɳ oD fxs66Yo(Ii}E;kI~[vl&Z%'B&6GS!3숩dSo<`6k.@&<`Z.8< mp V?A3 ɕ;ѫ Md_'QhL OMM$bnpExG k-ׇ9!f@"$Tgp-Pc)bM*1&5D&zZsÈrJק^7ѼiHtcjw8sg)pδx&9У ə 1jf[,Z'#JI!ڥ28ϙ&Y!!qDh$ĐY`%̡%Y㬃s8L臼P%b~D^}tG3]ЯLPQa5 ,3GlwYUxHΠZJgԿ;3p:5{fzu`ѹه?L;3gܫВ ܒ-;re 4,J}\eNFZˑUd-KL٫8]-& /$L]J\'ݎ;i՚_ʻF߸<$GKZm$1f'J\S%o /-Drj?f4t_v)u& VV@p?0Rdp|6IOGjvBUn,tקکQLLGӑ!U"U6x~ߖYncimXJϘP@ϘŐˈ؀U1®fh@+\?pW~M;!χp`)@fB r1,_4JiZ}-i->򍚃X@@_mMS):5*MmG8FFAQ~^zKul9(s嫚]j|*!&98LI9(WՏY M,Hy[뷐Y<)AT چ4AR=1nqG^zkD*x+!%kvbt:~_tئE$&wr gx*LA]܏D;iM^Pșt9Iu"̙SI8r %v}r؀<95W\krUCH7avn dX$ao !'_N#sy8]ˁ#@Gu5:f:le2hCi#ub:cs_EEEm@wFؒkx6&TNMDP{R gxPj\?Oa5m]Q&h;ZtU5e +GvUVRJ,?sKh~Y5f} /f4;MOh =2CՋ{5CA-65?$, fP1[`#<30{@ FkΰEQ'R&k"-%^FQX˘X@֏Nsta\EFԁaZD7 
&&&B#g2za~}Ls@ l' ^un^WGCK{ wfBON_8G,KROW\]Z)y\.]yiZjkIhK$92)63tz?gwlX%dXnPT=cԉNq%pif8ʜKzJM>ЛbKbjf,'HXf /RLWi38ƂԹģ<3eĝd^K|hTO K~'hG^9˝z|~֘ N"G@F}N8MHMZAD`Nۺ(aSVMF60K>TqM:R.J5$ɇwq /LAMd}I${tSYWo sd0%dWf` U3FvVՠ3$9Z 2`T㧬%eu]ӭUp9.YlpYޅJ'撖@ F;Zni LcZ:zUDԬubj)!>ٶʌHo?W*l ޕŬfRLtB} ^1z0&Jv!pUce@w׭]xY8iJ̳Ҁ)U6;HŨ"y|B(>;t,[{04)Ơ(Oȸ. o%tE1sEq1@Է#`/r*E+r)[m==9ZRd=4LO gM {4 rX*83 ſY>MCDP:WD5X`ֈ*lb v|o}ؼ#kc8z6Jӑ 5>pnw=zIr箓 \6Px"rlqp!JL_1SMGBe4WoAXv9Eڊ(YEG{o ʜ޺|QELLA9ؿ3|<1lDK٧9 Hhh}0U>A$c "rc}06#5:g6RyPN'WMOeؤXP8_\o`Y؏m̢?K(f7h]N4r/"%FsE\ϡj2X (wLi mۉ-HfQ??+]=bP^騵pPH!n QΆa9Ї)C#5cG,'oS%Ash.NM#YϪ`.ઙezkEt?YC_J:t!+S|t_(r@Co5Bb&ØI?ibXnY\ȷ5wN%նZT)É+?\.kP+|PYo9r5TV>+{ u#`4a M=ˏ4:4hI~oDט$3fRҾ,o4.#Zk)ok 7%Ϥ\(R d`z.9mccVo={P&xlY~^0@ (7o*%J~QGKH_&0%'y`0=tK4@4?3>,CJXmԢl#;f_hbUw5~?ش#p8BkɍN[W.sNx qE/Dˏ* [okt&.r"/Vv Xnɾ*;;hpo&6+jGsc>Vhcd;ЛVȓ„+T{.frrsYE֮1/:ۙGS鈉'JO>Bx 3uN=;̎zV1Ql,meeo&쏴`r Mp{Y<gUp..0m}ߎ.\}j7\Y,8؉:i~_D2uBzY\Q'F % |nƭfi71PW "gjnB;K d"TY}@ёh|Q@r[.E yĕɏ7V7;}6)Қv w/2X:lُF mka!8 *'~l/i_bg7?5k-ңnE\j&Tr~**-rjvf dX7쨳ᙸ0h!cv !3|V' ;[C1o𧴿}3~$#KYIE_ǯqQ-iU'9-P#O.Ǽ` wڊ1,҂?S&cxne;?I{tjs;{} eNXޝ_, $N#vpMTvyAyس^xy?p./FTcNtE*DYGt |'gkj?1[gzn,PT]҉ `esΊ<ϓ̈́}E5a3BJQCwIWeޑuVMp$W gi(GS _ys\W=<"7?ŀy OyO, c d'%G䘔娨: {i{${Y؊:AtU 3ResX)ɩSY)yhJP13r ᜫk8>, %'b=dž;O'^G@fǜl,Lav8W~sEg>!"b/)Ai իѤ՝wg*1`U`jJ ƨ0L"/dxL& /'Y1!/ dR_5]ȃa`rħPpglkaԅ $P5,"]1"#򨰮7kDO:0s Y\kl5ڒ=QX+[87*l5oh:OVa7^#Sծb;Lqf|L~OFӍh&?aԓ<"ܧy7ukBԞ~몬M~cf Ԗ1h1Za9jl(2f/GZWQ*ڏ;Gb?{Θ(;KAe !oMEfA2Me9`Jyt[tAB4aO0"~p ȑ4}fKBiwP'}US e^?3\R8}z m ꝁYd(_7~l/Agq`§uEqOCKƓҼ=uT|lֱw0880= e//gHs۞g^jg|X ,z o #Ѓ`)=eqJu'9 Wܟ/(=mY~sAg9<%\Wþbӛ]v} q7`Xӣt0YgqUO<I#9.}Z͗6Eǩ {{;%b䣴oSS5 ~/?bsh?6WiCѬ2YB7))mYwfaHӯC-,;!aB6wdmBK[`L,LI1@:5v-k3{lSY;~ToB6~6Ծ~GM8BN,vmͫysY?LP"+Opoݳ{>[={*LdjGdiDgO$37&ҷ2q#7'qVNvxJXXÌ ` (u5Cf[#7\Sx \;ĠsYd@9`'0hTA@ `P Q@F? ݫŰp @տ#%̀VFDwo&$[ۊfĽ9ƍ_732u`,l4H,,],H)0!@,R1 @r=4NucB_+GI@wd`odr7@z3@s;<$!]lPnkgCS[/ݯhoYu28\5f'+[QYRS"AiNY?wk~%Eě q ^~GGܲ? 
&<vTFU\& ?* >X@nԄw&e-A] w$ĸpn_T A-?Gr/Vƣ11>x3|B&Ļ?vgo^|_wPSokQڰ`4P/*xe2ES|*44IO|ꁽm.:J׷Rdň8p]>?}r uUj8}\5Erwנwo߶/߶/߷^^^^^^^0buildbot-3.4.0/master/buildbot/test/integration/v2.10.5.tgz000066400000000000000000000317371413250514000233700ustar00rootroot00000000000000la;E2ӸUU\8+[)jӮEN$9׺J5ՍVUUi+3Q_BM2^/j.,M=A$]~(M`}xxyV<<9!FJ?"b%rAKs[|7-WEՊR5ü j:>~R;M]x` |! l`??aߖ8hW倔~T;;=-빩|ece\6˕3ەj0%ɅBPOjZn!ͧ伞fTj&k9}W_2߾[U9$ܮTowƴFis쬺Y/Ocy0jvKXb&YI}Ͼߒ$ 7I0Ktd_I <rPRj^/jE=דL>5IsBb~A_Hx4}/$>rP:&]7kQ34}6?3[Os|\g\83;_ЋT\[?&~|ϼfh=z.zT<Qu:F4}^*rڔ=UԒt y~{{r{{ЫJy}W^YIa}~+_ ^5k{ti^M+]+@a?7 mozPaCg"_|m0x2~S*_) Piʅz]7jF6_*5:r~%Q//_ȼz^YnX5`N剐2>.걚+͵4pmkKwp Zw-s)._YS.-V#a+֫=bJT݊xuiMw%lCf|U7eQD3̨6J^j5қ:mdTT[uWgB xe%sҲ6nY,Ϭ`2KW/^xfc6^w.zW&XhH;!?3muX ;xq._~jNmChv|>ʻu[5R4~uu=;yu+*L}yqW&fbZV{fYf<73#[V7DY+KzWu4 3m^~|@W92kH6B}.({:V q9h {GH&((54خT ]q[]x,L>ڣ{MmXmojf4۽ڍIXH񘫁ZILZNV^qT޶Zh_xbÈz5+yfJו94Rwn[mF߇]k/# )ǚQcj@s0:Ԯԭhr(;&گq"IW jʦyMr3RfO4_ 0[AƐWxa6L{/WӜ '݌4DgW8yC4+Qxa{Fb;u 9aKaW{TJQoٜf詹czm] zĊ)ņdPMqPNʯ[}B7͡lALgvƶ}c;ڀt#V5"?Bk/qZKŊWW s1Qz^{f+Q ݨԫy}q8CvlQ׾5וεt5tG~[%~\N{5ge^B^{ 3pܵVsN[Q;_C'=likNibJaԪO*f/.4C ^_ܽ_Xxj`֑S˚ez7f+˫k+f:2uAQ˙ej>H\?.H?ѡȏ"? 
1@!IDv,[{iqvb66-5 dKIAֱk="]ґS׊ʾriN?~5֕rNxģ /QMBoVlbO;#~i6M3˞;ZTG8vdNrvD1wwteso%3W6Q{[TToHD=wݜexZ1WXTtG4ZN禛]&_UhNM?"&\ݵ~3R*]r]I|GȤzxw?q>`@ -})oD:|mҡ_;\O~} z}?,ihpLCo?~ۗz-3lHy~Vމ6 7z\u͗҆Ќ^-ԪwW.rsͬ;^@N8NO}Uo^^s=ϿW= z{sAtZ l+zɼhkl?Uv̢qDp|H9#>Yho?{2eiXjZl5=N7=v8TH9qBޭ[@;쬫;3=n_^\9ŕxb>֝|   IWQfUۦǰ%t_Mq@ jFh8tX KU GZtb$iɃɟ&|䋒ӓ$N[$ْ.N*N&J|E[z%u ӊ˅ɓ:'wNm//+--+Rl/)wۦO.n̙>nwxlE=6+x= 1sy+͕#2j ^^ꢹ-%eSyu}˓c3 ]hB L.nu{3AI=6WGwt-s; 9!'#% yg5skr ,dNQ݂+u=B^s)ZO-C^l) l=FBQh9ڇQpZ.҃ S@8%'ߐC r%[dy˳\1&4bRs(h46ֶ&N'!Z3GO`tw J65Kb0h3Yj n꥔m l\_an6С %6Ӛ֚805W*# YFP]B l6w*؟>G3.tt<xԫU~F{dz]`,WWNTgla47ly2"rw;P54יjkIq4U([z/,dV=.W#u CG6Ґ8Qu|M]bsoqgSi ;["-d<ku1\>\>\>\>*r+jVpjaU'W*rCQ5*2H:?r'?KR]]"SR\wL{\{ ;_]3;awM 7Vn4j' [GpQb71]X&vt vSX;m.A t#R:*,fSP]_ig>2ՐL6Ez~:EQjs9 W7F(r C5Z9y/*bVgxm-GDUi1ͪ5+m' jl17[kk Pk)%:zz J,zCy~~ l\_an D[\kU05W*# jR",x L^FVO VBBz~fUEŠ N(?Jk,7@}Qzżc6[w:NDV ij|Y#"+uDh'@>#L9&K~ITr%6'=[Z~ΣCMaw:M/G.Tj! .'uu#Y,7C?/)ITy(FQ:)1uP=N+L4G|=VEIi]o_oHoR]g̗_#Nr~mڏ7TtugYy?W>. ^y:k ?QLtL hN旌&=_Gk9^]1-l;YTMʅtƬ{+(FQ6u>RrX)8١k0Tߩwb؁QG/fBNauiz=- Y_tIƶz_͈a[y./YqLa#؇}p ?@؇؇؇>UQſߥ) ?@g5-[}f\{*اC yOci}r*[ZIۗմwO>>>xx\[fڍ}\swva7rӋښvNAӅc?]x#ʾƻ|Ye.ݽ}u kżg?ZEo޺眦5po>>ӁkYާ Dcwo qkcmqUQTǃ֔ê[2Oꟺ6iG) yc6VrCM9/]~_ջc <̿m~[R䣅vh+/5ݧg];4IUYksl߉;{}i[guTt]o;o}yy]) X{Wh˫}ۿH9__o [wnS6sg߯yvQ+Hݘi?qi\>GkoVHn5+K| ߴuC/g&j9Oݴ;nJy _-]m|_d\od+M~fLiL[涗5|3bf^aw#?wG9G?.ƂkY9Pw~䃣zo95[={N}uw[rWT󫻓M}]9ggUGy>Z^~~Yu·o~ +:nK_ylsI[nV?~o^14&*2z1%xLj?3ɌWуj%dQz1y$y$ڲJ0w4&XbP\))k`uIs. 
zJ񭲍'r[ޤk6>[X+'Ah1d*f0 .My-2<ޢ7gYy&*Ev /[Uk7bSiM]J=#^Pu Iޥ庥bm )sJ>pfԡ/tgCxId-IYiF[67 Mi0cm=G ϿkUtOdGLX◶,j_㫵SrdORvi-û/(g7\ y˧r [+]c |0e┎X|L%y6W&Ѣ72tݶ鑰^sFcg^S~QdDAFZM濜u$dދ|yg^AL/=FߺLO=WOӵ钽{jwD߱*ՀpGKr )qMjrjzqI"n.>z:rO#[ fG&N)*J"W*+m7G/V&JX~c+R˹͎[a>qʖ[\hlsr9@,`nIJ<O&9r3j,x|/ydw~Ywc}CVLR@vZCW1=I&d=U@7ÿ&c#ӛ-qi>!LY- 5'D꡷ʾYr#QECӇNM;N2'q\y+cy2me"]TJMk͵ yǓ4ʇ<ݢVc@\P}EG9mJKkv]SxW R?R %%#JPD+1X?waHnŻS:%ocÚVr36\I7밽o-NH9g:@{j 8  F\ǚ?Rr}_$]TRAX`Z[󈲆iMr(lԁ9ѽ7Nd7T/wyjDʼ%[ m$f[=B5O6re׻ 0VZ`zydM|ֺ):=aqĝ&(~C| Z, Zvf0$ێd*ww=t?fO2P?hdlǽh/b ў;W*MjE7 %oV_K|BQRE U{ Q>쓏 N\rGUVjj5[AX@EKd?ڍ!:`?\ӒSdWV$m)d?v1߶J.jeɇؽ)tKGtϏm) V5t/ӼSl\o,meyȏpڣ3+b{t̴DvcsiѸTE<gzSUC[y^K¬^x"tH9Yz]c'C.`Fh bɛR)yys|ǡ _{U\23q5\fɃPT.W'G_\y)_D_[3%8.b_N_5#mGD `_VX[si LvC*dF?k;f7NՃfyek:?;9Pxc'zBg搚EɻX?J][ѭז]ao,OeJu=_@>go!Se^=c=G,FQzCeG]G8Qymb3RG5ijse,vt})=%7ź[&[sϩOB8}MYHzkRl\3K쭵R[ZPFpbU-ChAZϽCiO^_\& VLyR3JբK%lj垽ENR^qAoÃґZ>S޼ŵ{Kݕ=<=ݏVHPv"waѨ2絜F,s.+'GgL BP?[m~ /C̄Ga?Wb`{Urr!2jނT^chHwS) vc{Qa $d.{X@ob*~b,Boӂ},rE99Vt\AclRJ>M',OR#l!'z`?@ghqh; z>(L>Vetq(Y3)RFeMj -|SvlRVWGEEϖcw|'= 0: -1", e.errors[0]) @defer.inlineCallbacks def test_config_negative_random_delay_min(self): try: yield self.changesource.reconfigServiceWithSibling( self.Subclass(name="NegativePollRandomDelayMin", pollInterval=1, pollAtLaunch=False, pollRandomDelayMin=-1, pollRandomDelayMax=1)) except ConfigErrors as e: self.assertEqual("min random delay must be >= 0: -1", e.errors[0]) @defer.inlineCallbacks def test_config_negative_random_delay_max(self): try: yield self.changesource.reconfigServiceWithSibling( self.Subclass(name="NegativePollRandomDelayMax", pollInterval=1, pollAtLaunch=False, pollRandomDelayMin=1, pollRandomDelayMax=-1)) except ConfigErrors as e: self.assertEqual("max random delay must be >= 0: -1", e.errors[0]) @defer.inlineCallbacks def test_config_random_delay_min_gt_random_delay_max(self): try: yield self.changesource.reconfigServiceWithSibling( self.Subclass(name="PollRandomDelayMinGtPollRandomDelayMax", pollInterval=1, pollAtLaunch=False, 
pollRandomDelayMin=2, pollRandomDelayMax=1)) except ConfigErrors as e: self.assertEqual("min random delay must be <= 1: 2", e.errors[0]) @defer.inlineCallbacks def test_config_random_delay_max_gte_interval(self): try: yield self.changesource.reconfigServiceWithSibling( self.Subclass(name="PollRandomDelayMaxGtePollInterval", pollInterval=1, pollAtLaunch=False, pollRandomDelayMax=1)) except ConfigErrors as e: self.assertEqual("max random delay must be < 1: 1", e.errors[0]) @defer.inlineCallbacks def test_loop_loops(self): # track when poll() gets called loops = [] self.changesource.poll = \ lambda: loops.append(self.reactor.seconds()) yield self.startChangeSource() yield self.changesource.reconfigServiceWithSibling(self.Subclass( name="DummyCS", pollInterval=5, pollAtLaunch=False)) yield self.runClockFor(12) # note that it does *not* poll at time 0 self.assertEqual(loops, [5.0, 10.0]) @defer.inlineCallbacks def test_loop_exception(self): # track when poll() gets called loops = [] def poll(): loops.append(self.reactor.seconds()) raise RuntimeError("oh noes") self.changesource.poll = poll yield self.startChangeSource() yield self.changesource.reconfigServiceWithSibling(self.Subclass( name="DummyCS", pollInterval=5, pollAtLaunch=False)) yield self.runClockFor(12) # note that it keeps looping after error self.assertEqual(loops, [5.0, 10.0]) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 2) @defer.inlineCallbacks def test_poll_only_if_activated(self): """The polling logic only applies if the source actually starts!""" self.setChangeSourceToMaster(self.OTHER_MASTER_ID) loops = [] self.changesource.poll = \ lambda: loops.append(self.reactor.seconds()) yield self.startChangeSource() yield self.changesource.reconfigServiceWithSibling(self.Subclass( name="DummyCS", pollInterval=5, pollAtLaunch=False)) yield self.runClockFor(12) # it doesn't do anything because it was already claimed self.assertEqual(loops, []) @defer.inlineCallbacks def test_pollAtLaunch(self): # 
track when poll() gets called loops = [] self.changesource.poll = \ lambda: loops.append(self.reactor.seconds()) yield self.startChangeSource() yield self.changesource.reconfigServiceWithSibling(self.Subclass( name="DummyCS", pollInterval=5, pollAtLaunch=True)) yield self.runClockFor(12) # note that it *does* poll at time 0 self.assertEqual(loops, [0.0, 5.0, 10.0]) buildbot-3.4.0/master/buildbot/test/unit/changes/test_bitbucket.py000066400000000000000000000546221413250514000253600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re from datetime import datetime from twisted.internet import defer from twisted.trial import unittest from twisted.web import client from twisted.web.error import Error from buildbot.changes.bitbucket import BitbucketPullrequestPoller from buildbot.test.util import changesource from buildbot.test.util.misc import TestReactorMixin class SourceRest(): """https://bitbucket.org/!api/2.0/repositories/{owner}/{slug}""" template = """\ { "hash": "%(hash)s", "links": { "html": { "href": "https://bitbucket.org/%(owner)s/%(slug)s/commits/%(short_hash)s" } }, "repository": { "links": { "self": { "href": "https://bitbucket.org/!api/2.0/repositories/%(owner)s/%(slug)s" } } }, "date": "%(date)s" } """ repo_template = """\ { "links": { "html": { "href": "https://bitbucket.org/%(owner)s/%(slug)s" } } } """ def __init__(self, owner, slug, hash, date): self.owner = owner self.slug = slug self.hash = hash self.date = date def request(self): return self.template % { "owner": self.owner, "slug": self.slug, "hash": self.hash, "short_hash": self.hash[0:12], "date": self.date, } def repo_request(self): return self.repo_template % { "owner": self.owner, "slug": self.slug, } class PullRequestRest(): """https://bitbucket.org/!api/2.0/repositories/{owner}/{slug}/pullrequests/{pull_request_id}""" template = """\ { "description": "%(description)s", "title": "%(title)s", "source": { "commit": { "hash": "%(hash)s", "links": { "self": { "href": "https://bitbucket.org/!api/2.0/repositories/%(owner)s/%(slug)s/commit/%(hash)s" } } } }, "state": "OPEN", "author": { "display_name": "%(display_name)s" }, "created_on": "%(created_on)s", "participants": [ ], "updated_on": "%(updated_on)s", "merge_commit": null, "id": %(id)d } """ # noqa pylint: disable=line-too-long def __init__(self, nr, title, description, display_name, source, created_on, updated_on=None): self.nr = nr self.title = title self.description = description self.display_name = display_name 
self.source = source self.created_on = created_on if updated_on: self.updated_on = updated_on else: self.updated_on = self.created_on def request(self): return self.template % { "description": self.description, "title": self.title, "hash": self.source.hash, "short_hash": self.source.hash[0:12], "owner": self.source.owner, "slug": self.source.slug, "display_name": self.display_name, "created_on": self.created_on, "updated_on": self.updated_on, "id": self.nr, } class PullRequestListRest(): """https://bitbucket.org/api/2.0/repositories/{owner}/{slug}/pullrequests""" template = """\ { "description": "%(description)s", "links": { "self": { "href": "https://bitbucket.org/!api/2.0/repositories/%(owner)s/%(slug)s/pullrequests/%(id)d" }, "html": { "href": "https://bitbucket.org/%(owner)s/%(slug)s/pull-request/%(id)d" } }, "author": { "display_name": "%(display_name)s" }, "title": "%(title)s", "source": { "commit": { "hash": "%(short_hash)s", "links": { "self": { "href": "https://bitbucket.org/!api/2.0/repositories/%(src_owner)s/%(src_slug)s/commit/%(short_hash)s" } } }, "repository": { "links": { "self": { "href": "https://bitbucket.org/!api/2.0/repositories/%(src_owner)s/%(src_slug)s" } } }, "branch": { "name": "default" } }, "state": "OPEN", "created_on": "%(created_on)s", "updated_on": "%(updated_on)s", "merge_commit": null, "id": %(id)s } """ # noqa pylint: disable=line-too-long def __init__(self, owner, slug, prs): self.owner = owner self.slug = slug self.prs = prs self.pr_by_id = {} self.src_by_url = {} for pr in prs: self.pr_by_id[pr.nr] = pr self.src_by_url["{}/{}".format(pr.source.owner, pr.source.slug)] = pr.source def request(self): s = "" for pr in self.prs: s += self.template % { "description": pr.description, "owner": self.owner, "slug": self.slug, "display_name": pr.display_name, "title": pr.title, "hash": pr.source.hash, "short_hash": pr.source.hash[0:12], "src_owner": pr.source.owner, "src_slug": pr.source.slug, "created_on": pr.created_on, "updated_on": 
pr.updated_on, "id": pr.nr, } return """\ { "pagelen": 10, "values": [%s ], "page": 1 } """ % s def getPage(self, url, timeout=None, headers=None): list_url_re = re.compile( r"https://bitbucket.org/api/2.0/repositories/{}/{}/pullrequests".format(self.owner, self.slug)) pr_url_re = re.compile( r"https://bitbucket.org/!api/2.0/repositories/{}/{}/pullrequests/(?P\d+)".format( self.owner, self.slug)) source_commit_url_re = re.compile( r"https://bitbucket.org/!api/2.0/repositories/(?P.*)/(?P.*)/commit/(?P\d+)") # noqa pylint: disable=line-too-long source_url_re = re.compile( r"https://bitbucket.org/!api/2.0/repositories/(?P.*)/(?P.*)") if list_url_re.match(url): return defer.succeed(self.request()) m = pr_url_re.match(url) if m: return self.pr_by_id[int(m.group("id"))].request() m = source_commit_url_re.match(url) if m: return self.src_by_url["{}/{}".format(m.group("src_owner"), m.group("src_slug"))].request() m = source_url_re.match(url) if m: return self.src_by_url["{}/{}".format(m.group("src_owner"), m.group("src_slug"))].repo_request() raise Error(code=404) class TestBitbucketPullrequestPoller(changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() # create pull requests self.date = "2013-10-15T20:38:20.001797+00:00" self.date_epoch = datetime.strptime(self.date.split('.')[0], '%Y-%m-%dT%H:%M:%S') src = SourceRest( owner="contributor", slug="slug", hash="1111111111111111111111111111111111111111", date=self.date, ) pr = PullRequestRest( nr=1, title="title", description="description", display_name="contributor", source=src, created_on=self.date, ) self.pr_list = PullRequestListRest( owner="owner", slug="slug", prs=[pr], ) # update src = SourceRest( owner="contributor", slug="slug", hash="2222222222222222222222222222222222222222", date=self.date, ) pr = PullRequestRest( nr=1, title="title", description="description", display_name="contributor", source=src, created_on=self.date, ) self.pr_list2 = 
PullRequestListRest( owner="owner", slug="slug", prs=[pr], ) return self.setUpChangeSource() def tearDown(self): return self.tearDownChangeSource() def _fakeGetPage(self, result): # Install a fake getPage that puts the requested URL in self.getPage_got_url # and return result self.getPage_got_url = None def fake(url, timeout=None, headers=None): self.getPage_got_url = url return defer.succeed(result) self.patch(client, "getPage", fake) def _fakeGetPage403(self, expected_headers): def fail_unauthorized(url, timeout=None, headers=None): if headers != expected_headers: raise Error(code=403) self.patch(client, "getPage", fail_unauthorized) def _fakeGetPage404(self): def fail(url, timeout=None, headers=None): raise Error(code=404) self.patch(client, "getPage", fail) def attachDefaultChangeSource(self): return self.attachChangeSource(BitbucketPullrequestPoller( owner='owner', slug='slug')) # tests @defer.inlineCallbacks def test_describe(self): yield self.attachDefaultChangeSource() assert re.search(r'owner/slug', self.changesource.describe()) @defer.inlineCallbacks def test_poll_unknown_repo(self): yield self.attachDefaultChangeSource() # Polling a non-existent repository should result in a 404 self._fakeGetPage404() try: yield self.changesource.poll() self.fail( 'Polling a non-existent repository should result in a 404.') except Exception as e: self.assertEqual(str(e), '404 Not Found') @defer.inlineCallbacks def test_poll_unauthorized_failure(self): expected_headers = {b'Authorization': b'Basic dXNlcjoxMjM0'} yield self.attachDefaultChangeSource() # Polling without authorization should result in a 403 self._fakeGetPage403(expected_headers) try: yield self.changesource.poll() self.fail('Polling without authorization should result in a 403.') except Exception as e: self.assertEqual(str(e), '403 Forbidden') @defer.inlineCallbacks def test_poll_authorized_success(self): auth = ('user', '1234') expected_headers = {b'Authorization': b'Basic dXNlcjoxMjM0'} yield 
self.attachChangeSource(BitbucketPullrequestPoller( owner='owner', slug='slug', auth=auth, )) # Polling with authorization should success self._fakeGetPage403(expected_headers) try: yield self.changesource.poll() except Exception as e: self.assertNotEqual(str(e), '403 Forbidden') @defer.inlineCallbacks def test_poll_no_pull_requests(self): yield self.attachDefaultChangeSource() rest = PullRequestListRest(owner="owner", slug="slug", prs=[]) self._fakeGetPage(rest.request()) yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_poll_new_pull_requests(self): yield self.attachDefaultChangeSource() # patch client.getPage() self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {'pullrequesturl': 'https://bitbucket.org/owner/slug/pull-request/1'}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }]) @defer.inlineCallbacks def test_poll_no_updated_pull_request(self): yield self.attachDefaultChangeSource() # patch client.getPage() self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {'pullrequesturl': 'https://bitbucket.org/owner/slug/pull-request/1'}, 'repository': 
'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }]) # repoll yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) @defer.inlineCallbacks def test_poll_updated_pull_request(self): yield self.attachDefaultChangeSource() # patch client.getPage() self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {'pullrequesturl': 'https://bitbucket.org/owner/slug/pull-request/1'}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }]) self.patch(client, "getPage", self.pr_list2.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [ { 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {'pullrequesturl': 'https://bitbucket.org/owner/slug/pull-request/1'}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }, { 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: 
title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {'pullrequesturl': 'https://bitbucket.org/owner/slug/pull-request/1'}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '2222222222222222222222222222222222222222', 'revlink': 'https://bitbucket.org/contributor/slug/commits/222222222222', 'src': 'bitbucket', 'when_timestamp': 1381869500, } ]) @defer.inlineCallbacks def test_poll_pull_request_filter_False(self): yield self.attachChangeSource(BitbucketPullrequestPoller( owner='owner', slug='slug', pullrequest_filter=lambda x: False )) # patch client.getPage() self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_poll_pull_request_filter_True(self): yield self.attachChangeSource(BitbucketPullrequestPoller( owner='owner', slug='slug', pullrequest_filter=lambda x: True )) # patch client.getPage() self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {'pullrequesturl': 'https://bitbucket.org/owner/slug/pull-request/1'}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }]) @defer.inlineCallbacks def test_poll_pull_request_not_useTimestamps(self): yield self.attachChangeSource(BitbucketPullrequestPoller( owner='owner', slug='slug', useTimestamps=False, )) self.patch(client, "getPage", self.pr_list.getPage) self.reactor.advance(1396825656) yield self.changesource.poll() 
self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {'pullrequesturl': 'https://bitbucket.org/owner/slug/pull-request/1'}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1396825656, }]) @defer.inlineCallbacks def test_poll_pull_request_properties(self): yield self.attachChangeSource(BitbucketPullrequestPoller( owner='owner', slug='slug', bitbucket_property_whitelist=["bitbucket.*"], )) self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': { 'pullrequesturl': 'https://bitbucket.org/owner/slug/pull-request/1', 'bitbucket.author.display_name': 'contributor', 'bitbucket.created_on': '2013-10-15T20:38:20.001797+00:00', 'bitbucket.description': 'description', 'bitbucket.id': 1, 'bitbucket.links.html.href': 'https://bitbucket.org/owner/slug/pull-request/1', 'bitbucket.links.self.href': 'https://bitbucket.org/!api/2.0/' 'repositories/owner/slug/pullrequests/1', 'bitbucket.merge_commit': None, 'bitbucket.source.branch.name': 'default', 'bitbucket.source.commit.hash': '111111111111', 'bitbucket.source.commit.links.self.href': 'https://bitbucket.org/!api/2.0/' 'repositories/contributor/slug/' 'commit/111111111111', 'bitbucket.source.repository.links.self.href': 'https://bitbucket.org/!api/2.0/' 'repositories/contributor/slug', 'bitbucket.state': 'OPEN', 
'bitbucket.title': 'title', 'bitbucket.updated_on': '2013-10-15T20:38:20.001797+00:00' }, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }]) buildbot-3.4.0/master/buildbot/test/unit/changes/test_changes.py000066400000000000000000000163541413250514000250140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import pprint import re import textwrap from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import changes from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class Change(unittest.TestCase, TestReactorMixin): change23_rows = [ fakedb.Change(changeid=23, author="dustin", committer="dustin", comments="fix whitespace", branch="warnerdb", revision="deadbeef", when_timestamp=266738404, revlink='http://warner/0e92a098b', category='devel', repository='git://warner', codebase='mainapp', project='Buildbot'), fakedb.ChangeFile(changeid=23, filename='master/README.txt'), fakedb.ChangeFile(changeid=23, filename='worker/README.txt'), fakedb.ChangeProperty(changeid=23, property_name='notest', property_value='["no","Change"]'), fakedb.ChangeUser(changeid=23, uid=27), ] def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.change23 = changes.Change(**dict( # using **dict(..) 
forces kwargs category='devel', repository='git://warner', codebase='mainapp', who='dustin', committer='dustin', when=266738404, comments='fix whitespace', project='Buildbot', branch='warnerdb', revlink='http://warner/0e92a098b', properties={'notest': "no"}, files=['master/README.txt', 'worker/README.txt'], revision='deadbeef')) self.change23.number = 23 self.change24 = changes.Change(**dict( category='devel', repository='git://warner', codebase='mainapp', who='dustin', committer='dustin', when=266738405, comments='fix whitespace again', project='Buildbot', branch='warnerdb', revlink='http://warner/0e92a098c', properties={'notest': "no"}, files=['master/README.txt', 'worker/README.txt'], revision='deadbeef')) self.change24.number = 24 self.change25 = changes.Change(**dict( category='devel', repository='git://warner', codebase='mainapp', who='dustin', committer='dustin', when=266738406, comments='fix whitespace again', project='Buildbot', branch='warnerdb', revlink='http://warner/0e92a098d', properties={'notest': "no"}, files=['master/README.txt', 'worker/README.txt'], revision='deadbeef')) self.change25.number = 25 @defer.inlineCallbacks def test_fromChdict(self): # get a real honest-to-goodness chdict from the fake db yield self.master.db.insertTestData(self.change23_rows) chdict = yield self.master.db.changes.getChange(23) exp = self.change23 got = yield changes.Change.fromChdict(self.master, chdict) # compare ok = True ok = ok and got.number == exp.number ok = ok and got.who == exp.who ok = ok and got.committer == exp.committer ok = ok and sorted(got.files) == sorted(exp.files) ok = ok and got.comments == exp.comments ok = ok and got.revision == exp.revision ok = ok and got.when == exp.when ok = ok and got.branch == exp.branch ok = ok and got.category == exp.category ok = ok and got.revlink == exp.revlink ok = ok and got.properties == exp.properties ok = ok and got.repository == exp.repository ok = ok and got.codebase == exp.codebase ok = ok and got.project == 
exp.project if not ok: def printable(c): return pprint.pformat(c.__dict__) self.fail("changes do not match; expected\n{}\ngot\n{}".format(printable(exp), printable(got))) def test_str(self): string = str(self.change23) self.assertTrue(re.match(r"Change\(.*\)", string), string) def test_asText(self): text = self.change23.asText() self.assertTrue(re.match(textwrap.dedent('''\ Files: master/README.txt worker/README.txt On: git://warner For: Buildbot At: .* Changed By: dustin Committed By: dustin Comments: fix whitespaceProperties:. notest: no '''), text), text) def test_asDict(self): dict = self.change23.asDict() self.assertIn('1978', dict['at']) # timezone-sensitive del dict['at'] self.assertEqual(dict, { 'branch': 'warnerdb', 'category': 'devel', 'codebase': 'mainapp', 'comments': 'fix whitespace', 'files': [{'name': 'master/README.txt'}, {'name': 'worker/README.txt'}], 'number': 23, 'project': 'Buildbot', 'properties': [('notest', 'no', 'Change')], 'repository': 'git://warner', 'rev': 'deadbeef', 'revision': 'deadbeef', 'revlink': 'http://warner/0e92a098b', 'when': 266738404, 'who': 'dustin', 'committer': 'dustin'}) def test_getShortAuthor(self): self.assertEqual(self.change23.getShortAuthor(), 'dustin') def test_getTime(self): # careful, or timezones will hurt here self.assertIn('Jun 1978', self.change23.getTime()) def test_getTimes(self): self.assertEqual(self.change23.getTimes(), (266738404, None)) def test_getText(self): self.change23.who = 'nasty < nasty' # test the html escaping (ugh!) 
self.assertEqual(self.change23.getText(), ['nasty < nasty']) def test_getLogs(self): self.assertEqual(self.change23.getLogs(), {}) def test_compare(self): self.assertEqual(self.change23, self.change23) self.assertNotEqual(self.change24, self.change23) self.assertGreater(self.change24, self.change23) self.assertGreaterEqual(self.change24, self.change23) self.assertGreaterEqual(self.change24, self.change24) self.assertLessEqual(self.change24, self.change24) self.assertLessEqual(self.change23, self.change24) self.assertLess(self.change23, self.change25) buildbot-3.4.0/master/buildbot/test/unit/changes/test_filter.py000066400000000000000000000136341413250514000246670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re from twisted.trial import unittest from buildbot.changes import filter from buildbot.test.fake.change import Change class ChangeFilter(unittest.TestCase): def setUp(self): self.results = [] # (got, expected, msg) self.filt = None def tearDown(self): if self.results: raise RuntimeError("test forgot to call check()") def setfilter(self, **kwargs): self.filt = filter.ChangeFilter(**kwargs) def yes(self, change, msg): self.results.append((self.filt.filter_change(change), True, msg)) def no(self, change, msg): self.results.append((self.filt.filter_change(change), False, msg)) def check(self): errs = [] for r in self.results: if (r[0] or r[1]) and not (r[0] and r[1]): errs.append(r[2]) self.results = [] if errs: self.fail("; ".join(errs)) def test_filter_change_filter_fn(self): self.setfilter(filter_fn=lambda ch: ch.x > 3) self.no(Change(x=2), "filter_fn returns False") self.yes(Change(x=4), "filter_fn returns True") self.check() def test_filter_change_filt_str(self): self.setfilter(project="myproj") self.no(Change(project="yourproj"), "non-matching PROJECT returns False") self.yes(Change(project="myproj"), "matching PROJECT returns True") self.check() def test_filter_change_filt_list(self): self.setfilter(repository=["vc://a", "vc://b"]) self.yes(Change(repository="vc://a"), "matching REPOSITORY vc://a returns True") self.yes(Change(repository="vc://b"), "matching REPOSITORY vc://b returns True") self.no(Change(repository="vc://c"), "non-matching REPOSITORY returns False") self.no(Change(repository=None), "None for REPOSITORY returns False") self.check() def test_filter_change_filt_list_None(self): self.setfilter(branch=["mybr", None]) self.yes(Change(branch="mybr"), "matching BRANCH mybr returns True") self.yes(Change(branch=None), "matching BRANCH None returns True") self.no(Change(branch="misc"), "non-matching BRANCH returns False") self.check() def test_filter_change_filt_re(self): self.setfilter(category_re="^a.*") 
self.yes(Change(category="albert"), "matching CATEGORY returns True") self.no( Change(category="boris"), "non-matching CATEGORY returns False") self.check() def test_filter_change_branch_re(self): # regression - see #927 self.setfilter(branch_re="^t.*") self.yes(Change(branch="trunk"), "matching BRANCH returns True") self.no(Change(branch="development"), "non-matching BRANCH returns False") self.no(Change(branch=None), "branch=None returns False") self.check() def test_filter_change_filt_re_compiled(self): self.setfilter(category_re=re.compile("^b.*", re.I)) self.no(Change(category="albert"), "non-matching CATEGORY returns False") self.yes(Change(category="boris"), "matching CATEGORY returns True") self.yes( Change(category="Bruce"), "matching CATEGORY returns True, using re.I") self.check() def test_filter_change_combination(self): self.setfilter(project='p', repository='r', branch='b', category='c', codebase='cb') self.no(Change(project='x', repository='x', branch='x', category='x'), "none match -> False") self.no(Change(project='p', repository='r', branch='b', category='x'), "three match -> False") self.no(Change(project='p', repository='r', branch='b', category='c', codebase='x'), "four match -> False") self.yes(Change(project='p', repository='r', branch='b', category='c', codebase='cb'), "all match -> True") self.check() def test_filter_change_combination_filter_fn(self): self.setfilter(project='p', repository='r', branch='b', category='c', filter_fn=lambda c: c.ff) self.no(Change(project='x', repository='x', branch='x', category='x', ff=False), "none match and fn returns False -> False") self.no(Change(project='p', repository='r', branch='b', category='c', ff=False), "all match and fn returns False -> False") self.no(Change(project='x', repository='x', branch='x', category='x', ff=True), "none match and fn returns True -> False") self.yes(Change(project='p', repository='r', branch='b', category='c', ff=True), "all match and fn returns True -> False") 
self.check() def test_filter_props(self): self.setfilter() self.filt.checks.update( self.filt.createChecks( ("ref-updated", None, None, "prop:event.type"), )) self.yes( Change(properties={'event.type': 'ref-updated'}), "matching property") self.no( Change(properties={'event.type': 'patch-uploaded'}), "non matching property") self.no(Change(properties={}), "no property") self.check() buildbot-3.4.0/master/buildbot/test/unit/changes/test_gerritchangesource.py000066400000000000000000000736161413250514000272730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc[''], 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import copy import datetime import json import types from twisted.internet import defer from twisted.internet import error from twisted.internet import reactor from twisted.python import failure from twisted.trial import unittest from buildbot.changes import gerritchangesource from buildbot.test import fakedb from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.fake.change import Change from buildbot.test.util import changesource from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.runprocess import ExpectMaster from buildbot.test.util.runprocess import MasterRunProcessMixin class TestGerritHelpers(unittest.TestCase): def test_proper_json(self): self.assertEqual("Justin Case ", gerritchangesource._gerrit_user_to_author({ "username": "justincase", "name": "Justin Case", "email": "justin.case@example.com" })) def test_missing_username(self): self.assertEqual("Justin Case ", gerritchangesource._gerrit_user_to_author({ "name": "Justin Case", "email": "justin.case@example.com" })) def test_missing_name(self): self.assertEqual("unknown ", gerritchangesource._gerrit_user_to_author({ "email": "justin.case@example.com" })) self.assertEqual("gerrit ", gerritchangesource._gerrit_user_to_author({ "email": "justin.case@example.com" }, "gerrit")) self.assertEqual("justincase ", gerritchangesource._gerrit_user_to_author({ "username": "justincase", "email": "justin.case@example.com" }, "gerrit")) def test_missing_email(self): self.assertEqual("Justin Case", gerritchangesource._gerrit_user_to_author({ "username": "justincase", "name": "Justin Case" })) self.assertEqual("Justin Case", gerritchangesource._gerrit_user_to_author({ "name": "Justin Case" })) self.assertEqual("justincase", gerritchangesource._gerrit_user_to_author({ "username": "justincase" })) self.assertEqual("unknown", gerritchangesource._gerrit_user_to_author({ })) self.assertEqual("gerrit", 
gerritchangesource._gerrit_user_to_author({ }, "gerrit")) class TestGerritChangeSource(MasterRunProcessMixin, changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setup_master_run_process() return self.setUpChangeSource() def tearDown(self): return self.tearDownChangeSource() @defer.inlineCallbacks def newChangeSource(self, host, user, *args, **kwargs): s = gerritchangesource.GerritChangeSource( host, user, *args, **kwargs) yield self.attachChangeSource(s) s.configureService() return s def assert_changes(self, expected_changes, ignore_keys): self.assertEqual(len(self.master.data.updates.changesAdded), len(expected_changes)) for i, expected_change in enumerate(expected_changes): change = self.master.data.updates.changesAdded[i] for key in ignore_keys: del change[key] self.assertEqual(change, expected_change) # tests @defer.inlineCallbacks def test_describe(self): s = yield self.newChangeSource('somehost', 'someuser') self.assertSubstring("GerritChangeSource", s.describe()) @defer.inlineCallbacks def test_name(self): s = yield self.newChangeSource('somehost', 'someuser') self.assertEqual("GerritChangeSource:someuser@somehost:29418", s.name) s = yield self.newChangeSource('somehost', 'someuser', name="MyName") self.assertEqual("MyName", s.name) # TODO: test the backoff algorithm patchset_created_event = { "uploader": { 'name': 'uploader uploader', 'email': 'uploader@example.com', 'username': 'uploader' }, "patchSet": { "number": 1, "revision": "29b73c3eb1aeaa9e6c7da520a940d60810e883db", "parents": ["7e563631188dcadf32aad0d8647c818834921a1e"], "ref": "refs/changes/21/4321/1", "uploader": { 'name': 'uploader uploader', 'email': 'uploader@example.com', 'username': 'uploader' }, "createdOn": 1627214047, "author": { 'name': 'author author', 'email': 'author@example.com', 'username': 'author' }, "kind": "REWORK", "sizeInsertions": 1, "sizeDeletions": 0 }, "change": { "project": "test", "branch": "master", "id": 
"I21234123412341234123412341234", "number": 4321, "subject": "change subject", "owner": { 'name': 'owner owner', 'email': 'owner@example.com', 'username': 'owner' }, "url": "http://example.com/c/test/+/4321", "commitMessage": "test1\n\nChange-Id: I21234123412341234123412341234\n", "createdOn": 1627214047, "status": "NEW" }, "project": "test", "refName": "refs/heads/master", "changeKey": {"id": "I21234123412341234123412341234"}, "type": "patchset-created", "eventCreatedOn": 1627214048 } # this variable is reused in test_steps_source_repo # to ensure correct integration between change source and repo step expected_change_patchset_created = { 'category': 'patchset-created', 'files': ['unknown'], 'repository': 'ssh://someuser@somehost:29418/test', 'author': 'owner owner ', 'committer': None, 'comments': 'change subject', 'project': 'test', 'branch': 'refs/changes/21/4321/1', 'revision': '29b73c3eb1aeaa9e6c7da520a940d60810e883db', 'codebase': None, 'revlink': 'http://example.com/c/test/+/4321', 'src': None, 'when_timestamp': None, } @defer.inlineCallbacks def test_lineReceived_patchset_created(self): s = yield self.newChangeSource('somehost', 'someuser') yield s.lineReceived(json.dumps(self.patchset_created_event)) self.assert_changes([self.expected_change_patchset_created], ignore_keys=['properties']) @defer.inlineCallbacks def test_lineReceived_patchset_created_props(self): s = yield self.newChangeSource('somehost', 'someuser') yield s.lineReceived(json.dumps(self.patchset_created_event)) change = copy.deepcopy(self.expected_change_patchset_created) change['properties'] = { 'event.change.branch': 'master', 'event.change.commitMessage': 'test1\n\nChange-Id: I21234123412341234123412341234\n', 'event.change.createdOn': 1627214047, 'event.change.id': 'I21234123412341234123412341234', 'event.change.number': 4321, 'event.change.owner.email': 'owner@example.com', 'event.change.owner.name': 'owner owner', 'event.change.owner.username': 'owner', 'event.change.project': 'test', 
'event.change.status': 'NEW', 'event.change.subject': 'change subject', 'event.change.url': 'http://example.com/c/test/+/4321', 'event.changeKey.id': 'I21234123412341234123412341234', 'event.patchSet.author.email': 'author@example.com', 'event.patchSet.author.name': 'author author', 'event.patchSet.author.username': 'author', 'event.patchSet.createdOn': 1627214047, 'event.patchSet.kind': 'REWORK', 'event.patchSet.number': 1, 'event.patchSet.parents': ['7e563631188dcadf32aad0d8647c818834921a1e'], 'event.patchSet.ref': 'refs/changes/21/4321/1', 'event.patchSet.revision': '29b73c3eb1aeaa9e6c7da520a940d60810e883db', 'event.patchSet.sizeDeletions': 0, 'event.patchSet.sizeInsertions': 1, 'event.patchSet.uploader.email': 'uploader@example.com', 'event.patchSet.uploader.name': 'uploader uploader', 'event.patchSet.uploader.username': 'uploader', 'event.project': 'test', 'event.refName': 'refs/heads/master', 'event.source': 'GerritChangeSource', 'event.type': 'patchset-created', 'event.uploader.email': 'uploader@example.com', 'event.uploader.name': 'uploader uploader', 'event.uploader.username': 'uploader', 'target_branch': 'master', } self.maxDiff = None self.assert_changes([change], ignore_keys=[]) comment_added_event = { "type": "comment-added", "author": { 'name': 'author author', 'email': 'author@example.com', 'username': 'author' }, "approvals": [{"type": "Code-Review", "description": "Code-Review", "value": "0"}], "comment": "Patch Set 1:\n\ntest comment", "patchSet": { "number": 1, "revision": "29b73c3eb1aeaa9e6c7da520a940d60810e883db", "parents": ["7e563631188dcadf32aad0d8647c818834921a1e"], "ref": "refs/changes/21/4321/1", "uploader": { 'name': 'uploader uploader', 'email': 'uploader@example.com', 'username': 'uploader' }, "createdOn": 1627214047, "author": { 'name': 'author author', 'email': 'author@example.com', 'username': 'author' }, "kind": "REWORK", "sizeInsertions": 1, "sizeDeletions": 0 }, "change": { "project": "test", "branch": "master", "id": 
"I21234123412341234123412341234", "number": 4321, "subject": "change subject", "owner": { 'name': 'owner owner', 'email': 'owner@example.com', 'username': 'owner' }, "url": "http://example.com/c/test/+/4321", "commitMessage": "test1\n\nChange-Id: I21234123412341234123412341234\n", "createdOn": 1627214047, "status": "NEW" }, "project": "test", "refName": "refs/heads/master", "changeKey": {"id": "I21234123412341234123412341234"}, "eventCreatedOn": 1627214102 } expected_change_comment_added = { 'category': 'comment-added', 'files': ['unknown'], 'repository': 'ssh://someuser@somehost:29418/test', 'author': 'owner owner ', 'committer': None, 'comments': 'change subject', 'project': 'test', 'branch': 'refs/changes/21/4321/1', 'revlink': 'http://example.com/c/test/+/4321', 'codebase': None, 'revision': '29b73c3eb1aeaa9e6c7da520a940d60810e883db', 'src': None, 'when_timestamp': None, } @defer.inlineCallbacks def test_lineReceived_comment_added(self): s = yield self.newChangeSource('somehost', 'someuser', handled_events=["comment-added"]) yield s.lineReceived(json.dumps(self.comment_added_event)) self.assert_changes([self.expected_change_comment_added], ignore_keys=['properties']) @defer.inlineCallbacks def test_lineReceived_ref_updated(self): s = yield self.newChangeSource('somehost', 'someuser') yield s.lineReceived(json.dumps({ 'type': 'ref-updated', 'submitter': { 'name': 'tester', 'email': 'tester@example.com', 'username': 'tester' }, 'refUpdate': { 'oldRev': '12341234', 'newRev': '56785678', 'refName': 'refs/heads/master', 'project': 'test' }, 'eventCreatedOn': 1614528683 })) self.assertEqual(len(self.master.data.updates.changesAdded), 1) c = self.master.data.updates.changesAdded[0] self.assertEqual(c, { 'files': ['unknown'], 'comments': 'Gerrit: commit(s) pushed.', 'author': 'tester ', 'committer': None, 'revision': '56785678', 'when_timestamp': None, 'branch': 'refs/heads/master', 'category': 'ref-updated', 'revlink': '', 'properties': { 'event.type': 'ref-updated', 
'event.submitter.name': 'tester', 'event.submitter.email': 'tester@example.com', 'event.submitter.username': 'tester', 'event.refUpdate.oldRev': '12341234', 'event.refUpdate.newRev': '56785678', 'event.refUpdate.refName': 'refs/heads/master', 'event.refUpdate.project': 'test', 'event.source': 'GerritChangeSource' }, 'repository': 'ssh://someuser@somehost:29418/test', 'codebase': None, 'project': 'test', 'src': None }) @defer.inlineCallbacks def test_lineReceived_ref_updated_for_change(self): s = yield self.newChangeSource('somehost', 'someuser') yield s.lineReceived(json.dumps({ 'type': 'ref-updated', 'submitter': { 'name': 'tester', 'email': 'tester@example.com', 'username': 'tester' }, 'refUpdate': { 'oldRev': '00000000', 'newRev': '56785678', 'refName': 'refs/changes/12/432112/1', 'project': 'test' }, 'eventCreatedOn': 1614528683 })) self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_duplicate_events_ignored(self): s = yield self.newChangeSource('somehost', 'someuser') yield s.lineReceived(json.dumps(self.patchset_created_event)) self.assertEqual(len(self.master.data.updates.changesAdded), 1) patchset_created_event = copy.deepcopy(self.patchset_created_event) patchset_created_event['change']['project'] = {'name': 'test'} yield s.lineReceived(json.dumps(patchset_created_event)) self.assertEqual(len(self.master.data.updates.changesAdded), 1) @defer.inlineCallbacks def test_duplicate_non_source_events_not_ignored(self): s = yield self.newChangeSource('somehost', 'someuser', handled_events=['patchset-created', 'ref-updated', 'change-merged', 'comment-added']) yield s.lineReceived(json.dumps(self.comment_added_event)) self.assertEqual(len(self.master.data.updates.changesAdded), 1) yield s.lineReceived(json.dumps(self.comment_added_event)) self.assertEqual(len(self.master.data.updates.changesAdded), 2) @defer.inlineCallbacks def test_malformed_events_ignored(self): s = yield self.newChangeSource('somehost', 'someuser') # 
"change" not in event yield s.lineReceived(json.dumps(dict( type="patchset-created", patchSet=dict(revision="abcdef", number="12") ))) self.assertEqual(len(self.master.data.updates.changesAdded), 0) # "patchSet" not in event yield s.lineReceived(json.dumps(dict( type="patchset-created", change=dict( branch="br", # Note that this time "project" is a dictionary project=dict(name="pr"), number="4321", owner=dict(name="Dustin", email="dustin@mozilla.com"), url="http://buildbot.net", subject="fix 1234" ), ))) self.assertEqual(len(self.master.data.updates.changesAdded), 0) change_merged_event = { "type": "change-merged", "change": { "branch": "br", "project": "pr", "number": "4321", "owner": {"name": "Chuck", "email": "chuck@norris.com"}, "url": "http://buildbot.net", "subject": "fix 1234"}, "patchSet": {"revision": "abcdefj", "number": "13"} } @defer.inlineCallbacks def test_handled_events_filter_true(self): s = yield self.newChangeSource('somehost', 'some_choosy_user', handled_events=["change-merged"]) yield s.lineReceived(json.dumps(self.change_merged_event)) self.assertEqual(len(self.master.data.updates.changesAdded), 1) c = self.master.data.updates.changesAdded[0] self.assertEqual(c["category"], "change-merged") self.assertEqual(c["branch"], "br") @defer.inlineCallbacks def test_handled_events_filter_false(self): s = yield self.newChangeSource('somehost', 'some_choosy_user') yield s.lineReceived(json.dumps(self.change_merged_event)) self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_custom_handler(self): s = yield self.newChangeSource('somehost', 'some_choosy_user', handled_events=["change-merged"]) def custom_handler(self, properties, event): event['change']['project'] = "world" return self.addChangeFromEvent(properties, event) # Patches class to not bother with the inheritance s.eventReceived_change_merged = types.MethodType(custom_handler, s) yield s.lineReceived(json.dumps(self.change_merged_event)) 
self.assertEqual(len(self.master.data.updates.changesAdded), 1) c = self.master.data.updates.changesAdded[0] self.assertEqual(c['project'], "world") @defer.inlineCallbacks def test_startStreamProcess_bytes_output(self): s = yield self.newChangeSource('somehost', 'some_choosy_user', debug=True) exp_argv = ['ssh', '-o', 'BatchMode=yes', 'some_choosy_user@somehost', '-p', '29418'] exp_argv += ['gerrit', 'stream-events'] def spawnProcess(pp, cmd, argv, env): self.assertEqual([cmd, argv], [exp_argv[0], exp_argv]) pp.errReceived(b'test stderr\n') pp.outReceived(b'{"type":"dropped-output"}\n') so = error.ProcessDone(None) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) s.startStreamProcess() # ------------------------------------------------------------------------- # Test data for getFiles() # ------------------------------------------------------------------------- query_files_success_line1 = { "patchSets": [ { "number": 1, "files": [ {"file": "/COMMIT_MSG", "type": "ADDED", "insertions": 13, "deletions": 0}, ], }, { "number": 13, "files": [ {"file": "/COMMIT_MSG", "type": "ADDED", "insertions": 13, "deletions": 0}, {"file": "file1", "type": "MODIFIED", "insertions": 7, "deletions": 0}, {"file": "file2", "type": "MODIFIED", "insertions": 2, "deletions": -2}, ], } ] } query_files_success_line2 = { "type": "stats", "rowCount": 1 } query_files_success = '\n'.join([ json.dumps(query_files_success_line1), json.dumps(query_files_success_line2) ]).encode('utf8') query_files_failure = b'{"type":"stats","rowCount":0}' @defer.inlineCallbacks def test_getFiles(self): s = yield self.newChangeSource('host', 'user', gerritport=2222) exp_argv = [ 'ssh', '-o', 'BatchMode=yes', 'user@host', '-p', '2222', 'gerrit', 'query', '1000', '--format', 'JSON', '--files', '--patch-sets' ] self.expect_commands( ExpectMaster(exp_argv) .stdout(self.query_files_success), ExpectMaster(exp_argv) .stdout(self.query_files_failure) ) res = yield s.getFiles(1000, 13) 
self.assertEqual(set(res), {'/COMMIT_MSG', 'file1', 'file2'}) res = yield s.getFiles(1000, 13) self.assertEqual(res, ['unknown']) self.assert_all_commands_ran() @defer.inlineCallbacks def test_getFilesFromEvent(self): self.expect_commands( ExpectMaster(['ssh', '-o', 'BatchMode=yes', 'user@host', '-p', '29418', 'gerrit', 'query', '4321', '--format', 'JSON', '--files', '--patch-sets']) .stdout(self.query_files_success) ) s = yield self.newChangeSource('host', 'user', get_files=True, handled_events=["change-merged"]) yield s.lineReceived(json.dumps(self.change_merged_event)) c = self.master.data.updates.changesAdded[0] self.assertEqual(set(c['files']), {'/COMMIT_MSG', 'file1', 'file2'}) self.assert_all_commands_ran() class TestGerritEventLogPoller(changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): NOW_TIMESTAMP = 1479302598 EVENT_TIMESTAMP = 1479302599 NOW_FORMATTED = '2016-11-16 13:23:18' EVENT_FORMATTED = '2016-11-16 13:23:19' OBJECTID = 1234 @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpChangeSource() yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() yield self.tearDownChangeSource() @defer.inlineCallbacks def newChangeSource(self, **kwargs): auth = kwargs.pop('auth', ('log', 'pass')) self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, 'gerrit', auth=auth) self.changesource = gerritchangesource.GerritEventLogPoller( 'gerrit', auth=auth, gitBaseURL="ssh://someuser@somehost:29418", pollAtLaunch=False, **kwargs) @defer.inlineCallbacks def startChangeSource(self): yield self.changesource.setServiceParent(self.master) yield self.attachChangeSource(self.changesource) # tests @defer.inlineCallbacks def test_now(self): yield self.newChangeSource() self.changesource.now() @defer.inlineCallbacks def test_describe(self): # describe is not used yet in buildbot nine, but it can still be useful in the future, so # lets implement and 
test it yield self.newChangeSource() self.assertSubstring('GerritEventLogPoller', self.changesource.describe()) @defer.inlineCallbacks def test_name(self): yield self.newChangeSource() self.assertEqual('GerritEventLogPoller:gerrit', self.changesource.name) @defer.inlineCallbacks def test_lineReceived_patchset_created(self): self.master.db.insertTestData([ fakedb.Object(id=self.OBJECTID, name='GerritEventLogPoller:gerrit', class_name='GerritEventLogPoller')]) yield self.newChangeSource(get_files=True) self.changesource.now = lambda: datetime.datetime.utcfromtimestamp( self.NOW_TIMESTAMP) thirty_days_ago = ( datetime.datetime.utcfromtimestamp(self.NOW_TIMESTAMP) - datetime.timedelta(days=30)) self._http.expect(method='get', ep='/plugins/events-log/events/', params={'t1': thirty_days_ago.strftime("%Y-%m-%d %H:%M:%S")}, content_json=dict( type="patchset-created", change=dict( branch="master", project="test", number="4321", owner=dict(name="owner owner", email="owner@example.com"), url="http://example.com/c/test/+/4321", subject="change subject" ), eventCreatedOn=self.EVENT_TIMESTAMP, patchSet={ 'revision': "29b73c3eb1aeaa9e6c7da520a940d60810e883db", 'number': "1", 'ref': 'refs/changes/21/4321/1'} )) self._http.expect( method='get', ep='/changes/4321/revisions/1/files/', content=self.change_revision_resp, ) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) c = self.master.data.updates.changesAdded[0] expected_change = dict(TestGerritChangeSource.expected_change_patchset_created) for k, v in c.items(): if k in ('files', 'properties'): continue self.assertEqual(expected_change[k], v) self.master.db.state.assertState( self.OBJECTID, last_event_ts=self.EVENT_TIMESTAMP) self.assertEqual(set(c['files']), {'/COMMIT_MSG', 'file1'}) # do a second poll, it should ask for the next events self._http.expect(method='get', ep='/plugins/events-log/events/', params={'t1': self.EVENT_FORMATTED}, 
content_json=dict( type="patchset-created", change=dict( branch="br", project="pr", number="4321", owner=dict(name="Dustin", email="dustin@mozilla.com"), url="http://buildbot.net", subject="fix 1234" ), eventCreatedOn=self.EVENT_TIMESTAMP + 1, patchSet={ 'revision': "29b73c3eb1aeaa9e6c7da520a940d60810e883db", 'number': "1", 'ref': 'refs/changes/21/4321/1'} )) self._http.expect( method='get', ep='/changes/4321/revisions/1/files/', content=self.change_revision_resp, ) yield self.changesource.poll() self.master.db.state.assertState( self.OBJECTID, last_event_ts=self.EVENT_TIMESTAMP + 1) change_revision_dict = { '/COMMIT_MSG': {'status': 'A', 'lines_inserted': 9, 'size_delta': 1, 'size': 1}, 'file1': {'lines_inserted': 9, 'lines_deleted': 2, 'size_delta': 1, 'size': 1}, } change_revision_resp = b')]}\n' + json.dumps(change_revision_dict).encode('utf8') @defer.inlineCallbacks def test_getFiles(self): yield self.newChangeSource(get_files=True) yield self.startChangeSource() self._http.expect( method='get', ep='/changes/100/revisions/1/files/', content=self.change_revision_resp, ) files = yield self.changesource.getFiles(100, 1) self.assertEqual(set(files), {'/COMMIT_MSG', 'file1'}) class TestGerritChangeFilter(unittest.TestCase): def test_basic(self): props = { 'event.type': 'patchset-created', 'event.change.branch': 'master', } ch = Change(**TestGerritChangeSource.expected_change_patchset_created, properties=props) f = gerritchangesource.GerritChangeFilter( branch=["master"], eventtype=["patchset-created"]) self.assertTrue(f.filter_change(ch)) f = gerritchangesource.GerritChangeFilter( branch="master2", eventtype=["patchset-created"]) self.assertFalse(f.filter_change(ch)) f = gerritchangesource.GerritChangeFilter( branch="master", eventtype="ref-updated") self.assertFalse(f.filter_change(ch)) self.assertEqual( repr(f), '') buildbot-3.4.0/master/buildbot/test/unit/changes/test_github.py000066400000000000000000000503561413250514000246660ustar00rootroot00000000000000# This 
file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json from twisted.internet import defer from twisted.trial import unittest from buildbot.changes.github import GitHubPullrequestPoller from buildbot.config import ConfigErrors from buildbot.process.properties import Properties from buildbot.process.properties import Secret from buildbot.secrets.manager import SecretManager from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util import changesource from buildbot.test.util.misc import TestReactorMixin gitJsonPayloadSinglePullrequest = """ { "html_url": "https://github.com/buildbot/buildbot/pull/4242", "number": 4242, "state": "open", "locked": false, "title": "Update the README with new information", "user": { "login": "defunkt" }, "body": "This is a pretty simple change that we need to pull into master.", "updated_at": "2017-01-25T22:36:21Z", "head": { "ref": "defunkt/change", "sha": "4c9a7f03e04e551a5e012064b581577f949dd3a4", "repo": { "name": "buildbot", "full_name": "defunkt/buildbot", "fork": true, "private": false, "git_url": "git://github.com/defunkt/buildbot.git", "ssh_url": "git@github.com:defunkt/buildbot.git", "clone_url": "https://github.com/defunkt/buildbot.git", "svn_url": "https://github.com/defunkt/buildbot" } }, 
"base": { "ref": "master", "sha": "4c9a7f03e04e551a5e012064b581577f949dd3a4", "name": "buildbot", "repo": { "full_name": "buildbot/buildbot", "fork": false, "private": false, "git_url": "git://github.com/buildbot/buildbot.git", "ssh_url": "git@github.com:buildbot/buildbot.git", "clone_url": "https://github.com/buildbot/buildbot.git", "svn_url": "https://github.com/buildbot/buildbot" } }, "merged": false, "commits": 42, "mergeable": true, "mergeable_state": "clean", "merged_by": null } """ gitJsonPayloadPullRequests = """ [ { "html_url": "https://github.com/buildbot/buildbot/pull/4242", "number": 4242, "locked": false, "title": "Update the README with new information", "user": { "login": "defunkt" }, "body": "This is a pretty simple change that we need to pull into master.", "updated_at": "2017-01-25T22:36:21Z", "head": { "ref": "defunkt/change", "sha": "4c9a7f03e04e551a5e012064b581577f949dd3a4", "repo": { "name": "buildbot", "git_url": "git://github.com/defunkt/buildbot.git", "ssh_url": "git@github.com:defunkt/buildbot.git", "clone_url": "https://github.com/defunkt/buildbot.git", "svn_url": "https://github.com/defunkt/buildbot" } }, "base": { "ref": "master", "name": "buildbot", "repo": { "git_url": "git://github.com/buildbot/buildbot.git", "ssh_url": "git@github.com:buildbot/buildbot.git", "clone_url": "https://github.com/buildbot/buildbot.git", "svn_url": "https://github.com/buildbot/buildbot" } } } ] """ gitJsonPayloadFiles = """ [ { "filename": "README.md" } ] """ gitJsonPayloadAuthors = """ [ { "commit": { "author": { "name": "defunkt", "email": "defunkt@defunkt.null" } } } ] """ gitJsonPayloadCommitters = """ [ { "commit": { "committer": { "name": "defunktc", "email": "defunktc@defunkt.null" } } } ] """ git_json_not_found = """ { "message": "Not Found", "documentation_url": "https://docs.github.com/rest/reference/pulls#list-pull-requests" } """ _CT_ENCODED = b'application/x-www-form-urlencoded' _CT_JSON = b'application/json' _GH_PARSED_PROPS = { 
'pullrequesturl': 'https://github.com/buildbot/buildbot/pull/4242', 'github.head.sha': '4c9a7f03e04e551a5e012064b581577f949dd3a4', 'github.state': 'open', 'github.number': 4242, 'github.merged': False, 'github.base.repo.full_name': 'buildbot/buildbot', 'github.base.ref': 'master', 'github.base.sha': '4c9a7f03e04e551a5e012064b581577f949dd3a4', 'github.head.repo.full_name': 'defunkt/buildbot', 'github.mergeable_state': 'clean', 'github.mergeable': True, 'github.head.ref': 'defunkt/change', 'github.title': 'Update the README with new information', 'github.merged_by': None } class TestGitHubPullrequestPoller(changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpChangeSource() fake_storage_service = FakeSecretStorage() secret_service = SecretManager() secret_service.services = [fake_storage_service] yield secret_service.setServiceParent(self.master) yield self.master.startService() fake_storage_service.reconfigService(secretdict={"token": "1234"}) @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() yield self.tearDownChangeSource() @defer.inlineCallbacks def newChangeSource(self, owner, repo, endpoint='https://api.github.com', **kwargs): http_headers = {'User-Agent': 'Buildbot'} token = kwargs.get('token', None) if token: p = Properties() p.master = self.master token = yield p.render(token) http_headers.update({'Authorization': 'token ' + token}) self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, endpoint, headers=http_headers) self.changesource = GitHubPullrequestPoller(owner, repo, **kwargs) @defer.inlineCallbacks def startChangeSource(self): yield self.changesource.setServiceParent(self.master) yield self.attachChangeSource(self.changesource) def assertDictSubset(self, expected_dict, response_dict): expected = {} for key in expected_dict.keys(): self.assertIn(key, set(response_dict.keys())) expected[key] = 
response_dict[key] self.assertDictEqual(expected_dict, expected) @defer.inlineCallbacks def test_describe(self): yield self.newChangeSource('defunkt', 'defunkt') yield self.startChangeSource() self.assertEqual( "GitHubPullrequestPoller watching the GitHub repository {}/{}". format('defunkt', 'defunkt'), self.changesource.describe()) @defer.inlineCallbacks def test_default_name(self): yield self.newChangeSource('defunkt', 'defunkt') yield self.startChangeSource() self.assertEqual("GitHubPullrequestPoller:{}/{}".format( 'defunkt', 'defunkt'), self.changesource.name) @defer.inlineCallbacks def test_custom_name(self): yield self.newChangeSource('defunkt', 'defunkt', name="MyName") yield self.startChangeSource() self.assertEqual("MyName", self.changesource.name) @defer.inlineCallbacks def test_SimplePR(self): yield self.newChangeSource( 'defunkt', 'defunkt', token='1234', github_property_whitelist=["github.*"]) yield self.simple_pr() @defer.inlineCallbacks def test_secret_token(self): yield self.newChangeSource( 'defunkt', 'defunkt', token=Secret('token'), github_property_whitelist=["github.*"]) yield self.simple_pr() @defer.inlineCallbacks def simple_pr(self): self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadAuthors)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadCommitters)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads(gitJsonPayloadFiles)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] 
self.assertEqual(change['author'], 'defunkt ') self.assertEqual(change['revision'], '4c9a7f03e04e551a5e012064b581577f949dd3a4') self.assertEqual(change['revlink'], 'https://github.com/buildbot/buildbot/pull/4242') self.assertEqual(change['branch'], 'defunkt/change') self.assertEqual(change['repository'], 'https://github.com/defunkt/buildbot.git') self.assertEqual(change['files'], ['README.md']) self.assertEqual(change['committer'], 'defunktc ') self.assertDictSubset(_GH_PARSED_PROPS, change['properties']) self.assertEqual(change["comments"], "GitHub Pull Request #4242 (42 commits)\n" "Update the README with new information\n" "This is a pretty simple change that we need to pull into master.") @defer.inlineCallbacks def test_wrongBranch(self): yield self.newChangeSource( 'defunkt', 'defunkt', token='1234', branches=['wrongBranch']) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_http_error(self): yield self.newChangeSource('defunkt', 'defunkt', token='1234') self._http.expect(method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(git_json_not_found), code=404) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_baseURL(self): yield self.newChangeSource( 'defunkt', 'defunkt', endpoint='https://my.other.endpoint', token='1234', baseURL='https://my.other.endpoint/', github_property_whitelist=["github.*"]) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', 
ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadAuthors)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadCommitters)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads(gitJsonPayloadFiles)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['author'], 'defunkt ') self.assertEqual(change['revision'], '4c9a7f03e04e551a5e012064b581577f949dd3a4') self.assertEqual(change['revlink'], 'https://github.com/buildbot/buildbot/pull/4242') self.assertEqual(change['branch'], 'defunkt/change') self.assertEqual(change['repository'], 'https://github.com/defunkt/buildbot.git') self.assertEqual(change['files'], ['README.md']) self.assertEqual(change['committer'], 'defunktc ') self.assertDictSubset(_GH_PARSED_PROPS, change['properties']) self.assertEqual(change["comments"], "GitHub Pull Request #4242 (42 commits)\n" "Update the README with new information\n" "This is a pretty simple change that we need to pull into master.") @defer.inlineCallbacks def test_PRfilter(self): yield self.newChangeSource( 'defunkt', 'defunkt', token='1234', pullrequest_filter=lambda pr: pr['number'] == 1337 ) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_failCommitters(self): yield self.newChangeSource('defunkt', 'defunkt', token='1234') self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', 
content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads("[{}]")) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads("[{}]")) yield self.startChangeSource() yield self.assertFailure(self.changesource.poll(), KeyError) @defer.inlineCallbacks def test_failFiles(self): yield self.newChangeSource('defunkt', 'defunkt', token='1234') self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads("[{}]")) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads("[{}]")) yield self.startChangeSource() yield self.assertFailure(self.changesource.poll(), KeyError) @defer.inlineCallbacks def test_wrongRepoLink(self): with self.assertRaises(ConfigErrors): yield self.newChangeSource('defunkt', 'defunkt', token='1234', repository_type='defunkt') @defer.inlineCallbacks def test_magicLink(self): yield self.newChangeSource( 'defunkt', 'defunkt', magic_link=True, token='1234', github_property_whitelist=["github.*"]) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadAuthors)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadCommitters)) self._http.expect( method='get', 
ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads(gitJsonPayloadFiles)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['author'], 'defunkt ') self.assertEqual(change['revision'], '4c9a7f03e04e551a5e012064b581577f949dd3a4') self.assertEqual(change['revlink'], 'https://github.com/buildbot/buildbot/pull/4242') self.assertEqual(change['branch'], 'refs/pull/4242/merge') self.assertEqual(change['repository'], 'https://github.com/buildbot/buildbot.git') self.assertEqual(change['files'], ['README.md']) self.assertEqual(change['committer'], 'defunktc ') self.assertDictSubset(_GH_PARSED_PROPS, change['properties']) self.assertEqual(change["comments"], "GitHub Pull Request #4242 (42 commits)\n" "Update the README with new information\n" "This is a pretty simple change that we need to pull into master.") @defer.inlineCallbacks def test_AuthormissingEmail(self): yield self.newChangeSource( 'defunkt', 'defunkt', token='1234', github_property_whitelist=["github.*"]) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadAuthors)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadCommitters)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads(gitJsonPayloadFiles)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['author'], 'defunkt ') 
self.assertEqual(change['revision'], '4c9a7f03e04e551a5e012064b581577f949dd3a4') self.assertEqual(change['revlink'], 'https://github.com/buildbot/buildbot/pull/4242') self.assertEqual(change['branch'], 'defunkt/change') self.assertEqual(change['repository'], 'https://github.com/defunkt/buildbot.git') self.assertEqual(change['files'], ['README.md']) self.assertEqual(change['committer'], 'defunktc ') self.assertDictSubset(_GH_PARSED_PROPS, change['properties']) self.assertEqual(change["comments"], "GitHub Pull Request #4242 (42 commits)\n" "Update the README with new information\n" "This is a pretty simple change that we need to pull into master.") buildbot-3.4.0/master/buildbot/test/unit/changes/test_gitpoller.py000066400000000000000000002307621413250514000254060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import re import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import gitpoller from buildbot.test.fake.private_tempdir import MockPrivateTemporaryDirectory from buildbot.test.util import changesource from buildbot.test.util import config from buildbot.test.util import logging from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.runprocess import ExpectMaster from buildbot.test.util.runprocess import MasterRunProcessMixin from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes # Test that environment variables get propagated to subprocesses (See #2116) os.environ['TEST_THAT_ENVIRONMENT_GETS_PASSED_TO_SUBPROCESSES'] = 'TRUE' class TestGitPollerBase(MasterRunProcessMixin, changesource.ChangeSourceMixin, logging.LoggingMixin, TestReactorMixin, unittest.TestCase): REPOURL = 'git@example.com:~foo/baz.git' REPOURL_QUOTED = 'git%40example.com%3A%7Efoo%2Fbaz.git' POLLER_WORKDIR = os.path.join('basedir', 'gitpoller-work') def createPoller(self): # this is overridden in TestGitPollerWithSshPrivateKey return gitpoller.GitPoller(self.REPOURL) @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_master_run_process() yield self.setUpChangeSource() yield self.master.startService() self.poller = yield self.attachChangeSource(self.createPoller()) @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() yield self.tearDownChangeSource() class TestGitPoller(TestGitPollerBase): dummyRevStr = '12345abcde' @defer.inlineCallbacks def _perform_git_output_test(self, methodToTest, args, desiredGoodOutput, desiredGoodResult, emptyRaisesException=True): self.expect_commands( ExpectMaster(['git'] + args) .workdir(self.POLLER_WORKDIR), ) # we should get an Exception with empty output from git try: yield methodToTest(self.dummyRevStr) if emptyRaisesException: self.fail("run_process should have failed on empty 
output") except Exception as e: if not emptyRaisesException: import traceback traceback.print_exc() self.fail("run_process should NOT have failed on empty output: " + repr(e)) self.assert_all_commands_ran() # and the method shouldn't suppress any exceptions self.expect_commands( ExpectMaster(['git'] + args) .workdir(self.POLLER_WORKDIR) .exit(1), ) try: yield methodToTest(self.dummyRevStr) self.fail("run_process should have failed on stderr output") except Exception: pass self.assert_all_commands_ran() # finally we should get what's expected from good output self.expect_commands( ExpectMaster(['git'] + args) .workdir(self.POLLER_WORKDIR) .stdout(desiredGoodOutput) ) r = yield methodToTest(self.dummyRevStr) self.assertEqual(r, desiredGoodResult) # check types if isinstance(r, str): self.assertIsInstance(r, str) elif isinstance(r, list): [self.assertIsInstance(e, str) for e in r] self.assert_all_commands_ran() def test_get_commit_author(self): authorStr = 'Sammy Jankis ' authorBytes = unicode2bytes(authorStr) return self._perform_git_output_test(self.poller._get_commit_author, ['log', '--no-walk', '--format=%aN <%aE>', self.dummyRevStr, '--'], authorBytes, authorStr) def test_get_commit_committer(self): committerStr = 'Sammy Jankis ' committerBytes = unicode2bytes(committerStr) return self._perform_git_output_test(self.poller._get_commit_committer, ['log', '--no-walk', '--format=%cN <%cE>', self.dummyRevStr, '--'], committerBytes, committerStr) def _test_get_commit_comments(self, commentStr): commentBytes = unicode2bytes(commentStr) return self._perform_git_output_test(self.poller._get_commit_comments, ['log', '--no-walk', '--format=%s%n%b', self.dummyRevStr, '--'], commentBytes, commentStr, emptyRaisesException=False) def test_get_commit_comments(self): comments = ['this is a commit message\n\nthat is multiline', 'single line message', ''] return defer.DeferredList([self._test_get_commit_comments(commentStr) for commentStr in comments]) def 
test_get_commit_files(self): filesBytes = b'\n\nfile1\nfile2\n"\146ile_octal"\nfile space' filesRes = ['file1', 'file2', 'file_octal', 'file space'] return self._perform_git_output_test(self.poller._get_commit_files, ['log', '--name-only', '--no-walk', '--format=%n', self.dummyRevStr, '--'], filesBytes, filesRes, emptyRaisesException=False) def test_get_commit_files_with_space_in_changed_files(self): filesBytes = b'normal_directory/file1\ndirectory with space/file2' filesStr = bytes2unicode(filesBytes) return self._perform_git_output_test( self.poller._get_commit_files, ['log', '--name-only', '--no-walk', '--format=%n', self.dummyRevStr, '--'], filesBytes, [l for l in filesStr.splitlines() if l.strip()], emptyRaisesException=False, ) def test_get_commit_timestamp(self): stampBytes = b'1273258009' stampStr = bytes2unicode(stampBytes) return self._perform_git_output_test(self.poller._get_commit_timestamp, ['log', '--no-walk', '--format=%ct', self.dummyRevStr, '--'], stampBytes, float(stampStr)) def test_describe(self): self.assertSubstring("GitPoller", self.poller.describe()) def test_name(self): self.assertEqual(bytes2unicode(self.REPOURL), bytes2unicode(self.poller.name)) # and one with explicit name... 
other = gitpoller.GitPoller(self.REPOURL, name="MyName") self.assertEqual("MyName", other.name) @defer.inlineCallbacks def test_checkGitFeatures_git_not_installed(self): self.setUpLogging() self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'Command not found'), ) yield self.assertFailure(self.poller._checkGitFeatures(), EnvironmentError) self.assert_all_commands_ran() @defer.inlineCallbacks def test_checkGitFeatures_git_bad_version(self): self.setUpLogging() self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git ') ) with self.assertRaises(EnvironmentError): yield self.poller._checkGitFeatures() self.assert_all_commands_ran() @defer.inlineCallbacks def test_poll_initial(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5\n'), ) self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) @defer.inlineCallbacks def test_poll_initial_poller_not_running(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) 
.stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ) self.poller.doPoll.running = False yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, {}) def test_poll_failInit(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]) .exit(1), ) self.poller.doPoll.running = True d = self.assertFailure(self.poller.poll(), EnvironmentError) d.addCallback(lambda _: self.assert_all_commands_ran()) return d def test_poll_failFetch(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .exit(1), ) self.poller.doPoll.running = True d = self.assertFailure(self.poller.poll(), EnvironmentError) d.addCallback(lambda _: self.assert_all_commands_ran()) return d @defer.inlineCallbacks def test_poll_failRevParse(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .exit(1), ) self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(len(self.flushLoggedErrors()), 1) self.assertEqual(self.poller.lastRev, {}) @defer.inlineCallbacks def test_poll_failLog(self): 
self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--']) .workdir(self.POLLER_WORKDIR) .exit(1), ) # do the poll self.poller.lastRev = { 'master': 'fa3ae8ed68e664d4db24798611b352e3c6509930' } self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(len(self.flushLoggedErrors()), 1) self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) @defer.inlineCallbacks def test_poll_GitError(self): # Raised when git exits with status code 128. 
See issue 2468 self.expect_commands( ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]) .exit(128), ) with self.assertRaises(gitpoller.GitError): yield self.poller._dovccmd('init', ['--bare', self.POLLER_WORKDIR]) self.assert_all_commands_ran() @defer.inlineCallbacks def test_poll_GitError_log(self): self.setUpLogging() self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]) .exit(128), ) self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertLogged("command.*on repourl.*failed.*exit code 128.*") @defer.inlineCallbacks def test_poll_nothingNew(self): # Test that environment variables get propagated to subprocesses # (See #2116) self.patch(os, 'environ', {'ENVVAR': 'TRUE'}) self.add_run_process_expect_env({'ENVVAR': 'TRUE'}) self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'no interesting output'), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b''), ) self.poller.lastRev = { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' } self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), 
class_name='GitPoller', lastRev={ 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) @defer.inlineCallbacks def test_poll_multipleBranches_initial(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2\t' b'refs/heads/release\n' b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master', '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR) .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'), ) # do the poll self.poller.branches = ['master', 'release', 'not_on_remote'] self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'release': '9118f4ab71963d23d02d4bdc54876ac8bf05acf2' }) @defer.inlineCallbacks def test_poll_multipleBranches(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2\t' b'refs/heads/release\n' b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master', '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release']) 
.workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241'])), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR) .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '9118f4ab71963d23d02d4bdc54876ac8bf05acf2', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b'\n'.join([ b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2' ])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = ['master', 'release'] self.poller.lastRev = { 'master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', 'release': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' } self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'master': 
'4423cdbcbb89c14e50dd5f4152415afd686c5241', 'release': '9118f4ab71963d23d02d4bdc54876ac8bf05acf2' }) self.assertEqual(self.master.data.updates.changesAdded, [ { 'author': 'by:4423cdbc', 'committer': 'by:4423cdbc', 'branch': 'master', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/442'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009, }, { 'author': 'by:64a5dc2a', 'committer': 'by:64a5dc2a', 'branch': 'master', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/64a'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009, }, { 'author': 'by:9118f4ab', 'committer': 'by:9118f4ab', 'branch': 'release', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/911'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '9118f4ab71963d23d02d4bdc54876ac8bf05acf2', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009, } ]) @defer.inlineCallbacks def test_poll_multipleBranches_buildPushesWithNoCommits_default(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/release\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', 
'4423cdbcbb89c14e50dd5f4152415afd686c5241', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b''), ) # do the poll self.poller.branches = ['release'] self.poller.lastRev = { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', } self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'release': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_poll_multipleBranches_buildPushesWithNoCommits_true(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/release\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b''), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return 
defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = ['release'] self.poller.lastRev = { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', } self.poller.buildPushesWithNoCommits = True self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'release': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) self.assertEqual(self.master.data.updates.changesAdded, [ {'author': 'by:4423cdbc', 'committer': 'by:4423cdbc', 'branch': 'release', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/442'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009}] ) @defer.inlineCallbacks def test_poll_multipleBranches_buildPushesWithNoCommits_true_fast_forward(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/release\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^0ba9d553b7217ab4bbad89ad56dc0332c7d57a8c', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b''), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, 
'_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = ['release'] self.poller.lastRev = { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'release': '0ba9d553b7217ab4bbad89ad56dc0332c7d57a8c' } self.poller.buildPushesWithNoCommits = True self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'release': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) self.assertEqual(self.master.data.updates.changesAdded, [ {'author': 'by:4423cdbc', 'committer': 'by:4423cdbc', 'branch': 'release', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/442'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009}] ) @defer.inlineCallbacks def test_poll_multipleBranches_buildPushesWithNoCommits_true_not_tip(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/release\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR) 
.stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^0ba9d553b7217ab4bbad89ad56dc0332c7d57a8c', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b''), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = ['release'] self.poller.lastRev = { 'master': '0ba9d553b7217ab4bbad89ad56dc0332c7d57a8c', } self.poller.buildPushesWithNoCommits = True self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'release': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) self.assertEqual(self.master.data.updates.changesAdded, [ {'author': 'by:4423cdbc', 'committer': 'by:4423cdbc', 'branch': 'release', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/442'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009}] ) @defer.inlineCallbacks def test_poll_allBranches_single(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' 
b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241'])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = True self.poller.lastRev = { 'refs/heads/master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', } self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'refs/heads/master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', }) added = self.master.data.updates.changesAdded self.assertEqual(len(added), 2) self.assertEqual(added[0]['author'], 'by:4423cdbc') self.assertEqual(added[0]['committer'], 'by:4423cdbc') self.assertEqual(added[0]['when_timestamp'], 1273258009) self.assertEqual(added[0]['comments'], 'hello!') self.assertEqual(added[0]['branch'], 'master') self.assertEqual(added[0]['files'], 
['/etc/442']) self.assertEqual(added[0]['src'], 'git') self.assertEqual(added[1]['author'], 'by:64a5dc2a') self.assertEqual(added[1]['committer'], 'by:64a5dc2a') self.assertEqual(added[1]['when_timestamp'], 1273258009) self.assertEqual(added[1]['comments'], 'hello!') self.assertEqual(added[1]['files'], ['/etc/64a']) self.assertEqual(added[1]['src'], 'git') @defer.inlineCallbacks def test_poll_noChanges(self): # Test that environment variables get propagated to subprocesses # (See #2116) self.patch(os, 'environ', {'ENVVAR': 'TRUE'}) self.add_run_process_expect_env({'ENVVAR': 'TRUE'}) self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'no interesting output'), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b''), ) self.poller.lastRev = { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' } self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) @defer.inlineCallbacks def test_poll_allBranches_multiple(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) 
.stdout(b'\n'.join([ b'4423cdbcbb89c14e50dd5f4152415afd686c5241\trefs/heads/master', b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2\trefs/heads/release', ])), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master', '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241'])), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release']) .workdir(self.POLLER_WORKDIR) .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '9118f4ab71963d23d02d4bdc54876ac8bf05acf2', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b'\n'.join([b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) 
# do the poll self.poller.branches = True self.poller.lastRev = { 'refs/heads/master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', 'refs/heads/release': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' } self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'refs/heads/master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'refs/heads/release': '9118f4ab71963d23d02d4bdc54876ac8bf05acf2' }) added = self.master.data.updates.changesAdded self.assertEqual(len(added), 3) self.assertEqual(added[0]['author'], 'by:4423cdbc') self.assertEqual(added[0]['committer'], 'by:4423cdbc') self.assertEqual(added[0]['when_timestamp'], 1273258009) self.assertEqual(added[0]['comments'], 'hello!') self.assertEqual(added[0]['branch'], 'master') self.assertEqual(added[0]['files'], ['/etc/442']) self.assertEqual(added[0]['src'], 'git') self.assertEqual(added[1]['author'], 'by:64a5dc2a') self.assertEqual(added[1]['committer'], 'by:64a5dc2a') self.assertEqual(added[1]['when_timestamp'], 1273258009) self.assertEqual(added[1]['comments'], 'hello!') self.assertEqual(added[1]['files'], ['/etc/64a']) self.assertEqual(added[1]['src'], 'git') self.assertEqual(added[2]['author'], 'by:9118f4ab') self.assertEqual(added[2]['committer'], 'by:9118f4ab') self.assertEqual(added[2]['when_timestamp'], 1273258009) self.assertEqual(added[2]['comments'], 'hello!') self.assertEqual(added[2]['files'], ['/etc/911']) self.assertEqual(added[2]['src'], 'git') @defer.inlineCallbacks def test_poll_callableFilteredBranches(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'\n'.join([ b'4423cdbcbb89c14e50dd5f4152415afd686c5241\trefs/heads/master', b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2\trefs/heads/release', ])), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, 
'+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241'])) ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll class TestCallable: def __call__(self, branch): return branch == "refs/heads/master" self.poller.branches = TestCallable() self.poller.lastRev = { 'refs/heads/master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', 'refs/heads/release': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' } self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() # The release branch id should remain unchanged, # because it was ignored. 
self.assertEqual(self.poller.lastRev, { 'refs/heads/master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) added = self.master.data.updates.changesAdded self.assertEqual(len(added), 2) self.assertEqual(added[0]['author'], 'by:4423cdbc') self.assertEqual(added[0]['committer'], 'by:4423cdbc') self.assertEqual(added[0]['when_timestamp'], 1273258009) self.assertEqual(added[0]['comments'], 'hello!') self.assertEqual(added[0]['branch'], 'master') self.assertEqual(added[0]['files'], ['/etc/442']) self.assertEqual(added[0]['src'], 'git') self.assertEqual(added[1]['author'], 'by:64a5dc2a') self.assertEqual(added[1]['committer'], 'by:64a5dc2a') self.assertEqual(added[1]['when_timestamp'], 1273258009) self.assertEqual(added[1]['comments'], 'hello!') self.assertEqual(added[1]['files'], ['/etc/64a']) self.assertEqual(added[1]['src'], 'git') @defer.inlineCallbacks def test_poll_branchFilter(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'\n'.join([ b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/pull/410/merge', b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2\t' b'refs/pull/410/head', ])), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+refs/pull/410/head:refs/buildbot/' + self.REPOURL_QUOTED + '/refs/pull/410/head']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/refs/pull/410/head']) .workdir(self.POLLER_WORKDIR) .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '9118f4ab71963d23d02d4bdc54876ac8bf05acf2', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b'\n'.join([b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'])), ) # and patch out the _get_commit_foo methods which were already 
tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) def pullFilter(branch): """ Note that this isn't useful in practice, because it will only pick up *changes* to pull requests, not the original request. """ return re.match('^refs/pull/[0-9]*/head$', branch) # do the poll self.poller.branches = pullFilter self.poller.lastRev = { 'master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', 'refs/pull/410/head': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' } self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'refs/pull/410/head': '9118f4ab71963d23d02d4bdc54876ac8bf05acf2' }) added = self.master.data.updates.changesAdded self.assertEqual(len(added), 1) self.assertEqual(added[0]['author'], 'by:9118f4ab') self.assertEqual(added[0]['committer'], 'by:9118f4ab') self.assertEqual(added[0]['when_timestamp'], 1273258009) self.assertEqual(added[0]['comments'], 'hello!') self.assertEqual(added[0]['files'], ['/etc/911']) self.assertEqual(added[0]['src'], 'git') @defer.inlineCallbacks def test_poll_old(self): # Test that environment variables get propagated to subprocesses # (See #2116) self.patch(os, 'environ', {'ENVVAR': 'TRUE'}) self.add_run_process_expect_env({'ENVVAR': 'TRUE'}) self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) 
.stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'no interesting output'), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241' ])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.lastRev = { 'master': 'fa3ae8ed68e664d4db24798611b352e3c6509930' } self.poller.doPoll.running = True yield self.poller.poll() # check the results self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'by:4423cdbc', 'committer': 'by:4423cdbc', 'branch': 'master', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/442'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 
'revlink': '', 'src': 'git', 'when_timestamp': 1273258009, }, { 'author': 'by:64a5dc2a', 'committer': 'by:64a5dc2a', 'branch': 'master', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/64a'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009, }]) self.assert_all_commands_ran() self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) @defer.inlineCallbacks def test_poll_callableCategory(self): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), ExpectMaster(['git', 'log', '--ignore-missing', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--']) .workdir(self.POLLER_WORKDIR) .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241'])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', 
committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = True def callableCategory(chdict): return chdict['revision'][:6] self.poller.category = callableCategory self.poller.lastRev = { 'refs/heads/master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', } self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'refs/heads/master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', }) added = self.master.data.updates.changesAdded self.assertEqual(len(added), 2) self.assertEqual(added[0]['author'], 'by:4423cdbc') self.assertEqual(added[0]['committer'], 'by:4423cdbc') self.assertEqual(added[0]['when_timestamp'], 1273258009) self.assertEqual(added[0]['comments'], 'hello!') self.assertEqual(added[0]['branch'], 'master') self.assertEqual(added[0]['files'], ['/etc/442']) self.assertEqual(added[0]['src'], 'git') self.assertEqual(added[0]['category'], '4423cd') self.assertEqual(added[1]['author'], 'by:64a5dc2a') self.assertEqual(added[1]['committer'], 'by:64a5dc2a') self.assertEqual(added[1]['when_timestamp'], 1273258009) self.assertEqual(added[1]['comments'], 'hello!') self.assertEqual(added[1]['files'], ['/etc/64a']) self.assertEqual(added[1]['src'], 'git') self.assertEqual(added[1]['category'], '64a5dc') def test_startService(self): self.assertEqual(self.poller.workdir, self.POLLER_WORKDIR) self.assertEqual(self.poller.lastRev, {}) @defer.inlineCallbacks def test_startService_loadLastRev(self): yield self.poller.stopService() self.master.db.state.set_fake_state( self.poller, lastRev={"master": "fa3ae8ed68e664d4db24798611b352e3c6509930"}, ) yield self.poller.startService() self.assertEqual(self.poller.lastRev, { "master": "fa3ae8ed68e664d4db24798611b352e3c6509930" }) class 
TestGitPollerWithSshPrivateKey(TestGitPollerBase): def createPoller(self): return gitpoller.GitPoller(self.REPOURL, sshPrivateKey='ssh-key') @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_check_git_features_ssh_1_7(self, write_local_file_mock, temp_dir_mock): self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 1.7.5\n'), ) yield self.assertFailure(self.poller._checkGitFeatures(), EnvironmentError) self.assert_all_commands_ran() self.assertEqual(len(temp_dir_mock.dirs), 0) write_local_file_mock.assert_not_called() @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_poll_initial_2_10(self, write_local_file_mock, temp_dir_mock): key_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@', 'ssh-key') self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 2.10.0\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', '-c', 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}"'.format(key_path), 'ls-remote', '--refs', self.REPOURL]), ExpectMaster(['git', '-c', 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}"'.format(key_path), 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5\n'), ) self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) self.master.db.state.assertStateByClass( 
name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) temp_dir_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@') self.assertEqual(temp_dir_mock.dirs, [(temp_dir_path, 0o700), (temp_dir_path, 0o700)]) write_local_file_mock.assert_called_with(key_path, 'ssh-key', mode=0o400) @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_poll_initial_2_3(self, write_local_file_mock, temp_dir_mock): key_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@', 'ssh-key') self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 2.3.0\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', 'ls-remote', '--refs', self.REPOURL]) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), ExpectMaster(['git', 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .env({'GIT_SSH_COMMAND': 'ssh -o "BatchMode=yes" -i "{0}"'.format(key_path)}), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5\n'), ) self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) temp_dir_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@') self.assertEqual(temp_dir_mock.dirs, [(temp_dir_path, 0o700), (temp_dir_path, 0o700)]) write_local_file_mock.assert_called_with(key_path, 'ssh-key', mode=0o400) 
@mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_poll_failFetch_git_2_10(self, write_local_file_mock, temp_dir_mock): key_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@', 'ssh-key') # make sure we cleanup the private key when fetch fails self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 2.10.0\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', '-c', 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}"'.format(key_path), 'ls-remote', '--refs', self.REPOURL]), ExpectMaster(['git', '-c', 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}"'.format(key_path), 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .exit(1), ) self.poller.doPoll.running = True yield self.assertFailure(self.poller.poll(), EnvironmentError) self.assert_all_commands_ran() temp_dir_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@') self.assertEqual(temp_dir_mock.dirs, [(temp_dir_path, 0o700), (temp_dir_path, 0o700)]) write_local_file_mock.assert_called_with(key_path, 'ssh-key', mode=0o400) class TestGitPollerWithSshHostKey(TestGitPollerBase): def createPoller(self): return gitpoller.GitPoller(self.REPOURL, sshPrivateKey='ssh-key', sshHostKey='ssh-host-key') @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_poll_initial_2_10(self, write_local_file_mock, temp_dir_mock): key_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@', 'ssh-key') known_hosts_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@', 'ssh-known-hosts') self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git 
version 2.10.0\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', '-c', 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}" ' '-o "UserKnownHostsFile={1}"'.format(key_path, known_hosts_path), 'ls-remote', '--refs', self.REPOURL]), ExpectMaster(['git', '-c', 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}" ' '-o "UserKnownHostsFile={1}"'.format(key_path, known_hosts_path), 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5\n'), ) self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) temp_dir_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@') self.assertEqual(temp_dir_mock.dirs, [(temp_dir_path, 0o700), (temp_dir_path, 0o700)]) expected_file_writes = [ mock.call(key_path, 'ssh-key', mode=0o400), mock.call(known_hosts_path, '* ssh-host-key'), mock.call(key_path, 'ssh-key', mode=0o400), mock.call(known_hosts_path, '* ssh-host-key'), ] self.assertEqual(expected_file_writes, write_local_file_mock.call_args_list) class TestGitPollerWithSshKnownHosts(TestGitPollerBase): def createPoller(self): return gitpoller.GitPoller(self.REPOURL, sshPrivateKey='ssh-key', sshKnownHosts='ssh-known-hosts') @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_poll_initial_2_10(self, write_local_file_mock, temp_dir_mock): key_path = os.path.join('basedir', 
'gitpoller-work', '.buildbot-ssh@@@', 'ssh-key') known_hosts_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@', 'ssh-known-hosts') self.expect_commands( ExpectMaster(['git', '--version']) .stdout(b'git version 2.10.0\n'), ExpectMaster(['git', 'init', '--bare', self.POLLER_WORKDIR]), ExpectMaster(['git', '-c', 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}" ' '-o "UserKnownHostsFile={1}"'.format(key_path, known_hosts_path), 'ls-remote', '--refs', self.REPOURL]), ExpectMaster(['git', '-c', 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}" ' '-o "UserKnownHostsFile={1}"'.format(key_path, known_hosts_path), 'fetch', '--progress', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR), ExpectMaster(['git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master']) .workdir(self.POLLER_WORKDIR) .stdout(b'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5\n'), ) self.poller.doPoll.running = True yield self.poller.poll() self.assert_all_commands_ran() self.assertEqual(self.poller.lastRev, { 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) temp_dir_path = os.path.join('basedir', 'gitpoller-work', '.buildbot-ssh@@@') self.assertEqual(temp_dir_mock.dirs, [(temp_dir_path, 0o700), (temp_dir_path, 0o700)]) expected_file_writes = [ mock.call(key_path, 'ssh-key', mode=0o400), mock.call(known_hosts_path, 'ssh-known-hosts'), mock.call(key_path, 'ssh-key', mode=0o400), mock.call(known_hosts_path, 'ssh-known-hosts'), ] self.assertEqual(expected_file_writes, write_local_file_mock.call_args_list) class TestGitPollerConstructor(unittest.TestCase, TestReactorMixin, changesource.ChangeSourceMixin, config.ConfigErrorsMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpChangeSource() yield self.master.startService() 
@defer.inlineCallbacks def tearDown(self): yield self.master.stopService() yield self.tearDownChangeSource() @defer.inlineCallbacks def test_deprecatedFetchRefspec(self): with self.assertRaisesConfigError( "fetch_refspec is no longer supported"): yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git", fetch_refspec='not-supported')) @defer.inlineCallbacks def test_oldPollInterval(self): poller = yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git", pollinterval=10)) self.assertEqual(poller.pollInterval, 10) @defer.inlineCallbacks def test_branches_default(self): poller = yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git")) self.assertEqual(poller.branches, ["master"]) @defer.inlineCallbacks def test_branches_oldBranch(self): poller = yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git", branch='magic')) self.assertEqual(poller.branches, ["magic"]) @defer.inlineCallbacks def test_branches(self): poller = yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git", branches=['magic', 'marker'])) self.assertEqual(poller.branches, ["magic", "marker"]) @defer.inlineCallbacks def test_branches_True(self): poller = yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git", branches=True)) self.assertEqual(poller.branches, True) @defer.inlineCallbacks def test_only_tags_True(self): poller = yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git", only_tags=True)) self.assertIsNotNone(poller.branches) @defer.inlineCallbacks def test_branches_andBranch(self): with self.assertRaisesConfigError( "can't specify both branch and branches"): yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git", branch='bad', branches=['listy'])) @defer.inlineCallbacks def test_branches_and_only_tags(self): with self.assertRaisesConfigError( "can't specify only_tags and branch/branches"): yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git", only_tags=True, branches=['listy'])) @defer.inlineCallbacks def 
test_branch_and_only_tags(self): with self.assertRaisesConfigError( "can't specify only_tags and branch/branches"): yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git", only_tags=True, branch='bad')) @defer.inlineCallbacks def test_gitbin_default(self): poller = yield self.attachChangeSource(gitpoller.GitPoller("/tmp/git.git")) self.assertEqual(poller.gitbin, "git") buildbot-3.4.0/master/buildbot/test/unit/changes/test_hgpoller.py000066400000000000000000000353051413250514000252150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import hgpoller from buildbot.test.util import changesource from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.runprocess import ExpectMaster from buildbot.test.util.runprocess import MasterRunProcessMixin ENVIRON_2116_KEY = 'TEST_THAT_ENVIRONMENT_GETS_PASSED_TO_SUBPROCESSES' LINESEP_BYTES = os.linesep.encode("ascii") PATHSEP_BYTES = os.pathsep.encode("ascii") class TestHgPollerBase(MasterRunProcessMixin, changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): usetimestamps = True branches = None bookmarks = None @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_master_run_process() yield self.setUpChangeSource() # To test that environment variables get propagated to subprocesses # (See #2116) os.environ[ENVIRON_2116_KEY] = 'TRUE' yield self.setUpChangeSource() self.remote_repo = 'ssh://example.com/foo/baz' self.remote_hgweb = 'http://example.com/foo/baz/rev/{}' self.repo_ready = True def _isRepositoryReady(): return self.repo_ready self.poller = hgpoller.HgPoller(self.remote_repo, usetimestamps=self.usetimestamps, workdir='/some/dir', branches=self.branches, bookmarks=self.bookmarks, revlink=lambda branch, revision: self.remote_hgweb.format(revision)) yield self.poller.setServiceParent(self.master) self.poller._isRepositoryReady = _isRepositoryReady yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() yield self.tearDownChangeSource() @defer.inlineCallbacks def check_current_rev(self, wished, branch='default'): rev = yield self.poller._getCurrentRev(branch) self.assertEqual(rev, str(wished)) class TestHgPollerBranches(TestHgPollerBase): branches = ['one', 'two'] @defer.inlineCallbacks def test_poll_initial(self): self.expect_commands( ExpectMaster(['hg', 'pull', '-b', 'one', '-b', 'two', 
'ssh://example.com/foo/baz']) .workdir('/some/dir'), ExpectMaster(['hg', 'heads', 'one', '--template={rev}' + os.linesep]) .workdir('/some/dir') .stdout(b"73591"), ExpectMaster(['hg', 'heads', 'two', '--template={rev}' + os.linesep]) .workdir('/some/dir') .stdout(b"22341"), ) # do the poll yield self.poller.poll() # check the results self.assertEqual(len(self.master.data.updates.changesAdded), 0) yield self.check_current_rev(73591, 'one') yield self.check_current_rev(22341, 'two') @defer.inlineCallbacks def test_poll_regular(self): # normal operation. There's a previous revision, we get a new one. # Let's say there was an intervening commit on an untracked branch, to # make it more interesting. self.expect_commands( ExpectMaster(['hg', 'pull', '-b', 'one', '-b', 'two', 'ssh://example.com/foo/baz']) .workdir('/some/dir'), ExpectMaster(['hg', 'heads', 'one', '--template={rev}' + os.linesep]) .workdir('/some/dir').stdout(b'6' + LINESEP_BYTES), ExpectMaster(['hg', 'log', '-r', '4::6', '--template={rev}:{node}\\n']) .workdir('/some/dir') .stdout(LINESEP_BYTES.join([b'4:1aaa5', b'6:784bd'])), ExpectMaster(['hg', 'log', '-r', '784bd', '--template={date|hgdate}' + os.linesep + '{author}' + os.linesep + "{files % '{file}" + os.pathsep + "'}" + os.linesep + '{desc|strip}']) .workdir('/some/dir') .stdout(LINESEP_BYTES.join([b'1273258009.0 -7200', b'Joe Test ', b'file1 file2', b'Comment', b''])), ExpectMaster(['hg', 'heads', 'two', '--template={rev}' + os.linesep]) .workdir('/some/dir').stdout(b'3' + LINESEP_BYTES), ) yield self.poller._setCurrentRev(3, 'two') yield self.poller._setCurrentRev(4, 'one') yield self.poller.poll() yield self.check_current_rev(6, 'one') self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['revision'], '784bd') self.assertEqual(change['revlink'], 'http://example.com/foo/baz/rev/784bd') self.assertEqual(change['comments'], 'Comment') class 
TestHgPollerBookmarks(TestHgPollerBase): bookmarks = ['one', 'two'] @defer.inlineCallbacks def test_poll_initial(self): self.expect_commands( ExpectMaster(['hg', 'pull', '-B', 'one', '-B', 'two', 'ssh://example.com/foo/baz']) .workdir('/some/dir'), ExpectMaster(['hg', 'heads', 'one', '--template={rev}' + os.linesep]) .workdir('/some/dir').stdout(b"73591"), ExpectMaster(['hg', 'heads', 'two', '--template={rev}' + os.linesep]) .workdir('/some/dir').stdout(b"22341"), ) # do the poll yield self.poller.poll() # check the results self.assertEqual(len(self.master.data.updates.changesAdded), 0) yield self.check_current_rev(73591, 'one') yield self.check_current_rev(22341, 'two') @defer.inlineCallbacks def test_poll_regular(self): # normal operation. There's a previous revision, we get a new one. # Let's say there was an intervening commit on an untracked branch, to # make it more interesting. self.expect_commands( ExpectMaster(['hg', 'pull', '-B', 'one', '-B', 'two', 'ssh://example.com/foo/baz']) .workdir('/some/dir'), ExpectMaster(['hg', 'heads', 'one', '--template={rev}' + os.linesep]) .workdir('/some/dir').stdout(b'6' + LINESEP_BYTES), ExpectMaster(['hg', 'log', '-r', '4::6', '--template={rev}:{node}\\n']) .workdir('/some/dir') .stdout(LINESEP_BYTES.join([b'4:1aaa5', b'6:784bd', ])), ExpectMaster(['hg', 'log', '-r', '784bd', '--template={date|hgdate}' + os.linesep + '{author}' + os.linesep + "{files % '{file}" + os.pathsep + "'}" + os.linesep + '{desc|strip}']) .workdir('/some/dir') .stdout(LINESEP_BYTES.join([b'1273258009.0 -7200', b'Joe Test ', b'file1 file2', b'Comment', b''])), ExpectMaster(['hg', 'heads', 'two', '--template={rev}' + os.linesep]) .workdir('/some/dir').stdout(b'3' + LINESEP_BYTES), ) yield self.poller._setCurrentRev(3, 'two') yield self.poller._setCurrentRev(4, 'one') yield self.poller.poll() yield self.check_current_rev(6, 'one') self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] 
self.assertEqual(change['revision'], '784bd') self.assertEqual(change['comments'], 'Comment') class TestHgPoller(TestHgPollerBase): def tearDown(self): del os.environ[ENVIRON_2116_KEY] return self.tearDownChangeSource() def gpoFullcommandPattern(self, commandName, *expected_args): """Match if the command is commandName and arg list start as expected. This allows to test a bit more if expected GPO are issued, be it by obscure failures due to the result not being given. """ def matchesSubcommand(bin, given_args, **kwargs): return bin == commandName and tuple( given_args[:len(expected_args)]) == expected_args return matchesSubcommand def test_describe(self): self.assertSubstring("HgPoller", self.poller.describe()) def test_name(self): self.assertEqual(self.remote_repo, self.poller.name) # and one with explicit name... other = hgpoller.HgPoller( self.remote_repo, name="MyName", workdir='/some/dir') self.assertEqual("MyName", other.name) # and one with explicit branches... other = hgpoller.HgPoller( self.remote_repo, branches=["b1", "b2"], workdir='/some/dir') self.assertEqual(self.remote_repo + "_b1_b2", other.name) def test_hgbin_default(self): self.assertEqual(self.poller.hgbin, "hg") @defer.inlineCallbacks def test_poll_initial(self): self.repo_ready = False # Test that environment variables get propagated to subprocesses # (See #2116) expected_env = {ENVIRON_2116_KEY: 'TRUE'} self.add_run_process_expect_env(expected_env) self.expect_commands( ExpectMaster(['hg', 'init', '/some/dir']), ExpectMaster(['hg', 'pull', '-b', 'default', 'ssh://example.com/foo/baz']) .workdir('/some/dir'), ExpectMaster(['hg', 'heads', 'default', '--template={rev}' + os.linesep]) .workdir('/some/dir') .stdout(b"73591"), ) # do the poll yield self.poller.poll() # check the results self.assertEqual(len(self.master.data.updates.changesAdded), 0) yield self.check_current_rev(73591) @defer.inlineCallbacks def test_poll_several_heads(self): # If there are several heads on the named branch, the 
poller mustn't # climb (good enough for now, ideally it should even go to the common # ancestor) self.expect_commands( ExpectMaster(['hg', 'pull', '-b', 'default', 'ssh://example.com/foo/baz']) .workdir('/some/dir'), ExpectMaster(['hg', 'heads', 'default', '--template={rev}' + os.linesep]) .workdir('/some/dir') .stdout(b'5' + LINESEP_BYTES + b'6' + LINESEP_BYTES) ) yield self.poller._setCurrentRev(3) # do the poll: we must stay at rev 3 yield self.poller.poll() yield self.check_current_rev(3) @defer.inlineCallbacks def test_poll_regular(self): # normal operation. There's a previous revision, we get a new one. self.expect_commands( ExpectMaster(['hg', 'pull', '-b', 'default', 'ssh://example.com/foo/baz']) .workdir('/some/dir'), ExpectMaster(['hg', 'heads', 'default', '--template={rev}' + os.linesep]) .workdir('/some/dir') .stdout(b'5' + LINESEP_BYTES), ExpectMaster(['hg', 'log', '-r', '4::5', '--template={rev}:{node}\\n']) .workdir('/some/dir') .stdout(LINESEP_BYTES.join([b'4:1aaa5', b'5:784bd'])), ExpectMaster(['hg', 'log', '-r', '784bd', '--template={date|hgdate}' + os.linesep + '{author}' + os.linesep + "{files % '{file}" + os.pathsep + "'}" + os.linesep + '{desc|strip}']) .workdir('/some/dir') .stdout(LINESEP_BYTES.join([b'1273258009.0 -7200', b'Joe Test ', b'file1 file2', b'Comment for rev 5', b''])), ) yield self.poller._setCurrentRev(4) yield self.poller.poll() yield self.check_current_rev(5) self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['revision'], '784bd') self.assertEqual(change['comments'], 'Comment for rev 5') @defer.inlineCallbacks def test_poll_force_push(self): # There's a previous revision, but not linked with new rev self.expect_commands( ExpectMaster(['hg', 'pull', '-b', 'default', 'ssh://example.com/foo/baz']) .workdir('/some/dir'), ExpectMaster(['hg', 'heads', 'default', '--template={rev}' + os.linesep]) .workdir('/some/dir').stdout(b'5' + LINESEP_BYTES), 
ExpectMaster(['hg', 'log', '-r', '4::5', '--template={rev}:{node}\\n']) .workdir('/some/dir') .stdout(b""), ExpectMaster(['hg', 'log', '-r', '5', '--template={rev}:{node}\\n']) .workdir('/some/dir') .stdout(LINESEP_BYTES.join([b'5:784bd'])), ExpectMaster(['hg', 'log', '-r', '784bd', '--template={date|hgdate}' + os.linesep + '{author}' + os.linesep + "{files % '{file}" + os.pathsep + "'}" + os.linesep + '{desc|strip}']) .workdir('/some/dir') .stdout(LINESEP_BYTES.join([b'1273258009.0 -7200', b'Joe Test ', b'file1 file2', b'Comment for rev 5', b''])), ) yield self.poller._setCurrentRev(4) yield self.poller.poll() yield self.check_current_rev(5) self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['revision'], '784bd') self.assertEqual(change['comments'], 'Comment for rev 5') class HgPollerNoTimestamp(TestHgPoller): """ Test HgPoller() without parsing revision commit timestamp """ usetimestamps = False buildbot-3.4.0/master/buildbot/test/unit/changes/test_mail.py000066400000000000000000000102141413250514000243130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import mail from buildbot.test.util import changesource from buildbot.test.util import dirs from buildbot.test.util.misc import TestReactorMixin class TestMaildirSource(changesource.ChangeSourceMixin, dirs.DirsMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.maildir = os.path.abspath("maildir") yield self.setUpChangeSource() yield self.setUpDirs(self.maildir) def populateMaildir(self): "create a fake maildir with a fake new message ('newmsg') in it" newdir = os.path.join(self.maildir, "new") os.makedirs(newdir) curdir = os.path.join(self.maildir, "cur") os.makedirs(curdir) fake_message = "Subject: test\n\nthis is a test" mailfile = os.path.join(newdir, "newmsg") with open(mailfile, "w") as f: f.write(fake_message) def assertMailProcessed(self): self.assertFalse( os.path.exists(os.path.join(self.maildir, "new", "newmsg"))) self.assertTrue( os.path.exists(os.path.join(self.maildir, "cur", "newmsg"))) @defer.inlineCallbacks def tearDown(self): yield self.tearDownDirs() yield self.tearDownChangeSource() # tests def test_describe(self): mds = mail.MaildirSource(self.maildir) self.assertSubstring(self.maildir, mds.describe()) @defer.inlineCallbacks def test_messageReceived_svn(self): self.populateMaildir() mds = mail.MaildirSource(self.maildir) yield self.attachChangeSource(mds) # monkey-patch in a parse method def parse(message, prefix): assert 'this is a test' in message.get_payload() return ('svn', dict(author='jimmy')) mds.parse = parse yield mds.messageReceived('newmsg') self.assertMailProcessed() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'jimmy', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': None, 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': 
'svn', 'when_timestamp': None, }]) @defer.inlineCallbacks def test_messageReceived_bzr(self): self.populateMaildir() mds = mail.MaildirSource(self.maildir) yield self.attachChangeSource(mds) # monkey-patch in a parse method def parse(message, prefix): assert 'this is a test' in message.get_payload() return ('bzr', dict(author='jimmy')) mds.parse = parse yield mds.messageReceived('newmsg') self.assertMailProcessed() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'jimmy', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': None, 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': 'bzr', 'when_timestamp': None, }]) buildbot-3.4.0/master/buildbot/test/unit/changes/test_mail_CVSMaildirSource.py000066400000000000000000000165111413250514000275170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from email import message_from_string from email.utils import mktime_tz from email.utils import parsedate_tz from twisted.trial import unittest from buildbot.changes.mail import CVSMaildirSource # # Sample message from CVS version 1.11 # cvs1_11_msg = """From: Andy Howell To: buildbot@example.com Subject: cvs module MyModuleName Date: Sat, 07 Aug 2010 11:11:49 +0000 X-Mailer: Python buildbot-cvs-mail $Revision: 1.3 $ Cvsmode: 1.11 Category: None CVSROOT: :ext:cvshost.example.com:/cvsroot Files: base/module/src/make GNUmakefile,1.362,1.363 Project: MyModuleName Update of /cvsroot/base/module/src/make In directory cvshost:/tmp/cvs-serv10922 Modified Files: GNUmakefile Log Message: Commented out some stuff. """ # # Sample message from CVS version 1.12 # # Paths are handled differently by the two versions # cvs1_12_msg = """Date: Wed, 11 Aug 2010 04:56:44 +0000 From: andy@example.com To: buildbot@example.com Subject: cvs update for project RaiCore X-Mailer: Python buildbot-cvs-mail $Revision: 1.3 $ Cvsmode: 1.12 Category: None CVSROOT: :ext:cvshost.example.com:/cvsroot Files: file1.cpp 1.77 1.78 file2.cpp 1.75 1.76 Path: base/module/src Project: MyModuleName Update of /cvsroot/base/module/src In directory example.com:/tmp/cvs-serv26648/InsightMonAgent Modified Files: file1.cpp file2.cpp Log Message: Changes for changes sake """ class TestCVSMaildirSource(unittest.TestCase): def test_CVSMaildirSource_create_change_from_cvs1_11msg(self): m = message_from_string(cvs1_11_msg) src = CVSMaildirSource('/dev/null') src, chdict = src.parse(m) self.assertNotEqual(chdict, None) self.assertEqual(chdict['author'], 'andy') self.assertEqual(len(chdict['files']), 1) self.assertEqual( chdict['files'][0], 'base/module/src/make/GNUmakefile') self.assertEqual(chdict['comments'], 'Commented out some stuff.\n') self.assertFalse(chdict['isdir']) self.assertEqual(chdict['revision'], '2010-08-07 11:11:49') dateTuple = parsedate_tz('Sat, 07 Aug 2010 11:11:49 
+0000') self.assertEqual(chdict['when'], mktime_tz(dateTuple)) self.assertEqual(chdict['branch'], None) self.assertEqual( chdict['repository'], ':ext:cvshost.example.com:/cvsroot') self.assertEqual(chdict['project'], 'MyModuleName') self.assertEqual(len(chdict['properties']), 0) self.assertEqual(src, 'cvs') def test_CVSMaildirSource_create_change_from_cvs1_12msg(self): m = message_from_string(cvs1_12_msg) src = CVSMaildirSource('/dev/null') src, chdict = src.parse(m) self.assertNotEqual(chdict, None) self.assertEqual(chdict['author'], 'andy') self.assertEqual(len(chdict['files']), 2) self.assertEqual(chdict['files'][0], 'base/module/src/file1.cpp') self.assertEqual(chdict['files'][1], 'base/module/src/file2.cpp') self.assertEqual(chdict['comments'], 'Changes for changes sake\n') self.assertFalse(chdict['isdir']) self.assertEqual(chdict['revision'], '2010-08-11 04:56:44') dateTuple = parsedate_tz('Wed, 11 Aug 2010 04:56:44 +0000') self.assertEqual(chdict['when'], mktime_tz(dateTuple)) self.assertEqual(chdict['branch'], None) self.assertEqual( chdict['repository'], ':ext:cvshost.example.com:/cvsroot') self.assertEqual(chdict['project'], 'MyModuleName') self.assertEqual(len(chdict['properties']), 0) self.assertEqual(src, 'cvs') def test_CVSMaildirSource_create_change_from_cvs1_12_with_no_path(self): msg = cvs1_12_msg.replace('Path: base/module/src', '') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') try: assert src.parse(m)[1] except ValueError: pass else: self.fail('Expect ValueError.') def test_CVSMaildirSource_create_change_with_bad_cvsmode(self): # Branch is indicated after 'Tag:' in modified file list msg = cvs1_11_msg.replace('Cvsmode: 1.11', 'Cvsmode: 9.99') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') try: assert src.parse(m)[1] except ValueError: pass else: self.fail('Expected ValueError') def test_CVSMaildirSource_create_change_with_branch(self): # Branch is indicated after 'Tag:' in modified file list msg = 
cvs1_11_msg.replace(' GNUmakefile', ' Tag: Test_Branch\n GNUmakefile') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m)[1] self.assertEqual(chdict['branch'], 'Test_Branch') def test_CVSMaildirSource_create_change_with_category(self): msg = cvs1_11_msg.replace('Category: None', 'Category: Test category') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m)[1] self.assertEqual(chdict['category'], 'Test category') def test_CVSMaildirSource_create_change_with_no_comment(self): # Strip off comments msg = cvs1_11_msg[:cvs1_11_msg.find('Commented out some stuff')] m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m)[1] self.assertEqual(chdict['comments'], None) def test_CVSMaildirSource_create_change_with_no_files(self): # A message with no files is likely not for us msg = cvs1_11_msg.replace( 'Files: base/module/src/make GNUmakefile,1.362,1.363', '') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m) self.assertEqual(chdict, None) def test_CVSMaildirSource_create_change_with_no_project(self): msg = cvs1_11_msg.replace('Project: MyModuleName', '') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m)[1] self.assertEqual(chdict['project'], None) def test_CVSMaildirSource_create_change_with_no_repository(self): msg = cvs1_11_msg.replace( 'CVSROOT: :ext:cvshost.example.com:/cvsroot', '') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m)[1] self.assertEqual(chdict['repository'], None) def test_CVSMaildirSource_create_change_with_property(self): m = message_from_string(cvs1_11_msg) propDict = {'foo': 'bar'} src = CVSMaildirSource('/dev/null', properties=propDict) chdict = src.parse(m)[1] self.assertEqual(chdict['properties']['foo'], 'bar') 
buildbot-3.4.0/master/buildbot/test/unit/changes/test_manager.py000066400000000000000000000076521413250514000250170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import base from buildbot.changes import manager from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.warnings import assertProducesWarnings from buildbot.warnings import DeprecatedApiWarning class TestChangeManager(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) self.cm = manager.ChangeManager() self.master.startService() yield self.cm.setServiceParent(self.master) self.new_config = mock.Mock() def tearDown(self): return self.master.stopService() def make_sources(self, n, klass=base.ChangeSource, **kwargs): for i in range(n): src = klass(name='ChangeSource %d' % i, **kwargs) yield src @defer.inlineCallbacks def test_reconfigService_add(self): src1, src2 = self.make_sources(2) yield src1.setServiceParent(self.cm) self.new_config.change_sources = [src1, src2] yield self.cm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(src2.parent, 
self.cm) self.assertIdentical(src2.master, self.master) @defer.inlineCallbacks def test_reconfigService_remove(self): src1, = self.make_sources(1) yield src1.setServiceParent(self.cm) self.new_config.change_sources = [] self.assertTrue(src1.running) yield self.cm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertFalse(src1.running) @defer.inlineCallbacks def test_reconfigService_change_reconfigurable(self): src1, = self.make_sources(1, base.ReconfigurablePollingChangeSource, pollInterval=1) yield src1.setServiceParent(self.cm) src2, = self.make_sources(1, base.ReconfigurablePollingChangeSource, pollInterval=2) self.new_config.change_sources = [src2] self.assertTrue(src1.running) self.assertEqual(src1.pollInterval, 1) yield self.cm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertTrue(src1.running) self.assertFalse(src2.running) self.assertEqual(src1.pollInterval, 2) @defer.inlineCallbacks def test_reconfigService_change_legacy(self): with assertProducesWarnings(DeprecatedApiWarning, message_pattern="use ReconfigurablePollingChangeSource"): src1, = self.make_sources(1, base.PollingChangeSource, pollInterval=1) yield src1.setServiceParent(self.cm) with assertProducesWarnings(DeprecatedApiWarning, message_pattern="use ReconfigurablePollingChangeSource"): src2, = self.make_sources(1, base.PollingChangeSource, pollInterval=2) self.new_config.change_sources = [src2] self.assertTrue(src1.running) self.assertEqual(src1.pollInterval, 1) yield self.cm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertFalse(src1.running) self.assertTrue(src2.running) self.assertEqual(src2.pollInterval, 2) buildbot-3.4.0/master/buildbot/test/unit/changes/test_p4poller.py000066400000000000000000000424051413250514000251410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import dateutil.tz from twisted.internet import defer from twisted.internet import error from twisted.internet import reactor from twisted.python import failure from twisted.trial import unittest from buildbot.changes.p4poller import P4PollerError from buildbot.changes.p4poller import P4Source from buildbot.changes.p4poller import get_simple_split from buildbot.test.util import changesource from buildbot.test.util import config from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.runprocess import ExpectMaster from buildbot.test.util.runprocess import MasterRunProcessMixin from buildbot.util import datetime2epoch first_p4changes = \ b"""Change 1 on 2006/04/13 by slamb@testclient 'first rev' """ second_p4changes = \ b"""Change 3 on 2006/04/13 by bob@testclient 'short desc truncated' Change 2 on 2006/04/13 by slamb@testclient 'bar' """ third_p4changes = \ b"""Change 5 on 2006/04/13 by mpatel@testclient 'first rev' """ fourth_p4changes = \ b"""Change 6 on 2006/04/14 by mpatel@testclient 'bar \xd0\x91' """ p4_describe_2 = \ b"""Change 2 by slamb@testclient on 2006/04/13 21:46:23 \tcreation Affected files ... ... //depot/myproject/trunk/whatbranch#1 add ... //depot/otherproject/trunk/something#1 add """ p4_describe_3 = \ """Change 3 by bob@testclient on 2006/04/13 21:51:39 \tshort desc truncated because this is a long description. \tASDF-GUI-P3-\u2018Upgrade Icon\u2019 disappears sometimes. Affected files ... ... 
//depot/myproject/branch_b/branch_b_file#1 add ... //depot/myproject/branch_b/whatbranch#1 branch ... //depot/myproject/branch_c/whatbranch#1 branch """ p4_describe_4 = \ b"""Change 4 by mpatel@testclient on 2006/04/13 21:55:39 \tThis is a multiline comment with tabs and spaces \t \tA list: \t Item 1 \t\tItem 2 Affected files ... ... //depot/myproject/branch_b/branch_b_file#1 add ... //depot/myproject/branch_b#75 edit ... //depot/myproject/branch_c/branch_c_file#1 add """ p4change = { 3: p4_describe_3, 2: p4_describe_2, 5: p4_describe_4, } class FakeTransport: def __init__(self): self.msg = None def write(self, msg): self.msg = msg def closeStdin(self): pass class TestP4Poller(changesource.ChangeSourceMixin, MasterRunProcessMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_master_run_process() yield self.setUpChangeSource() def tearDown(self): return self.tearDownChangeSource() def add_p4_describe_result(self, number, result): self.expect_commands( ExpectMaster(['p4', 'describe', '-s', str(number)]) .stdout(result) ) def makeTime(self, timestring): datefmt = '%Y/%m/%d %H:%M:%S' when = datetime.datetime.strptime(timestring, datefmt) return when @defer.inlineCallbacks def test_describe(self): yield self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1))) self.assertSubstring("p4source", self.changesource.describe()) def test_name(self): # no name: cs1 = P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1)) self.assertEqual("P4Source:None://depot/myproject/", cs1.name) # explicit name: cs2 = P4Source(p4port=None, p4user=None, name='MyName', p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1)) self.assertEqual("MyName", cs2.name) @defer.inlineCallbacks def do_test_poll_successful(self, **kwargs): encoding = kwargs.get('encoding', 'utf8') yield 
self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1), **kwargs)) self.expect_commands( ExpectMaster(['p4', 'changes', '-m', '1', '//depot/myproject/...']) .stdout(first_p4changes), ExpectMaster(['p4', 'changes', '//depot/myproject/...@2,#head']) .stdout(second_p4changes), ) encoded_p4change = p4change.copy() encoded_p4change[3] = encoded_p4change[3].encode(encoding) self.add_p4_describe_result(2, encoded_p4change[2]) self.add_p4_describe_result(3, encoded_p4change[3]) # The first time, it just learns the change to start at. self.assertTrue(self.changesource.last_change is None) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, []) self.assertEqual(self.changesource.last_change, 1) # Subsequent times, it returns Change objects for new changes. yield self.changesource.poll() # when_timestamp is converted from a local time spec, so just # replicate that here when1 = self.makeTime("2006/04/13 21:46:23") when2 = self.makeTime("2006/04/13 21:51:39") # these two can happen in either order, since they're from the same # perforce change. 
changesAdded = self.master.data.updates.changesAdded if changesAdded[1]['branch'] == 'branch_c': changesAdded[1:] = reversed(changesAdded[1:]) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'slamb', 'committer': None, 'branch': 'trunk', 'category': None, 'codebase': None, 'comments': 'creation', 'files': ['whatbranch'], 'project': '', 'properties': {}, 'repository': '', 'revision': '2', 'revlink': '', 'src': None, 'when_timestamp': datetime2epoch(when1), }, { 'author': 'bob', 'committer': None, 'branch': 'branch_b', 'category': None, 'codebase': None, 'comments': 'short desc truncated because this is a long description.\n' 'ASDF-GUI-P3-\u2018Upgrade Icon\u2019 disappears sometimes.', 'files': ['branch_b_file', 'whatbranch'], 'project': '', 'properties': {}, 'repository': '', 'revision': '3', 'revlink': '', 'src': None, 'when_timestamp': datetime2epoch(when2), }, { 'author': 'bob', 'committer': None, 'branch': 'branch_c', 'category': None, 'codebase': None, 'comments': 'short desc truncated because this is a long description.\n' 'ASDF-GUI-P3-\u2018Upgrade Icon\u2019 disappears sometimes.', 'files': ['whatbranch'], 'project': '', 'properties': {}, 'repository': '', 'revision': '3', 'revlink': '', 'src': None, 'when_timestamp': datetime2epoch(when2), }]) self.assert_all_commands_ran() def test_poll_successful_default_encoding(self): return self.do_test_poll_successful() def test_poll_successful_macroman_encoding(self): return self.do_test_poll_successful(encoding='macroman') @defer.inlineCallbacks def test_poll_failed_changes(self): yield self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1))) self.expect_commands( ExpectMaster(['p4', 'changes', '-m', '1', '//depot/myproject/...']) .stdout(b'Perforce client error:\n...') ) # call _poll, so we can catch the failure with self.assertRaises(P4PollerError): yield self.changesource._poll() self.assert_all_commands_ran() 
@defer.inlineCallbacks def test_poll_failed_describe(self): yield self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1))) self.expect_commands( ExpectMaster(['p4', 'changes', '//depot/myproject/...@3,#head']) .stdout(second_p4changes), ) self.add_p4_describe_result(2, p4change[2]) self.add_p4_describe_result(3, b'Perforce client error:\n...') # tell poll() that it's already been called once self.changesource.last_change = 2 # call _poll, so we can catch the failure with self.assertRaises(P4PollerError): yield self.changesource._poll() # check that 2 was processed OK self.assertEqual(self.changesource.last_change, 2) self.assert_all_commands_ran() @defer.inlineCallbacks def test_poll_unicode_error(self): yield self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1))) self.expect_commands( ExpectMaster(['p4', 'changes', '//depot/myproject/...@3,#head']) .stdout(second_p4changes), ) # Add a character which cannot be decoded with utf-8 undecodableText = p4change[2] + b"\x81" self.add_p4_describe_result(2, undecodableText) # tell poll() that it's already been called once self.changesource.last_change = 2 # call _poll, so we can catch the failure with self.assertRaises(UnicodeError): yield self.changesource._poll() self.assert_all_commands_ran() @defer.inlineCallbacks def test_poll_unicode_error2(self): yield self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1), encoding='ascii')) # Trying to decode a certain character with ascii codec should fail. 
self.expect_commands( ExpectMaster(['p4', 'changes', '-m', '1', '//depot/myproject/...']) .stdout(fourth_p4changes), ) yield self.changesource._poll() self.assert_all_commands_ran() @defer.inlineCallbacks def test_acquire_ticket_auth(self): yield self.attachChangeSource( P4Source(p4port=None, p4user='buildbot_user', p4passwd='pass', p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1), use_tickets=True)) self.expect_commands( ExpectMaster(['p4', 'changes', '-m', '1', '//depot/myproject/...']) .stdout(first_p4changes) ) transport = FakeTransport() # p4poller uses only those arguments at the moment def spawnProcess(pp, cmd, argv, env): self.assertEqual([cmd, argv], ['p4', [b'p4', b'-u', b'buildbot_user', b'login']]) pp.makeConnection(transport) self.assertEqual(b'pass\n', transport.msg) pp.outReceived(b'Enter password:\nUser buildbot_user logged in.\n') so = error.ProcessDone(None) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) yield self.changesource.poll() self.assert_all_commands_ran() @defer.inlineCallbacks def test_acquire_ticket_auth_fail(self): yield self.attachChangeSource( P4Source(p4port=None, p4user=None, p4passwd='pass', p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1), use_tickets=True)) self.expect_commands( ExpectMaster(['p4', 'changes', '-m', '1', '//depot/myproject/...']) .stdout(first_p4changes) ) transport = FakeTransport() # p4poller uses only those arguments at the moment def spawnProcess(pp, cmd, argv, env): self.assertEqual([cmd, argv], ['p4', [b'p4', b'login']]) pp.makeConnection(transport) self.assertEqual(b'pass\n', transport.msg) pp.outReceived(b'Enter password:\n') pp.errReceived(b"Password invalid.\n") so = error.ProcessDone(status=1) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) yield self.changesource.poll() @defer.inlineCallbacks def test_poll_split_file(self): """Make sure split file works on branch only changes""" yield 
self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=get_simple_split)) self.expect_commands( ExpectMaster(['p4', 'changes', '//depot/myproject/...@51,#head']) .stdout(third_p4changes), ) self.add_p4_describe_result(5, p4change[5]) self.changesource.last_change = 50 yield self.changesource.poll() # when_timestamp is converted from a local time spec, so just # replicate that here when = self.makeTime("2006/04/13 21:55:39") def changeKey(change): """ Let's sort the array of changes by branch, because in P4Source._poll(), changeAdded() is called by iterating over a dictionary of branches""" return change['branch'] self.assertEqual(sorted(self.master.data.updates.changesAdded, key=changeKey), sorted([{ 'author': 'mpatel', 'committer': None, 'branch': 'branch_c', 'category': None, 'codebase': None, 'comments': 'This is a multiline comment with tabs and spaces\n\nA list:\n ' 'Item 1\n\tItem 2', 'files': ['branch_c_file'], 'project': '', 'properties': {}, 'repository': '', 'revision': '5', 'revlink': '', 'src': None, 'when_timestamp': datetime2epoch(when), }, { 'author': 'mpatel', 'committer': None, 'branch': 'branch_b', 'category': None, 'codebase': None, 'comments': 'This is a multiline comment with tabs and spaces\n\nA list:\n ' 'Item 1\n\tItem 2', 'files': ['branch_b_file'], 'project': '', 'properties': {}, 'repository': '', 'revision': '5', 'revlink': '', 'src': None, 'when_timestamp': datetime2epoch(when), }], key=changeKey)) self.assertEqual(self.changesource.last_change, 5) self.assert_all_commands_ran() @defer.inlineCallbacks def test_server_tz(self): """Verify that the server_tz parameter is handled correctly""" yield self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=get_simple_split, server_tz="Europe/Berlin")) self.expect_commands( ExpectMaster(['p4', 'changes', '//depot/myproject/...@51,#head']) .stdout(third_p4changes), ) self.add_p4_describe_result(5, 
p4change[5]) self.changesource.last_change = 50 yield self.changesource.poll() # when_timestamp is converted from 21:55:39 Berlin time to UTC when_berlin = self.makeTime("2006/04/13 21:55:39") when_berlin = when_berlin.replace( tzinfo=dateutil.tz.gettz('Europe/Berlin')) when = datetime2epoch(when_berlin) self.assertEqual([ch['when_timestamp'] for ch in self.master.data.updates.changesAdded], [when, when]) self.assert_all_commands_ran() def test_resolveWho_callable(self): with self.assertRaisesConfigError( "You need to provide a valid callable for resolvewho"): P4Source(resolvewho=None) class TestSplit(unittest.TestCase): def test_get_simple_split(self): self.assertEqual(get_simple_split('foo/bar'), ('foo', 'bar')) self.assertEqual(get_simple_split('foo-bar'), (None, None)) self.assertEqual(get_simple_split('/bar'), ('', 'bar')) self.assertEqual(get_simple_split('foo/'), ('foo', '')) buildbot-3.4.0/master/buildbot/test/unit/changes/test_pb.py000066400000000000000000000375311413250514000240050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.changes import pb from buildbot.test.fake import fakemaster from buildbot.test.util import changesource from buildbot.test.util import pbmanager from buildbot.test.util.misc import TestReactorMixin class TestPBChangeSource(changesource.ChangeSourceMixin, pbmanager.PBManagerMixin, TestReactorMixin, unittest.TestCase): DEFAULT_CONFIG = dict(port='9999', user='alice', passwd='sekrit', name=changesource.ChangeSourceMixin.DEFAULT_NAME) EXP_DEFAULT_REGISTRATION = ('9999', 'alice', 'sekrit') @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpPBChangeSource() yield self.setUpChangeSource() self.master.pbmanager = self.pbmanager def test_registration_no_workerport(self): return self._test_registration(None, exp_ConfigErrors=True, user='alice', passwd='sekrit') def test_registration_global_workerport(self): return self._test_registration(self.EXP_DEFAULT_REGISTRATION, **self.DEFAULT_CONFIG) def test_registration_custom_port(self): return self._test_registration(('8888', 'alice', 'sekrit'), user='alice', passwd='sekrit', port='8888') def test_registration_no_userpass(self): return self._test_registration(('9939', 'change', 'changepw'), workerPort='9939') def test_registration_no_userpass_no_global(self): return self._test_registration(None, exp_ConfigErrors=True) def test_no_registration_if_master_already_claimed(self): # claim the CS on another master... 
self.setChangeSourceToMaster(self.OTHER_MASTER_ID) # and then use the same args as one of the above success cases, # but expect that it will NOT register return self._test_registration(None, **self.DEFAULT_CONFIG) @defer.inlineCallbacks def test_registration_later_if_master_can_do_it(self): # get the changesource running but not active due to the other master self.setChangeSourceToMaster(self.OTHER_MASTER_ID) yield self.attachChangeSource(pb.PBChangeSource(**self.DEFAULT_CONFIG)) self.startChangeSource() self.assertNotRegistered() # other master goes away self.setChangeSourceToMaster(None) # not quite enough time to cause it to activate self.changesource.clock.advance( self.changesource.POLL_INTERVAL_SEC * 4 / 5) self.assertNotRegistered() # there we go! self.changesource.clock.advance( self.changesource.POLL_INTERVAL_SEC * 2 / 5) self.assertRegistered(*self.EXP_DEFAULT_REGISTRATION) @defer.inlineCallbacks def _test_registration(self, exp_registration, exp_ConfigErrors=False, workerPort=None, **constr_kwargs): cfg = mock.Mock() cfg.protocols = {'pb': {'port': workerPort}} self.attachChangeSource(pb.PBChangeSource(**constr_kwargs)) self.startChangeSource() if exp_ConfigErrors: # if it's not registered, it should raise a ConfigError. 
try: yield self.changesource.reconfigServiceWithBuildbotConfig(cfg) except config.ConfigErrors: pass else: self.fail("Expected ConfigErrors") else: yield self.changesource.reconfigServiceWithBuildbotConfig(cfg) if exp_registration: self.assertRegistered(*exp_registration) yield self.stopChangeSource() if exp_registration: self.assertUnregistered(*exp_registration) self.assertEqual(self.changesource.registration, None) @defer.inlineCallbacks def test_perspective(self): yield self.attachChangeSource( pb.PBChangeSource('alice', 'sekrit', port='8888')) persp = self.changesource.getPerspective(mock.Mock(), 'alice') self.assertIsInstance(persp, pb.ChangePerspective) def test_describe(self): cs = pb.PBChangeSource() self.assertSubstring("PBChangeSource", cs.describe()) def test_name(self): cs = pb.PBChangeSource(port=1234) self.assertEqual("PBChangeSource:1234", cs.name) cs = pb.PBChangeSource(port=1234, prefix="pre") self.assertEqual("PBChangeSource:pre:1234", cs.name) # explicit name: cs = pb.PBChangeSource(name="MyName") self.assertEqual("MyName", cs.name) def test_describe_prefix(self): cs = pb.PBChangeSource(prefix="xyz") self.assertSubstring("PBChangeSource", cs.describe()) self.assertSubstring("xyz", cs.describe()) def test_describe_int(self): cs = pb.PBChangeSource(port=9989) self.assertSubstring("PBChangeSource", cs.describe()) @defer.inlineCallbacks def test_reconfigService_no_change(self): config = mock.Mock() yield self.attachChangeSource(pb.PBChangeSource(port='9876')) self.startChangeSource() yield self.changesource.reconfigServiceWithBuildbotConfig(config) self.assertRegistered('9876', 'change', 'changepw') yield self.stopChangeSource() self.assertUnregistered('9876', 'change', 'changepw') @defer.inlineCallbacks def test_reconfigService_default_changed(self): config = mock.Mock() config.protocols = {'pb': {'port': '9876'}} yield self.attachChangeSource(pb.PBChangeSource()) self.startChangeSource() yield 
self.changesource.reconfigServiceWithBuildbotConfig(config) self.assertRegistered('9876', 'change', 'changepw') config.protocols = {'pb': {'port': '1234'}} yield self.changesource.reconfigServiceWithBuildbotConfig(config) self.assertUnregistered('9876', 'change', 'changepw') self.assertRegistered('1234', 'change', 'changepw') yield self.stopChangeSource() self.assertUnregistered('1234', 'change', 'changepw') @defer.inlineCallbacks def test_reconfigService_default_changed_but_inactive(self): """reconfig one that's not active on this master""" config = mock.Mock() config.protocols = {'pb': {'port': '9876'}} yield self.attachChangeSource(pb.PBChangeSource()) self.setChangeSourceToMaster(self.OTHER_MASTER_ID) self.startChangeSource() yield self.changesource.reconfigServiceWithBuildbotConfig(config) self.assertNotRegistered() config.protocols = {'pb': {'port': '1234'}} yield self.changesource.reconfigServiceWithBuildbotConfig(config) self.assertNotRegistered() yield self.stopChangeSource() self.assertNotRegistered() self.assertNotUnregistered() class TestChangePerspective(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True, wantData=True) @defer.inlineCallbacks def test_addChange_noprefix(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(who="bar", files=['a'])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'bar', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': ['a'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_codebase(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(who="bar", files=[], codebase='cb')) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'bar', 'committer': None, 'branch': None, 
'category': None, 'codebase': 'cb', 'comments': None, 'files': [], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_prefix(self): cp = pb.ChangePerspective(self.master, 'xx/') yield cp.perspective_addChange( dict(who="bar", files=['xx/a', 'yy/b'])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'bar', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': ['a'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_sanitize_None(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange( dict(project=None, revlink=None, repository=None) ) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': None, 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': [], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_when_None(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange( dict(when=None) ) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': None, 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': [], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_files_tuple(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange( dict(files=('a', 'b')) ) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': None, 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': ['a', 'b'], 'project': '', 'properties': 
{}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_unicode(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(author="\N{SNOWMAN}", comments="\N{SNOWMAN}", files=['\N{VERY MUCH GREATER-THAN}'])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': '\u2603', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': '\u2603', 'files': ['\u22d9'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_unicode_as_bytestring(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(author="\N{SNOWMAN}".encode('utf8'), comments="\N{SNOWMAN}".encode( 'utf8'), files=['\N{VERY MUCH GREATER-THAN}'.encode('utf8')])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': '\u2603', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': '\u2603', 'files': ['\u22d9'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_non_utf8_bytestring(self): cp = pb.ChangePerspective(self.master, None) bogus_utf8 = b'\xff\xff\xff\xff' replacement = bogus_utf8.decode('utf8', 'replace') yield cp.perspective_addChange(dict(author=bogus_utf8, files=['a'])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': replacement, 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': ['a'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_old_param_names(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(who='me', when=1234, files=[])) 
self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'me', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': [], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': 1234, }]) @defer.inlineCallbacks def test_createUserObject_git_src(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(who="c ", src='git')) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'c ', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': [], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': 'git', 'when_timestamp': None, }]) buildbot-3.4.0/master/buildbot/test/unit/changes/test_svnpoller.py000066400000000000000000000624721413250514000254320ustar00rootroot00000000000000# coding: utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import xml.dom.minidom from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import svnpoller from buildbot.process.properties import Interpolate from buildbot.test.util import changesource from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.runprocess import ExpectMaster from buildbot.test.util.runprocess import MasterRunProcessMixin # this is the output of "svn info --xml # svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk" prefix_output = b"""\ svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk svn+ssh://svn.twistedmatrix.com/svn/Twisted bbbe8e31-12d6-0310-92fd-ac37d47ddeeb jml 2006-10-01T02:37:34.063255Z """ # and this is "svn info --xml svn://svn.twistedmatrix.com/svn/Twisted". I # think this is kind of a degenerate case.. it might even be a form of error. prefix_output_2 = b"""\ """ # this is the svn info output for a local repository, svn info --xml # file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository # noqa pylint: disable=line-too-long prefix_output_3 = b"""\ file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository c0f47ff4-ba1e-0410-96b5-d44cc5c79e7f warner 2006-10-01T07:37:04.182499Z """ # % svn info --xml file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample/trunk # noqa pylint: disable=line-too-long prefix_output_4 = b"""\ file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample/trunk file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository c0f47ff4-ba1e-0410-96b5-d44cc5c79e7f warner 2006-10-01T07:37:02.286440Z """ # noqa pylint: disable=line-too-long # output from 
svn log on .../SVN-Repository/sample # (so it includes trunk and branches) sample_base = ("file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/" + "_trial_temp/test_vc/repositories/SVN-Repository/sample") sample_logentries = [None] * 6 sample_logentries[5] = b"""\ warner 2006-10-01T19:35:16.165664Z /sample/branch/version.c revised_to_2 """ sample_logentries[4] = b"""\ warner 2006-10-01T19:35:16.165664Z /sample/branch revised_to_2 """ sample_logentries[3] = b"""\ warner 2006-10-01T19:35:16.165664Z /sample/trunk/version.c revised_to_2 """ sample_logentries[2] = b"""\ warner 2006-10-01T19:35:10.215692Z /sample/branch/c\xcc\xa7main.c commit_on_branch """ sample_logentries[1] = b"""\ warner 2006-10-01T19:35:09.154973Z /sample/branch make_branch """ sample_logentries[0] = b"""\ warner 2006-10-01T19:35:08.642045Z /sample /sample/trunk /sample/trunk/subdir/subdir.c /sample/trunk/main.c /sample/trunk/version.c /sample/trunk/subdir sample_project_files """ sample_info_output = b"""\ file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository/sample file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository 4f94adfc-c41e-0410-92d5-fbf86b7c7689 warner 2006-10-01T19:35:16.165664Z """ def make_changes_output(maxrevision): # return what 'svn log' would have just after the given revision was # committed logs = sample_logentries[0:maxrevision] assert len(logs) == maxrevision logs.reverse() output = (b""" """ + b"".join(logs) + b"") return output def make_logentry_elements(maxrevision): "return the corresponding logentry elements for the given revisions" doc = xml.dom.minidom.parseString(make_changes_output(maxrevision)) return doc.getElementsByTagName("logentry") def split_file(path): pieces = path.split("/") if pieces[0] == "branch": return dict(branch="branch", path="/".join(pieces[1:])) if pieces[0] == "trunk": return dict(path="/".join(pieces[1:])) raise RuntimeError("there 
shouldn't be any files like %r" % path) class TestSVNPoller(MasterRunProcessMixin, changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setup_master_run_process() return self.setUpChangeSource() def tearDown(self): return self.tearDownChangeSource() @defer.inlineCallbacks def attachSVNPoller(self, *args, **kwargs): s = svnpoller.SVNPoller(*args, **kwargs) yield self.attachChangeSource(s) return s @defer.inlineCallbacks def test_describe(self): s = yield self.attachSVNPoller('file://') self.assertSubstring("SVNPoller", s.describe()) @defer.inlineCallbacks def test_name(self): s = yield self.attachSVNPoller('file://') self.assertEqual("file://", s.name) s = yield self.attachSVNPoller('file://', name='MyName') self.assertEqual("MyName", s.name) @defer.inlineCallbacks def test_strip_repourl(self): base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk" s = yield self.attachSVNPoller(base + "/") self.assertEqual(s.repourl, base) @defer.inlineCallbacks def do_test_get_prefix(self, base, output, expected): s = yield self.attachSVNPoller(base) self.expect_commands( ExpectMaster(['svn', 'info', '--xml', '--non-interactive', base]) .stdout(output) ) prefix = yield s.get_prefix() self.assertEqual(prefix, expected) self.assert_all_commands_ran() def test_get_prefix_1(self): base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk" return self.do_test_get_prefix(base, prefix_output, 'trunk') def test_get_prefix_2(self): base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted" return self.do_test_get_prefix(base, prefix_output_2, '') def test_get_prefix_3(self): base = ("file:///home/warner/stuff/Projects/Buildbot/trees/" + "svnpoller/_trial_temp/test_vc/repositories/SVN-Repository") return self.do_test_get_prefix(base, prefix_output_3, '') def test_get_prefix_4(self): base = ("file:///home/warner/stuff/Projects/Buildbot/trees/" + "svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample/trunk") return 
self.do_test_get_prefix(base, prefix_output_3, 'sample/trunk') @defer.inlineCallbacks def test_log_parsing(self): s = yield self.attachSVNPoller('file:///foo') output = make_changes_output(4) entries = s.parse_logs(output) # no need for elaborate assertions here; this is minidom's logic self.assertEqual(len(entries), 4) @defer.inlineCallbacks def test_get_new_logentries(self): s = yield self.attachSVNPoller('file:///foo') entries = make_logentry_elements(4) s.last_change = 4 new = s.get_new_logentries(entries) self.assertEqual(s.last_change, 4) self.assertEqual(len(new), 0) s.last_change = 3 new = s.get_new_logentries(entries) self.assertEqual(s.last_change, 4) self.assertEqual(len(new), 1) s.last_change = 1 new = s.get_new_logentries(entries) self.assertEqual(s.last_change, 4) self.assertEqual(len(new), 3) # special case: if last_change is None, then no new changes are queued s.last_change = None new = s.get_new_logentries(entries) self.assertEqual(s.last_change, 4) self.assertEqual(len(new), 0) @defer.inlineCallbacks def test_get_text(self): doc = xml.dom.minidom.parseString(""" hi 1 2 """.strip()) s = yield self.attachSVNPoller('http://', split_file=split_file) self.assertEqual(s._get_text(doc, 'grandchild'), '1') self.assertEqual(s._get_text(doc, 'nonexistent'), 'unknown') @defer.inlineCallbacks def test_create_changes(self): base = ("file:///home/warner/stuff/Projects/Buildbot/trees/" + "svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample") s = yield self.attachSVNPoller(base, split_file=split_file) s._prefix = "sample" logentries = dict( zip(range(1, 7), reversed(make_logentry_elements(6)))) changes = s.create_changes(reversed([logentries[3], logentries[2]])) self.assertEqual(len(changes), 2) # note that parsing occurs in reverse self.assertEqual(changes[0]['branch'], "branch") self.assertEqual(changes[0]['revision'], '2') self.assertEqual(changes[0]['project'], '') self.assertEqual(changes[0]['repository'], base) 
self.assertEqual(changes[1]['branch'], "branch") self.assertEqual(changes[1]['files'], ["çmain.c"]) self.assertEqual(changes[1]['revision'], '3') self.assertEqual(changes[1]['project'], '') self.assertEqual(changes[1]['repository'], base) changes = s.create_changes([logentries[4]]) self.assertEqual(len(changes), 1) self.assertEqual(changes[0]['branch'], None) self.assertEqual(changes[0]['revision'], '4') self.assertEqual(changes[0]['files'], ["version.c"]) # r5 should *not* create a change as it's a branch deletion changes = s.create_changes([logentries[5]]) self.assertEqual(len(changes), 0) # r6 should create a change as it's not deleting an entire branch changes = s.create_changes([logentries[6]]) self.assertEqual(len(changes), 1) self.assertEqual(changes[0]['branch'], 'branch') self.assertEqual(changes[0]['revision'], '6') self.assertEqual(changes[0]['files'], ["version.c"]) def makeInfoExpect(self, password='bbrocks'): args = ['svn', 'info', '--xml', '--non-interactive', sample_base, '--username=dustin'] if password is not None: args.append('--password=' + password) return ExpectMaster(args) def makeLogExpect(self, password='bbrocks'): args = ['svn', 'log', '--xml', '--verbose', '--non-interactive', '--username=dustin'] if password is not None: args.append('--password=' + password) args.extend(['--limit=100', sample_base]) return ExpectMaster(args) @defer.inlineCallbacks def test_create_changes_overridden_project(self): def custom_split_file(path): f = split_file(path) if f: f["project"] = "overridden-project" f["repository"] = "overridden-repository" f["codebase"] = "overridden-codebase" return f base = ("file:///home/warner/stuff/Projects/Buildbot/trees/" + "svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample") s = yield self.attachSVNPoller(base, split_file=custom_split_file) s._prefix = "sample" logentries = dict( zip(range(1, 7), reversed(make_logentry_elements(6)))) changes = s.create_changes(reversed([logentries[3], logentries[2]])) 
self.assertEqual(len(changes), 2) # note that parsing occurs in reverse self.assertEqual(changes[0]['branch'], "branch") self.assertEqual(changes[0]['revision'], '2') self.assertEqual(changes[0]['project'], "overridden-project") self.assertEqual(changes[0]['repository'], "overridden-repository") self.assertEqual(changes[0]['codebase'], "overridden-codebase") self.assertEqual(changes[1]['branch'], "branch") self.assertEqual(changes[1]['files'], ['çmain.c']) self.assertEqual(changes[1]['revision'], '3') self.assertEqual(changes[1]['project'], "overridden-project") self.assertEqual(changes[1]['repository'], "overridden-repository") self.assertEqual(changes[1]['codebase'], "overridden-codebase") @defer.inlineCallbacks def test_poll(self): s = yield self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin', svnpasswd='bbrocks') self.expect_commands( self.makeInfoExpect().stdout(sample_info_output), self.makeLogExpect().stdout(make_changes_output(1)), self.makeLogExpect().stdout(make_changes_output(1)), self.makeLogExpect().stdout(make_changes_output(2)), self.makeLogExpect().stdout(make_changes_output(4)), ) # fire it the first time; it should do nothing yield s.poll() # no changes generated on the first iteration self.assertEqual(self.master.data.updates.changesAdded, []) self.assertEqual(s.last_change, 1) # now fire it again, nothing changing yield s.poll() self.assertEqual(self.master.data.updates.changesAdded, []) self.assertEqual(s.last_change, 1) # and again, with r2 this time yield s.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'warner', 'committer': None, 'branch': 'branch', 'category': None, 'codebase': None, 'comments': 'make_branch', 'files': [''], 'project': '', 'properties': {}, 'repository': 'file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository/sample', # noqa pylint: disable=line-too-long 'revision': '2', 'revlink': '', 'src': 'svn', 'when_timestamp': 
None, }]) self.assertEqual(s.last_change, 2) # and again with both r3 and r4 appearing together self.master.data.updates.changesAdded = [] yield s.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'warner', 'committer': None, 'branch': 'branch', 'category': None, 'codebase': None, 'comments': 'commit_on_branch', 'files': ['çmain.c'], 'project': '', 'properties': {}, 'repository': 'file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository/sample', # noqa pylint: disable=line-too-long 'revision': '3', 'revlink': '', 'src': 'svn', 'when_timestamp': None, }, { 'author': 'warner', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': 'revised_to_2', 'files': ['version.c'], 'project': '', 'properties': {}, 'repository': 'file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository/sample', # noqa pylint: disable=line-too-long 'revision': '4', 'revlink': '', 'src': 'svn', 'when_timestamp': None, }]) self.assertEqual(s.last_change, 4) self.assert_all_commands_ran() @defer.inlineCallbacks def test_poll_empty_password(self): s = yield self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin', svnpasswd='') self.expect_commands( self.makeInfoExpect(password="").stdout(sample_info_output), self.makeLogExpect(password="").stdout(make_changes_output(1)), self.makeLogExpect(password="").stdout(make_changes_output(1)), self.makeLogExpect(password="").stdout(make_changes_output(2)), self.makeLogExpect(password="").stdout(make_changes_output(4)), ) yield s.poll() @defer.inlineCallbacks def test_poll_no_password(self): s = yield self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin') self.expect_commands( self.makeInfoExpect(password=None).stdout(sample_info_output), self.makeLogExpect(password=None).stdout(make_changes_output(1)), self.makeLogExpect(password=None).stdout(make_changes_output(1)), 
self.makeLogExpect(password=None).stdout(make_changes_output(2)), self.makeLogExpect(password=None).stdout(make_changes_output(4)), ) yield s.poll() @defer.inlineCallbacks def test_poll_interpolated_password(self): s = yield self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin', svnpasswd=Interpolate('pa$$')) self.expect_commands( self.makeInfoExpect(password='pa$$').stdout(sample_info_output), self.makeLogExpect(password='pa$$').stdout(make_changes_output(1)), self.makeLogExpect(password='pa$$').stdout(make_changes_output(1)), self.makeLogExpect(password='pa$$').stdout(make_changes_output(2)), self.makeLogExpect(password='pa$$').stdout(make_changes_output(4)), ) yield s.poll() @defer.inlineCallbacks def test_poll_get_prefix_exception(self): s = yield self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin', svnpasswd='bbrocks') self.expect_commands( self.makeInfoExpect().stderr(b"error")) yield s.poll() # should have logged the RuntimeError, but not errback'd from poll self.assertEqual(len(self.flushLoggedErrors(EnvironmentError)), 1) self.assert_all_commands_ran() @defer.inlineCallbacks def test_poll_get_logs_exception(self): s = yield self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin', svnpasswd='bbrocks') s._prefix = "abc" # skip the get_prefix stuff self.expect_commands( self.makeLogExpect().stderr(b"some error")) yield s.poll() # should have logged the RuntimeError, but not errback'd from poll self.assertEqual(len(self.flushLoggedErrors(EnvironmentError)), 1) self.assert_all_commands_ran() @defer.inlineCallbacks def test_cachepath_empty(self): cachepath = os.path.abspath('revcache') if os.path.exists(cachepath): os.unlink(cachepath) s = yield self.attachSVNPoller(sample_base, cachepath=cachepath) self.assertEqual(s.last_change, None) @defer.inlineCallbacks def test_cachepath_full(self): cachepath = os.path.abspath('revcache') with open(cachepath, "w") as f: f.write('33') s = yield 
self.attachSVNPoller(sample_base, cachepath=cachepath) self.assertEqual(s.last_change, 33) s.last_change = 44 s.finished_ok(None) with open(cachepath) as f: self.assertEqual(f.read().strip(), '44') @defer.inlineCallbacks def test_cachepath_bogus(self): cachepath = os.path.abspath('revcache') with open(cachepath, "w") as f: f.write('nine') s = yield self.attachSVNPoller(sample_base, cachepath=cachepath) self.assertEqual(s.last_change, None) self.assertEqual(s.cachepath, None) # it should have called log.err once with a ValueError self.assertEqual(len(self.flushLoggedErrors(ValueError)), 1) def test_constructor_pollinterval(self): return self.attachSVNPoller(sample_base, pollinterval=100) # just don't fail! @defer.inlineCallbacks def test_extra_args(self): extra_args = ['--no-auth-cache', ] base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk" s = yield self.attachSVNPoller(repourl=base, extra_args=extra_args) self.assertEqual(s.extra_args, extra_args) @defer.inlineCallbacks def test_use_svnurl(self): base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk" with self.assertRaises(TypeError): yield self.attachSVNPoller(svnurl=base) class TestSplitFile(unittest.TestCase): def test_split_file_alwaystrunk(self): self.assertEqual( svnpoller.split_file_alwaystrunk('foo'), dict(path='foo')) def test_split_file_branches_trunk(self): self.assertEqual( svnpoller.split_file_branches('trunk/'), (None, '')) def test_split_file_branches_trunk_subdir(self): self.assertEqual( svnpoller.split_file_branches('trunk/subdir/'), (None, 'subdir/')) def test_split_file_branches_trunk_subfile(self): self.assertEqual( svnpoller.split_file_branches('trunk/subdir/file.c'), (None, 'subdir/file.c')) def test_split_file_branches_trunk_invalid(self): # file named trunk (not a directory): self.assertEqual( svnpoller.split_file_branches('trunk'), None) def test_split_file_branches_branch(self): self.assertEqual( svnpoller.split_file_branches('branches/1.5.x/'), ('branches/1.5.x', '')) def 
test_split_file_branches_branch_subdir(self): self.assertEqual( svnpoller.split_file_branches('branches/1.5.x/subdir/'), ('branches/1.5.x', 'subdir/')) def test_split_file_branches_branch_subfile(self): self.assertEqual( svnpoller.split_file_branches('branches/1.5.x/subdir/file.c'), ('branches/1.5.x', 'subdir/file.c')) def test_split_file_branches_branch_invalid(self): # file named branches/1.5.x (not a directory): self.assertEqual( svnpoller.split_file_branches('branches/1.5.x'), None) def test_split_file_branches_otherdir(self): # other dirs are ignored: self.assertEqual( svnpoller.split_file_branches('tags/testthis/subdir/'), None) def test_split_file_branches_otherfile(self): # other files are ignored: self.assertEqual( svnpoller.split_file_branches('tags/testthis/subdir/file.c'), None) def test_split_file_projects_branches(self): self.assertEqual( svnpoller.split_file_projects_branches( 'buildbot/trunk/subdir/file.c'), dict(project='buildbot', path='subdir/file.c')) self.assertEqual( svnpoller.split_file_projects_branches( 'buildbot/branches/1.5.x/subdir/file.c'), dict(project='buildbot', branch='branches/1.5.x', path='subdir/file.c')) # tags are ignored: self.assertEqual( svnpoller.split_file_projects_branches( 'buildbot/tags/testthis/subdir/file.c'), None) buildbot-3.4.0/master/buildbot/test/unit/data/000077500000000000000000000000001413250514000212635ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/data/__init__.py000066400000000000000000000000001413250514000233620ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/data/test_base.py000066400000000000000000000113541413250514000236120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.trial import unittest from buildbot.data import base from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util.misc import TestReactorMixin class ResourceType(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() def makeResourceTypeSubclass(self, **attributes): attributes.setdefault('name', 'thing') return type('ThingResourceType', (base.ResourceType,), attributes) def test_sets_master(self): cls = self.makeResourceTypeSubclass() master = mock.Mock() inst = cls(master) self.assertIdentical(inst.master, master) def test_getEndpoints_instances_fails(self): ep = base.Endpoint(None, None) cls = self.makeResourceTypeSubclass(endpoints=[ep]) inst = cls(None) with self.assertRaises(TypeError): inst.getEndpoints() def test_getEndpoints_classes(self): class MyEndpoint(base.Endpoint): pass cls = self.makeResourceTypeSubclass(endpoints=[MyEndpoint]) master = mock.Mock() inst = cls(master) eps = inst.getEndpoints() self.assertIsInstance(eps[0], MyEndpoint) self.assertIdentical(eps[0].master, master) def test_produceEvent(self): cls = self.makeResourceTypeSubclass( name='singular', eventPathPatterns="/foo/:fooid/bar/:barid") master = fakemaster.make_master(self, wantMq=True) master.mq.verifyMessages = False # since this is a pretend message inst = cls(master) inst.produceEvent(dict(fooid=10, barid='20'), # note integer vs. 
string 'tested') master.mq.assertProductions([ (('foo', '10', 'bar', '20', 'tested'), dict(fooid=10, barid='20')) ]) def test_compilePatterns(self): class MyResourceType(base.ResourceType): eventPathPatterns = """ /builder/:builderid/build/:number /build/:buildid """ master = fakemaster.make_master(self, wantMq=True) master.mq.verifyMessages = False # since this is a pretend message inst = MyResourceType(master) self.assertEqual( inst.eventPaths, ['builder/{builderid}/build/{number}', 'build/{buildid}']) class Endpoint(endpoint.EndpointMixin, unittest.TestCase): class MyResourceType(base.ResourceType): name = "my" class MyEndpoint(base.Endpoint): pathPatterns = """ /my/pattern """ endpointClass = MyEndpoint resourceTypeClass = MyResourceType def setUp(self): self.setUpEndpoint() def tearDown(self): self.tearDownEndpoint() def test_sets_master(self): self.assertIdentical(self.master, self.ep.master) class ListResult(unittest.TestCase): def test_constructor(self): lr = base.ListResult([1, 2, 3], offset=10, total=20, limit=3) self.assertEqual(lr.data, [1, 2, 3]) self.assertEqual(lr.offset, 10) self.assertEqual(lr.total, 20) self.assertEqual(lr.limit, 3) def test_repr(self): lr = base.ListResult([1, 2, 3], offset=10, total=20, limit=3) self.assertTrue(repr(lr).startswith('ListResult')) def test_eq(self): lr1 = base.ListResult([1, 2, 3], offset=10, total=20, limit=3) lr2 = base.ListResult([1, 2, 3], offset=20, total=30, limit=3) lr3 = base.ListResult([1, 2, 3], offset=20, total=30, limit=3) self.assertEqual(lr2, lr3) self.assertNotEqual(lr1, lr2) self.assertNotEqual(lr1, lr3) def test_eq_to_list(self): list = [1, 2, 3] lr1 = base.ListResult([1, 2, 3], offset=10, total=20, limit=3) self.assertNotEqual(lr1, list) lr2 = base.ListResult([1, 2, 3], offset=None, total=None, limit=None) self.assertEqual(lr2, list) lr3 = base.ListResult([1, 2, 3], total=3) self.assertEqual(lr3, list) lr4 = base.ListResult([1, 2, 3], total=4) self.assertNotEqual(lr4, list) 
buildbot-3.4.0/master/buildbot/test/unit/data/test_build_data.py000066400000000000000000000305471413250514000247750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from twisted.trial import unittest from buildbot.data import build_data from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class TestBuildDataNoValueEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = build_data.BuildDataNoValueEndpoint resourceTypeClass = build_data.BuildData def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.BuildData(id=91, buildid=30, name='name1', value=b'value1', source='source1'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing_build_data_by_build_id(self): result = yield self.callGet(('builds', 30, 'data', 'name1')) self.validateData(result) 
self.assertEqual(result, { 'buildid': 30, 'name': 'name1', 'value': None, 'source': 'source1', 'length': 6, }) @defer.inlineCallbacks def test_get_existing_build_data_by_builder_name_build_number(self): result = yield self.callGet(('builders', 'b1', 'builds', 7, 'data', 'name1')) self.validateData(result) self.assertEqual(result, { 'buildid': 30, 'name': 'name1', 'value': None, 'source': 'source1', 'length': 6, }) @defer.inlineCallbacks def test_get_existing_build_data_by_builder_id_build_number(self): result = yield self.callGet(('builders', 88, 'builds', 7, 'data', 'name1')) self.validateData(result) self.assertEqual(result, { 'buildid': 30, 'name': 'name1', 'value': None, 'length': 6, 'source': 'source1', }) @defer.inlineCallbacks def test_get_missing_by_build_id_missing_build(self): result = yield self.callGet(('builds', 31, 'data', 'name1')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_build_id_missing_name(self): result = yield self.callGet(('builds', 30, 'data', 'name_missing')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_name_build_number_missing_builder(self): result = yield self.callGet(('builders', 'b_missing', 'builds', 7, 'data', 'name1')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_name_build_number_missing_build(self): result = yield self.callGet(('builders', 'b1', 'builds', 17, 'data', 'name1')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_name_build_number_missing_name(self): result = yield self.callGet(('builders', 'b1', 'builds', 7, 'data', 'name_missing')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_id_build_number_missing_builder(self): result = yield self.callGet(('builders', 188, 'builds', 7, 'data', 'name1')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_id_build_number_missing_build(self): result = yield self.callGet(('builders', 88, 
'builds', 17, 'data', 'name1')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_id_build_number_missing_name(self): result = yield self.callGet(('builders', 88, 'builds', 7, 'data', 'name_missing')) self.assertIsNone(result) class TestBuildDataEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = build_data.BuildDataEndpoint resourceTypeClass = build_data.BuildData def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.BuildData(id=91, buildid=30, name='name1', value=b'value1', source='source1'), ]) def tearDown(self): self.tearDownEndpoint() def validateData(self, data): self.assertIsInstance(data['raw'], bytes) self.assertIsInstance(data['mime-type'], str) self.assertIsInstance(data['filename'], str) @defer.inlineCallbacks def test_get_existing_build_data_by_build_id(self): result = yield self.callGet(('builds', 30, 'data', 'name1', 'value')) self.validateData(result) self.assertEqual(result, { 'raw': b'value1', 'mime-type': 'application/octet-stream', 'filename': 'name1', }) @defer.inlineCallbacks def test_get_existing_build_data_by_builder_name_build_number(self): result = yield self.callGet(('builders', 'b1', 'builds', 7, 'data', 'name1', 'value')) self.validateData(result) self.assertEqual(result, { 'raw': b'value1', 'mime-type': 'application/octet-stream', 'filename': 'name1', }) @defer.inlineCallbacks def test_get_existing_build_data_by_builder_id_build_number(self): result = yield self.callGet(('builders', 88, 'builds', 7, 'data', 'name1', 'value')) self.validateData(result) self.assertEqual(result, { 'raw': b'value1', 'mime-type': 'application/octet-stream', 'filename': 'name1', }) @defer.inlineCallbacks def 
test_get_missing_by_build_id_missing_build(self): result = yield self.callGet(('builds', 31, 'data', 'name1', 'value')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_build_id_missing_name(self): result = yield self.callGet(('builds', 30, 'data', 'name_missing', 'value')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_name_build_number_missing_builder(self): result = yield self.callGet(('builders', 'b_missing', 'builds', 7, 'data', 'name1', 'value')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_name_build_number_missing_build(self): result = yield self.callGet(('builders', 'b1', 'builds', 17, 'data', 'name1', 'value')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_name_build_number_missing_name(self): result = yield self.callGet(('builders', 'b1', 'builds', 7, 'data', 'name_missing', 'value')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_id_build_number_missing_builder(self): result = yield self.callGet(('builders', 188, 'builds', 7, 'data', 'name1', 'value')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_id_build_number_missing_build(self): result = yield self.callGet(('builders', 88, 'builds', 17, 'data', 'name1', 'value')) self.assertIsNone(result) @defer.inlineCallbacks def test_get_missing_by_builder_id_build_number_missing_name(self): result = yield self.callGet(('builders', 88, 'builds', 7, 'data', 'name_missing', 'value')) self.assertIsNone(result) class TestBuildDatasNoValueEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = build_data.BuildDatasNoValueEndpoint resourceTypeClass = build_data.BuildData def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.Master(id=88), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), 
fakedb.BuildRequest(id=42, buildsetid=20, builderid=88), fakedb.BuildRequest(id=43, buildsetid=20, builderid=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Build(id=31, buildrequestid=42, number=8, masterid=88, builderid=88, workerid=47), fakedb.Build(id=32, buildrequestid=42, number=9, masterid=88, builderid=88, workerid=47), fakedb.BuildData(id=91, buildid=30, name='name1', value=b'value1', source='source1'), fakedb.BuildData(id=92, buildid=30, name='name2', value=b'value2', source='source2'), fakedb.BuildData(id=93, buildid=31, name='name3', value=b'value3', source='source3'), ]) def tearDown(self): self.tearDownEndpoint() @parameterized.expand([ ('multiple_values', 7, ['name1', 'name2']), ('single_value', 8, ['name3']), ('no_values', 9, []), ('not_existing', 10, []), ]) @defer.inlineCallbacks def test_get_builders_builder_name(self, name, build_number, exp_names): results = yield self.callGet(('builders', 'b1', 'builds', build_number, 'data')) for result in results: self.validateData(result) self.assertEqual([r['name'] for r in results], exp_names) @parameterized.expand([ ('multiple_values', 7, ['name1', 'name2']), ('single_value', 8, ['name3']), ('no_values', 9, []), ('not_existing', 10, []), ]) @defer.inlineCallbacks def test_get_builders_builder_id(self, name, build_number, exp_names): results = yield self.callGet(('builders', 88, 'builds', build_number, 'data')) for result in results: self.validateData(result) self.assertEqual([r['name'] for r in results], exp_names) @parameterized.expand([ ('multiple_values', 30, ['name1', 'name2']), ('single_value', 31, ['name3']), ('no_values', 32, []), ('not_existing', 33, []), ]) @defer.inlineCallbacks def test_get_builds_id(self, name, buildid, exp_names): results = yield self.callGet(('builds', buildid, 'data')) for result in results: self.validateData(result) self.assertEqual([r['name'] for r in results], exp_names) class TestBuildData(TestReactorMixin, 
interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = build_data.BuildData(self.master) def test_signature_set_build_data(self): @self.assertArgSpecMatches(self.master.data.updates.setBuildData, self.rtype.setBuildData) def setBuildData(self, buildid, name, value, source): pass @defer.inlineCallbacks def test_set_build_data(self): yield self.rtype.setBuildData(buildid=2, name='name1', value=b'value1', source='source1') result = yield self.master.db.build_data.getBuildData(2, 'name1') self.assertEqual(result, { 'buildid': 2, 'name': 'name1', 'value': b'value1', 'length': 6, 'source': 'source1', }) buildbot-3.4.0/master/buildbot/test/unit/data/test_builders.py000066400000000000000000000245401413250514000245120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import builders from buildbot.data import resultspec from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class BuilderEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = builders.BuilderEndpoint resourceTypeClass = builders.Builder def setUp(self): self.setUpEndpoint() return self.db.insertTestData([ fakedb.Builder(id=1, name='buildera'), fakedb.Builder(id=2, name='builderb'), fakedb.Master(id=13), fakedb.BuilderMaster(id=1, builderid=2, masterid=13), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): builder = yield self.callGet(('builders', 2)) self.validateData(builder) self.assertEqual(builder['name'], 'builderb') @defer.inlineCallbacks def test_get_missing(self): builder = yield self.callGet(('builders', 99)) self.assertEqual(builder, None) @defer.inlineCallbacks def test_get_missing_with_name(self): builder = yield self.callGet(('builders', 'builderc')) self.assertEqual(builder, None) @defer.inlineCallbacks def test_get_existing_with_master(self): builder = yield self.callGet(('masters', 13, 'builders', 2)) self.validateData(builder) self.assertEqual(builder['name'], 'builderb') @defer.inlineCallbacks def test_get_existing_with_different_master(self): builder = yield self.callGet(('masters', 14, 'builders', 2)) self.assertEqual(builder, None) @defer.inlineCallbacks def test_get_missing_with_master(self): builder = yield self.callGet(('masters', 13, 'builders', 99)) self.assertEqual(builder, None) class BuildersEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = builders.BuildersEndpoint resourceTypeClass = builders.Builder def setUp(self): self.setUpEndpoint() return 
self.db.insertTestData([ fakedb.Builder(id=1, name='buildera'), fakedb.Builder(id=2, name='builderb'), fakedb.Builder(id=3, name='builderTagA'), fakedb.Builder(id=4, name='builderTagB'), fakedb.Builder(id=5, name='builderTagAB'), fakedb.Tag(id=3, name="tagA"), fakedb.Tag(id=4, name="tagB"), fakedb.BuildersTags(builderid=3, tagid=3), fakedb.BuildersTags(builderid=4, tagid=4), fakedb.BuildersTags(builderid=5, tagid=3), fakedb.BuildersTags(builderid=5, tagid=4), fakedb.Master(id=13), fakedb.BuilderMaster(id=1, builderid=2, masterid=13), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): builders = yield self.callGet(('builders',)) [self.validateData(b) for b in builders] self.assertEqual(sorted([b['builderid'] for b in builders]), [1, 2, 3, 4, 5]) @defer.inlineCallbacks def test_get_masterid(self): builders = yield self.callGet(('masters', 13, 'builders')) [self.validateData(b) for b in builders] self.assertEqual(sorted([b['builderid'] for b in builders]), [2]) @defer.inlineCallbacks def test_get_masterid_missing(self): builders = yield self.callGet(('masters', 14, 'builders')) self.assertEqual(sorted([b['builderid'] for b in builders]), []) @defer.inlineCallbacks def test_get_contains_one_tag(self): resultSpec = resultspec.ResultSpec( filters=[resultspec.Filter('tags', 'contains', ["tagA"])]) builders = yield self.callGet(('builders',)) builders = resultSpec.apply(builders) [self.validateData(b) for b in builders] self.assertEqual(sorted([b['builderid'] for b in builders]), [3, 5]) @defer.inlineCallbacks def test_get_contains_two_tags(self): resultSpec = resultspec.ResultSpec( filters=[resultspec.Filter('tags', 'contains', ["tagA", "tagB"])]) builders = yield self.callGet(('builders',)) builders = resultSpec.apply(builders) [self.validateData(b) for b in builders] self.assertEqual(sorted([b['builderid'] for b in builders]), [3, 4, 5]) @defer.inlineCallbacks def test_get_contains_two_tags_one_unknown(self): resultSpec = 
resultspec.ResultSpec( filters=[resultspec.Filter('tags', 'contains', ["tagA", "tagC"])]) builders = yield self.callGet(('builders',)) builders = resultSpec.apply(builders) [self.validateData(b) for b in builders] self.assertEqual(sorted([b['builderid'] for b in builders]), [3, 5]) class Builder(interfaces.InterfaceTests, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = builders.Builder(self.master) return self.master.db.insertTestData([ fakedb.Master(id=13), fakedb.Master(id=14), ]) def test_signature_findBuilderId(self): @self.assertArgSpecMatches( self.master.data.updates.findBuilderId, # fake self.rtype.findBuilderId) # real def findBuilderId(self, name): pass def test_findBuilderId(self): # this just passes through to the db method, so test that rv = defer.succeed(None) self.master.db.builders.findBuilderId = mock.Mock(return_value=rv) self.assertIdentical(self.rtype.findBuilderId('foo'), rv) def test_signature_updateBuilderInfo(self): @self.assertArgSpecMatches(self.master.data.updates.updateBuilderInfo) def updateBuilderInfo(self, builderid, description, tags): pass def test_signature_updateBuilderList(self): @self.assertArgSpecMatches( self.master.data.updates.updateBuilderList, # fake self.rtype.updateBuilderList) # real def updateBuilderList(self, masterid, builderNames): pass @defer.inlineCallbacks def test_updateBuilderList(self): # add one builder master yield self.rtype.updateBuilderList(13, ['somebuilder']) self.assertEqual(sorted((yield self.master.db.builders.getBuilders())), sorted([ dict(id=1, masterids=[13], name='somebuilder', description=None, tags=[]), ])) self.master.mq.assertProductions([(('builders', '1', 'started'), {'builderid': 1, 'masterid': 13, 'name': 'somebuilder'})]) # add another yield self.rtype.updateBuilderList(13, ['somebuilder', 'another']) def builderKey(builder): return builder['id'] 
self.assertEqual(sorted((yield self.master.db.builders.getBuilders()), key=builderKey), sorted([ dict(id=1, masterids=[13], name='somebuilder', description=None, tags=[]), dict(id=2, masterids=[13], name='another', description=None, tags=[]), ], key=builderKey)) self.master.mq.assertProductions([(('builders', '2', 'started'), {'builderid': 2, 'masterid': 13, 'name': 'another'})]) # add one for another master yield self.rtype.updateBuilderList(14, ['another']) self.assertEqual(sorted((yield self.master.db.builders.getBuilders()), key=builderKey), sorted([ dict(id=1, masterids=[13], name='somebuilder', description=None, tags=[]), dict(id=2, masterids=[13, 14], name='another', description=None, tags=[]), ], key=builderKey)) self.master.mq.assertProductions([(('builders', '2', 'started'), {'builderid': 2, 'masterid': 14, 'name': 'another'})]) # remove both for the first master yield self.rtype.updateBuilderList(13, []) self.assertEqual(sorted((yield self.master.db.builders.getBuilders()), key=builderKey), sorted([ dict( id=1, masterids=[], name='somebuilder', description=None, tags=[]), dict( id=2, masterids=[14], name='another', description=None, tags=[]), ], key=builderKey)) self.master.mq.assertProductions([ (('builders', '1', 'stopped'), {'builderid': 1, 'masterid': 13, 'name': 'somebuilder'}), (('builders', '2', 'stopped'), {'builderid': 2, 'masterid': 13, 'name': 'another'}), ]) @defer.inlineCallbacks def test__masterDeactivated(self): # this method just calls updateBuilderList, so test that. self.rtype.updateBuilderList = mock.Mock( spec=self.rtype.updateBuilderList) yield self.rtype._masterDeactivated(10) self.rtype.updateBuilderList.assert_called_with(10, []) buildbot-3.4.0/master/buildbot/test/unit/data/test_buildrequests.py000066400000000000000000000617111413250514000255750ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import buildrequests from buildbot.data import resultspec from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import UTC from buildbot.util import epoch2datetime class TestBuildRequestEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = buildrequests.BuildRequestEndpoint resourceTypeClass = buildrequests.BuildRequest CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC) CLAIMED_AT_EPOCH = 266761875 SUBMITTED_AT = datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC) SUBMITTED_AT_EPOCH = 298297875 COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC) COMPLETE_AT_EPOCH = 329920275 def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77, name='bbb'), fakedb.Master(id=fakedb.FakeBuildRequestsComponent.MASTER_ID), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=44, buildsetid=8822, builderid=77, priority=7, submitted_at=self.SUBMITTED_AT_EPOCH, waited_for=1), fakedb.BuildsetProperty(buildsetid=8822, 
property_name='prop1', property_value='["one", "fake1"]'), fakedb.BuildsetProperty(buildsetid=8822, property_name='prop2', property_value='["two", "fake2"]'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def testGetExisting(self): self.db.buildrequests.claimBuildRequests( [44], claimed_at=self.CLAIMED_AT) self.db.buildrequests.completeBuildRequests( [44], 75, complete_at=self.COMPLETE_AT) buildrequest = yield self.callGet(('buildrequests', 44)) self.validateData(buildrequest) # check data formatting: self.assertEqual(buildrequest['buildrequestid'], 44) self.assertEqual(buildrequest['complete'], True) self.assertEqual(buildrequest['builderid'], 77) self.assertEqual(buildrequest['waited_for'], True) self.assertEqual(buildrequest['claimed_at'], self.CLAIMED_AT) self.assertEqual(buildrequest['results'], 75) self.assertEqual(buildrequest['claimed_by_masterid'], fakedb.FakeBuildRequestsComponent.MASTER_ID) self.assertEqual(buildrequest['claimed'], True) self.assertEqual(buildrequest['submitted_at'], self.SUBMITTED_AT) self.assertEqual(buildrequest['complete_at'], self.COMPLETE_AT) self.assertEqual(buildrequest['buildsetid'], 8822) self.assertEqual(buildrequest['priority'], 7) self.assertEqual(buildrequest['properties'], None) @defer.inlineCallbacks def testGetMissing(self): buildrequest = yield self.callGet(('buildrequests', 9999)) self.assertEqual(buildrequest, None) @defer.inlineCallbacks def testGetProperty(self): prop = resultspec.Property(b'property', 'eq', 'prop1') buildrequest = yield self.callGet(('buildrequests', 44), resultSpec=resultspec.ResultSpec(properties=[prop])) self.assertEqual(buildrequest['buildrequestid'], 44) self.assertEqual(buildrequest['properties'], {'prop1': ('one', 'fake1')}) @defer.inlineCallbacks def testGetProperties(self): prop = resultspec.Property(b'property', 'eq', '*') buildrequest = yield self.callGet(('buildrequests', 44), resultSpec=resultspec.ResultSpec(properties=[prop])) 
self.assertEqual(buildrequest['buildrequestid'], 44) self.assertEqual(buildrequest['properties'], {'prop1': ('one', 'fake1'), 'prop2': ('two', 'fake2')}) class TestBuildRequestsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = buildrequests.BuildRequestsEndpoint resourceTypeClass = buildrequests.BuildRequest CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC) CLAIMED_AT_EPOCH = 266761875 SUBMITTED_AT = datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC) SUBMITTED_AT_EPOCH = 298297875 COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC) COMPLETE_AT_EPOCH = 329920275 def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77, name='bbb'), fakedb.Builder(id=78, name='ccc'), fakedb.Builder(id=79, name='ddd'), fakedb.Master(id=fakedb.FakeBuildRequestsComponent.MASTER_ID), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=44, buildsetid=8822, builderid=77, priority=7, submitted_at=self.SUBMITTED_AT_EPOCH, waited_for=1), fakedb.BuildRequest(id=45, buildsetid=8822, builderid=77), fakedb.BuildRequest(id=46, buildsetid=8822, builderid=78), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def testGetAll(self): buildrequests = yield self.callGet(('buildrequests',)) [self.validateData(br) for br in buildrequests] self.assertEqual(sorted([br['buildrequestid'] for br in buildrequests]), [44, 45, 46]) @defer.inlineCallbacks def testGetNoBuildRequest(self): buildrequests = yield self.callGet(('builders', 79, 'buildrequests')) self.assertEqual(buildrequests, []) @defer.inlineCallbacks def testGetBuilderid(self): buildrequests = yield self.callGet(('builders', 78, 'buildrequests')) [self.validateData(br) for br in buildrequests] self.assertEqual( sorted([br['buildrequestid'] for br in buildrequests]), [46]) @defer.inlineCallbacks def testGetUnknownBuilderid(self): buildrequests = yield self.callGet(('builders', 79, 'buildrequests')) 
self.assertEqual(buildrequests, []) @defer.inlineCallbacks def testGetProperties(self): self.master.db.insertTestData([ fakedb.BuildsetProperty(buildsetid=8822, property_name='prop1', property_value='["one", "fake1"]'), fakedb.BuildsetProperty(buildsetid=8822, property_name='prop2', property_value='["two", "fake2"]'), ]) prop = resultspec.Property(b'property', 'eq', '*') buildrequests = yield self.callGet(('builders', 78, 'buildrequests'), resultSpec=resultspec.ResultSpec(properties=[prop])) self.assertEqual(len(buildrequests), 1) self.assertEqual(buildrequests[0]['buildrequestid'], 46) self.assertEqual(buildrequests[0]['properties'], {'prop1': ('one', 'fake1'), 'prop2': ('two', 'fake2')}) @defer.inlineCallbacks def testGetNoFilters(self): getBuildRequestsMock = mock.Mock(return_value={}) self.patch( self.master.db.buildrequests, 'getBuildRequests', getBuildRequestsMock) yield self.callGet(('buildrequests',)) getBuildRequestsMock.assert_called_with( builderid=None, bsid=None, complete=None, claimed=None, resultSpec=resultspec.ResultSpec()) @defer.inlineCallbacks def testGetFilters(self): getBuildRequestsMock = mock.Mock(return_value={}) self.patch( self.master.db.buildrequests, 'getBuildRequests', getBuildRequestsMock) f1 = resultspec.Filter('complete', 'eq', [False]) f2 = resultspec.Filter('claimed', 'eq', [True]) f3 = resultspec.Filter('buildsetid', 'eq', [55]) f4 = resultspec.Filter('branch', 'eq', ['mybranch']) f5 = resultspec.Filter('repository', 'eq', ['myrepo']) yield self.callGet( ('buildrequests',), resultSpec=resultspec.ResultSpec(filters=[f1, f2, f3, f4, f5])) getBuildRequestsMock.assert_called_with( builderid=None, bsid=55, complete=False, claimed=True, resultSpec=resultspec.ResultSpec(filters=[f4, f5])) @defer.inlineCallbacks def testGetClaimedByMasterIdFilters(self): getBuildRequestsMock = mock.Mock(return_value={}) self.patch( self.master.db.buildrequests, 'getBuildRequests', getBuildRequestsMock) f1 = resultspec.Filter('claimed', 'eq', [True]) f2 = 
resultspec.Filter('claimed_by_masterid', 'eq', [fakedb.FakeBuildRequestsComponent.MASTER_ID]) yield self.callGet( ('buildrequests',), resultSpec=resultspec.ResultSpec(filters=[f1, f2])) getBuildRequestsMock.assert_called_with( builderid=None, bsid=None, complete=None, claimed=fakedb.FakeBuildRequestsComponent.MASTER_ID, resultSpec=resultspec.ResultSpec(filters=[f1])) @defer.inlineCallbacks def testGetSortedLimit(self): yield self.master.db.buildrequests.completeBuildRequests([44], 1) res = yield self.callGet( ('buildrequests',), resultSpec=resultspec.ResultSpec(order=['results'], limit=2)) self.assertEqual(len(res), 2) self.assertEqual(res[0]['results'], -1) res = yield self.callGet( ('buildrequests',), resultSpec=resultspec.ResultSpec(order=['-results'], limit=2)) self.assertEqual(len(res), 2) self.assertEqual(res[0]['results'], 1) class TestBuildRequest(interfaces.InterfaceTests, TestReactorMixin, unittest.TestCase): CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC) COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC) class dBLayerException(Exception): pass def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = buildrequests.BuildRequest(self.master) @defer.inlineCallbacks def doTestCallthrough(self, dbMethodName, dbMockedMethod, method, methodargs=None, methodkwargs=None, expectedRes=None, expectedException=None, expectedDbApiCalled=True): self.patch(self.master.db.buildrequests, dbMethodName, dbMockedMethod) if expectedException is not None: try: yield method(*methodargs, **methodkwargs) except expectedException: pass except Exception as e: self.fail('{} exception should be raised, but got {}'.format(expectedException, repr(e))) else: self.fail('{} exception should be raised'.format(expectedException)) else: res = yield method(*methodargs, **methodkwargs) self.assertEqual(res, expectedRes) if expectedDbApiCalled: 
dbMockedMethod.assert_called_with(*methodargs, **methodkwargs) def testSignatureClaimBuildRequests(self): @self.assertArgSpecMatches( self.master.data.updates.claimBuildRequests, # fake self.rtype.claimBuildRequests) # real def claimBuildRequests(self, brids, claimed_at=None): pass @defer.inlineCallbacks def testFakeDataClaimBuildRequests(self): self.master.db.insertTestData([ fakedb.BuildRequest(id=44, buildsetid=8822), fakedb.BuildRequest(id=55, buildsetid=8822), ]) res = yield self.master.data.updates.claimBuildRequests( [44, 55], claimed_at=self.CLAIMED_AT) self.assertTrue(res) @defer.inlineCallbacks def testFakeDataClaimBuildRequestsNoneArgs(self): res = yield self.master.data.updates.claimBuildRequests([]) self.assertTrue(res) @defer.inlineCallbacks def testClaimBuildRequests(self): self.master.db.insertTestData([ fakedb.Builder(id=123), fakedb.BuildRequest(id=44, buildsetid=8822, builderid=123), fakedb.BuildRequest(id=55, buildsetid=8822, builderid=123), ]) claimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock, self.rtype.claimBuildRequests, methodargs=[[44]], methodkwargs=dict(claimed_at=self.CLAIMED_AT), expectedRes=True, expectedException=None) msg = { 'buildrequestid': 44, 'complete_at': None, 'complete': False, 'builderid': 123, 'waited_for': False, 'claimed_at': None, 'results': -1, 'priority': 0, 'submitted_at': datetime.datetime(1970, 5, 23, 21, 21, 18, tzinfo=UTC), 'claimed': False, 'claimed_by_masterid': None, 'buildsetid': 8822, 'properties': None, } self.assertEqual(sorted(self.master.mq.productions), sorted([ (('buildrequests', '44', 'claimed'), msg), (('builders', '123', 'buildrequests', '44', 'claimed'), msg), (('buildsets', '8822', 'builders', '123', 'buildrequests', '44', 'claimed'), msg), ])) @defer.inlineCallbacks def testClaimBuildRequestsNoBrids(self): claimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield 
self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock, self.rtype.claimBuildRequests, methodargs=[[]], methodkwargs=dict(), expectedRes=True, expectedException=None, expectedDbApiCalled=False) self.assertEqual(self.master.mq.productions, []) @defer.inlineCallbacks def testClaimBuildRequestsAlreadyClaimed(self): claimBuildRequestsMock = mock.Mock( side_effect=buildrequests.AlreadyClaimedError('oups ! buildrequest already claimed')) yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock, self.rtype.claimBuildRequests, methodargs=[[44]], methodkwargs=dict(claimed_at=self.CLAIMED_AT), expectedRes=False, expectedException=None) self.assertEqual(self.master.mq.productions, []) @defer.inlineCallbacks def testClaimBuildRequestsUnknownException(self): claimBuildRequestsMock = mock.Mock( side_effect=self.dBLayerException('oups ! unknown error')) yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock, self.rtype.claimBuildRequests, methodargs=[[44]], methodkwargs=dict(claimed_at=self.CLAIMED_AT), expectedRes=None, expectedException=self.dBLayerException) self.assertEqual(self.master.mq.productions, []) def testSignatureUnclaimBuildRequests(self): @self.assertArgSpecMatches( self.master.data.updates.unclaimBuildRequests, # fake self.rtype.unclaimBuildRequests) # real def unclaimBuildRequests(self, brids): pass @defer.inlineCallbacks def testFakeDataUnclaimBuildRequests(self): res = yield self.master.data.updates.unclaimBuildRequests([44, 55]) self.assertEqual(res, None) @defer.inlineCallbacks def testFakeDataUnclaimBuildRequestsNoneArgs(self): res = yield self.master.data.updates.unclaimBuildRequests([]) self.assertEqual(res, None) @defer.inlineCallbacks def testUnclaimBuildRequests(self): self.master.db.insertTestData([ fakedb.Builder(id=123), fakedb.BuildRequest(id=44, buildsetid=8822, builderid=123), ]) unclaimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield 
self.doTestCallthrough('unclaimBuildRequests', unclaimBuildRequestsMock, self.rtype.unclaimBuildRequests, methodargs=[[44]], methodkwargs=dict(), expectedRes=None, expectedException=None) msg = { 'buildrequestid': 44, 'complete_at': None, 'complete': False, 'builderid': 123, 'waited_for': False, 'claimed_at': None, 'results': -1, 'priority': 0, 'submitted_at': datetime.datetime(1970, 5, 23, 21, 21, 18, tzinfo=UTC), 'claimed': False, 'claimed_by_masterid': None, 'buildsetid': 8822, 'properties': None, } self.assertEqual(sorted(self.master.mq.productions), sorted([ (('buildrequests', '44', 'unclaimed'), msg), (('builders', '123', 'buildrequests', '44', 'unclaimed'), msg), (('buildsets', '8822', 'builders', '123', 'buildrequests', '44', 'unclaimed'), msg), ])) @defer.inlineCallbacks def testUnclaimBuildRequestsNoBrids(self): unclaimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield self.doTestCallthrough('unclaimBuildRequests', unclaimBuildRequestsMock, self.rtype.unclaimBuildRequests, methodargs=[[]], methodkwargs=dict(), expectedRes=None, expectedException=None, expectedDbApiCalled=False) def testSignatureCompleteBuildRequests(self): @self.assertArgSpecMatches( self.master.data.updates.completeBuildRequests, # fake self.rtype.completeBuildRequests) # real def completeBuildRequests(self, brids, results, complete_at=None): pass @defer.inlineCallbacks def testFakeDataCompleteBuildRequests(self): res = yield self.master.data.updates.completeBuildRequests( [44, 55], 12, complete_at=self.COMPLETE_AT) self.assertTrue(res) @defer.inlineCallbacks def testFakeDataCompleteBuildRequestsNoneArgs(self): res = yield self.master.data.updates.completeBuildRequests([], 0) self.assertTrue(res) @defer.inlineCallbacks def testCompleteBuildRequests(self): completeBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield self.doTestCallthrough('completeBuildRequests', completeBuildRequestsMock, self.rtype.completeBuildRequests, methodargs=[[46], 12], 
methodkwargs=dict(complete_at=self.COMPLETE_AT), expectedRes=True, expectedException=None) @defer.inlineCallbacks def testCompleteBuildRequestsNoBrids(self): completeBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield self.doTestCallthrough('completeBuildRequests', completeBuildRequestsMock, self.rtype.completeBuildRequests, methodargs=[[], 0], methodkwargs=dict(), expectedRes=True, expectedException=None, expectedDbApiCalled=False) @defer.inlineCallbacks def testCompleteBuildRequestsNotClaimed(self): completeBuildRequestsMock = mock.Mock( side_effect=buildrequests.NotClaimedError('oups ! buildrequest not claimed')) yield self.doTestCallthrough('completeBuildRequests', completeBuildRequestsMock, self.rtype.completeBuildRequests, methodargs=[[46], 12], methodkwargs=dict(complete_at=self.COMPLETE_AT), expectedRes=False, expectedException=None) @defer.inlineCallbacks def testCompleteBuildRequestsUnknownException(self): completeBuildRequestsMock = mock.Mock( side_effect=self.dBLayerException('oups ! 
unknown error')) yield self.doTestCallthrough('completeBuildRequests', completeBuildRequestsMock, self.rtype.completeBuildRequests, methodargs=[[46], 12], methodkwargs=dict(complete_at=self.COMPLETE_AT), expectedRes=None, expectedException=self.dBLayerException) @defer.inlineCallbacks def testRebuildBuildrequest(self): self.master.db.insertTestData([ fakedb.Builder(id=77, name='builder'), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.SourceStamp(id=234), fakedb.BuildsetSourceStamp(buildsetid=8822, sourcestampid=234), fakedb.BuildRequest(id=82, buildsetid=8822, builderid=77), fakedb.BuildsetProperty(buildsetid=8822, property_name='prop1', property_value='["one", "fake1"]'), fakedb.BuildsetProperty(buildsetid=8822, property_name='prop2', property_value='["two", "fake2"]'), ]) buildrequest = yield self.master.data.get(('buildrequests', 82)) new_bsid, brid_dict = yield self.rtype.rebuildBuildrequest(buildrequest) self.assertEqual(list(brid_dict.keys()), [77]) buildrequest = yield self.master.data.get(('buildrequests', brid_dict[77])) # submitted_at is the time of the test, so better not depend on it self.assertEqual(buildrequest, {'buildrequestid': 1001, 'complete': False, 'waited_for': False, 'claimed_at': None, 'results': -1, 'claimed': False, 'buildsetid': 200, 'complete_at': None, 'submitted_at': epoch2datetime(0), 'builderid': 77, 'claimed_by_masterid': None, 'priority': 0, 'properties': None}) buildset = yield self.master.data.get(('buildsets', new_bsid)) oldbuildset = yield self.master.data.get(('buildsets', 8822)) # assert same sourcestamp self.assertEqual(buildset['sourcestamps'], oldbuildset['sourcestamps']) buildset['sourcestamps'] = None self.assertEqual(buildset, {'bsid': 200, 'complete_at': None, 'submitted_at': 0, 'sourcestamps': None, 'parent_buildid': None, 'results': -1, 'parent_relationship': None, 'reason': 'rebuild', 'external_idstring': 'extid', 'complete': False}) properties = yield 
self.master.data.get(('buildsets', new_bsid, 'properties')) self.assertEqual( properties, {'prop1': ('one', 'fake1'), 'prop2': ('two', 'fake2')}) buildbot-3.4.0/master/buildbot/test/unit/data/test_builds.py000066400000000000000000000346741413250514000241740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import builds from buildbot.data import resultspec from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime class BuildEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = builds.BuildEndpoint resourceTypeClass = builds.Build def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77, name='builder77'), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822, builderid=77), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Build(id=14, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=4), fakedb.Build(id=15, builderid=77, masterid=88, 
workerid=13, buildrequestid=82, number=5), fakedb.BuildProperty(buildid=13, name='reason', value='"force build"', source="Force Build Form"), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): build = yield self.callGet(('builds', 14)) self.validateData(build) self.assertEqual(build['number'], 4) @defer.inlineCallbacks def test_get_missing(self): build = yield self.callGet(('builds', 9999)) self.assertEqual(build, None) @defer.inlineCallbacks def test_get_missing_builder_number(self): build = yield self.callGet(('builders', 999, 'builds', 4)) self.assertEqual(build, None) @defer.inlineCallbacks def test_get_builder_missing_number(self): build = yield self.callGet(('builders', 77, 'builds', 44)) self.assertEqual(build, None) @defer.inlineCallbacks def test_get_builder_number(self): build = yield self.callGet(('builders', 77, 'builds', 5)) self.validateData(build) self.assertEqual(build['buildid'], 15) @defer.inlineCallbacks def test_get_buildername_number(self): build = yield self.callGet(('builders', 'builder77', 'builds', 5)) self.validateData(build) self.assertEqual(build['buildid'], 15) @defer.inlineCallbacks def test_get_buildername_not_existing_number(self): build = yield self.callGet(('builders', 'builder77_nope', 'builds', 5)) self.assertEqual(build, None) @defer.inlineCallbacks def test_properties_injection(self): resultSpec = resultspec.OptimisedResultSpec( properties=[resultspec.Property(b'property', 'eq', 'reason')]) build = yield self.callGet(('builders', 77, 'builds', 3), resultSpec=resultSpec) self.validateData(build) self.assertIn('reason', build['properties']) @defer.inlineCallbacks def test_action_stop(self): yield self.callControl("stop", {}, ('builders', 77, 'builds', 5)) self.master.mq.assertProductions( [(('control', 'builds', '15', 'stop'), {'reason': 'no reason'})]) @defer.inlineCallbacks def test_action_stop_reason(self): yield self.callControl("stop", {'reason': 'because'}, ('builders', 77, 
'builds', 5)) self.master.mq.assertProductions( [(('control', 'builds', '15', 'stop'), {'reason': 'because'})]) @defer.inlineCallbacks def test_action_rebuild(self): self.patch(self.master.data.updates, "rebuildBuildrequest", mock.Mock(spec=self.master.data.updates.rebuildBuildrequest, return_value=(1, [2]))) r = yield self.callControl("rebuild", {}, ('builders', 77, 'builds', 5)) self.assertEqual(r, (1, [2])) buildrequest = yield self.master.data.get(('buildrequests', 82)) self.master.data.updates.rebuildBuildrequest.assert_called_with( buildrequest) class BuildsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = builds.BuildsEndpoint resourceTypeClass = builds.Build def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77, name='builder77'), fakedb.Builder(id=78, name='builder78'), fakedb.Builder(id=79, name='builder79'), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Build(id=14, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=4), fakedb.Build(id=15, builderid=78, masterid=88, workerid=12, buildrequestid=83, number=5, complete_at=1), fakedb.Build(id=16, builderid=79, masterid=88, workerid=12, buildrequestid=84, number=6, complete_at=1), fakedb.BuildProperty(buildid=13, name='reason', value='"force build"', source="Force Build Form"), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_all(self): builds = yield self.callGet(('builds',)) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4, 5, 6]) @defer.inlineCallbacks def test_get_builder(self): builds = yield self.callGet(('builders', 78, 'builds')) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [5]) @defer.inlineCallbacks def 
test_get_buildername(self): builds = yield self.callGet(('builders', 'builder78', 'builds')) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [5]) @defer.inlineCallbacks def test_get_buildername_not_existing(self): builds = yield self.callGet(('builders', 'builder78_nope', 'builds')) self.assertEqual(builds, []) @defer.inlineCallbacks def test_get_buildrequest(self): builds = yield self.callGet(('buildrequests', 82, 'builds')) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_get_buildrequest_not_existing(self): builds = yield self.callGet(('buildrequests', 899, 'builds')) self.assertEqual(builds, []) @defer.inlineCallbacks def test_get_buildrequest_via_filter(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('buildrequestid', 'eq', [82])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_get_buildrequest_via_filter_with_string(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('buildrequestid', 'eq', ['82'])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_get_worker(self): builds = yield self.callGet(('workers', 13, 'builds')) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_get_complete(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('complete', 'eq', [False])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), 
[3, 4]) @defer.inlineCallbacks def test_get_complete_at(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('complete_at', 'eq', [None])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_properties_injection(self): resultSpec = resultspec.OptimisedResultSpec( properties=[resultspec.Property(b'property', 'eq', 'reason')]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(build) for build in builds] self.assertTrue(any([('reason' in b['properties']) for b in builds])) @defer.inlineCallbacks def test_get_filter_eq(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('builderid', 'eq', [78, 79])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(b) for b in builds] self.assertEqual(sorted([b['number'] for b in builds]), [5, 6]) @defer.inlineCallbacks def test_get_filter_ne(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('builderid', 'ne', [78, 79])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(b) for b in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) class Build(interfaces.InterfaceTests, TestReactorMixin, unittest.TestCase): new_build_event = {'builderid': 10, 'buildid': 100, 'buildrequestid': 13, 'workerid': 20, 'complete': False, 'complete_at': None, 'masterid': 824, 'number': 1, 'results': None, 'started_at': epoch2datetime(1), 'state_string': 'created', 'properties': {}} def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = builds.Build(self.master) @defer.inlineCallbacks def do_test_callthrough(self, dbMethodName, method, exp_args=None, exp_kwargs=None, *args, **kwargs): rv = (1, 2) m = 
mock.Mock(return_value=defer.succeed(rv)) setattr(self.master.db.builds, dbMethodName, m) res = yield method(*args, **kwargs) self.assertIdentical(res, rv) m.assert_called_with(*(exp_args or args), **(exp_kwargs or kwargs)) @defer.inlineCallbacks def do_test_event(self, method, exp_events=None, *args, **kwargs): self.reactor.advance(1) if exp_events is None: exp_events = [] yield method(*args, **kwargs) self.master.mq.assertProductions(exp_events) def test_signature_addBuild(self): @self.assertArgSpecMatches( self.master.data.updates.addBuild, # fake self.rtype.addBuild) # real def addBuild(self, builderid, buildrequestid, workerid): pass def test_addBuild(self): return self.do_test_callthrough('addBuild', self.rtype.addBuild, builderid=10, buildrequestid=13, workerid=20, exp_kwargs=dict(builderid=10, buildrequestid=13, workerid=20, masterid=self.master.masterid, state_string='created')) def test_addBuildEvent(self): @defer.inlineCallbacks def addBuild(*args, **kwargs): buildid, _ = yield self.rtype.addBuild(*args, **kwargs) yield self.rtype.generateNewBuildEvent(buildid) return None return self.do_test_event(addBuild, builderid=10, buildrequestid=13, workerid=20, exp_events=[(('builders', '10', 'builds', '1', 'new'), self.new_build_event), (('builds', '100', 'new'), self.new_build_event), (('workers', '20', 'builds', '100', 'new'), self.new_build_event)]) def test_signature_setBuildStateString(self): @self.assertArgSpecMatches( self.master.data.updates.setBuildStateString, # fake self.rtype.setBuildStateString) # real def setBuildStateString(self, buildid, state_string): pass def test_setBuildStateString(self): return self.do_test_callthrough('setBuildStateString', self.rtype.setBuildStateString, buildid=10, state_string='a b') def test_signature_finishBuild(self): @self.assertArgSpecMatches( self.master.data.updates.finishBuild, # fake self.rtype.finishBuild) # real def finishBuild(self, buildid, results): pass def test_finishBuild(self): return 
self.do_test_callthrough('finishBuild', self.rtype.finishBuild, buildid=15, results=3) buildbot-3.4.0/master/buildbot/test/unit/data/test_buildsets.py000066400000000000000000000367621413250514000247100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from zope.interface import implementer from buildbot import interfaces from buildbot.data import buildsets from buildbot.data import resultspec from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces as util_interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime A_TIMESTAMP = 1341700729 A_TIMESTAMP_EPOCH = epoch2datetime(A_TIMESTAMP) EARLIER = 1248529376 EARLIER_EPOCH = epoch2datetime(EARLIER) class BuildsetEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = buildsets.BuildsetEndpoint resourceTypeClass = buildsets.Buildset def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Buildset(id=13, reason='because I said so'), fakedb.SourceStamp(id=92), fakedb.SourceStamp(id=93), 
fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=92), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=93), fakedb.Buildset(id=14, reason='no sourcestamps'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): buildset = yield self.callGet(('buildsets', 13)) self.validateData(buildset) self.assertEqual(buildset['reason'], 'because I said so') @defer.inlineCallbacks def test_get_existing_no_sourcestamps(self): buildset = yield self.callGet(('buildsets', 14)) self.validateData(buildset) self.assertEqual(buildset['sourcestamps'], []) @defer.inlineCallbacks def test_get_missing(self): buildset = yield self.callGet(('buildsets', 99)) self.assertEqual(buildset, None) class BuildsetsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = buildsets.BuildsetsEndpoint resourceTypeClass = buildsets.Buildset def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.SourceStamp(id=92), fakedb.Buildset(id=13, complete=True), fakedb.Buildset(id=14, complete=False), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=92), fakedb.BuildsetSourceStamp(buildsetid=14, sourcestampid=92), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): buildsets = yield self.callGet(('buildsets',)) self.validateData(buildsets[0]) self.assertEqual(buildsets[0]['bsid'], 13) self.validateData(buildsets[1]) self.assertEqual(buildsets[1]['bsid'], 14) @defer.inlineCallbacks def test_get_complete(self): f = resultspec.Filter('complete', 'eq', [True]) buildsets = yield self.callGet(('buildsets',), resultSpec=resultspec.ResultSpec(filters=[f])) self.assertEqual(len(buildsets), 1) self.validateData(buildsets[0]) self.assertEqual(buildsets[0]['bsid'], 13) @defer.inlineCallbacks def test_get_incomplete(self): f = resultspec.Filter('complete', 'eq', [False]) buildsets = yield self.callGet(('buildsets',), resultSpec=resultspec.ResultSpec(filters=[f])) self.assertEqual(len(buildsets), 1) 
self.validateData(buildsets[0]) self.assertEqual(buildsets[0]['bsid'], 14) class Buildset(TestReactorMixin, util_interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = buildsets.Buildset(self.master) return self.master.db.insertTestData([ fakedb.SourceStamp(id=234, branch='br', codebase='cb', project='pr', repository='rep', revision='rev', created_at=89834834), fakedb.Builder(id=42, name='bldr1'), fakedb.Builder(id=43, name='bldr2'), ]) SS234_DATA = {'branch': 'br', 'codebase': 'cb', 'patch': None, 'project': 'pr', 'repository': 'rep', 'revision': 'rev', 'created_at': epoch2datetime(89834834), 'ssid': 234} def test_signature_addBuildset(self): @self.assertArgSpecMatches( self.master.data.updates.addBuildset, # fake self.rtype.addBuildset) # real def addBuildset(self, waited_for, scheduler=None, sourcestamps=None, reason='', properties=None, builderids=None, external_idstring=None, parent_buildid=None, parent_relationship=None): pass @defer.inlineCallbacks def do_test_addBuildset(self, kwargs, expectedReturn, expectedMessages, expectedBuildset): """Run a test of addBuildset. @param kwargs: kwargs to addBuildset @param expectedReturn: expected return value - tuple of (bsid, brids) @param expectedMessages: expected mq messages transmitted @param expectedBuildset: expected buildset inserted into the db The buildset is added at time A_TIMESTAMP. Note that addBuildset does not add sourcestamps, so this method assumes there are none in the db. 
""" self.reactor.advance(A_TIMESTAMP) (bsid, brids) = yield self.rtype.addBuildset(**kwargs) self.assertEqual((bsid, brids), expectedReturn) # check the correct message was received self.master.mq.assertProductions( expectedMessages, orderMatters=False) # and that the correct data was inserted into the db self.master.db.buildsets.assertBuildset(bsid, expectedBuildset) def _buildRequestMessageDict(self, brid, bsid, builderid): return {'builderid': builderid, 'buildrequestid': brid, 'buildsetid': bsid, 'claimed': False, 'claimed_at': None, 'claimed_by_masterid': None, 'complete': False, 'complete_at': None, 'priority': 0, 'results': -1, 'submitted_at': epoch2datetime(A_TIMESTAMP), 'waited_for': True, 'properties': None} def _buildRequestMessage1(self, brid, bsid, builderid): return ( ('buildsets', str(bsid), 'builders', str(builderid), 'buildrequests', str(brid), 'new'), self._buildRequestMessageDict(brid, bsid, builderid)) def _buildRequestMessage2(self, brid, bsid, builderid): return ( ('buildrequests', str(brid), 'new'), self._buildRequestMessageDict(brid, bsid, builderid)) def _buildRequestMessage3(self, brid, bsid, builderid): return ( ('builders', str(builderid), 'buildrequests', str(brid), 'new'), self._buildRequestMessageDict(brid, bsid, builderid)) def _buildsetMessage(self, bsid, external_idstring='extid', reason='because', scheduler='fakesched', sourcestampids=None, submitted_at=A_TIMESTAMP): if sourcestampids is None: sourcestampids = [234] ssmap = {234: self.SS234_DATA} return ( ('buildsets', str(bsid), 'new'), dict(bsid=bsid, complete=False, complete_at=None, external_idstring=external_idstring, reason=reason, results=None, scheduler=scheduler, sourcestamps=[ssmap[ssid] for ssid in sourcestampids], submitted_at=submitted_at)) def _buildsetCompleteMessage(self, bsid, complete_at=A_TIMESTAMP_EPOCH, submitted_at=A_TIMESTAMP_EPOCH, external_idstring='extid', reason='because', results=0, sourcestampids=None): if sourcestampids is None: sourcestampids = [234] 
ssmap = {234: self.SS234_DATA} return ( ('buildsets', str(bsid), 'complete'), dict(bsid=bsid, complete=True, complete_at=complete_at, external_idstring=external_idstring, reason=reason, results=results, submitted_at=submitted_at, sourcestamps=[ssmap[ssid] for ssid in sourcestampids])) def test_addBuildset_two_builderNames(self): @implementer(interfaces.IScheduler) class FakeSched: name = 'fakesched' kwargs = dict(scheduler='fakesched', reason='because', sourcestamps=[234], external_idstring='extid', builderids=[42, 43], waited_for=True) expectedReturn = (200, {42: 1000, 43: 1001}) expectedMessages = [ self._buildRequestMessage1(1000, 200, 42), self._buildRequestMessage2(1000, 200, 42), self._buildRequestMessage3(1000, 200, 42), self._buildRequestMessage1(1001, 200, 43), self._buildRequestMessage2(1001, 200, 43), self._buildRequestMessage3(1001, 200, 43), self._buildsetMessage(200), ] expectedBuildset = dict(reason='because', properties={}, external_idstring='extid') return self.do_test_addBuildset(kwargs, expectedReturn, expectedMessages, expectedBuildset) def test_addBuildset_no_builderNames(self): @implementer(interfaces.IScheduler) class FakeSched: name = 'fakesched' kwargs = dict(scheduler='fakesched', reason='because', sourcestamps=[234], external_idstring='extid', waited_for=False) expectedReturn = (200, {}) expectedMessages = [ self._buildsetMessage(200), # with no builderNames, this is done already self._buildsetCompleteMessage(200), ] expectedBuildset = dict(reason='because', properties={}, external_idstring='extid') return self.do_test_addBuildset(kwargs, expectedReturn, expectedMessages, expectedBuildset) def test_signature_maybeBuildsetComplete(self): @self.assertArgSpecMatches( self.master.data.updates.maybeBuildsetComplete, # fake self.rtype.maybeBuildsetComplete) # real def maybeBuildsetComplete(self, bsid): pass @defer.inlineCallbacks def do_test_maybeBuildsetComplete(self, buildRequestCompletions=None, buildRequestResults=None, 
buildsetComplete=False, expectComplete=False, expectMessage=False, expectSuccess=True): """Test maybeBuildsetComplete. @param buildRequestCompletions: dict mapping brid to True if complete, else False (and defaulting to False) @param buildRequestResults: dict mapping brid to result (defaulting to SUCCESS) @param buildsetComplete: true if the buildset is already complete @param expectComplete: true if the buildset should be complete at exit @param expectMessage: true if a buildset completion message is expected @param expectSuccess: if expectComplete, whether to expect the buildset to be complete This first adds two buildsets to the database - 72 and 73. Buildset 72 is already complete if buildsetComplete is true; 73 is not complete. It adds four buildrequests - 42, 43, and 44 for buildset 72, and 45 for buildset 73. The completion and results are based on buidlRequestCompletions and buildRequestResults. Then, maybeBuildsetComplete is called for buildset 72, and the expectations are checked. 
""" if buildRequestCompletions is None: buildRequestCompletions = {} if buildRequestResults is None: buildRequestResults = {} self.reactor.advance(A_TIMESTAMP) def mkbr(brid, bsid=72): return fakedb.BuildRequest(id=brid, buildsetid=bsid, builderid=42, complete=buildRequestCompletions.get( brid), results=buildRequestResults.get(brid, SUCCESS)) yield self.master.db.insertTestData([ fakedb.Builder(id=42, name='bldr1'), fakedb.Buildset(id=72, submitted_at=EARLIER, complete=buildsetComplete, complete_at=A_TIMESTAMP if buildsetComplete else None), mkbr(42), mkbr(43), mkbr(44), fakedb.BuildsetSourceStamp(buildsetid=72, sourcestampid=234), fakedb.Buildset(id=73, complete=False), mkbr(45, bsid=73), fakedb.BuildsetSourceStamp(buildsetid=73, sourcestampid=234), ]) yield self.rtype.maybeBuildsetComplete(72) self.master.db.buildsets.assertBuildsetCompletion(72, expectComplete) if expectMessage: self.assertEqual(self.master.mq.productions, [ self._buildsetCompleteMessage(72, results=SUCCESS if expectSuccess else FAILURE, submitted_at=EARLIER_EPOCH), ]) else: self.assertEqual(self.master.mq.productions, []) def test_maybeBuildsetComplete_not_yet(self): # only brid 42 is complete, so the buildset is not complete return self.do_test_maybeBuildsetComplete( buildRequestCompletions={42: True}) def test_maybeBuildsetComplete_complete(self): return self.do_test_maybeBuildsetComplete( buildRequestCompletions={42: True, 43: True, 44: True}, expectComplete=True, expectMessage=True) def test_maybeBuildsetComplete_complete_failure(self): return self.do_test_maybeBuildsetComplete( buildRequestCompletions={42: True, 43: True, 44: True}, buildRequestResults={43: FAILURE}, expectComplete=True, expectMessage=True, expectSuccess=False) def test_maybeBuildsetComplete_already_complete(self): return self.do_test_maybeBuildsetComplete( buildRequestCompletions={42: True, 43: True, 44: True}, buildsetComplete=True, expectComplete=True, expectMessage=False) 
buildbot-3.4.0/master/buildbot/test/unit/data/test_changes.py000066400000000000000000000405531413250514000243130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import changes from buildbot.data import resultspec from buildbot.process.users import users from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime class ChangeEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = changes.ChangeEndpoint resourceTypeClass = changes.Change def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.SourceStamp(id=234), fakedb.Change(changeid=13, branch='trunk', revision='9283', repository='svn://...', codebase='cbsvn', project='world-domination', sourcestampid=234), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): change = yield self.callGet(('changes', '13')) self.validateData(change) self.assertEqual(change['project'], 'world-domination') @defer.inlineCallbacks def test_get_missing(self): change = yield self.callGet(('changes', '99')) 
self.assertEqual(change, None) class ChangesEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = changes.ChangesEndpoint resourceTypeClass = changes.Change def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.SourceStamp(id=133), fakedb.Change(changeid=13, branch='trunk', revision='9283', repository='svn://...', codebase='cbsvn', project='world-domination', sourcestampid=133), fakedb.SourceStamp(id=144), fakedb.Change(changeid=14, branch='devel', revision='9284', repository='svn://...', codebase='cbsvn', project='world-domination', sourcestampid=144), fakedb.Build(buildrequestid=1, masterid=1, workerid=1, builderid=1), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): changes = yield self.callGet(('changes',)) self.validateData(changes[0]) self.assertEqual(changes[0]['changeid'], 13) self.validateData(changes[1]) self.assertEqual(changes[1]['changeid'], 14) @defer.inlineCallbacks def test_getChanges_from_build(self): fake_change = yield self.db.changes.getChangeFromSSid(ssid=144) mockGetChangeById = mock.Mock(spec=self.db.changes.getChangesForBuild, return_value=[fake_change]) self.patch(self.db.changes, 'getChangesForBuild', mockGetChangeById) changes = yield self.callGet(('builds', '1', 'changes')) self.validateData(changes[0]) self.assertEqual(changes[0]['changeid'], 14) @defer.inlineCallbacks def test_getChanges_from_builder(self): fake_change = yield self.db.changes.getChangeFromSSid(ssid=144) mockGetChangeById = mock.Mock(spec=self.db.changes.getChangesForBuild, return_value=[fake_change]) self.patch(self.db.changes, 'getChangesForBuild', mockGetChangeById) fake_build = yield {'id': 1} mockGetBuildByNumber = mock.Mock(spec=self.db.builds.getBuildByNumber, return_value=fake_build) self.patch(self.db.builds, 'getBuildByNumber', mockGetBuildByNumber) changes = yield self.callGet(('builders', '1', 'builds', '1', 'changes')) self.validateData(changes[0]) 
self.assertEqual(changes[0]['changeid'], 14) @defer.inlineCallbacks def test_getChanges_recent(self): resultSpec = resultspec.ResultSpec(limit=1, order=('-changeid',)) changes = yield self.callGet(('changes',), resultSpec=resultSpec) self.validateData(changes[0]) self.assertEqual(changes[0]['changeid'], 14) self.assertEqual(len(changes), 1) @defer.inlineCallbacks def test_getChangesOtherOrder(self): resultSpec = resultspec.ResultSpec(limit=1, order=('-when_time_stamp',)) changes = yield self.callGet(('changes',), resultSpec=resultSpec) self.assertEqual(len(changes), 1) @defer.inlineCallbacks def test_getChangesOtherOffset(self): resultSpec = resultspec.ResultSpec( limit=1, offset=1, order=('-changeid',)) changes = yield self.callGet(('changes',), resultSpec=resultSpec) self.assertEqual(len(changes), 1) class Change(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): changeEvent = { 'author': 'warner', 'committer': 'david', 'branch': 'warnerdb', 'category': 'devel', 'codebase': '', 'comments': 'fix whitespace', 'changeid': 500, 'files': ['master/buildbot/__init__.py'], 'parent_changeids': [], 'project': 'Buildbot', 'properties': {'foo': (20, 'Change')}, 'repository': 'git://warner', 'revision': '0e92a098b', 'revlink': 'http://warner/0e92a098b', 'when_timestamp': 256738404, 'sourcestamp': { 'branch': 'warnerdb', 'codebase': '', 'patch': None, 'project': 'Buildbot', 'repository': 'git://warner', 'revision': '0e92a098b', 'created_at': epoch2datetime(10000000), 'ssid': 100, }, # uid } def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = changes.Change(self.master) def test_signature_addChange(self): @self.assertArgSpecMatches( self.master.data.updates.addChange, # fake self.rtype.addChange) # real def addChange(self, files=None, comments=None, author=None, committer=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, 
repository='', codebase=None, project='', src=None): pass @defer.inlineCallbacks def do_test_addChange(self, kwargs, expectedRoutingKey, expectedMessage, expectedRow, expectedChangeUsers=None): if expectedChangeUsers is None: expectedChangeUsers = [] self.reactor.advance(10000000) changeid = yield self.rtype.addChange(**kwargs) self.assertEqual(changeid, 500) # check the correct message was received self.master.mq.assertProductions([ (expectedRoutingKey, expectedMessage), ]) # and that the correct data was inserted into the db self.master.db.changes.assertChange(500, expectedRow) self.master.db.changes.assertChangeUsers(500, expectedChangeUsers) def test_addChange(self): # src and codebase are default here kwargs = dict(author='warner', committer='david', branch='warnerdb', category='devel', comments='fix whitespace', files=['master/buildbot/__init__.py'], project='Buildbot', repository='git://warner', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, properties={'foo': 20}) expectedRoutingKey = ('changes', '500', 'new') expectedMessage = self.changeEvent expectedRow = fakedb.Change( changeid=500, author='warner', committer='david', comments='fix whitespace', branch='warnerdb', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, category='devel', repository='git://warner', codebase='', project='Buildbot', sourcestampid=100, ) return self.do_test_addChange(kwargs, expectedRoutingKey, expectedMessage, expectedRow) @defer.inlineCallbacks def test_addChange_src_codebase(self): createUserObject = mock.Mock(spec=users.createUserObject) createUserObject.return_value = defer.succeed(123) self.patch(users, 'createUserObject', createUserObject) kwargs = dict(author='warner', committer='david', branch='warnerdb', category='devel', comments='fix whitespace', files=['master/buildbot/__init__.py'], project='Buildbot', repository='git://warner', revision='0e92a098b', revlink='http://warner/0e92a098b', 
when_timestamp=256738404, properties={'foo': 20}, src='git', codebase='cb') expectedRoutingKey = ('changes', '500', 'new') expectedMessage = { 'author': 'warner', 'committer': 'david', 'branch': 'warnerdb', 'category': 'devel', 'codebase': 'cb', 'comments': 'fix whitespace', 'changeid': 500, 'files': ['master/buildbot/__init__.py'], 'parent_changeids': [], 'project': 'Buildbot', 'properties': {'foo': (20, 'Change')}, 'repository': 'git://warner', 'revision': '0e92a098b', 'revlink': 'http://warner/0e92a098b', 'when_timestamp': 256738404, 'sourcestamp': { 'branch': 'warnerdb', 'codebase': 'cb', 'patch': None, 'project': 'Buildbot', 'repository': 'git://warner', 'revision': '0e92a098b', 'created_at': epoch2datetime(10000000), 'ssid': 100, }, # uid } expectedRow = fakedb.Change( changeid=500, author='warner', committer='david', comments='fix whitespace', branch='warnerdb', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, category='devel', repository='git://warner', codebase='cb', project='Buildbot', sourcestampid=100, ) yield self.do_test_addChange(kwargs, expectedRoutingKey, expectedMessage, expectedRow, expectedChangeUsers=[123]) createUserObject.assert_called_once_with(self.master, 'warner', 'git') def test_addChange_src_codebaseGenerator(self): def preChangeGenerator(**kwargs): return kwargs self.master.config = mock.Mock(name='master.config') self.master.config.preChangeGenerator = preChangeGenerator self.master.config.codebaseGenerator = \ lambda change: 'cb-{}'.format(change['category']) kwargs = dict(author='warner', committer='david', branch='warnerdb', category='devel', comments='fix whitespace', files=['master/buildbot/__init__.py'], project='Buildbot', repository='git://warner', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, properties={'foo': 20}) expectedRoutingKey = ('changes', '500', 'new') expectedMessage = { 'author': 'warner', 'committer': 'david', 'branch': 'warnerdb', 'category': 
'devel', 'codebase': 'cb-devel', 'comments': 'fix whitespace', 'changeid': 500, 'files': ['master/buildbot/__init__.py'], 'parent_changeids': [], 'project': 'Buildbot', 'properties': {'foo': (20, 'Change')}, 'repository': 'git://warner', 'revision': '0e92a098b', 'revlink': 'http://warner/0e92a098b', 'when_timestamp': 256738404, 'sourcestamp': { 'branch': 'warnerdb', 'codebase': 'cb-devel', 'patch': None, 'project': 'Buildbot', 'repository': 'git://warner', 'revision': '0e92a098b', 'created_at': epoch2datetime(10000000), 'ssid': 100, }, # uid } expectedRow = fakedb.Change( changeid=500, author='warner', committer='david', comments='fix whitespace', branch='warnerdb', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, category='devel', repository='git://warner', codebase='cb-devel', project='Buildbot', sourcestampid=100, ) return self.do_test_addChange(kwargs, expectedRoutingKey, expectedMessage, expectedRow) def test_addChange_repository_revision(self): self.master.config = mock.Mock(name='master.config') self.master.config.revlink = lambda rev, repo: 'foo{}bar{}baz'.format(repo, rev) # revlink is default here kwargs = dict(author='warner', committer='david', branch='warnerdb', category='devel', comments='fix whitespace', files=['master/buildbot/__init__.py'], project='Buildbot', repository='git://warner', codebase='', revision='0e92a098b', when_timestamp=256738404, properties={'foo': 20}) expectedRoutingKey = ('changes', '500', 'new') # When no revlink is passed to addChange, but a repository and revision is # passed, the revlink should be constructed by calling the revlink callable # in the config. 
We thus expect a revlink of 'foogit://warnerbar0e92a098bbaz' expectedMessage = { 'author': 'warner', 'committer': 'david', 'branch': 'warnerdb', 'category': 'devel', 'codebase': '', 'comments': 'fix whitespace', 'changeid': 500, 'files': ['master/buildbot/__init__.py'], 'parent_changeids': [], 'project': 'Buildbot', 'properties': {'foo': (20, 'Change')}, 'repository': 'git://warner', 'revision': '0e92a098b', 'revlink': 'foogit://warnerbar0e92a098bbaz', 'when_timestamp': 256738404, 'sourcestamp': { 'branch': 'warnerdb', 'codebase': '', 'patch': None, 'project': 'Buildbot', 'repository': 'git://warner', 'revision': '0e92a098b', 'created_at': epoch2datetime(10000000), 'ssid': 100, }, # uid } expectedRow = fakedb.Change( changeid=500, author='warner', committer='david', comments='fix whitespace', branch='warnerdb', revision='0e92a098b', revlink='foogit://warnerbar0e92a098bbaz', when_timestamp=256738404, category='devel', repository='git://warner', codebase='', project='Buildbot', sourcestampid=100, ) return self.do_test_addChange(kwargs, expectedRoutingKey, expectedMessage, expectedRow) buildbot-3.4.0/master/buildbot/test/unit/data/test_changesources.py000066400000000000000000000204551413250514000255330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.python import failure from twisted.trial import unittest from buildbot.data import changesources from buildbot.db.changesources import ChangeSourceAlreadyClaimedError from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class ChangeSourceEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = changesources.ChangeSourceEndpoint resourceTypeClass = changesources.ChangeSource def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.Master(id=33, active=1), fakedb.ChangeSource(id=13, name='some:changesource'), fakedb.ChangeSourceMaster(changesourceid=13, masterid=None), fakedb.ChangeSource(id=14, name='other:changesource'), fakedb.ChangeSourceMaster(changesourceid=14, masterid=22), fakedb.ChangeSource(id=15, name='another:changesource'), fakedb.ChangeSourceMaster(changesourceid=15, masterid=33), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): """get an existing changesource by id""" changesource = yield self.callGet(('changesources', 14)) self.validateData(changesource) self.assertEqual(changesource['name'], 'other:changesource') @defer.inlineCallbacks def test_get_no_master(self): """get a changesource with no master""" changesource = yield self.callGet(('changesources', 13)) self.validateData(changesource) self.assertEqual(changesource['master'], None), @defer.inlineCallbacks def test_get_masterid_existing(self): """get an existing changesource by id on certain master""" changesource = yield self.callGet(('masters', 22, 'changesources', 14)) self.validateData(changesource) self.assertEqual(changesource['name'], 'other:changesource') @defer.inlineCallbacks def test_get_masterid_no_match(self): """get an existing 
changesource by id on the wrong master""" changesource = yield self.callGet(('masters', 33, 'changesources', 13)) self.assertEqual(changesource, None) @defer.inlineCallbacks def test_get_masterid_missing(self): """get an existing changesource by id on an invalid master""" changesource = yield self.callGet(('masters', 25, 'changesources', 13)) self.assertEqual(changesource, None) @defer.inlineCallbacks def test_get_missing(self): """get an invalid changesource by id""" changesource = yield self.callGet(('changesources', 99)) self.assertEqual(changesource, None) class ChangeSourcesEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = changesources.ChangeSourcesEndpoint resourceTypeClass = changesources.ChangeSource def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.Master(id=33, active=1), fakedb.ChangeSource(id=13, name='some:changesource'), fakedb.ChangeSourceMaster(changesourceid=13, masterid=None), fakedb.ChangeSource(id=14, name='other:changesource'), fakedb.ChangeSourceMaster(changesourceid=14, masterid=22), fakedb.ChangeSource(id=15, name='another:changesource'), fakedb.ChangeSourceMaster(changesourceid=15, masterid=33), fakedb.ChangeSource(id=16, name='wholenother:changesource'), fakedb.ChangeSourceMaster(changesourceid=16, masterid=33), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): changesources = yield self.callGet(('changesources',)) [self.validateData(cs) for cs in changesources] self.assertEqual(sorted([m['changesourceid'] for m in changesources]), [13, 14, 15, 16]) @defer.inlineCallbacks def test_get_masterid(self): changesources = yield self.callGet(('masters', 33, 'changesources')) [self.validateData(cs) for cs in changesources] self.assertEqual(sorted([m['changesourceid'] for m in changesources]), [15, 16]) @defer.inlineCallbacks def test_get_masterid_missing(self): changesources = yield self.callGet(('masters', 23, 'changesources')) 
self.assertEqual(changesources, []) class ChangeSource(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = changesources.ChangeSource(self.master) def test_signature_findChangeSourceId(self): @self.assertArgSpecMatches( self.master.data.updates.findChangeSourceId, # fake self.rtype.findChangeSourceId) # real def findChangeSourceId(self, name): pass @defer.inlineCallbacks def test_findChangeSourceId(self): self.master.db.changesources.findChangeSourceId = mock.Mock( return_value=defer.succeed(10)) self.assertEqual((yield self.rtype.findChangeSourceId('cs')), 10) self.master.db.changesources.findChangeSourceId.assert_called_with( 'cs') def test_signature_trySetChangeSourceMaster(self): @self.assertArgSpecMatches( self.master.data.updates.trySetChangeSourceMaster, # fake self.rtype.trySetChangeSourceMaster) # real def trySetChangeSourceMaster(self, changesourceid, masterid): pass @defer.inlineCallbacks def test_trySetChangeSourceMaster_succeeds(self): self.master.db.changesources.setChangeSourceMaster = mock.Mock( return_value=defer.succeed(None)) yield self.rtype.trySetChangeSourceMaster(10, 20) self.master.db.changesources.setChangeSourceMaster.assert_called_with( 10, 20) @defer.inlineCallbacks def test_trySetChangeSourceMaster_fails(self): d = defer.fail(failure.Failure( ChangeSourceAlreadyClaimedError('oh noes'))) self.master.db.changesources.setChangeSourceMaster = mock.Mock( return_value=d) result = yield self.rtype.trySetChangeSourceMaster(10, 20) self.assertFalse(result) @defer.inlineCallbacks def test_trySetChangeSourceMaster_raisesOddException(self): d = defer.fail(failure.Failure(RuntimeError('oh noes'))) self.master.db.changesources.setChangeSourceMaster = mock.Mock( return_value=d) try: yield self.rtype.trySetChangeSourceMaster(10, 20) except RuntimeError: pass else: self.fail("The RuntimeError did not 
propagate") @defer.inlineCallbacks def test__masterDeactivated(self): yield self.master.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.ChangeSource(id=13, name='some:changesource'), fakedb.ChangeSourceMaster(changesourceid=13, masterid=22), fakedb.ChangeSource(id=14, name='other:changesource'), fakedb.ChangeSourceMaster(changesourceid=14, masterid=22), ]) yield self.rtype._masterDeactivated(22) self.master.db.changesources.assertChangeSourceMaster(13, None) self.master.db.changesources.assertChangeSourceMaster(14, None) buildbot-3.4.0/master/buildbot/test/unit/data/test_connector.py000066400000000000000000000217001413250514000246660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.python import reflect from twisted.trial import unittest from buildbot.data import base from buildbot.data import connector from buildbot.data import exceptions from buildbot.data import resultspec from buildbot.data import types from buildbot.test.fake import fakemaster from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class Tests(interfaces.InterfaceTests): def setUp(self): raise NotImplementedError def test_signature_get(self): @self.assertArgSpecMatches(self.data.get) def get(self, path, filters=None, fields=None, order=None, limit=None, offset=None): pass def test_signature_getEndpoint(self): @self.assertArgSpecMatches(self.data.getEndpoint) def getEndpoint(self, path): pass def test_signature_control(self): @self.assertArgSpecMatches(self.data.control) def control(self, action, args, path): pass def test_signature_updates_addChange(self): @self.assertArgSpecMatches(self.data.updates.addChange) def addChange(self, files=None, comments=None, author=None, committer=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase=None, project='', src=None): pass def test_signature_updates_masterActive(self): @self.assertArgSpecMatches(self.data.updates.masterActive) def masterActive(self, name, masterid): pass def test_signature_updates_masterStopped(self): @self.assertArgSpecMatches(self.data.updates.masterStopped) def masterStopped(self, name, masterid): pass def test_signature_updates_addBuildset(self): @self.assertArgSpecMatches(self.data.updates.addBuildset) def addBuildset(self, waited_for, scheduler=None, sourcestamps=None, reason='', properties=None, builderids=None, external_idstring=None, parent_buildid=None, parent_relationship=None): pass def test_signature_updates_maybeBuildsetComplete(self): 
@self.assertArgSpecMatches(self.data.updates.maybeBuildsetComplete) def maybeBuildsetComplete(self, bsid): pass def test_signature_updates_updateBuilderList(self): @self.assertArgSpecMatches(self.data.updates.updateBuilderList) def updateBuilderList(self, masterid, builderNames): pass class TestFakeData(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) self.data = self.master.data class TestDataConnector(TestReactorMixin, unittest.TestCase, Tests): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True) self.data = connector.DataConnector() yield self.data.setServiceParent(self.master) class DataConnector(TestReactorMixin, unittest.TestCase): maxDiff = None @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) # don't load by default self.patch(connector.DataConnector, 'submodules', []) self.data = connector.DataConnector() yield self.data.setServiceParent(self.master) def patchFooPattern(self): cls = type('FooEndpoint', (base.Endpoint,), {}) ep = cls(None, self.master) ep.get = mock.Mock(name='FooEndpoint.get') ep.get.return_value = defer.succeed({'val': 9999}) self.data.matcher[('foo', 'n:fooid', 'bar')] = ep return ep def patchFooListPattern(self): cls = type('FoosEndpoint', (base.Endpoint,), {}) ep = cls(None, self.master) ep.get = mock.Mock(name='FoosEndpoint.get') ep.get.return_value = defer.succeed( [{'val': v} for v in range(900, 920)]) self.data.matcher[('foo',)] = ep return ep # tests def test_sets_master(self): self.assertIdentical(self.master, self.data.master) def test_scanModule(self): # use this module as a test mod = reflect.namedModule('buildbot.test.unit.data.test_connector') self.data._scanModule(mod) # check that it discovered MyResourceType and updated endpoints match = self.data.matcher[('test', '10')] 
self.assertIsInstance(match[0], TestEndpoint) self.assertEqual(match[1], dict(testid=10)) match = self.data.matcher[('test', '10', 'p1')] self.assertIsInstance(match[0], TestEndpoint) match = self.data.matcher[('test', '10', 'p2')] self.assertIsInstance(match[0], TestEndpoint) match = self.data.matcher[('tests',)] self.assertIsInstance(match[0], TestsEndpoint) self.assertEqual(match[1], dict()) match = self.data.matcher[('test', 'foo')] self.assertIsInstance(match[0], TestsEndpointSubclass) self.assertEqual(match[1], dict()) # and that it found the update method self.assertEqual(self.data.updates.testUpdate(), "testUpdate return") # and that it added the single root link self.assertEqual(self.data.rootLinks, [{'name': 'tests'}]) # and that it added an attribute self.assertIsInstance(self.data.rtypes.test, TestResourceType) def test_getEndpoint(self): ep = self.patchFooPattern() got = self.data.getEndpoint(('foo', '10', 'bar')) self.assertEqual(got, (ep, {'fooid': 10})) def test_getEndpoint_missing(self): with self.assertRaises(exceptions.InvalidPathError): self.data.getEndpoint(('xyz',)) @defer.inlineCallbacks def test_get(self): ep = self.patchFooPattern() gotten = yield self.data.get(('foo', '10', 'bar')) self.assertEqual(gotten, {'val': 9999}) ep.get.assert_called_once_with(mock.ANY, {'fooid': 10}) @defer.inlineCallbacks def test_get_filters(self): ep = self.patchFooListPattern() gotten = yield self.data.get(('foo',), filters=[resultspec.Filter('val', 'lt', [902])]) self.assertEqual(gotten, base.ListResult( [{'val': 900}, {'val': 901}], total=2)) ep.get.assert_called_once_with(mock.ANY, {}) @defer.inlineCallbacks def test_get_resultSpec_args(self): ep = self.patchFooListPattern() f = resultspec.Filter('val', 'gt', [909]) gotten = yield self.data.get(('foo',), filters=[f], fields=['val'], order=['-val'], limit=2) self.assertEqual(gotten, base.ListResult( [{'val': 919}, {'val': 918}], total=10, limit=2)) ep.get.assert_called_once_with(mock.ANY, {}) 
@defer.inlineCallbacks def test_control(self): ep = self.patchFooPattern() ep.control = mock.Mock(name='MyEndpoint.control') ep.control.return_value = defer.succeed('controlled') gotten = yield self.data.control('foo!', {'arg': 2}, ('foo', '10', 'bar')) self.assertEqual(gotten, 'controlled') ep.control.assert_called_once_with('foo!', {'arg': 2}, {'fooid': 10}) # classes discovered by test_scanModule, above class TestsEndpoint(base.Endpoint): pathPatterns = "/tests" rootLinkName = 'tests' class TestsEndpointParentClass(base.Endpoint): rootLinkName = 'shouldnt-see-this' class TestsEndpointSubclass(TestsEndpointParentClass): pathPatterns = "/test/foo" class TestEndpoint(base.Endpoint): pathPatterns = """ /test/n:testid /test/n:testid/p1 /test/n:testid/p2 """ class TestResourceType(base.ResourceType): name = 'test' plural = 'tests' endpoints = [TestsEndpoint, TestEndpoint, TestsEndpointSubclass] keyField = 'testid' class EntityType(types.Entity): testid = types.Integer() entityType = EntityType(name, 'Test') @base.updateMethod def testUpdate(self): return "testUpdate return" buildbot-3.4.0/master/buildbot/test/unit/data/test_forceschedulers.py000066400000000000000000000166711413250514000260670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import forceschedulers from buildbot.schedulers.forcesched import ForceScheduler from buildbot.test.util import endpoint expected_default = { 'all_fields': [{'columns': 1, 'autopopulate': None, 'default': '', 'fields': [{'default': '', 'autopopulate': None, 'fullName': 'username', 'hide': False, 'label': 'Your name:', 'maxsize': None, 'multiple': False, 'name': 'username', 'need_email': True, 'regex': None, 'required': False, 'size': 30, 'tablabel': 'Your name:', 'type': 'username'}, {'default': 'force build', 'autopopulate': None, 'fullName': 'reason', 'hide': False, 'label': 'reason', 'maxsize': None, 'multiple': False, 'name': 'reason', 'regex': None, 'required': False, 'size': 20, 'tablabel': 'reason', 'type': 'text'}], 'fullName': None, 'hide': False, 'label': '', 'layout': 'vertical', 'maxsize': None, 'multiple': False, 'name': '', 'regex': None, 'required': False, 'tablabel': '', 'type': 'nested'}, {'columns': 2, 'default': '', 'fields': [{'default': '', 'autopopulate': None, 'fullName': 'branch', 'hide': False, 'label': 'Branch:', 'multiple': False, 'maxsize': None, 'name': 'branch', 'regex': None, 'required': False, 'size': 10, 'tablabel': 'Branch:', 'type': 'text'}, {'default': '', 'autopopulate': None, 'fullName': 'project', 'hide': False, 'label': 'Project:', 'maxsize': None, 'multiple': False, 'name': 'project', 'regex': None, 'required': False, 'size': 10, 'tablabel': 'Project:', 'type': 'text'}, {'default': '', 'autopopulate': None, 'fullName': 'repository', 'hide': False, 'label': 'Repository:', 'maxsize': None, 'multiple': False, 'name': 'repository', 'regex': None, 'required': False, 'size': 10, 'tablabel': 'Repository:', 'type': 'text'}, {'default': '', 'autopopulate': None, 'fullName': 'revision', 'hide': False, 'label': 'Revision:', 'maxsize': None, 'multiple': False, 'name': 'revision', 'regex': None, 'required': False, 
'size': 10, 'tablabel': 'Revision:', 'type': 'text'}], 'autopopulate': None, 'fullName': None, 'hide': False, 'label': '', 'layout': 'vertical', 'maxsize': None, 'multiple': False, 'name': '', 'regex': None, 'required': False, 'tablabel': '', 'type': 'nested'}], 'builder_names': ['builder'], 'button_name': 'defaultforce', 'label': 'defaultforce', 'name': 'defaultforce', 'enabled': True} class ForceschedulerEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = forceschedulers.ForceSchedulerEndpoint resourceTypeClass = forceschedulers.ForceScheduler maxDiff = None def setUp(self): self.setUpEndpoint() scheds = [ForceScheduler( name="defaultforce", builderNames=["builder"])] self.master.allSchedulers = lambda: scheds def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): res = yield self.callGet(('forceschedulers', "defaultforce")) self.validateData(res) self.assertEqual(res, expected_default) @defer.inlineCallbacks def test_get_missing(self): res = yield self.callGet(('forceschedulers', 'foo')) self.assertEqual(res, None) class ForceSchedulersEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = forceschedulers.ForceSchedulersEndpoint resourceTypeClass = forceschedulers.ForceScheduler maxDiff = None def setUp(self): self.setUpEndpoint() scheds = [ForceScheduler( name="defaultforce", builderNames=["builder"])] self.master.allSchedulers = lambda: scheds def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): res = yield self.callGet(('forceschedulers', )) self.assertEqual(res, [expected_default]) buildbot-3.4.0/master/buildbot/test/unit/data/test_graphql.py000066400000000000000000000126531413250514000243410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import textwrap from twisted.internet import defer from twisted.python import reflect from twisted.trial import unittest from buildbot.data import connector from buildbot.data.graphql import GraphQLConnector from buildbot.test.fake import fakemaster from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin try: import graphql except ImportError: graphql = None class TestGraphQlConnector(TestReactorMixin, unittest.TestCase, interfaces.InterfaceTests): maxDiff = None @defer.inlineCallbacks def setUp(self): if not graphql: raise unittest.SkipTest('Test requires graphql-core module installed') self.setUpTestReactor(use_asyncio=True) self.master = fakemaster.make_master(self) # don't load by default self.all_submodules = connector.DataConnector.submodules self.patch(connector.DataConnector, 'submodules', []) self.master.data = self.data = connector.DataConnector() yield self.data.setServiceParent(self.master) self.graphql = GraphQLConnector() yield self.graphql.setServiceParent(self.master) def configure_graphql(self): self.master.config.www = {'graphql': {}} self.graphql.reconfigServiceWithBuildbotConfig(self.master.config) def test_signature_query(self): @self.assertArgSpecMatches(self.graphql.query) def query(self, query): pass def test_graphql_get_schema(self): # use the test module for basic graphQLSchema generation mod = reflect.namedModule('buildbot.test.unit.data.test_connector') self.data._scanModule(mod) self.configure_graphql() schema = 
self.graphql.get_schema() self.assertEqual(schema, textwrap.dedent(""" # custom scalar types for buildbot data model scalar Date # stored as utc unix timestamp scalar Binary # arbitrary data stored as base85 scalar JSON # arbitrary json stored as string, mainly used for properties values type Query { tests(testid: Int, testid__contains: Int, testid__eq: Int, testid__ge: Int, testid__gt: Int, testid__in: [Int], testid__le: Int, testid__lt: Int, testid__ne: Int, testid__notin: [Int], order: String, limit: Int, offset: Int): [Test]! test(testid: Int): Test } type Subscription { tests(testid: Int, testid__contains: Int, testid__eq: Int, testid__ge: Int, testid__gt: Int, testid__in: [Int], testid__le: Int, testid__lt: Int, testid__ne: Int, testid__notin: [Int], order: String, limit: Int, offset: Int): [Test]! test(testid: Int): Test } type Test { testid: Int! } """)) schema = graphql.build_schema(schema) def test_get_fake_graphql_schema(self): # use the test module for basic graphQLSchema generation mod = reflect.namedModule('buildbot.test.fake.endpoint') self.data._scanModule(mod) self.configure_graphql() schema = self.graphql.get_schema() self.assertEqual(schema, mod.graphql_schema) schema = graphql.build_schema(schema) def test_graphql_get_full_schema(self): if not graphql: raise unittest.SkipTest('Test requires graphql') for mod in self.all_submodules: mod = reflect.namedModule(mod) self.data._scanModule(mod) self.configure_graphql() schema = self.graphql.get_schema() # graphql parses the schema and raise an error if it is incorrect # or incoherent (e.g. 
missing type definition) schema = graphql.build_schema(schema) class TestGraphQlConnectorService(TestReactorMixin, unittest.TestCase): def setUp(self): if not graphql: raise unittest.SkipTest('Test requires graphql-core module installed') self.setUpTestReactor(use_asyncio=False) @defer.inlineCallbacks def test_start_stop(self): self.master = fakemaster.make_master(self) self.master.data = self.data = connector.DataConnector() yield self.data.setServiceParent(self.master) self.graphql = GraphQLConnector() yield self.graphql.setServiceParent(self.master) yield self.master.startService() self.master.config.www = {'graphql': {}} self.graphql.reconfigServiceWithBuildbotConfig(self.master.config) self.assertIsNotNone(self.graphql.asyncio_loop) yield self.master.stopService() self.assertIsNone(self.graphql.asyncio_loop) buildbot-3.4.0/master/buildbot/test/unit/data/test_logchunks.py000066400000000000000000000174031413250514000246760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import textwrap from twisted.internet import defer from twisted.trial import unittest from buildbot.data import logchunks from buildbot.data import resultspec from buildbot.test import fakedb from buildbot.test.util import endpoint class LogChunkEndpointBase(endpoint.EndpointMixin, unittest.TestCase): endpointClass = logchunks.LogChunkEndpoint resourceTypeClass = logchunks.LogChunk endpointname = "contents" log60Lines = ['line zero', 'line 1', 'line TWO', 'line 3', 'line 2**2', 'another line', 'yet another line'] log61Lines = ['%08d' % i for i in range(100)] def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77), fakedb.Worker(id=13, name='wrk'), fakedb.Master(id=88), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Step(id=50, buildid=13, number=9, name='make'), fakedb.Log(id=60, stepid=50, name='stdio', slug='stdio', type='s', num_lines=7), fakedb.LogChunk(logid=60, first_line=0, last_line=1, compressed=0, content=textwrap.dedent("""\ line zero line 1""")), fakedb.LogChunk(logid=60, first_line=2, last_line=4, compressed=0, content=textwrap.dedent("""\ line TWO line 3 line 2**2""")), fakedb.LogChunk(logid=60, first_line=5, last_line=5, compressed=0, content="another line"), fakedb.LogChunk(logid=60, first_line=6, last_line=6, compressed=0, content="yet another line"), fakedb.Log(id=61, stepid=50, name='errors', slug='errors', type='t', num_lines=100), ] + [ fakedb.LogChunk(logid=61, first_line=i, last_line=i, compressed=0, content="%08d" % i) for i in range(100) ] + [ fakedb.Log(id=62, stepid=50, name='notes', slug='notes', type='t', num_lines=0), # logid 62 is empty ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def do_test_chunks(self, path, logid, expLines): # get the whole thing in one go logchunk = yield self.callGet(path) self.validateData(logchunk) 
expContent = '\n'.join(expLines) + '\n' self.assertEqual(logchunk, {'logid': logid, 'firstline': 0, 'content': expContent}) # line-by-line for i, expLine in enumerate(expLines): logchunk = yield self.callGet(path, resultSpec=resultspec.ResultSpec(offset=i, limit=1)) self.validateData(logchunk) self.assertEqual(logchunk, {'logid': logid, 'firstline': i, 'content': expLines[i] + '\n'}) # half and half mid = int(len(expLines) / 2) for f, length in (0, mid), (mid, len(expLines) - 1): result_spec = resultspec.ResultSpec(offset=f, limit=length - f + 1) logchunk = yield self.callGet(path, resultSpec=result_spec) self.validateData(logchunk) expContent = '\n'.join(expLines[f:length + 1]) + '\n' self.assertEqual(logchunk, {'logid': logid, 'firstline': f, 'content': expContent}) # truncated at EOF f, length = len(expLines) - 2, len(expLines) + 10 result_spec = resultspec.ResultSpec(offset=f, limit=length - f + 1) logchunk = yield self.callGet(path, resultSpec=result_spec) self.validateData(logchunk) expContent = '\n'.join(expLines[-2:]) + '\n' self.assertEqual(logchunk, {'logid': logid, 'firstline': f, 'content': expContent}) # some illegal stuff self.assertEqual( (yield self.callGet(path, resultSpec=resultspec.ResultSpec(offset=-1))), None) self.assertEqual( (yield self.callGet(path, resultSpec=resultspec.ResultSpec(offset=10, limit=-1))), None) def test_get_logid_60(self): return self.do_test_chunks(('logs', 60, self.endpointname), 60, self.log60Lines) def test_get_logid_61(self): return self.do_test_chunks(('logs', 61, self.endpointname), 61, self.log61Lines) class LogChunkEndpoint(LogChunkEndpointBase): @defer.inlineCallbacks def test_get_missing(self): logchunk = yield self.callGet(('logs', 99, self.endpointname)) self.assertEqual(logchunk, None) @defer.inlineCallbacks def test_get_empty(self): logchunk = yield self.callGet(('logs', 62, self.endpointname)) self.validateData(logchunk) self.assertEqual(logchunk['content'], '') @defer.inlineCallbacks def 
test_get_by_stepid(self): logchunk = yield self.callGet( ('steps', 50, 'logs', 'errors', self.endpointname)) self.validateData(logchunk) self.assertEqual(logchunk['logid'], 61) @defer.inlineCallbacks def test_get_by_buildid(self): logchunk = yield self.callGet( ('builds', 13, 'steps', 9, 'logs', 'stdio', self.endpointname)) self.validateData(logchunk) self.assertEqual(logchunk['logid'], 60) @defer.inlineCallbacks def test_get_by_builder(self): logchunk = yield self.callGet( ('builders', 77, 'builds', 3, 'steps', 9, 'logs', 'errors', self.endpointname)) self.validateData(logchunk) self.assertEqual(logchunk['logid'], 61) @defer.inlineCallbacks def test_get_by_builder_step_name(self): logchunk = yield self.callGet( ('builders', 77, 'builds', 3, 'steps', 'make', 'logs', 'errors', self.endpointname)) self.validateData(logchunk) self.assertEqual(logchunk['logid'], 61) class RawLogChunkEndpoint(LogChunkEndpointBase): endpointClass = logchunks.RawLogChunkEndpoint endpointname = "raw" def validateData(self, data): self.assertIsInstance(data['raw'], str) self.assertIsInstance(data['mime-type'], str) self.assertIsInstance(data['filename'], str) @defer.inlineCallbacks def do_test_chunks(self, path, logid, expLines): # get the whole thing in one go logchunk = yield self.callGet(path) self.validateData(logchunk) if logid == 60: expContent = '\n'.join([line[1:] for line in expLines]) expFilename = "stdio" else: expContent = '\n'.join(expLines) + '\n' expFilename = "errors" self.assertEqual(logchunk, {'filename': expFilename, 'mime-type': "text/plain", 'raw': expContent}) buildbot-3.4.0/master/buildbot/test/unit/data/test_logs.py000066400000000000000000000227131413250514000236450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import logs from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class LogEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = logs.LogEndpoint resourceTypeClass = logs.Log def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77, name='builder77'), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Step(id=50, buildid=13, number=5, name='make'), fakedb.Log(id=60, stepid=50, name='stdio', slug='stdio', type='s'), fakedb.Log(id=61, stepid=50, name='errors', slug='errors', type='t'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): log = yield self.callGet(('logs', 60)) self.validateData(log) self.assertEqual(log, { 'logid': 60, 'name': 'stdio', 'slug': 'stdio', 'stepid': 50, 'complete': False, 'num_lines': 0, 'type': 's'}) @defer.inlineCallbacks def test_get_missing(self): log = yield self.callGet(('logs', 62)) self.assertEqual(log, None) @defer.inlineCallbacks def test_get_by_stepid(self): log = yield self.callGet(('steps', 50, 'logs', 'errors')) self.validateData(log) 
self.assertEqual(log['name'], 'errors') @defer.inlineCallbacks def test_get_by_buildid(self): log = yield self.callGet(('builds', 13, 'steps', 5, 'logs', 'errors')) self.validateData(log) self.assertEqual(log['name'], 'errors') @defer.inlineCallbacks def test_get_by_builder(self): log = yield self.callGet( ('builders', '77', 'builds', 3, 'steps', 5, 'logs', 'errors')) self.validateData(log) self.assertEqual(log['name'], 'errors') @defer.inlineCallbacks def test_get_by_builder_step_name(self): log = yield self.callGet( ('builders', '77', 'builds', 3, 'steps', 'make', 'logs', 'errors')) self.validateData(log) self.assertEqual(log['name'], 'errors') @defer.inlineCallbacks def test_get_by_buildername_step_name(self): log = yield self.callGet( ('builders', 'builder77', 'builds', 3, 'steps', 'make', 'logs', 'errors')) self.validateData(log) self.assertEqual(log['name'], 'errors') class LogsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = logs.LogsEndpoint resourceTypeClass = logs.Log def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Step(id=50, buildid=13, number=9, name='make'), fakedb.Log(id=60, stepid=50, name='stdio', type='s'), fakedb.Log(id=61, stepid=50, name='errors', type='t'), fakedb.Step(id=51, buildid=13, number=10, name='make_install'), fakedb.Log(id=70, stepid=51, name='stdio', type='s'), fakedb.Log(id=71, stepid=51, name='results_html', type='h'), fakedb.Step(id=52, buildid=13, number=11, name='nothing'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_stepid(self): logs = yield self.callGet(('steps', 50, 'logs')) [self.validateData(log) for log in logs] self.assertEqual(sorted([b['name'] for b in logs]), ['errors', 'stdio']) 
@defer.inlineCallbacks def test_get_stepid_empty(self): logs = yield self.callGet(('steps', 52, 'logs')) self.assertEqual(logs, []) @defer.inlineCallbacks def test_get_stepid_missing(self): logs = yield self.callGet(('steps', 99, 'logs')) self.assertEqual(logs, []) @defer.inlineCallbacks def test_get_buildid_step_name(self): logs = yield self.callGet( ('builds', 13, 'steps', 'make_install', 'logs')) [self.validateData(log) for log in logs] self.assertEqual(sorted([b['name'] for b in logs]), ['results_html', 'stdio']) @defer.inlineCallbacks def test_get_buildid_step_number(self): logs = yield self.callGet(('builds', 13, 'steps', 10, 'logs')) [self.validateData(log) for log in logs] self.assertEqual(sorted([b['name'] for b in logs]), ['results_html', 'stdio']) @defer.inlineCallbacks def test_get_builder_build_number_step_name(self): logs = yield self.callGet( ('builders', 77, 'builds', 3, 'steps', 'make', 'logs')) [self.validateData(log) for log in logs] self.assertEqual(sorted([b['name'] for b in logs]), ['errors', 'stdio']) @defer.inlineCallbacks def test_get_builder_build_number_step_number(self): logs = yield self.callGet( ('builders', 77, 'builds', 3, 'steps', 10, 'logs')) [self.validateData(log) for log in logs] self.assertEqual(sorted([b['name'] for b in logs]), ['results_html', 'stdio']) class Log(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = logs.Log(self.master) @defer.inlineCallbacks def do_test_callthrough(self, dbMethodName, method, exp_args=None, exp_kwargs=None, *args, **kwargs): rv = (1, 2) m = mock.Mock(return_value=defer.succeed(rv)) setattr(self.master.db.logs, dbMethodName, m) res = yield method(*args, **kwargs) self.assertIdentical(res, rv) m.assert_called_with(*(exp_args or args), **(exp_kwargs or kwargs)) def test_signature_addLog(self): @self.assertArgSpecMatches( 
self.master.data.updates.addLog, # fake self.rtype.addLog) # real def addLog(self, stepid, name, type): pass @defer.inlineCallbacks def test_addLog_uniquify(self): tries = [] @self.assertArgSpecMatches(self.master.db.logs.addLog) def addLog(stepid, name, slug, type): tries.append((stepid, name, slug, type)) if len(tries) < 3: return defer.fail(KeyError()) return defer.succeed(23) self.patch(self.master.db.logs, 'addLog', addLog) logid = yield self.rtype.addLog( stepid=13, name='foo', type='s') self.assertEqual(logid, 23) self.assertEqual(tries, [ (13, 'foo', 'foo', 's'), (13, 'foo', 'foo_2', 's'), (13, 'foo', 'foo_3', 's'), ]) def test_signature_finishLog(self): @self.assertArgSpecMatches( self.master.data.updates.finishLog, # fake self.rtype.finishLog) # real def finishLog(self, logid): pass def test_finishLog(self): self.do_test_callthrough('finishLog', self.rtype.finishLog, logid=10) def test_signature_compressLog(self): @self.assertArgSpecMatches( self.master.data.updates.compressLog, # fake self.rtype.compressLog) # real def compressLog(self, logid): pass def test_compressLog(self): self.do_test_callthrough('compressLog', self.rtype.compressLog, logid=10) def test_signature_appendLog(self): @self.assertArgSpecMatches( self.master.data.updates.appendLog, # fake self.rtype.appendLog) # real def appendLog(self, logid, content): pass def test_appendLog(self): self.do_test_callthrough('appendLog', self.rtype.appendLog, logid=10, content='foo\nbar\n') buildbot-3.4.0/master/buildbot/test/unit/data/test_masters.py000066400000000000000000000261221413250514000243550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import masters from buildbot.process.results import RETRY from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime SOMETIME = 1349016870 OTHERTIME = 1249016870 class MasterEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = masters.MasterEndpoint resourceTypeClass = masters.Master def setUp(self): self.setUpEndpoint() self.master.name = "myname" self.db.insertTestData([ fakedb.Master(id=13, name='some:master', active=False, last_active=SOMETIME), fakedb.Master(id=14, name='other:master', active=False, last_active=SOMETIME), fakedb.Builder(id=23, name='bldr1'), fakedb.BuilderMaster(builderid=23, masterid=13), fakedb.Builder(id=24, name='bldr2'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): master = yield self.callGet(('masters', 14)) self.validateData(master) self.assertEqual(master['name'], 'other:master') @defer.inlineCallbacks def test_get_builderid_existing(self): master = yield self.callGet(('builders', 23, 'masters', 13)) self.validateData(master) self.assertEqual(master['name'], 'some:master') @defer.inlineCallbacks def test_get_builderid_no_match(self): master = yield self.callGet(('builders', 24, 'masters', 13)) 
self.assertEqual(master, None) @defer.inlineCallbacks def test_get_builderid_missing(self): master = yield self.callGet(('builders', 25, 'masters', 13)) self.assertEqual(master, None) @defer.inlineCallbacks def test_get_missing(self): master = yield self.callGet(('masters', 99)) self.assertEqual(master, None) class MastersEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = masters.MastersEndpoint resourceTypeClass = masters.Master def setUp(self): self.setUpEndpoint() self.master.name = "myname" self.db.insertTestData([ fakedb.Master(id=13, name='some:master', active=False, last_active=SOMETIME), fakedb.Master(id=14, name='other:master', active=True, last_active=OTHERTIME), fakedb.Builder(id=22), fakedb.BuilderMaster(masterid=13, builderid=22), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): masters = yield self.callGet(('masters',)) [self.validateData(m) for m in masters] self.assertEqual(sorted([m['masterid'] for m in masters]), [13, 14]) @defer.inlineCallbacks def test_get_builderid(self): masters = yield self.callGet(('builders', 22, 'masters')) [self.validateData(m) for m in masters] self.assertEqual(sorted([m['masterid'] for m in masters]), [13]) @defer.inlineCallbacks def test_get_builderid_missing(self): masters = yield self.callGet(('builders', 23, 'masters')) self.assertEqual(masters, []) class Master(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = masters.Master(self.master) def test_signature_masterActive(self): @self.assertArgSpecMatches( self.master.data.updates.masterActive, # fake self.rtype.masterActive) # real def masterActive(self, name, masterid): pass @defer.inlineCallbacks def test_masterActive(self): self.reactor.advance(60) self.master.db.insertTestData([ fakedb.Master(id=13, name='myname', active=0, last_active=0), 
fakedb.Master(id=14, name='other', active=1, last_active=0), fakedb.Master(id=15, name='other2', active=1, last_active=0), ]) # initial checkin yield self.rtype.masterActive(name='myname', masterid=13) master = yield self.master.db.masters.getMaster(13) self.assertEqual(master, dict(id=13, name='myname', active=True, last_active=epoch2datetime(60))) self.assertEqual(self.master.mq.productions, [ (('masters', '13', 'started'), dict(masterid=13, name='myname', active=True)), ]) self.master.mq.productions = [] # updated checkin time, re-activation self.reactor.advance(60) yield self.master.db.masters.markMasterInactive(13) yield self.rtype.masterActive('myname', masterid=13) master = yield self.master.db.masters.getMaster(13) self.assertEqual(master, dict(id=13, name='myname', active=True, last_active=epoch2datetime(120))) self.assertEqual(self.master.mq.productions, [ (('masters', '13', 'started'), dict(masterid=13, name='myname', active=True)), ]) self.master.mq.productions = [] def test_signature_masterStopped(self): @self.assertArgSpecMatches( self.master.data.updates.masterStopped, # fake self.rtype.masterStopped) # real def masterStopped(self, name, masterid): pass @defer.inlineCallbacks def test_masterStopped(self): self.reactor.advance(60) self.master.db.insertTestData([ fakedb.Master(id=13, name='aname', active=1, last_active=self.reactor.seconds()), ]) self.rtype._masterDeactivated = mock.Mock() yield self.rtype.masterStopped(name='aname', masterid=13) self.rtype._masterDeactivated. 
\ assert_called_with(13, 'aname') @defer.inlineCallbacks def test_masterStopped_already(self): self.reactor.advance(60) self.master.db.insertTestData([ fakedb.Master(id=13, name='aname', active=0, last_active=0), ]) self.rtype._masterDeactivated = mock.Mock() yield self.rtype.masterStopped(name='aname', masterid=13) self.rtype._masterDeactivated.assert_not_called() def test_signature_expireMasters(self): @self.assertArgSpecMatches( self.master.data.updates.expireMasters, # fake self.rtype.expireMasters) # real def expireMasters(self, forceHouseKeeping=False): pass @defer.inlineCallbacks def test_expireMasters(self): self.reactor.advance(60) self.master.db.insertTestData([ fakedb.Master(id=14, name='other', active=1, last_active=0), fakedb.Master(id=15, name='other', active=1, last_active=0), ]) self.rtype._masterDeactivated = mock.Mock() # check after 10 minutes, and see #14 deactivated; #15 gets deactivated # by another master, so it's not included here self.reactor.advance(600) yield self.master.db.masters.markMasterInactive(15) yield self.rtype.expireMasters() master = yield self.master.db.masters.getMaster(14) self.assertEqual(master, dict(id=14, name='other', active=False, last_active=epoch2datetime(0))) self.rtype._masterDeactivated. 
\ assert_called_with(14, 'other') @defer.inlineCallbacks def test_masterDeactivated(self): self.master.db.insertTestData([ fakedb.Master(id=14, name='other', active=0, last_active=0), # set up a running build with some steps fakedb.Builder(id=77, name='b1'), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, builderid=77, buildsetid=8822), fakedb.BuildRequestClaim(brid=82, masterid=14, claimed_at=SOMETIME), fakedb.Build(id=13, builderid=77, masterid=14, workerid=13, buildrequestid=82, number=3, results=None), fakedb.Step(id=200, buildid=13), fakedb.Log(id=2000, stepid=200, num_lines=2), fakedb.LogChunk(logid=2000, first_line=1, last_line=2, content='ab\ncd') ]) # mock out the _masterDeactivated methods this will call for rtype in 'builder', 'scheduler', 'changesource': rtype_obj = getattr(self.master.data.rtypes, rtype) m = mock.Mock(name='{}._masterDeactivated'.format(rtype), spec=rtype_obj._masterDeactivated) m.side_effect = lambda masterid: defer.succeed(None) rtype_obj._masterDeactivated = m # and the update methods.. for meth in 'finishBuild', 'finishStep', 'finishLog': m = mock.create_autospec(getattr(self.master.data.updates, meth)) m.side_effect = lambda *args, **kwargs: defer.succeed(None) setattr(self.master.data.updates, meth, m) yield self.rtype._masterDeactivated(14, 'other') self.master.data.rtypes.builder._masterDeactivated. \ assert_called_with(masterid=14) self.master.data.rtypes.scheduler._masterDeactivated. \ assert_called_with(masterid=14) self.master.data.rtypes.changesource._masterDeactivated. 
\ assert_called_with(masterid=14) # see that we finished off that build and its steps and logs updates = self.master.data.updates updates.finishLog.assert_called_with(logid=2000) updates.finishStep.assert_called_with( stepid=200, results=RETRY, hidden=False) updates.finishBuild.assert_called_with(buildid=13, results=RETRY) self.assertEqual(self.master.mq.productions, [ (('masters', '14', 'stopped'), dict(masterid=14, name='other', active=False)), ]) buildbot-3.4.0/master/buildbot/test/unit/data/test_patches.py000066400000000000000000000022661413250514000243310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.data import patches from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class Patch(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = patches.Patch(self.master) # no update methods -> nothing to test buildbot-3.4.0/master/buildbot/test/unit/data/test_properties.py000066400000000000000000000150701413250514000250730ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import properties from buildbot.process.properties import Properties as processProperties from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class BuildsetPropertiesEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = properties.BuildsetPropertiesEndpoint resourceTypeClass = properties.Properties def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Buildset(id=13, reason='because I said so'), fakedb.SourceStamp(id=92), fakedb.SourceStamp(id=93), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=92), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=93), fakedb.Buildset(id=14, reason='no sourcestamps'), fakedb.BuildsetProperty(buildsetid=14) ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_properties(self): props = yield self.callGet(('buildsets', 14, 'properties')) self.assertEqual(props, {'prop': (22, 'fakedb')}) class BuildPropertiesEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = properties.BuildPropertiesEndpoint resourceTypeClass = 
properties.Properties def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Buildset(id=28), fakedb.BuildRequest(id=5, buildsetid=28), fakedb.Master(id=3), fakedb.Worker(id=42, name="Friday"), fakedb.Build(id=786, buildrequestid=5, masterid=3, workerid=42), fakedb.BuildProperty( buildid=786, name="year", value=1651, source="Wikipedia"), fakedb.BuildProperty( buildid=786, name="island_name", value="despair", source="Book"), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_properties(self): props = yield self.callGet(('builds', 786, 'properties')) self.assertEqual(props, {'year': (1651, 'Wikipedia'), 'island_name': ("despair", 'Book')}) @defer.inlineCallbacks def test_get_properties_from_builder(self): props = yield self.callGet(('builders', 1, 'builds', 786, 'properties')) self.assertEqual(props, {'year': (1651, 'Wikipedia'), 'island_name': ("despair", 'Book')}) class Properties(interfaces.InterfaceTests, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=False, wantDb=True, wantData=True) self.rtype = properties.Properties(self.master) @defer.inlineCallbacks def do_test_callthrough(self, dbMethodName, method, exp_args=None, exp_kwargs=None, *args, **kwargs): rv = (1, 2) m = mock.Mock(return_value=defer.succeed(rv)) setattr(self.master.db.builds, dbMethodName, m) res = yield method(*args, **kwargs) self.assertIdentical(res, rv) m.assert_called_with( *(exp_args or args), **((exp_kwargs is None) and kwargs or exp_kwargs)) def test_signature_setBuildProperty(self): @self.assertArgSpecMatches( self.master.data.updates.setBuildProperty, # fake self.rtype.setBuildProperty) # real def setBuildProperty(self, buildid, name, value, source): pass def test_setBuildProperty(self): return self.do_test_callthrough('setBuildProperty', self.rtype.setBuildProperty, buildid=1234, name='property', value=[42, 45], source='testsuite', exp_args=(1234, 'property', 
[42, 45], 'testsuite'), exp_kwargs={}) @defer.inlineCallbacks def test_setBuildProperties(self): self.master.db.insertTestData([ fakedb.Buildset(id=28), fakedb.BuildRequest(id=5, buildsetid=28), fakedb.Master(id=3), fakedb.Worker(id=42, name="Friday"), fakedb.Build(id=1234, buildrequestid=5, masterid=3, workerid=42), ]) self.master.db.builds.setBuildProperty = mock.Mock( wraps=self.master.db.builds.setBuildProperty) props = processProperties.fromDict( dict(a=(1, 't'), b=(['abc', 9], 't'))) yield self.rtype.setBuildProperties(1234, props) setBuildPropertiesCalls = sorted(self.master.db.builds.setBuildProperty.mock_calls) self.assertEqual(setBuildPropertiesCalls, [ mock.call(1234, 'a', 1, 't'), mock.call(1234, 'b', ['abc', 9], 't')]) self.master.mq.assertProductions([ (('builds', '1234', 'properties', 'update'), {'a': (1, 't'), 'b': (['abc', 9], 't')}), ]) # sync without changes: no db write self.master.db.builds.setBuildProperty.reset_mock() self.master.mq.clearProductions() yield self.rtype.setBuildProperties(1234, props) self.master.db.builds.setBuildProperty.assert_not_called() self.master.mq.assertProductions([]) # sync with one changes: one db write props.setProperty('b', 2, 'step') self.master.db.builds.setBuildProperty.reset_mock() yield self.rtype.setBuildProperties(1234, props) self.master.db.builds.setBuildProperty.assert_called_with( 1234, 'b', 2, 'step') self.master.mq.assertProductions([ (('builds', '1234', 'properties', 'update'), {'b': (2, 'step')}) ]) buildbot-3.4.0/master/buildbot/test/unit/data/test_resultspec.py000066400000000000000000000345151413250514000250750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import random from twisted.trial import unittest from buildbot.data import base from buildbot.data import resultspec from buildbot.data.resultspec import NoneComparator from buildbot.data.resultspec import ReverseComparator def mklist(fld, *values): if isinstance(fld, tuple): return [dict(zip(fld, val)) for val in values] return [{fld: val} for val in values] class Filter(unittest.TestCase): def test_eq(self): f = resultspec.Filter('num', 'eq', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10))), mklist('num', 10)) def test_eq_plural(self): f = resultspec.Filter('num', 'eq', [10, 15, 20]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 10, 15)) def test_ne(self): f = resultspec.Filter('num', 'ne', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10))), mklist('num', 5)) def test_ne_plural(self): f = resultspec.Filter('num', 'ne', [10, 15, 20]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 5)) def test_lt(self): f = resultspec.Filter('num', 'lt', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 5)) def test_le(self): f = resultspec.Filter('num', 'le', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 5, 10)) def test_gt(self): f = resultspec.Filter('num', 'gt', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 15)) def test_ge(self): f = resultspec.Filter('num', 'ge', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 
15))), mklist('num', 10, 15)) def test_contains(self): f = resultspec.Filter('num', 'contains', [10]) self.assertEqual(list(f.apply(mklist('num', [5, 1], [10, 1], [15, 1]))), mklist('num', [10, 1])) def test_contains_plural(self): f = resultspec.Filter('num', 'contains', [10, 5]) self.assertEqual(list(f.apply(mklist('num', [5, 1], [10, 1], [15, 1]))), mklist('num', [5, 1], [10, 1])) class ResultSpec(unittest.TestCase): def assertListResultEqual(self, a, b): self.assertIsInstance(a, base.ListResult) self.assertIsInstance(b, base.ListResult) self.assertEqual(a, b) def test_apply_None(self): self.assertEqual(resultspec.ResultSpec().apply(None), None) def test_apply_details_fields(self): data = dict(name="clyde", id=14, favcolor="red") self.assertEqual( resultspec.ResultSpec(fields=['name']).apply(data), dict(name="clyde")) self.assertEqual( resultspec.ResultSpec(fields=['name', 'id']).apply(data), dict(name="clyde", id=14)) def test_apply_collection_fields(self): data = mklist(('a', 'b', 'c'), (1, 11, 111), (2, 22, 222)) self.assertEqual( resultspec.ResultSpec(fields=['a']).apply(data), mklist('a', 1, 2)) self.assertEqual( resultspec.ResultSpec(fields=['a', 'c']).apply(data), mklist(('a', 'c'), (1, 111), (2, 222))) def test_apply_ordering(self): data = mklist('name', 'albert', 'bruce', 'cedric', 'dwayne') exp = mklist('name', 'albert', 'bruce', 'cedric', 'dwayne') random.shuffle(data) self.assertEqual( resultspec.ResultSpec(order=['name']).apply(data), exp) self.assertEqual( resultspec.ResultSpec(order=['-name']).apply(data), list(reversed(exp))) def test_apply_ordering_multi(self): data = mklist(('fn', 'ln'), ('cedric', 'willis'), ('albert', 'engelbert'), ('bruce', 'willis'), ('dwayne', 'montague')) exp = base.ListResult(mklist(('fn', 'ln'), ('albert', 'engelbert'), ('dwayne', 'montague'), ('bruce', 'willis'), ('cedric', 'willis')), total=4) random.shuffle(data) self.assertListResultEqual( resultspec.ResultSpec(order=['ln', 'fn']).apply(data), exp) exp = 
base.ListResult(mklist(('fn', 'ln'), ('cedric', 'willis'), ('bruce', 'willis'), ('dwayne', 'montague'), ('albert', 'engelbert')), total=4) self.assertListResultEqual( resultspec.ResultSpec(order=['-ln', '-fn']).apply(data), exp) def test_apply_filter(self): data = mklist('name', 'albert', 'bruce', 'cedric', 'dwayne') f = resultspec.Filter(field='name', op='gt', values=['bruce']) self.assertListResultEqual( resultspec.ResultSpec(filters=[f]).apply(data), base.ListResult(mklist('name', 'cedric', 'dwayne'), total=2)) f2 = resultspec.Filter(field='name', op='le', values=['cedric']) self.assertListResultEqual( resultspec.ResultSpec(filters=[f, f2]).apply(data), base.ListResult(mklist('name', 'cedric'), total=1)) def test_apply_missing_fields(self): data = mklist(('fn', 'ln'), ('cedric', 'willis'), ('albert', 'engelbert'), ('bruce', 'willis'), ('dwayne', 'montague')) # note that the REST interface catches this with a nicer error message with self.assertRaises(KeyError): resultspec.ResultSpec(fields=['fn'], order=['ln']).apply(data) def test_sort_null_datetimefields(self): data = mklist(('fn', 'ln'), ('albert', datetime.datetime(1, 1, 1)), ('cedric', None)) exp = mklist(('fn', 'ln'), ('cedric', None), ('albert', datetime.datetime(1, 1, 1))) self.assertListResultEqual( resultspec.ResultSpec(order=['ln']).apply(data), base.ListResult(exp, total=2)) def do_test_pagination(self, bareList): data = mklist('x', *list(range(101, 131))) if not bareList: data = base.ListResult(data) data.offset = None data.total = len(data) data.limit = None self.assertListResultEqual( resultspec.ResultSpec(offset=0).apply(data), base.ListResult(mklist('x', *list(range(101, 131))), offset=0, total=30)) self.assertListResultEqual( resultspec.ResultSpec(offset=10).apply(data), base.ListResult(mklist('x', *list(range(111, 131))), offset=10, total=30)) self.assertListResultEqual( resultspec.ResultSpec(offset=10, limit=10).apply(data), base.ListResult(mklist('x', *list(range(111, 121))), offset=10, 
total=30, limit=10)) self.assertListResultEqual( resultspec.ResultSpec(offset=20, limit=15).apply(data), base.ListResult(mklist('x', *list(range(121, 131))), offset=20, total=30, limit=15)) # off the end def test_pagination_bare_list(self): return self.do_test_pagination(bareList=True) def test_pagination_ListResult(self): return self.do_test_pagination(bareList=False) def test_pagination_prepaginated(self): data = base.ListResult(mklist('x', *list(range(10, 20)))) data.offset = 10 data.total = 30 data.limit = 10 self.assertListResultEqual( # ResultSpec has its offset/limit fields cleared resultspec.ResultSpec().apply(data), base.ListResult(mklist('x', *list(range(10, 20))), offset=10, total=30, limit=10)) def test_pagination_prepaginated_without_clearing_resultspec(self): data = base.ListResult(mklist('x', *list(range(10, 20)))) data.offset = 10 data.limit = 10 # ResultSpec does not have its offset/limit fields cleared - this is # detected as an assertion failure with self.assertRaises(AssertionError): resultspec.ResultSpec(offset=10, limit=20).apply(data) def test_endpoint_returns_total_without_applying_filters(self): data = base.ListResult(mklist('x', *list(range(10, 20)))) data.total = 99 # apply doesn't want to get a total with filters still outstanding f = resultspec.Filter(field='x', op='gt', values=[23]) with self.assertRaises(AssertionError): resultspec.ResultSpec(filters=[f]).apply(data) def test_popProperties(self): expected = ['prop1', 'prop2'] rs = resultspec.ResultSpec(properties=[ resultspec.Property(b'property', 'eq', expected) ]) self.assertEqual(len(rs.properties), 1) self.assertEqual(rs.popProperties(), expected) self.assertEqual(len(rs.properties), 0) def test_popFilter(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', [10]), resultspec.Filter('foo', 'gt', [5]), resultspec.Filter('base', 'ne', [20]), ]) self.assertEqual(rs.popFilter('baz', 'lt'), None) # no match self.assertEqual(rs.popFilter('foo', 'eq'), [10]) 
self.assertEqual(len(rs.filters), 2) def test_popBooleanFilter(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', [True]), resultspec.Filter('bar', 'ne', [False]), ]) self.assertEqual(rs.popBooleanFilter('foo'), True) self.assertEqual(rs.popBooleanFilter('bar'), True) self.assertEqual(len(rs.filters), 0) def test_popStringFilter(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', ['foo']), ]) self.assertEqual(rs.popStringFilter('foo'), 'foo') def test_popStringFilterSeveral(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', ['foo', 'bar']), ]) self.assertEqual(rs.popStringFilter('foo'), None) def test_popIntegerFilter(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', ['12']), ]) self.assertEqual(rs.popIntegerFilter('foo'), 12) def test_popIntegerFilterSeveral(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', ['12', '13']), ]) self.assertEqual(rs.popIntegerFilter('foo'), None) def test_popIntegerFilterNotInt(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', ['bar']), ]) with self.assertRaises(ValueError): rs.popIntegerFilter('foo') def test_removeOrder(self): rs = resultspec.ResultSpec(order=['foo', '-bar']) rs.removeOrder() self.assertEqual(rs.order, None) def test_popField(self): rs = resultspec.ResultSpec(fields=['foo', 'bar']) self.assertTrue(rs.popField('foo')) self.assertEqual(rs.fields, ['bar']) def test_popField_not_present(self): rs = resultspec.ResultSpec(fields=['foo', 'bar']) self.assertFalse(rs.popField('nosuch')) self.assertEqual(rs.fields, ['foo', 'bar']) class Comparator(unittest.TestCase): def test_noneComparator(self): self.assertNotEqual(NoneComparator(None), NoneComparator(datetime.datetime(1, 1, 1))) self.assertNotEqual(NoneComparator(datetime.datetime(1, 1, 1)), NoneComparator(None)) self.assertLess(NoneComparator(None), NoneComparator(datetime.datetime(1, 1, 1))) 
self.assertGreater(NoneComparator(datetime.datetime(1, 1, 1)), NoneComparator(None)) self.assertLess(NoneComparator(datetime.datetime(1, 1, 1)), NoneComparator(datetime.datetime(1, 1, 2))) self.assertEqual(NoneComparator(datetime.datetime(1, 1, 1)), NoneComparator(datetime.datetime(1, 1, 1))) self.assertGreater(NoneComparator(datetime.datetime(1, 1, 2)), NoneComparator(datetime.datetime(1, 1, 1))) self.assertEqual(NoneComparator(None), NoneComparator(None)) def test_noneComparison(self): noneInList = ["z", None, None, "q", "a", None, "v"] sortedList = sorted(noneInList, key=NoneComparator) self.assertEqual(sortedList, [None, None, None, "a", "q", "v", "z"]) def test_reverseComparator(self): reverse35 = ReverseComparator(35) reverse36 = ReverseComparator(36) self.assertEqual(reverse35, reverse35) self.assertNotEqual(reverse35, reverse36) self.assertLess(reverse36, reverse35) self.assertGreater(reverse35, reverse36) self.assertLess(reverse36, reverse35) def test_reverseComparison(self): nums = [1, 2, 3, 4, 5] nums.sort(key=ReverseComparator) self.assertEqual(nums, [5, 4, 3, 2, 1]) def test_reverseComparisonWithNone(self): noneInList = ["z", None, None, "q", "a", None, "v"] sortedList = sorted(noneInList, key=lambda x: ReverseComparator(NoneComparator(x))) self.assertEqual(sortedList, ["z", "v", "q", "a", None, None, None]) buildbot-3.4.0/master/buildbot/test/unit/data/test_root.py000066400000000000000000000071341413250514000236640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import connector from buildbot.data import root from buildbot.test.util import endpoint class RootEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = root.RootEndpoint resourceTypeClass = root.Root def setUp(self): self.setUpEndpoint() self.master.data.rootLinks = [ {'name': 'abc'}, ] def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): rootlinks = yield self.callGet(('',)) [self.validateData(root) for root in rootlinks] self.assertEqual(rootlinks, [ {'name': 'abc'}, ]) class SpecEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = root.SpecEndpoint resourceTypeClass = root.Spec @defer.inlineCallbacks def setUp(self): self.setUpEndpoint() # replace fakeConnector with real DataConnector self.master.data.disownServiceParent() self.master.data = connector.DataConnector() yield self.master.data.setServiceParent(self.master) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): specs = yield self.callGet(('application.spec',)) [self.validateData(s) for s in specs] for s in specs: # only test an endpoint that is reasonably stable if s['path'] != "master": continue self.assertEqual(s, {'path': 'master', 'type': 'master', 'type_spec': {'fields': [{'name': 'active', 'type': 'boolean', 'type_spec': {'name': 'boolean'}}, {'name': 'masterid', 'type': 'integer', 'type_spec': {'name': 'integer'}}, {'name': 'link', 'type': 'link', 'type_spec': {'name': 'link'}}, {'name': 'name', 'type': 'string', 'type_spec': {'name': 'string'}}, {'name': 'last_active', 'type': 'datetime', 'type_spec': {'name': 'datetime'}}], 'type': 'master'}, 'plural': 'masters'}) 
buildbot-3.4.0/master/buildbot/test/unit/data/test_schedulers.py000066400000000000000000000223501413250514000250370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.python import failure from twisted.trial import unittest from buildbot.data import schedulers from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime class SchedulerEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = schedulers.SchedulerEndpoint resourceTypeClass = schedulers.Scheduler def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.Master(id=33, active=1), fakedb.Scheduler(id=13, name='some:scheduler'), fakedb.SchedulerMaster(schedulerid=13, masterid=None), fakedb.Scheduler(id=14, name='other:scheduler'), fakedb.SchedulerMaster(schedulerid=14, masterid=22), fakedb.Scheduler(id=15, name='another:scheduler'), fakedb.SchedulerMaster(schedulerid=15, masterid=33), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): scheduler = yield self.callGet(('schedulers', 14)) 
self.validateData(scheduler) self.assertEqual(scheduler['name'], 'other:scheduler') @defer.inlineCallbacks def test_get_no_master(self): scheduler = yield self.callGet(('schedulers', 13)) self.validateData(scheduler) self.assertEqual(scheduler['master'], None), @defer.inlineCallbacks def test_get_masterid_existing(self): scheduler = yield self.callGet(('masters', 22, 'schedulers', 14)) self.validateData(scheduler) self.assertEqual(scheduler['name'], 'other:scheduler') @defer.inlineCallbacks def test_get_masterid_no_match(self): scheduler = yield self.callGet(('masters', 33, 'schedulers', 13)) self.assertEqual(scheduler, None) @defer.inlineCallbacks def test_get_masterid_missing(self): scheduler = yield self.callGet(('masters', 99, 'schedulers', 13)) self.assertEqual(scheduler, None) @defer.inlineCallbacks def test_get_missing(self): scheduler = yield self.callGet(('schedulers', 99)) self.assertEqual(scheduler, None) @defer.inlineCallbacks def test_action_enable(self): r = yield self.callControl("enable", {'enabled': False}, ('schedulers', 13)) self.assertEqual(r, None) class SchedulersEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = schedulers.SchedulersEndpoint resourceTypeClass = schedulers.Scheduler def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.Master(id=33, active=1), fakedb.Scheduler(id=13, name='some:scheduler'), fakedb.SchedulerMaster(schedulerid=13, masterid=None), fakedb.Scheduler(id=14, name='other:scheduler'), fakedb.SchedulerMaster(schedulerid=14, masterid=22), fakedb.Scheduler(id=15, name='another:scheduler'), fakedb.SchedulerMaster(schedulerid=15, masterid=33), fakedb.Scheduler(id=16, name='wholenother:scheduler'), fakedb.SchedulerMaster(schedulerid=16, masterid=33), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): schedulers = yield self.callGet(('schedulers',)) [self.validateData(m) for m in schedulers] 
self.assertEqual(sorted([m['schedulerid'] for m in schedulers]), [13, 14, 15, 16]) @defer.inlineCallbacks def test_get_masterid(self): schedulers = yield self.callGet(('masters', 33, 'schedulers')) [self.validateData(m) for m in schedulers] self.assertEqual(sorted([m['schedulerid'] for m in schedulers]), [15, 16]) @defer.inlineCallbacks def test_get_masterid_missing(self): schedulers = yield self.callGet(('masters', 23, 'schedulers')) self.assertEqual(schedulers, []) class Scheduler(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = schedulers.Scheduler(self.master) def test_signature_schedulerEnable(self): @self.assertArgSpecMatches( self.master.data.updates.schedulerEnable, self.rtype.schedulerEnable) def schedulerEnable(self, schedulerid, v): pass @defer.inlineCallbacks def test_schedulerEnable(self): SOMETIME = 1348971992 yield self.master.db.insertTestData([ fakedb.Master(id=22, active=0, last_active=SOMETIME), fakedb.Scheduler(id=13, name='some:scheduler'), fakedb.SchedulerMaster(schedulerid=13, masterid=22), ]) yield self.rtype.schedulerEnable(13, False) self.master.mq.assertProductions( [(('schedulers', '13', 'updated'), {'enabled': False, 'master': {'active': False, 'last_active': epoch2datetime(SOMETIME), 'masterid': 22, 'name': 'some:master'}, 'name': 'some:scheduler', 'schedulerid': 13})]) yield self.rtype.schedulerEnable(13, True) self.master.mq.assertProductions( [(('schedulers', '13', 'updated'), {'enabled': True, 'master': {'active': False, 'last_active': epoch2datetime(SOMETIME), 'masterid': 22, 'name': 'some:master'}, 'name': 'some:scheduler', 'schedulerid': 13})]) def test_signature_findSchedulerId(self): @self.assertArgSpecMatches( self.master.data.updates.findSchedulerId, # fake self.rtype.findSchedulerId) # real def findSchedulerId(self, name): pass @defer.inlineCallbacks def 
test_findSchedulerId(self): self.master.db.schedulers.findSchedulerId = mock.Mock( return_value=defer.succeed(10)) self.assertEqual((yield self.rtype.findSchedulerId('sch')), 10) self.master.db.schedulers.findSchedulerId.assert_called_with('sch') def test_signature_trySetSchedulerMaster(self): @self.assertArgSpecMatches( self.master.data.updates.trySetSchedulerMaster, # fake self.rtype.trySetSchedulerMaster) # real def trySetSchedulerMaster(self, schedulerid, masterid): pass @defer.inlineCallbacks def test_trySetSchedulerMaster_succeeds(self): self.master.db.schedulers.setSchedulerMaster = mock.Mock( return_value=defer.succeed(None)) result = yield self.rtype.trySetSchedulerMaster(10, 20) self.assertTrue(result) self.master.db.schedulers.setSchedulerMaster.assert_called_with(10, 20) @defer.inlineCallbacks def test_trySetSchedulerMaster_fails(self): d = defer.fail(failure.Failure( schedulers.SchedulerAlreadyClaimedError('oh noes'))) self.master.db.schedulers.setSchedulerMaster = mock.Mock( return_value=d) result = yield self.rtype.trySetSchedulerMaster(10, 20) self.assertFalse(result) @defer.inlineCallbacks def test_trySetSchedulerMaster_raisesOddException(self): d = defer.fail(failure.Failure(RuntimeError('oh noes'))) self.master.db.schedulers.setSchedulerMaster = mock.Mock( return_value=d) try: yield self.rtype.trySetSchedulerMaster(10, 20) except RuntimeError: pass else: self.fail("The RuntimeError did not propagate") @defer.inlineCallbacks def test__masterDeactivated(self): yield self.master.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.Scheduler(id=13, name='some:scheduler'), fakedb.SchedulerMaster(schedulerid=13, masterid=22), fakedb.Scheduler(id=14, name='other:scheduler'), fakedb.SchedulerMaster(schedulerid=14, masterid=22), ]) yield self.rtype._masterDeactivated(22) self.master.db.schedulers.assertSchedulerMaster(13, None) self.master.db.schedulers.assertSchedulerMaster(14, None) 
buildbot-3.4.0/master/buildbot/test/unit/data/test_sourcestamps.py000066400000000000000000000061231413250514000254260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import sourcestamps from buildbot.test import fakedb from buildbot.test.util import endpoint class SourceStampEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = sourcestamps.SourceStampEndpoint resourceTypeClass = sourcestamps.SourceStamp def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.SourceStamp(id=13, branch='oak'), fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='bar', patch_comment='foo', subdir='/foo', patchlevel=3), fakedb.SourceStamp(id=14, patchid=99, branch='poplar'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): sourcestamp = yield self.callGet(('sourcestamps', 13)) self.validateData(sourcestamp) self.assertEqual(sourcestamp['branch'], 'oak') self.assertEqual(sourcestamp['patch'], None) @defer.inlineCallbacks def test_get_existing_patch(self): sourcestamp = yield self.callGet(('sourcestamps', 14)) self.validateData(sourcestamp) self.assertEqual(sourcestamp['branch'], 'poplar') self.assertEqual(sourcestamp['patch'], { 
'patchid': 99, 'author': 'bar', 'body': b'hello, world', 'comment': 'foo', 'level': 3, 'subdir': '/foo', }) @defer.inlineCallbacks def test_get_missing(self): sourcestamp = yield self.callGet(('sourcestamps', 99)) self.assertEqual(sourcestamp, None) class SourceStampsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = sourcestamps.SourceStampsEndpoint resourceTypeClass = sourcestamps.SourceStamp def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.SourceStamp(id=13), fakedb.SourceStamp(id=14), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): sourcestamps = yield self.callGet(('sourcestamps',)) [self.validateData(m) for m in sourcestamps] self.assertEqual(sorted([m['ssid'] for m in sourcestamps]), [13, 14]) class SourceStamp(unittest.TestCase): pass buildbot-3.4.0/master/buildbot/test/unit/data/test_steps.py000066400000000000000000000340051413250514000240340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import steps from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime TIME1 = 2001111 TIME2 = 2002222 TIME3 = 2003333 class StepEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = steps.StepEndpoint resourceTypeClass = steps.Step def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Builder(id=77, name='builder77'), fakedb.Master(id=88), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=30, builderid=77, number=7, masterid=88, buildrequestid=82, workerid=47), fakedb.Step(id=70, number=0, name='one', buildid=30, started_at=TIME1, complete_at=TIME2, results=0), fakedb.Step(id=71, number=1, name='two', buildid=30, started_at=TIME2, complete_at=TIME3, results=2, urls_json='[{"name":"url","url":"http://url"}]'), fakedb.Step(id=72, number=2, name='three', buildid=30, started_at=TIME3, hidden=True), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): step = yield self.callGet(('steps', 72)) self.validateData(step) self.assertEqual(step, { 'buildid': 30, 'complete': False, 'complete_at': None, 'name': 'three', 'number': 2, 'results': None, 'started_at': epoch2datetime(TIME3), 'state_string': '', 'stepid': 72, 'urls': [], 'hidden': True}) @defer.inlineCallbacks def test_get_existing_buildid_name(self): step = yield self.callGet(('builds', 30, 'steps', 'two')) self.validateData(step) self.assertEqual(step['stepid'], 71) @defer.inlineCallbacks def test_get_existing_buildid_number(self): step = yield self.callGet(('builds', 30, 'steps', 1)) self.validateData(step) self.assertEqual(step['stepid'], 71) 
@defer.inlineCallbacks def test_get_existing_builder_name(self): step = yield self.callGet(('builders', 77, 'builds', 7, 'steps', 'two')) self.validateData(step) self.assertEqual(step['stepid'], 71) @defer.inlineCallbacks def test_get_existing_buildername_name(self): step = yield self.callGet(('builders', 'builder77', 'builds', 7, 'steps', 'two')) self.validateData(step) self.assertEqual(step['stepid'], 71) @defer.inlineCallbacks def test_get_existing_builder_number(self): step = yield self.callGet(('builders', 77, 'builds', 7, 'steps', 1)) self.validateData(step) self.assertEqual(step['stepid'], 71) @defer.inlineCallbacks def test_get_missing_buildername_builder_number(self): step = yield self.callGet(('builders', 'builder77_nope', 'builds', 7, 'steps', 1)) self.assertEqual(step, None) @defer.inlineCallbacks def test_get_missing(self): step = yield self.callGet(('steps', 9999)) self.assertEqual(step, None) class StepsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = steps.StepsEndpoint resourceTypeClass = steps.Step def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Builder(id=77, name='builder77'), fakedb.Master(id=88), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=30, builderid=77, number=7, masterid=88, buildrequestid=82, workerid=47), fakedb.Build(id=31, builderid=77, number=8, masterid=88, buildrequestid=82, workerid=47), fakedb.Step(id=70, number=0, name='one', buildid=30, started_at=TIME1, complete_at=TIME2, results=0), fakedb.Step(id=71, number=1, name='two', buildid=30, started_at=TIME2, complete_at=TIME3, results=2, urls_json='[{"name":"url","url":"http://url"}]'), fakedb.Step(id=72, number=2, name='three', buildid=30, started_at=TIME3), fakedb.Step(id=73, number=0, name='otherbuild', buildid=31, started_at=TIME2), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_buildid(self): steps = yield 
self.callGet(('builds', 30, 'steps')) [self.validateData(step) for step in steps] self.assertEqual([s['number'] for s in steps], [0, 1, 2]) @defer.inlineCallbacks def test_get_builder(self): steps = yield self.callGet(('builders', 77, 'builds', 7, 'steps')) [self.validateData(step) for step in steps] self.assertEqual([s['number'] for s in steps], [0, 1, 2]) @defer.inlineCallbacks def test_get_buildername(self): steps = yield self.callGet(('builders', 'builder77', 'builds', 7, 'steps')) [self.validateData(step) for step in steps] self.assertEqual([s['number'] for s in steps], [0, 1, 2]) class Step(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = steps.Step(self.master) def test_signature_addStep(self): @self.assertArgSpecMatches( self.master.data.updates.addStep, # fake self.rtype.addStep) # real def addStep(self, buildid, name): pass @defer.inlineCallbacks def test_addStep(self): stepid, number, name = yield self.rtype.addStep(buildid=10, name='name') msgBody = { 'buildid': 10, 'complete': False, 'complete_at': None, 'name': name, 'number': number, 'results': None, 'started_at': None, 'state_string': 'pending', 'stepid': stepid, 'urls': [], 'hidden': False, } self.master.mq.assertProductions([ (('builds', '10', 'steps', str(stepid), 'new'), msgBody), (('steps', str(stepid), 'new'), msgBody), ]) step = yield self.master.db.steps.getStep(stepid) self.assertEqual(step, { 'buildid': 10, 'complete_at': None, 'id': stepid, 'name': name, 'number': number, 'results': None, 'started_at': None, 'state_string': 'pending', 'urls': [], 'hidden': False, }) @defer.inlineCallbacks def test_fake_addStep(self): self.assertEqual( len((yield self.master.data.updates.addStep(buildid=10, name='ten'))), 3) def test_signature_startStep(self): @self.assertArgSpecMatches( self.master.data.updates.startStep, # fake self.rtype.startStep) # 
real def addStep(self, stepid): pass @defer.inlineCallbacks def test_startStep(self): self.reactor.advance(TIME1) yield self.master.db.steps.addStep(buildid=10, name='ten', state_string='pending') yield self.rtype.startStep(stepid=100) msgBody = { 'buildid': 10, 'complete': False, 'complete_at': None, 'name': 'ten', 'number': 0, 'results': None, 'started_at': epoch2datetime(TIME1), 'state_string': 'pending', 'stepid': 100, 'urls': [], 'hidden': False, } self.master.mq.assertProductions([ (('builds', '10', 'steps', str(100), 'started'), msgBody), (('steps', str(100), 'started'), msgBody), ]) step = yield self.master.db.steps.getStep(100) self.assertEqual(step, { 'buildid': 10, 'complete_at': None, 'id': 100, 'name': 'ten', 'number': 0, 'results': None, 'started_at': epoch2datetime(TIME1), 'state_string': 'pending', 'urls': [], 'hidden': False, }) def test_signature_setStepStateString(self): @self.assertArgSpecMatches( self.master.data.updates.setStepStateString, # fake self.rtype.setStepStateString) # real def setStepStateString(self, stepid, state_string): pass @defer.inlineCallbacks def test_setStepStateString(self): yield self.master.db.steps.addStep(buildid=10, name='ten', state_string='pending') yield self.rtype.setStepStateString(stepid=100, state_string='hi') msgBody = { 'buildid': 10, 'complete': False, 'complete_at': None, 'name': 'ten', 'number': 0, 'results': None, 'started_at': None, 'state_string': 'hi', 'stepid': 100, 'urls': [], 'hidden': False, } self.master.mq.assertProductions([ (('builds', '10', 'steps', str(100), 'updated'), msgBody), (('steps', str(100), 'updated'), msgBody), ]) step = yield self.master.db.steps.getStep(100) self.assertEqual(step, { 'buildid': 10, 'complete_at': None, 'id': 100, 'name': 'ten', 'number': 0, 'results': None, 'started_at': None, 'state_string': 'hi', 'urls': [], 'hidden': False, }) def test_signature_finishStep(self): @self.assertArgSpecMatches( self.master.data.updates.finishStep, # fake self.rtype.finishStep) # 
real def finishStep(self, stepid, results, hidden): pass @defer.inlineCallbacks def test_finishStep(self): yield self.master.db.steps.addStep(buildid=10, name='ten', state_string='pending') self.reactor.advance(TIME1) yield self.rtype.startStep(stepid=100) self.reactor.advance(TIME2 - TIME1) self.master.mq.clearProductions() yield self.rtype.finishStep(stepid=100, results=9, hidden=False) msgBody = { 'buildid': 10, 'complete': True, 'complete_at': epoch2datetime(TIME2), 'name': 'ten', 'number': 0, 'results': 9, 'started_at': epoch2datetime(TIME1), 'state_string': 'pending', 'stepid': 100, 'urls': [], 'hidden': False, } self.master.mq.assertProductions([ (('builds', '10', 'steps', str(100), 'finished'), msgBody), (('steps', str(100), 'finished'), msgBody), ]) step = yield self.master.db.steps.getStep(100) self.assertEqual(step, { 'buildid': 10, 'complete_at': epoch2datetime(TIME2), 'id': 100, 'name': 'ten', 'number': 0, 'results': 9, 'started_at': epoch2datetime(TIME1), 'state_string': 'pending', 'urls': [], 'hidden': False, }) def test_signature_addStepURL(self): @self.assertArgSpecMatches( self.master.data.updates.addStepURL, # fake self.rtype.addStepURL) # real def addStepURL(self, stepid, name, url): pass @defer.inlineCallbacks def test_addStepURL(self): yield self.master.db.steps.addStep(buildid=10, name='ten', state_string='pending') yield self.rtype.addStepURL(stepid=100, name="foo", url="bar") msgBody = { 'buildid': 10, 'complete': False, 'complete_at': None, 'name': 'ten', 'number': 0, 'results': None, 'started_at': None, 'state_string': 'pending', 'stepid': 100, 'urls': [{'name': 'foo', 'url': 'bar'}], 'hidden': False, } self.master.mq.assertProductions([ (('builds', '10', 'steps', str(100), 'updated'), msgBody), (('steps', str(100), 'updated'), msgBody), ]) step = yield self.master.db.steps.getStep(100) self.assertEqual(step, { 'buildid': 10, 'complete_at': None, 'id': 100, 'name': 'ten', 'number': 0, 'results': None, 'started_at': None, 'state_string': 
'pending', 'urls': [{'name': 'foo', 'url': 'bar'}], 'hidden': False, }) buildbot-3.4.0/master/buildbot/test/unit/data/test_test_result_sets.py000066400000000000000000000235671413250514000263240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import test_result_sets from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class TestResultSetEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = test_result_sets.TestResultSetEndpoint resourceTypeClass = test_result_sets.TestResultSet def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Step(id=131, number=132, name='step132', buildid=30), fakedb.TestResultSet(id=13, builderid=88, buildid=30, stepid=131, description='desc', category='cat', value_unit='ms', complete=1), ]) def tearDown(self): self.tearDownEndpoint() 
@defer.inlineCallbacks def test_get_existing_result_set(self): result = yield self.callGet(('test_result_sets', 13)) self.validateData(result) self.assertEqual(result, { 'test_result_setid': 13, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc', 'category': 'cat', 'value_unit': 'ms', 'tests_passed': None, 'tests_failed': None, 'complete': True }) @defer.inlineCallbacks def test_get_missing_result_set(self): results = yield self.callGet(('test_result_sets', 14)) self.assertIsNone(results) class TestResultSetsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = test_result_sets.TestResultSetsEndpoint resourceTypeClass = test_result_sets.TestResultSet def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Step(id=131, number=132, name='step132', buildid=30), fakedb.TestResultSet(id=13, builderid=88, buildid=30, stepid=131, description='desc', category='cat', value_unit='ms', complete=1), fakedb.TestResultSet(id=14, builderid=88, buildid=30, stepid=131, description='desc', category='cat', value_unit='ms', complete=1), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_result_sets_builders_builderid(self): results = yield self.callGet(('builders', 88, 'test_result_sets')) for result in results: self.validateData(result) self.assertEqual([r['test_result_setid'] for r in results], [13, 14]) @defer.inlineCallbacks def test_get_result_sets_builders_buildername(self): results = yield self.callGet(('builders', 'b1', 'test_result_sets')) for result in results: self.validateData(result) self.assertEqual([r['test_result_setid'] for r in results], [13, 14]) @defer.inlineCallbacks def test_get_result_sets_builds_buildid(self): results = yield 
self.callGet(('builds', 30, 'test_result_sets')) for result in results: self.validateData(result) self.assertEqual([r['test_result_setid'] for r in results], [13, 14]) @defer.inlineCallbacks def test_get_result_sets_steps_stepid(self): results = yield self.callGet(('steps', 131, 'test_result_sets')) for result in results: self.validateData(result) self.assertEqual([r['test_result_setid'] for r in results], [13, 14]) class TestResultSet(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = test_result_sets.TestResultSet(self.master) def test_signature_add_test_result_set(self): @self.assertArgSpecMatches(self.master.data.updates.addTestResultSet, self.rtype.addTestResultSet) def addTestResultSet(self, builderid, buildid, stepid, description, category, value_unit): pass def test_signature_complete_test_result_set(self): @self.assertArgSpecMatches(self.master.data.updates.completeTestResultSet, self.rtype.completeTestResultSet) def completeTestResultSet(self, test_result_setid, tests_passed=None, tests_failed=None): pass @defer.inlineCallbacks def test_add_test_result_set(self): test_result_setid = yield self.rtype.addTestResultSet(builderid=1, buildid=2, stepid=3, description='desc', category='cat4', value_unit='ms') msg_body = { 'test_result_setid': test_result_setid, 'builderid': 1, 'buildid': 2, 'stepid': 3, 'description': 'desc', 'category': 'cat4', 'value_unit': 'ms', 'tests_passed': None, 'tests_failed': None, 'complete': False, } self.master.mq.assertProductions([ (('test_result_sets', str(test_result_setid), 'new'), msg_body), ]) result = yield self.master.db.test_result_sets.getTestResultSet(test_result_setid) self.assertEqual(result, { 'id': test_result_setid, 'builderid': 1, 'buildid': 2, 'stepid': 3, 'description': 'desc', 'category': 'cat4', 'value_unit': 'ms', 'tests_passed': None, 'tests_failed': None, 
'complete': False, }) @defer.inlineCallbacks def test_complete_test_result_set_no_results(self): test_result_setid = \ yield self.master.db.test_result_sets.addTestResultSet(builderid=1, buildid=2, stepid=3, description='desc', category='cat4', value_unit='ms') yield self.rtype.completeTestResultSet(test_result_setid) msg_body = { 'test_result_setid': test_result_setid, 'builderid': 1, 'buildid': 2, 'stepid': 3, 'description': 'desc', 'category': 'cat4', 'value_unit': 'ms', 'tests_passed': None, 'tests_failed': None, 'complete': True, } self.master.mq.assertProductions([ (('test_result_sets', str(test_result_setid), 'completed'), msg_body), ]) result = yield self.master.db.test_result_sets.getTestResultSet(test_result_setid) self.assertEqual(result, { 'id': test_result_setid, 'builderid': 1, 'buildid': 2, 'stepid': 3, 'description': 'desc', 'category': 'cat4', 'value_unit': 'ms', 'tests_passed': None, 'tests_failed': None, 'complete': True, }) @defer.inlineCallbacks def test_complete_test_result_set_with_results(self): test_result_setid = \ yield self.master.db.test_result_sets.addTestResultSet(builderid=1, buildid=2, stepid=3, description='desc', category='cat4', value_unit='ms') yield self.rtype.completeTestResultSet(test_result_setid, tests_passed=12, tests_failed=34) msg_body = { 'test_result_setid': test_result_setid, 'builderid': 1, 'buildid': 2, 'stepid': 3, 'description': 'desc', 'category': 'cat4', 'value_unit': 'ms', 'tests_passed': 12, 'tests_failed': 34, 'complete': True, } self.master.mq.assertProductions([ (('test_result_sets', str(test_result_setid), 'completed'), msg_body), ]) result = yield self.master.db.test_result_sets.getTestResultSet(test_result_setid) self.assertEqual(result, { 'id': test_result_setid, 'builderid': 1, 'buildid': 2, 'stepid': 3, 'description': 'desc', 'category': 'cat4', 'value_unit': 'ms', 'tests_passed': 12, 'tests_failed': 34, 'complete': True, }) 
buildbot-3.4.0/master/buildbot/test/unit/data/test_test_results.py000066400000000000000000000133231413250514000254360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import test_results from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class TestResultsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = test_results.TestResultsEndpoint resourceTypeClass = test_results.TestResult def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Step(id=131, number=132, name='step132', buildid=30), fakedb.TestResultSet(id=13, builderid=88, buildid=30, stepid=131, description='desc', category='cat', value_unit='ms', complete=1), fakedb.TestName(id=301, builderid=88, name='name301'), fakedb.TestName(id=302, builderid=88, name='name302'), fakedb.TestCodePath(id=401, 
builderid=88, path='path401'), fakedb.TestCodePath(id=402, builderid=88, path='path402'), fakedb.TestResult(id=101, builderid=88, test_result_setid=13, line=400, value='v101'), fakedb.TestResult(id=102, builderid=88, test_result_setid=13, test_nameid=301, test_code_pathid=401, line=401, value='v102'), fakedb.TestResult(id=103, builderid=88, test_result_setid=13, test_nameid=302, test_code_pathid=402, line=402, duration_ns=1012, value='v103'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing_results(self): results = yield self.callGet(('test_result_sets', 13, 'results')) for result in results: self.validateData(result) self.assertEqual([r['test_resultid'] for r in results], [101, 102, 103]) @defer.inlineCallbacks def test_get_missing_results(self): results = yield self.callGet(('test_result_sets', 14, 'results')) self.assertEqual(results, []) class TestResult(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = test_results.TestResult(self.master) def test_signature_add_test_results(self): @self.assertArgSpecMatches(self.master.data.updates.addTestResults, self.rtype.addTestResults) def addTestResults(self, builderid, test_result_setid, result_values): pass @defer.inlineCallbacks def test_add_test_results(self): result_values = [ {'test_name': 'name1', 'value': '1'}, {'test_name': 'name2', 'duration_ns': 1000, 'value': '1'}, {'test_name': 'name3', 'test_code_path': 'path2', 'value': '2'}, {'test_name': 'name4', 'test_code_path': 'path3', 'value': '3'}, {'test_name': 'name5', 'test_code_path': 'path4', 'line': 4, 'value': '4'}, {'test_code_path': 'path5', 'line': 5, 'value': '5'}, ] yield self.rtype.addTestResults(builderid=88, test_result_setid=13, result_values=result_values) self.master.mq.assertProductions([]) results = yield 
self.master.db.test_results.getTestResults(builderid=88, test_result_setid=13) resultid = results[0]['id'] self.assertEqual(results, [ {'id': resultid, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name1', 'test_code_path': None, 'line': None, 'duration_ns': None, 'value': '1'}, {'id': resultid + 1, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name2', 'test_code_path': None, 'line': None, 'duration_ns': 1000, 'value': '1'}, {'id': resultid + 2, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name3', 'test_code_path': 'path2', 'line': None, 'duration_ns': None, 'value': '2'}, {'id': resultid + 3, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name4', 'test_code_path': 'path3', 'line': None, 'duration_ns': None, 'value': '3'}, {'id': resultid + 4, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name5', 'test_code_path': 'path4', 'line': 4, 'duration_ns': None, 'value': '4'}, {'id': resultid + 5, 'builderid': 88, 'test_result_setid': 13, 'test_name': None, 'test_code_path': 'path5', 'line': 5, 'duration_ns': None, 'value': '5'}, ]) buildbot-3.4.0/master/buildbot/test/unit/data/test_types.py000066400000000000000000000125711413250514000240460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from datetime import datetime from twisted.trial import unittest from buildbot.data import types class TypeMixin: klass = None good = [] bad = [] stringValues = [] badStringValues = [] cmpResults = [] def setUp(self): self.ty = self.makeInstance() def makeInstance(self): return self.klass() def test_valueFromString(self): for string, expValue in self.stringValues: self.assertEqual(self.ty.valueFromString(string), expValue, "value of string %r" % (string,)) for string in self.badStringValues: with self.assertRaises(Exception): self.ty.valueFromString(string, "expected error for %r" % (string,)) def test_cmp(self): for val, string, expResult in self.cmpResults: self.assertEqual(self.ty.cmp(val, string), expResult, "compare of %r and %r" % (val, string)) def test_validate(self): for o in self.good: errors = list(self.ty.validate(repr(o), o)) self.assertEqual(errors, [], "{} -> {}".format(repr(o), errors)) for o in self.bad: errors = list(self.ty.validate(repr(o), o)) self.assertNotEqual(errors, [], "no error for {}".format(repr(o))) class NoneOk(TypeMixin, unittest.TestCase): def makeInstance(self): return types.NoneOk(types.Integer()) good = [None, 1] bad = ['abc'] stringValues = [('0', 0), ('-10', -10)] badStringValues = ['one', '', '0x10'] cmpResults = [(10, '9', 1), (-2, '-1', -1)] class Integer(TypeMixin, unittest.TestCase): klass = types.Integer good = [0, -1, 1000, 100 ** 100] bad = [None, '', '0'] stringValues = [('0', 0), ('-10', -10)] badStringValues = ['one', '', '0x10'] cmpResults = [(10, '9', 1), (-2, '-1', -1)] class DateTime(TypeMixin, unittest.TestCase): klass = types.DateTime good = [0, 1604843464, datetime(2020, 11, 15, 18, 40, 1, 630219)] bad = [int(1e60), 'bad', 1604843464.388657] stringValues = [ ('1604843464', 1604843464), ] badStringValues = ['one', '', '0x10'] class String(TypeMixin, unittest.TestCase): klass = types.String good = ['', 'hello', '\N{SNOWMAN}'] bad = [None, b'', b'hello', 10] stringValues = [ 
(b'hello', 'hello'), ('\N{SNOWMAN}'.encode('utf-8'), '\N{SNOWMAN}'), ] badStringValues = ['\xe0\xe0'] cmpResults = [('bbb', 'aaa', 1)] class Binary(TypeMixin, unittest.TestCase): klass = types.Binary good = [b'', b'\x01\x80\xfe', '\N{SNOWMAN}'.encode('utf-8')] bad = [None, 10, 'xyz'] stringValues = [('hello', 'hello')] cmpResults = [('\x00\x80', '\x10\x10', -1)] class Boolean(TypeMixin, unittest.TestCase): klass = types.Boolean good = [True, False] bad = [None, 0, 1] stringValues = [ (b'on', True), (b'true', True), (b'yes', True), (b'1', True), (b'off', False), (b'false', False), (b'no', False), (b'0', False), (b'ON', True), (b'TRUE', True), (b'YES', True), (b'OFF', False), (b'FALSE', False), (b'NO', False), ] cmpResults = [ (False, b'no', 0), (True, b'true', 0), ] class Identifier(TypeMixin, unittest.TestCase): def makeInstance(self): return types.Identifier(len=5) good = ['a', 'abcde', 'a1234'] bad = ['', 'abcdef', b'abcd', '1234', '\N{SNOWMAN}'] stringValues = [ (b'abcd', 'abcd'), ] badStringValues = [ b'', r'\N{SNOWMAN}', b'abcdef' ] cmpResults = [ ('aaaa', b'bbbb', -1), ] class List(TypeMixin, unittest.TestCase): def makeInstance(self): return types.List(of=types.Integer()) good = [[], [1], [1, 2]] bad = [1, (1,), ['1']] badStringValues = [ '1', '1,2' ] class SourcedProperties(TypeMixin, unittest.TestCase): klass = types.SourcedProperties good = [{'p': (b'["a"]', 's')}] bad = [ None, (), [], {b'not-unicode': ('["a"]', 'unicode')}, {'unicode': ('["a"]', b'not-unicode')}, {'unicode': ('not, json', 'unicode')}, ] class Entity(TypeMixin, unittest.TestCase): class MyEntity(types.Entity): field1 = types.Integer() field2 = types.NoneOk(types.String()) def makeInstance(self): return self.MyEntity('myentity', 'MyEntity') good = [ {'field1': 1, 'field2': 'f2'}, {'field1': 1, 'field2': None}, ] bad = [ None, [], (), {'field1': 1}, {'field1': 1, 'field2': 'f2', 'field3': 10}, {'field1': 'one', 'field2': 'f2'}, ] 
buildbot-3.4.0/master/buildbot/test/unit/data/test_workers.py000066400000000000000000000243711413250514000243770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import exceptions from buildbot.data import resultspec from buildbot.data import workers from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin testData = [ fakedb.Builder(id=40, name='b1'), fakedb.Builder(id=41, name='b2'), fakedb.Master(id=13), fakedb.Master(id=14), fakedb.BuilderMaster(id=4013, builderid=40, masterid=13), fakedb.BuilderMaster(id=4014, builderid=40, masterid=14), fakedb.BuilderMaster(id=4113, builderid=41, masterid=13), fakedb.Worker(id=1, name='linux', info={}), fakedb.ConfiguredWorker(id=14013, workerid=1, buildermasterid=4013), fakedb.ConfiguredWorker(id=14014, workerid=1, buildermasterid=4014), fakedb.ConnectedWorker(id=113, masterid=13, workerid=1), fakedb.Worker(id=2, name='windows', info={"a": "b"}), fakedb.ConfiguredWorker(id=24013, workerid=2, buildermasterid=4013), fakedb.ConfiguredWorker(id=24014, workerid=2, buildermasterid=4014), 
fakedb.ConfiguredWorker(id=24113, workerid=2, buildermasterid=4113), fakedb.ConnectedWorker(id=214, masterid=14, workerid=2), ] def configuredOnKey(worker): return (worker.get('masterid', 0), worker.get('builderid', 0)) def _filt(bs, builderid, masterid): bs['connected_to'] = sorted( [d for d in bs['connected_to'] if not masterid or masterid == d['masterid']]) bs['configured_on'] = sorted( [d for d in bs['configured_on'] if (not masterid or masterid == d['masterid']) and (not builderid or builderid == d['builderid'])], key=configuredOnKey) return bs def w1(builderid=None, masterid=None): return _filt({ 'workerid': 1, 'name': 'linux', 'workerinfo': {}, 'paused': False, 'graceful': False, 'connected_to': [ {'masterid': 13}, ], 'configured_on': sorted([ {'builderid': 40, 'masterid': 13}, {'builderid': 40, 'masterid': 14}, ], key=configuredOnKey), }, builderid, masterid) def w2(builderid=None, masterid=None): return _filt({ 'workerid': 2, 'name': 'windows', 'workerinfo': {'a': 'b'}, 'paused': False, 'graceful': False, 'connected_to': [ {'masterid': 14}, ], 'configured_on': sorted([ {'builderid': 40, 'masterid': 13}, {'builderid': 41, 'masterid': 13}, {'builderid': 40, 'masterid': 14}, ], key=configuredOnKey), }, builderid, masterid) class WorkerEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = workers.WorkerEndpoint resourceTypeClass = workers.Worker def setUp(self): self.setUpEndpoint() return self.db.insertTestData(testData) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): worker = yield self.callGet(('workers', 2)) self.validateData(worker) worker['configured_on'] = sorted( worker['configured_on'], key=configuredOnKey) self.assertEqual(worker, w2()) @defer.inlineCallbacks def test_get_existing_name(self): worker = yield self.callGet(('workers', 'linux')) self.validateData(worker) worker['configured_on'] = sorted( worker['configured_on'], key=configuredOnKey) self.assertEqual(worker, w1()) 
@defer.inlineCallbacks def test_get_existing_masterid(self): worker = yield self.callGet(('masters', 14, 'workers', 2)) self.validateData(worker) worker['configured_on'] = sorted( worker['configured_on'], key=configuredOnKey) self.assertEqual(worker, w2(masterid=14)) @defer.inlineCallbacks def test_get_existing_builderid(self): worker = yield self.callGet(('builders', 40, 'workers', 2)) self.validateData(worker) worker['configured_on'] = sorted( worker['configured_on'], key=configuredOnKey) self.assertEqual(worker, w2(builderid=40)) @defer.inlineCallbacks def test_get_existing_masterid_builderid(self): worker = yield self.callGet(('masters', 13, 'builders', 40, 'workers', 2)) self.validateData(worker) worker['configured_on'] = sorted( worker['configured_on'], key=configuredOnKey) self.assertEqual(worker, w2(masterid=13, builderid=40)) @defer.inlineCallbacks def test_get_missing(self): worker = yield self.callGet(('workers', 99)) self.assertEqual(worker, None) @defer.inlineCallbacks def test_setWorkerState(self): yield self.master.data.updates.setWorkerState(2, True, False) worker = yield self.callGet(('workers', 2)) self.validateData(worker) self.assertEqual(worker['paused'], True) @defer.inlineCallbacks def test_actions(self): for action in ("stop", "pause", "unpause", "kill"): yield self.callControl(action, {}, ('masters', 13, 'builders', 40, 'workers', 2)) self.master.mq.assertProductions( [(('control', 'worker', '2', action), {'reason': 'no reason'})]) @defer.inlineCallbacks def test_bad_actions(self): with self.assertRaises(exceptions.InvalidControlException): yield self.callControl("bad_action", {}, ('masters', 13, 'builders', 40, 'workers', 2)) class WorkersEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = workers.WorkersEndpoint resourceTypeClass = workers.Worker def setUp(self): self.setUpEndpoint() return self.db.insertTestData(testData) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): workers = 
yield self.callGet(('workers',)) for b in workers: self.validateData(b) b['configured_on'] = sorted(b['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workers, key=configuredOnKey), sorted([w1(), w2()], key=configuredOnKey)) @defer.inlineCallbacks def test_get_masterid(self): workers = yield self.callGet(('masters', '13', 'workers',)) [self.validateData(b) for b in workers] [sorted(b['configured_on'], key=configuredOnKey) for b in workers] self.assertEqual(sorted(workers, key=configuredOnKey), sorted([w1(masterid=13), w2(masterid=13)], key=configuredOnKey)) @defer.inlineCallbacks def test_get_builderid(self): workers = yield self.callGet(('builders', '41', 'workers',)) [self.validateData(b) for b in workers] [sorted(b['configured_on'], key=configuredOnKey) for b in workers] self.assertEqual(sorted(workers, key=configuredOnKey), sorted([w2(builderid=41)], key=configuredOnKey)) @defer.inlineCallbacks def test_get_masterid_builderid(self): workers = yield self.callGet(('masters', '13', 'builders', '41', 'workers',)) [self.validateData(b) for b in workers] [sorted(b['configured_on'], key=configuredOnKey) for b in workers] self.assertEqual(sorted(workers, key=configuredOnKey), sorted([w2(masterid=13, builderid=41)], key=configuredOnKey)) @defer.inlineCallbacks def test_setWorkerStateFindByPaused(self): yield self.master.data.updates.setWorkerState(2, True, False) resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('paused', 'eq', [True])]) workers = yield self.callGet(('workers',), resultSpec=resultSpec) self.assertEqual(len(workers), 1) worker = workers[0] self.validateData(worker) self.assertEqual(worker['paused'], True) class Worker(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = workers.Worker(self.master) return self.master.db.insertTestData([ fakedb.Master(id=13), 
fakedb.Master(id=14), ]) def test_signature_findWorkerId(self): @self.assertArgSpecMatches( self.master.data.updates.findWorkerId, # fake self.rtype.findWorkerId) # real def findWorkerId(self, name): pass def test_signature_workerConfigured(self): @self.assertArgSpecMatches( self.master.data.updates.workerConfigured, # fake self.rtype.workerConfigured) # real def workerConfigured(self, workerid, masterid, builderids): pass def test_findWorkerId(self): # this just passes through to the db method, so test that rv = defer.succeed(None) self.master.db.workers.findWorkerId = \ mock.Mock(return_value=rv) self.assertIdentical(self.rtype.findWorkerId('foo'), rv) def test_findWorkerId_not_id(self): with self.assertRaises(ValueError): self.rtype.findWorkerId(b'foo') with self.assertRaises(ValueError): self.rtype.findWorkerId('123/foo') buildbot-3.4.0/master/buildbot/test/unit/db/000077500000000000000000000000001413250514000207375ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/db/__init__.py000066400000000000000000000000001413250514000230360ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/db/test_base.py000066400000000000000000000170341413250514000232670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import hashlib import sqlalchemy as sa import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.db import base from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.util import sautils class TestBase(unittest.TestCase): def setUp(self): meta = sa.MetaData() self.tbl = sautils.Table('tbl', meta, sa.Column('str32', sa.String(length=32)), sa.Column('txt', sa.Text)) self.db = mock.Mock() self.db.pool.engine.dialect.name = 'mysql' self.comp = base.DBConnectorComponent(self.db) def test_checkLength_ok(self): self.comp.checkLength(self.tbl.c.str32, "short string") def test_checkLength_long(self): with self.assertRaises(RuntimeError): self.comp.checkLength(self.tbl.c.str32, ("long string" * 5)) def test_ensureLength_ok(self): v = self.comp.ensureLength(self.tbl.c.str32, "short string") self.assertEqual(v, "short string") def test_ensureLength_long(self): v = self.comp.ensureLength(self.tbl.c.str32, "short string" * 5) self.assertEqual(v, "short stringshordacf5a81f8ae3873") self.comp.checkLength(self.tbl.c.str32, v) def test_checkLength_text(self): with self.assertRaises(AssertionError): self.comp.checkLength(self.tbl.c.txt, ("long string" * 5)) def test_checkLength_long_not_mysql(self): self.db.pool.engine.dialect.name = 'sqlite' self.comp.checkLength(self.tbl.c.str32, "long string" * 5) # run that again since the method gets stubbed out self.comp.checkLength(self.tbl.c.str32, "long string" * 5) def _sha1(self, s): return hashlib.sha1(s).hexdigest() def test_hashColumns_single(self): self.assertEqual(self.comp.hashColumns('master'), self._sha1(b'master')) def test_hashColumns_multiple(self): self.assertEqual(self.comp.hashColumns('a', None, 'b', 1), self._sha1(b'a\0\xf5\x00b\x001')) def test_hashColumns_None(self): self.assertEqual(self.comp.hashColumns(None), self._sha1(b'\xf5')) def test_hashColumns_integer(self): 
self.assertEqual(self.comp.hashColumns(11), self._sha1(b'11')) def test_hashColumns_unicode_ascii_match(self): self.assertEqual(self.comp.hashColumns('master'), self.comp.hashColumns('master')) class TestBaseAsConnectorComponent(unittest.TestCase, connector_component.ConnectorComponentMixin): @defer.inlineCallbacks def setUp(self): # this co-opts the masters table to test findSomethingId yield self.setUpConnectorComponent( table_names=['masters']) self.db.base = base.DBConnectorComponent(self.db) @defer.inlineCallbacks def test_findSomethingId_race(self): tbl = self.db.model.masters hash = hashlib.sha1(b'somemaster').hexdigest() def race_thd(conn): conn.execute(tbl.insert(), id=5, name='somemaster', name_hash=hash, active=1, last_active=1) id = yield self.db.base.findSomethingId( tbl=self.db.model.masters, whereclause=(tbl.c.name_hash == hash), insert_values=dict(name='somemaster', name_hash=hash, active=1, last_active=1), _race_hook=race_thd) self.assertEqual(id, 5) @defer.inlineCallbacks def test_findSomethingId_new(self): tbl = self.db.model.masters hash = hashlib.sha1(b'somemaster').hexdigest() id = yield self.db.base.findSomethingId( tbl=self.db.model.masters, whereclause=(tbl.c.name_hash == hash), insert_values=dict(name='somemaster', name_hash=hash, active=1, last_active=1)) self.assertEqual(id, 1) @defer.inlineCallbacks def test_findSomethingId_existing(self): tbl = self.db.model.masters hash = hashlib.sha1(b'somemaster').hexdigest() yield self.insertTestData([ fakedb.Master(id=7, name='somemaster', name_hash=hash), ]) id = yield self.db.base.findSomethingId( tbl=self.db.model.masters, whereclause=(tbl.c.name_hash == hash), insert_values=dict(name='somemaster', name_hash=hash, active=1, last_active=1)) self.assertEqual(id, 7) @defer.inlineCallbacks def test_findSomethingId_new_noCreate(self): tbl = self.db.model.masters hash = hashlib.sha1(b'somemaster').hexdigest() id = yield self.db.base.findSomethingId( tbl=self.db.model.masters, 
whereclause=(tbl.c.name_hash == hash), insert_values=dict(name='somemaster', name_hash=hash, active=1, last_active=1), autoCreate=False) self.assertEqual(id, None) class TestCachedDecorator(unittest.TestCase): def setUp(self): # set this to True to check that cache.get isn't called (for # no_cache=1) self.cache_get_raises_exception = False class TestConnectorComponent(base.DBConnectorComponent): invocations = None @base.cached("mycache") def getThing(self, key): if self.invocations is None: self.invocations = [] self.invocations.append(key) return defer.succeed(key * 2) def get_cache(self, cache_name, miss_fn): self.assertEqual(cache_name, "mycache") cache = mock.Mock(name="mycache") if self.cache_get_raises_exception: def ex(key): raise RuntimeError("cache.get called unexpectedly") cache.get = ex else: cache.get = miss_fn return cache # tests @defer.inlineCallbacks def test_cached(self): # attach it to the connector connector = mock.Mock(name="connector") connector.master.caches.get_cache = self.get_cache # build an instance comp = self.TestConnectorComponent(connector) # test it twice (to test an implementation detail) res1 = yield comp.getThing("foo") res2 = yield comp.getThing("bar") self.assertEqual((res1, res2, comp.invocations), ('foofoo', 'barbar', ['foo', 'bar'])) @defer.inlineCallbacks def test_cached_no_cache(self): # attach it to the connector connector = mock.Mock(name="connector") connector.master.caches.get_cache = self.get_cache self.cache_get_raises_exception = True # build an instance comp = self.TestConnectorComponent(connector) yield comp.getThing("foo", no_cache=1) buildbot-3.4.0/master/buildbot/test/unit/db/test_build_data.py000066400000000000000000000240421413250514000244420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from twisted.trial import unittest from buildbot.db import build_data from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation class Tests(interfaces.InterfaceTests): common_data = [ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.Builder(id=89, name='b2'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.BuildRequest(id=42, buildsetid=20, builderid=88), fakedb.BuildRequest(id=43, buildsetid=20, builderid=89), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Build(id=31, buildrequestid=42, number=8, masterid=88, builderid=88, workerid=47), fakedb.Build(id=40, buildrequestid=43, number=9, masterid=88, builderid=89, workerid=47), ] def test_signature_add_build_data(self): @self.assertArgSpecMatches(self.db.build_data.setBuildData) def setBuildData(self, buildid, name, value, source): pass def test_signature_get_build_data(self): @self.assertArgSpecMatches(self.db.build_data.getBuildData) def getBuildData(self, buildid, name): pass def test_signature_get_build_data_no_value(self): @self.assertArgSpecMatches(self.db.build_data.getBuildDataNoValue) def getBuildDataNoValue(self, buildid, name): pass def test_signature_get_all_build_data_no_values(self): 
@self.assertArgSpecMatches(self.db.build_data.getAllBuildDataNoValues) def getAllBuildDataNoValues(self, buildid): pass @defer.inlineCallbacks def test_add_data_get_data(self): yield self.insertTestData(self.common_data) yield self.db.build_data.setBuildData(buildid=30, name='mykey', value=b'myvalue', source='mysource') data_dict = yield self.db.build_data.getBuildData(buildid=30, name='mykey') validation.verifyDbDict(self, 'build_datadict', data_dict) self.assertEqual(data_dict, { 'buildid': 30, 'name': 'mykey', 'value': b'myvalue', 'length': 7, 'source': 'mysource' }) @defer.inlineCallbacks def test_get_data_non_existing(self): yield self.insertTestData(self.common_data) data_dict = yield self.db.build_data.getBuildData(buildid=30, name='mykey') self.assertIsNone(data_dict) @defer.inlineCallbacks def test_add_data_replace_value(self): yield self.insertTestData(self.common_data) yield self.db.build_data.setBuildData(buildid=30, name='mykey', value=b'myvalue', source='mysource') yield self.db.build_data.setBuildData(buildid=30, name='mykey', value=b'myvalue2', source='mysource2') data_dict = yield self.db.build_data.getBuildData(buildid=30, name='mykey') validation.verifyDbDict(self, 'build_datadict', data_dict) self.assertEqual(data_dict, { 'buildid': 30, 'name': 'mykey', 'value': b'myvalue2', 'length': 8, 'source': 'mysource2' }) @defer.inlineCallbacks def test_add_data_insert_race(self): yield self.insertTestData(self.common_data) def hook(conn): value = b'myvalue_old' insert_values = { 'buildid': 30, 'name': 'mykey', 'value': value, 'length': len(value), 'source': 'mysourec_old' } q = self.db.model.build_data.insert().values(insert_values) conn.execute(q) self.db.build_data._test_timing_hook = hook yield self.db.build_data.setBuildData(buildid=30, name='mykey', value=b'myvalue', source='mysource') data_dict = yield self.db.build_data.getBuildData(buildid=30, name='mykey') validation.verifyDbDict(self, 'build_datadict', data_dict) self.assertEqual(data_dict, { 
'buildid': 30, 'name': 'mykey', 'value': b'myvalue', 'length': 7, 'source': 'mysource' }) @defer.inlineCallbacks def test_add_data_get_data_no_value(self): yield self.insertTestData(self.common_data) yield self.db.build_data.setBuildData(buildid=30, name='mykey', value=b'myvalue', source='mysource') data_dict = yield self.db.build_data.getBuildDataNoValue(buildid=30, name='mykey') validation.verifyDbDict(self, 'build_datadict', data_dict) self.assertEqual(data_dict, { 'buildid': 30, 'name': 'mykey', 'value': None, 'length': 7, 'source': 'mysource' }) @defer.inlineCallbacks def test_get_data_no_values_non_existing(self): yield self.insertTestData(self.common_data) data_dict = yield self.db.build_data.getBuildDataNoValue(buildid=30, name='mykey') self.assertIsNone(data_dict) @defer.inlineCallbacks def test_get_all_build_data_no_values(self): yield self.insertTestData(self.common_data + [ fakedb.BuildData(id=91, buildid=30, name='name1', value=b'value1', source='source1'), fakedb.BuildData(id=92, buildid=30, name='name2', value=b'value2', source='source2'), fakedb.BuildData(id=93, buildid=31, name='name3', value=b'value3', source='source3'), ]) data_dicts = yield self.db.build_data.getAllBuildDataNoValues(30) self.assertEqual([d['name'] for d in data_dicts], ['name1', 'name2']) for d in data_dicts: validation.verifyDbDict(self, 'build_datadict', d) # note that value is not in dict, but length is self.assertEqual(data_dicts[0], { 'buildid': 30, 'name': 'name1', 'value': None, 'length': 6, 'source': 'source1' }) data_dicts = yield self.db.build_data.getAllBuildDataNoValues(31) self.assertEqual([d['name'] for d in data_dicts], ['name3']) data_dicts = yield self.db.build_data.getAllBuildDataNoValues(32) self.assertEqual([d['name'] for d in data_dicts], []) @parameterized.expand([ (1000000, 0, ['name1', 'name2', 'name3', 'name4', 'name5', 'name6']), (1000001, 0, ['name1', 'name2', 'name3', 'name4', 'name5', 'name6']), (1000002, 2, ['name1', 'name2', 'name5', 'name6']), 
(1000003, 3, ['name1', 'name2', 'name6']), (1000004, 4, ['name1', 'name2']), (1000005, 4, ['name1', 'name2']), ]) @defer.inlineCallbacks def test_remove_old_build_data(self, older_than_timestamp, exp_num_deleted, exp_remaining_names): yield self.insertTestData(self.common_data + [ fakedb.Build(id=50, buildrequestid=41, number=17, masterid=88, builderid=88, workerid=47, complete_at=None), fakedb.Build(id=51, buildrequestid=42, number=18, masterid=88, builderid=88, workerid=47, complete_at=1000001), fakedb.Build(id=52, buildrequestid=43, number=19, masterid=88, builderid=89, workerid=47, complete_at=1000002), fakedb.Build(id=53, buildrequestid=43, number=20, masterid=88, builderid=89, workerid=47, complete_at=1000003), fakedb.BuildData(id=91, buildid=50, name='name1', value=b'value1', source='src1'), fakedb.BuildData(id=92, buildid=50, name='name2', value=b'value2', source='src2'), fakedb.BuildData(id=93, buildid=51, name='name3', value=b'value3', source='src3'), fakedb.BuildData(id=94, buildid=51, name='name4', value=b'value4', source='src4'), fakedb.BuildData(id=95, buildid=52, name='name5', value=b'value5', source='src5'), fakedb.BuildData(id=96, buildid=53, name='name6', value=b'value6', source='src6'), ]) num_deleted = yield self.db.build_data.deleteOldBuildData(older_than_timestamp) self.assertEqual(num_deleted, exp_num_deleted) remaining_names = [] for buildid in [50, 51, 52, 53]: data_dicts = yield self.db.build_data.getAllBuildDataNoValues(buildid) remaining_names += [d['name'] for d in data_dicts] self.assertEqual(sorted(remaining_names), sorted(exp_remaining_names)) class TestFakeDB(Tests, connector_component.FakeConnectorComponentMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['builds', 'builders', 'masters', 'buildrequests', 
'buildsets', 'workers', 'build_data']) self.db.build_data = build_data.BuildDataConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_builders.py000066400000000000000000000254241413250514000241700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import builders from buildbot.db import tags from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation def builderKey(builder): return builder['id'] class Tests(interfaces.InterfaceTests): # common sample data builder_row = [ fakedb.Builder(id=7, name="some:builder"), ] # tests def test_signature_findBuilderId(self): @self.assertArgSpecMatches(self.db.builders.findBuilderId) def findBuilderId(self, name, autoCreate=True): pass def test_signature_addBuilderMaster(self): @self.assertArgSpecMatches(self.db.builders.addBuilderMaster) def addBuilderMaster(self, builderid=None, masterid=None): pass def test_signature_removeBuilderMaster(self): @self.assertArgSpecMatches(self.db.builders.removeBuilderMaster) def removeBuilderMaster(self, builderid=None, masterid=None): pass def 
test_signature_getBuilder(self): @self.assertArgSpecMatches(self.db.builders.getBuilder) def getBuilder(self, builderid): pass def test_signature_getBuilders(self): @self.assertArgSpecMatches(self.db.builders.getBuilders) def getBuilders(self, masterid=None): pass def test_signature_updateBuilderInfo(self): @self.assertArgSpecMatches(self.db.builders.updateBuilderInfo) def updateBuilderInfo(self, builderid, description, tags): pass @defer.inlineCallbacks def test_updateBuilderInfo(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder7'), fakedb.Builder(id=8, name='some:builder8'), ]) yield self.db.builders.updateBuilderInfo(7, 'a string which describe the builder', ['cat1', 'cat2']) yield self.db.builders.updateBuilderInfo(8, 'a string which describe the builder', []) builderdict7 = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict7) builderdict7['tags'].sort() # order is unspecified self.assertEqual(builderdict7, dict(id=7, name='some:builder7', tags=['cat1', 'cat2'], masterids=[], description='a string which describe the builder')) builderdict8 = yield self.db.builders.getBuilder(8) validation.verifyDbDict(self, 'builderdict', builderdict8) self.assertEqual(builderdict8, dict(id=8, name='some:builder8', tags=[], masterids=[], description='a string which describe the builder')) @defer.inlineCallbacks def test_findBuilderId_new(self): id = yield self.db.builders.findBuilderId('some:builder') builderdict = yield self.db.builders.getBuilder(id) self.assertEqual(builderdict, dict(id=id, name='some:builder', tags=[], masterids=[], description=None)) @defer.inlineCallbacks def test_findBuilderId_new_no_autoCreate(self): id = yield self.db.builders.findBuilderId('some:builder', autoCreate=False) self.assertIsNone(id) @defer.inlineCallbacks def test_findBuilderId_exists(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder'), ]) id = yield self.db.builders.findBuilderId('some:builder') 
self.assertEqual(id, 7) @defer.inlineCallbacks def test_addBuilderMaster(self): yield self.insertTestData([ fakedb.Builder(id=7), fakedb.Master(id=9, name='abc'), fakedb.Master(id=10, name='def'), fakedb.BuilderMaster(builderid=7, masterid=10), ]) yield self.db.builders.addBuilderMaster(builderid=7, masterid=9) builderdict = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(builderdict, dict(id=7, name='some:builder', tags=[], masterids=[9, 10], description=None)) @defer.inlineCallbacks def test_addBuilderMaster_already_present(self): yield self.insertTestData([ fakedb.Builder(id=7), fakedb.Master(id=9, name='abc'), fakedb.Master(id=10, name='def'), fakedb.BuilderMaster(builderid=7, masterid=9), ]) yield self.db.builders.addBuilderMaster(builderid=7, masterid=9) builderdict = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(builderdict, dict(id=7, name='some:builder', tags=[], masterids=[9], description=None)) @defer.inlineCallbacks def test_removeBuilderMaster(self): yield self.insertTestData([ fakedb.Builder(id=7), fakedb.Master(id=9, name='some:master'), fakedb.Master(id=10, name='other:master'), fakedb.BuilderMaster(builderid=7, masterid=9), fakedb.BuilderMaster(builderid=7, masterid=10), ]) yield self.db.builders.removeBuilderMaster(builderid=7, masterid=9) builderdict = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(builderdict, dict(id=7, name='some:builder', tags=[], masterids=[10], description=None)) @defer.inlineCallbacks def test_getBuilder_no_masters(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder'), ]) builderdict = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(builderdict, dict(id=7, name='some:builder', tags=[], masterids=[], description=None)) @defer.inlineCallbacks def 
test_getBuilder_with_masters(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder'), fakedb.Master(id=3, name='m1'), fakedb.Master(id=4, name='m2'), fakedb.BuilderMaster(builderid=7, masterid=3), fakedb.BuilderMaster(builderid=7, masterid=4), ]) builderdict = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(builderdict, dict(id=7, name='some:builder', tags=[], masterids=[3, 4], description=None)) @defer.inlineCallbacks def test_getBuilder_missing(self): builderdict = yield self.db.builders.getBuilder(7) self.assertEqual(builderdict, None) @defer.inlineCallbacks def test_getBuilders(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder'), fakedb.Builder(id=8, name='other:builder'), fakedb.Builder(id=9, name='third:builder'), fakedb.Master(id=3, name='m1'), fakedb.Master(id=4, name='m2'), fakedb.BuilderMaster(builderid=7, masterid=3), fakedb.BuilderMaster(builderid=8, masterid=3), fakedb.BuilderMaster(builderid=8, masterid=4), ]) builderlist = yield self.db.builders.getBuilders() for builderdict in builderlist: validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(sorted(builderlist, key=builderKey), sorted([ dict(id=7, name='some:builder', masterids=[ 3], tags=[], description=None), dict(id=8, name='other:builder', masterids=[ 3, 4], tags=[], description=None), dict(id=9, name='third:builder', masterids=[], tags=[], description=None), ], key=builderKey)) @defer.inlineCallbacks def test_getBuilders_masterid(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder'), fakedb.Builder(id=8, name='other:builder'), fakedb.Builder(id=9, name='third:builder'), fakedb.Master(id=3, name='m1'), fakedb.Master(id=4, name='m2'), fakedb.BuilderMaster(builderid=7, masterid=3), fakedb.BuilderMaster(builderid=8, masterid=3), fakedb.BuilderMaster(builderid=8, masterid=4), ]) builderlist = yield self.db.builders.getBuilders(masterid=3) for 
builderdict in builderlist: validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(sorted(builderlist, key=builderKey), sorted([ dict(id=7, name='some:builder', masterids=[ 3], tags=[], description=None), dict(id=8, name='other:builder', masterids=[ 3, 4], tags=[], description=None), ], key=builderKey)) @defer.inlineCallbacks def test_getBuilders_empty(self): builderlist = yield self.db.builders.getBuilders() self.assertEqual(sorted(builderlist), []) class RealTests(Tests): # tests that only "real" implementations will pass pass class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['builders', 'masters', 'builder_masters', 'builders_tags', 'tags']) self.db.builders = builders.BuildersConnectorComponent(self.db) self.db.tags = tags.TagsConnectorComponent(self.db) self.master = self.db.master self.master.db = self.db def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_buildrequests.py000066400000000000000000000707311413250514000252530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime from twisted.internet import defer from twisted.trial import unittest from buildbot.db import buildrequests from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import db from buildbot.test.util import interfaces from buildbot.util import UTC from buildbot.util import epoch2datetime class Tests(interfaces.InterfaceTests): # test that the datetime translations are done correctly by specifying # the epoch timestamp and datetime objects explicitly. These should # pass regardless of the local timezone used while running tests! CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC) CLAIMED_AT_EPOCH = 266761875 SUBMITTED_AT = datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC) SUBMITTED_AT_EPOCH = 298297875 COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC) COMPLETE_AT_EPOCH = 329920275 BSID = 567 BLDRID1 = 890 BLDRID2 = 891 BLDRID3 = 893 MASTER_ID = "set in setUp" OTHER_MASTER_ID = "set in setUp" def setUpTests(self): # set up a sourcestamp and buildset for use below self.MASTER_ID = fakedb.FakeBuildRequestsComponent.MASTER_ID self.OTHER_MASTER_ID = self.MASTER_ID + 1111 self.db.master.masterid = self.MASTER_ID return self.insertTestData([ fakedb.SourceStamp(id=234), fakedb.Master(id=self.MASTER_ID, name="fake master"), fakedb.Master(id=self.OTHER_MASTER_ID, name="other"), fakedb.Buildset(id=self.BSID), fakedb.Builder(id=self.BLDRID1, name="builder1"), fakedb.Builder(id=self.BLDRID2, name="builder2"), fakedb.Builder(id=self.BLDRID3, name="builder3"), fakedb.BuildsetSourceStamp(buildsetid=self.BSID, sourcestampid=234), ]) # tests @defer.inlineCallbacks def test_getBuildRequest(self): yield self.insertTestData([ 
fakedb.BuildRequest(id=44, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, results=75, priority=7, submitted_at=self.SUBMITTED_AT_EPOCH, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim( brid=44, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), ]) brdict = yield self.db.buildrequests.getBuildRequest(44) yield self.assertEqual(brdict, dict(buildrequestid=44, buildsetid=self.BSID, builderid=self.BLDRID1, buildername="builder1", priority=7, claimed=True, claimed_by_masterid=self.MASTER_ID, complete=True, results=75, claimed_at=self.CLAIMED_AT, submitted_at=self.SUBMITTED_AT, complete_at=self.COMPLETE_AT, waited_for=False)) @defer.inlineCallbacks def test_getBuildRequest_missing(self): brdict = yield self.db.buildrequests.getBuildRequest(44) self.assertEqual(brdict, None) @defer.inlineCallbacks def do_test_getBuildRequests_claim_args(self, **kwargs): expected = kwargs.pop('expected') yield self.insertTestData([ # 50: claimed by this master fakedb.BuildRequest( id=50, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequestClaim(brid=50, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 51: claimed by another master fakedb.BuildRequest( id=51, buildsetid=self.BSID, builderid=self.BLDRID2), fakedb.BuildRequestClaim(brid=51, masterid=self.OTHER_MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 52: unclaimed fakedb.BuildRequest( id=52, buildsetid=self.BSID, builderid=self.BLDRID1), # 53: unclaimed but complete (should not appear for claimed=False) fakedb.BuildRequest( id=53, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1), ]) brlist = yield self.db.buildrequests.getBuildRequests(**kwargs) self.assertEqual(sorted([br['buildrequestid'] for br in brlist]), sorted(expected)) def test_getBuildRequests_no_claimed_arg(self): return self.do_test_getBuildRequests_claim_args( expected=[50, 51, 52, 53]) def test_getBuildRequests_claimed_mine(self): return self.do_test_getBuildRequests_claim_args( claimed=self.MASTER_ID, 
expected=[50]) def test_getBuildRequests_claimed_true(self): return self.do_test_getBuildRequests_claim_args( claimed=True, expected=[50, 51]) def test_getBuildRequests_unclaimed(self): return self.do_test_getBuildRequests_claim_args( claimed=False, expected=[52]) @defer.inlineCallbacks def do_test_getBuildRequests_buildername_arg(self, **kwargs): expected = kwargs.pop('expected') yield self.insertTestData([ # 8: 'bb' fakedb.BuildRequest( id=8, buildsetid=self.BSID, builderid=self.BLDRID1), # 9: 'cc' fakedb.BuildRequest( id=9, buildsetid=self.BSID, builderid=self.BLDRID2), # 10: 'cc' fakedb.BuildRequest( id=10, buildsetid=self.BSID, builderid=self.BLDRID2), ]) brlist = yield self.db.buildrequests.getBuildRequests(**kwargs) self.assertEqual(sorted([br['buildrequestid'] for br in brlist]), sorted(expected)) @defer.inlineCallbacks def do_test_getBuildRequests_complete_arg(self, **kwargs): expected = kwargs.pop('expected') yield self.insertTestData([ # 70: incomplete fakedb.BuildRequest(id=70, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=None), # 80: complete fakedb.BuildRequest(id=80, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, complete_at=self.COMPLETE_AT_EPOCH), # 81: complete but no complete_at fakedb.BuildRequest(id=81, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, complete_at=0), # 82: complete_at set but complete is false, so not complete fakedb.BuildRequest(id=82, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=self.COMPLETE_AT_EPOCH), ]) brlist = yield self.db.buildrequests.getBuildRequests(**kwargs) self.assertEqual(sorted([br['buildrequestid'] for br in brlist]), sorted(expected)) def test_getBuildRequests_complete_none(self): return self.do_test_getBuildRequests_complete_arg( expected=[70, 80, 81, 82]) def test_getBuildRequests_complete_true(self): return self.do_test_getBuildRequests_complete_arg( complete=True, expected=[80, 81]) def test_getBuildRequests_complete_false(self): return 
self.do_test_getBuildRequests_complete_arg( complete=False, expected=[70, 82]) @defer.inlineCallbacks def test_getBuildRequests_bsid_arg(self): yield self.insertTestData([ # the buildset that we are *not* looking for fakedb.Buildset(id=self.BSID + 1), fakedb.BuildRequest(id=70, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=None), fakedb.BuildRequest(id=71, buildsetid=self.BSID + 1, builderid=self.BLDRID1, complete=0, complete_at=None), fakedb.BuildRequest(id=72, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=None), ]) brlist = yield self.db.buildrequests.getBuildRequests(bsid=self.BSID) self.assertEqual(sorted([br['buildrequestid'] for br in brlist]), sorted([70, 72])) @defer.inlineCallbacks def test_getBuildRequests_combo(self): yield self.insertTestData([ # 44: everything we want fakedb.BuildRequest(id=44, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, results=92, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim(brid=44, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 45: different builderid fakedb.BuildRequest(id=45, buildsetid=self.BSID, builderid=self.BLDRID2, complete=1, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim(brid=45, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 46: incomplete fakedb.BuildRequest(id=46, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, results=92, complete_at=0), fakedb.BuildRequestClaim(brid=46, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 47: unclaimed fakedb.BuildRequest(id=47, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, results=92, complete_at=self.COMPLETE_AT_EPOCH), # 48: claimed by other fakedb.BuildRequest(id=48, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, results=92, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim(brid=48, masterid=self.OTHER_MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 49: different bsid fakedb.Buildset(id=self.BSID + 1), 
fakedb.BuildRequest(id=49, buildsetid=self.BSID + 1, builderid=self.BLDRID1, complete=1, results=92, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim(brid=49, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), ]) brlist = yield self.db.buildrequests.getBuildRequests( builderid=self.BLDRID1, claimed=self.MASTER_ID, complete=True, bsid=self.BSID) self.assertEqual([br['buildrequestid'] for br in brlist], [44]) @defer.inlineCallbacks def do_test_getBuildRequests_branch_arg(self, **kwargs): expected = kwargs.pop('expected') yield self.insertTestData([ fakedb.Buildset(id=self.BSID + 1), fakedb.BuildRequest( id=70, buildsetid=self.BSID + 1, builderid=self.BLDRID1), fakedb.SourceStamp(id=self.BSID + 1, branch='branch_A'), fakedb.BuildsetSourceStamp(buildsetid=self.BSID + 1, sourcestampid=self.BSID + 1), fakedb.Buildset(id=self.BSID + 2), fakedb.BuildRequest( id=80, buildsetid=self.BSID + 2, builderid=self.BLDRID1), fakedb.SourceStamp(id=self.BSID + 2, repository='repository_A'), fakedb.BuildsetSourceStamp(buildsetid=self.BSID + 2, sourcestampid=self.BSID + 2), fakedb.Buildset(id=self.BSID + 3), fakedb.BuildRequest( id=90, buildsetid=self.BSID + 3, builderid=self.BLDRID1), fakedb.SourceStamp(id=self.BSID + 3, branch='branch_A', repository='repository_A'), fakedb.BuildsetSourceStamp(buildsetid=self.BSID + 3, sourcestampid=self.BSID + 3), # multiple sourcestamps on the same buildset are possible fakedb.SourceStamp(id=self.BSID + 4, branch='branch_B', repository='repository_B'), fakedb.BuildsetSourceStamp(buildsetid=self.BSID + 3, sourcestampid=self.BSID + 4), ]) brlist = yield self.db.buildrequests.getBuildRequests(**kwargs) self.assertEqual(sorted([br['buildrequestid'] for br in brlist]), sorted(expected)) def test_getBuildRequests_branch(self): return self.do_test_getBuildRequests_branch_arg(branch='branch_A', expected=[70, 90]) def test_getBuildRequests_branch_empty(self): return self.do_test_getBuildRequests_branch_arg(branch='absent_branch', 
expected=[]) def test_getBuildRequests_repository(self): return self.do_test_getBuildRequests_branch_arg( repository='repository_A', expected=[80, 90]) def test_getBuildRequests_repository_empty(self): return self.do_test_getBuildRequests_branch_arg( repository='absent_repository', expected=[]) def test_getBuildRequests_repository_and_branch(self): return self.do_test_getBuildRequests_branch_arg( repository='repository_A', branch='branch_A', expected=[90]) def test_getBuildRequests_no_repository_nor_branch(self): return self.do_test_getBuildRequests_branch_arg(expected=[70, 80, 90]) def failWithExpFailure(self, exc, expfailure=None): if not expfailure: raise exc self.flushLoggedErrors(expfailure) if isinstance(exc, expfailure): return raise exc @defer.inlineCallbacks def do_test_claimBuildRequests(self, rows, now, brids, expected=None, expfailure=None, claimed_at=None): self.reactor.advance(now) try: yield self.insertTestData(rows) yield self.db.buildrequests.claimBuildRequests(brids=brids, claimed_at=claimed_at) results = yield self.db.buildrequests.getBuildRequests() self.assertNotEqual(expected, None, "unexpected success from claimBuildRequests") self.assertEqual( sorted([(r['buildrequestid'], r['claimed_at'], r['claimed_by_masterid']) for r in results]), sorted(expected)) except Exception as e: self.failWithExpFailure(e, expfailure) def test_claimBuildRequests_single(self): return self.do_test_claimBuildRequests([ fakedb.BuildRequest( id=44, buildsetid=self.BSID, builderid=self.BLDRID1), ], 1300305712, [44], [(44, epoch2datetime(1300305712), self.MASTER_ID)]) def test_claimBuildRequests_single_explicit_claimed_at(self): return self.do_test_claimBuildRequests([ fakedb.BuildRequest( id=44, buildsetid=self.BSID, builderid=self.BLDRID1), ], 1300305712, [44], [(44, epoch2datetime(14000000), self.MASTER_ID)], claimed_at=epoch2datetime(14000000)) def test_claimBuildRequests_multiple(self): return self.do_test_claimBuildRequests( [ fakedb.BuildRequest( id=44, 
buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequest( id=45, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequest( id=46, buildsetid=self.BSID, builderid=self.BLDRID1), ], 1300305712, [44, 46], [ (44, epoch2datetime(1300305712), self.MASTER_ID), (45, None, None), (46, epoch2datetime(1300305712), self.MASTER_ID), ]) def test_claimBuildRequests_stress(self): return self.do_test_claimBuildRequests( [ fakedb.BuildRequest( id=id, buildsetid=self.BSID, builderid=self.BLDRID1) for id in range(1, 1000) ], 1300305713, list(range(1, 1000)), [ (id, epoch2datetime(1300305713), self.MASTER_ID) for id in range(1, 1000) ] ) def test_claimBuildRequests_other_master_claim(self): return self.do_test_claimBuildRequests([ fakedb.BuildRequest( id=44, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequestClaim(brid=44, masterid=self.OTHER_MASTER_ID, claimed_at=1300103810), ], 1300305712, [44], expfailure=buildrequests.AlreadyClaimedError) @db.skip_for_dialect('mysql') @defer.inlineCallbacks def test_claimBuildRequests_other_master_claim_stress(self): yield self.do_test_claimBuildRequests( [fakedb.BuildRequest(id=id, buildsetid=self.BSID, builderid=self.BLDRID1) for id in range(1, 1000)] + [ fakedb.BuildRequest( id=1000, buildsetid=self.BSID, builderid=self.BLDRID1), # the fly in the ointment.. 
fakedb.BuildRequestClaim(brid=1000, masterid=self.OTHER_MASTER_ID, claimed_at=1300103810), ], 1300305712, list(range(1, 1001)), expfailure=buildrequests.AlreadyClaimedError) results = yield self.db.buildrequests.getBuildRequests(claimed=True) # check that [1,1000) were not claimed, and 1000 is still claimed self.assertEqual([ (r['buildrequestid'], r[ 'claimed_by_masterid'], r['claimed_at']) for r in results ][:10], [ (1000, self.OTHER_MASTER_ID, epoch2datetime(1300103810)) ]) @defer.inlineCallbacks def test_claimBuildRequests_sequential(self): now = 120350934 self.reactor.advance(now) yield self.insertTestData([ fakedb.BuildRequest( id=44, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequest( id=45, buildsetid=self.BSID, builderid=self.BLDRID1), ]) yield self.db.buildrequests.claimBuildRequests(brids=[44]) yield self.db.buildrequests.claimBuildRequests(brids=[45]) results = yield self.db.buildrequests.getBuildRequests(claimed=False) self.assertEqual(results, []) @defer.inlineCallbacks def do_test_completeBuildRequests(self, rows, now, expected=None, expfailure=None, brids=None, complete_at=None): if brids is None: brids = [44] self.reactor.advance(now) try: yield self.insertTestData(rows) yield self.db.buildrequests.completeBuildRequests( brids=brids, results=7, complete_at=complete_at) results = yield self.db.buildrequests.getBuildRequests() self.assertNotEqual(expected, None, "unexpected success from completeBuildRequests") self.assertEqual(sorted( (r['buildrequestid'], r['complete'], r['results'], r['complete_at']) for r in results ), sorted(expected)) except Exception as e: self.failWithExpFailure(e, expfailure) def test_completeBuildRequests(self): return self.do_test_completeBuildRequests([ fakedb.BuildRequest( id=44, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequestClaim(brid=44, masterid=self.MASTER_ID, claimed_at=1300103810), ], 1300305712, [(44, True, 7, epoch2datetime(1300305712))]) def 
test_completeBuildRequests_explicit_time(self): return self.do_test_completeBuildRequests([ fakedb.BuildRequest( id=44, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequestClaim(brid=44, masterid=self.MASTER_ID, claimed_at=1300103810), ], 1300305712, [(44, True, 7, epoch2datetime(999999))], complete_at=epoch2datetime(999999)) def test_completeBuildRequests_multiple(self): return self.do_test_completeBuildRequests([ fakedb.BuildRequest( id=44, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequestClaim(brid=44, masterid=self.MASTER_ID, claimed_at=1300103810), fakedb.BuildRequest( id=45, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequestClaim(brid=45, masterid=self.OTHER_MASTER_ID, claimed_at=1300103811), fakedb.BuildRequest( id=46, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequestClaim(brid=46, masterid=self.MASTER_ID, claimed_at=1300103812), ], 1300305712, [(44, True, 7, epoch2datetime(1300305712)), (45, False, -1, None), (46, True, 7, epoch2datetime(1300305712)), ], brids=[44, 46]) def test_completeBuildRequests_stress(self): return self.do_test_completeBuildRequests([ fakedb.BuildRequest( id=id, buildsetid=self.BSID, builderid=self.BLDRID1) for id in range(1, 280) ] + [ fakedb.BuildRequestClaim(brid=id, masterid=self.MASTER_ID, claimed_at=1300103810) for id in range(1, 280) ], 1300305712, [(id, True, 7, epoch2datetime(1300305712)) for id in range(1, 280) ], brids=list(range(1, 280))) def test_completeBuildRequests_multiple_notmine(self): # note that the requests are completed even though they are not mine! 
return self.do_test_completeBuildRequests([ # two unclaimed requests fakedb.BuildRequest( id=44, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequest( id=45, buildsetid=self.BSID, builderid=self.BLDRID1), # and one claimed by another master fakedb.BuildRequest( id=46, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequestClaim(brid=46, masterid=self.OTHER_MASTER_ID, claimed_at=1300103812), ], 1300305712, [(44, True, 7, epoch2datetime(1300305712)), (45, True, 7, epoch2datetime(1300305712)), (46, True, 7, epoch2datetime(1300305712)), ], brids=[44, 45, 46]) def test_completeBuildRequests_already_completed(self): return self.do_test_completeBuildRequests([ fakedb.BuildRequest(id=44, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, complete_at=1300104190), ], 1300305712, expfailure=buildrequests.NotClaimedError) def test_completeBuildRequests_no_such(self): return self.do_test_completeBuildRequests([ fakedb.BuildRequest( id=45, buildsetid=self.BSID, builderid=self.BLDRID1), ], 1300305712, expfailure=buildrequests.NotClaimedError) @defer.inlineCallbacks def do_test_unclaimMethod(self, method, expected): yield self.insertTestData([ # 44: a complete build (should not be unclaimed) fakedb.BuildRequest(id=44, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, results=92, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim(brid=44, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 45: incomplete build belonging to this incarnation fakedb.BuildRequest(id=45, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=0), fakedb.BuildRequestClaim(brid=45, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 46: incomplete build belonging to another master fakedb.BuildRequest(id=46, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=0), fakedb.BuildRequestClaim(brid=46, masterid=self.OTHER_MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 47: unclaimed fakedb.BuildRequest(id=47, 
buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=0), # 48: claimed by this master, but recently fakedb.BuildRequest(id=48, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=0), fakedb.BuildRequestClaim(brid=48, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH - 50), # 49: incomplete old build belonging to another master fakedb.BuildRequest(id=49, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=0), fakedb.BuildRequestClaim(brid=49, masterid=self.OTHER_MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH - 1000), ]) yield method() # just select the unclaimed requests results = yield self.db.buildrequests.getBuildRequests(claimed=False) self.assertEqual(sorted([r['buildrequestid'] for r in results]), sorted(expected)) def test_unclaimBuildRequests(self): to_unclaim = [ 44, # completed -> should not be unclaimed 45, # incomplete -> unclaimed 46, # from another master -> not unclaimed 47, # unclaimed -> still unclaimed 48, # claimed -> unclaimed 49, # another master -> not unclaimed 50 # no such buildrequest -> no error ] return self.do_test_unclaimMethod( lambda: self.db.buildrequests.unclaimBuildRequests(to_unclaim), [45, 47, 48]) class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): # Compatibility with some checks in the "real" tests. 
class db_engine: class dialect: name = 'buildbot_fake' @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() yield self.setUpTests() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['patches', 'changes', 'builders', 'buildsets', 'buildset_properties', 'buildrequests', 'buildset_sourcestamps', 'masters', 'buildrequest_claims', 'sourcestamps', 'sourcestampsets', 'builds', 'workers', ]) self.db.buildrequests = \ buildrequests.BuildRequestsConnectorComponent(self.db) yield self.setUpTests() def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_builds.py000066400000000000000000000554021413250514000236400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import resultspec from buildbot.db import builds from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.util import epoch2datetime TIME1 = 1304262222 TIME2 = 1304262223 TIME3 = 1304262224 TIME4 = 1304262235 CREATED_AT = 927845299 class Tests(interfaces.InterfaceTests): # common sample data backgroundData = [ fakedb.Buildset(id=20), fakedb.Builder(id=77, name="b1"), fakedb.Builder(id=88, name="b2"), fakedb.BuildRequest(id=40, buildsetid=20, builderid=77), fakedb.BuildRequest(id=41, buildsetid=20, builderid=77), fakedb.BuildRequest(id=42, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Master(id=89, name="bar"), fakedb.Worker(id=13, name='wrk'), fakedb.Worker(id=12, name='sl2'), ] threeBuilds = [ fakedb.Build(id=50, buildrequestid=42, number=5, masterid=88, builderid=77, workerid=13, state_string="build 5", started_at=TIME1), fakedb.Build(id=51, buildrequestid=41, number=6, masterid=88, builderid=88, workerid=13, state_string="build 6", started_at=TIME2), fakedb.Build(id=52, buildrequestid=42, number=7, masterid=88, builderid=77, workerid=12, state_string="build 7", started_at=TIME3, complete_at=TIME4, results=5), ] threeBdicts = { 50: {'id': 50, 'buildrequestid': 42, 'builderid': 77, 'masterid': 88, 'number': 5, 'workerid': 13, 'started_at': epoch2datetime(TIME1), 'complete_at': None, 'state_string': 'build 5', 'results': None}, 51: {'id': 51, 'buildrequestid': 41, 'builderid': 88, 'masterid': 88, 'number': 6, 'workerid': 13, 'started_at': epoch2datetime(TIME2), 'complete_at': None, 'state_string': 'build 6', 'results': None}, 52: {'id': 52, 'buildrequestid': 42, 'builderid': 77, 'masterid': 88, 'number': 7, 'workerid': 12, 'started_at': epoch2datetime(TIME3), 'complete_at': epoch2datetime(TIME4), 
'state_string': 'build 7', 'results': 5}, } # signature tests def test_signature_getBuild(self): @self.assertArgSpecMatches(self.db.builds.getBuild) def getBuild(self, buildid): pass def test_signature_getBuildByNumber(self): @self.assertArgSpecMatches(self.db.builds.getBuildByNumber) def getBuild(self, builderid, number): pass def test_signature_getBuilds(self): @self.assertArgSpecMatches(self.db.builds.getBuilds) def getBuilds(self, builderid=None, buildrequestid=None, workerid=None, complete=None, resultSpec=None): pass def test_signature_addBuild(self): @self.assertArgSpecMatches(self.db.builds.addBuild) def addBuild(self, builderid, buildrequestid, workerid, masterid, state_string): pass def test_signature_setBuildStateString(self): @self.assertArgSpecMatches(self.db.builds.setBuildStateString) def setBuildStateString(self, buildid, state_string): pass def test_signature_finishBuild(self): @self.assertArgSpecMatches(self.db.builds.finishBuild) def finishBuild(self, buildid, results): pass def test_signature_getBuildProperties(self): @self.assertArgSpecMatches(self.db.builds.getBuildProperties) def getBuildProperties(self, bid, resultSpec=None): pass def test_signature_setBuildProperty(self): @self.assertArgSpecMatches(self.db.builds.setBuildProperty) def setBuildProperty(self, bid, name, value, source): pass # method tests @defer.inlineCallbacks def test_getBuild(self): yield self.insertTestData(self.backgroundData + [self.threeBuilds[0]]) bdict = yield self.db.builds.getBuild(50) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(bdict, dict(id=50, number=5, buildrequestid=42, masterid=88, builderid=77, workerid=13, started_at=epoch2datetime(TIME1), complete_at=None, state_string='build 5', results=None)) @defer.inlineCallbacks def test_getBuild_missing(self): bdict = yield self.db.builds.getBuild(50) self.assertEqual(bdict, None) @defer.inlineCallbacks def test_getBuildByNumber(self): yield self.insertTestData(self.backgroundData + 
[self.threeBuilds[0]]) bdict = yield self.db.builds.getBuildByNumber(builderid=77, number=5) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(bdict['id'], 50) @defer.inlineCallbacks def test_getBuilds(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds() for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[50], self.threeBdicts[51], self.threeBdicts[52]]) @defer.inlineCallbacks def test_getBuilds_builderid(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(builderid=88) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[51]]) @defer.inlineCallbacks def test_getBuilds_buildrequestid(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(buildrequestid=42) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[50], self.threeBdicts[52]]) @defer.inlineCallbacks def test_getBuilds_workerid(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(workerid=13) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[50], self.threeBdicts[51]]) def test_signature_getBuildsForChange(self): @self.assertArgSpecMatches(self.db.builds.getBuildsForChange) def getBuildsForChange(self, changeid): pass @defer.inlineCallbacks def do_test_getBuildsForChange(self, rows, changeid, expected): yield self.insertTestData(rows) builds = yield self.db.builds.getBuildsForChange(changeid) self.assertEqual(sorted(builds), sorted(expected)) def 
test_getBuildsForChange_OneCodebase(self): rows = [fakedb.Master(id=88, name="bar"), fakedb.Worker(id=13, name='one'), fakedb.Builder(id=77, name='A'), fakedb.SourceStamp(id=234, created_at=CREATED_AT, revision="aaa"), fakedb.Change(changeid=14, codebase='A', sourcestampid=234), fakedb.Buildset(id=30, reason='foo', submitted_at=1300305712, results=1), fakedb.BuildsetSourceStamp(sourcestampid=234, buildsetid=30), fakedb.BuildRequest(id=19, buildsetid=30, builderid=77, priority=13, submitted_at=1300305712, results=1, complete=0, complete_at=None), fakedb.Build(id=50, buildrequestid=19, number=5, masterid=88, builderid=77, state_string="test", workerid=13, started_at=1304262222, results=1), ] expected = [{ 'id': 50, 'number': 5, 'builderid': 77, 'buildrequestid': 19, 'workerid': 13, 'masterid': 88, 'started_at': epoch2datetime(1304262222), 'complete_at': None, 'state_string': 'test', 'results': 1}] return self.do_test_getBuildsForChange(rows, 14, expected) @defer.inlineCallbacks def test_getBuilds_complete(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(complete=True) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[52]]) @defer.inlineCallbacks def test_addBuild_first(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData) id, number = yield self.db.builds.addBuild(builderid=77, buildrequestid=41, workerid=13, masterid=88, state_string='test test2') bdict = yield self.db.builds.getBuild(id) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(bdict, {'buildrequestid': 41, 'builderid': 77, 'id': id, 'masterid': 88, 'number': number, 'workerid': 13, 'started_at': epoch2datetime(TIME1), 'complete_at': None, 'state_string': 'test test2', 'results': None}) @defer.inlineCallbacks def test_addBuild_existing(self): self.reactor.advance(TIME1) yield 
self.insertTestData(self.backgroundData + [ fakedb.Build(number=10, buildrequestid=41, builderid=77, masterid=88, workerid=13), ]) id, number = yield self.db.builds.addBuild(builderid=77, buildrequestid=41, workerid=13, masterid=88, state_string='test test2') bdict = yield self.db.builds.getBuild(id) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(number, 11) self.assertEqual(bdict, {'buildrequestid': 41, 'builderid': 77, 'id': id, 'masterid': 88, 'number': number, 'workerid': 13, 'started_at': epoch2datetime(TIME1), 'complete_at': None, 'state_string': 'test test2', 'results': None}) @defer.inlineCallbacks def test_setBuildStateString(self): yield self.insertTestData(self.backgroundData + [self.threeBuilds[0]]) yield self.db.builds.setBuildStateString(buildid=50, state_string='test test2') bdict = yield self.db.builds.getBuild(50) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(bdict, dict(id=50, number=5, buildrequestid=42, masterid=88, builderid=77, workerid=13, started_at=epoch2datetime(TIME1), complete_at=None, state_string='test test2', results=None)) @defer.inlineCallbacks def test_finishBuild(self): self.reactor.advance(TIME4) yield self.insertTestData(self.backgroundData + [self.threeBuilds[0]]) yield self.db.builds.finishBuild(buildid=50, results=7) bdict = yield self.db.builds.getBuild(50) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(bdict, dict(id=50, number=5, buildrequestid=42, masterid=88, builderid=77, workerid=13, started_at=epoch2datetime(TIME1), complete_at=epoch2datetime(TIME4), state_string='build 5', results=7)) @defer.inlineCallbacks def testgetBuildPropertiesEmpty(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) for buildid in (50, 51, 52): props = yield self.db.builds.getBuildProperties(buildid) self.assertEqual(0, len(props)) @defer.inlineCallbacks def test_testgetBuildProperties_resultSpecFilter(self): rs = resultspec.ResultSpec( 
filters=[resultspec.Filter('name', 'eq', ["prop", "prop2"])]) rs.fieldMapping = {'name': 'build_properties.name'} yield self.insertTestData(self.backgroundData + self.threeBuilds) yield self.db.builds.setBuildProperty(50, 'prop', 42, 'test') yield self.db.builds.setBuildProperty(50, 'prop2', 43, 'test') yield self.db.builds.setBuildProperty(50, 'prop3', 44, 'test') props = yield self.db.builds.getBuildProperties(50, resultSpec=rs) self.assertEqual(props, { 'prop': (42, 'test'), 'prop2': (43, 'test') }) rs = resultspec.ResultSpec( filters=[resultspec.Filter('name', 'eq', ["prop"])]) rs.fieldMapping = {'name': 'build_properties.name'} props = yield self.db.builds.getBuildProperties(50, resultSpec=rs) self.assertEqual(props, { 'prop': (42, 'test'), }) @defer.inlineCallbacks def testsetandgetProperties(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) yield self.db.builds.setBuildProperty(50, 'prop', 42, 'test') props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {'prop': (42, 'test')}) @defer.inlineCallbacks def testsetgetsetProperties(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {}) yield self.db.builds.setBuildProperty(50, 'prop', 42, 'test') props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {'prop': (42, 'test')}) # set a new value yield self.db.builds.setBuildProperty(50, 'prop', 45, 'test') props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {'prop': (45, 'test')}) # set a new source yield self.db.builds.setBuildProperty(50, 'prop', 45, 'test_source') props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {'prop': (45, 'test_source')}) # set the same yield self.db.builds.setBuildProperty(50, 'prop', 45, 'test_source') props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {'prop': (45, 'test_source')}) class 
RealTests(Tests): @defer.inlineCallbacks def test_addBuild_existing_race(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData) # add new builds at *just* the wrong time, repeatedly numbers = list(range(1, 8)) def raceHook(conn): if not numbers: return conn.execute(self.db.model.builds.insert(), {'number': numbers.pop(0), 'buildrequestid': 41, 'masterid': 88, 'workerid': 13, 'builderid': 77, 'started_at': TIME1, 'state_string': "hi"}) id, number = yield self.db.builds.addBuild(builderid=77, buildrequestid=41, workerid=13, masterid=88, state_string='test test2', _race_hook=raceHook) bdict = yield self.db.builds.getBuild(id) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(number, 8) self.assertEqual(bdict, {'buildrequestid': 41, 'builderid': 77, 'id': id, 'masterid': 88, 'number': number, 'workerid': 13, 'started_at': epoch2datetime(TIME1), 'complete_at': None, 'state_string': 'test test2', 'results': None}) @defer.inlineCallbacks def test_getBuilds_resultSpecFilter(self): rs = resultspec.ResultSpec( filters=[resultspec.Filter('complete_at', 'ne', [None])]) rs.fieldMapping = {'complete_at': 'builds.complete_at'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[52]]) @defer.inlineCallbacks def test_getBuilds_resultSpecOrder(self): rs = resultspec.ResultSpec(order=['-started_at']) rs.fieldMapping = {'started_at': 'builds.started_at'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) # applying the spec in the db layer should have emptied the order in # resultSpec self.assertEqual(rs.order, None) # assert applying the same order at the data layer will give the same # results rs = resultspec.ResultSpec(order=['-started_at']) 
ordered_bdicts = rs.apply(bdicts) self.assertEqual(ordered_bdicts, bdicts) # assert applying an opposite order at the data layer will give different # results rs = resultspec.ResultSpec(order=['started_at']) ordered_bdicts = rs.apply(bdicts) self.assertNotEqual(ordered_bdicts, bdicts) @defer.inlineCallbacks def test_getBuilds_limit(self): rs = resultspec.ResultSpec(order=['-started_at'], limit=1, offset=2) rs.fieldMapping = {'started_at': 'builds.started_at'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) # applying the spec in the db layer should have emptied the limit and # offset in resultSpec self.assertEqual(rs.limit, None) self.assertEqual(rs.offset, None) # assert applying the same filter at the data layer will give the same # results rs = resultspec.ResultSpec(order=['-started_at'], limit=1, offset=2) bdicts2 = yield self.db.builds.getBuilds() ordered_bdicts = rs.apply(bdicts2) self.assertEqual(ordered_bdicts, bdicts) @defer.inlineCallbacks def test_getBuilds_resultSpecFilterEqTwoValues(self): rs = resultspec.ResultSpec( filters=[resultspec.Filter('number', 'eq', [6, 7])]) rs.fieldMapping = {'number': 'builds.number'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[51], self.threeBdicts[52]]) @defer.inlineCallbacks def test_getBuilds_resultSpecFilterNeTwoValues(self): rs = resultspec.ResultSpec( filters=[resultspec.Filter('number', 'ne', [6, 7])]) rs.fieldMapping = {'number': 'builds.number'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[50]]) 
@defer.inlineCallbacks def test_getBuilds_resultSpecFilterContainsOneValue(self): rs = resultspec.ResultSpec( filters=[resultspec.Filter('state_string', 'contains', ['7'])]) rs.fieldMapping = {'state_string': 'builds.state_string'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[52]]) @defer.inlineCallbacks def test_getBuilds_resultSpecFilterContainsTwoValues(self): rs = resultspec.ResultSpec( filters=[resultspec.Filter('state_string', 'contains', ['build 5', 'build 6'])]) rs.fieldMapping = {'state_string': 'builds.state_string'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[50], self.threeBdicts[51]]) class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['builds', 'builders', 'masters', 'buildrequests', 'buildsets', 'workers', 'build_properties', 'changes', 'sourcestamps', 'buildset_sourcestamps', 'patches']) self.db.builds = builds.BuildsConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_buildsets.py000066400000000000000000000577361413250514000243700ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import json import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.db import buildsets from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import db from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.util import UTC from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class Tests(interfaces.InterfaceTests): def setUpTests(self): self.now = 9272359 self.reactor.advance(self.now) # set up a sourcestamp with id 234 for use below return self.insertTestData([ fakedb.SourceStamp(id=234), fakedb.Builder(id=1, name='bldr1'), fakedb.Builder(id=2, name='bldr2'), ]) def test_signature_addBuildset(self): @self.assertArgSpecMatches(self.db.buildsets.addBuildset) def addBuildset(self, sourcestamps, reason, properties, builderids, waited_for, external_idstring=None, submitted_at=None, parent_buildid=None, parent_relationship=None): pass def test_signature_completeBuildset(self): @self.assertArgSpecMatches(self.db.buildsets.completeBuildset) def completeBuildset(self, bsid, results, complete_at=None): pass def test_signature_getBuildset(self): @self.assertArgSpecMatches(self.db.buildsets.getBuildset) def getBuildset(self, bsid): pass def 
test_signature_getBuildsets(self): @self.assertArgSpecMatches(self.db.buildsets.getBuildsets) def getBuildsets(self, complete=None, resultSpec=None): pass def test_signature_getRecentBuildsets(self): @self.assertArgSpecMatches(self.db.buildsets.getRecentBuildsets) def getBuildsets(self, count=None, branch=None, repository=None, complete=None): pass def test_signature_getBuildsetProperties(self): @self.assertArgSpecMatches(self.db.buildsets.getBuildsetProperties) def getBuildsetProperties(self, key, no_cache=False): pass @defer.inlineCallbacks def test_addBuildset_getBuildset(self): bsid, brids = yield self.db.buildsets.addBuildset( sourcestamps=[234], reason='because', properties={}, builderids=[1], external_idstring='extid', waited_for=False) # TODO: verify buildrequests too bsdict = yield self.db.buildsets.getBuildset(bsid) validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdict, dict(external_idstring='extid', reason='because', sourcestamps=[234], submitted_at=datetime.datetime(1970, 4, 18, 7, 39, 19, tzinfo=UTC), complete=False, complete_at=None, results=-1, parent_buildid=None, parent_relationship=None, bsid=bsid)) @defer.inlineCallbacks def test_addBuildset_getBuildset_explicit_submitted_at(self): bsid_brids = yield self.db.buildsets.addBuildset( sourcestamps=[234], reason='because', properties={}, builderids=[1], external_idstring='extid', submitted_at=epoch2datetime(8888888), waited_for=False) bsdict = yield self.db.buildsets.getBuildset(bsid_brids[0]) validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdict, dict(external_idstring='extid', reason='because', sourcestamps=[234], submitted_at=datetime.datetime(1970, 4, 13, 21, 8, 8, tzinfo=UTC), complete=False, complete_at=None, results=-1, parent_buildid=None, parent_relationship=None, bsid=bsdict['bsid'])) @defer.inlineCallbacks def do_test_getBuildsetProperties(self, buildsetid, rows, expected): yield self.insertTestData(rows) props = yield 
self.db.buildsets.getBuildsetProperties(buildsetid) self.assertEqual(props, expected) def test_getBuildsetProperties_multiple(self): return self.do_test_getBuildsetProperties(91, [ fakedb.Buildset(id=91, complete=0, results=-1, submitted_at=0), fakedb.BuildsetProperty(buildsetid=91, property_name='prop1', property_value='["one", "fake1"]'), fakedb.BuildsetProperty(buildsetid=91, property_name='prop2', property_value='["two", "fake2"]'), ], dict(prop1=("one", "fake1"), prop2=("two", "fake2"))) def test_getBuildsetProperties_empty(self): return self.do_test_getBuildsetProperties(91, [ fakedb.Buildset(id=91, complete=0, results=-1, submitted_at=0), ], dict()) def test_getBuildsetProperties_nosuch(self): "returns an empty dict even if no such buildset exists" return self.do_test_getBuildsetProperties(91, [], dict()) @defer.inlineCallbacks def test_getBuildset_incomplete_zero(self): yield self.insertTestData([ fakedb.Buildset(id=91, complete=0, complete_at=0, results=-1, submitted_at=266761875, external_idstring='extid', reason='rsn'), fakedb.BuildsetSourceStamp(buildsetid=91, sourcestampid=234), ]) bsdict = yield self.db.buildsets.getBuildset(91) validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdict, dict(external_idstring='extid', reason='rsn', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC), complete=False, complete_at=epoch2datetime(0), results=-1, bsid=91, parent_buildid=None, parent_relationship=None)) @defer.inlineCallbacks def test_getBuildset_complete(self): yield self.insertTestData([ fakedb.Buildset(id=91, complete=1, complete_at=298297875, results=-1, submitted_at=266761875, external_idstring='extid', reason='rsn'), fakedb.BuildsetSourceStamp(buildsetid=91, sourcestampid=234), ]) bsdict = yield self.db.buildsets.getBuildset(91) validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdict, dict(external_idstring='extid', reason='rsn', sourcestamps=[234], submitted_at=datetime.datetime(1978, 
6, 15, 12, 31, 15, tzinfo=UTC), complete=True, complete_at=datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC), results=-1, bsid=91, parent_buildid=None, parent_relationship=None)) @defer.inlineCallbacks def test_getBuildset_nosuch(self): bsdict = yield self.db.buildsets.getBuildset(91) self.assertEqual(bsdict, None) def insert_test_getBuildsets_data(self): return self.insertTestData([ fakedb.Buildset(id=91, complete=0, complete_at=298297875, results=-1, submitted_at=266761875, external_idstring='extid', reason='rsn1'), fakedb.BuildsetSourceStamp(buildsetid=91, sourcestampid=234), fakedb.Buildset(id=92, complete=1, complete_at=298297876, results=7, submitted_at=266761876, external_idstring='extid', reason='rsn2'), fakedb.BuildsetSourceStamp(buildsetid=92, sourcestampid=234), ]) @defer.inlineCallbacks def test_getBuildsets_empty(self): bsdictlist = yield self.db.buildsets.getBuildsets() self.assertEqual(bsdictlist, []) @defer.inlineCallbacks def test_getBuildsets_all(self): yield self.insert_test_getBuildsets_data() bsdictlist = yield self.db.buildsets.getBuildsets() def bsdictKey(bsdict): return bsdict['reason'] for bsdict in bsdictlist: validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(sorted(bsdictlist, key=bsdictKey), sorted([ dict(external_idstring='extid', reason='rsn1', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC), complete=False, results=-1, bsid=91, parent_buildid=None, parent_relationship=None), dict(external_idstring='extid', reason='rsn2', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 16, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 16, tzinfo=UTC), complete=True, results=7, bsid=92, parent_buildid=None, parent_relationship=None), ], key=bsdictKey)) @defer.inlineCallbacks def test_getBuildsets_complete(self): yield self.insert_test_getBuildsets_data() bsdictlist = yield 
self.db.buildsets.getBuildsets(complete=True) for bsdict in bsdictlist: validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdictlist, [ dict(external_idstring='extid', reason='rsn2', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 16, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 16, tzinfo=UTC), complete=True, results=7, bsid=92, parent_buildid=None, parent_relationship=None), ]) @defer.inlineCallbacks def test_getBuildsets_incomplete(self): yield self.insert_test_getBuildsets_data() bsdictlist = yield self.db.buildsets.getBuildsets(complete=False) for bsdict in bsdictlist: validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdictlist, [ dict(external_idstring='extid', reason='rsn1', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC), complete=False, results=-1, bsid=91, parent_buildid=None, parent_relationship=None), ]) def test_completeBuildset_already_completed(self): d = self.insert_test_getBuildsets_data() d.addCallback(lambda _: self.db.buildsets.completeBuildset(bsid=92, results=6)) return self.assertFailure(d, buildsets.AlreadyCompleteError) def test_completeBuildset_missing(self): d = self.insert_test_getBuildsets_data() d.addCallback(lambda _: self.db.buildsets.completeBuildset(bsid=93, results=6)) return self.assertFailure(d, buildsets.AlreadyCompleteError) @defer.inlineCallbacks def test_completeBuildset(self): yield self.insert_test_getBuildsets_data() yield self.db.buildsets.completeBuildset(bsid=91, results=6) bsdicts = yield self.db.buildsets.getBuildsets() bsdicts = [(bsdict['bsid'], bsdict['complete'], datetime2epoch(bsdict['complete_at']), bsdict['results']) for bsdict in bsdicts] self.assertEqual(sorted(bsdicts), sorted([ (91, 1, self.now, 6), (92, 1, 298297876, 7)])) @defer.inlineCallbacks def test_completeBuildset_explicit_complete_at(self): yield 
self.insert_test_getBuildsets_data() yield self.db.buildsets.completeBuildset(bsid=91, results=6, complete_at=epoch2datetime(72759)) bsdicts = yield self.db.buildsets.getBuildsets() bsdicts = [(bsdict['bsid'], bsdict['complete'], datetime2epoch(bsdict['complete_at']), bsdict['results']) for bsdict in bsdicts] self.assertEqual(sorted(bsdicts), sorted([ (91, 1, 72759, 6), (92, 1, 298297876, 7)])) def insert_test_getRecentBuildsets_data(self): return self.insertTestData([ fakedb.SourceStamp(id=91, branch='branch_a', repository='repo_a'), fakedb.Buildset(id=91, complete=0, complete_at=298297875, results=-1, submitted_at=266761875, external_idstring='extid', reason='rsn1'), fakedb.BuildsetSourceStamp(buildsetid=91, sourcestampid=91), fakedb.Buildset(id=92, complete=1, complete_at=298297876, results=7, submitted_at=266761876, external_idstring='extid', reason='rsn2'), fakedb.BuildsetSourceStamp(buildsetid=92, sourcestampid=91), # buildset unrelated to the change fakedb.Buildset(id=93, complete=1, complete_at=298297877, results=7, submitted_at=266761877, external_idstring='extid', reason='rsn2'), ]) @defer.inlineCallbacks def test_getRecentBuildsets_all(self): yield self.insert_test_getRecentBuildsets_data() bsdictlist = yield self.db.buildsets.getRecentBuildsets(2, branch='branch_a', repository='repo_a') self.assertEqual(bsdictlist, [ dict(external_idstring='extid', reason='rsn1', sourcestamps=[91], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC), complete=False, results=-1, bsid=91, parent_buildid=None, parent_relationship=None), dict(external_idstring='extid', reason='rsn2', sourcestamps=[91], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 16, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 16, tzinfo=UTC), complete=True, results=7, bsid=92, parent_buildid=None, parent_relationship=None) ]) @defer.inlineCallbacks def test_getRecentBuildsets_one(self): yield 
self.insert_test_getRecentBuildsets_data() bsdictlist = yield self.db.buildsets.getRecentBuildsets(1, branch='branch_a', repository='repo_a') self.assertEqual(bsdictlist, [ dict(external_idstring='extid', reason='rsn2', sourcestamps=[91], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 16, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 16, tzinfo=UTC), complete=True, results=7, bsid=92, parent_buildid=None, parent_relationship=None), ]) @defer.inlineCallbacks def test_getRecentBuildsets_zero(self): yield self.insert_test_getRecentBuildsets_data() bsdictlist = yield self.db.buildsets.getRecentBuildsets(0, branch='branch_a', repository='repo_a') self.assertEqual(bsdictlist, []) @defer.inlineCallbacks def test_getRecentBuildsets_noBranchMatch(self): yield self.insert_test_getRecentBuildsets_data() bsdictlist = yield self.db.buildsets.getRecentBuildsets(2, branch='bad_branch', repository='repo_a') self.assertEqual(bsdictlist, []) @defer.inlineCallbacks def test_getRecentBuildsets_noRepoMatch(self): yield self.insert_test_getRecentBuildsets_data() bsdictlist = yield self.db.buildsets.getRecentBuildsets(2, branch='branch_a', repository='bad_repo') self.assertEqual(bsdictlist, []) class RealTests(Tests): @defer.inlineCallbacks def test_addBuildset_simple(self): (bsid, brids) = yield self.db.buildsets.addBuildset( sourcestamps=[234], reason='because', properties={}, builderids=[2], external_idstring='extid', waited_for=True) def thd(conn): # we should only have one brid self.assertEqual(len(brids), 1) # should see one buildset row r = conn.execute(self.db.model.buildsets.select()) rows = [(row.id, row.external_idstring, row.reason, row.complete, row.complete_at, row.submitted_at, row.results) for row in r.fetchall()] self.assertEqual(rows, [(bsid, 'extid', 'because', 0, None, self.now, -1)]) # one buildrequests row r = conn.execute(self.db.model.buildrequests.select()) self.assertEqual(r.keys(), ['id', 'buildsetid', 'builderid', 'priority', 'complete', 
'results', 'submitted_at', 'complete_at', 'waited_for']) self.assertEqual(r.fetchall(), [(bsid, brids[2], 2, 0, 0, -1, self.now, None, 1)]) # one buildset_sourcestamps row r = conn.execute(self.db.model.buildset_sourcestamps.select()) self.assertEqual( list(r.keys()), ['id', 'buildsetid', 'sourcestampid']) self.assertEqual(r.fetchall(), [(1, bsid, 234)]) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_addBuildset_bigger(self): props = dict(prop=(['list'], 'test')) yield defer.succeed(None) xxx_todo_changeme1 = yield self.db.buildsets.addBuildset( sourcestamps=[234], reason='because', waited_for=False, properties=props, builderids=[1, 2]) (bsid, brids) = xxx_todo_changeme1 def thd(conn): self.assertEqual(len(brids), 2) # should see one buildset row r = conn.execute(self.db.model.buildsets.select()) rows = [(row.id, row.external_idstring, row.reason, row.complete, row.complete_at, row.results) for row in r.fetchall()] self.assertEqual(rows, [(bsid, None, 'because', 0, None, -1)]) # one property row r = conn.execute(self.db.model.buildset_properties.select()) rows = [(row.buildsetid, row.property_name, row.property_value) for row in r.fetchall()] self.assertEqual(rows, [(bsid, 'prop', json.dumps([['list'], 'test']))]) # one buildset_sourcestamps row r = conn.execute(self.db.model.buildset_sourcestamps.select()) rows = [(row.buildsetid, row.sourcestampid) for row in r.fetchall()] self.assertEqual(rows, [(bsid, 234)]) # and two buildrequests rows (and don't re-check the default # columns) r = conn.execute(self.db.model.buildrequests.select()) rows = [(row.buildsetid, row.id, row.builderid) for row in r.fetchall()] # we don't know which of the brids is assigned to which # buildername, but either one will do self.assertEqual(sorted(rows), [(bsid, brids[1], 1), (bsid, brids[2], 2)]) yield self.db.pool.do(thd) class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield 
self.setUpConnectorComponent() yield self.setUpTests() @defer.inlineCallbacks def test_addBuildset_bad_waited_for(self): # only the fake db asserts on the type of waited_for d = self.db.buildsets.addBuildset(sourcestamps=[234], reason='because', properties={}, builderids=[1], external_idstring='extid', waited_for='wat') yield self.assertFailure(d, AssertionError) class TestRealDB(db.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['patches', 'buildsets', 'buildset_properties', 'objects', 'buildrequests', 'sourcestamps', 'buildset_sourcestamps', 'builders', 'builds', 'masters', 'workers']) self.db.buildsets = buildsets.BuildsetsConnectorComponent(self.db) yield self.setUpTests() def tearDown(self): return self.tearDownConnectorComponent() @defer.inlineCallbacks def test_addBuildset_properties_cache(self): """ Test that `addChange` properly seeds the `getChange` cache. """ # Patchup the buildset properties cache so we can verify that # it got called form `addBuildset`. mockedCachePut = mock.Mock() self.patch( self.db.buildsets.getBuildsetProperties.cache, "put", mockedCachePut) # Setup a dummy set of properties to insert with the buildset. props = dict(prop=(['list'], 'test')) # Now, call `addBuildset`, and verify that the above properties # were seed in the `getBuildsetProperties` cache. bsid, _ = yield self.db.buildsets.addBuildset( sourcestamps=[234], reason='because', properties=props, builderids=[1, 2], waited_for=False) mockedCachePut.assert_called_once_with(bsid, props) buildbot-3.4.0/master/buildbot/test/unit/db/test_changes.py000066400000000000000000000734011413250514000237650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.internet import defer from twisted.trial import unittest from buildbot.data import resultspec from buildbot.data.changes import FixerMixin from buildbot.db import builds from buildbot.db import changes from buildbot.db import sourcestamps from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.util import epoch2datetime SOMETIME = 20398573 OTHERTIME = 937239287 class Tests(interfaces.InterfaceTests): # common sample data change13_rows = [ fakedb.SourceStamp(id=92, branch="thirteen"), fakedb.Change(changeid=13, author="dustin", comments="fix spelling", branch="master", revision="deadbeef", committer="justin", when_timestamp=266738400, revlink=None, category=None, repository='', codebase='', project='', sourcestampid=92), fakedb.ChangeFile(changeid=13, filename='master/README.txt'), fakedb.ChangeFile(changeid=13, filename='worker/README.txt'), fakedb.ChangeProperty(changeid=13, property_name='notest', property_value='["no","Change"]'), ] change14_rows = [ fakedb.SourceStamp(id=233, branch="fourteen"), fakedb.Change(changeid=14, author="warner", comments="fix whitespace", branch="warnerdb", revision="0e92a098b", committer="david", when_timestamp=266738404, revlink='http://warner/0e92a098b', category='devel', repository='git://warner', codebase='mainapp', project='Buildbot', sourcestampid=233), fakedb.ChangeFile(changeid=14, 
filename='master/buildbot/__init__.py'), ] change14_dict = { 'changeid': 14, 'parent_changeids': [], 'author': 'warner', 'committer': 'david', 'branch': 'warnerdb', 'category': 'devel', 'comments': 'fix whitespace', 'files': ['master/buildbot/__init__.py'], 'project': 'Buildbot', 'properties': {}, 'repository': 'git://warner', 'codebase': 'mainapp', 'revision': '0e92a098b', 'revlink': 'http://warner/0e92a098b', 'when_timestamp': epoch2datetime(266738404), 'sourcestampid': 233, } # tests def test_signature_addChange(self): @self.assertArgSpecMatches(self.db.changes.addChange) def addChange(self, author=None, committer=None, files=None, comments=None, is_dir=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase='', project='', uid=None): pass def test_signature_getChange(self): @self.assertArgSpecMatches(self.db.changes.getChange) def getChange(self, key, no_cache=False): pass @defer.inlineCallbacks def test_addChange_getChange(self): self.reactor.advance(SOMETIME) changeid = yield self.db.changes.addChange( author='dustin', committer='justin', files=[], comments='fix spelling', revision='2d6caa52', when_timestamp=epoch2datetime(OTHERTIME), branch='master', category=None, revlink=None, properties={}, repository='repo://', codebase='cb', project='proj') chdict = yield self.db.changes.getChange(changeid) validation.verifyDbDict(self, 'chdict', chdict) chdict = chdict.copy() ss = yield self.db.sourcestamps.getSourceStamp(chdict['sourcestampid']) chdict['sourcestampid'] = ss self.assertEqual(chdict, { 'author': 'dustin', 'committer': 'justin', 'branch': 'master', 'category': None, 'changeid': changeid, 'parent_changeids': [], 'codebase': 'cb', 'comments': 'fix spelling', 'files': [], 'project': 'proj', 'properties': {}, 'repository': 'repo://', 'revision': '2d6caa52', 'revlink': None, 'sourcestampid': { 'branch': 'master', 'codebase': 'cb', 'patch_author': None, 'patch_body': None, 'patch_comment': 
None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'proj', 'repository': 'repo://', 'revision': '2d6caa52', 'created_at': epoch2datetime(SOMETIME), 'ssid': ss['ssid'], }, 'when_timestamp': epoch2datetime(OTHERTIME), }) @defer.inlineCallbacks def test_addChange_withParent(self): yield self.insertTestData(self.change14_rows) self.reactor.advance(SOMETIME) changeid = yield self.db.changes.addChange( author='delanne', committer='melanne', files=[], comments='child of changeid14', revision='50adad56', when_timestamp=epoch2datetime(OTHERTIME), branch='warnerdb', category='devel', revlink=None, properties={}, repository='git://warner', codebase='mainapp', project='Buildbot') chdict = yield self.db.changes.getChange(changeid) validation.verifyDbDict(self, 'chdict', chdict) chdict = chdict.copy() ss = yield self.db.sourcestamps.getSourceStamp(chdict['sourcestampid']) chdict['sourcestampid'] = ss self.assertEqual(chdict, { 'author': 'delanne', 'committer': 'melanne', 'branch': 'warnerdb', 'category': 'devel', 'changeid': changeid, 'parent_changeids': [14], 'codebase': 'mainapp', 'comments': 'child of changeid14', 'files': [], 'project': 'Buildbot', 'properties': {}, 'repository': 'git://warner', 'revision': '50adad56', 'revlink': None, 'sourcestampid': { 'branch': 'warnerdb', 'codebase': 'mainapp', 'created_at': epoch2datetime(SOMETIME), 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'Buildbot', 'repository': 'git://warner', 'revision': '50adad56', 'ssid': ss['ssid'] }, 'when_timestamp': epoch2datetime(OTHERTIME), }) @defer.inlineCallbacks def test_getChange_chdict(self): yield self.insertTestData(self.change14_rows) chdict = yield self.db.changes.getChange(14) validation.verifyDbDict(self, 'chdict', chdict) self.assertEqual(chdict, self.change14_dict) @defer.inlineCallbacks def test_getChange_missing(self): chdict = yield self.db.changes.getChange(14) 
self.assertTrue(chdict is None) def test_signature_getChangeUids(self): @self.assertArgSpecMatches(self.db.changes.getChangeUids) def getChangeUids(self, changeid): pass @defer.inlineCallbacks def test_getChangeUids_missing(self): res = yield self.db.changes.getChangeUids(1) self.assertEqual(res, []) @defer.inlineCallbacks def test_getChangeUids_found(self): yield self.insertTestData(self.change14_rows + [ fakedb.SourceStamp(id=92), fakedb.User(uid=1), fakedb.ChangeUser(changeid=14, uid=1), ]) res = yield self.db.changes.getChangeUids(14) self.assertEqual(res, [1]) @defer.inlineCallbacks def test_getChangeUids_multi(self): yield self.insertTestData(self.change14_rows + self.change13_rows + [ fakedb.User(uid=1, identifier="one"), fakedb.User(uid=2, identifier="two"), fakedb.User(uid=99, identifier="nooo"), fakedb.ChangeUser(changeid=14, uid=1), fakedb.ChangeUser(changeid=14, uid=2), fakedb.ChangeUser(changeid=13, uid=99), # not selected ]) res = yield self.db.changes.getChangeUids(14) self.assertEqual(sorted(res), [1, 2]) def test_signature_getChanges(self): @self.assertArgSpecMatches(self.db.changes.getChanges) def getChanges(self, resultSpec=None): pass def insert7Changes(self): return self.insertTestData([ fakedb.SourceStamp(id=922), fakedb.Change(changeid=8, sourcestampid=922), fakedb.Change(changeid=9, sourcestampid=922), fakedb.Change(changeid=10, sourcestampid=922), fakedb.Change(changeid=11, sourcestampid=922), fakedb.Change(changeid=12, sourcestampid=922), ] + self.change13_rows + self.change14_rows) @defer.inlineCallbacks def test_getChanges_subset(self): yield self.insert7Changes() rs = resultspec.ResultSpec(order=['-changeid'], limit=5) rs.fieldMapping = FixerMixin.fieldMapping changes = yield self.db.changes.getChanges(resultSpec=rs) changeids = [c['changeid'] for c in changes] self.assertEqual(changeids, [10, 11, 12, 13, 14]) @defer.inlineCallbacks def test_getChangesCount(self): yield self.insert7Changes() n = yield self.db.changes.getChangesCount() 
self.assertEqual(n, 7) @defer.inlineCallbacks def test_getChangesHugeCount(self): yield self.insertTestData([ fakedb.SourceStamp(id=92), ] + [ fakedb.Change(changeid=i) for i in range(2, 102)]) n = yield self.db.changes.getChangesCount() self.assertEqual(n, 100) @defer.inlineCallbacks def test_getChanges_empty(self): rs = resultspec.ResultSpec(order=['-changeid'], limit=5) changes = yield self.db.changes.getChanges(resultSpec=rs) changeids = [c['changeid'] for c in changes] self.assertEqual(changeids, []) yield self.db.changes.getChanges() changeids = [c['changeid'] for c in changes] self.assertEqual(changeids, []) @defer.inlineCallbacks def test_getChanges_missing(self): yield self.insertTestData(self.change13_rows + self.change14_rows) def check(changes): # requested all, but only got 2 # sort by changeid, since we assert on change 13 at index 0 changes.sort(key=lambda c: c['changeid']) changeids = [c['changeid'] for c in changes] self.assertEqual(changeids, [13, 14]) # double-check that they have .files, etc. 
self.assertEqual(sorted(changes[0]['files']), sorted(['master/README.txt', 'worker/README.txt'])) self.assertEqual(changes[0]['properties'], {'notest': ('no', 'Change')}) rs = resultspec.ResultSpec(order=['-changeid'], limit=5) changes = yield self.db.changes.getChanges(resultSpec=rs) check(changes) changes = yield self.db.changes.getChanges() check(changes) def test_signature_getLatestChangeid(self): @self.assertArgSpecMatches(self.db.changes.getLatestChangeid) def getLatestChangeid(self): pass @defer.inlineCallbacks def test_getLatestChangeid(self): yield self.insertTestData(self.change13_rows) changeid = yield self.db.changes.getLatestChangeid() self.assertEqual(changeid, 13) @defer.inlineCallbacks def test_getLatestChangeid_empty(self): changeid = yield self.db.changes.getLatestChangeid() self.assertEqual(changeid, None) def test_signature_getParentChangeIds(self): @self.assertArgSpecMatches(self.db.changes.getParentChangeIds) def getParentChangeIds(self, branch, repository, project, codebase): pass @defer.inlineCallbacks def test_getParentChangeIds(self): yield self.insertTestData(self.change14_rows + self.change13_rows) changeid = yield self.db.changes.getParentChangeIds(branch='warnerdb', repository='git://warner', project='Buildbot', codebase='mainapp') self.assertEqual(changeid, [14]) class RealTests(Tests): # tests that only "real" implementations will pass @defer.inlineCallbacks def test_addChange(self): self.reactor.advance(SOMETIME) changeid = yield self.db.changes.addChange( author='dustin', committer='justin', files=['master/LICENSING.txt', 'worker/LICENSING.txt'], comments='fix spelling', revision='2d6caa52', when_timestamp=epoch2datetime(266738400), branch='master', category=None, revlink=None, properties={'platform': ('linux', 'Change')}, repository='', codebase='cb', project='') # check all of the columns of the four relevant tables def thd_change(conn): self.assertEqual(changeid, 1) r = conn.execute(self.db.model.changes.select()) r = 
r.fetchall() self.assertEqual(len(r), 1) self.assertEqual(r[0].changeid, changeid) self.assertEqual(r[0].author, 'dustin') self.assertEqual(r[0].committer, 'justin') self.assertEqual(r[0].comments, 'fix spelling') self.assertEqual(r[0].branch, 'master') self.assertEqual(r[0].revision, '2d6caa52') self.assertEqual(r[0].when_timestamp, 266738400) self.assertEqual(r[0].category, None) self.assertEqual(r[0].repository, '') self.assertEqual(r[0].codebase, 'cb') self.assertEqual(r[0].project, '') self.assertEqual(r[0].sourcestampid, 1) yield self.db.pool.do(thd_change) def thd_change_files(conn): query = self.db.model.change_files.select() query.where(self.db.model.change_files.c.changeid == 1) query.order_by(self.db.model.change_files.c.filename) r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 2) self.assertEqual(r[0].filename, 'master/LICENSING.txt') self.assertEqual(r[1].filename, 'worker/LICENSING.txt') yield self.db.pool.do(thd_change_files) def thd_change_properties(conn): query = self.db.model.change_properties.select() query.where(self.db.model.change_properties.c.changeid == 1) query.order_by(self.db.model.change_properties.c.property_name) r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 1) self.assertEqual(r[0].property_name, 'platform') self.assertEqual(r[0].property_value, '["linux", "Change"]') yield self.db.pool.do(thd_change_properties) def thd_change_users(conn): query = self.db.model.change_users.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change_users) def thd_change_sourcestamps(conn): query = self.db.model.sourcestamps.select() r = conn.execute(query) self.assertEqual([dict(row) for row in r.fetchall()], [{ 'branch': 'master', 'codebase': 'cb', 'id': 1, 'patchid': None, 'project': '', 'repository': '', 'revision': '2d6caa52', 'created_at': SOMETIME, 'ss_hash': 'b777dbd10d1d4c76651335f6a78e278e88b010d6', }]) yield self.db.pool.do(thd_change_sourcestamps) 
@defer.inlineCallbacks def test_addChange_when_timestamp_None(self): self.reactor.advance(OTHERTIME) changeid = yield self.db.changes.addChange( author='dustin', committer='justin', files=[], comments='fix spelling', revision='2d6caa52', when_timestamp=None, branch='master', category=None, revlink=None, properties={}, repository='', codebase='', project='') # check all of the columns of the four relevant tables def thd(conn): r = conn.execute(self.db.model.changes.select()) r = r.fetchall() self.assertEqual(len(r), 1) self.assertEqual(r[0].changeid, changeid) self.assertEqual(r[0].when_timestamp, OTHERTIME) yield self.db.pool.do(thd) def thd_change(conn): query = self.db.model.change_files.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change) def thd_change_file(conn): query = self.db.model.change_properties.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change_file) def thd_change_properties(conn): query = self.db.model.change_users.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change_properties) @defer.inlineCallbacks def test_addChange_with_uid(self): yield self.insertTestData([ fakedb.User(uid=1, identifier="one"), ]) changeid = yield self.db.changes.addChange( author='dustin', committer='justin', files=[], comments='fix spelling', revision='2d6caa52', when_timestamp=epoch2datetime(OTHERTIME), branch='master', category=None, revlink=None, properties={}, repository='', codebase='', project='', uid=1) # check all of the columns of the five relevant tables def thd_change(conn): r = conn.execute(self.db.model.changes.select()) r = r.fetchall() self.assertEqual(len(r), 1) self.assertEqual(r[0].changeid, changeid) self.assertEqual(r[0].when_timestamp, OTHERTIME) yield self.db.pool.do(thd_change) def thd_change_files(conn): query = self.db.model.change_files.select() r = conn.execute(query) r = 
r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change_files) def thd_change_properties(conn): query = self.db.model.change_properties.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change_properties) def thd_change_users(conn): query = self.db.model.change_users.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 1) self.assertEqual(r[0].changeid, 1) self.assertEqual(r[0].uid, 1) yield self.db.pool.do(thd_change_users) @defer.inlineCallbacks def test_pruneChanges(self): yield self.insertTestData([ fakedb.Scheduler(id=29), fakedb.SourceStamp(id=234, branch='aa'), fakedb.SourceStamp(id=235, branch='bb'), fakedb.Change(changeid=11), fakedb.Change(changeid=12, sourcestampid=234), fakedb.SchedulerChange(schedulerid=29, changeid=12), ] + self.change13_rows + [ fakedb.SchedulerChange(schedulerid=29, changeid=13), ] + self.change14_rows + [ fakedb.SchedulerChange(schedulerid=29, changeid=14), fakedb.Change(changeid=15, sourcestampid=235), ] ) # pruning with a horizon of 2 should delete changes 11, 12 and 13 yield self.db.changes.pruneChanges(2) def thd(conn): results = {} for tbl_name in ('scheduler_changes', 'change_files', 'change_properties', 'changes'): tbl = self.db.model.metadata.tables[tbl_name] res = conn.execute(sa.select([tbl.c.changeid])) results[tbl_name] = sorted( [row[0] for row in res.fetchall()]) self.assertEqual(results, { 'scheduler_changes': [14], 'change_files': [14], 'change_properties': [], 'changes': [14, 15], }) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_pruneChanges_lots(self): yield self.insertTestData([ fakedb.SourceStamp(id=29), ] + [ fakedb.Change(changeid=n, sourcestampid=29) for n in range(1, 151) ]) yield self.db.changes.pruneChanges(1) def thd(conn): results = {} for tbl_name in ('scheduler_changes', 'change_files', 'change_properties', 'changes'): tbl = self.db.model.metadata.tables[tbl_name] res = 
conn.execute(sa.select([sa.func.count()]).select_from(tbl)) results[tbl_name] = res.fetchone()[0] res.close() self.assertEqual(results, { 'scheduler_changes': 0, 'change_files': 0, 'change_properties': 0, 'changes': 1, }) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_pruneChanges_None(self): yield self.insertTestData(self.change13_rows) yield self.db.changes.pruneChanges(None) def thd(conn): tbl = self.db.model.changes res = conn.execute(tbl.select()) self.assertEqual([row.changeid for row in res.fetchall()], [13]) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_getChangesForBuild(self): rows = [fakedb.Master(id=88, name="bar"), fakedb.Worker(id=13, name='one'), fakedb.Builder(id=77, name='A')] lastID = {"changeid": 0, "sourcestampid": 0, "buildsetid": 0, "buildsetSourceStampid": 0, "buildrequestid": 0, "buildid": 0} codebase_ss = {} # shared state between addChange and addBuild def addChange(codebase, revision, author, committer, comments, branch='master', category='cat', project='proj', repository='repo'): lastID["sourcestampid"] += 1 lastID["changeid"] += 1 parent_changeids = codebase_ss.get(codebase, None) codebase_ss[codebase] = lastID["sourcestampid"] changeRows = [fakedb.SourceStamp(id=lastID["sourcestampid"], codebase=codebase, revision=revision), fakedb.Change(changeid=lastID["changeid"], author=author, committer=committer, comments=comments, revision=revision, sourcestampid=lastID["sourcestampid"], parent_changeids=parent_changeids, when_timestamp=SOMETIME + lastID["changeid"], branch=branch, category=category, project=project, repository=repository)] return changeRows def addBuild(codebase_ss, results=0): lastID["buildid"] += 1 lastID["buildsetid"] += 1 lastID["buildrequestid"] += 1 buildRows = [fakedb.Buildset(id=lastID["buildsetid"], reason='foo', submitted_at=1300305012, results=-1)] for cb, ss in codebase_ss.items(): lastID["buildsetSourceStampid"] += 1 buildRows.append( 
fakedb.BuildsetSourceStamp(id=lastID["buildsetSourceStampid"], sourcestampid=ss, buildsetid=lastID["buildsetid"])) codebase_ss.clear() buildRows.extend([ fakedb.BuildRequest(id=lastID["buildrequestid"], buildsetid=lastID["buildsetid"], builderid=77, priority=13, submitted_at=1300305712, results=-1), fakedb.Build(id=lastID["buildid"], buildrequestid=lastID["buildrequestid"], number=lastID["buildid"], masterid=88, builderid=77, state_string="test", workerid=13, started_at=SOMETIME + lastID["buildid"], complete_at=SOMETIME + 2 * lastID["buildid"], results=results)]) return buildRows # Build1 has 1 change per code base rows.extend(addChange('A', 1, 'franck', 'franck', '1st commit')) rows.extend(addChange('B', 1, 'alice', 'alice', '2nd commit')) rows.extend(addChange('C', 1, 'bob', 'bob', '3rd commit')) rows.extend(addBuild(codebase_ss)) # Build 2 has only one change for codebase A rows.extend(addChange('A', 2, 'delanne', 'delanne', '4th commit')) rows.extend(addBuild(codebase_ss)) # Build 3 has only one change for codebase B rows.extend(addChange('B', 2, 'bob', 'bob', '6th commit')) rows.extend(addBuild(codebase_ss)) # Build 4 has no change rows.extend(addBuild(codebase_ss)) # Build 5 has 2 changes for codebase A and 1 change for codebase C rows.extend(addChange('A', 3, 'franck', 'franck', '7th commit')) rows.extend(addChange('A', 4, 'alice', 'alice', '8th commit')) rows.extend(addChange('B', 3, 'bob', 'bob', '9th commit')) rows.extend(addBuild(codebase_ss)) # Build 6 has only one change for codebase C rows.extend(addChange('C', 2, 'bob', 'bob', '10th commit')) rows.extend(addBuild(codebase_ss, 2)) # Build 7 has only one change for codebase C rows.extend(addChange('C', 3, 'bob', 'bob', '11th commit')) rows.extend(addBuild(codebase_ss, 2)) yield self.insertTestData(rows) @defer.inlineCallbacks def expect(buildid, commits): got = yield self.db.changes.getChangesForBuild(buildid) got_commits = [c['comments'] for c in got] self.assertEqual(sorted(got_commits), 
sorted(commits)) yield expect(1, ['2nd commit', '3rd commit', '1st commit']) yield expect(2, ['4th commit']) yield expect(3, ['6th commit']) yield expect(4, []) yield expect(5, ['8th commit', '9th commit', '7th commit']) yield expect(6, ['10th commit']) yield expect(7, ['11th commit']) class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['changes', 'change_files', 'change_properties', 'scheduler_changes', 'schedulers', 'sourcestampsets', 'sourcestamps', 'patches', 'change_users', 'users', 'buildsets', 'workers', 'builders', 'masters', 'buildrequests', 'builds', 'buildset_sourcestamps', 'workers']) self.db.changes = changes.ChangesConnectorComponent(self.db) self.db.builds = builds.BuildsConnectorComponent(self.db) self.db.sourcestamps = \ sourcestamps.SourceStampsConnectorComponent(self.db) self.master = self.db.master self.master.db = self.db def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_changesources.py000066400000000000000000000267731413250514000252200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import changesources from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import db from buildbot.test.util import interfaces from buildbot.test.util import validation def changeSourceKey(changeSource): return changeSource['id'] class Tests(interfaces.InterfaceTests): # test data cs42 = fakedb.ChangeSource(id=42, name='cool_source') cs87 = fakedb.ChangeSource(id=87, name='lame_source') master13 = fakedb.Master(id=13, name='m1', active=1) cs42master13 = fakedb.ChangeSourceMaster(changesourceid=42, masterid=13) master14 = fakedb.Master(id=14, name='m2', active=0) cs87master14 = fakedb.ChangeSourceMaster(changesourceid=87, masterid=14) # tests def test_signature_findChangeSourceId(self): """The signature of findChangeSourceId is correct""" @self.assertArgSpecMatches(self.db.changesources.findChangeSourceId) def findChangeSourceId(self, name): pass @defer.inlineCallbacks def test_findChangeSourceId_new(self): """findChangeSourceId for a new changesource creates it""" id = yield self.db.changesources.findChangeSourceId('csname') cs = yield self.db.changesources.getChangeSource(id) self.assertEqual(cs['name'], 'csname') @defer.inlineCallbacks def test_findChangeSourceId_existing(self): """findChangeSourceId gives the same answer for the same inputs""" id1 = yield self.db.changesources.findChangeSourceId('csname') id2 = yield self.db.changesources.findChangeSourceId('csname') self.assertEqual(id1, id2) def test_signature_setChangeSourceMaster(self): """setChangeSourceMaster has the right signature""" @self.assertArgSpecMatches(self.db.changesources.setChangeSourceMaster) def 
setChangeSourceMaster(self, changesourceid, masterid): pass @defer.inlineCallbacks def test_setChangeSourceMaster_fresh(self): """setChangeSourceMaster with a good pair""" yield self.insertTestData([self.cs42, self.master13]) yield self.db.changesources.setChangeSourceMaster(42, 13) cs = yield self.db.changesources.getChangeSource(42) self.assertEqual(cs['masterid'], 13) @defer.inlineCallbacks def test_setChangeSourceMaster_inactive_but_linked(self): """Inactive changesource but already claimed by an active master""" d = self.insertTestData([ self.cs87, self.master13, self.master14, self.cs87master14, ]) d.addCallback(lambda _: self.db.changesources.setChangeSourceMaster(87, 13)) yield self.assertFailure(d, changesources.ChangeSourceAlreadyClaimedError) @defer.inlineCallbacks def test_setChangeSourceMaster_active(self): """Active changesource already claimed by an active master""" d = self.insertTestData([ self.cs42, self.master13, self.cs42master13, ]) d.addCallback(lambda _: self.db.changesources.setChangeSourceMaster(42, 14)) yield self.assertFailure(d, changesources.ChangeSourceAlreadyClaimedError) @defer.inlineCallbacks def test_setChangeSourceMaster_None(self): """A 'None' master disconnects the changesource""" yield self.insertTestData([ self.cs87, self.master14, self.cs87master14, ]) yield self.db.changesources.setChangeSourceMaster(87, None) cs = yield self.db.changesources.getChangeSource(87) self.assertEqual(cs['masterid'], None) @defer.inlineCallbacks def test_setChangeSourceMaster_None_unowned(self): """A 'None' master for a disconnected changesource""" yield self.insertTestData([self.cs87]) yield self.db.changesources.setChangeSourceMaster(87, None) cs = yield self.db.changesources.getChangeSource(87) self.assertEqual(cs['masterid'], None) def test_signature_getChangeSource(self): """getChangeSource has the right signature""" @self.assertArgSpecMatches(self.db.changesources.getChangeSource) def getChangeSource(self, changesourceid): pass 
@defer.inlineCallbacks def test_getChangeSource(self): """getChangeSource for a changesource that exists""" yield self.insertTestData([self.cs87]) cs = yield self.db.changesources.getChangeSource(87) validation.verifyDbDict(self, 'changesourcedict', cs) self.assertEqual(cs, dict( id=87, name='lame_source', masterid=None)) @defer.inlineCallbacks def test_getChangeSource_missing(self): """getChangeSource for a changesource that doesn't exist""" cs = yield self.db.changesources.getChangeSource(87) self.assertEqual(cs, None) @defer.inlineCallbacks def test_getChangeSource_active(self): """getChangeSource for a changesource that exists and is active""" yield self.insertTestData([self.cs42, self.master13, self.cs42master13]) cs = yield self.db.changesources.getChangeSource(42) validation.verifyDbDict(self, 'changesourcedict', cs) self.assertEqual(cs, dict( id=42, name='cool_source', masterid=13)) @defer.inlineCallbacks def test_getChangeSource_inactive_but_linked(self): """getChangeSource for a changesource that is assigned but is inactive""" yield self.insertTestData([self.cs87, self.master14, self.cs87master14]) cs = yield self.db.changesources.getChangeSource(87) validation.verifyDbDict(self, 'changesourcedict', cs) self.assertEqual(cs, dict( id=87, name='lame_source', masterid=14)) # row exists, but marked inactive def test_signature_getChangeSources(self): """getChangeSources has right signature""" @self.assertArgSpecMatches(self.db.changesources.getChangeSources) def getChangeSources(self, active=None, masterid=None): pass @defer.inlineCallbacks def test_getChangeSources(self): """getChangeSources returns all changesources""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87, ]) cslist = yield self.db.changesources.getChangeSources() [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist, key=changeSourceKey), sorted([ dict(id=42, name='cool_source', masterid=13), dict(id=87, 
name='lame_source', masterid=None), ], key=changeSourceKey)) @defer.inlineCallbacks def test_getChangeSources_masterid(self): """getChangeSources returns all changesources for a given master""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87, ]) cslist = yield self.db.changesources.getChangeSources(masterid=13) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist, key=changeSourceKey), sorted([ dict(id=42, name='cool_source', masterid=13), ], key=changeSourceKey)) @defer.inlineCallbacks def test_getChangeSources_active(self): """getChangeSources for (active changesources, all masters)""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87 ]) cslist = yield self.db.changesources.getChangeSources(active=True) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), sorted([ dict(id=42, name='cool_source', masterid=13), ])) @defer.inlineCallbacks def test_getChangeSources_active_masterid(self): """getChangeSources returns (active changesources, given masters)""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87 ]) cslist = yield self.db.changesources.getChangeSources( active=True, masterid=13) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), sorted([ dict(id=42, name='cool_source', masterid=13), ])) cslist = yield self.db.changesources.getChangeSources( active=True, masterid=14) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), []) @defer.inlineCallbacks def test_getChangeSources_inactive(self): """getChangeSources returns (inactive changesources, all masters)""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87 ]) cslist = yield self.db.changesources.getChangeSources(active=False) [validation.verifyDbDict(self, 
'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), sorted([ dict(id=87, name='lame_source', masterid=None), ])) @defer.inlineCallbacks def test_getChangeSources_inactive_masterid(self): """getChangeSources returns (active changesources, given masters)""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87 ]) cslist = yield self.db.changesources.getChangeSources( active=False, masterid=13) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), []) cslist = yield self.db.changesources.getChangeSources( active=False, masterid=14) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), []) # always returns [] by spec! class RealTests(Tests): # tests that only "real" implementations will pass pass class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(db.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['changes', 'changesources', 'masters', 'patches', 'sourcestamps', 'changesource_masters']) self.db.changesources = \ changesources.ChangeSourcesConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_connector.py000066400000000000000000000065611413250514000243520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.db import connector from buildbot.db import exceptions from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util.misc import TestReactorMixin class TestDBConnector(TestReactorMixin, db.RealDatabaseMixin, unittest.TestCase): """ Basic tests of the DBConnector class - all start with an empty DB """ @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpRealDatabase(table_names=[ 'changes', 'change_properties', 'change_files', 'patches', 'sourcestamps', 'buildset_properties', 'buildsets', 'sourcestampsets', 'builds', 'builders', 'masters', 'buildrequests', 'workers']) self.master = fakemaster.make_master(self) self.master.config = config.MasterConfig() self.db = connector.DBConnector(os.path.abspath('basedir')) yield self.db.setServiceParent(self.master) @defer.inlineCallbacks def tearDown(self): if self.db.running: yield self.db.stopService() yield self.tearDownRealDatabase() @defer.inlineCallbacks def startService(self, check_version=False): self.master.config.db['db_url'] = self.db_url yield self.db.setup(check_version=check_version) self.db.startService() yield self.db.reconfigServiceWithBuildbotConfig(self.master.config) # tests @defer.inlineCallbacks def test_doCleanup_service(self): yield self.startService() self.assertTrue(self.db.cleanup_timer.running) def test_doCleanup_unconfigured(self): self.db.changes.pruneChanges = mock.Mock( return_value=defer.succeed(None)) self.db._doCleanup() self.assertFalse(self.db.changes.pruneChanges.called) @defer.inlineCallbacks def 
test_doCleanup_configured(self): self.db.changes.pruneChanges = mock.Mock( return_value=defer.succeed(None)) yield self.startService() self.db._doCleanup() self.assertTrue(self.db.changes.pruneChanges.called) def test_setup_check_version_bad(self): if self.db_url == 'sqlite://': raise unittest.SkipTest( 'sqlite in-memory model is always upgraded at connection') d = self.startService(check_version=True) return self.assertFailure(d, exceptions.DatabaseNotReadyError) def test_setup_check_version_good(self): self.db.model.is_current = lambda: defer.succeed(True) return self.startService(check_version=True) buildbot-3.4.0/master/buildbot/test/unit/db/test_dbconfig.py000066400000000000000000000100671413250514000241270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import threads from twisted.trial import unittest from buildbot.db import dbconfig from buildbot.test.util import db class TestDbConfig(db.RealDatabaseMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): # as we will open the db twice, we can't use in memory sqlite yield self.setUpRealDatabase(table_names=['objects', 'object_state'], sqlite_memory=False) yield threads.deferToThread(self.createDbConfig) def createDbConfig(self): self.dbConfig = dbconfig.DbConfig( {"db_url": self.db_url}, self.basedir) def tearDown(self): return self.tearDownRealDatabase() def test_basic(self): def thd(): workersInDB = ['foo', 'bar'] self.dbConfig.set("workers", workersInDB) workers = self.dbConfig.get("workers") self.assertEqual(workers, workersInDB) return threads.deferToThread(thd) def test_default(self): def thd(): workers = self.dbConfig.get("workers", "default") self.assertEqual(workers, "default") return threads.deferToThread(thd) def test_error(self): def thd(): with self.assertRaises(KeyError): self.dbConfig.get("workers") return threads.deferToThread(thd) # supports the 3 different ways to declare db_url in the master.cfg def test_init1(self): obj = dbconfig.DbConfig({"db_url": self.db_url}, self.basedir) self.assertEqual(obj.db_url, self.db_url) def test_init2(self): obj = dbconfig.DbConfig({"db": {"db_url": self.db_url}}, self.basedir) self.assertEqual(obj.db_url, self.db_url) def test_init3(self): obj = dbconfig.DbConfig({}, self.basedir) self.assertEqual(obj.db_url, "sqlite:///state.sqlite") class TestDbConfigNotInitialized(db.RealDatabaseMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): # as we will open the db twice, we can't use in memory sqlite yield self.setUpRealDatabase(table_names=[], sqlite_memory=False) @defer.inlineCallbacks def tearDown(self): yield self.tearDownRealDatabase() def createDbConfig(self, db_url=None): return 
dbconfig.DbConfig({"db_url": db_url or self.db_url}, self.basedir) def test_default(self): def thd(): db = self.createDbConfig() self.assertEqual("foo", db.get("workers", "foo")) return threads.deferToThread(thd) def test_error(self): def thd(): db = self.createDbConfig() with self.assertRaises(KeyError): db.get("workers") return threads.deferToThread(thd) def test_bad_url(self): def thd(): db = self.createDbConfig("garbage://") with self.assertRaises(KeyError): db.get("workers") return threads.deferToThread(thd) def test_bad_url2(self): def thd(): db = self.createDbConfig("trash") with self.assertRaises(KeyError): db.get("workers") return threads.deferToThread(thd) def test_bad_url3(self): def thd(): db = self.createDbConfig("sqlite://bad") with self.assertRaises(KeyError): db.get("workers") return threads.deferToThread(thd) buildbot-3.4.0/master/buildbot/test/unit/db/test_enginestrategy.py000066400000000000000000000174201413250514000254040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from sqlalchemy.engine import url from sqlalchemy.pool import NullPool from twisted.python import runtime from twisted.trial import unittest from buildbot.db import enginestrategy class BuildbotCreateEngineTest(unittest.TestCase): "Test the special case methods, without actually creating a db" # used several times below mysql_kwargs = dict( basedir='my-base-dir', connect_args=dict(init_command='SET default_storage_engine=MyISAM'), pool_recycle=3600) sqlite_kwargs = dict(basedir='/my-base-dir', poolclass=NullPool) # utility def filter_kwargs(self, kwargs): # filter out the listeners list to just include the class name if 'listeners' in kwargs: kwargs['listeners'] = [lstnr.__class__.__name__ for lstnr in kwargs['listeners']] return kwargs # tests def test_sqlite_pct_sub(self): u = url.make_url("sqlite:///%(basedir)s/x/state.sqlite") kwargs = dict(basedir='/my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_sqlite(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["sqlite:////my-base-dir/x/state.sqlite", 1, self.sqlite_kwargs]) def test_sqlite_relpath(self): url_src = "sqlite:///x/state.sqlite" basedir = "/my-base-dir" expected_url = "sqlite:////my-base-dir/x/state.sqlite" # this looks a whole lot different on windows if runtime.platformType == 'win32': url_src = r'sqlite:///X\STATE.SQLITE' basedir = r'C:\MYBASE~1' expected_url = r'sqlite:///C:\MYBASE~1\X\STATE.SQLITE' exp_kwargs = self.sqlite_kwargs.copy() exp_kwargs['basedir'] = basedir u = url.make_url(url_src) kwargs = dict(basedir=basedir) u, kwargs, max_conns = enginestrategy.special_case_sqlite(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], [expected_url, 1, exp_kwargs]) def test_sqlite_abspath(self): u = url.make_url("sqlite:////x/state.sqlite") kwargs = dict(basedir='/my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_sqlite(u, kwargs) self.assertEqual([str(u), max_conns, 
self.filter_kwargs(kwargs)], ["sqlite:////x/state.sqlite", 1, self.sqlite_kwargs]) def test_sqlite_memory(self): u = url.make_url("sqlite://") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_sqlite(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["sqlite://", 1, # only one conn at a time dict(basedir='my-base-dir', connect_args=dict(check_same_thread=False))]) def test_mysql_simple(self): u = url.make_url("mysql://host/dbname") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql://host/dbname?charset=utf8&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_userport(self): u = url.make_url("mysql://user:pass@host:1234/dbname") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql://user:pass@host:1234/dbname?" 
"charset=utf8&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_local(self): u = url.make_url("mysql:///dbname") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_args(self): u = url.make_url("mysql:///dbname?foo=bar") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&foo=bar&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_max_idle(self): u = url.make_url("mysql:///dbname?max_idle=1234") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_mysql(u, kwargs) exp = self.mysql_kwargs.copy() exp['pool_recycle'] = 1234 self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&use_unicode=True", None, exp]) def test_mysql_good_charset(self): u = url.make_url("mysql:///dbname?charset=utf8") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_bad_charset(self): u = url.make_url("mysql:///dbname?charset=ebcdic") kwargs = dict(basedir='my-base-dir') with self.assertRaises(TypeError): enginestrategy.special_case_mysql(u, kwargs) def test_mysql_good_use_unicode(self): u = url.make_url("mysql:///dbname?use_unicode=True") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_bad_use_unicode(self): u = 
url.make_url("mysql:///dbname?use_unicode=maybe") kwargs = dict(basedir='my-base-dir') with self.assertRaises(TypeError): enginestrategy.special_case_mysql(u, kwargs) def test_mysql_storage_engine(self): u = url.make_url("mysql:///dbname?storage_engine=foo") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = enginestrategy.special_case_mysql(u, kwargs) exp = self.mysql_kwargs.copy() exp['connect_args'] = dict( init_command='SET default_storage_engine=foo') self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&use_unicode=True", None, exp]) class BuildbotEngineStrategy(unittest.TestCase): "Test create_engine by creating a sqlite in-memory db" def test_create_engine(self): engine = enginestrategy.create_engine('sqlite://', basedir="/base") self.assertEqual(engine.scalar("SELECT 13 + 14"), 27) buildbot-3.4.0/master/buildbot/test/unit/db/test_logs.py000066400000000000000000000556061413250514000233300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import base64 import bz2 import textwrap import zlib import sqlalchemy as sa from twisted.internet import defer from twisted.trial import unittest from buildbot.db import logs from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes class Tests(interfaces.InterfaceTests): TIMESTAMP_STEP101 = 100000 TIMESTAMP_STEP102 = 200000 backgroundData = [ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Step(id=101, buildid=30, number=1, name='one', started_at=TIMESTAMP_STEP101), fakedb.Step(id=102, buildid=30, number=2, name='two', started_at=TIMESTAMP_STEP102), ] testLogLines = [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=7, type='s'), fakedb.LogChunk(logid=201, first_line=0, last_line=1, compressed=0, content=textwrap.dedent("""\ line zero line 1""" + "x" * 200)), fakedb.LogChunk(logid=201, first_line=2, last_line=4, compressed=0, content=textwrap.dedent("""\ line TWO line 2**2""")), fakedb.LogChunk(logid=201, first_line=5, last_line=5, compressed=0, content="another line"), fakedb.LogChunk(logid=201, first_line=6, last_line=6, compressed=0, content="yet another line"), ] bug3101Content = base64.b64decode(""" PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0 9PT09PT09PT09PT09PT09PT09PT09PT09PT09PQpbU0tJUFBFRF0Kbm90IGEgd2luMz IgcGxhdGZvcm0KCmJ1aWxkc2xhdmUudGVzdC51bml0LnRlc3RfcnVucHJvY2Vzcy5UZ XN0UnVuUHJvY2Vzcy50ZXN0UGlwZVN0cmluZwotLS0tLS0tLS0tLS0tLS0tLS0tLS0t LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0 
tLS0tLS0tClJhbiAyNjcgdGVzdHMgaW4gNS4zNzhzCgpQQVNTRUQgKHNraXBzPTEsIH N1Y2Nlc3Nlcz0yNjYpCnByb2dyYW0gZmluaXNoZWQgd2l0aCBleGl0IGNvZGUgMAplb GFwc2VkVGltZT04LjI0NTcwMg==""") bug3101Rows = [ fakedb.Log(id=1470, stepid=101, name='problems', slug='problems', complete=1, num_lines=11, type='t'), fakedb.LogChunk(logid=1470, first_line=0, last_line=10, compressed=0, content=bug3101Content), ] @defer.inlineCallbacks def checkTestLogLines(self): expLines = ['line zero', 'line 1' + "x" * 200, 'line TWO', '', 'line 2**2', 'another line', 'yet another line'] for first_line in range(0, 7): for last_line in range(first_line, 7): got_lines = yield self.db.logs.getLogLines( 201, first_line, last_line) self.assertEqual( got_lines, "\n".join(expLines[first_line:last_line + 1] + [""])) # check overflow self.assertEqual((yield self.db.logs.getLogLines(201, 5, 20)), "\n".join(expLines[5:7] + [""])) # signature tests def test_signature_getLog(self): @self.assertArgSpecMatches(self.db.logs.getLog) def getLog(self, logid): pass def test_signature_getLogBySlug(self): @self.assertArgSpecMatches(self.db.logs.getLogBySlug) def getLogBySlug(self, stepid, slug): pass def test_signature_getLogs(self): @self.assertArgSpecMatches(self.db.logs.getLogs) def getLogs(self, stepid=None): pass def test_signature_getLogLines(self): @self.assertArgSpecMatches(self.db.logs.getLogLines) def getLogLines(self, logid, first_line, last_line): pass def test_signature_addLog(self): @self.assertArgSpecMatches(self.db.logs.addLog) def addLog(self, stepid, name, slug, type): pass def test_signature_appendLog(self): @self.assertArgSpecMatches(self.db.logs.appendLog) def appendLog(self, logid, content): pass def test_signature_finishLog(self): @self.assertArgSpecMatches(self.db.logs.finishLog) def finishLog(self, logid): pass def test_signature_compressLog(self): @self.assertArgSpecMatches(self.db.logs.compressLog) def compressLog(self, logid, force=False): pass def test_signature_deleteOldLogChunks(self): 
@self.assertArgSpecMatches(self.db.logs.deleteOldLogChunks) def deleteOldLogChunks(self, older_than_timestamp): pass # method tests @defer.inlineCallbacks def test_getLog(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), ]) logdict = yield self.db.logs.getLog(201) validation.verifyDbDict(self, 'logdict', logdict) self.assertEqual(logdict, { 'id': 201, 'stepid': 101, 'name': 'stdio', 'slug': 'stdio', 'complete': False, 'num_lines': 200, 'type': 's', }) @defer.inlineCallbacks def test_getLog_missing(self): logdict = yield self.db.logs.getLog(201) self.assertEqual(logdict, None) @defer.inlineCallbacks def test_getLogBySlug(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), fakedb.Log(id=202, stepid=101, name='dbg.log', slug='dbg_log', complete=1, num_lines=200, type='s'), ]) logdict = yield self.db.logs.getLogBySlug(101, 'dbg_log') validation.verifyDbDict(self, 'logdict', logdict) self.assertEqual(logdict['id'], 202) @defer.inlineCallbacks def test_getLogBySlug_missing(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), ]) logdict = yield self.db.logs.getLogBySlug(102, 'stdio') self.assertEqual(logdict, None) @defer.inlineCallbacks def test_getLogs(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), fakedb.Log(id=202, stepid=101, name='dbg.log', slug='dbg_log', complete=1, num_lines=300, type='t'), fakedb.Log(id=203, stepid=102, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), ]) logdicts = yield self.db.logs.getLogs(101) for logdict in logdicts: validation.verifyDbDict(self, 'logdict', logdict) self.assertEqual(sorted([ld['id'] for ld in 
logdicts]), [201, 202]) @defer.inlineCallbacks def test_getLogLines(self): yield self.insertTestData(self.backgroundData + self.testLogLines) yield self.checkTestLogLines() # check line number reversal self.assertEqual((yield self.db.logs.getLogLines(201, 6, 3)), '') @defer.inlineCallbacks def test_getLogLines_empty(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), ]) self.assertEqual((yield self.db.logs.getLogLines(201, 9, 99)), '') self.assertEqual((yield self.db.logs.getLogLines(999, 9, 99)), '') @defer.inlineCallbacks def test_getLogLines_bug3101(self): # regression test for #3101 content = self.bug3101Content yield self.insertTestData(self.backgroundData + self.bug3101Rows) # overall content is the same, with '\n' padding at the end expected = bytes2unicode(self.bug3101Content + b'\n') self.assertEqual((yield self.db.logs.getLogLines(1470, 0, 99)), expected) # try to fetch just one line expected = bytes2unicode(content.split(b'\n')[0] + b'\n') self.assertEqual((yield self.db.logs.getLogLines(1470, 0, 0)), expected) @defer.inlineCallbacks def test_addLog_getLog(self): yield self.insertTestData(self.backgroundData) logid = yield self.db.logs.addLog( stepid=101, name='config.log', slug='config_log', type='t') logdict = yield self.db.logs.getLog(logid) validation.verifyDbDict(self, 'logdict', logdict) self.assertEqual(logdict, { 'id': logid, 'stepid': 101, 'name': 'config.log', 'slug': 'config_log', 'complete': False, 'num_lines': 0, 'type': 't', }) @defer.inlineCallbacks def test_appendLog_getLogLines(self): yield self.insertTestData(self.backgroundData + self.testLogLines) logid = yield self.db.logs.addLog( stepid=102, name='another', slug='another', type='s') self.assertEqual((yield self.db.logs.appendLog(logid, 'xyz\n')), (0, 0)) self.assertEqual((yield self.db.logs.appendLog(201, 'abc\ndef\n')), (7, 8)) self.assertEqual((yield self.db.logs.appendLog(logid, 
'XYZ\n')), (1, 1)) self.assertEqual((yield self.db.logs.getLogLines(201, 6, 7)), "yet another line\nabc\n") self.assertEqual((yield self.db.logs.getLogLines(201, 7, 8)), "abc\ndef\n") self.assertEqual((yield self.db.logs.getLogLines(201, 8, 8)), "def\n") self.assertEqual((yield self.db.logs.getLogLines(logid, 0, 1)), "xyz\nXYZ\n") self.assertEqual((yield self.db.logs.getLog(logid)), { 'complete': False, 'id': logid, 'name': 'another', 'slug': 'another', 'num_lines': 2, 'stepid': 102, 'type': 's', }) @defer.inlineCallbacks def test_compressLog(self): yield self.insertTestData(self.backgroundData + self.testLogLines) yield self.db.logs.compressLog(201) # test log lines should still be readable just the same yield self.checkTestLogLines() @defer.inlineCallbacks def test_addLogLines_big_chunk(self): yield self.insertTestData(self.backgroundData + self.testLogLines) self.assertEqual( (yield self.db.logs.appendLog(201, 'abc\n' * 20000)), # 80k (7, 20006)) lines = yield self.db.logs.getLogLines(201, 7, 50000) self.assertEqual(len(lines), 80000) self.assertEqual(lines, ('abc\n' * 20000)) @defer.inlineCallbacks def test_addLogLines_big_chunk_big_lines(self): yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'x' * 33000 + '\n' self.assertEqual((yield self.db.logs.appendLog(201, line * 3)), (7, 9)) # three long lines, all truncated lines = yield self.db.logs.getLogLines(201, 7, 100) self.assertEqual(len(lines), 99003) self.assertEqual(lines, (line * 3)) class RealTests(Tests): @defer.inlineCallbacks def test_addLogLines_db(self): yield self.insertTestData(self.backgroundData + self.testLogLines) self.assertEqual( (yield self.db.logs.appendLog(201, 'abc\ndef\nghi\njkl\n')), (7, 10)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 
'last_line': 10, 'content': b'abc\ndef\nghi\njkl', 'compressed': 0}) @defer.inlineCallbacks def test_addLogLines_huge_lines(self): yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'xy' * 70000 + '\n' yield self.db.logs.appendLog(201, line * 3) for lineno in 7, 8, 9: line = yield self.db.logs.getLogLines(201, lineno, lineno) self.assertEqual(len(line), 65537) def test_splitBigChunk_unicode_misalignment(self): unaligned = ('a ' + '\N{SNOWMAN}' * 30000 + '\n').encode('utf-8') # the first 65536 bytes of that line are not valid utf-8 with self.assertRaises(UnicodeDecodeError): unaligned[:65536].decode('utf-8') chunk, remainder = self.db.logs._splitBigChunk(unaligned, 1) # see that it was truncated by two bytes, and now properly decodes self.assertEqual(len(chunk), 65534) chunk.decode('utf-8') @defer.inlineCallbacks def test_no_compress_small_chunk(self): yield self.insertTestData(self.backgroundData + self.testLogLines) self.db.master.config.logCompressionMethod = "gz" self.assertEqual( (yield self.db.logs.appendLog(201, 'abc\n')), (7, 7)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 'last_line': 7, 'content': b'abc', 'compressed': 0}) @defer.inlineCallbacks def test_raw_compress_big_chunk(self): yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'xy' * 10000 self.db.master.config.logCompressionMethod = "raw" self.assertEqual( (yield self.db.logs.appendLog(201, line + '\n')), (7, 7)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 'last_line': 7, 'content': unicode2bytes(line), 'compressed': 
0}) @defer.inlineCallbacks def test_gz_compress_big_chunk(self): yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'xy' * 10000 self.db.master.config.logCompressionMethod = "gz" self.assertEqual( (yield self.db.logs.appendLog(201, line + '\n')), (7, 7)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 'last_line': 7, 'content': zlib.compress(unicode2bytes(line), 9), 'compressed': 1}) @defer.inlineCallbacks def test_bz2_compress_big_chunk(self): yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'xy' * 10000 self.db.master.config.logCompressionMethod = "bz2" self.assertEqual( (yield self.db.logs.appendLog(201, line + '\n')), (7, 7)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 'last_line': 7, 'content': bz2.compress(unicode2bytes(line), 9), 'compressed': 2}) @defer.inlineCallbacks def test_lz4_compress_big_chunk(self): try: import lz4 # noqa pylint: disable=unused-import,import-outside-toplevel except ImportError as e: raise unittest.SkipTest("lz4 not installed, skip the test") from e yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'xy' * 10000 self.db.master.config.logCompressionMethod = "lz4" self.assertEqual( (yield self.db.logs.appendLog(201, line + '\n')), (7, 7)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 'last_line': 7, 'content': 
logs.dumps_lz4(line.encode('utf-8')), 'compressed': 3}) @defer.inlineCallbacks def do_addLogLines_huge_log(self, NUM_CHUNKS=3000, chunk=('xy' * 70 + '\n') * 3): if chunk.endswith("\n"): chunk = chunk[:-1] linesperchunk = chunk.count("\n") + 1 test_data = [ fakedb.LogChunk(logid=201, first_line=i * linesperchunk, last_line=i * linesperchunk + linesperchunk - 1, compressed=0, content=chunk) for i in range(NUM_CHUNKS) ] yield self.insertTestData( self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=NUM_CHUNKS * 3, type='s')] + test_data) wholeLog = yield self.db.logs.getLogLines(201, 0, NUM_CHUNKS * 3) for i in range(10): yield self.db.logs.compressLog(201) wholeLog2 = yield self.db.logs.getLogLines(201, 0, NUM_CHUNKS * 3) self.assertEqual(wholeLog, wholeLog2) self.assertEqual(wholeLog, wholeLog2) def countChunk(conn): tbl = self.db.model.logchunks q = sa.select([sa.func.count(tbl.c.content)]) q = q.where(tbl.c.logid == 201) return conn.execute(q).fetchone()[0] chunks = yield self.db.pool.do(countChunk) # make sure MAX_CHUNK_LINES is taken in account self.assertGreaterEqual( chunks, NUM_CHUNKS * linesperchunk / logs.LogsConnectorComponent.MAX_CHUNK_LINES) def test_addLogLines_huge_log(self): return self.do_addLogLines_huge_log() def test_addLogLines_huge_log_lots_line(self): return self.do_addLogLines_huge_log(NUM_CHUNKS=3000, chunk='x\n' * 50) def test_addLogLines_huge_log_lots_snowmans(self): return self.do_addLogLines_huge_log(NUM_CHUNKS=3000, chunk='\N{SNOWMAN}\n' * 50) @defer.inlineCallbacks def test_compressLog_non_existing_log(self): yield self.db.logs.compressLog(201) logdict = yield self.db.logs.getLog(201) self.assertEqual(logdict, None) @defer.inlineCallbacks def test_compressLog_empty_log(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=1, num_lines=0, type='s'), ]) yield self.db.logs.compressLog(201) logdict = yield 
self.db.logs.getLog(201) self.assertEqual(logdict, { 'stepid': 101, 'num_lines': 0, 'name': 'stdio', 'id': 201, 'type': 's', 'slug': 'stdio', 'complete': True}) @defer.inlineCallbacks def test_deleteOldLogChunks_basic(self): yield self.insertTestData(self.backgroundData) logids = [] for stepid in (101, 102): for i in range(stepid): logid = yield self.db.logs.addLog( stepid=stepid, name='another' + str(i), slug='another' + str(i), type='s') yield self.db.logs.appendLog(logid, 'xyz\n') logids.append(logid) deleted_chunks = yield self.db.logs.deleteOldLogChunks( (self.TIMESTAMP_STEP102 + self.TIMESTAMP_STEP101) / 2) self.assertEqual(deleted_chunks, 101) deleted_chunks = yield self.db.logs.deleteOldLogChunks( self.TIMESTAMP_STEP102 + self.TIMESTAMP_STEP101) self.assertEqual(deleted_chunks, 102) deleted_chunks = yield self.db.logs.deleteOldLogChunks( self.TIMESTAMP_STEP102 + self.TIMESTAMP_STEP101) self.assertEqual(deleted_chunks, 0) deleted_chunks = yield self.db.logs.deleteOldLogChunks(0) self.assertEqual(deleted_chunks, 0) for logid in logids: logdict = yield self.db.logs.getLog(logid) self.assertEqual(logdict['type'], 'd') # we make sure we can still getLogLines, it will just return empty value lines = yield self.db.logs.getLogLines(logid, 0, logdict['num_lines']) self.assertEqual(lines, '') class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['logs', 'logchunks', 'steps', 'builds', 'builders', 'masters', 'buildrequests', 'buildsets', 'workers']) self.db.logs = logs.LogsConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() 
buildbot-3.4.0/master/buildbot/test/unit/db/test_masters.py000066400000000000000000000201271413250514000240300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import masters from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.util import epoch2datetime SOMETIME = 1348971992 SOMETIME_DT = epoch2datetime(SOMETIME) OTHERTIME = 1008971992 OTHERTIME_DT = epoch2datetime(OTHERTIME) class Tests(interfaces.InterfaceTests): # common sample data master_row = [ fakedb.Master(id=7, name="some:master", active=1, last_active=SOMETIME), ] # tests def test_signature_findMasterId(self): @self.assertArgSpecMatches(self.db.masters.findMasterId) def findMasterId(self, name): pass def test_signature_setMasterState(self): @self.assertArgSpecMatches(self.db.masters.setMasterState) def setMasterState(self, masterid, active): pass def test_signature_getMaster(self): @self.assertArgSpecMatches(self.db.masters.getMaster) def getMaster(self, masterid): pass def test_signature_getMasters(self): @self.assertArgSpecMatches(self.db.masters.getMasters) def getMasters(self): pass @defer.inlineCallbacks def 
test_findMasterId_new(self): id = yield self.db.masters.findMasterId('some:master') masterdict = yield self.db.masters.getMaster(id) self.assertEqual(masterdict, dict(id=id, name='some:master', active=False, last_active=SOMETIME_DT)) @defer.inlineCallbacks def test_findMasterId_exists(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master'), ]) id = yield self.db.masters.findMasterId('some:master') self.assertEqual(id, 7) @defer.inlineCallbacks def test_setMasterState_when_missing(self): activated = \ yield self.db.masters.setMasterState(masterid=7, active=True) self.assertFalse(activated) @defer.inlineCallbacks def test_setMasterState_true_when_active(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=1, last_active=OTHERTIME), ]) activated = yield self.db.masters.setMasterState( masterid=7, active=True) self.assertFalse(activated) # it was already active masterdict = yield self.db.masters.getMaster(7) self.assertEqual(masterdict, dict(id=7, name='some:master', active=True, last_active=SOMETIME_DT)) # timestamp updated @defer.inlineCallbacks def test_setMasterState_true_when_inactive(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=0, last_active=OTHERTIME), ]) activated = yield self.db.masters.setMasterState( masterid=7, active=True) self.assertTrue(activated) masterdict = yield self.db.masters.getMaster(7) self.assertEqual(masterdict, dict(id=7, name='some:master', active=True, last_active=SOMETIME_DT)) @defer.inlineCallbacks def test_setMasterState_false_when_active(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=1, last_active=OTHERTIME), ]) deactivated = yield self.db.masters.setMasterState( masterid=7, active=False) self.assertTrue(deactivated) masterdict = yield self.db.masters.getMaster(7) self.assertEqual(masterdict, dict(id=7, name='some:master', active=False, last_active=OTHERTIME_DT)) @defer.inlineCallbacks def 
test_setMasterState_false_when_inactive(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=0, last_active=OTHERTIME), ]) deactivated = yield self.db.masters.setMasterState( masterid=7, active=False) self.assertFalse(deactivated) masterdict = yield self.db.masters.getMaster(7) self.assertEqual(masterdict, dict(id=7, name='some:master', active=False, last_active=OTHERTIME_DT)) @defer.inlineCallbacks def test_getMaster(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=0, last_active=SOMETIME), ]) masterdict = yield self.db.masters.getMaster(7) validation.verifyDbDict(self, 'masterdict', masterdict) self.assertEqual(masterdict, dict(id=7, name='some:master', active=False, last_active=SOMETIME_DT)) @defer.inlineCallbacks def test_getMaster_missing(self): masterdict = yield self.db.masters.getMaster(7) self.assertEqual(masterdict, None) @defer.inlineCallbacks def test_getMasters(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=0, last_active=SOMETIME), fakedb.Master(id=8, name='other:master', active=1, last_active=OTHERTIME), ]) masterlist = yield self.db.masters.getMasters() for masterdict in masterlist: validation.verifyDbDict(self, 'masterdict', masterdict) def masterKey(master): return master['id'] expected = sorted([ dict(id=7, name='some:master', active=0, last_active=SOMETIME_DT), dict(id=8, name='other:master', active=1, last_active=OTHERTIME_DT), ], key=masterKey) self.assertEqual(sorted(masterlist, key=masterKey), expected) class RealTests(Tests): # tests that only "real" implementations will pass @defer.inlineCallbacks def test_setMasterState_false_deletes_links(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=1, last_active=OTHERTIME), fakedb.Scheduler(id=21), fakedb.SchedulerMaster(schedulerid=21, masterid=7), ]) deactivated = yield self.db.masters.setMasterState( masterid=7, active=False) self.assertTrue(deactivated) # check that 
the scheduler_masters row was deleted def thd(conn): tbl = self.db.model.scheduler_masters self.assertEqual(conn.execute(tbl.select()).fetchall(), []) yield self.db.pool.do(thd) class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() self.reactor.advance(SOMETIME) class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['masters', 'schedulers', 'scheduler_masters']) self.reactor.advance(SOMETIME) self.db.masters = masters.MastersConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_model.py000066400000000000000000000041301413250514000234460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.db import enginestrategy from buildbot.db import model from buildbot.test.util import db class DBConnector_Basic(db.RealDatabaseMixin, unittest.TestCase): """ Basic tests of the DBConnector class - all start with an empty DB """ @defer.inlineCallbacks def setUp(self): yield self.setUpRealDatabase() engine = enginestrategy.create_engine(self.db_url, basedir=os.path.abspath('basedir')) # mock out the pool, and set up the model self.db = mock.Mock() self.db.pool.do = lambda thd: defer.maybeDeferred(thd, engine.connect()) self.db.pool.do_with_engine = lambda thd: defer.maybeDeferred(thd, engine) self.db.model = model.Model(self.db) self.db.start() def tearDown(self): self.db.stop() return self.tearDownRealDatabase() @defer.inlineCallbacks def test_is_current_empty(self): res = yield self.db.model.is_current() self.assertFalse(res) @defer.inlineCallbacks def test_is_current_full(self): yield self.db.model.upgrade() res = yield self.db.model.is_current() self.assertTrue(res) # the upgrade method is very well-tested by the integration tests; the # remainder of the object is just tables. buildbot-3.4.0/master/buildbot/test/unit/db/test_pool.py000066400000000000000000000153401413250514000233240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import time import sqlalchemy as sa from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.db import pool from buildbot.test.util import db from buildbot.util import sautils class Basic(unittest.TestCase): # basic tests, just using an in-memory SQL db and one thread def setUp(self): self.engine = sa.create_engine('sqlite://') self.engine.should_retry = lambda _: False self.engine.optimal_thread_pool_size = 1 self.pool = pool.DBThreadPool(self.engine, reactor=reactor) @defer.inlineCallbacks def tearDown(self): yield self.pool.shutdown() @defer.inlineCallbacks def test_do(self): def add(conn, addend1, addend2): rp = conn.execute("SELECT %d + %d" % (addend1, addend2)) return rp.scalar() res = yield self.pool.do(add, 10, 11) self.assertEqual(res, 21) @defer.inlineCallbacks def expect_failure(self, d, expected_exception, expect_logged_error=False): exception = None try: yield d except Exception as e: exception = e errors = self.flushLoggedErrors(expected_exception) if expect_logged_error: self.assertEqual(len(errors), 1) self.assertTrue(isinstance(exception, expected_exception)) def test_do_error(self): def fail(conn): rp = conn.execute("EAT COOKIES") return rp.scalar() return self.expect_failure(self.pool.do(fail), sa.exc.OperationalError, expect_logged_error=True) def test_do_exception(self): def raise_something(conn): raise RuntimeError("oh noes") return self.expect_failure(self.pool.do(raise_something), RuntimeError, expect_logged_error=True) @defer.inlineCallbacks def test_do_with_engine(self): def add(engine, addend1, addend2): rp = engine.execute("SELECT %d + %d" % (addend1, addend2)) return rp.scalar() res = yield self.pool.do_with_engine(add, 
10, 11) self.assertEqual(res, 21) def test_do_with_engine_exception(self): def fail(engine): rp = engine.execute("EAT COOKIES") return rp.scalar() return self.expect_failure(self.pool.do_with_engine(fail), sa.exc.OperationalError) @defer.inlineCallbacks def test_persistence_across_invocations(self): # NOTE: this assumes that both methods are called with the same # connection; if they run in parallel threads then it is not valid to # assume that the database engine will have finalized the first # transaction (and thus created the table) by the time the second # transaction runs. This is why we set optimal_thread_pool_size in # setUp. def create_table(engine): engine.execute("CREATE TABLE tmp ( a integer )") yield self.pool.do_with_engine(create_table) def insert_into_table(engine): engine.execute("INSERT INTO tmp values ( 1 )") yield self.pool.do_with_engine(insert_into_table) class Stress(unittest.TestCase): def setUp(self): setup_engine = sa.create_engine('sqlite:///test.sqlite') setup_engine.execute("pragma journal_mode = wal") setup_engine.execute("CREATE TABLE test (a integer, b integer)") self.engine = sa.create_engine('sqlite:///test.sqlite') self.engine.optimal_thread_pool_size = 2 self.pool = pool.DBThreadPool(self.engine, reactor=reactor) @defer.inlineCallbacks def tearDown(self): yield self.pool.shutdown() os.unlink("test.sqlite") @defer.inlineCallbacks def test_inserts(self): def write(conn): trans = conn.begin() conn.execute("INSERT INTO test VALUES (1, 1)") time.sleep(31) trans.commit() d1 = self.pool.do(write) def write2(conn): trans = conn.begin() conn.execute("INSERT INTO test VALUES (1, 1)") trans.commit() d2 = defer.Deferred() d2.addCallback(lambda _: self.pool.do(write2)) reactor.callLater(0.1, d2.callback, None) yield defer.DeferredList([d1, d2]) # don't run this test, since it takes 30s del test_inserts class BasicWithDebug(Basic): # same thing, but with debug=True def setUp(self): pool.debug = True return super().setUp() def tearDown(self): 
pool.debug = False return super().tearDown() class Native(unittest.TestCase, db.RealDatabaseMixin): # similar tests, but using the BUILDBOT_TEST_DB_URL @defer.inlineCallbacks def setUp(self): yield self.setUpRealDatabase(want_pool=False) self.pool = pool.DBThreadPool(self.db_engine, reactor=reactor) @defer.inlineCallbacks def tearDown(self): # try to delete the 'native_tests' table meta = sa.MetaData() native_tests = sautils.Table("native_tests", meta) def thd(conn): native_tests.drop(bind=self.db_engine, checkfirst=True) yield self.pool.do(thd) # tearDownRealDatabase() won't shutdown the pool as want_pool was false in # setUpRealDatabase call yield self.pool.shutdown() yield self.tearDownRealDatabase() @defer.inlineCallbacks def test_ddl_and_queries(self): meta = sa.MetaData() native_tests = sautils.Table("native_tests", meta, sa.Column('name', sa.String(length=200))) # perform a DDL operation and immediately try to access that table; # this has caused problems in the past, so this is basically a # regression test. def ddl(conn): t = conn.begin() native_tests.create(bind=conn) t.commit() yield self.pool.do(ddl) def access(conn): native_tests.insert(bind=conn).execute([{'name': 'foo'}]) yield self.pool.do(access) buildbot-3.4.0/master/buildbot/test/unit/db/test_schedulers.py000066400000000000000000000406211413250514000245140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import schedulers from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import db from buildbot.test.util import interfaces from buildbot.test.util import validation class Tests(interfaces.InterfaceTests): # test data ss92 = fakedb.SourceStamp(id=92) change3 = fakedb.Change(changeid=3) change4 = fakedb.Change(changeid=4) change5 = fakedb.Change(changeid=5) change6 = fakedb.Change(changeid=6, branch='sql') scheduler24 = fakedb.Scheduler(id=24, name='schname') master13 = fakedb.Master(id=13, name='m1', active=1) scheduler24master = fakedb.SchedulerMaster(schedulerid=24, masterid=13) scheduler25 = fakedb.Scheduler(id=25, name='schname2') master14 = fakedb.Master(id=14, name='m2', active=0) scheduler25master = fakedb.SchedulerMaster(schedulerid=25, masterid=14) # tests def test_signature_enable(self): @self.assertArgSpecMatches(self.db.schedulers.enable) def enable(self, schedulerid, v): pass @defer.inlineCallbacks def test_enable(self): yield self.insertTestData([self.scheduler24, self.master13, self.scheduler24master]) sch = yield self.db.schedulers.getScheduler(24) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=24, name='schname', enabled=True, masterid=13)) yield self.db.schedulers.enable(24, False) sch = yield self.db.schedulers.getScheduler(24) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=24, name='schname', enabled=False, masterid=13)) yield self.db.schedulers.enable(24, True) sch = yield self.db.schedulers.getScheduler(24) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( 
id=24, name='schname', enabled=True, masterid=13)) def test_signature_classifyChanges(self): @self.assertArgSpecMatches(self.db.schedulers.classifyChanges) def classifyChanges(self, schedulerid, classifications): pass @defer.inlineCallbacks def test_classifyChanges(self): yield self.insertTestData([self.ss92, self.change3, self.change4, self.scheduler24]) yield self.db.schedulers.classifyChanges(24, {3: False, 4: True}) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {3: False, 4: True}) @defer.inlineCallbacks def test_classifyChanges_again(self): # test reclassifying changes, which may happen during some timing # conditions. It's important that this test uses multiple changes, # only one of which already exists yield self.insertTestData([ self.ss92, self.change3, self.change4, self.change5, self.change6, self.scheduler24, fakedb.SchedulerChange(schedulerid=24, changeid=5, important=0), ]) yield self.db.schedulers.classifyChanges( 24, {3: True, 4: False, 5: True, 6: False}) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {3: True, 4: False, 5: True, 6: False}) def test_signature_flushChangeClassifications(self): @self.assertArgSpecMatches( self.db.schedulers.flushChangeClassifications) def flushChangeClassifications(self, schedulerid, less_than=None): pass @defer.inlineCallbacks def test_flushChangeClassifications(self): yield self.insertTestData([self.ss92, self.change3, self.change4, self.change5, self.scheduler24]) yield self.addClassifications(24, (3, 1), (4, 0), (5, 1)) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {3: True, 4: False, 5: True}) yield self.db.schedulers.flushChangeClassifications(24) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {}) @defer.inlineCallbacks def test_flushChangeClassifications_less_than(self): yield self.insertTestData([self.ss92, self.change3, self.change4, self.change5, self.scheduler24]) 
yield self.addClassifications(24, (3, 1), (4, 0), (5, 1)) yield self.db.schedulers.flushChangeClassifications(24, less_than=5) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {5: True}) def test_signature_getChangeClassifications(self): @self.assertArgSpecMatches(self.db.schedulers.getChangeClassifications) def getChangeClassifications(self, schedulerid, branch=-1, repository=-1, project=-1, codebase=-1): pass @defer.inlineCallbacks def test_getChangeClassifications(self): yield self.insertTestData([self.ss92, self.change3, self.change4, self.change5, self.change6, self.scheduler24]) yield self.addClassifications(24, (3, 1), (4, 0), (5, 1), (6, 1)) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {3: True, 4: False, 5: True, 6: True}) @defer.inlineCallbacks def test_getChangeClassifications_branch(self): yield self.insertTestData([self.ss92, self.change3, self.change4, self.change5, self.change6, self.scheduler24]) yield self.addClassifications(24, (3, 1), (4, 0), (5, 1), (6, 1)) res = yield self.db.schedulers.getChangeClassifications(24, branch='sql') self.assertEqual(res, {6: True}) def test_signature_findSchedulerId(self): @self.assertArgSpecMatches(self.db.schedulers.findSchedulerId) def findSchedulerId(self, name): pass @defer.inlineCallbacks def test_findSchedulerId_new(self): id = yield self.db.schedulers.findSchedulerId('schname') sch = yield self.db.schedulers.getScheduler(id) self.assertEqual(sch['name'], 'schname') @defer.inlineCallbacks def test_findSchedulerId_existing(self): id = yield self.db.schedulers.findSchedulerId('schname') id2 = yield self.db.schedulers.findSchedulerId('schname') self.assertEqual(id, id2) def test_signature_setSchedulerMaster(self): @self.assertArgSpecMatches(self.db.schedulers.setSchedulerMaster) def setSchedulerMaster(self, schedulerid, masterid): pass @defer.inlineCallbacks def test_setSchedulerMaster_fresh(self): yield self.insertTestData([self.scheduler24, 
self.master13]) yield self.db.schedulers.setSchedulerMaster(24, 13) sch = yield self.db.schedulers.getScheduler(24) self.assertEqual(sch['masterid'], 13) @defer.inlineCallbacks def test_setSchedulerMaster_inactive_but_linked(self): d = self.insertTestData([ self.master13, self.scheduler25, self.master14, self.scheduler25master, ]) d.addCallback(lambda _: self.db.schedulers.setSchedulerMaster(25, 13)) yield self.assertFailure(d, schedulers.SchedulerAlreadyClaimedError) @defer.inlineCallbacks def test_setSchedulerMaster_inactive_but_linked_to_this_master(self): yield self.insertTestData([ self.scheduler25, self.master14, self.scheduler25master, ]) yield self.db.schedulers.setSchedulerMaster(25, 14) @defer.inlineCallbacks def test_setSchedulerMaster_active(self): d = self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, ]) d.addCallback(lambda _: self.db.schedulers.setSchedulerMaster(24, 14)) yield self.assertFailure(d, schedulers.SchedulerAlreadyClaimedError) @defer.inlineCallbacks def test_setSchedulerMaster_None(self): yield self.insertTestData([ self.scheduler25, self.master14, self.scheduler25master, ]) yield self.db.schedulers.setSchedulerMaster(25, None) sch = yield self.db.schedulers.getScheduler(25) self.assertEqual(sch['masterid'], None) @defer.inlineCallbacks def test_setSchedulerMaster_None_unowned(self): yield self.insertTestData([self.scheduler25]) yield self.db.schedulers.setSchedulerMaster(25, None) sch = yield self.db.schedulers.getScheduler(25) self.assertEqual(sch['masterid'], None) def test_signature_getScheduler(self): @self.assertArgSpecMatches(self.db.schedulers.getScheduler) def getScheduler(self, schedulerid): pass @defer.inlineCallbacks def test_getScheduler(self): yield self.insertTestData([self.scheduler24]) sch = yield self.db.schedulers.getScheduler(24) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=24, name='schname', enabled=True, masterid=None)) @defer.inlineCallbacks def 
test_getScheduler_missing(self): sch = yield self.db.schedulers.getScheduler(24) self.assertEqual(sch, None) @defer.inlineCallbacks def test_getScheduler_active(self): yield self.insertTestData([self.scheduler24, self.master13, self.scheduler24master]) sch = yield self.db.schedulers.getScheduler(24) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=24, name='schname', enabled=True, masterid=13)) @defer.inlineCallbacks def test_getScheduler_inactive_but_linked(self): yield self.insertTestData([self.scheduler25, self.master14, self.scheduler25master]) sch = yield self.db.schedulers.getScheduler(25) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=25, name='schname2', enabled=True, masterid=14)) # row exists, but marked inactive def test_signature_getSchedulers(self): @self.assertArgSpecMatches(self.db.schedulers.getSchedulers) def getSchedulers(self, active=None, masterid=None): pass @defer.inlineCallbacks def test_getSchedulers(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25, ]) def schKey(sch): return sch['id'] schlist = yield self.db.schedulers.getSchedulers() [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist, key=schKey), sorted([ dict(id=24, name='schname', enabled=True, masterid=13), dict(id=25, name='schname2', enabled=True, masterid=None), ], key=schKey)) @defer.inlineCallbacks def test_getSchedulers_masterid(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25, ]) schlist = yield self.db.schedulers.getSchedulers(masterid=13) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), sorted([ dict(id=24, name='schname', enabled=True, masterid=13), ])) @defer.inlineCallbacks def test_getSchedulers_active(self): yield self.insertTestData([ self.scheduler24, self.master13, 
self.scheduler24master, self.scheduler25 ]) schlist = yield self.db.schedulers.getSchedulers(active=True) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), sorted([ dict(id=24, name='schname', enabled=True, masterid=13), ])) @defer.inlineCallbacks def test_getSchedulers_active_masterid(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25 ]) schlist = yield self.db.schedulers.getSchedulers( active=True, masterid=13) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), sorted([ dict(id=24, name='schname', enabled=True, masterid=13), ])) schlist = yield self.db.schedulers.getSchedulers( active=True, masterid=14) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), []) @defer.inlineCallbacks def test_getSchedulers_inactive(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25 ]) schlist = yield self.db.schedulers.getSchedulers(active=False) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), sorted([ dict(id=25, name='schname2', enabled=True, masterid=None), ])) @defer.inlineCallbacks def test_getSchedulers_inactive_masterid(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25 ]) schlist = yield self.db.schedulers.getSchedulers( active=False, masterid=13) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), []) schlist = yield self.db.schedulers.getSchedulers( active=False, masterid=14) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), []) # always returns [] by spec! 
class RealTests(Tests): # tests that only "real" implementations will pass pass class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() def addClassifications(self, schedulerid, *classifications): self.db.schedulers.fakeClassifications(schedulerid, dict(classifications)) return defer.succeed(None) class TestRealDB(db.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['changes', 'schedulers', 'masters', 'sourcestamps', 'patches', 'scheduler_masters', 'scheduler_changes']) self.db.schedulers = \ schedulers.SchedulersConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() @defer.inlineCallbacks def addClassifications(self, schedulerid, *classifications): def thd(conn): q = self.db.model.scheduler_changes.insert() conn.execute(q, [ dict(changeid=c[0], schedulerid=schedulerid, important=c[1]) for c in classifications]) yield self.db.pool.do(thd) buildbot-3.4.0/master/buildbot/test/unit/db/test_sourcestamps.py000066400000000000000000000407021413250514000251030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import sourcestamps from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.util import epoch2datetime CREATED_AT = 927845299 def sourceStampKey(sourceStamp): return (sourceStamp['repository'], sourceStamp['branch'], sourceStamp['created_at']) class Tests(interfaces.InterfaceTests): def test_signature_findSourceStampId(self): @self.assertArgSpecMatches(self.db.sourcestamps.findSourceStampId) def findSourceStampId(self, branch=None, revision=None, repository=None, project=None, codebase=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None): pass def test_signature_getSourceStamp(self): @self.assertArgSpecMatches(self.db.sourcestamps.getSourceStamp) def getSourceStamp(self, key, no_cache=False): pass def test_signature_getSourceStamps(self): @self.assertArgSpecMatches(self.db.sourcestamps.getSourceStamps) def getSourceStamps(self): pass @defer.inlineCallbacks def test_findSourceStampId_simple(self): self.reactor.advance(CREATED_AT) ssid = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper') ssdict = yield self.db.sourcestamps.getSourceStamp(ssid) validation.verifyDbDict(self, 'ssdict', ssdict) self.assertEqual(ssdict, { 'branch': 'production', 'codebase': 'cb', 'patchid': None, 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'project': 'stamper', 'repository': 'test://repo', 'revision': 'abdef', 'ssid': ssid, 'created_at': epoch2datetime(CREATED_AT), }) @defer.inlineCallbacks def test_findSourceStampId_simple_unique(self): ssid1 = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='abdef', 
repository='test://repo', codebase='cb', project='stamper') ssid2 = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='xxxxx', # different revision repository='test://repo', codebase='cb', project='stamper') ssid3 = yield self.db.sourcestamps.findSourceStampId( # same as ssid1 branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper') self.assertEqual(ssid1, ssid3) self.assertNotEqual(ssid1, ssid2) @defer.inlineCallbacks def test_findSourceStampId_simple_unique_patch(self): ssid1 = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper', patch_body=b'++ --', patch_level=1, patch_author='me', patch_comment='hi', patch_subdir='.') ssid2 = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper', patch_body=b'++ --', patch_level=1, patch_author='me', patch_comment='hi', patch_subdir='.') # even with the same patch contents, we get different ids self.assertNotEqual(ssid1, ssid2) @defer.inlineCallbacks def test_findSourceStampId_patch(self): self.reactor.advance(CREATED_AT) ssid = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper', patch_body=b'my patch', patch_level=3, patch_subdir='master/', patch_author='me', patch_comment="comment") ssdict = yield self.db.sourcestamps.getSourceStamp(ssid) validation.verifyDbDict(self, 'ssdict', ssdict) self.assertEqual(ssdict, { 'branch': 'production', 'codebase': 'cb', 'patchid': 1, 'patch_author': 'me', 'patch_body': b'my patch', 'patch_comment': 'comment', 'patch_level': 3, 'patch_subdir': 'master/', 'project': 'stamper', 'repository': 'test://repo', 'revision': 'abdef', 'created_at': epoch2datetime(CREATED_AT), 'ssid': ssid, }) @defer.inlineCallbacks def test_getSourceStamp_simple(self): yield 
self.insertTestData([ fakedb.SourceStamp(id=234, branch='br', revision='rv', repository='rep', codebase='cb', project='prj', created_at=CREATED_AT), ]) ssdict = yield self.db.sourcestamps.getSourceStamp(234) validation.verifyDbDict(self, 'ssdict', ssdict) self.assertEqual(ssdict, { 'ssid': 234, 'created_at': epoch2datetime(CREATED_AT), 'branch': 'br', 'revision': 'rv', 'repository': 'rep', 'codebase': 'cb', 'project': 'prj', 'patchid': None, 'patch_body': None, 'patch_level': None, 'patch_subdir': None, 'patch_author': None, 'patch_comment': None, }) @defer.inlineCallbacks def test_getSourceStamp_simple_None(self): "check that NULL branch and revision are handled correctly" yield self.insertTestData([ fakedb.SourceStamp(id=234, branch=None, revision=None, repository='rep', codebase='cb', project='prj'), ]) ssdict = yield self.db.sourcestamps.getSourceStamp(234) validation.verifyDbDict(self, 'ssdict', ssdict) self.assertEqual((ssdict['branch'], ssdict['revision']), (None, None)) @defer.inlineCallbacks def test_getSourceStamp_patch(self): yield self.insertTestData([ fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='bar', patch_comment='foo', subdir='/foo', patchlevel=3), fakedb.SourceStamp(id=234, patchid=99), ]) ssdict = yield self.db.sourcestamps.getSourceStamp(234) validation.verifyDbDict(self, 'ssdict', ssdict) self.assertEqual(dict((k, v) for k, v in ssdict.items() if k.startswith('patch_')), dict(patch_body=b'hello, world', patch_level=3, patch_author='bar', patch_comment='foo', patch_subdir='/foo')) @defer.inlineCallbacks def test_getSourceStamp_nosuch(self): ssdict = yield self.db.sourcestamps.getSourceStamp(234) self.assertEqual(ssdict, None) @defer.inlineCallbacks def test_getSourceStamps(self): yield self.insertTestData([ fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='bar', patch_comment='foo', subdir='/foo', patchlevel=3), fakedb.SourceStamp(id=234, revision='r', project='p', codebase='c', repository='rep', branch='b', 
patchid=99, created_at=CREATED_AT), fakedb.SourceStamp(id=235, revision='r2', project='p2', codebase='c2', repository='rep2', branch='b2', patchid=None, created_at=CREATED_AT + 10), ]) sourcestamps = yield self.db.sourcestamps.getSourceStamps() self.assertEqual(sorted(sourcestamps, key=sourceStampKey), sorted([{ 'branch': 'b', 'codebase': 'c', 'patch_author': 'bar', 'patchid': 99, 'patch_body': b'hello, world', 'patch_comment': 'foo', 'patch_level': 3, 'patch_subdir': '/foo', 'project': 'p', 'repository': 'rep', 'revision': 'r', 'created_at': epoch2datetime(CREATED_AT), 'ssid': 234, }, { 'branch': 'b2', 'codebase': 'c2', 'patchid': None, 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'project': 'p2', 'repository': 'rep2', 'revision': 'r2', 'created_at': epoch2datetime(CREATED_AT + 10), 'ssid': 235, }], key=sourceStampKey)) @defer.inlineCallbacks def test_getSourceStamps_empty(self): sourcestamps = yield self.db.sourcestamps.getSourceStamps() self.assertEqual(sourcestamps, []) def test_signature_getSourceStampsForBuild(self): @self.assertArgSpecMatches(self.db.sourcestamps.getSourceStampsForBuild) def getSourceStampsForBuild(self, buildid): pass @defer.inlineCallbacks def do_test_getSourceStampsForBuild(self, rows, buildid, expected): yield self.insertTestData(rows) sourcestamps = yield self.db.sourcestamps.getSourceStampsForBuild(buildid) self.assertEqual(sorted(sourcestamps, key=sourceStampKey), sorted(expected, key=sourceStampKey)) def test_getSourceStampsForBuild_OneCodeBase(self): rows = [fakedb.Master(id=88, name="bar"), fakedb.Worker(id=13, name='one'), fakedb.Builder(id=77, name='A'), fakedb.SourceStamp(id=234, codebase='A', created_at=CREATED_AT, revision="aaa"), # fakedb.Change(changeid=14, codebase='A', sourcestampid=234), fakedb.Buildset(id=30, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=234, buildsetid=30), fakedb.BuildRequest(id=19, 
buildsetid=30, builderid=77, priority=13, submitted_at=1300305712, results=-1), fakedb.Build(id=50, buildrequestid=19, number=5, masterid=88, builderid=77, state_string="test", workerid=13, started_at=1304262222), ] expected = [{ 'branch': 'master', 'codebase': 'A', 'created_at': epoch2datetime(CREATED_AT), 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'proj', 'repository': 'repo', 'revision': 'aaa', 'ssid': 234}] return self.do_test_getSourceStampsForBuild(rows, 50, expected) def test_getSourceStampsForBuild_3CodeBases(self): rows = [fakedb.Master(id=88, name="bar"), fakedb.Worker(id=13, name='one'), fakedb.Builder(id=77, name='A'), fakedb.SourceStamp(id=234, codebase='A', created_at=CREATED_AT, revision="aaa"), fakedb.SourceStamp(id=235, codebase='B', created_at=CREATED_AT + 10, revision="bbb"), fakedb.SourceStamp(id=236, codebase='C', created_at=CREATED_AT + 20, revision="ccc"), # fakedb.Change(changeid=14, codebase='A', sourcestampid=234), fakedb.Buildset(id=30, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=234, buildsetid=30), fakedb.BuildsetSourceStamp(sourcestampid=235, buildsetid=30), fakedb.BuildsetSourceStamp(sourcestampid=236, buildsetid=30), fakedb.BuildRequest(id=19, buildsetid=30, builderid=77, priority=13, submitted_at=1300305712, results=-1), fakedb.Build(id=50, buildrequestid=19, number=5, masterid=88, builderid=77, state_string="test", workerid=13, started_at=1304262222), ] expected = [{'branch': 'master', 'codebase': 'A', 'created_at': epoch2datetime(CREATED_AT), 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'proj', 'repository': 'repo', 'revision': 'aaa', 'ssid': 234}, {'branch': 'master', 'codebase': 'B', 'created_at': epoch2datetime(CREATED_AT + 10), 'patch_author': None, 'patch_body': None, 'patch_comment': None, 
'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'proj', 'repository': 'repo', 'revision': 'bbb', 'ssid': 235}, {'branch': 'master', 'codebase': 'C', 'created_at': epoch2datetime(CREATED_AT + 20), 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'proj', 'repository': 'repo', 'revision': 'ccc', 'ssid': 236}] return self.do_test_getSourceStampsForBuild(rows, 50, expected) class RealTests(Tests): pass class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['sourcestamps', 'patches', 'masters', 'workers', 'buildsets', 'builders', 'buildrequests', 'buildset_sourcestamps', 'builds']) self.db.sourcestamps = \ sourcestamps.SourceStampsConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_state.py000066400000000000000000000171271413250514000235000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.db import state from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import db class TestStateConnectorComponent( connector_component.ConnectorComponentMixin, db.TestCase): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['objects', 'object_state']) self.db.state = state.StateConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() @defer.inlineCallbacks def test_getObjectId_new(self): objectid = yield self.db.state.getObjectId('someobj', 'someclass') yield self.assertNotEqual(objectid, None) def thd(conn): q = self.db.model.objects.select() rows = conn.execute(q).fetchall() self.assertEqual( [(r.id, r.name, r.class_name) for r in rows], [(objectid, 'someobj', 'someclass')]) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_getObjectId_existing(self): yield self.insertTestData([ fakedb.Object(id=19, name='someobj', class_name='someclass')]) objectid = yield self.db.state.getObjectId('someobj', 'someclass') self.assertEqual(objectid, 19) @defer.inlineCallbacks def test_getObjectId_conflict(self): # set up to insert a row between looking for an existing object # and adding a new one, triggering the fallback to re-running # the select. 
def hook(conn): conn.execute(self.db.model.objects.insert(), id=27, name='someobj', class_name='someclass') self.db.state._test_timing_hook = hook objectid = yield self.db.state.getObjectId('someobj', 'someclass') self.assertEqual(objectid, 27) @defer.inlineCallbacks def test_getObjectId_new_big_name(self): objectid = yield self.db.state.getObjectId('someobj' * 150, 'someclass') expn = 'someobj' * 9 + 's132bf9b89b0cdbc040d1ebc69e0dbee85dff720a' self.assertNotEqual(objectid, None) def thd(conn): q = self.db.model.objects.select() rows = conn.execute(q).fetchall() self.assertEqual( [(r.id, r.name, r.class_name) for r in rows], [(objectid, expn, 'someclass')]) yield self.db.pool.do(thd) def test_getState_missing(self): d = self.db.state.getState(10, 'nosuch') return self.assertFailure(d, KeyError) @defer.inlineCallbacks def test_getState_missing_default(self): val = yield self.db.state.getState(10, 'nosuch', 'abc') self.assertEqual(val, 'abc') @defer.inlineCallbacks def test_getState_missing_default_None(self): val = yield self.db.state.getState(10, 'nosuch', None) self.assertEqual(val, None) @defer.inlineCallbacks def test_getState_present(self): yield self.insertTestData([ fakedb.Object(id=10, name='x', class_name='y'), fakedb.ObjectState(objectid=10, name='x', value_json='[1,2]'), ]) val = yield self.db.state.getState(10, 'x') self.assertEqual(val, [1, 2]) def test_getState_badjson(self): d = self.insertTestData([ fakedb.Object(id=10, name='x', class_name='y'), fakedb.ObjectState(objectid=10, name='x', value_json='ff[1'), ]) d.addCallback(lambda _: self.db.state.getState(10, 'x')) return self.assertFailure(d, TypeError) @defer.inlineCallbacks def test_setState(self): yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), ]) yield self.db.state.setState(10, 'x', [1, 2]) def thd(conn): q = self.db.model.object_state.select() rows = conn.execute(q).fetchall() self.assertEqual( [(r.objectid, r.name, r.value_json) for r in rows], [(10, 'x', '[1, 
2]')]) yield self.db.pool.do(thd) def test_setState_badjson(self): d = self.insertTestData([ fakedb.Object(id=10, name='x', class_name='y'), ]) d.addCallback(lambda _: self.db.state.setState(10, 'x', self)) # self is not JSON-able.. return self.assertFailure(d, TypeError) @defer.inlineCallbacks def test_setState_existing(self): yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), fakedb.ObjectState(objectid=10, name='x', value_json='99'), ]) yield self.db.state.setState(10, 'x', [1, 2]) def thd(conn): q = self.db.model.object_state.select() rows = conn.execute(q).fetchall() self.assertEqual( [(r.objectid, r.name, r.value_json) for r in rows], [(10, 'x', '[1, 2]')]) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_setState_conflict(self): def hook(conn): conn.execute(self.db.model.object_state.insert(), objectid=10, name='x', value_json='22') self.db.state._test_timing_hook = hook yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), ]) yield self.db.state.setState(10, 'x', [1, 2]) def thd(conn): q = self.db.model.object_state.select() rows = conn.execute(q).fetchall() self.assertEqual( [(r.objectid, r.name, r.value_json) for r in rows], [(10, 'x', '22')]) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_atomicCreateState(self): yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), ]) res = yield self.db.state.atomicCreateState(10, 'x', lambda: [1, 2]) self.assertEqual(res, [1, 2]) res = yield self.db.state.getState(10, 'x') self.assertEqual(res, [1, 2]) @defer.inlineCallbacks def test_atomicCreateState_conflict(self): yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), ]) def hook(conn): conn.execute(self.db.model.object_state.insert(), objectid=10, name='x', value_json='22') self.db.state._test_timing_hook = hook res = yield self.db.state.atomicCreateState(10, 'x', lambda: [1, 2]) self.assertEqual(res, 22) res = yield self.db.state.getState(10, 'x') 
self.assertEqual(res, 22) @defer.inlineCallbacks def test_atomicCreateState_nojsonable(self): yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), ]) d = self.db.state.atomicCreateState(10, 'x', object) yield self.assertFailure(d, TypeError) buildbot-3.4.0/master/buildbot/test/unit/db/test_steps.py000066400000000000000000000341351413250514000235140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import time from twisted.internet import defer from twisted.trial import unittest from buildbot.db import steps from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.util import epoch2datetime TIME1 = 1304262222 TIME2 = 1304262223 TIME3 = 1304262224 TIME4 = 1304262235 class Tests(interfaces.InterfaceTests): # common sample data backgroundData = [ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Build(id=31, buildrequestid=41, number=8, masterid=88, builderid=88, workerid=47), ] stepRows = [ fakedb.Step(id=70, number=0, name='one', buildid=30, started_at=TIME1, complete_at=TIME2, state_string='test', results=0), fakedb.Step(id=71, number=1, name='two', buildid=30, started_at=TIME2, complete_at=TIME3, state_string='test', results=2, urls_json='["http://url"]', hidden=1), fakedb.Step(id=72, number=2, name='three', buildid=30, started_at=TIME3), fakedb.Step(id=73, number=0, name='wrong-build', buildid=31), ] stepDicts = [ {'id': 70, 'buildid': 30, 'number': 0, 'name': 'one', 'results': 0, 'started_at': epoch2datetime(TIME1), 'complete_at': epoch2datetime(TIME2), 'state_string': 'test', 'urls': [], 'hidden': False}, {'id': 71, 'buildid': 30, 'number': 1, 'name': 'two', 'results': 2, 'started_at': epoch2datetime(TIME2), 'complete_at': epoch2datetime(TIME3), 'state_string': 'test', 'urls': ['http://url'], 'hidden': True}, {'id': 72, 'buildid': 30, 'number': 2, 'name': 'three', 'results': None, 'started_at': epoch2datetime(TIME3), 'complete_at': None, 'state_string': '', 'urls': [], 'hidden': False}, ] # signature tests def test_signature_getStep(self): 
@self.assertArgSpecMatches(self.db.steps.getStep) def getStep(self, stepid=None, buildid=None, number=None, name=None): pass def test_signature_getSteps(self): @self.assertArgSpecMatches(self.db.steps.getSteps) def getSteps(self, buildid): pass def test_signature_addStep(self): @self.assertArgSpecMatches(self.db.steps.addStep) def addStep(self, buildid, name, state_string): pass def test_signature_startStep(self): @self.assertArgSpecMatches(self.db.steps.startStep) def addStep(self, stepid): pass def test_signature_setStepStateString(self): @self.assertArgSpecMatches(self.db.steps.setStepStateString) def setStepStateString(self, stepid, state_string): pass def test_signature_finishStep(self): @self.assertArgSpecMatches(self.db.steps.finishStep) def finishStep(self, stepid, results, hidden): pass # method tests @defer.inlineCallbacks def test_getStep(self): yield self.insertTestData(self.backgroundData + [self.stepRows[0]]) stepdict = yield self.db.steps.getStep(70) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict, self.stepDicts[0]) @defer.inlineCallbacks def test_getStep_missing(self): stepdict = yield self.db.steps.getStep(50) self.assertEqual(stepdict, None) @defer.inlineCallbacks def test_getStep_number(self): yield self.insertTestData(self.backgroundData + [self.stepRows[1]]) stepdict = yield self.db.steps.getStep(buildid=30, number=1) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict['id'], 71) @defer.inlineCallbacks def test_getStep_number_missing(self): yield self.insertTestData(self.backgroundData + [self.stepRows[1]]) stepdict = yield self.db.steps.getStep(buildid=30, number=9) self.assertEqual(stepdict, None) @defer.inlineCallbacks def test_getStep_name(self): yield self.insertTestData(self.backgroundData + [self.stepRows[2]]) stepdict = yield self.db.steps.getStep(buildid=30, name='three') validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict['id'], 72) 
@defer.inlineCallbacks def test_getStep_name_missing(self): yield self.insertTestData(self.backgroundData + [self.stepRows[2]]) stepdict = yield self.db.steps.getStep(buildid=30, name='five') self.assertEqual(stepdict, None) @defer.inlineCallbacks def test_getStep_invalid(self): d = self.db.steps.getStep(buildid=30) yield self.assertFailure(d, RuntimeError) @defer.inlineCallbacks def test_getSteps(self): yield self.insertTestData(self.backgroundData + self.stepRows) stepdicts = yield self.db.steps.getSteps(buildid=30) [validation.verifyDbDict(self, 'stepdict', stepdict) for stepdict in stepdicts] self.assertEqual(stepdicts, self.stepDicts[:3]) @defer.inlineCallbacks def test_getSteps_none(self): yield self.insertTestData(self.backgroundData + self.stepRows) stepdicts = yield self.db.steps.getSteps(buildid=33) self.assertEqual(stepdicts, []) @defer.inlineCallbacks def test_addStep_getStep(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData) stepid, number, name = yield self.db.steps.addStep(buildid=30, name='new', state_string='new') yield self.db.steps.startStep(stepid=stepid) self.assertEqual((number, name), (0, 'new')) stepdict = yield self.db.steps.getStep(stepid=stepid) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict, { 'id': stepid, 'buildid': 30, 'name': 'new', 'number': 0, 'started_at': epoch2datetime(TIME1), 'complete_at': None, 'results': None, 'state_string': 'new', 'urls': [], 'hidden': False}) @defer.inlineCallbacks def test_addStep_getStep_existing_step(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData + [self.stepRows[0]]) stepid, number, name = yield self.db.steps.addStep( buildid=30, name='new', state_string='new') yield self.db.steps.startStep(stepid=stepid) self.assertEqual((number, name), (1, 'new')) stepdict = yield self.db.steps.getStep(stepid=stepid) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict['number'], number) 
        self.assertEqual(stepdict['name'], name)

    @defer.inlineCallbacks
    def test_addStep_getStep_name_collisions(self):
        # When the requested name already exists for the build, addStep picks
        # the next free '<name>_N' suffix; 'new_step' does not collide with
        # the generated suffixes, so the next free name here is 'new_3'.
        self.reactor.advance(TIME1)
        yield self.insertTestData(self.backgroundData + [
            fakedb.Step(id=73, number=0, name='new', buildid=30),
            fakedb.Step(id=74, number=1, name='new_1', buildid=30),
            fakedb.Step(id=75, number=2, name='new_2', buildid=30),
            fakedb.Step(id=76, number=3, name='new_step', buildid=30),
        ])
        stepid, number, name = yield self.db.steps.addStep(
            buildid=30, name='new', state_string='new')
        yield self.db.steps.startStep(stepid=stepid)
        self.assertEqual((number, name), (4, 'new_3'))
        stepdict = yield self.db.steps.getStep(stepid=stepid)
        validation.verifyDbDict(self, 'stepdict', stepdict)
        self.assertEqual(stepdict['number'], number)
        self.assertEqual(stepdict['name'], name)

    @defer.inlineCallbacks
    def test_setStepStateString(self):
        yield self.insertTestData(self.backgroundData + [self.stepRows[2]])
        yield self.db.steps.setStepStateString(stepid=72, state_string='aaa')
        stepdict = yield self.db.steps.getStep(stepid=72)
        self.assertEqual(stepdict['state_string'], 'aaa')

    @defer.inlineCallbacks
    def test_addURL(self):
        yield self.insertTestData(self.backgroundData + [self.stepRows[2]])
        yield self.db.steps.addURL(stepid=72, name='foo', url='bar')
        stepdict = yield self.db.steps.getStep(stepid=72)
        self.assertEqual(stepdict['urls'], [{'name': 'foo', 'url': 'bar'}])

    @defer.inlineCallbacks
    def test_addURL_race(self):
        # Two concurrent addURL calls must not lose either URL; the _racehook
        # sleep widens the window in which the second call can interleave.
        yield self.insertTestData(self.backgroundData + [self.stepRows[2]])
        yield defer.gatherResults([
            # only a tiny sleep is required to see the problem.
            self.db.steps.addURL(stepid=72, name='foo', url='bar',
                                 _racehook=lambda: time.sleep(.01)),
            self.db.steps.addURL(stepid=72, name='foo2', url='bar2')])
        stepdict = yield self.db.steps.getStep(stepid=72)

        def urlKey(url):
            # sort key: compare URL entries by name
            return url['name']

        # order is not guaranteed though
        self.assertEqual(sorted(stepdict['urls'], key=urlKey),
                         sorted([{'name': 'foo', 'url': 'bar'},
                                 {'name': 'foo2', 'url': 'bar2'}],
                                key=urlKey))

    @defer.inlineCallbacks
    def test_addURL_no_duplicate(self):
        # adding the same (name, url) pair twice stores it only once
        yield self.insertTestData(self.backgroundData + [self.stepRows[2]])
        yield defer.gatherResults([
            self.db.steps.addURL(stepid=72, name='foo', url='bar'),
            self.db.steps.addURL(stepid=72, name='foo', url='bar')])
        stepdict = yield self.db.steps.getStep(stepid=72)
        self.assertEqual(stepdict['urls'], [{'name': 'foo', 'url': 'bar'}])

    @defer.inlineCallbacks
    def test_finishStep(self):
        # finishStep records results and stamps complete_at with current time
        self.reactor.advance(TIME2)
        yield self.insertTestData(self.backgroundData + [self.stepRows[2]])
        yield self.db.steps.finishStep(stepid=72, results=11, hidden=False)
        stepdict = yield self.db.steps.getStep(stepid=72)
        self.assertEqual(stepdict['results'], 11)
        self.assertEqual(stepdict['complete_at'], epoch2datetime(TIME2))
        self.assertEqual(stepdict['hidden'], False)

    @defer.inlineCallbacks
    def test_finishStep_hidden(self):
        yield self.insertTestData(self.backgroundData + [self.stepRows[2]])
        yield self.db.steps.finishStep(stepid=72, results=11, hidden=True)
        stepdict = yield self.db.steps.getStep(stepid=72)
        self.assertEqual(stepdict['hidden'], True)


class RealTests(Tests):

    # the fake connector doesn't deal with this edge case

    @defer.inlineCallbacks
    def test_addStep_getStep_name_collisions_too_long(self):
        # Collision handling must also respect the name-length limit:
        # the base name is truncated to make room for the '_N' suffix
        # (presumably the column is capped at 50 chars — see the
        # *_extra_digits test below for the 3-digit case).
        self.reactor.advance(TIME1)
        yield self.insertTestData(self.backgroundData + [
            fakedb.Step(id=73, number=0, name='a' * 49, buildid=30),
            fakedb.Step(id=74, number=1, name='a' * 48 + '_1', buildid=30),
        ])
        stepid, number, name = yield self.db.steps.addStep(
            buildid=30, name='a' * 49, state_string='new')
        yield self.db.steps.startStep(stepid=stepid)
self.assertEqual((number, name), (2, 'a' * 48 + '_2')) stepdict = yield self.db.steps.getStep(stepid=stepid) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict['number'], number) self.assertEqual(stepdict['name'], name) @defer.inlineCallbacks def test_addStep_getStep_name_collisions_too_long_extra_digits(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData + [ fakedb.Step(id=73, number=0, name='a' * 50, buildid=30), ] + [fakedb.Step(id=73 + i, number=i, name='a' * 48 + ('_%d' % i), buildid=30) for i in range(1, 10) ] + [fakedb.Step(id=73 + i, number=i, name='a' * 47 + ('_%d' % i), buildid=30) for i in range(10, 100) ]) stepid, number, name = yield self.db.steps.addStep( buildid=30, name='a' * 50, state_string='new') yield self.db.steps.startStep(stepid=stepid) self.assertEqual((number, name), (100, 'a' * 46 + '_100')) stepdict = yield self.db.steps.getStep(stepid=stepid) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict['number'], number) self.assertEqual(stepdict['name'], name) class TestFakeDB(Tests, unittest.TestCase, connector_component.FakeConnectorComponentMixin): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['steps', 'builds', 'builders', 'masters', 'buildrequests', 'buildsets', 'workers']) self.db.steps = steps.StepsConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_test_result_sets.py000066400000000000000000000247251413250514000257750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import test_result_sets from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation class Tests(interfaces.InterfaceTests): common_data = [ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.Builder(id=89, name='b2'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.BuildRequest(id=42, buildsetid=20, builderid=88), fakedb.BuildRequest(id=43, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Build(id=31, buildrequestid=42, number=8, masterid=88, builderid=88, workerid=47), fakedb.Build(id=40, buildrequestid=43, number=9, masterid=88, builderid=89, workerid=47), fakedb.Step(id=131, number=231, name='step231', buildid=30), fakedb.Step(id=132, number=232, name='step232', buildid=30), fakedb.Step(id=141, number=241, name='step241', buildid=31), fakedb.Step(id=142, number=242, name='step242', buildid=40), ] common_test_result_set_data = [ fakedb.TestResultSet(id=91, builderid=88, buildid=30, stepid=131, description='desc1', category='cat', value_unit='ms', tests_failed=None, tests_passed=None, complete=0), fakedb.TestResultSet(id=92, builderid=88, buildid=30, stepid=131, description='desc2', category='cat', value_unit='ms', 
tests_failed=None, tests_passed=None, complete=1), ] def test_signature_add_test_result_set(self): @self.assertArgSpecMatches(self.db.test_result_sets.addTestResultSet) def addTestResultSet(self, builderid, buildid, stepid, description, category, value_unit): pass def test_signature_get_test_result_set(self): @self.assertArgSpecMatches(self.db.test_result_sets.getTestResultSet) def getTestResultSet(self, test_result_setid): pass def test_signature_get_test_result_sets(self): @self.assertArgSpecMatches(self.db.test_result_sets.getTestResultSets) def getTestResultSets(self, builderid, buildid=None, stepid=None, complete=None, result_spec=None): pass def test_signature_complete_test_result_set(self): @self.assertArgSpecMatches(self.db.test_result_sets.completeTestResultSet) def completeTestResultSet(self, test_result_setid, tests_passed=None, tests_failed=None): pass @defer.inlineCallbacks def test_add_set_get_set(self): yield self.insertTestData(self.common_data) set_id = yield self.db.test_result_sets.addTestResultSet(builderid=88, buildid=30, stepid=131, description='desc', category='cat', value_unit='ms') set_dict = yield self.db.test_result_sets.getTestResultSet(set_id) validation.verifyDbDict(self, 'test_result_setdict', set_dict) self.assertEqual(set_dict, { 'id': set_id, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc', 'category': 'cat', 'value_unit': 'ms', 'tests_failed': None, 'tests_passed': None, 'complete': False }) @defer.inlineCallbacks def test_get_sets(self): yield self.insertTestData(self.common_data + [ fakedb.TestResultSet(id=91, builderid=88, buildid=30, stepid=131, description='desc1', category='cat', value_unit='ms', tests_failed=None, tests_passed=None, complete=0), fakedb.TestResultSet(id=92, builderid=89, buildid=40, stepid=142, description='desc2', category='cat', value_unit='ms', tests_failed=None, tests_passed=None, complete=1), fakedb.TestResultSet(id=93, builderid=88, buildid=31, stepid=141, description='desc3', 
category='cat', value_unit='ms', tests_failed=None, tests_passed=None, complete=1), fakedb.TestResultSet(id=94, builderid=88, buildid=30, stepid=132, description='desc4', category='cat', value_unit='ms', tests_failed=None, tests_passed=None, complete=1), fakedb.TestResultSet(id=95, builderid=88, buildid=30, stepid=131, description='desc4', category='cat', value_unit='ms', tests_failed=None, tests_passed=None, complete=0), ]) set_dicts = yield self.db.test_result_sets.getTestResultSets(builderid=88) self.assertEqual([d['id'] for d in set_dicts], [91, 93, 94, 95]) for d in set_dicts: validation.verifyDbDict(self, 'test_result_setdict', d) set_dicts = yield self.db.test_result_sets.getTestResultSets(builderid=89) self.assertEqual([d['id'] for d in set_dicts], [92]) set_dicts = yield self.db.test_result_sets.getTestResultSets(builderid=88, buildid=30) self.assertEqual([d['id'] for d in set_dicts], [91, 94, 95]) set_dicts = yield self.db.test_result_sets.getTestResultSets(builderid=88, buildid=31) self.assertEqual([d['id'] for d in set_dicts], [93]) set_dicts = yield self.db.test_result_sets.getTestResultSets(builderid=88, stepid=131) self.assertEqual([d['id'] for d in set_dicts], [91, 95]) set_dicts = yield self.db.test_result_sets.getTestResultSets(builderid=88, stepid=132) self.assertEqual([d['id'] for d in set_dicts], [94]) set_dicts = yield self.db.test_result_sets.getTestResultSets(builderid=88, complete=True) self.assertEqual([d['id'] for d in set_dicts], [93, 94]) set_dicts = yield self.db.test_result_sets.getTestResultSets(builderid=88, complete=False) self.assertEqual([d['id'] for d in set_dicts], [91, 95]) @defer.inlineCallbacks def test_get_set_from_data(self): yield self.insertTestData(self.common_data + self.common_test_result_set_data) set_dict = yield self.db.test_result_sets.getTestResultSet(91) self.assertEqual(set_dict, { 'id': 91, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc1', 'category': 'cat', 'value_unit': 'ms', 
'tests_failed': None, 'tests_passed': None, 'complete': False }) @defer.inlineCallbacks def test_get_non_existing_set(self): set_dict = yield self.db.test_result_sets.getTestResultSet(91) self.assertEqual(set_dict, None) @defer.inlineCallbacks def test_complete_already_completed_set(self): yield self.insertTestData(self.common_data + self.common_test_result_set_data) with self.assertRaises(test_result_sets.TestResultSetAlreadyCompleted): yield self.db.test_result_sets.completeTestResultSet(92) self.flushLoggedErrors(test_result_sets.TestResultSetAlreadyCompleted) @defer.inlineCallbacks def test_complete_set_with_test_counts(self): yield self.insertTestData(self.common_data + self.common_test_result_set_data) yield self.db.test_result_sets.completeTestResultSet(91, tests_passed=12, tests_failed=2) set_dict = yield self.db.test_result_sets.getTestResultSet(91) self.assertEqual(set_dict, { 'id': 91, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc1', 'category': 'cat', 'value_unit': 'ms', 'tests_failed': 2, 'tests_passed': 12, 'complete': True }) @defer.inlineCallbacks def test_complete_set_without_test_counts(self): yield self.insertTestData(self.common_data + self.common_test_result_set_data) yield self.db.test_result_sets.completeTestResultSet(91) set_dict = yield self.db.test_result_sets.getTestResultSet(91) self.assertEqual(set_dict, { 'id': 91, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc1', 'category': 'cat', 'value_unit': 'ms', 'tests_failed': None, 'tests_passed': None, 'complete': True }) class TestFakeDB(Tests, connector_component.FakeConnectorComponentMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['steps', 'builds', 'builders', 'masters', 'buildrequests', 'buildsets', 'workers', 
'test_result_sets']) self.db.test_result_sets = test_result_sets.TestResultSetsConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_test_results.py000066400000000000000000000203171413250514000251130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import test_results from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation class Tests(interfaces.InterfaceTests): common_data = [ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Step(id=131, number=132, name='step132', buildid=30), fakedb.TestResultSet(id=13, builderid=88, buildid=30, stepid=131, description='desc', category='cat', value_unit='ms', complete=1), ] def test_signature_get_test_code_paths(self): @self.assertArgSpecMatches(self.db.test_results.getTestCodePaths) def getTestCodePaths(self, builderid, path_prefix=None, result_spec=None): pass def 
test_signature_get_test_names(self): @self.assertArgSpecMatches(self.db.test_results.getTestNames) def getTestNames(self, builderid, name_prefix=None, result_spec=None): pass def test_signature_add_test_results(self): @self.assertArgSpecMatches(self.db.test_results.addTestResults) def addTestResults(self, builderid, test_result_setid, result_values): pass def test_signature_get_test_result(self): @self.assertArgSpecMatches(self.db.test_results.getTestResult) def getTestResult(self, test_resultid): pass def test_signature_get_test_results(self): @self.assertArgSpecMatches(self.db.test_results.getTestResults) def getTestResults(self, builderid, test_result_setid, result_spec=None): pass @defer.inlineCallbacks def test_add_set_results(self): yield self.insertTestData(self.common_data) result_values = [ {'test_name': 'name1', 'value': '1'}, {'test_name': 'name1', 'duration_ns': 1000, 'value': '2'}, {'test_name': 'name2', 'test_code_path': 'path2', 'value': '3'}, {'test_name': 'name3', 'test_code_path': 'path3', 'value': '4'}, {'test_name': 'name4', 'test_code_path': 'path4', 'line': 4, 'value': '5'}, {'test_code_path': 'path5', 'line': 5, 'value': '6'}, ] yield self.db.test_results.addTestResults(builderid=88, test_result_setid=13, result_values=result_values) result_dicts = yield self.db.test_results.getTestResults(builderid=88, test_result_setid=13) for d in result_dicts: validation.verifyDbDict(self, 'test_resultdict', d) result_dicts = sorted(result_dicts, key=lambda x: x['id']) resultid = result_dicts[0]['id'] self.assertEqual(result_dicts, [ {'id': resultid, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name1', 'test_code_path': None, 'line': None, 'duration_ns': None, 'value': '1'}, {'id': resultid + 1, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name1', 'test_code_path': None, 'line': None, 'duration_ns': 1000, 'value': '2'}, {'id': resultid + 2, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name2', 'test_code_path': 'path2', 
'line': None, 'duration_ns': None, 'value': '3'}, {'id': resultid + 3, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name3', 'test_code_path': 'path3', 'line': None, 'duration_ns': None, 'value': '4'}, {'id': resultid + 4, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name4', 'test_code_path': 'path4', 'line': 4, 'duration_ns': None, 'value': '5'}, {'id': resultid + 5, 'builderid': 88, 'test_result_setid': 13, 'test_name': None, 'test_code_path': 'path5', 'line': 5, 'duration_ns': None, 'value': '6'}, ]) result_dict = yield self.db.test_results.getTestResult(test_resultid=resultid) self.assertEqual(result_dict, { 'id': resultid, 'builderid': 88, 'test_result_setid': 13, 'test_name': 'name1', 'test_code_path': None, 'line': None, 'duration_ns': None, 'value': '1' }) @defer.inlineCallbacks def test_get_names(self): yield self.insertTestData(self.common_data + [ fakedb.TestName(id=103, builderid=88, name='name103'), fakedb.TestName(id=104, builderid=88, name='name104'), fakedb.TestName(id=105, builderid=88, name='name105'), fakedb.TestName(id=116, builderid=88, name='name116'), fakedb.TestName(id=117, builderid=88, name='name117'), ]) name_dicts = yield self.db.test_results.getTestNames(builderid=88) self.assertEqual(name_dicts, ['name103', 'name104', 'name105', 'name116', 'name117']) name_dicts = yield self.db.test_results.getTestNames(builderid=88, name_prefix='non_existing') self.assertEqual(name_dicts, []) name_dicts = yield self.db.test_results.getTestNames(builderid=88, name_prefix='name10') self.assertEqual(name_dicts, ['name103', 'name104', 'name105']) name_dicts = yield self.db.test_results.getTestNames(builderid=88, name_prefix='name11') self.assertEqual(name_dicts, ['name116', 'name117']) @defer.inlineCallbacks def test_get_code_paths(self): yield self.insertTestData(self.common_data + [ fakedb.TestCodePath(id=103, builderid=88, path='path103'), fakedb.TestCodePath(id=104, builderid=88, path='path104'), fakedb.TestCodePath(id=105, 
builderid=88, path='path105'), fakedb.TestCodePath(id=116, builderid=88, path='path116'), fakedb.TestCodePath(id=117, builderid=88, path='path117'), ]) path_dicts = yield self.db.test_results.getTestCodePaths(builderid=88) self.assertEqual(path_dicts, ['path103', 'path104', 'path105', 'path116', 'path117']) path_dicts = yield self.db.test_results.getTestCodePaths(builderid=88, path_prefix='non_existing') self.assertEqual(path_dicts, []) path_dicts = yield self.db.test_results.getTestCodePaths(builderid=88, path_prefix='path10') self.assertEqual(path_dicts, ['path103', 'path104', 'path105']) path_dicts = yield self.db.test_results.getTestCodePaths(builderid=88, path_prefix='path11') self.assertEqual(path_dicts, ['path116', 'path117']) class TestFakeDB(Tests, connector_component.FakeConnectorComponentMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['steps', 'builds', 'builders', 'masters', 'buildrequests', 'buildsets', 'workers', 'test_names', 'test_code_paths', 'test_results', 'test_result_sets']) self.db.test_results = test_results.TestResultsConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/db/test_users.py000066400000000000000000000406131413250514000235150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy from twisted.internet import defer from twisted.trial import unittest from buildbot.db import users from buildbot.test import fakedb from buildbot.test.util import connector_component class TestUsersConnectorComponent(connector_component.ConnectorComponentMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['users', 'users_info', 'changes', 'change_users', 'sourcestamps', 'patches']) self.db.users = users.UsersConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() # sample user data user1_rows = [ fakedb.User(uid=1, identifier='soap'), fakedb.UserInfo(uid=1, attr_type='IPv9', attr_data='0578cc6.8db024'), ] user2_rows = [ fakedb.User(uid=2, identifier='lye'), fakedb.UserInfo(uid=2, attr_type='git', attr_data='Tyler Durden '), fakedb.UserInfo(uid=2, attr_type='irc', attr_data='durden') ] user3_rows = [ fakedb.User(uid=3, identifier='marla', bb_username='marla', bb_password='cancer') ] user1_dict = { 'uid': 1, 'identifier': 'soap', 'bb_username': None, 'bb_password': None, 'IPv9': '0578cc6.8db024', } user2_dict = { 'uid': 2, 'identifier': 'lye', 'bb_username': None, 'bb_password': None, 'irc': 'durden', 'git': 'Tyler Durden ' } user3_dict = { 'uid': 3, 'identifier': 'marla', 'bb_username': 'marla', 'bb_password': 'cancer', } # tests @defer.inlineCallbacks def test_addUser_new(self): uid = yield self.db.users.findUserByAttr(identifier='soap', attr_type='subspace_net_handle', attr_data='Durden0924') def thd(conn): users_tbl = self.db.model.users users_info_tbl = self.db.model.users_info users = conn.execute(users_tbl.select()).fetchall() infos = conn.execute(users_info_tbl.select()).fetchall() 
self.assertEqual(len(users), 1) self.assertEqual(users[0].uid, uid) self.assertEqual(users[0].identifier, 'soap') self.assertEqual(len(infos), 1) self.assertEqual(infos[0].uid, uid) self.assertEqual(infos[0].attr_type, 'subspace_net_handle') self.assertEqual(infos[0].attr_data, 'Durden0924') yield self.db.pool.do(thd) @defer.inlineCallbacks def test_addUser_existing(self): yield self.insertTestData(self.user1_rows) uid = yield self.db.users.findUserByAttr( identifier='soapy', attr_type='IPv9', attr_data='0578cc6.8db024') self.assertEqual(uid, 1) def thd(conn): users_tbl = self.db.model.users users_info_tbl = self.db.model.users_info users = conn.execute(users_tbl.select()).fetchall() infos = conn.execute(users_info_tbl.select()).fetchall() self.assertEqual(len(users), 1) self.assertEqual(users[0].uid, uid) self.assertEqual(users[0].identifier, 'soap') # not changed! self.assertEqual(len(infos), 1) self.assertEqual(infos[0].uid, uid) self.assertEqual(infos[0].attr_type, 'IPv9') self.assertEqual(infos[0].attr_data, '0578cc6.8db024') yield self.db.pool.do(thd) @defer.inlineCallbacks def test_findUser_existing(self): yield self.insertTestData( self.user1_rows + self.user2_rows + self.user3_rows) uid = yield self.db.users.findUserByAttr( identifier='lye', attr_type='git', attr_data='Tyler Durden ') self.assertEqual(uid, 2) def thd(conn): users_tbl = self.db.model.users users_info_tbl = self.db.model.users_info users = conn.execute(users_tbl.select()).fetchall() infos = conn.execute(users_info_tbl.select()).fetchall() self.assertEqual(( sorted([tuple(u) for u in users]), sorted([tuple(i) for i in infos]) ), ( [ (1, 'soap', None, None), (2, 'lye', None, None), (3, 'marla', 'marla', 'cancer'), ], [ (1, 'IPv9', '0578cc6.8db024'), (2, 'git', 'Tyler Durden '), (2, 'irc', 'durden') ])) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_addUser_race(self): def race_thd(conn): # note that this assumes that both inserts can happen "at once". 
# This is the case for DB engines that support transactions, but # not for MySQL. so this test does not detect the potential MySQL # failure, which will generally result in a spurious failure. conn.execute(self.db.model.users.insert(), uid=99, identifier='soap') conn.execute(self.db.model.users_info.insert(), uid=99, attr_type='subspace_net_handle', attr_data='Durden0924') uid = yield self.db.users.findUserByAttr(identifier='soap', attr_type='subspace_net_handle', attr_data='Durden0924', _race_hook=race_thd) self.assertEqual(uid, 99) def thd(conn): users_tbl = self.db.model.users users_info_tbl = self.db.model.users_info users = conn.execute(users_tbl.select()).fetchall() infos = conn.execute(users_info_tbl.select()).fetchall() self.assertEqual(len(users), 1) self.assertEqual(users[0].uid, uid) self.assertEqual(users[0].identifier, 'soap') self.assertEqual(len(infos), 1) self.assertEqual(infos[0].uid, uid) self.assertEqual(infos[0].attr_type, 'subspace_net_handle') self.assertEqual(infos[0].attr_data, 'Durden0924') yield self.db.pool.do(thd) @defer.inlineCallbacks def test_addUser_existing_identifier(self): # see http://trac.buildbot.net/ticket/2587 yield self.insertTestData(self.user1_rows) uid = yield self.db.users.findUserByAttr( identifier='soap', # same identifier attr_type='IPv9', attr_data='fffffff.ffffff') # different attr # creates a new user self.assertNotEqual(uid, 1) def thd(conn): users_tbl = self.db.model.users users_info_tbl = self.db.model.users_info users = conn.execute( users_tbl.select(order_by=users_tbl.c.identifier)).fetchall() infos = conn.execute( users_info_tbl.select(users_info_tbl.c.uid == uid)).fetchall() self.assertEqual(len(users), 2) self.assertEqual(users[1].uid, uid) self.assertEqual(users[1].identifier, 'soap_2') # unique'd self.assertEqual(len(infos), 1) self.assertEqual(infos[0].attr_type, 'IPv9') self.assertEqual(infos[0].attr_data, 'fffffff.ffffff') yield self.db.pool.do(thd) @defer.inlineCallbacks def test_getUser(self): yield 
self.insertTestData(self.user1_rows) usdict = yield self.db.users.getUser(1) self.assertEqual(usdict, self.user1_dict) @defer.inlineCallbacks def test_getUser_bb(self): yield self.insertTestData(self.user3_rows) usdict = yield self.db.users.getUser(3) self.assertEqual(usdict, self.user3_dict) @defer.inlineCallbacks def test_getUser_multi_attr(self): yield self.insertTestData(self.user2_rows) usdict = yield self.db.users.getUser(2) self.assertEqual(usdict, self.user2_dict) @defer.inlineCallbacks def test_getUser_no_match(self): yield self.insertTestData(self.user1_rows) none = yield self.db.users.getUser(3) self.assertEqual(none, None) @defer.inlineCallbacks def test_getUsers_none(self): res = yield self.db.users.getUsers() self.assertEqual(res, []) @defer.inlineCallbacks def test_getUsers(self): yield self.insertTestData(self.user1_rows) res = yield self.db.users.getUsers() self.assertEqual(res, [dict(uid=1, identifier='soap')]) @defer.inlineCallbacks def test_getUsers_multiple(self): yield self.insertTestData(self.user1_rows + self.user2_rows) res = yield self.db.users.getUsers() self.assertEqual(res, [dict(uid=1, identifier='soap'), dict(uid=2, identifier='lye')]) @defer.inlineCallbacks def test_getUserByUsername(self): yield self.insertTestData(self.user3_rows) res = yield self.db.users.getUserByUsername("marla") self.assertEqual(res, self.user3_dict) @defer.inlineCallbacks def test_getUserByUsername_no_match(self): yield self.insertTestData(self.user3_rows) none = yield self.db.users.getUserByUsername("tyler") self.assertEqual(none, None) @defer.inlineCallbacks def test_updateUser_existing_type(self): yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser(uid=1, attr_type='IPv9', attr_data='abcd.1234') usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['IPv9'], 'abcd.1234') self.assertEqual(usdict['identifier'], 'soap') # no change @defer.inlineCallbacks def test_updateUser_new_type(self): yield 
self.insertTestData(self.user1_rows) yield self.db.users.updateUser(uid=1, attr_type='IPv4', attr_data='123.134.156.167') usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['IPv4'], '123.134.156.167') self.assertEqual(usdict['IPv9'], '0578cc6.8db024') # no change self.assertEqual(usdict['identifier'], 'soap') # no change @defer.inlineCallbacks def test_updateUser_identifier(self): yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser(uid=1, identifier='lye') usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['identifier'], 'lye') self.assertEqual(usdict['IPv9'], '0578cc6.8db024') # no change @defer.inlineCallbacks def test_updateUser_bb(self): yield self.insertTestData(self.user3_rows) yield self.db.users.updateUser(uid=3, bb_username='boss', bb_password='fired') usdict = yield self.db.users.getUser(3) self.assertEqual(usdict['bb_username'], 'boss') self.assertEqual(usdict['bb_password'], 'fired') self.assertEqual(usdict['identifier'], 'marla') # no change @defer.inlineCallbacks def test_updateUser_all(self): yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser( uid=1, identifier='lye', bb_username='marla', bb_password='cancer', attr_type='IPv4', attr_data='123.134.156.167') usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['identifier'], 'lye') self.assertEqual(usdict['bb_username'], 'marla') self.assertEqual(usdict['bb_password'], 'cancer') self.assertEqual(usdict['IPv4'], '123.134.156.167') self.assertEqual(usdict['IPv9'], '0578cc6.8db024') # no change @defer.inlineCallbacks def test_updateUser_race(self): # called from the db thread, this opens a *new* connection (to avoid # the existing transaction) and executes a conflicting insert in that # connection. This will cause the insert in the db method to fail, and # the data in this insert (8.8.8.8) will appear below. 
transaction_wins = [] if (self.db.pool.engine.dialect.name == 'sqlite' and self.db.pool.engine.url.database not in [None, ':memory:']): # It's not easy to work with file-based SQLite via multiple # connections, because SQLAlchemy (in it's default configuration) # locks file during working session. # TODO: This probably can be supported. raise unittest.SkipTest( "It's hard to test race condition with not in-memory SQLite") def race_thd(conn): conn = self.db.pool.engine.connect() try: r = conn.execute(self.db.model.users_info.insert(), uid=1, attr_type='IPv4', attr_data='8.8.8.8') r.close() except sqlalchemy.exc.OperationalError: # some engine (mysql innodb) will enforce lock until the transaction is over transaction_wins.append(True) # scope variable, we modify a list so that modification is visible in parent scope yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser(uid=1, attr_type='IPv4', attr_data='123.134.156.167', _race_hook=race_thd) usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['identifier'], 'soap') if transaction_wins: self.assertEqual(usdict['IPv4'], '123.134.156.167') else: self.assertEqual(usdict['IPv4'], '8.8.8.8') self.assertEqual(usdict['IPv9'], '0578cc6.8db024') # no change @defer.inlineCallbacks def test_update_NoMatch_identifier(self): yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser(uid=3, identifier='abcd') usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['identifier'], 'soap') # no change @defer.inlineCallbacks def test_update_NoMatch_attribute(self): yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser(uid=3, attr_type='abcd', attr_data='efgh') usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['IPv9'], '0578cc6.8db024') # no change @defer.inlineCallbacks def test_update_NoMatch_bb(self): yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser( uid=3, attr_type='marla', attr_data='cancer') usdict = yield 
self.db.users.getUser(1) self.assertEqual(usdict['IPv9'], '0578cc6.8db024') # no change @defer.inlineCallbacks def test_removeUser_uid(self): yield self.insertTestData(self.user1_rows) yield self.db.users.removeUser(1) def thd(conn): r = conn.execute(self.db.model.users.select()) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_removeNoMatch(self): yield self.insertTestData(self.user1_rows) yield self.db.users.removeUser(uid=3) @defer.inlineCallbacks def test_identifierToUid_NoMatch(self): res = yield self.db.users.identifierToUid(identifier="soap") self.assertEqual(res, None) @defer.inlineCallbacks def test_identifierToUid_match(self): yield self.insertTestData(self.user1_rows) res = yield self.db.users.identifierToUid(identifier="soap") self.assertEqual(res, 1) buildbot-3.4.0/master/buildbot/test/unit/db/test_workers.py000066400000000000000000000753611413250514000240600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import workers from buildbot.test import fakedb from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import querylog from buildbot.test.util import validation def workerKey(worker): return worker['id'] def configuredOnKey(worker): return (worker['builderid'], worker['masterid']) class Tests(interfaces.InterfaceTests): # common sample data baseRows = [ fakedb.Master(id=10, name='m10'), fakedb.Master(id=11, name='m11'), fakedb.Builder(id=20, name='a'), fakedb.Builder(id=21, name='b'), fakedb.Builder(id=22, name='c'), fakedb.Worker(id=30, name='zero'), fakedb.Worker(id=31, name='one'), ] multipleMasters = [ fakedb.BuilderMaster(id=12, builderid=20, masterid=10), fakedb.BuilderMaster(id=13, builderid=21, masterid=10), fakedb.BuilderMaster(id=14, builderid=20, masterid=11), fakedb.BuilderMaster(id=15, builderid=22, masterid=11), fakedb.BuilderMaster(id=16, builderid=22, masterid=10), fakedb.ConfiguredWorker( id=3012, workerid=30, buildermasterid=12), fakedb.ConfiguredWorker( id=3013, workerid=30, buildermasterid=13), fakedb.ConfiguredWorker( id=3014, workerid=30, buildermasterid=14), fakedb.ConfiguredWorker( id=3114, workerid=31, buildermasterid=14), fakedb.ConfiguredWorker( id=3115, workerid=31, buildermasterid=15), fakedb.ConnectedWorker(id=3010, workerid=30, masterid=10), fakedb.ConnectedWorker(id=3111, workerid=31, masterid=11), ] # sample worker data, with id's avoiding the postgres id sequence BOGUS_NAME = 'bogus' W1_NAME, W1_ID, W1_INFO = 'w1', 100, {'a': 1} worker1_rows = [ fakedb.Worker(id=W1_ID, name=W1_NAME, info=W1_INFO), ] W2_NAME, W2_ID, W2_INFO = 'w2', 200, {'a': 1, 'b': 2} worker2_rows = [ fakedb.Worker(id=W2_ID, name=W2_NAME, info=W2_INFO), ] # tests def test_signature_findWorkerId(self): @self.assertArgSpecMatches(self.db.workers.findWorkerId) def findWorkerId(self, 
name): pass def test_signature_getWorker(self): @self.assertArgSpecMatches(self.db.workers.getWorker) def getWorker(self, workerid=None, name=None, masterid=None, builderid=None): pass def test_signature_getWorkers(self): @self.assertArgSpecMatches(self.db.workers.getWorkers) def getWorkers(self, masterid=None, builderid=None, paused=None, graceful=None): pass def test_signature_workerConnected(self): @self.assertArgSpecMatches(self.db.workers.workerConnected) def workerConnected(self, workerid, masterid, workerinfo): pass def test_signature_workerDisconnected(self): @self.assertArgSpecMatches(self.db.workers.workerDisconnected) def workerDisconnected(self, workerid, masterid): pass def test_signature_workerConfigured(self): @self.assertArgSpecMatches(self.db.workers.workerConfigured) def workerConfigured(self, workerid, masterid, builderids): pass def test_signature_deconfigureAllWorkersForMaster(self): @self.assertArgSpecMatches(self.db.workers.deconfigureAllWorkersForMaster) def deconfigureAllWorkersForMaster(self, masterid): pass def test_signature_setWorkerState(self): @self.assertArgSpecMatches(self.db.workers.setWorkerState) def setWorkerState(self, workerid, paused, graceful): pass @defer.inlineCallbacks def test_findWorkerId_insert(self): id = yield self.db.workers.findWorkerId(name="xyz") worker = yield self.db.workers.getWorker(workerid=id) self.assertEqual(worker['name'], 'xyz') self.assertEqual(worker['workerinfo'], {}) @defer.inlineCallbacks def test_findWorkerId_existing(self): yield self.insertTestData(self.baseRows) id = yield self.db.workers.findWorkerId(name="one") self.assertEqual(id, 31) @defer.inlineCallbacks def test_getWorker_no_such(self): yield self.insertTestData(self.baseRows) workerdict = yield self.db.workers.getWorker(workerid=99) self.assertEqual(workerdict, None) @defer.inlineCallbacks def test_getWorker_by_name_no_such(self): yield self.insertTestData(self.baseRows) workerdict = yield self.db.workers.getWorker(name='NOSUCH') 
self.assertEqual(workerdict, None) @defer.inlineCallbacks def test_getWorker_not_configured(self): yield self.insertTestData(self.baseRows) workerdict = yield self.db.workers.getWorker(workerid=30) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, connected_to=[], configured_on=[])) @defer.inlineCallbacks def test_getWorker_connected_not_configured(self): yield self.insertTestData(self.baseRows + [ # the worker is connected to this master, but not configured. # weird, but the DB should represent it. fakedb.Worker(id=32, name='two'), fakedb.ConnectedWorker(workerid=32, masterid=11), ]) workerdict = yield self.db.workers.getWorker(workerid=32) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=32, name='two', workerinfo={'a': 'b'}, paused=False, graceful=False, connected_to=[11], configured_on=[])) @defer.inlineCallbacks def test_getWorker_multiple_connections(self): yield self.insertTestData(self.baseRows + [ # the worker is connected to two masters at once. # weird, but the DB should represent it. 
fakedb.Worker(id=32, name='two'), fakedb.ConnectedWorker(workerid=32, masterid=10), fakedb.ConnectedWorker(workerid=32, masterid=11), fakedb.BuilderMaster(id=24, builderid=20, masterid=10), fakedb.BuilderMaster(id=25, builderid=20, masterid=11), fakedb.ConfiguredWorker(workerid=32, buildermasterid=24), fakedb.ConfiguredWorker(workerid=32, buildermasterid=25), ]) workerdict = yield self.db.workers.getWorker(workerid=32) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=32, name='two', workerinfo={'a': 'b'}, paused=False, graceful=False, connected_to=[10, 11], configured_on=[ {'builderid': 20, 'masterid': 10}, {'builderid': 20, 'masterid': 11}, ])) @defer.inlineCallbacks def test_getWorker_by_name_not_configured(self): yield self.insertTestData(self.baseRows) workerdict = yield self.db.workers.getWorker(name='zero') validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, connected_to=[], configured_on=[])) @defer.inlineCallbacks def test_getWorker_not_connected(self): yield self.insertTestData(self.baseRows + [ fakedb.BuilderMaster(id=12, builderid=20, masterid=10), fakedb.ConfiguredWorker(workerid=30, buildermasterid=12), ]) workerdict = yield self.db.workers.getWorker(workerid=30) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[{'masterid': 10, 'builderid': 20}], connected_to=[])) @defer.inlineCallbacks def test_getWorker_connected(self): yield self.insertTestData(self.baseRows + [ fakedb.BuilderMaster(id=12, builderid=20, masterid=10), fakedb.ConfiguredWorker(workerid=30, buildermasterid=12), fakedb.ConnectedWorker(workerid=30, masterid=10), ]) workerdict = yield self.db.workers.getWorker(workerid=30) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, 
dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[{'masterid': 10, 'builderid': 20}], connected_to=[10])) @defer.inlineCallbacks def test_getWorker_with_multiple_masters(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdict = yield self.db.workers.getWorker(workerid=30) validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 10, 'builderid': 20}, {'masterid': 10, 'builderid': 21}, {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[10])) @defer.inlineCallbacks def test_getWorker_with_multiple_masters_builderid(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdict = yield self.db.workers.getWorker(workerid=30, builderid=20) validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 10, 'builderid': 20}, {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[10])) @defer.inlineCallbacks def test_getWorker_with_multiple_masters_masterid(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdict = yield self.db.workers.getWorker(workerid=30, masterid=11) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[{'masterid': 11, 'builderid': 20}, ], connected_to=[])) @defer.inlineCallbacks def test_getWorker_with_multiple_masters_builderid_masterid(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdict = yield 
self.db.workers.getWorker(workerid=30, builderid=20, masterid=11) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[{'masterid': 11, 'builderid': 20}, ], connected_to=[])) @defer.inlineCallbacks def test_getWorker_by_name_with_multiple_masters_builderid_masterid(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdict = yield self.db.workers.getWorker(name='zero', builderid=20, masterid=11) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[{'masterid': 11, 'builderid': 20}, ], connected_to=[])) @defer.inlineCallbacks def test_getWorkers_no_config(self): yield self.insertTestData(self.baseRows) workerdicts = yield self.db.workers.getWorkers() [validation.verifyDbDict(self, 'workerdict', workerdict) for workerdict in workerdicts] self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[], connected_to=[]), dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[], connected_to=[]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_with_config(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers() for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 10, 'builderid': 20}, {'masterid': 10, 'builderid': 21}, {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[10]), 
dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 11, 'builderid': 20}, {'masterid': 11, 'builderid': 22}, ], key=configuredOnKey), connected_to=[11]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_empty(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers(masterid=11, builderid=21) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), []) @defer.inlineCallbacks def test_getWorkers_with_config_builderid(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers(builderid=20) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 10, 'builderid': 20}, {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[10]), dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[11]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_with_config_masterid_10(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers(masterid=10) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=30, name='zero', workerinfo={'a': 'b'}, 
paused=False, graceful=False, configured_on=sorted([ {'masterid': 10, 'builderid': 20}, {'masterid': 10, 'builderid': 21}, ], key=configuredOnKey), connected_to=[10]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_with_config_masterid_11(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers(masterid=11) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[]), dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 11, 'builderid': 20}, {'masterid': 11, 'builderid': 22}, ], key=configuredOnKey), connected_to=[11]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_with_config_masterid_11_builderid_22(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers( masterid=11, builderid=22) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 11, 'builderid': 22}, ], key=configuredOnKey), connected_to=[11]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_with_paused(self): yield self.insertTestData(self.baseRows + self.multipleMasters) yield self.db.workers.setWorkerState(31, paused=True, graceful=False) workerdicts = yield self.db.workers.getWorkers( paused=True) for workerdict in workerdicts: 
validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = [] self.assertEqual(workerdicts, [ dict(id=31, name='one', workerinfo={'a': 'b'}, paused=True, graceful=False, configured_on=[], connected_to=[11]), ]) @defer.inlineCallbacks def test_getWorkers_with_graceful(self): yield self.insertTestData(self.baseRows + self.multipleMasters) yield self.db.workers.setWorkerState(31, paused=False, graceful=True) workerdicts = yield self.db.workers.getWorkers( graceful=True) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = [] self.assertEqual(workerdicts, [ dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=True, configured_on=[], connected_to=[11]), ]) @defer.inlineCallbacks def test_workerConnected_existing(self): yield self.insertTestData(self.baseRows + self.worker1_rows) NEW_INFO = {'other': [1, 2, 3]} yield self.db.workers.workerConnected( workerid=self.W1_ID, masterid=11, workerinfo=NEW_INFO) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w, { 'id': self.W1_ID, 'name': self.W1_NAME, 'workerinfo': NEW_INFO, 'paused': False, 'graceful': False, 'configured_on': [], 'connected_to': [11]}) @defer.inlineCallbacks def test_workerConnected_already_connected(self): yield self.insertTestData(self.baseRows + self.worker1_rows + [ fakedb.ConnectedWorker(id=888, workerid=self.W1_ID, masterid=11), ]) yield self.db.workers.workerConnected( workerid=self.W1_ID, masterid=11, workerinfo={}) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w['connected_to'], [11]) @defer.inlineCallbacks def test_workerDisconnected(self): yield self.insertTestData(self.baseRows + self.worker1_rows + [ fakedb.ConnectedWorker(id=888, workerid=self.W1_ID, masterid=10), fakedb.ConnectedWorker(id=889, workerid=self.W1_ID, masterid=11), ]) yield self.db.workers.workerDisconnected( workerid=self.W1_ID, masterid=11) w = yield self.db.workers.getWorker(self.W1_ID) 
self.assertEqual(w['connected_to'], [10]) @defer.inlineCallbacks def test_workerDisconnected_already_disconnected(self): yield self.insertTestData(self.baseRows + self.worker1_rows) yield self.db.workers.workerDisconnected( workerid=self.W1_ID, masterid=11) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w['connected_to'], []) @defer.inlineCallbacks def test_setWorkerState_existing(self): yield self.insertTestData(self.baseRows + self.worker1_rows) yield self.db.workers.setWorkerState( workerid=self.W1_ID, paused=False, graceful=True) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w, { 'id': self.W1_ID, 'name': self.W1_NAME, 'workerinfo': self.W1_INFO, 'paused': False, 'graceful': True, 'configured_on': [], 'connected_to': []}) yield self.db.workers.setWorkerState( workerid=self.W1_ID, paused=True, graceful=False) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w, { 'id': self.W1_ID, 'name': self.W1_NAME, 'workerinfo': self.W1_INFO, 'paused': True, 'graceful': False, 'configured_on': [], 'connected_to': []}) @defer.inlineCallbacks def test_workerConfigured(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove builder 21, and add 22 yield self.db.workers.deconfigureAllWorkersForMaster(masterid=10) yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[20, 22]) w = yield self.db.workers.getWorker(30) self.assertEqual(sorted(w['configured_on'], key=configuredOnKey), sorted([ {'builderid': 20, 'masterid': 11}, {'builderid': 20, 'masterid': 10}, {'builderid': 22, 'masterid': 10}], key=configuredOnKey)) @defer.inlineCallbacks def test_workerConfiguredTwice(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove builder 21, and add 22 yield self.db.workers.deconfigureAllWorkersForMaster(masterid=10) yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[20, 22]) # configure again (should eat the duplicate 
insertion errors) yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[20, 21, 22]) w = yield self.db.workers.getWorker(30) x1 = sorted(w['configured_on'], key=configuredOnKey) x2 = sorted([{'builderid': 20, 'masterid': 11}, {'builderid': 20, 'masterid': 10}, {'builderid': 21, 'masterid': 10}, {'builderid': 22, 'masterid': 10}], key=configuredOnKey) self.assertEqual(x1, x2) @defer.inlineCallbacks def test_workerReConfigured(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove builder 21, and add 22 yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[20, 22]) w = yield self.db.workers.getWorker(30) w['configured_on'] = sorted(w['configured_on'], key=configuredOnKey) self.assertEqual(w['configured_on'], sorted([{'builderid': 20, 'masterid': 11}, {'builderid': 20, 'masterid': 10}, {'builderid': 22, 'masterid': 10}], key=configuredOnKey)) @defer.inlineCallbacks def test_workerReConfigured_should_not_affect_other_worker(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove all the builders in master 11 yield self.db.workers.workerConfigured( workerid=30, masterid=11, builderids=[]) w = yield self.db.workers.getWorker(30) x1 = sorted(w['configured_on'], key=configuredOnKey) x2 = sorted([{'builderid': 20, 'masterid': 10}, {'builderid': 21, 'masterid': 10}], key=configuredOnKey) self.assertEqual(x1, x2) # ensure worker 31 is not affected (see GitHub issue#3392) w = yield self.db.workers.getWorker(31) x1 = sorted(w['configured_on'], key=configuredOnKey) x2 = sorted([{'builderid': 20, 'masterid': 11}, {'builderid': 22, 'masterid': 11}], key=configuredOnKey) self.assertEqual(x1, x2) @defer.inlineCallbacks def test_workerUnconfigured(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove all builders from master 10 yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[]) w = yield 
self.db.workers.getWorker(30) w['configured_on'] = sorted(w['configured_on'], key=configuredOnKey) expected = sorted([ {'builderid': 20, 'masterid': 11}], key=configuredOnKey) self.assertEqual(w['configured_on'], expected) @defer.inlineCallbacks def test_nothingConfigured(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove builder 21, and add 22 yield self.db.workers.deconfigureAllWorkersForMaster(masterid=10) yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[]) # should only keep builder for master 11 w = yield self.db.workers.getWorker(30) self.assertEqual(sorted(w['configured_on']), sorted([ {'builderid': 20, 'masterid': 11}])) @defer.inlineCallbacks def test_deconfiguredAllWorkers(self): yield self.insertTestData(self.baseRows + self.multipleMasters) res = yield self.db.workers.getWorkers(masterid=11) self.assertEqual(len(res), 2) # should remove all worker configured for masterid 11 yield self.db.workers.deconfigureAllWorkersForMaster(masterid=11) res = yield self.db.workers.getWorkers(masterid=11) self.assertEqual(len(res), 0) class RealTests(Tests): # tests that only "real" implementations will pass pass class TestFakeDB(unittest.TestCase, connector_component.FakeConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests, querylog.SqliteMaxVariableMixin): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['workers', 'masters', 'builders', 'builder_masters', 'connected_workers', 'configured_workers']) self.db.workers = \ workers.WorkersConnectorComponent(self.db) @defer.inlineCallbacks def test_workerConfiguredMany(self): manyWorkers = [ fakedb.BuilderMaster(id=1000, builderid=20, masterid=10), ] + [ fakedb.Worker(id=50 + n, name='zero' + str(n)) for n in range(1000) ] + [ fakedb.ConfiguredWorker( id=n + 3000, 
workerid=50 + n, buildermasterid=1000) for n in range(1000) ] yield self.insertTestData(self.baseRows + manyWorkers) # should successfully remove all ConfiguredWorker rows with self.assertNoMaxVariables(): yield self.db.workers.deconfigureAllWorkersForMaster(masterid=10) w = yield self.db.workers.getWorker(30) self.assertEqual(sorted(w['configured_on']), []) @defer.inlineCallbacks def test_workerConfiguredManyBuilders(self): manyWorkers = [ fakedb.Builder(id=100 + n, name='a' + str(n)) for n in range(1000) ] + [ fakedb.Worker(id=50 + n, name='zero' + str(n)) for n in range(2000) ] + [ fakedb.BuilderMaster(id=1000 + n, builderid=100 + n, masterid=10) for n in range(1000) ] + [ fakedb.ConfiguredWorker( id=n + 3000, workerid=50 + n, buildermasterid=int(1000 + n / 2)) for n in range(2000) ] yield self.insertTestData(self.baseRows + manyWorkers) # should successfully remove all ConfiguredWorker rows with self.assertNoMaxVariables(): yield self.db.workers.deconfigureAllWorkersForMaster(masterid=10) w = yield self.db.workers.getWorker(30) self.assertEqual(sorted(w['configured_on']), []) def tearDown(self): return self.tearDownConnectorComponent() buildbot-3.4.0/master/buildbot/test/unit/process/000077500000000000000000000000001413250514000220305ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/process/__init__.py000066400000000000000000000000001413250514000241270ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/process/test_botmaster_BotMaster.py000066400000000000000000000173121413250514000274250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process import factory from buildbot.process.botmaster import BotMaster from buildbot.process.results import CANCELLED from buildbot.process.results import RETRY from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class TestCleanShutdown(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) self.botmaster = BotMaster() yield self.botmaster.setServiceParent(self.master) self.botmaster.startService() def assertReactorStopped(self, _=None): self.assertTrue(self.reactor.stop_called) def assertReactorNotStopped(self, _=None): self.assertFalse(self.reactor.stop_called) def makeFakeBuild(self, waitedFor=False): self.fake_builder = builder = mock.Mock() self.build_deferred = defer.Deferred() request = mock.Mock() request.waitedFor = waitedFor build = mock.Mock() build.stopBuild = self.stopFakeBuild build.waitUntilFinished.return_value = self.build_deferred build.requests = [request] builder.building = [build] self.botmaster.builders = mock.Mock() self.botmaster.builders.values.return_value = [builder] def stopFakeBuild(self, reason, results): self.reason = reason self.results = results self.finishFakeBuild() def finishFakeBuild(self): self.fake_builder.building = [] self.build_deferred.callback(None) # tests def test_shutdown_idle(self): """Test that the master shuts down when it's idle""" self.botmaster.cleanShutdown() self.assertReactorStopped() def test_shutdown_busy(self): """Test that the master shuts 
down after builds finish""" self.makeFakeBuild() self.botmaster.cleanShutdown() # check that we haven't stopped yet, since there's a running build self.assertReactorNotStopped() # try to shut it down again, just to check that this does not fail self.botmaster.cleanShutdown() # Now we cause the build to finish self.finishFakeBuild() # And now we should be stopped self.assertReactorStopped() def test_shutdown_busy_quick(self): """Test that the master shuts down after builds finish""" self.makeFakeBuild() self.botmaster.cleanShutdown(quickMode=True) # And now we should be stopped self.assertReactorStopped() self.assertEqual(self.results, RETRY) def test_shutdown_busy_quick_cancelled(self): """Test that the master shuts down after builds finish""" self.makeFakeBuild(waitedFor=True) self.botmaster.cleanShutdown(quickMode=True) # And now we should be stopped self.assertReactorStopped() self.assertEqual(self.results, CANCELLED) def test_shutdown_cancel_not_shutting_down(self): """Test that calling cancelCleanShutdown when none is in progress works""" # this just shouldn't fail.. self.botmaster.cancelCleanShutdown() def test_shutdown_cancel(self): """Test that we can cancel a shutdown""" self.makeFakeBuild() self.botmaster.cleanShutdown() # Next we check that we haven't stopped yet, since there's a running # build. self.assertReactorNotStopped() # but the BuildRequestDistributor should not be running self.assertFalse(self.botmaster.brd.running) # Cancel the shutdown self.botmaster.cancelCleanShutdown() # Now we cause the build to finish self.finishFakeBuild() # We should still be running! 
self.assertReactorNotStopped() # and the BuildRequestDistributor should be, as well self.assertTrue(self.botmaster.brd.running) class TestBotMaster(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True) self.master.mq = self.master.mq self.master.botmaster.disownServiceParent() self.botmaster = BotMaster() yield self.botmaster.setServiceParent(self.master) self.new_config = mock.Mock() self.botmaster.startService() def tearDown(self): return self.botmaster.stopService() @defer.inlineCallbacks def test_reconfigServiceWithBuildbotConfig(self): # check that reconfigServiceBuilders is called. self.patch(self.botmaster, 'reconfigServiceBuilders', mock.Mock(side_effect=lambda c: defer.succeed(None))) self.patch(self.botmaster, 'maybeStartBuildsForAllBuilders', mock.Mock()) new_config = mock.Mock() yield self.botmaster.reconfigServiceWithBuildbotConfig(new_config) self.botmaster.reconfigServiceBuilders.assert_called_with( new_config) self.assertTrue( self.botmaster.maybeStartBuildsForAllBuilders.called) @defer.inlineCallbacks def test_reconfigServiceBuilders_add_remove(self): bc = config.BuilderConfig(name='bldr', factory=factory.BuildFactory(), workername='f') self.new_config.builders = [bc] yield self.botmaster.reconfigServiceBuilders(self.new_config) bldr = self.botmaster.builders['bldr'] self.assertIdentical(bldr.parent, self.botmaster) self.assertIdentical(bldr.master, self.master) self.assertEqual(self.botmaster.builderNames, ['bldr']) self.new_config.builders = [] yield self.botmaster.reconfigServiceBuilders(self.new_config) self.assertIdentical(bldr.parent, None) self.assertIdentical(bldr.master, None) self.assertEqual(self.botmaster.builders, {}) self.assertEqual(self.botmaster.builderNames, []) def test_maybeStartBuildsForBuilder(self): brd = self.botmaster.brd = mock.Mock() self.botmaster.maybeStartBuildsForBuilder('frank') 
brd.maybeStartBuildsOn.assert_called_once_with(['frank']) def test_maybeStartBuildsForWorker(self): brd = self.botmaster.brd = mock.Mock() b1 = mock.Mock(name='frank') b1.name = 'frank' b2 = mock.Mock(name='larry') b2.name = 'larry' self.botmaster.getBuildersForWorker = mock.Mock(return_value=[b1, b2]) self.botmaster.maybeStartBuildsForWorker('centos') self.botmaster.getBuildersForWorker.assert_called_once_with('centos') brd.maybeStartBuildsOn.assert_called_once_with(['frank', 'larry']) def test_maybeStartBuildsForAll(self): brd = self.botmaster.brd = mock.Mock() self.botmaster.builderNames = ['frank', 'larry'] self.botmaster.maybeStartBuildsForAllBuilders() brd.maybeStartBuildsOn.assert_called_once_with(['frank', 'larry']) buildbot-3.4.0/master/buildbot/test/unit/process/test_build.py000066400000000000000000001076671413250514000245610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import operator import posixpath from mock import Mock from mock import call from twisted.internet import defer from twisted.trial import unittest from zope.interface import implementer from buildbot import interfaces from buildbot.locks import WorkerLock from buildbot.process.build import Build from buildbot.process.buildstep import BuildStep from buildbot.process.metrics import MetricLogObserver from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.test.fake import fakemaster from buildbot.test.fake import fakeprotocol from buildbot.test.fake import worker from buildbot.test.util.misc import TestReactorMixin class FakeChange: def __init__(self, number=None): self.properties = Properties() self.number = number self.who = "me" class FakeSource: def __init__(self): self.sourcestampsetid = None self.changes = [] self.branch = None self.revision = None self.repository = '' self.codebase = '' self.project = '' self.patch_info = None self.patch = None def getRepository(self): return self.repository class FakeRequest: def __init__(self): self.sources = [] self.reason = "Because" self.properties = Properties() self.id = 9385 def mergeSourceStampsWith(self, others): return self.sources def mergeReasons(self, others): return self.reason class FakeBuildStep(BuildStep): def __init__(self): super().__init__(haltOnFailure=False, flunkOnWarnings=False, flunkOnFailure=True, warnOnWarnings=True, warnOnFailure=False, alwaysRun=False, name='fake') self._summary = {'step': 'result', 'build': 'build result'} self._expected_results = SUCCESS def run(self): return self._expected_results def getResultSummary(self): return self._summary def interrupt(self, reason): 
self.running = False self.interrupted = reason class FakeBuilder: def __init__(self, master): self.config = Mock() self.config.workerbuilddir = 'wbd' self.name = 'fred' self.master = master self.botmaster = master.botmaster self.builderid = 83 self._builders = {} self.config_version = 0 def getBuilderId(self): return defer.succeed(self.builderid) def setupProperties(self, props): pass def buildFinished(self, build, workerforbuilder): pass def getBuilderIdForName(self, name): return defer.succeed(self._builders.get(name, None) or self.builderid) @implementer(interfaces.IBuildStepFactory) class FakeStepFactory: """Fake step factory that just returns a fixed step object.""" def __init__(self, step): self.step = step def buildStep(self): return self.step class TestException(Exception): pass @implementer(interfaces.IBuildStepFactory) class FailingStepFactory: """Fake step factory that just returns a fixed step object.""" def buildStep(self): raise TestException("FailingStepFactory") class _StepController(): def __init__(self, step): self._step = step def finishStep(self, result): self._step._deferred.callback(result) class _ControllableStep(BuildStep): def __init__(self): super().__init__() self._deferred = defer.Deferred() def run(self): return self._deferred def makeControllableStepFactory(): step = _ControllableStep() controller = _StepController(step) return controller, FakeStepFactory(step) class TestBuild(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() r = FakeRequest() r.sources = [FakeSource()] r.sources[0].changes = [FakeChange()] r.sources[0].revision = "12345" self.request = r self.master = fakemaster.make_master(self, wantData=True) self.worker = worker.FakeWorker(self.master) self.worker.attached(None) self.builder = FakeBuilder(self.master) self.build = Build([r]) self.build.conn = fakeprotocol.FakeConnection(self.worker) self.workerforbuilder = Mock(name='workerforbuilder') self.workerforbuilder.worker = self.worker 
self.workerforbuilder.substantiate_if_needed = lambda _: True self.workerforbuilder.ping = lambda: True self.build.setBuilder(self.builder) self.build.text = [] self.build.buildid = 666 def assertWorkerPreparationFailure(self, reason): states = "".join(self.master.data.updates.stepStateString.values()) self.assertIn(states, reason) def testRunSuccessfulBuild(self): b = self.build step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) b.startBuild(self.workerforbuilder) self.assertEqual(b.results, SUCCESS) def testStopBuild(self): b = self.build step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) def startStep(*args, **kw): # Now interrupt the build b.stopBuild("stop it") return defer.Deferred() step.startStep = startStep b.startBuild(self.workerforbuilder) self.assertEqual(b.results, CANCELLED) self.assertIn('stop it', step.interrupted) def test_build_retry_when_worker_substantiate_returns_false(self): b = self.build step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) self.workerforbuilder.substantiate_if_needed = lambda _: False b.startBuild(self.workerforbuilder) self.assertEqual(b.results, RETRY) self.assertWorkerPreparationFailure('error while worker_prepare') def test_build_cancelled_when_worker_substantiate_returns_false_due_to_cancel(self): b = self.build step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) d = defer.Deferred() self.workerforbuilder.substantiate_if_needed = lambda _: d b.startBuild(self.workerforbuilder) b.stopBuild('Cancel Build', CANCELLED) d.callback(False) self.assertEqual(b.results, CANCELLED) self.assertWorkerPreparationFailure('error while worker_prepare') def test_build_retry_when_worker_substantiate_returns_false_due_to_cancel(self): b = self.build step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) d = defer.Deferred() self.workerforbuilder.substantiate_if_needed = lambda _: d b.startBuild(self.workerforbuilder) b.stopBuild('Cancel Build', RETRY) 
d.callback(False) self.assertEqual(b.results, RETRY) self.assertWorkerPreparationFailure('error while worker_prepare') @defer.inlineCallbacks def testAlwaysRunStepStopBuild(self): """Test that steps marked with alwaysRun=True still get run even if the build is stopped.""" # Create a build with 2 steps, the first one will get interrupted, and # the second one is marked with alwaysRun=True b = self.build step1 = FakeBuildStep() step1.alwaysRun = False step1.results = None step2 = FakeBuildStep() step2.alwaysRun = True step2.results = None b.setStepFactories([ FakeStepFactory(step1), FakeStepFactory(step2), ]) def startStep1(*args, **kw): # Now interrupt the build b.stopBuild("stop it") return defer.succeed(SUCCESS) step1.startStep = startStep1 step1.stepDone = lambda: False step2Started = [False] def startStep2(*args, **kw): step2Started[0] = True return defer.succeed(SUCCESS) step2.startStep = startStep2 step1.stepDone = lambda: False yield b.startBuild(self.workerforbuilder) self.assertEqual(b.results, CANCELLED) self.assertIn('stop it', step1.interrupted) self.assertTrue(step2Started[0]) @defer.inlineCallbacks def test_start_step_throws_exception(self): b = self.build step1 = FakeBuildStep() b.setStepFactories([ FakeStepFactory(step1), ]) def startStep(*args, **kw): raise TestException() step1.startStep = startStep yield b.startBuild(self.workerforbuilder) self.assertEqual(b.results, EXCEPTION) self.flushLoggedErrors(TestException) @defer.inlineCallbacks def testBuild_canAcquireLocks(self): b = self.build workerforbuilder1 = Mock() workerforbuilder2 = Mock() lock = WorkerLock('lock') counting_access = lock.access('counting') real_lock = yield b.builder.botmaster.getLockByID(lock, 0) # no locks, so both these pass (call twice to verify there's no # state/memory) lock_list = [(real_lock, counting_access)] self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertTrue( 
Build._canAcquireLocks(lock_list, workerforbuilder2)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder2)) worker_lock_1 = real_lock.getLockForWorker( workerforbuilder1.worker.workername) worker_lock_2 = real_lock.getLockForWorker( workerforbuilder2.worker.workername) # then have workerforbuilder2 claim its lock: worker_lock_2.claim(workerforbuilder2, counting_access) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertFalse( Build._canAcquireLocks(lock_list, workerforbuilder2)) self.assertFalse( Build._canAcquireLocks(lock_list, workerforbuilder2)) worker_lock_2.release(workerforbuilder2, counting_access) # then have workerforbuilder1 claim its lock: worker_lock_1.claim(workerforbuilder1, counting_access) self.assertFalse( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertFalse( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder2)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder2)) worker_lock_1.release(workerforbuilder1, counting_access) def testBuilddirPropType(self): b = self.build b.builder.config.workerbuilddir = 'test' self.workerforbuilder.worker.worker_basedir = "/srv/buildbot/worker" self.workerforbuilder.worker.path_module = posixpath b.getProperties = Mock() b.setProperty = Mock() b.setupWorkerBuildirProperty(self.workerforbuilder) expected_path = '/srv/buildbot/worker/test' b.setProperty.assert_has_calls( [call('builddir', expected_path, 'Worker')], any_order=True) @defer.inlineCallbacks def testBuildLocksAcquired(self): b = self.build lock = WorkerLock('lock') claimCount = [0] lock_access = lock.access('counting') lock.access = lambda mode: lock_access real_workerlock = yield b.builder.botmaster.getLockByID(lock, 0) real_lock = real_workerlock.getLockForWorker(self.workerforbuilder.worker.workername) def claim(owner, access): 
claimCount[0] += 1 return real_lock.old_claim(owner, access) real_lock.old_claim = real_lock.claim real_lock.claim = claim yield b.setLocks([lock_access]) step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) b.startBuild(self.workerforbuilder) self.assertEqual(b.results, SUCCESS) self.assertEqual(claimCount[0], 1) @defer.inlineCallbacks def testBuildLocksOrder(self): """Test that locks are acquired in FIFO order; specifically that counting locks cannot jump ahead of exclusive locks""" eBuild = self.build cBuilder = FakeBuilder(self.master) cBuild = Build([self.request]) cBuild.setBuilder(cBuilder) eWorker = Mock() cWorker = Mock() eWorker.worker = self.worker cWorker.worker = self.worker eWorker.substantiate_if_needed = cWorker.substantiate_if_needed = lambda _: True eWorker.ping = cWorker.ping = lambda: True lock = WorkerLock('lock', 2) claimLog = [] real_workerlock = yield self.master.botmaster.getLockByID(lock, 0) realLock = real_workerlock.getLockForWorker(self.worker.workername) def claim(owner, access): claimLog.append(owner) return realLock.oldClaim(owner, access) realLock.oldClaim = realLock.claim realLock.claim = claim yield eBuild.setLocks([lock.access('exclusive')]) yield cBuild.setLocks([lock.access('counting')]) fakeBuild = Mock() fakeBuildAccess = lock.access('counting') realLock.claim(fakeBuild, fakeBuildAccess) step = FakeBuildStep() eBuild.setStepFactories([FakeStepFactory(step)]) cBuild.setStepFactories([FakeStepFactory(step)]) e = eBuild.startBuild(eWorker) c = cBuild.startBuild(cWorker) d = defer.DeferredList([e, c]) realLock.release(fakeBuild, fakeBuildAccess) yield d self.assertEqual(eBuild.results, SUCCESS) self.assertEqual(cBuild.results, SUCCESS) self.assertEqual(claimLog, [fakeBuild, eBuild, cBuild]) @defer.inlineCallbacks def testBuildWaitingForLocks(self): b = self.build lock = WorkerLock('lock') claimCount = [0] lock_access = lock.access('counting') lock.access = lambda mode: lock_access real_workerlock = yield 
b.builder.botmaster.getLockByID(lock, 0) real_lock = real_workerlock.getLockForWorker(self.workerforbuilder.worker.workername) def claim(owner, access): claimCount[0] += 1 return real_lock.old_claim(owner, access) real_lock.old_claim = real_lock.claim real_lock.claim = claim yield b.setLocks([lock_access]) step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) real_lock.claim(Mock(), lock.access('counting')) b.startBuild(self.workerforbuilder) self.assertEqual(claimCount[0], 1) self.assertTrue(b.currentStep is None) self.assertTrue(b._acquiringLock is not None) @defer.inlineCallbacks def testStopBuildWaitingForLocks(self): b = self.build lock = WorkerLock('lock') lock_access = lock.access('counting') lock.access = lambda mode: lock_access real_workerlock = yield b.builder.botmaster.getLockByID(lock, 0) real_lock = real_workerlock.getLockForWorker(self.workerforbuilder.worker.workername) yield b.setLocks([lock_access]) step = FakeBuildStep() step.alwaysRun = False b.setStepFactories([FakeStepFactory(step)]) real_lock.claim(Mock(), lock.access('counting')) def acquireLocks(res=None): retval = Build.acquireLocks(b, res) b.stopBuild('stop it') return retval b.acquireLocks = acquireLocks b.startBuild(self.workerforbuilder) self.assertTrue(b.currentStep is None) self.assertEqual(b.results, CANCELLED) @defer.inlineCallbacks def testStopBuildWaitingForLocks_lostRemote(self): b = self.build lock = WorkerLock('lock') lock_access = lock.access('counting') lock.access = lambda mode: lock_access real_workerlock = yield b.builder.botmaster.getLockByID(lock, 0) real_lock = real_workerlock.getLockForWorker(self.workerforbuilder.worker.workername) yield b.setLocks([lock_access]) step = FakeBuildStep() step.alwaysRun = False b.setStepFactories([FakeStepFactory(step)]) real_lock.claim(Mock(), lock.access('counting')) def acquireLocks(res=None): retval = Build.acquireLocks(b, res) b.lostRemote() return retval b.acquireLocks = acquireLocks 
b.startBuild(self.workerforbuilder) self.assertTrue(b.currentStep is None) self.assertEqual(b.results, RETRY) @defer.inlineCallbacks def testStopBuildWaitingForStepLocks(self): b = self.build lock = WorkerLock('lock') lock_access = lock.access('counting') lock.access = lambda mode: lock_access real_workerlock = yield b.builder.botmaster.getLockByID(lock, 0) real_lock = real_workerlock.getLockForWorker(self.workerforbuilder.worker.workername) step = BuildStep(locks=[lock_access]) b.setStepFactories([FakeStepFactory(step)]) real_lock.claim(Mock(), lock.access('counting')) gotLocks = [False] def acquireLocks(res=None): gotLocks[0] = True retval = BuildStep.acquireLocks(step, res) self.assertTrue(b.currentStep is step) b.stopBuild('stop it') return retval step.acquireLocks = acquireLocks b.startBuild(self.workerforbuilder) self.assertEqual(gotLocks, [True]) self.assertEqual(b.results, CANCELLED) def testStepDone(self): b = self.build b.results = SUCCESS step = FakeBuildStep() terminate = b.stepDone(SUCCESS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, SUCCESS) def testStepDoneHaltOnFailure(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.haltOnFailure = True terminate = b.stepDone(FAILURE, step) self.assertTrue(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneHaltOnFailureNoFlunkOnFailure(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.flunkOnFailure = False step.haltOnFailure = True terminate = b.stepDone(FAILURE, step) self.assertTrue(terminate.result) self.assertEqual(b.results, SUCCESS) def testStepDoneFlunkOnWarningsFlunkOnFailure(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.flunkOnFailure = True step.flunkOnWarnings = True b.stepDone(WARNINGS, step) terminate = b.stepDone(FAILURE, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneNoWarnOnWarnings(self): b = self.build b.results = SUCCESS step = 
FakeBuildStep() step.warnOnWarnings = False terminate = b.stepDone(WARNINGS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, SUCCESS) def testStepDoneWarnings(self): b = self.build b.results = SUCCESS step = FakeBuildStep() terminate = b.stepDone(WARNINGS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, WARNINGS) def testStepDoneFail(self): b = self.build b.results = SUCCESS step = FakeBuildStep() terminate = b.stepDone(FAILURE, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneFailOverridesWarnings(self): b = self.build b.results = WARNINGS step = FakeBuildStep() terminate = b.stepDone(FAILURE, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneWarnOnFailure(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.warnOnFailure = True step.flunkOnFailure = False terminate = b.stepDone(FAILURE, step) self.assertFalse(terminate.result) self.assertEqual(b.results, WARNINGS) def testStepDoneFlunkOnWarnings(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.flunkOnWarnings = True terminate = b.stepDone(WARNINGS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneHaltOnFailureFlunkOnWarnings(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.flunkOnWarnings = True self.haltOnFailure = True terminate = b.stepDone(WARNINGS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneWarningsDontOverrideFailure(self): b = self.build b.results = FAILURE step = FakeBuildStep() terminate = b.stepDone(WARNINGS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneRetryOverridesAnythingElse(self): b = self.build b.results = RETRY step = FakeBuildStep() step.alwaysRun = True b.stepDone(WARNINGS, step) b.stepDone(FAILURE, step) b.stepDone(SUCCESS, step) terminate = b.stepDone(EXCEPTION, 
step) self.assertTrue(terminate.result) self.assertEqual(b.results, RETRY) def test_getSummaryStatistic(self): b = self.build b.executedSteps = [ BuildStep(), BuildStep(), BuildStep() ] b.executedSteps[0].setStatistic('casualties', 7) b.executedSteps[2].setStatistic('casualties', 4) add = operator.add self.assertEqual(b.getSummaryStatistic('casualties', add), 11) self.assertEqual(b.getSummaryStatistic('casualties', add, 10), 21) def create_fake_steps(self, names): steps = [] def create_fake_step(name): step = FakeBuildStep() step.name = name return step for name in names: step = create_fake_step(name) steps.append(step) return steps @defer.inlineCallbacks def test_start_build_sets_properties(self): b = self.build b.setProperty("foo", "bar", "test") step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) yield b.startBuild(self.workerforbuilder) self.assertEqual(b.results, SUCCESS) # remove duplicates, note that set() can't be used as properties contain complex # data structures. 
Also, remove builddir which depends on the platform got_properties = [] for prop in sorted(self.master.data.updates.properties): if prop not in got_properties and prop[1] != 'builddir': got_properties.append(prop) self.assertEqual(got_properties, [ (10, 'branch', None, 'Build'), (10, 'buildnumber', 1, 'Build'), (10, 'codebase', '', 'Build'), (10, 'foo', 'bar', 'test'), # custom property (10, 'owners', ['me'], 'Build'), (10, 'project', '', 'Build'), (10, 'repository', '', 'Build'), (10, 'revision', '12345', 'Build') ]) @defer.inlineCallbacks def testAddStepsAfterCurrentStep(self): b = self.build steps = self.create_fake_steps(["a", "b", "c"]) def startStepB(*args, **kw): new_steps = self.create_fake_steps(["d", "e"]) b.addStepsAfterCurrentStep([FakeStepFactory(s) for s in new_steps]) return SUCCESS steps[1].startStep = startStepB b.setStepFactories([FakeStepFactory(s) for s in steps]) yield b.startBuild(self.workerforbuilder) self.assertEqual(b.results, SUCCESS) expected_names = ["a", "b", "d", "e", "c"] executed_names = [s.name for s in b.executedSteps] self.assertEqual(executed_names, expected_names) @defer.inlineCallbacks def testAddStepsAfterLastStep(self): b = self.build steps = self.create_fake_steps(["a", "b", "c"]) def startStepB(*args, **kw): new_steps = self.create_fake_steps(["d", "e"]) b.addStepsAfterLastStep([FakeStepFactory(s) for s in new_steps]) return SUCCESS steps[1].startStep = startStepB b.setStepFactories([FakeStepFactory(s) for s in steps]) yield b.startBuild(self.workerforbuilder) self.assertEqual(b.results, SUCCESS) expected_names = ["a", "b", "c", "d", "e"] executed_names = [s.name for s in b.executedSteps] self.assertEqual(executed_names, expected_names) def testStepNamesUnique(self): # if the step names are unique they should remain unchanged b = self.build steps = self.create_fake_steps(["clone", "command", "clean"]) b.setStepFactories([FakeStepFactory(s) for s in steps]) b.startBuild(self.workerforbuilder) self.assertEqual(b.results, 
SUCCESS) expected_names = ["clone", "command", "clean"] executed_names = [s.name for s in b.executedSteps] self.assertEqual(executed_names, expected_names) def testStepNamesDuplicate(self): b = self.build steps = self.create_fake_steps(["stage", "stage", "stage"]) b.setStepFactories([FakeStepFactory(s) for s in steps]) b.startBuild(self.workerforbuilder) self.assertEqual(b.results, SUCCESS) expected_names = ["stage", "stage_1", "stage_2"] executed_names = [s.name for s in b.executedSteps] self.assertEqual(executed_names, expected_names) def testStepNamesDuplicateAfterAdd(self): b = self.build steps = self.create_fake_steps(["a", "b", "c"]) def startStepB(*args, **kw): new_steps = self.create_fake_steps(["c", "c"]) b.addStepsAfterCurrentStep([FakeStepFactory(s) for s in new_steps]) return SUCCESS steps[1].startStep = startStepB b.setStepFactories([FakeStepFactory(s) for s in steps]) b.startBuild(self.workerforbuilder) self.assertEqual(b.results, SUCCESS) expected_names = ["a", "b", "c", "c_1", "c_2"] executed_names = [s.name for s in b.executedSteps] self.assertEqual(executed_names, expected_names) @defer.inlineCallbacks def testGetUrl(self): self.build.number = 3 url = yield self.build.getUrl() self.assertEqual(url, 'http://localhost:8080/#builders/83/builds/3') @defer.inlineCallbacks def testGetUrlForVirtualBuilder(self): # Let's fake a virtual builder self.builder._builders['wilma'] = 108 self.build.setProperty('virtual_builder_name', 'wilma', 'Build') self.build.setProperty('virtual_builder_tags', ['_virtual_']) self.build.number = 33 url = yield self.build.getUrl() self.assertEqual(url, 'http://localhost:8080/#builders/108/builds/33') def test_active_builds_metric(self): """ The number of active builds is increased when a build starts and decreased when it finishes. 
""" b = self.build controller, step_factory = makeControllableStepFactory() b.setStepFactories([step_factory]) observer = MetricLogObserver() observer.enable() self.addCleanup(observer.disable) def get_active_builds(): return observer.asDict()['counters'].get('active_builds', 0) self.assertEqual(get_active_builds(), 0) b.startBuild(self.workerforbuilder) self.assertEqual(get_active_builds(), 1) controller.finishStep(SUCCESS) self.assertEqual(get_active_builds(), 0) def test_active_builds_metric_failure(self): """ The number of active builds is increased when a build starts and decreased when it finishes.. """ b = self.build b.setStepFactories([FailingStepFactory()]) observer = MetricLogObserver() observer.enable() self.addCleanup(observer.disable) def get_active_builds(): return observer.asDict()['counters'].get('active_builds', 0) self.assertEqual(get_active_builds(), 0) b.startBuild(self.workerforbuilder) self.flushLoggedErrors(TestException) self.assertEqual(get_active_builds(), 0) class TestMultipleSourceStamps(unittest.TestCase): def setUp(self): r = FakeRequest() s1 = FakeSource() s1.repository = "repoA" s1.codebase = "A" s1.changes = [FakeChange(10), FakeChange(11)] s1.revision = "12345" s2 = FakeSource() s2.repository = "repoB" s2.codebase = "B" s2.changes = [FakeChange(12), FakeChange(13)] s2.revision = "67890" s3 = FakeSource() s3.repository = "repoC" # no codebase defined s3.changes = [FakeChange(14), FakeChange(15)] s3.revision = "111213" r.sources.extend([s1, s2, s3]) self.build = Build([r]) def test_buildReturnSourceStamp(self): """ Test that a build returns the correct sourcestamp """ source1 = self.build.getSourceStamp("A") source2 = self.build.getSourceStamp("B") self.assertEqual( [source1.repository, source1.revision], ["repoA", "12345"]) self.assertEqual( [source2.repository, source2.revision], ["repoB", "67890"]) def test_buildReturnSourceStamp_empty_codebase(self): """ Test that a build returns the correct sourcestamp if codebase is empty """ 
codebase = '' source3 = self.build.getSourceStamp(codebase) self.assertTrue(source3 is not None) self.assertEqual( [source3.repository, source3.revision], ["repoC", "111213"]) class TestBuildBlameList(unittest.TestCase): def setUp(self): self.sourceByMe = FakeSource() self.sourceByMe.repository = "repoA" self.sourceByMe.codebase = "A" self.sourceByMe.changes = [FakeChange(10), FakeChange(11)] self.sourceByMe.changes[0].who = "me" self.sourceByMe.changes[1].who = "me" self.sourceByHim = FakeSource() self.sourceByHim.repository = "repoB" self.sourceByHim.codebase = "B" self.sourceByHim.changes = [FakeChange(12), FakeChange(13)] self.sourceByHim.changes[0].who = "him" self.sourceByHim.changes[1].who = "him" self.patchSource = FakeSource() self.patchSource.repository = "repoB" self.patchSource.codebase = "B" self.patchSource.changes = [] self.patchSource.revision = "67890" self.patchSource.patch_info = ("jeff", "jeff's new feature") def test_blamelist_for_changes(self): r = FakeRequest() r.sources.extend([self.sourceByMe, self.sourceByHim]) build = Build([r]) blamelist = build.blamelist() self.assertEqual(blamelist, ['him', 'me']) def test_blamelist_for_patch(self): r = FakeRequest() r.sources.extend([self.patchSource]) build = Build([r]) blamelist = build.blamelist() # If no patch is set, author will not be est self.assertEqual(blamelist, []) class TestSetupProperties_MultipleSources(TestReactorMixin, unittest.TestCase): """ Test that the property values, based on the available requests, are initialized properly """ def setUp(self): self.setUpTestReactor() self.props = {} self.r = FakeRequest() self.r.sources = [] self.r.sources.append(FakeSource()) self.r.sources[0].changes = [FakeChange()] self.r.sources[0].repository = "http://svn-repo-A" self.r.sources[0].codebase = "A" self.r.sources[0].branch = "develop" self.r.sources[0].revision = "12345" self.r.sources.append(FakeSource()) self.r.sources[1].changes = [FakeChange()] self.r.sources[1].repository = 
"http://svn-repo-B" self.r.sources[1].codebase = "B" self.r.sources[1].revision = "34567" self.build = Build([self.r]) self.build.setStepFactories([]) self.builder = FakeBuilder(fakemaster.make_master(self, wantData=True)) self.build.setBuilder(self.builder) # record properties that will be set self.build.properties.setProperty = self.setProperty def setProperty(self, n, v, s, runtime=False): if s not in self.props: self.props[s] = {} if not self.props[s]: self.props[s] = {} self.props[s][n] = v def test_sourcestamp_properties_not_set(self): Build.setupBuildProperties(self.build.getProperties(), [self.r], self.r.sources) self.assertNotIn("codebase", self.props["Build"]) self.assertNotIn("revision", self.props["Build"]) self.assertNotIn("branch", self.props["Build"]) self.assertNotIn("project", self.props["Build"]) self.assertNotIn("repository", self.props["Build"]) class TestSetupProperties_SingleSource(TestReactorMixin, unittest.TestCase): """ Test that the property values, based on the available requests, are initialized properly """ def setUp(self): self.setUpTestReactor() self.props = {} self.r = FakeRequest() self.r.sources = [] self.r.sources.append(FakeSource()) self.r.sources[0].changes = [FakeChange()] self.r.sources[0].repository = "http://svn-repo-A" self.r.sources[0].codebase = "A" self.r.sources[0].branch = "develop" self.r.sources[0].revision = "12345" self.build = Build([self.r]) self.build.setStepFactories([]) self.builder = FakeBuilder(fakemaster.make_master(self, wantData=True)) self.build.setBuilder(self.builder) # record properties that will be set self.build.properties.setProperty = self.setProperty def setProperty(self, n, v, s, runtime=False): if s not in self.props: self.props[s] = {} if not self.props[s]: self.props[s] = {} self.props[s][n] = v def test_properties_codebase(self): Build.setupBuildProperties(self.build.getProperties(), [self.r], self.r.sources) codebase = self.props["Build"]["codebase"] self.assertEqual(codebase, "A") def 
test_properties_repository(self): Build.setupBuildProperties(self.build.getProperties(), [self.r], self.r.sources) repository = self.props["Build"]["repository"] self.assertEqual(repository, "http://svn-repo-A") def test_properties_revision(self): Build.setupBuildProperties(self.build.getProperties(), [self.r], self.r.sources) revision = self.props["Build"]["revision"] self.assertEqual(revision, "12345") def test_properties_branch(self): Build.setupBuildProperties(self.build.getProperties(), [self.r], self.r.sources) branch = self.props["Build"]["branch"] self.assertEqual(branch, "develop") def test_property_project(self): Build.setupBuildProperties(self.build.getProperties(), [self.r], self.r.sources) project = self.props["Build"]["project"] self.assertEqual(project, '') buildbot-3.4.0/master/buildbot/test/unit/process/test_builder.py000066400000000000000000000571031413250514000250750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import random from parameterized import parameterized import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process import builder from buildbot.process import factory from buildbot.process.properties import Properties from buildbot.process.properties import renderer from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.warnings import assertProducesWarning from buildbot.util import epoch2datetime from buildbot.worker import AbstractLatentWorker class BuilderMixin: def setUpBuilderMixin(self): self.factory = factory.BuildFactory() self.master = fakemaster.make_master(self, wantData=True) self.mq = self.master.mq self.db = self.master.db # returns a Deferred that returns None def makeBuilder(self, name="bldr", patch_random=False, noReconfig=False, **config_kwargs): """Set up C{self.bldr}""" # only include the necessary required config, plus user-requested self.config_args = { 'name': name, 'workername': 'wrk', 'builddir': 'bdir', 'workerbuilddir': "wbdir", 'factory': self.factory } self.config_args.update(config_kwargs) self.builder_config = config.BuilderConfig(**self.config_args) self.bldr = builder.Builder( self.builder_config.name) self.bldr.master = self.master self.bldr.botmaster = self.master.botmaster # patch into the _startBuildsFor method self.builds_started = [] def _startBuildFor(workerforbuilder, buildrequests): self.builds_started.append((workerforbuilder, buildrequests)) return defer.succeed(True) self.bldr._startBuildFor = _startBuildFor if patch_random: # patch 'random.choice' to always take the worker that sorts # last, based on its name self.patch(random, "choice", lambda lst: sorted(lst, key=lambda m: m.name)[-1]) self.bldr.startService() mastercfg = config.MasterConfig() mastercfg.builders = [self.builder_config] if not noReconfig: return 
self.bldr.reconfigServiceWithBuildbotConfig(mastercfg) class FakeWorker: builds_may_be_incompatible = False def __init__(self, workername): self.workername = workername class FakeLatentWorker(AbstractLatentWorker): builds_may_be_incompatible = True def __init__(self, is_compatible_with_build): self.is_compatible_with_build = is_compatible_with_build def isCompatibleWithBuild(self, build_props): return defer.succeed(self.is_compatible_with_build) def checkConfig(self, name, _, **kwargs): pass def reconfigService(self, name, _, **kwargs): pass class TestBuilder(TestReactorMixin, BuilderMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() # a collection of rows that would otherwise clutter up every test self.setUpBuilderMixin() self.base_rows = [ fakedb.SourceStamp(id=21), fakedb.Buildset(id=11, reason='because'), fakedb.BuildsetSourceStamp(buildsetid=11, sourcestampid=21), ] @defer.inlineCallbacks def makeBuilder(self, patch_random=False, startBuildsForSucceeds=True, **config_kwargs): yield super().makeBuilder(patch_random=patch_random, **config_kwargs) # patch into the _startBuildsFor method self.builds_started = [] def _startBuildFor(workerforbuilder, buildrequests): self.builds_started.append((workerforbuilder, buildrequests)) return defer.succeed(startBuildsForSucceeds) self.bldr._startBuildFor = _startBuildFor def assertBuildsStarted(self, exp): # munge builds_started into a list of (worker, [brids]) builds_started = [ (wrk.name, [br.id for br in buildreqs]) for (wrk, buildreqs) in self.builds_started] self.assertEqual(sorted(builds_started), sorted(exp)) def setWorkerForBuilders(self, workerforbuilders): """C{workerforbuilders} maps name : available""" self.bldr.workers = [] for name, avail in workerforbuilders.items(): wfb = mock.Mock(spec=['isAvailable'], name=name) wfb.name = name wfb.isAvailable.return_value = avail self.bldr.workers.append(wfb) # services @defer.inlineCallbacks def test_maybeStartBuild_builder_stopped(self): yield 
self.makeBuilder() # this will cause an exception if maybeStartBuild tries to start self.bldr.workers = None # so we just hope this does not fail yield self.bldr.stopService() started = yield self.bldr.maybeStartBuild(None, []) self.assertEqual(started, False) # maybeStartBuild def _makeMocks(self): worker = mock.Mock() worker.name = 'worker' buildrequest = mock.Mock() buildrequest.id = 10 buildrequests = [buildrequest] return worker, buildrequests @defer.inlineCallbacks def test_maybeStartBuild(self): yield self.makeBuilder() worker, buildrequests = self._makeMocks() started = yield self.bldr.maybeStartBuild(worker, buildrequests) self.assertEqual(started, True) self.assertBuildsStarted([('worker', [10])]) @defer.inlineCallbacks def test_maybeStartBuild_failsToStart(self): yield self.makeBuilder(startBuildsForSucceeds=False) worker, buildrequests = self._makeMocks() started = yield self.bldr.maybeStartBuild(worker, buildrequests) self.assertEqual(started, False) self.assertBuildsStarted([('worker', [10])]) @defer.inlineCallbacks def do_test_getCollapseRequestsFn(self, builder_param=None, global_param=None, expected=0): def cble(): pass builder_param = cble if builder_param == 'callable' else builder_param global_param = cble if global_param == 'callable' else global_param # omit the constructor parameter if None was given if builder_param is None: yield self.makeBuilder() else: yield self.makeBuilder(collapseRequests=builder_param) self.master.config.collapseRequests = global_param fn = self.bldr.getCollapseRequestsFn() if fn == builder.Builder._defaultCollapseRequestFn: fn = "default" elif fn is cble: fn = 'callable' self.assertEqual(fn, expected) def test_getCollapseRequestsFn_defaults(self): self.do_test_getCollapseRequestsFn(None, None, "default") def test_getCollapseRequestsFn_global_True(self): self.do_test_getCollapseRequestsFn(None, True, "default") def test_getCollapseRequestsFn_global_False(self): self.do_test_getCollapseRequestsFn(None, False, None) def 
test_getCollapseRequestsFn_global_function(self): self.do_test_getCollapseRequestsFn(None, 'callable', 'callable') def test_getCollapseRequestsFn_builder_True(self): self.do_test_getCollapseRequestsFn(True, False, "default") def test_getCollapseRequestsFn_builder_False(self): self.do_test_getCollapseRequestsFn(False, True, None) def test_getCollapseRequestsFn_builder_function(self): self.do_test_getCollapseRequestsFn('callable', None, 'callable') # canStartBuild @defer.inlineCallbacks def test_canStartBuild_no_constraints(self): yield self.makeBuilder() wfb = mock.Mock() wfb.worker = FakeWorker('worker') startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, True) startable = yield self.bldr.canStartBuild(wfb, 101) self.assertEqual(startable, True) @defer.inlineCallbacks def test_canStartBuild_config_canStartBuild_returns_value(self): yield self.makeBuilder() def canStartBuild(bldr, worker, breq): return breq == 100 canStartBuild = mock.Mock(side_effect=canStartBuild) self.bldr.config.canStartBuild = canStartBuild wfb = mock.Mock() wfb.worker = FakeWorker('worker') startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, True) canStartBuild.assert_called_with(self.bldr, wfb, 100) canStartBuild.reset_mock() startable = yield self.bldr.canStartBuild(wfb, 101) self.assertEqual(startable, False) canStartBuild.assert_called_with(self.bldr, wfb, 101) canStartBuild.reset_mock() @defer.inlineCallbacks def test_canStartBuild_config_canStartBuild_returns_deferred(self): yield self.makeBuilder() wfb = mock.Mock() wfb.worker = FakeWorker('worker') def canStartBuild(bldr, wfb, breq): return defer.succeed(breq == 100) canStartBuild = mock.Mock(side_effect=canStartBuild) self.bldr.config.canStartBuild = canStartBuild startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, True) canStartBuild.assert_called_with(self.bldr, wfb, 100) canStartBuild.reset_mock() startable = yield self.bldr.canStartBuild(wfb, 101) 
self.assertEqual(startable, False) canStartBuild.assert_called_with(self.bldr, wfb, 101) canStartBuild.reset_mock() @defer.inlineCallbacks def test_canStartBuild_cant_acquire_locks_but_no_locks(self): yield self.makeBuilder() self.bldr.botmaster.getLockFromLockAccesses = mock.Mock(return_value=[]) wfb = mock.Mock() wfb.worker = FakeWorker('worker') with mock.patch( 'buildbot.process.build.Build._canAcquireLocks', mock.Mock(return_value=False)): startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, True) @defer.inlineCallbacks def test_canStartBuild_with_locks(self): yield self.makeBuilder() self.bldr.botmaster.getLockFromLockAccesses = mock.Mock(return_value=[mock.Mock()]) wfb = mock.Mock() wfb.worker = FakeWorker('worker') with mock.patch( 'buildbot.process.build.Build._canAcquireLocks', mock.Mock(return_value=False)): startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, False) @defer.inlineCallbacks def test_canStartBuild_with_renderable_locks(self): yield self.makeBuilder() self.bldr.botmaster.getLockFromLockAccesses = mock.Mock(return_value=[mock.Mock()]) renderedLocks = [False] @renderer def rendered_locks(props): renderedLocks[0] = True return [mock.Mock()] self.bldr.config.locks = rendered_locks wfb = mock.Mock() wfb.worker = FakeWorker('worker') with mock.patch( 'buildbot.process.build.Build._canAcquireLocks', mock.Mock(return_value=False)): with mock.patch( 'buildbot.process.build.Build.setupPropertiesKnownBeforeBuildStarts', mock.Mock()): startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, False) self.assertTrue(renderedLocks[0]) @defer.inlineCallbacks def test_canStartBuild_with_incompatible_latent_worker(self): yield self.makeBuilder() wfb = mock.Mock() wfb.worker = FakeLatentWorker(is_compatible_with_build=False) with mock.patch( 'buildbot.process.build.Build.setupPropertiesKnownBeforeBuildStarts', mock.Mock()): startable = yield self.bldr.canStartBuild(wfb, 100) 
self.assertFalse(startable) @defer.inlineCallbacks def test_canStartBuild_with_renderable_locks_with_compatible_latent_worker(self): yield self.makeBuilder() self.bldr.botmaster.getLockFromLockAccesses = mock.Mock(return_value=[mock.Mock()]) rendered_locks = [False] @renderer def locks_renderer(props): rendered_locks[0] = True return [mock.Mock()] self.bldr.config.locks = locks_renderer wfb = mock.Mock() wfb.worker = FakeLatentWorker(is_compatible_with_build=True) with mock.patch( 'buildbot.process.build.Build._canAcquireLocks', mock.Mock(return_value=False)): with mock.patch( 'buildbot.process.build.Build.setupPropertiesKnownBeforeBuildStarts', mock.Mock()): startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, False) self.assertFalse(startable) self.assertTrue(rendered_locks[0]) @defer.inlineCallbacks def test_canStartBuild_enforceChosenWorker(self): """enforceChosenWorker rejects and accepts builds""" yield self.makeBuilder() self.bldr.config.canStartBuild = builder.enforceChosenWorker workerforbuilder = mock.Mock() workerforbuilder.worker = FakeWorker('worker5') breq = mock.Mock() # no worker requested breq.properties = {} result = yield self.bldr.canStartBuild(workerforbuilder, breq) self.assertIdentical(True, result) # worker requested as the right one breq.properties = {'workername': 'worker5'} result = yield self.bldr.canStartBuild(workerforbuilder, breq) self.assertIdentical(True, result) # worker requested as the wrong one breq.properties = {'workername': 'worker4'} result = yield self.bldr.canStartBuild(workerforbuilder, breq) self.assertIdentical(False, result) # worker set to non string value gets skipped breq.properties = {'workername': 0} result = yield self.bldr.canStartBuild(workerforbuilder, breq) self.assertIdentical(True, result) # other methods @defer.inlineCallbacks def test_getBuilderId(self): self.factory = factory.BuildFactory() self.master = fakemaster.make_master(self, wantData=True) # only include the necessary 
required config, plus user-requested self.bldr = builder.Builder('bldr') self.bldr.master = self.master self.master.data.updates.findBuilderId = fbi = mock.Mock() fbi.return_value = defer.succeed(13) builderid = yield self.bldr.getBuilderId() self.assertEqual(builderid, 13) fbi.assert_called_with('bldr') fbi.reset_mock() builderid = yield self.bldr.getBuilderId() self.assertEqual(builderid, 13) fbi.assert_not_called() @defer.inlineCallbacks def test_expectations_deprecated(self): yield self.makeBuilder() with assertProducesWarning( Warning, message_pattern="'Builder.expectations' is deprecated."): deprecated = self.bldr.expectations self.assertIdentical(deprecated, None) @defer.inlineCallbacks def test_defaultProperties(self): props = Properties() props.setProperty('foo', 1, 'Scheduler') props.setProperty('bar', 'bleh', 'Change') yield self.makeBuilder(defaultProperties={'bar': 'onoes', 'cuckoo': 42}) self.bldr.setupProperties(props) self.assertEquals(props.getProperty('bar'), 'bleh') self.assertEquals(props.getProperty('cuckoo'), 42) class TestGetBuilderId(TestReactorMixin, BuilderMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpBuilderMixin() @defer.inlineCallbacks def test_getBuilderId(self): # noReconfig because reconfigService calls getBuilderId, and we haven't # set up the mock findBuilderId yet. yield self.makeBuilder(name='b1', noReconfig=True) fbi = self.master.data.updates.findBuilderId = mock.Mock(name='fbi') fbi.side_effect = lambda name: defer.succeed(13) # call twice.. 
self.assertEqual((yield self.bldr.getBuilderId()), 13) self.assertEqual((yield self.bldr.getBuilderId()), 13) # and see that fbi was only called once fbi.assert_called_once_with('b1') # check that the name was unicodified arg = fbi.mock_calls[0][1][0] self.assertIsInstance(arg, str) class TestGetOldestRequestTime(TestReactorMixin, BuilderMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpBuilderMixin() # a collection of rows that would otherwise clutter up every test master_id = fakedb.FakeBuildRequestsComponent.MASTER_ID self.base_rows = [ fakedb.SourceStamp(id=21), fakedb.Buildset(id=11, reason='because'), fakedb.BuildsetSourceStamp(buildsetid=11, sourcestampid=21), fakedb.Builder(id=77, name='bldr1'), fakedb.Builder(id=78, name='bldr2'), fakedb.Builder(id=182, name='foo@bar'), fakedb.BuildRequest(id=111, submitted_at=1000, builderid=77, buildsetid=11), fakedb.BuildRequest(id=222, submitted_at=2000, builderid=77, buildsetid=11), fakedb.BuildRequestClaim(brid=222, masterid=master_id, claimed_at=2001), fakedb.BuildRequest(id=333, submitted_at=3000, builderid=77, buildsetid=11), fakedb.BuildRequest(id=444, submitted_at=2500, builderid=78, buildsetid=11), fakedb.BuildRequestClaim(brid=444, masterid=master_id, claimed_at=2501), fakedb.BuildRequest(id=555, submitted_at=2800, builderid=182, buildsetid=11), ] yield self.db.insertTestData(self.base_rows) @defer.inlineCallbacks def test_gort_unclaimed(self): yield self.makeBuilder(name='bldr1') rqtime = yield self.bldr.getOldestRequestTime() self.assertEqual(rqtime, epoch2datetime(1000)) @defer.inlineCallbacks def test_gort_bldr_name_not_identifier(self): # this is a regression test for #2940 yield self.makeBuilder(name='foo@bar') rqtime = yield self.bldr.getOldestRequestTime() self.assertEqual(rqtime, epoch2datetime(2800)) @defer.inlineCallbacks def test_gort_all_claimed(self): yield self.makeBuilder(name='bldr2') rqtime = yield self.bldr.getOldestRequestTime() 
self.assertEqual(rqtime, None) class TestGetNewestCompleteTime(TestReactorMixin, BuilderMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpBuilderMixin() # a collection of rows that would otherwise clutter up every test master_id = fakedb.FakeBuildRequestsComponent.MASTER_ID self.base_rows = [ fakedb.SourceStamp(id=21), fakedb.Buildset(id=11, reason='because'), fakedb.BuildsetSourceStamp(buildsetid=11, sourcestampid=21), fakedb.Builder(id=77, name='bldr1'), fakedb.Builder(id=78, name='bldr2'), fakedb.BuildRequest(id=111, submitted_at=1000, complete=1, complete_at=1000, builderid=77, buildsetid=11), fakedb.BuildRequest(id=222, submitted_at=2000, complete=1, complete_at=4000, builderid=77, buildsetid=11), fakedb.BuildRequest(id=333, submitted_at=3000, complete=1, complete_at=3000, builderid=77, buildsetid=11), fakedb.BuildRequest(id=444, submitted_at=2500, builderid=78, buildsetid=11), fakedb.BuildRequestClaim(brid=444, masterid=master_id, claimed_at=2501), ] yield self.db.insertTestData(self.base_rows) @defer.inlineCallbacks def test_gnct_completed(self): yield self.makeBuilder(name='bldr1') rqtime = yield self.bldr.getNewestCompleteTime() self.assertEqual(rqtime, epoch2datetime(4000)) @defer.inlineCallbacks def test_gnct_no_completed(self): yield self.makeBuilder(name='bldr2') rqtime = yield self.bldr.getNewestCompleteTime() self.assertEqual(rqtime, None) class TestReconfig(TestReactorMixin, BuilderMixin, unittest.TestCase): """Tests that a reconfig properly updates all attributes""" def setUp(self): self.setUpTestReactor() self.setUpBuilderMixin() @defer.inlineCallbacks def test_reconfig(self): yield self.makeBuilder(description="Old", tags=["OldTag"]) new_builder_config = config.BuilderConfig(**self.config_args) new_builder_config.description = "New" new_builder_config.tags = ["NewTag"] mastercfg = config.MasterConfig() mastercfg.builders = [new_builder_config] yield 
self.bldr.reconfigServiceWithBuildbotConfig(mastercfg) # check that the reconfig grabbed a builderid self.assertIsNotNone(self.bldr._builderid) builder_dict = yield self.master.data.get(('builders', self.bldr._builderid)) self.assertEqual(builder_dict['description'], 'New') self.assertEqual(builder_dict['tags'], ['NewTag']) self.assertIdentical(self.bldr.config, new_builder_config) @parameterized.expand([ ('only_description', 'New', ['OldTag']), ('only_tags', 'Old', ['NewTag']), ]) @defer.inlineCallbacks def test_reconfig_changed(self, name, new_desc, new_tags): yield self.makeBuilder(description="Old", tags=["OldTag"]) new_builder_config = config.BuilderConfig(**self.config_args) new_builder_config.description = new_desc new_builder_config.tags = new_tags mastercfg = config.MasterConfig() mastercfg.builders = [new_builder_config] builder_updates = [] self.master.data.updates.updateBuilderInfo = \ lambda builderid, desc, tags: builder_updates.append((builderid, desc, tags)) yield self.bldr.reconfigServiceWithBuildbotConfig(mastercfg) self.assertEqual(builder_updates, [(1, new_desc, new_tags)]) @defer.inlineCallbacks def test_does_not_reconfig_identical(self): yield self.makeBuilder(description="Old", tags=["OldTag"]) new_builder_config = config.BuilderConfig(**self.config_args) mastercfg = config.MasterConfig() mastercfg.builders = [new_builder_config] builder_updates = [] self.master.data.updates.updateBuilderInfo = \ lambda builderid, desc, tags: builder_updates.append((builderid, desc, tags)) yield self.bldr.reconfigServiceWithBuildbotConfig(mastercfg) self.assertEqual(builder_updates, []) buildbot-3.4.0/master/buildbot/test/unit/process/test_buildrequest.py000066400000000000000000000740131413250514000261560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import json import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process import buildrequest from buildbot.process.builder import Builder from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class TestBuildRequestCollapser(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True) self.master.botmaster = mock.Mock(name='botmaster') self.master.botmaster.builders = {} self.builders = {} self.bldr = yield self.createBuilder('A', builderid=77) @defer.inlineCallbacks def createBuilder(self, name, builderid=None): if builderid is None: b = fakedb.Builder(name=name) yield self.master.db.insertTestData([b]) builderid = b.id bldr = mock.Mock(name=name) bldr.name = name bldr.master = self.master self.master.botmaster.builders[name] = bldr self.builders[name] = bldr bldr.getCollapseRequestsFn = lambda: False return bldr def tearDown(self): pass @defer.inlineCallbacks def do_request_collapse(self, rows, brids, exp): yield self.master.db.insertTestData(rows) brCollapser = buildrequest.BuildRequestCollapser(self.master, brids) self.assertEqual(exp, (yield brCollapser.collapse())) def test_collapseRequests_no_other_request(self): def collapseRequests_fn(master, builder, brdict1, brdict2): # Allow all requests self.fail("Should never be called") return 
True self.bldr.getCollapseRequestsFn = lambda: collapseRequests_fn rows = [ fakedb.Builder(id=77, name='A'), fakedb.SourceStamp(id=234, codebase='A'), fakedb.Change(changeid=14, codebase='A', sourcestampid=234), fakedb.Buildset(id=30, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=234, buildsetid=30), fakedb.BuildRequest(id=19, buildsetid=30, builderid=77, priority=13, submitted_at=1300305712, results=-1), ] return self.do_request_collapse(rows, [19], []) BASE_ROWS = [ fakedb.Builder(id=77, name='A'), fakedb.SourceStamp(id=234, codebase='C'), fakedb.Buildset(id=30, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=234, buildsetid=30), fakedb.SourceStamp(id=235, codebase='C'), fakedb.Buildset(id=31, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=235, buildsetid=31), fakedb.SourceStamp(id=236, codebase='C'), fakedb.Buildset(id=32, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=236, buildsetid=32), fakedb.BuildRequest(id=19, buildsetid=30, builderid=77, priority=13, submitted_at=1300305712, results=-1), fakedb.BuildRequest(id=20, buildsetid=31, builderid=77, priority=13, submitted_at=1300305712, results=-1), fakedb.BuildRequest(id=21, buildsetid=32, builderid=77, priority=13, submitted_at=1300305712, results=-1), ] def test_collapseRequests_no_collapse(self): def collapseRequests_fn(master, builder, brdict1, brdict2): # Fail all collapse attempts return False self.bldr.getCollapseRequestsFn = lambda: collapseRequests_fn return self.do_request_collapse(self.BASE_ROWS, [21], []) def test_collapseRequests_collapse_all(self): def collapseRequests_fn(master, builder, brdict1, brdict2): # collapse all attempts return True self.bldr.getCollapseRequestsFn = lambda: collapseRequests_fn return self.do_request_collapse(self.BASE_ROWS, [21], [19, 20]) def 
test_collapseRequests_collapse_all_duplicates(self): def collapseRequests_fn(master, builder, brdict1, brdict2): # collapse all attempts return True self.bldr.getCollapseRequestsFn = lambda: collapseRequests_fn return self.do_request_collapse(self.BASE_ROWS, [21, 21], [19, 20]) # As documented: # Sourcestamps are compatible if all of the below conditions are met: # # * Their codebase, branch, project, and repository attributes match exactly # * Neither source stamp has a patch (e.g., from a try scheduler) # * Either both source stamps are associated with changes, or neither are associated with # changes but they have matching revisions. def makeBuildRequestRows(self, brid, bsid, changeid, ssid, codebase, branch=None, project=None, repository=None, patchid=None, revision=None, bs_properties=None): rows = [ fakedb.SourceStamp(id=ssid, codebase=codebase, branch=branch, project=project, repository=repository, patchid=patchid, revision=revision), fakedb.Buildset(id=bsid, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=ssid, buildsetid=bsid), fakedb.BuildRequest(id=brid, buildsetid=bsid, builderid=77, priority=13, submitted_at=1300305712, results=-1), ] if changeid: rows.append( fakedb.Change(changeid=changeid, branch='trunk', revision='9283', repository='svn://...', project='world-domination', sourcestampid=ssid) ) if patchid: rows.append( fakedb.Patch(id=patchid, patch_base64='aGVsbG8sIHdvcmxk', patch_author='bar', patch_comment='foo', subdir='/foo', patchlevel=3)) if bs_properties: for prop_name, prop_value in bs_properties.items(): rows.append( fakedb.BuildsetProperty(buildsetid=bsid, property_name=prop_name, property_value=json.dumps(prop_value)), ) return rows @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_codebases(self): rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C') rows += 
self.makeBuildRequestRows(19, 119, None, 210, 'C') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [19, 20]) @defer.inlineCallbacks def test_collapseRequests_collapse_default_does_not_collapse_older(self): rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(21, 121, None, 221, 'C') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [19], []) yield self.do_request_collapse(rows, [20], [19]) yield self.do_request_collapse(rows, [21], [20]) @defer.inlineCallbacks def test_collapseRequests_collapse_default_does_not_collapse_concurrent_claims(self): rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(21, 121, None, 221, 'C') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C') claimed = [] @defer.inlineCallbacks def collapse_fn(master, builder, brdict1, brdict2): if not claimed: yield self.master.data.updates.claimBuildRequests([20]) claimed.append(20) res = yield Builder._defaultCollapseRequestFn(master, builder, brdict1, brdict2) return res self.bldr.getCollapseRequestsFn = lambda: collapse_fn yield self.do_request_collapse(rows, [21], [19]) @defer.inlineCallbacks def test_collapseRequests_collapse_default_does_not_collapse_scheduler_props(self): rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(21, 121, None, 221, 'C', bs_properties={'prop': ('value', 'Scheduler')}) rows += self.makeBuildRequestRows(20, 120, None, 220, 'C', bs_properties={'prop': ('value', 'Other source')}) rows += self.makeBuildRequestRows(19, 119, None, 219, 'C', bs_properties={'prop': ('value2', 
'Scheduler')}) rows += self.makeBuildRequestRows(18, 118, None, 218, 'C', bs_properties={'prop': ('value', 'Scheduler')}) rows += self.makeBuildRequestRows(17, 117, None, 217, 'C', bs_properties={'prop': ('value3', 'Other source')}) rows += self.makeBuildRequestRows(16, 116, None, 216, 'C') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn # only the same property coming from a scheduler is matched yield self.do_request_collapse(rows, [21], [18]) # only takes into account properties coming from scheduler yield self.do_request_collapse(rows, [20], [16, 17]) @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_codebases_branches(self): rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A', 'br1') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C', 'br2') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C', 'br2') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C', 'br3') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [19]) @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_codebases_repository(self): rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A', None, 'p1') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C', None, 'p2') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C', None, 'p2') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C', None, 'p3') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [19]) @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_codebases_projects(self): rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A', None, None, 'project1') rows += 
self.makeBuildRequestRows(21, 121, None, 221, 'C', None, None, 'project2') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C', None, None, 'project2') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C', None, None, 'project3') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [19]) # * Neither source stamp has a patch (e.g., from a try scheduler) @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_a_patch(self): rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C', patchid=123) rows += self.makeBuildRequestRows(20, 120, None, 220, 'C') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [20]) # * Either both source stamps are associated with changes.. @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_changes(self): rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A') rows += self.makeBuildRequestRows(21, 121, 123, 221, 'C') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C') rows += self.makeBuildRequestRows(20, 120, 124, 220, 'C') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [20]) # * ... or neither are associated with changes but they have matching revisions. 
@defer.inlineCallbacks def test_collapseRequests_collapse_default_with_non_matching_revision(self): rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C', revision='abcd1234') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [20]) class TestSourceStamp(unittest.TestCase): def test_asdict_minimal(self): ssdatadict = { 'ssid': '123', 'branch': None, 'revision': None, 'patch': None, 'repository': 'testrepo', 'codebase': 'testcodebase', 'project': 'testproject', 'created_at': datetime.datetime(2019, 4, 1, 23, 38, 33, 154354), } ss = buildrequest.TempSourceStamp(ssdatadict) self.assertEqual(ss.asDict(), { 'branch': None, 'codebase': 'testcodebase', 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'project': 'testproject', 'repository': 'testrepo', 'revision': None }) def test_asdict_no_patch(self): ssdatadict = { 'ssid': '123', 'branch': 'testbranch', 'revision': 'testrev', 'patch': None, 'repository': 'testrepo', 'codebase': 'testcodebase', 'project': 'testproject', 'created_at': datetime.datetime(2019, 4, 1, 23, 38, 33, 154354), } ss = buildrequest.TempSourceStamp(ssdatadict) self.assertEqual(ss.asDict(), { 'branch': 'testbranch', 'codebase': 'testcodebase', 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'project': 'testproject', 'repository': 'testrepo', 'revision': 'testrev', }) def test_asdict_with_patch(self): ssdatadict = { 'ssid': '123', 'branch': 'testbranch', 'revision': 'testrev', 'patch': { 'patchid': 1234, 'body': b'testbody', 'level': 2, 'author': 'testauthor', 'comment': 'testcomment', 'subdir': 
'testsubdir', }, 'repository': 'testrepo', 'codebase': 'testcodebase', 'project': 'testproject', 'created_at': datetime.datetime(2019, 4, 1, 23, 38, 33, 154354), } ss = buildrequest.TempSourceStamp(ssdatadict) self.assertEqual(ss.asDict(), { 'branch': 'testbranch', 'codebase': 'testcodebase', 'patch_author': 'testauthor', 'patch_body': b'testbody', 'patch_comment': 'testcomment', 'patch_level': 2, 'patch_subdir': 'testsubdir', 'project': 'testproject', 'repository': 'testrepo', 'revision': 'testrev' }) class TestBuildRequest(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_fromBrdict(self): master = fakemaster.make_master(self, wantData=True, wantDb=True) master.db.insertTestData([ fakedb.Builder(id=77, name='bldr'), fakedb.SourceStamp(id=234, branch='trunk', revision='9284', repository='svn://...', project='world-domination'), fakedb.Change(changeid=13, branch='trunk', revision='9283', repository='svn://...', project='world-domination', sourcestampid=234), fakedb.Buildset(id=539, reason='triggered'), fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=234), fakedb.BuildsetProperty(buildsetid=539, property_name='x', property_value='[1, "X"]'), fakedb.BuildsetProperty(buildsetid=539, property_name='y', property_value='[2, "Y"]'), fakedb.BuildRequest(id=288, buildsetid=539, builderid=77, priority=13, submitted_at=1200000000), ]) # use getBuildRequest to minimize the risk from changes to the format # of the brdict brdict = yield master.db.buildrequests.getBuildRequest(288) br = yield buildrequest.BuildRequest.fromBrdict(master, brdict) # check enough of the source stamp to verify it found the changes self.assertEqual([ss.ssid for ss in br.sources.values()], [234]) self.assertEqual(br.reason, 'triggered') self.assertEqual(br.properties.getProperty('x'), 1) self.assertEqual(br.properties.getProperty('y'), 2) self.assertEqual(br.submittedAt, 1200000000) self.assertEqual(br.buildername, 'bldr') 
self.assertEqual(br.priority, 13) self.assertEqual(br.id, 288) self.assertEqual(br.bsid, 539) @defer.inlineCallbacks def test_fromBrdict_submittedAt_NULL(self): master = fakemaster.make_master(self, wantData=True, wantDb=True) master.db.insertTestData([ fakedb.Builder(id=77, name='bldr'), fakedb.SourceStamp(id=234, branch='trunk', revision='9284', repository='svn://...', project='world-domination'), fakedb.Buildset(id=539, reason='triggered'), fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=234), fakedb.BuildRequest(id=288, buildsetid=539, builderid=77, priority=13, submitted_at=None), ]) # use getBuildRequest to minimize the risk from changes to the format # of the brdict brdict = yield master.db.buildrequests.getBuildRequest(288) br = yield buildrequest.BuildRequest.fromBrdict(master, brdict) # remaining fields assumed to be checked in test_fromBrdict self.assertEqual(br.submittedAt, None) def test_fromBrdict_no_sourcestamps(self): master = fakemaster.make_master(self, wantData=True, wantDb=True) master.db.insertTestData([ fakedb.Builder(id=78, name='not important'), fakedb.Buildset(id=539, reason='triggered'), # buildset has no sourcestamps fakedb.BuildRequest(id=288, buildsetid=539, builderid=78, priority=0, submitted_at=None), ]) # use getBuildRequest to minimize the risk from changes to the format # of the brdict d = master.db.buildrequests.getBuildRequest(288) d.addCallback(lambda brdict: buildrequest.BuildRequest.fromBrdict(master, brdict)) return self.assertFailure(d, AssertionError) @defer.inlineCallbacks def test_fromBrdict_multiple_sourcestamps(self): master = fakemaster.make_master(self, wantData=True, wantDb=True) master.db.insertTestData([ fakedb.Builder(id=77, name='bldr'), fakedb.SourceStamp(id=234, branch='trunk', revision='9283', repository='svn://a..', codebase='A', project='world-domination'), fakedb.Change(changeid=13, branch='trunk', revision='9283', repository='svn://a..', codebase='A', project='world-domination', 
sourcestampid=234), fakedb.SourceStamp(id=235, branch='trunk', revision='9284', repository='svn://b..', codebase='B', project='world-domination'), fakedb.Change(changeid=14, branch='trunk', revision='9284', repository='svn://b..', codebase='B', project='world-domination', sourcestampid=235), fakedb.Buildset(id=539, reason='triggered'), fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=234), fakedb.BuildsetProperty(buildsetid=539, property_name='x', property_value='[1, "X"]'), fakedb.BuildsetProperty(buildsetid=539, property_name='y', property_value='[2, "Y"]'), fakedb.BuildRequest(id=288, buildsetid=539, builderid=77, priority=13, submitted_at=1200000000), ]) # use getBuildRequest to minimize the risk from changes to the format # of the brdict brdict = yield master.db.buildrequests.getBuildRequest(288) br = yield buildrequest.BuildRequest.fromBrdict(master, brdict) self.assertEqual(br.reason, 'triggered') self.assertEqual(br.properties.getProperty('x'), 1) self.assertEqual(br.properties.getProperty('y'), 2) self.assertEqual(br.submittedAt, 1200000000) self.assertEqual(br.buildername, 'bldr') self.assertEqual(br.priority, 13) self.assertEqual(br.id, 288) self.assertEqual(br.bsid, 539) @defer.inlineCallbacks def test_mergeSourceStampsWith_common_codebases(self): """ This testcase has two buildrequests Request Change Codebase Revision Comment ---------------------------------------------------------------------- 288 13 A 9283 289 15 A 9284 288 14 B 9200 289 16 B 9201 -------------------------------- After merged in Build: Source1 has rev 9284 and contains changes 13 and 15 from repository svn://a Source2 has rev 9201 and contains changes 14 and 16 from repository svn://b """ brs = [] # list of buildrequests master = fakemaster.make_master(self, wantData=True, wantDb=True) master.db.insertTestData([ fakedb.Builder(id=77, name='bldr'), fakedb.SourceStamp(id=234, branch='trunk', revision='9283', repository='svn://a..', codebase='A', project='world-domination'), 
fakedb.Change(changeid=13, branch='trunk', revision='9283', repository='svn://a..', codebase='A', project='world-domination', sourcestampid=234), fakedb.SourceStamp(id=235, branch='trunk', revision='9200', repository='svn://b..', codebase='B', project='world-domination'), fakedb.Change(changeid=14, branch='trunk', revision='9200', repository='svn://b..', codebase='A', project='world-domination', sourcestampid=235), fakedb.SourceStamp(id=236, branch='trunk', revision='9284', repository='svn://a..', codebase='A', project='world-domination'), fakedb.Change(changeid=15, branch='trunk', revision='9284', repository='svn://a..', codebase='A', project='world-domination', sourcestampid=236), fakedb.SourceStamp(id=237, branch='trunk', revision='9201', repository='svn://b..', codebase='B', project='world-domination'), fakedb.Change(changeid=16, branch='trunk', revision='9201', repository='svn://b..', codebase='B', project='world-domination', sourcestampid=237), fakedb.Buildset(id=539, reason='triggered'), fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=234), fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=235), fakedb.BuildRequest(id=288, buildsetid=539, builderid=77), fakedb.Buildset(id=540, reason='triggered'), fakedb.BuildsetSourceStamp(buildsetid=540, sourcestampid=236), fakedb.BuildsetSourceStamp(buildsetid=540, sourcestampid=237), fakedb.BuildRequest(id=289, buildsetid=540, builderid=77), ]) # use getBuildRequest to minimize the risk from changes to the format # of the brdict brdict = yield master.db.buildrequests.getBuildRequest(288) res = yield buildrequest.BuildRequest.fromBrdict(master, brdict) brs.append(res) brdict = yield master.db.buildrequests.getBuildRequest(289) res = yield buildrequest.BuildRequest.fromBrdict(master, brdict) brs.append(res) sources = brs[0].mergeSourceStampsWith(brs[1:]) source1 = source2 = None for source in sources: if source.codebase == 'A': source1 = source if source.codebase == 'B': source2 = source 
self.assertFalse(source1 is None) self.assertEqual(source1.revision, '9284') self.assertFalse(source2 is None) self.assertEqual(source2.revision, '9201') @defer.inlineCallbacks def test_canBeCollapsed_different_codebases_raises_error(self): """ This testcase has two buildrequests Request Change Codebase Revision Comment ---------------------------------------------------------------------- 288 17 C 1800 request 1 has repo not in request 2 289 18 D 2100 request 2 has repo not in request 1 -------------------------------- Merge cannot be performed and raises error: Merging requests requires both requests to have the same codebases """ brDicts = [] # list of buildrequests dictionary master = fakemaster.make_master(self, wantData=True, wantDb=True) master.db.insertTestData([ fakedb.Builder(id=77, name='bldr'), fakedb.SourceStamp(id=238, branch='trunk', revision='1800', repository='svn://c..', codebase='C', project='world-domination'), fakedb.Change(changeid=17, branch='trunk', revision='1800', repository='svn://c..', codebase='C', project='world-domination', sourcestampid=238), fakedb.SourceStamp(id=239, branch='trunk', revision='2100', repository='svn://d..', codebase='D', project='world-domination'), fakedb.Change(changeid=18, branch='trunk', revision='2100', repository='svn://d..', codebase='D', project='world-domination', sourcestampid=239), fakedb.Buildset(id=539, reason='triggered'), fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=238), fakedb.BuildRequest(id=288, buildsetid=539, builderid=77), fakedb.Buildset(id=540, reason='triggered'), fakedb.BuildsetSourceStamp(buildsetid=540, sourcestampid=239), fakedb.BuildRequest(id=289, buildsetid=540, builderid=77), ]) # use getBuildRequest to minimize the risk from changes to the format # of the brdict req = yield master.db.buildrequests.getBuildRequest(288) brDicts.append(req) req = yield master.db.buildrequests.getBuildRequest(289) brDicts.append(req) can_collapse = \ yield 
buildrequest.BuildRequest.canBeCollapsed(master, brDicts[0], brDicts[1]) self.assertEqual(can_collapse, False) buildbot-3.4.0/master/buildbot/test/unit/process/test_buildrequestdistributor.py000066400000000000000000001041131413250514000304440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import random import mock from twisted.internet import defer from twisted.python import failure from twisted.trial import unittest from buildbot import config from buildbot.db import buildrequests from buildbot.process import buildrequestdistributor from buildbot.process import factory from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.warnings import assertProducesWarning from buildbot.util import epoch2datetime from buildbot.util.eventual import fireEventually from buildbot.warnings import DeprecatedApiWarning def nth_worker(n): def pick_nth_by_name(builder, workers=None, br=None): if workers is None: workers = builder workers = workers[:] workers.sort(key=lambda a: a.name) return workers[n] return pick_nth_by_name class TestBRDBase(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.botmaster = mock.Mock(name='botmaster') self.botmaster.builders = {} self.builders = {} def 
prioritizeBuilders(master, builders): # simple sort-by-name by default return sorted(builders, key=lambda b1: b1.name) self.master = self.botmaster.master = \ fakemaster.make_master(self, wantData=True, wantDb=True) self.master.caches = fakemaster.FakeCaches() self.master.config.prioritizeBuilders = prioritizeBuilders self.brd = buildrequestdistributor.BuildRequestDistributor( self.botmaster) self.brd.parent = self.botmaster self.brd.startService() # a collection of rows that would otherwise clutter up every test self.base_rows = [ fakedb.SourceStamp(id=21), fakedb.Builder(id=77, name='A'), fakedb.Buildset(id=11, reason='because'), fakedb.BuildsetSourceStamp(sourcestampid=21, buildsetid=11), ] def tearDown(self): if self.brd.running: return self.brd.stopService() return None def make_workers(self, worker_count): rows = self.base_rows[:] for i in range(worker_count): self.addWorkers({'test-worker%d' % i: 1}) rows.append(fakedb.Buildset(id=100 + i, reason='because')) rows.append( fakedb.BuildsetSourceStamp(buildsetid=100 + i, sourcestampid=21)) rows.append( fakedb.BuildRequest(id=10 + i, buildsetid=100 + i, builderid=77)) return rows def addWorkers(self, workerforbuilders): """C{workerforbuilders} maps name : available""" for name, avail in workerforbuilders.items(): wfb = mock.Mock(spec=['isAvailable'], name=name) wfb.name = name wfb.isAvailable.return_value = avail for bldr in self.builders.values(): bldr.workers.append(wfb) @defer.inlineCallbacks def createBuilder(self, name, builderid=None, builder_config=None): if builderid is None: b = fakedb.Builder(name=name) yield self.master.db.insertTestData([b]) builderid = b.id bldr = mock.Mock(name=name) bldr.name = name self.botmaster.builders[name] = bldr self.builders[name] = bldr def maybeStartBuild(worker, builds): self.startedBuilds.append((worker.name, builds)) d = defer.Deferred() self.reactor.callLater(0, d.callback, True) return d bldr.maybeStartBuild = maybeStartBuild bldr.getCollapseRequestsFn = lambda: 
False bldr.workers = [] bldr.getAvailableWorkers = lambda: [ w for w in bldr.workers if w.isAvailable()] bldr.getBuilderId = lambda: (builderid) if builder_config is None: bldr.config.nextWorker = None bldr.config.nextBuild = None else: bldr.config = builder_config def canStartBuild(*args): can = bldr.config.canStartBuild return not can or can(*args) bldr.canStartBuild = canStartBuild return bldr @defer.inlineCallbacks def addBuilders(self, names): self.startedBuilds = [] for name in names: yield self.createBuilder(name) def assertMyClaims(self, brids): self.assertEqual(self.master.data.updates.claimedBuildRequests, set(brids)) class Test(TestBRDBase): def checkAllCleanedUp(self): # check that the BRD didn't end with a stuck lock or in the 'active' state (which would mean # it ended without unwinding correctly) self.assertEqual(self.brd.pending_builders_lock.locked, False) self.assertEqual(self.brd.activity_lock.locked, False) self.assertEqual(self.brd.active, False) def useMock_maybeStartBuildsOnBuilder(self): # sets up a mock "maybeStartBuildsOnBuilder" so we can track # how the method gets invoked # keep track of the calls to brd.maybeStartBuildsOnBuilder self.maybeStartBuildsOnBuilder_calls = [] def maybeStartBuildsOnBuilder(bldr): self.assertIdentical(self.builders[bldr.name], bldr) self.maybeStartBuildsOnBuilder_calls.append(bldr.name) return fireEventually() self.brd._maybeStartBuildsOnBuilder = maybeStartBuildsOnBuilder def removeBuilder(self, name): del self.builders[name] del self.botmaster.builders[name] # tests @defer.inlineCallbacks def test_maybeStartBuildsOn_simple(self): self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(['bldr1']) yield self.brd.maybeStartBuildsOn(['bldr1']) yield self.brd._waitForFinish() self.assertEqual(self.maybeStartBuildsOnBuilder_calls, ['bldr1']) self.checkAllCleanedUp() @defer.inlineCallbacks def test_maybeStartBuildsOn_parallel(self): # test 15 "parallel" invocations of maybeStartBuildsOn, with a # _sortBuilders 
that takes a while. This is a regression test for bug # 1979. builders = ['bldr%02d' % i for i in range(15)] def slow_sorter(master, bldrs): bldrs.sort(key=lambda b1: b1.name) d = defer.Deferred() self.reactor.callLater(0, d.callback, bldrs) def done(_): return _ d.addCallback(done) return d self.master.config.prioritizeBuilders = slow_sorter self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(builders) for bldr in builders: yield self.brd.maybeStartBuildsOn([bldr]) yield self.brd._waitForFinish() self.assertEqual(self.maybeStartBuildsOnBuilder_calls, builders) self.checkAllCleanedUp() @defer.inlineCallbacks def test_maybeStartBuildsOn_exception(self): self.addBuilders(['bldr1']) def _maybeStartBuildsOnBuilder(n): # fail slowly, so that the activity loop doesn't exit too soon d = defer.Deferred() self.reactor.callLater(0, d.errback, failure.Failure(RuntimeError("oh noes"))) return d self.brd._maybeStartBuildsOnBuilder = _maybeStartBuildsOnBuilder yield self.brd.maybeStartBuildsOn(['bldr1']) yield self.brd._waitForFinish() self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) self.checkAllCleanedUp() @defer.inlineCallbacks def test_maybeStartBuildsOn_collapsing(self): self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(['bldr1', 'bldr2', 'bldr3']) yield self.brd.maybeStartBuildsOn(['bldr3']) yield self.brd.maybeStartBuildsOn(['bldr2', 'bldr1']) yield self.brd.maybeStartBuildsOn(['bldr4']) # should be ignored yield self.brd.maybeStartBuildsOn(['bldr2']) # already queued - ignored yield self.brd.maybeStartBuildsOn(['bldr3', 'bldr2']) yield self.brd._waitForFinish() # bldr3 gets invoked twice, since it's considered to have started # already when the first call to maybeStartBuildsOn returns self.assertEqual(self.maybeStartBuildsOnBuilder_calls, ['bldr3', 'bldr1', 'bldr2', 'bldr3']) self.checkAllCleanedUp() @defer.inlineCallbacks def test_maybeStartBuildsOn_builders_missing(self): self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(['bldr1', 
'bldr2', 'bldr3']) yield self.brd.maybeStartBuildsOn(['bldr1', 'bldr2', 'bldr3']) # bldr1 is already run, so surreptitiously remove the other # two - nothing should crash, but the builders should not run self.removeBuilder('bldr2') self.removeBuilder('bldr3') yield self.brd._waitForFinish() self.assertEqual(self.maybeStartBuildsOnBuilder_calls, ['bldr1']) self.checkAllCleanedUp() @defer.inlineCallbacks def do_test_sortBuilders(self, prioritizeBuilders, oldestRequestTimes, expected, returnDeferred=False): self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(list(oldestRequestTimes)) self.master.config.prioritizeBuilders = prioritizeBuilders def mklambda(t): # work around variable-binding issues if returnDeferred: return lambda: defer.succeed(t) return lambda: t for n, t in oldestRequestTimes.items(): if t is not None: t = epoch2datetime(t) self.builders[n].getOldestRequestTime = mklambda(t) result = yield self.brd._sortBuilders(list(oldestRequestTimes)) self.assertEqual(result, expected) self.checkAllCleanedUp() def test_sortBuilders_default_sync(self): return self.do_test_sortBuilders(None, # use the default sort dict(bldr1=777, bldr2=999, bldr3=888), ['bldr1', 'bldr3', 'bldr2']) def test_sortBuilders_default_asyn(self): return self.do_test_sortBuilders(None, # use the default sort dict(bldr1=777, bldr2=999, bldr3=888), ['bldr1', 'bldr3', 'bldr2'], returnDeferred=True) def test_sortBuilders_default_None(self): return self.do_test_sortBuilders(None, # use the default sort dict( bldr1=777, bldr2=None, bldr3=888), ['bldr1', 'bldr3', 'bldr2']) def test_sortBuilders_custom(self): def prioritizeBuilders(master, builders): self.assertIdentical(master, self.master) return sorted(builders, key=lambda b: b.name) return self.do_test_sortBuilders(prioritizeBuilders, dict(bldr1=1, bldr2=1, bldr3=1), ['bldr1', 'bldr2', 'bldr3']) def test_sortBuilders_custom_async(self): def prioritizeBuilders(master, builders): self.assertIdentical(master, self.master) return 
defer.succeed(sorted(builders, key=lambda b: b.name)) return self.do_test_sortBuilders(prioritizeBuilders, dict(bldr1=1, bldr2=1, bldr3=1), ['bldr1', 'bldr2', 'bldr3']) @defer.inlineCallbacks def test_sortBuilders_custom_exception(self): self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(['x', 'y']) def fail(m, b): raise RuntimeError("oh noes") self.master.config.prioritizeBuilders = fail # expect to get the builders back in the same order in the event of an # exception result = yield self.brd._sortBuilders(['y', 'x']) self.assertEqual(result, ['y', 'x']) # and expect the exception to be logged self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_stopService(self): # check that stopService waits for a builder run to complete, but does not # allow a subsequent run to start self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(['A', 'B']) oldMSBOB = self.brd._maybeStartBuildsOnBuilder def maybeStartBuildsOnBuilder(bldr): d = oldMSBOB(bldr) stop_d = self.brd.stopService() stop_d.addCallback(lambda _: self.maybeStartBuildsOnBuilder_calls.append('(stopped)')) d.addCallback(lambda _: self.maybeStartBuildsOnBuilder_calls.append('finished')) return d self.brd._maybeStartBuildsOnBuilder = maybeStartBuildsOnBuilder # start both builds; A should start and complete *before* the service stops, # and B should not run. 
yield self.brd.maybeStartBuildsOn(['A', 'B']) yield self.brd._waitForFinish() self.assertEqual(self.maybeStartBuildsOnBuilder_calls, ['A', 'finished', '(stopped)']) class TestMaybeStartBuilds(TestBRDBase): @defer.inlineCallbacks def setUp(self): yield super().setUp() self.startedBuilds = [] self.bldr = yield self.createBuilder('A', builderid=77) self.builders['A'] = self.bldr def assertBuildsStarted(self, exp): # munge builds_started into (worker, [brids]) builds_started = [ (worker, [br.id for br in breqs]) for (worker, breqs) in self.startedBuilds] self.assertEqual(builds_started, exp) # _maybeStartBuildsOnBuilder @defer.inlineCallbacks def do_test_maybeStartBuildsOnBuilder(self, rows=None, exp_claims=None, exp_builds=None): rows = rows or [] exp_claims = exp_claims or [] exp_builds = exp_builds or [] yield self.master.db.insertTestData(rows) yield self.brd._maybeStartBuildsOnBuilder(self.bldr) self.assertMyClaims(exp_claims) self.assertBuildsStarted(exp_builds) @defer.inlineCallbacks def test_no_buildrequests(self): self.addWorkers({'test-worker11': 1}) yield self.do_test_maybeStartBuildsOnBuilder(exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_no_workerforbuilders(self): rows = [ fakedb.Builder(id=78, name='bldr'), fakedb.BuildRequest(id=11, buildsetid=10, builderid=78), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_limited_by_workers(self): self.addWorkers({'test-worker1': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10], exp_builds=[('test-worker1', [10])]) @defer.inlineCallbacks def test_sorted_by_submit_time(self): # same as "limited_by_workers" but with rows swapped self.addWorkers({'test-worker1': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, 
buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10], exp_builds=[('test-worker1', [10])]) @defer.inlineCallbacks def test_limited_by_available_workers(self): self.addWorkers({'test-worker1': 0, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10], exp_builds=[('test-worker2', [10])]) @defer.inlineCallbacks def test_slow_db(self): # test what happens if the "getBuildRequests" fetch takes a "long time" self.addWorkers({'test-worker1': 1}) # wrap to simulate a "long" db access old_getBuildRequests = self.master.db.buildrequests.getBuildRequests def longGetBuildRequests(*args, **kwargs): res_d = old_getBuildRequests(*args, **kwargs) long_d = defer.Deferred() long_d.addCallback(lambda _: res_d) self.reactor.callLater(0, long_d.callback, None) return long_d self.master.db.buildrequests.getBuildRequests = longGetBuildRequests rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10], exp_builds=[('test-worker1', [10])]) @defer.inlineCallbacks def test_limited_by_canStartBuild(self): """Set the 'canStartBuild' value in the config to something that limits the possible options.""" self.bldr.config.nextWorker = nth_worker(-1) pairs_tested = [] def _canStartBuild(worker, breq): result = (worker.name, breq.id) pairs_tested.append(result) allowed = [ ("test-worker1", 10), ("test-worker3", 11), ] return result in allowed self.bldr.config.canStartBuild = _canStartBuild self.addWorkers( {'test-worker1': 1, 
'test-worker2': 1, 'test-worker3': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), fakedb.BuildRequest(id=12, buildsetid=11, builderid=77, submitted_at=140000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10, 11], exp_builds=[('test-worker1', [10]), ('test-worker3', [11])]) # we expect brids in order (10-11-12), # with each searched in reverse order of workers (3-2-1) available (due # to nth_worker(-1)) self.assertEqual(pairs_tested, [ ('test-worker3', 10), ('test-worker2', 10), ('test-worker1', 10), ('test-worker3', 11), ('test-worker2', 12)]) @defer.inlineCallbacks def test_limited_by_canStartBuild_deferreds(self): # Another variant that returns Deferred types, self.bldr.config.nextWorker = nth_worker(-1) pairs_tested = [] def _canStartBuild(worker, breq): result = (worker.name, breq.id) pairs_tested.append(result) allowed = [ ("test-worker1", 10), ("test-worker3", 11), ] return defer.succeed(result in allowed) self.bldr.config.canStartBuild = _canStartBuild self.addWorkers( {'test-worker1': 1, 'test-worker2': 1, 'test-worker3': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), fakedb.BuildRequest(id=12, buildsetid=11, builderid=77, submitted_at=140000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10, 11], exp_builds=[ ('test-worker1', [10]), ('test-worker3', [11]) ]) # we expect brids in order (10-11-12), # with worker2 unable to pair self.assertEqual(pairs_tested, [ ('test-worker3', 10), ('test-worker2', 10), ('test-worker1', 10), ('test-worker3', 11), ('test-worker2', 12)]) @defer.inlineCallbacks def test_unlimited(self): self.bldr.config.nextWorker = nth_worker(-1) self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = 
self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10, 11], exp_builds=[('test-worker2', [10]), ('test-worker1', [11])]) @defer.inlineCallbacks def test_bldr_maybeStartBuild_fails_always(self): self.bldr.config.nextWorker = nth_worker(-1) # the builder fails to start the build; we'll see that the build # was requested, but the brids will get claimed again def maybeStartBuild(worker, builds): self.startedBuilds.append((worker.name, builds)) return defer.succeed(False) self.bldr.maybeStartBuild = maybeStartBuild self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, # claimed again so none taken! 
exp_claims=[], exp_builds=[('test-worker2', [10]), ('test-worker1', [11])]) @defer.inlineCallbacks def test_bldr_maybeStartBuild_fails_once(self): self.bldr.config.nextWorker = nth_worker(-1) # the builder fails to start the build; we'll see that the build # was requested, but the brids will get claimed again start_build_results = [False, True, True] def maybeStartBuild(worker, builds): self.startedBuilds.append((worker.name, builds)) return defer.succeed(start_build_results.pop(0)) self.bldr.maybeStartBuild = maybeStartBuild self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.master.db.insertTestData(rows) # first time around, only #11 stays claimed yield self.brd._maybeStartBuildsOnBuilder(self.bldr) self.assertMyClaims([11]) # claimed again so none taken! self.assertBuildsStarted( [('test-worker2', [10]), ('test-worker1', [11])]) # second time around the #10 will pass, adding another request and it # is claimed yield self.brd._maybeStartBuildsOnBuilder(self.bldr) self.assertMyClaims([10, 11]) self.assertBuildsStarted( [('test-worker2', [10]), ('test-worker1', [11]), ('test-worker2', [10])]) @defer.inlineCallbacks def test_limited_by_requests(self): self.bldr.config.nextWorker = nth_worker(1) self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[11], exp_builds=[('test-worker2', [11])]) @defer.inlineCallbacks def test_nextWorker_None(self): self.bldr.config.nextWorker = lambda _1, _2, _3: defer.succeed(None) self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], 
exp_builds=[]) @defer.inlineCallbacks def test_nextWorker_bogus(self): self.bldr.config.nextWorker = lambda _1, _2, _3: defer.succeed( mock.Mock()) self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_nextBuild_None(self): self.bldr.config.nextBuild = lambda _1, _2: defer.succeed(None) self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_nextBuild_bogus(self): self.bldr.config.nextBuild = lambda _1, _2: mock.Mock() self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_nextBuild_fails(self): def nextBuildRaises(*args): raise RuntimeError("xx") self.bldr.config.nextBuild = nextBuildRaises self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] result = self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) yield result # check concurrency edge cases @defer.inlineCallbacks def test_claim_race(self): self.bldr.config.nextWorker = nth_worker(0) # fake a race condition on the buildrequests table old_claimBuildRequests = self.master.db.buildrequests.claimBuildRequests def claimBuildRequests(brids, claimed_at=None): # first, ensure this only happens the first time self.master.db.buildrequests.claimBuildRequests = old_claimBuildRequests # claim brid 10 for some other master assert 10 in brids 
self.master.db.buildrequests.fakeClaimBuildRequest(10, 136000, masterid=9999) # some other masterid # ..and fail return defer.fail(buildrequests.AlreadyClaimedError()) self.master.db.buildrequests.claimBuildRequests = claimBuildRequests self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), # will turn out to be claimed! fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[11], exp_builds=[('test-worker1', [11])]) # nextWorker @defer.inlineCallbacks def do_test_nextWorker(self, nextWorker, exp_choice=None, exp_warning=False): def makeBuilderConfig(): return config.BuilderConfig(name='bldrconf', workernames=['wk1', 'wk2'], builddir='bdir', factory=factory.BuildFactory(), nextWorker=nextWorker) if exp_warning: with assertProducesWarning(DeprecatedApiWarning, message_pattern=r"nextWorker now takes a 3rd argument"): builder_config = makeBuilderConfig() else: builder_config = makeBuilderConfig() self.bldr = yield self.createBuilder('B', builderid=78, builder_config=builder_config) for i in range(4): self.addWorkers({'test-worker%d' % i: 1}) rows = [ fakedb.SourceStamp(id=21), fakedb.Builder(id=78, name='B'), fakedb.Buildset(id=12, reason='because'), fakedb.BuildsetSourceStamp(sourcestampid=21, buildsetid=12), fakedb.BuildRequest(id=12, buildsetid=12, builderid=78), ] if exp_choice is None: exp_claims = [] exp_builds = [] else: exp_claims = [12] exp_builds = [('test-worker%d' % exp_choice, [12])] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=exp_claims, exp_builds=exp_builds) def test_nextWorker_gets_buildrequest(self): def nextWorker(bldr, lst, br=None): self.assertNotEqual(br, None) return self.do_test_nextWorker(nextWorker) def test_nextWorker_default(self): self.patch(random, 'choice', nth_worker(2)) return self.do_test_nextWorker(None, exp_choice=2) 
def test_nextWorker_simple(self): def nextWorker(bldr, lst, br=None): self.assertIdentical(bldr, self.bldr) return lst[1] return self.do_test_nextWorker(nextWorker, exp_choice=1) def test_nextWorker_deferred(self): def nextWorker(bldr, lst, br=None): self.assertIdentical(bldr, self.bldr) return defer.succeed(lst[1]) return self.do_test_nextWorker(nextWorker, exp_choice=1) @defer.inlineCallbacks def test_nextWorker_exception(self): def nextWorker(bldr, lst, br=None): raise RuntimeError("") yield self.do_test_nextWorker(nextWorker) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) @defer.inlineCallbacks def test_nextWorker_failure(self): def nextWorker(bldr, lst, br=None): return defer.fail(failure.Failure(RuntimeError())) yield self.do_test_nextWorker(nextWorker) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) # _nextBuild @defer.inlineCallbacks def do_test_nextBuild(self, nextBuild, exp_choice=None): self.bldr.config.nextWorker = nth_worker(-1) self.bldr.config.nextBuild = nextBuild rows = self.make_workers(4) exp_claims = [] exp_builds = [] if exp_choice is not None: worker = 3 for choice in exp_choice: exp_claims.append(choice) exp_builds.append(('test-worker%d' % worker, [choice])) worker = worker - 1 yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=sorted(exp_claims), exp_builds=exp_builds) def test_nextBuild_default(self): "default chooses the first in the list, which should be the earliest" return self.do_test_nextBuild(None, exp_choice=[10, 11, 12, 13]) def test_nextBuild_simple(self): def nextBuild(bldr, lst): self.assertIdentical(bldr, self.bldr) return lst[-1] return self.do_test_nextBuild(nextBuild, exp_choice=[13, 12, 11, 10]) def test_nextBuild_deferred(self): def nextBuild(bldr, lst): self.assertIdentical(bldr, self.bldr) return defer.succeed(lst[-1]) return self.do_test_nextBuild(nextBuild, exp_choice=[13, 12, 11, 10]) def test_nextBuild_exception(self): def nextBuild(bldr, lst): raise RuntimeError("") 
result = self.do_test_nextBuild(nextBuild) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) return result def test_nextBuild_failure(self): def nextBuild(bldr, lst): return defer.fail(failure.Failure(RuntimeError())) result = self.do_test_nextBuild(nextBuild) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) return result buildbot-3.4.0/master/buildbot/test/unit/process/test_buildstep.py000066400000000000000000001402141413250514000254360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import reactor from twisted.internet.task import deferLater from twisted.python import log from twisted.trial import unittest from buildbot import locks from buildbot.interfaces import WorkerSetupError from buildbot.plugins import util from buildbot.process import buildstep from buildbot.process import properties from buildbot.process import remotecommand from buildbot.process.properties import renderer from buildbot.process.results import ALL_RESULTS from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.test.fake import fakebuild from buildbot.test.fake import fakemaster from buildbot.test.fake import worker from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config from buildbot.test.util import interfaces from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin from buildbot.util.eventual import eventually class NewStyleStep(buildstep.BuildStep): def run(self): pass class CustomActionBuildStep(buildstep.BuildStep): # The caller is expected to set the action attribute on the step def run(self): return self.action() class TestBuildStep(steps.BuildStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): class FakeBuildStep(buildstep.BuildStep): def run(self): d = defer.Deferred() eventually(d.callback, 0) # FIXME: this uses real reactor instead of fake one return d class SkippingBuildStep(buildstep.BuildStep): def run(self): return SKIPPED class LockBuildStep(buildstep.BuildStep): def __init__(self, testcase=None, lock_accesses=None, **kwargs): 
super().__init__(**kwargs) self.testcase = testcase self.lock_accesses = lock_accesses @defer.inlineCallbacks def run(self): botmaster = self.build.builder.botmaster real_master_lock = yield botmaster.getLockFromLockAccess(self.lock_accesses[0], self.build.config_version) real_worker_lock = yield botmaster.getLockFromLockAccess(self.lock_accesses[1], self.build.config_version) self.testcase.assertFalse(real_master_lock.isAvailable(self.testcase, self.lock_accesses[0])) self.testcase.assertIn('workername', real_worker_lock.locks) self.testcase.assertFalse(real_worker_lock.locks['workername'].isAvailable( self.testcase, self.lock_accesses[1])) return SUCCESS def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() # support def _setupWaterfallTest(self, hideStepIf, expect, expectedResult=SUCCESS): self.setupStep(TestBuildStep.FakeBuildStep(hideStepIf=hideStepIf)) self.expectOutcome(result=expectedResult) self.expectHidden(expect) # tests def test_nameIsntString(self): """ When BuildStep is passed a name that isn't a string, it reports a config error. """ with self.assertRaisesConfigError("BuildStep name must be a string"): buildstep.BuildStep(name=5) def test_unexpectedKeywordArgument(self): """ When BuildStep is passed an unknown keyword argument, it reports a config error. """ with self.assertRaisesConfigError( "__init__ got unexpected keyword argument(s) ['oogaBooga']"): buildstep.BuildStep(oogaBooga=5) def test_updateBuildSummaryPolicyDefaults(self): """ updateBuildSummaryPolicy builds default value according to resultsMixin parameters (flunkOnFailure..) 
""" step = buildstep.BuildStep() self.assertEqual(sorted(step.updateBuildSummaryPolicy), sorted([ EXCEPTION, RETRY, CANCELLED, FAILURE])) step = buildstep.BuildStep(warnOnWarnings=True) self.assertEqual(sorted(step.updateBuildSummaryPolicy), sorted([ EXCEPTION, RETRY, CANCELLED, FAILURE, WARNINGS])) step = buildstep.BuildStep(flunkOnFailure=False) self.assertEqual(sorted(step.updateBuildSummaryPolicy), sorted([ EXCEPTION, RETRY, CANCELLED])) step = buildstep.BuildStep(updateBuildSummaryPolicy=False) self.assertEqual(sorted(step.updateBuildSummaryPolicy), []) step = buildstep.BuildStep(updateBuildSummaryPolicy=True) self.assertEqual(sorted(step.updateBuildSummaryPolicy), sorted(ALL_RESULTS)) def test_updateBuildSummaryPolicyBadType(self): """ updateBuildSummaryPolicy raise ConfigError in case of bad type """ with self.assertRaisesConfigError("BuildStep updateBuildSummaryPolicy must be " "a list of result ids or boolean but it is 2"): buildstep.BuildStep(updateBuildSummaryPolicy=FAILURE) def test_getProperty(self): bs = buildstep.BuildStep() bs.build = fakebuild.FakeBuild() props = bs.build.properties = mock.Mock() bs.getProperty("xyz", 'b') props.getProperty.assert_called_with("xyz", 'b') bs.getProperty("xyz") props.getProperty.assert_called_with("xyz", None) def test_setProperty(self): bs = buildstep.BuildStep() bs.build = fakebuild.FakeBuild() props = bs.build.properties = mock.Mock() bs.setProperty("x", "y", "t") props.setProperty.assert_called_with("x", "y", "t", runtime=True) bs.setProperty("x", "abc", "test", runtime=True) props.setProperty.assert_called_with("x", "abc", "test", runtime=True) @defer.inlineCallbacks def test_renderableLocks(self): master_lock = locks.MasterLock("masterlock") worker_lock = locks.WorkerLock("workerlock") lock_accesses = [] @renderer def rendered_locks(props): master_access = locks.LockAccess(master_lock, 'counting') worker_access = locks.LockAccess(worker_lock, 'exclusive') lock_accesses.append(master_access) 
lock_accesses.append(worker_access) return [master_access, worker_access] self.setupStep(self.LockBuildStep(testcase=self, lock_accesses=lock_accesses, locks=rendered_locks)) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(len(lock_accesses), 2) botmaster = self.step.build.builder.botmaster real_master_lock = yield botmaster.getLockFromLockAccess(lock_accesses[0], self.build.config_version) real_worker_lock = yield botmaster.getLockFromLockAccess(lock_accesses[1], self.build.config_version) self.assertTrue(real_master_lock.isAvailable(self, lock_accesses[0])) self.assertIn('workername', real_worker_lock.locks) self.assertTrue(real_worker_lock.locks['workername'].isAvailable(self, lock_accesses[1])) def test_compare(self): lbs1 = buildstep.BuildStep(name="me") lbs2 = buildstep.BuildStep(name="me") lbs3 = buildstep.BuildStep(name="me2") self.assertEqual(lbs1, lbs2) self.assertNotEqual(lbs1, lbs3) def test_repr(self): self.assertEqual( repr(buildstep.BuildStep(name="me")), 'BuildStep(name=' + repr("me") + ')') self.assertEqual( repr(NewStyleStep(name="me")), 'NewStyleStep(name=' + repr("me") + ')') @defer.inlineCallbacks def test_regularLocks(self): master_lock = locks.MasterLock("masterlock") worker_lock = locks.WorkerLock("workerlock") lock_accesses = [locks.LockAccess(master_lock, 'counting'), locks.LockAccess(worker_lock, 'exclusive')] self.setupStep(self.LockBuildStep(testcase=self, lock_accesses=lock_accesses, locks=lock_accesses)) self.expectOutcome(result=SUCCESS) yield self.runStep() botmaster = self.step.build.builder.botmaster real_master_lock = yield botmaster.getLockFromLockAccess(lock_accesses[0], self.build.config_version) real_worker_lock = yield botmaster.getLockFromLockAccess(lock_accesses[1], self.build.config_version) self.assertTrue(real_master_lock.isAvailable(self, lock_accesses[0])) self.assertIn('workername', real_worker_lock.locks) self.assertTrue(real_worker_lock.locks['workername'].isAvailable(self, 
lock_accesses[1])) @defer.inlineCallbacks def test_cancelWhileLocksAvailable(self): def _owns_lock(step, lock): access = [step_access for step_lock, step_access in step.locks if step_lock == lock][0] return lock.isOwner(step, access) def _lock_available(step, lock): access = [step_access for step_lock, step_access in step.locks if step_lock == lock][0] return lock.isAvailable(step, access) lock1 = locks.MasterLock("masterlock1") real_lock1 = locks.RealMasterLock(lock1) lock2 = locks.MasterLock("masterlock2") real_lock2 = locks.RealMasterLock(lock2) stepa = self.setupStep(self.FakeBuildStep(locks=[ (real_lock1, locks.LockAccess(lock1, 'exclusive')) ])) stepb = self.setupStep(self.FakeBuildStep(locks=[ (real_lock2, locks.LockAccess(lock2, 'exclusive')) ])) stepc = self.setupStep(self.FakeBuildStep(locks=[ (real_lock1, locks.LockAccess(lock1, 'exclusive')), (real_lock2, locks.LockAccess(lock2, 'exclusive')) ])) stepd = self.setupStep(self.FakeBuildStep(locks=[ (real_lock1, locks.LockAccess(lock1, 'exclusive')), (real_lock2, locks.LockAccess(lock2, 'exclusive')) ])) # Start all the steps yield stepa.acquireLocks() yield stepb.acquireLocks() c_d = stepc.acquireLocks() d_d = stepd.acquireLocks() # Check that step a and step b have the locks self.assertTrue(_owns_lock(stepa, real_lock1)) self.assertTrue(_owns_lock(stepb, real_lock2)) # Check that step c does not have a lock self.assertFalse(_owns_lock(stepc, real_lock1)) self.assertFalse(_owns_lock(stepc, real_lock2)) # Check that step d does not have a lock self.assertFalse(_owns_lock(stepd, real_lock1)) self.assertFalse(_owns_lock(stepd, real_lock2)) # Release lock 1 stepa.releaseLocks() yield deferLater(reactor, 0, lambda: None) # lock1 should be available for step c self.assertTrue(_lock_available(stepc, real_lock1)) self.assertFalse(_lock_available(stepc, real_lock2)) self.assertFalse(_lock_available(stepd, real_lock1)) self.assertFalse(_lock_available(stepd, real_lock2)) # Cancel step c stepc.interrupt("cancelling") 
yield c_d # Check that step c does not have a lock self.assertFalse(_owns_lock(stepc, real_lock1)) self.assertFalse(_owns_lock(stepc, real_lock2)) # No lock should be available for step c self.assertFalse(_lock_available(stepc, real_lock1)) self.assertFalse(_lock_available(stepc, real_lock2)) # lock 1 should be available for step d self.assertTrue(_lock_available(stepd, real_lock1)) self.assertFalse(_lock_available(stepd, real_lock2)) # Release lock 2 stepb.releaseLocks() # Both locks should be available for step d self.assertTrue(_lock_available(stepd, real_lock1)) self.assertTrue(_lock_available(stepd, real_lock2)) # So it should run yield d_d # Check that step d owns the locks self.assertTrue(_owns_lock(stepd, real_lock1)) self.assertTrue(_owns_lock(stepd, real_lock2)) @defer.inlineCallbacks def test_multiple_cancel(self): step = self.setupStep(CustomActionBuildStep()) def double_interrupt(): step.interrupt('reason1') step.interrupt('reason2') return CANCELLED step.action = double_interrupt self.expectOutcome(result=CANCELLED) yield self.runStep() @defer.inlineCallbacks def test_runCommand(self): bs = buildstep.BuildStep() bs.worker = worker.FakeWorker(master=None) # master is not used here bs.remote = 'dummy' bs.build = fakebuild.FakeBuild() bs.build.builder.name = 'fake' cmd = remotecommand.RemoteShellCommand("build", ["echo", "hello"]) def run(*args, **kwargs): # check that runCommand sets step.cmd self.assertIdentical(bs.cmd, cmd) return SUCCESS cmd.run = run yield bs.runCommand(cmd) # check that step.cmd is cleared after the command runs self.assertEqual(bs.cmd, None) @defer.inlineCallbacks def test_run_command_after_interrupt(self): step = self.setupStep(CustomActionBuildStep()) cmd = remotecommand.RemoteShellCommand("build", ["echo", "hello"]) def run(*args, **kwargs): raise RuntimeError("Command must not be run when step is interrupted") cmd.run = run @defer.inlineCallbacks def interrupt_and_run_command(): step.interrupt('reason1') res = yield 
step.runCommand(cmd) return res step.action = interrupt_and_run_command self.expectOutcome(result=CANCELLED) yield self.runStep() @defer.inlineCallbacks def test_start_returns_SKIPPED(self): self.setupStep(self.SkippingBuildStep()) self.step.finished = mock.Mock() self.expectOutcome(result=SKIPPED, state_string='finished (skipped)') yield self.runStep() # 837: we want to specifically avoid calling finished() if skipping self.step.finished.assert_not_called() @defer.inlineCallbacks def test_doStepIf_false(self): self.setupStep(self.FakeBuildStep(doStepIf=False)) self.step.finished = mock.Mock() self.expectOutcome(result=SKIPPED, state_string='finished (skipped)') yield self.runStep() # 837: we want to specifically avoid calling finished() if skipping self.step.finished.assert_not_called() @defer.inlineCallbacks def test_doStepIf_renderable_false(self): @util.renderer def dostepif(props): return False self.setupStep(self.FakeBuildStep(doStepIf=dostepif)) self.step.finished = mock.Mock() self.expectOutcome(result=SKIPPED, state_string='finished (skipped)') yield self.runStep() # 837: we want to specifically avoid calling finished() if skipping self.step.finished.assert_not_called() @defer.inlineCallbacks def test_doStepIf_returns_false(self): self.setupStep(self.FakeBuildStep(doStepIf=lambda step: False)) self.step.finished = mock.Mock() self.expectOutcome(result=SKIPPED, state_string='finished (skipped)') yield self.runStep() # 837: we want to specifically avoid calling finished() if skipping self.step.finished.assert_not_called() @defer.inlineCallbacks def test_doStepIf_returns_deferred_false(self): self.setupStep(self.FakeBuildStep( doStepIf=lambda step: defer.succeed(False))) self.step.finished = mock.Mock() self.expectOutcome(result=SKIPPED, state_string='finished (skipped)') yield self.runStep() # 837: we want to specifically avoid calling finished() if skipping self.step.finished.assert_not_called() def test_hideStepIf_False(self): 
self._setupWaterfallTest(False, False) return self.runStep() def test_hideStepIf_True(self): self._setupWaterfallTest(True, True) return self.runStep() @defer.inlineCallbacks def test_hideStepIf_Callable_False(self): called = [False] def shouldHide(result, step): called[0] = True self.assertTrue(step is self.step) self.assertEqual(result, SUCCESS) return False self._setupWaterfallTest(shouldHide, False) yield self.runStep() self.assertTrue(called[0]) @defer.inlineCallbacks def test_hideStepIf_Callable_True(self): called = [False] def shouldHide(result, step): called[0] = True self.assertTrue(step is self.step) self.assertEqual(result, SUCCESS) return True self._setupWaterfallTest(shouldHide, True) yield self.runStep() self.assertTrue(called[0]) @defer.inlineCallbacks def test_hideStepIf_fails(self): # 0/0 causes DivideByZeroError, which should be flagged as an exception self._setupWaterfallTest( lambda x, y: 0 / 0, False, expectedResult=EXCEPTION) self.step.addLogWithFailure = mock.Mock() yield self.runStep() self.assertEqual(len(self.flushLoggedErrors(ZeroDivisionError)), 1) @defer.inlineCallbacks def test_hideStepIf_Callable_Exception(self): called = [False] def shouldHide(result, step): called[0] = True self.assertTrue(step is self.step) self.assertEqual(result, EXCEPTION) return True def createException(*args, **kwargs): raise RuntimeError() self.setupStep(self.FakeBuildStep(hideStepIf=shouldHide, doStepIf=createException)) self.expectOutcome(result=EXCEPTION, state_string='finished (exception)') self.expectHidden(True) try: yield self.runStep() except Exception as e: log.err(e) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) self.assertTrue(called[0]) @defer.inlineCallbacks def test_step_getLog(self): testcase = self class TestGetLogStep(buildstep.BuildStep): @defer.inlineCallbacks def run(self): testcase.assertRaises(KeyError, lambda: self.getLog('testy')) log1 = yield self.addLog('testy') log2 = self.getLog('testy') 
testcase.assertIdentical(log1, log2) return SUCCESS self.setupStep(TestGetLogStep()) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_step_renders_flunkOnFailure(self): self.setupStep( TestBuildStep.FakeBuildStep(flunkOnFailure=properties.Property('fOF'))) self.properties.setProperty('fOF', 'yes', 'test') self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.flunkOnFailure, 'yes') def test_hasStatistic(self): step = buildstep.BuildStep() self.assertFalse(step.hasStatistic('rbi')) step.setStatistic('rbi', 13) self.assertTrue(step.hasStatistic('rbi')) def test_setStatistic(self): step = buildstep.BuildStep() step.setStatistic('rbi', 13) self.assertEqual(step.getStatistic('rbi'), 13) def test_getStatistic(self): step = buildstep.BuildStep() self.assertEqual(step.getStatistic('rbi', 99), 99) self.assertEqual(step.getStatistic('rbi'), None) step.setStatistic('rbi', 13) self.assertEqual(step.getStatistic('rbi'), 13) def test_getStatistics(self): step = buildstep.BuildStep() step.setStatistic('rbi', 13) step.setStatistic('ba', 0.298) self.assertEqual(step.getStatistics(), {'rbi': 13, 'ba': 0.298}) def setup_summary_test(self): self.patch(NewStyleStep, 'getCurrentSummary', lambda self: defer.succeed({'step': 'C'})) self.patch(NewStyleStep, 'getResultSummary', lambda self: defer.succeed({'step': 'CS', 'build': 'CB'})) step = NewStyleStep() step.master = fakemaster.make_master(self, wantData=True, wantDb=True) step.stepid = 13 step.build = fakebuild.FakeBuild() return step def test_updateSummary_running(self): step = self.setup_summary_test() step._running = True step.updateSummary() self.reactor.advance(1) self.assertEqual(step.master.data.updates.stepStateString[13], 'C') def test_updateSummary_running_empty_dict(self): step = self.setup_summary_test() step.getCurrentSummary = lambda: {} step._running = True step.updateSummary() self.reactor.advance(1) 
self.assertEqual(step.master.data.updates.stepStateString[13], 'finished') def test_updateSummary_running_not_unicode(self): step = self.setup_summary_test() step.getCurrentSummary = lambda: {'step': b'bytestring'} step._running = True step.updateSummary() self.reactor.advance(1) self.assertEqual(len(self.flushLoggedErrors(TypeError)), 1) def test_updateSummary_running_not_dict(self): step = self.setup_summary_test() step.getCurrentSummary = lambda: 'foo!' step._running = True step.updateSummary() self.reactor.advance(1) self.assertEqual(len(self.flushLoggedErrors(TypeError)), 1) def test_updateSummary_finished(self): step = self.setup_summary_test() step._running = False step.updateSummary() self.reactor.advance(1) self.assertEqual(step.master.data.updates.stepStateString[13], 'CS') def test_updateSummary_finished_empty_dict(self): step = self.setup_summary_test() step.getResultSummary = lambda: {} step._running = False step.updateSummary() self.reactor.advance(1) self.assertEqual(step.master.data.updates.stepStateString[13], 'finished') def test_updateSummary_finished_not_dict(self): step = self.setup_summary_test() step.getResultSummary = lambda: 'foo!' 
step._running = False step.updateSummary() self.reactor.advance(1) self.assertEqual(len(self.flushLoggedErrors(TypeError)), 1) def checkSummary(self, got, step, build=None): self.assertTrue(all(isinstance(k, str) for k in got)) self.assertTrue(all(isinstance(k, str) for k in got.values())) exp = {'step': step} if build: exp['build'] = build self.assertEqual(got, exp) def test_getCurrentSummary(self): st = buildstep.BuildStep() st.description = None self.checkSummary(st.getCurrentSummary(), 'running') def test_getCurrentSummary_description(self): st = buildstep.BuildStep() st.description = 'fooing' self.checkSummary(st.getCurrentSummary(), 'fooing') def test_getCurrentSummary_descriptionSuffix(self): st = buildstep.BuildStep() st.description = 'fooing' st.descriptionSuffix = 'bar' self.checkSummary(st.getCurrentSummary(), 'fooing bar') def test_getCurrentSummary_description_list(self): st = buildstep.BuildStep() st.description = ['foo', 'ing'] self.checkSummary(st.getCurrentSummary(), 'foo ing') def test_getCurrentSummary_descriptionSuffix_list(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = ['foo', 'ing'] st.descriptionSuffix = ['bar', 'bar2'] self.checkSummary(st.getCurrentSummary(), 'foo ing bar bar2') def test_getResultSummary(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = None self.checkSummary(st.getResultSummary(), 'finished') def test_getResultSummary_description(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = 'fooing' self.checkSummary(st.getResultSummary(), 'fooing') def test_getResultSummary_descriptionDone(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = 'fooing' st.descriptionDone = 'fooed' self.checkSummary(st.getResultSummary(), 'fooed') def test_getResultSummary_descriptionSuffix(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = 'fooing' st.descriptionSuffix = 'bar' self.checkSummary(st.getResultSummary(), 'fooing bar') def 
test_getResultSummary_descriptionDone_and_Suffix(self): st = buildstep.BuildStep() st.results = SUCCESS st.descriptionDone = 'fooed' st.descriptionSuffix = 'bar' self.checkSummary(st.getResultSummary(), 'fooed bar') def test_getResultSummary_description_list(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = ['foo', 'ing'] self.checkSummary(st.getResultSummary(), 'foo ing') def test_getResultSummary_descriptionSuffix_list(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = ['foo', 'ing'] st.descriptionSuffix = ['bar', 'bar2'] self.checkSummary(st.getResultSummary(), 'foo ing bar bar2') @defer.inlineCallbacks def test_getResultSummary_descriptionSuffix_failure(self): st = buildstep.BuildStep() st.results = FAILURE st.description = 'fooing' self.checkSummary((yield st.getBuildResultSummary()), 'fooing (failure)', 'fooing (failure)') self.checkSummary(st.getResultSummary(), 'fooing (failure)') @defer.inlineCallbacks def test_getResultSummary_descriptionSuffix_skipped(self): st = buildstep.BuildStep() st.results = SKIPPED st.description = 'fooing' self.checkSummary((yield st.getBuildResultSummary()), 'fooing (skipped)') self.checkSummary(st.getResultSummary(), 'fooing (skipped)') # Test calling checkWorkerHasCommand() when worker have support for # requested remote command. def testcheckWorkerHasCommandGood(self): # patch BuildStep.workerVersion() to return success mockedWorkerVersion = mock.Mock() self.patch(buildstep.BuildStep, "workerVersion", mockedWorkerVersion) # check that no exceptions are raised buildstep.BuildStep().checkWorkerHasCommand("foo") # make sure workerVersion() was called with correct arguments mockedWorkerVersion.assert_called_once_with("foo") # Test calling checkWorkerHasCommand() when worker is to old to support # requested remote command. 
def testcheckWorkerHasCommandTooOld(self): # patch BuildStep.workerVersion() to return error self.patch(buildstep.BuildStep, "workerVersion", mock.Mock(return_value=None)) # make sure appropriate exception is raised step = buildstep.BuildStep() with self.assertRaisesRegex(WorkerSetupError, "worker is too old, does not know about foo"): step.checkWorkerHasCommand("foo") @defer.inlineCallbacks def testRunRaisesException(self): step = NewStyleStep() step.master = mock.Mock() step.build = mock.Mock() step.build.builder.botmaster.getLockFromLockAccesses = mock.Mock(return_value=[]) step.locks = [] step.renderables = [] step.build.render = defer.succeed step.master.data.updates.addStep = lambda **kwargs: defer.succeed( (0, 0, 0)) step.addLogWithFailure = lambda x: defer.succeed(None) step.run = lambda: defer.fail(RuntimeError('got exception')) res = yield step.startStep(mock.Mock()) self.assertFalse(step._running) errors = self.flushLoggedErrors() self.assertEqual(len(errors), 1) self.assertEqual(errors[0].getErrorMessage(), 'got exception') self.assertEqual(res, EXCEPTION) class InterfaceTests(interfaces.InterfaceTests): # ensure that steps.BuildStepMixin creates a convincing facsimile of the # real BuildStep def test_signature_attributes(self): for attr in [ 'name', 'description', 'descriptionDone', 'descriptionSuffix', 'locks', 'progressMetrics', 'useProgress', 'doStepIf', 'hideStepIf', 'haltOnFailure', 'flunkOnWarnings', 'flunkOnFailure', 'warnOnWarnings', 'warnOnFailure', 'alwaysRun', 'build', 'worker', 'progress', 'stopped', ]: self.assertTrue(hasattr(self.step, attr)) def test_signature_setBuild(self): @self.assertArgSpecMatches(self.step.setBuild) def setBuild(self, build): pass def test_signature_setWorker(self): @self.assertArgSpecMatches(self.step.setWorker) def setWorker(self, worker): pass def test_signature_setupProgress(self): @self.assertArgSpecMatches(self.step.setupProgress) def setupProgress(self): pass def test_signature_startStep(self): 
@self.assertArgSpecMatches(self.step.startStep) def startStep(self, remote): pass def test_signature_run(self): @self.assertArgSpecMatches(self.step.run) def run(self): pass def test_signature_interrupt(self): @self.assertArgSpecMatches(self.step.interrupt) def interrupt(self, reason): pass def test_signature_setProgress(self): @self.assertArgSpecMatches(self.step.setProgress) def setProgress(self, metric, value): pass def test_signature_workerVersion(self): @self.assertArgSpecMatches(self.step.workerVersion) def workerVersion(self, command, oldversion=None): pass def test_signature_workerVersionIsOlderThan(self): @self.assertArgSpecMatches(self.step.workerVersionIsOlderThan) def workerVersionIsOlderThan(self, command, minversion): pass def test_signature_getWorkerName(self): @self.assertArgSpecMatches(self.step.getWorkerName) def getWorkerName(self): pass def test_signature_runCommand(self): @self.assertArgSpecMatches(self.step.runCommand) def runCommand(self, command): pass def test_signature_addURL(self): @self.assertArgSpecMatches(self.step.addURL) def addURL(self, name, url): pass def test_signature_addLog(self): @self.assertArgSpecMatches(self.step.addLog) def addLog(self, name, type='s', logEncoding=None): pass def test_signature_getLog(self): @self.assertArgSpecMatches(self.step.getLog) def getLog(self, name): pass def test_signature_addCompleteLog(self): @self.assertArgSpecMatches(self.step.addCompleteLog) def addCompleteLog(self, name, text): pass def test_signature_addHTMLLog(self): @self.assertArgSpecMatches(self.step.addHTMLLog) def addHTMLLog(self, name, html): pass def test_signature_addLogObserver(self): @self.assertArgSpecMatches(self.step.addLogObserver) def addLogObserver(self, logname, observer): pass class TestFakeItfc(unittest.TestCase, steps.BuildStepMixin, TestReactorMixin, InterfaceTests): def setUp(self): self.setUpTestReactor() self.setUpBuildStep() self.setupStep(buildstep.BuildStep()) class TestRealItfc(unittest.TestCase, 
InterfaceTests): def setUp(self): self.step = buildstep.BuildStep() class CommandMixinExample(buildstep.CommandMixin, buildstep.BuildStep): @defer.inlineCallbacks def run(self): rv = yield self.testMethod() self.method_return_value = rv return SUCCESS class TestCommandMixin(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpBuildStep() self.step = CommandMixinExample() self.setupStep(self.step) def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def test_runRmdir(self): self.step.testMethod = lambda: self.step.runRmdir('/some/path') self.expectCommands( Expect('rmdir', {'dir': '/some/path', 'logEnviron': False}) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertTrue(self.step.method_return_value) @defer.inlineCallbacks def test_runMkdir(self): self.step.testMethod = lambda: self.step.runMkdir('/some/path') self.expectCommands( Expect('mkdir', {'dir': '/some/path', 'logEnviron': False}) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertTrue(self.step.method_return_value) @defer.inlineCallbacks def test_runMkdir_fails(self): self.step.testMethod = lambda: self.step.runMkdir('/some/path') self.expectCommands( Expect('mkdir', {'dir': '/some/path', 'logEnviron': False}) + 1, ) self.expectOutcome(result=FAILURE) yield self.runStep() @defer.inlineCallbacks def test_runMkdir_fails_no_abandon(self): self.step.testMethod = lambda: self.step.runMkdir( '/some/path', abandonOnFailure=False) self.expectCommands( Expect('mkdir', {'dir': '/some/path', 'logEnviron': False}) + 1, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertFalse(self.step.method_return_value) @defer.inlineCallbacks def test_pathExists(self): self.step.testMethod = lambda: self.step.pathExists('/some/path') self.expectCommands( Expect('stat', {'file': '/some/path', 'logEnviron': False}) + 0, ) self.expectOutcome(result=SUCCESS) yield 
self.runStep() self.assertTrue(self.step.method_return_value) @defer.inlineCallbacks def test_pathExists_doesnt(self): self.step.testMethod = lambda: self.step.pathExists('/some/path') self.expectCommands( Expect('stat', {'file': '/some/path', 'logEnviron': False}) + 1, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertFalse(self.step.method_return_value) @defer.inlineCallbacks def test_pathExists_logging(self): self.step.testMethod = lambda: self.step.pathExists('/some/path') self.expectCommands( Expect('stat', {'file': '/some/path', 'logEnviron': False}) + Expect.log('stdio', header='NOTE: never mind\n') + 1, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertFalse(self.step.method_return_value) self.assertEqual(self.step.getLog('stdio').header, 'NOTE: never mind\n') def test_glob(self): @defer.inlineCallbacks def testFunc(): res = yield self.step.runGlob("*.pyc") self.assertEqual(res, ["one.pyc", "two.pyc"]) self.step.testMethod = testFunc self.expectCommands( Expect('glob', {'path': '*.pyc', 'logEnviron': False}) + Expect.update('files', ["one.pyc", "two.pyc"]) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_glob_empty(self): self.step.testMethod = lambda: self.step.runGlob("*.pyc") self.expectCommands( Expect('glob', {'path': '*.pyc', 'logEnviron': False}) + Expect.update('files', []) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_glob_fail(self): self.step.testMethod = lambda: self.step.runGlob("*.pyc") self.expectCommands( Expect('glob', {'path': '*.pyc', 'logEnviron': False}) + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() class SimpleShellCommand(buildstep.ShellMixin, buildstep.BuildStep): def __init__(self, make_cmd_kwargs=None, prohibit_args=None, **kwargs): self.make_cmd_kwargs = make_cmd_kwargs or {} kwargs = self.setupShellMixin(kwargs, prohibitArgs=prohibit_args) super().__init__(**kwargs) @defer.inlineCallbacks def run(self): cmd = yield 
self.makeRemoteShellCommand(**self.make_cmd_kwargs) yield self.runCommand(cmd) return cmd.results() class TestShellMixin(steps.BuildStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_setupShellMixin_bad_arg(self): mixin = SimpleShellCommand() with self.assertRaisesConfigError("invalid SimpleShellCommand argument invarg"): mixin.setupShellMixin({'invarg': 13}) def test_setupShellMixin_prohibited_arg(self): mixin = SimpleShellCommand() with self.assertRaisesConfigError("invalid SimpleShellCommand argument logfiles"): mixin.setupShellMixin({'logfiles': None}, prohibitArgs=['logfiles']) def test_constructor_defaults(self): class MySubclass(SimpleShellCommand): timeout = 9999 # ShellMixin arg self.assertEqual(MySubclass().timeout, 9999) self.assertEqual(MySubclass(timeout=88).timeout, 88) # BuildStep arg self.assertEqual(MySubclass().logEncoding, None) self.assertEqual(MySubclass(logEncoding='latin-1').logEncoding, 'latin-1') self.assertEqual(MySubclass().description, None) self.assertEqual(MySubclass(description='charming').description, ['charming']) @defer.inlineCallbacks def test_prohibit_args(self): self.setupStep(SimpleShellCommand(prohibit_args=['command'], make_cmd_kwargs={'command': ['cmd', 'arg']})) self.expectCommands( ExpectShell(workdir='wkdir', command=['cmd', 'arg']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_no_default_workdir(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg']), wantDefaultWorkdir=False) self.expectCommands( ExpectShell(workdir='build', command=['cmd', 'arg']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_build_workdir(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg']), wantDefaultWorkdir=False) self.build.workdir = '/alternate' 
self.expectCommands( ExpectShell(workdir='/alternate', command=['cmd', 'arg']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_build_workdir_callable(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg']), wantDefaultWorkdir=False) self.build.workdir = lambda x: '/alternate' self.expectCommands( ExpectShell(workdir='/alternate', command=['cmd', 'arg']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_build_workdir_callable_error(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg']), wantDefaultWorkdir=False) self.build.workdir = lambda x: x.nosuchattribute # will raise AttributeError self.expectException(buildstep.CallableAttributeError) yield self.runStep() @defer.inlineCallbacks def test_build_workdir_renderable(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg']), wantDefaultWorkdir=False) self.build.workdir = properties.Property("myproperty") self.properties.setProperty("myproperty", "/myproperty", "test") self.expectCommands( ExpectShell(workdir='/myproperty', command=['cmd', 'arg']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_step_workdir(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], workdir='/stepdir')) self.build.workdir = '/builddir' self.expectCommands( ExpectShell(workdir='/stepdir', command=['cmd', 'arg']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_step_renderable_workdir(self): @renderer def rendered_workdir(_): return '/stepdir' self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], workdir=rendered_workdir)) self.build.workdir = '/builddir' self.expectCommands( ExpectShell(workdir='/stepdir', command=['cmd', 'arg']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_step_workdir_overridden(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], 
workdir='/stepdir', make_cmd_kwargs={'workdir': '/overridden'})) self.build.workdir = '/builddir' self.expectCommands( ExpectShell(workdir='/overridden', command=['cmd', 'arg']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_extra_logfile(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], logfiles={'logname': 'logpath.log'})) self.expectCommands( ExpectShell(workdir='wkdir', command=['cmd', 'arg'], logfiles={'logname': 'logpath.log'}) + Expect.log('logname', stdout='logline\nlogline2\n') + Expect.log('stdio', stdout="some log\n") + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.getLog('logname').stdout, 'logline\nlogline2\n') @defer.inlineCallbacks def test_lazy_logfiles_stdout_has_stdout(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], lazylogfiles=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['cmd', 'arg']) + Expect.log('stdio', stdout="some log\n") + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.getLog('stdio').stdout, 'some log\n') @defer.inlineCallbacks def test_lazy_logfiles_stdout_no_stdout(self): # lazy log files do not apply to stdout self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], lazylogfiles=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['cmd', 'arg']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.getLog('stdio').stdout, '') @defer.inlineCallbacks def test_lazy_logfiles_logfile(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], lazylogfiles=True, logfiles={'logname': 'logpath.log'})) self.expectCommands( ExpectShell(workdir='wkdir', command=['cmd', 'arg'], logfiles={'logname': 'logpath.log'}) + Expect.log('logname', stdout='logline\nlogline2\n') + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.getLog('logname').stdout, 'logline\nlogline2\n') 
@defer.inlineCallbacks def test_lazy_logfiles_no_logfile(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], lazylogfiles=True, logfiles={'logname': 'logpath.log'})) self.expectCommands( ExpectShell(workdir='wkdir', command=['cmd', 'arg'], logfiles={'logname': 'logpath.log'}) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() with self.assertRaises(KeyError): self.step.getLog('logname') @defer.inlineCallbacks def test_env(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], env={'BAR': 'BAR'})) self.build.builder.config.env = {'FOO': 'FOO'} self.expectCommands( ExpectShell(workdir='wkdir', command=['cmd', 'arg'], env={'FOO': 'FOO', 'BAR': 'BAR'}) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_old_worker_args(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], usePTY=False, interruptSignal='DIE'), worker_version={'*': "1.1"}) self.expectCommands( ExpectShell(workdir='wkdir', command=['cmd', 'arg']) + # note missing parameters 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.getLog('stdio').header, 'NOTE: worker does not allow master to override usePTY\n' 'NOTE: worker does not allow master to specify interruptSignal\n') @defer.inlineCallbacks def test_new_worker_args(self): self.setupStep(SimpleShellCommand(command=['cmd', 'arg'], usePTY=False, interruptSignal='DIE'), worker_version={'*': "3.0"}) self.expectCommands( ExpectShell(workdir='wkdir', usePTY=False, interruptSignal='DIE', command=['cmd', 'arg']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.getLog('stdio').header, '') @defer.inlineCallbacks def test_description(self): self.setupStep(SimpleShellCommand(command=['foo', properties.Property('bar', 'BAR')])) self.expectCommands( ExpectShell(workdir='wkdir', command=['foo', 'BAR']) + 0, ) self.expectOutcome(result=SUCCESS, state_string="'foo BAR'") yield self.runStep() def 
test_getResultSummary(self): self.setupStep(SimpleShellCommand(command=['a', ['b', 'c']])) self.step.results = SUCCESS self.assertEqual(self.step.getResultSummary(), {'step': "'a b ...'"}) buildbot-3.4.0/master/buildbot/test/unit/process/test_cache.py000066400000000000000000000040671413250514000245130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process import cache class CacheManager(unittest.TestCase): def setUp(self): self.caches = cache.CacheManager() def make_config(self, **kwargs): cfg = mock.Mock() cfg.caches = kwargs return cfg def test_get_cache_idempotency(self): foo_cache = self.caches.get_cache("foo", None) bar_cache = self.caches.get_cache("bar", None) foo_cache2 = self.caches.get_cache("foo", None) self.assertIdentical(foo_cache, foo_cache2) self.assertNotIdentical(foo_cache, bar_cache) @defer.inlineCallbacks def test_reconfigServiceWithBuildbotConfig(self): # load config with one cache loaded and the other not foo_cache = self.caches.get_cache("foo", None) yield self.caches.reconfigServiceWithBuildbotConfig( self.make_config(foo=5, bar=6, bing=11)) bar_cache = self.caches.get_cache("bar", None) self.assertEqual((foo_cache.max_size, bar_cache.max_size), (5, 6)) def 
test_get_metrics(self): self.caches.get_cache("foo", None) self.assertIn('foo', self.caches.get_metrics()) metric = self.caches.get_metrics()['foo'] for k in 'hits', 'refhits', 'misses', 'max_size': self.assertIn(k, metric) buildbot-3.4.0/master/buildbot/test/unit/process/test_debug.py000066400000000000000000000045221413250514000245320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process import debug from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util import service class FakeManhole(service.AsyncService): pass class TestDebugServices(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = mock.Mock(name='master') self.config = config.MasterConfig() @defer.inlineCallbacks def test_reconfigService_manhole(self): master = fakemaster.make_master(self) ds = debug.DebugServices() yield ds.setServiceParent(master) yield master.startService() # start off with no manhole yield ds.reconfigServiceWithBuildbotConfig(self.config) # set a manhole, fire it up self.config.manhole = manhole = FakeManhole() yield ds.reconfigServiceWithBuildbotConfig(self.config) self.assertTrue(manhole.running) self.assertIdentical(manhole.master, master) # unset it, see it stop self.config.manhole = None yield ds.reconfigServiceWithBuildbotConfig(self.config) self.assertFalse(manhole.running) self.assertIdentical(manhole.master, None) # re-start to test stopService self.config.manhole = manhole yield ds.reconfigServiceWithBuildbotConfig(self.config) # disown the service, and see that it unregisters yield ds.disownServiceParent() self.assertFalse(manhole.running) self.assertIdentical(manhole.master, None) buildbot-3.4.0/master/buildbot/test/unit/process/test_factory.py000066400000000000000000000134371413250514000251200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from random import choice from string import ascii_uppercase from twisted.trial import unittest from buildbot.process.buildstep import BuildStep from buildbot.process.buildstep import _BuildStepFactory from buildbot.process.factory import BuildFactory from buildbot.process.factory import GNUAutoconf from buildbot.process.factory import s from buildbot.steps.shell import Configure class TestBuildFactory(unittest.TestCase): def setUp(self): self.factory = BuildFactory() def test_init(self): step = BuildStep() self.factory = BuildFactory([step]) self.assertEqual(self.factory.steps, [_BuildStepFactory(BuildStep)]) def test_addStep(self): # create a string random string that will probably not collide # with what is already in the factory string = ''.join(choice(ascii_uppercase) for x in range(6)) length = len(self.factory.steps) step = BuildStep(name=string) self.factory.addStep(step) # check if the number of nodes grew by one self.assertTrue(length + 1, len(self.factory.steps)) # check if the 'right' node added in the factory self.assertEqual(self.factory.steps[-1], _BuildStepFactory(BuildStep, name=string)) def test_s(self): """ L{s} is deprecated, but pass keyword arguments to the first argument, to construct a step. 
""" stepFactory = s(BuildStep, name='test') self.assertEqual( stepFactory, _BuildStepFactory(BuildStep, name='test')) warnings = self.flushWarnings([self.test_s]) self.assertEqual(len(warnings), 1) self.assertEqual(warnings[0]['category'], DeprecationWarning) def test_addStep_notAStep(self): # This fails because object isn't adaptable to IBuildStepFactory with self.assertRaises(TypeError): self.factory.addStep(object()) def test_addStep_ArgumentsInTheWrongPlace(self): with self.assertRaises(TypeError): self.factory.addStep(BuildStep(), name="name") # this also raises a deprecation error, which we don't care about (see # test_s) self.flushWarnings() def test_addSteps(self): self.factory.addSteps([BuildStep(), BuildStep()]) self.assertEqual(self.factory.steps[-2:], [_BuildStepFactory(BuildStep), _BuildStepFactory(BuildStep)]) class TestGNUAutoconf(TestBuildFactory): def setUp(self): self.factory = GNUAutoconf(source=BuildStep()) def test_init(self): # actual initialization is already done by setUp configurePresent = False compilePresent = False checkPresent = False distcheckPresent = False for step in self.factory.steps: if isinstance(step.buildStep(), Configure): configurePresent = True # the following checks are rather hairy and should be # rewritten less implementation dependent. 
try: if step.buildStep().command == ['make', 'all']: compilePresent = True if step.buildStep().command == ['make', 'check']: checkPresent = True if step.buildStep().command == ['make', 'distcheck']: distcheckPresent = True except(AttributeError, KeyError): pass self.assertTrue(configurePresent) self.assertTrue(compilePresent) self.assertTrue(checkPresent) self.assertTrue(distcheckPresent) def test_init_none(self): """Default steps can be uninitialized by setting None""" self.factory = GNUAutoconf(source=BuildStep(), compile=None, test=None, distcheck=None) for step in self.factory.steps: try: cmd = step.buildStep().command self.assertNotIn(cmd, [['make', 'all'], ['make', 'check'], ['make', 'distcheck']], "Build step {} should not be present.".format(cmd)) except(AttributeError, KeyError): pass def test_init_reconf(self): # test reconf = True self.factory = GNUAutoconf(source=BuildStep(), reconf=True) self.test_init() reconfPresent = False selfreconfPresent = False for step in self.factory.steps: try: if step.buildStep().command[0] == 'autoreconf': reconfPresent = True except(AttributeError, KeyError): pass self.assertTrue(reconfPresent) # test setting your own reconfiguration step self.factory = GNUAutoconf(source=BuildStep(), reconf=['notsoautoreconf']) self.test_init() for step in self.factory.steps: try: if step.buildStep().command == ['notsoautoreconf']: selfreconfPresent = True except(AttributeError, KeyError): pass self.assertTrue(selfreconfPresent) buildbot-3.4.0/master/buildbot/test/unit/process/test_log.py000066400000000000000000000263301413250514000242260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.process import log from buildbot.test.fake import fakemaster from buildbot.test.fake import logfile as fakelogfile from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class Tests(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def makeLog(self, type, logEncoding='utf-8'): logid = yield self.master.data.updates.addLog( stepid=27, name='testlog', type=str(type)) return log.Log.new(self.master, 'testlog', type, logid, logEncoding) @defer.inlineCallbacks def test_creation(self): for type in 'ths': yield self.makeLog(type) def test_logDecodeFunctionFromConfig(self): otilde = '\u00f5' otilde_utf8 = otilde.encode('utf-8') otilde_latin1 = otilde.encode('latin1') invalid_utf8 = b'\xff' replacement = '\ufffd' f = log.Log._decoderFromString('latin-1') self.assertEqual(f(otilde_latin1), otilde) f = log.Log._decoderFromString('utf-8') self.assertEqual(f(otilde_utf8), otilde) self.assertEqual(f(invalid_utf8), replacement) f = log.Log._decoderFromString(lambda s: str(s[::-1])) self.assertEqual(f('abc'), 'cba') @defer.inlineCallbacks def test_updates_plain(self): _log = yield self.makeLog('t') _log.addContent('hello\n') _log.addContent('hello ') _log.addContent('cruel ') _log.addContent('world\nthis is a second 
line') # unfinished _log.finish() self.assertEqual(self.master.data.updates.logs[_log.logid], { 'content': ['hello\n', 'hello cruel world\n', 'this is a second line\n'], 'finished': True, 'type': 't', 'name': 'testlog', }) @defer.inlineCallbacks def test_updates_different_encoding(self): _log = yield self.makeLog('t', logEncoding='latin-1') # 0xa2 is latin-1 encoding for CENT SIGN _log.addContent('$ and \xa2\n') _log.finish() self.assertEqual(self.master.data.updates.logs[_log.logid]['content'], ['$ and \N{CENT SIGN}\n']) @defer.inlineCallbacks def test_updates_unicode_input(self): _log = yield self.makeLog('t', logEncoding='something-invalid') _log.addContent('\N{SNOWMAN}\n') _log.finish() self.assertEqual(self.master.data.updates.logs[_log.logid]['content'], ['\N{SNOWMAN}\n']) @defer.inlineCallbacks def test_subscription_plain(self): _log = yield self.makeLog('t') calls = [] _log.subscribe(lambda stream, content: calls.append((stream, content))) self.assertEqual(calls, []) yield _log.addContent('hello\n') self.assertEqual(calls, [(None, 'hello\n')]) calls = [] yield _log.addContent('hello ') self.assertEqual(calls, []) yield _log.addContent('cruel ') self.assertEqual(calls, []) yield _log.addContent('world\nthis is a second line\n') self.assertEqual(calls, [ (None, 'hello cruel world\nthis is a second line\n')]) calls = [] yield _log.finish() self.assertEqual(calls, [(None, None)]) @defer.inlineCallbacks def test_subscription_unsubscribe(self): _log = yield self.makeLog('t') sub_fn = mock.Mock() sub = _log.subscribe(sub_fn) sub.unsubscribe() yield _log.finish() sub_fn.assert_not_called() @defer.inlineCallbacks def test_subscription_stream(self): _log = yield self.makeLog('s') calls = [] _log.subscribe(lambda stream, content: calls.append((stream, content))) self.assertEqual(calls, []) yield _log.addStdout('hello\n') self.assertEqual(calls, [('o', 'hello\n')]) calls = [] yield _log.addStdout('hello ') self.assertEqual(calls, []) yield _log.addStdout('cruel ') 
self.assertEqual(calls, []) yield _log.addStderr('!!\n') self.assertEqual(calls, [('e', '!!\n')]) calls = [] yield _log.addHeader('**\n') self.assertEqual(calls, [('h', '**\n')]) calls = [] yield _log.addStdout('world\nthis is a second line') # unfinished self.assertEqual(calls, [ ('o', 'hello cruel world\n')]) calls = [] yield _log.finish() self.assertEqual(calls, [ ('o', 'this is a second line\n'), (None, None)]) @defer.inlineCallbacks def test_updates_stream(self): _log = yield self.makeLog('s') _log.addStdout('hello\n') _log.addStdout('hello ') _log.addStderr('oh noes!\n') _log.addStdout('cruel world\n') _log.addStderr('bad things!') # unfinished _log.finish() self.assertEqual(self.master.data.updates.logs[_log.logid], { 'content': ['ohello\n', 'eoh noes!\n', 'ohello cruel world\n', 'ebad things!\n'], 'finished': True, 'name': 'testlog', 'type': 's', }) @defer.inlineCallbacks def test_unyielded_finish(self): _log = yield self.makeLog('s') _log.finish() with self.assertRaises(AssertionError): yield _log.finish() @defer.inlineCallbacks def test_isFinished(self): _log = yield self.makeLog('s') self.assertFalse(_log.isFinished()) yield _log.finish() self.assertTrue(_log.isFinished()) @defer.inlineCallbacks def test_waitUntilFinished(self): _log = yield self.makeLog('s') d = _log.waitUntilFinished() self.assertFalse(d.called) yield _log.finish() self.assertTrue(d.called) class InterfaceTests(interfaces.InterfaceTests): # for compatibility between old-style and new-style steps, both # buildbot.status.logfile.LogFile and buildbot.process.log.StreamLog must # meet this interface, at least until support for old-style steps is # removed. 
# ILogFile def test_signature_addStdout(self): @self.assertArgSpecMatches(self.log.addStdout) def addStdout(self, text): pass def test_signature_addStderr(self): @self.assertArgSpecMatches(self.log.addStderr) def addStderr(self, text): pass def test_signature_addHeader(self): @self.assertArgSpecMatches(self.log.addHeader) def addHeader(self, text): pass def test_signature_finish(self): @self.assertArgSpecMatches(self.log.finish) def finish(self): pass # IStatusLog def test_signature_getName(self): @self.assertArgSpecMatches(self.log.getName) def getName(self): pass def test_getName(self): self.assertEqual(self.log.getName(), 'stdio') def test_signature_isFinished(self): @self.assertArgSpecMatches(self.log.isFinished) def isFinished(self): pass def test_signature_waitUntilFinished(self): @self.assertArgSpecMatches(self.log.waitUntilFinished) def waitUntilFinished(self): pass def test_signature_subscribe(self): @self.assertArgSpecMatches(self.log.subscribe) def subscribe(self, callback): pass def test_signature_unsubscribe(self): # method has been removed self.assertFalse(hasattr(self.log, 'unsubscribe')) def test_signature_getStep_removed(self): self.assertFalse(hasattr(self.log, 'getStep')) def test_signature_subscribeConsumer_removed(self): self.assertFalse(hasattr(self.log, 'subscribeConsumer')) def test_signature_hasContents_removed(self): self.assertFalse(hasattr(self.log, 'hasContents')) def test_signature_getText_removed(self): self.assertFalse(hasattr(self.log, 'getText')) def test_signature_readlines_removed(self): self.assertFalse(hasattr(self.log, 'readlines')) def test_signature_getTextWithHeaders_removed(self): self.assertFalse(hasattr(self.log, 'getTextWithHeaders')) def test_signature_getChunks_removed(self): self.assertFalse(hasattr(self.log, 'getChunks')) class TestProcessItfc(unittest.TestCase, InterfaceTests): def setUp(self): self.log = log.StreamLog(mock.Mock(name='master'), 'stdio', 's', 101, str) class TestFakeLogFile(unittest.TestCase, 
InterfaceTests): def setUp(self): self.log = fakelogfile.FakeLogFile('stdio') class TestErrorRaised(unittest.TestCase): def instrumentTestedLoggerForError(self, testedLog): def addRawLines(msg): d = defer.Deferred() def raiseError(_): d.errback(RuntimeError('DB has gone away')) reactor.callLater(10 ** (-6), raiseError, None) return d self.patch(testedLog, 'addRawLines', addRawLines) return testedLog @defer.inlineCallbacks def testErrorOnStreamLog(self): tested_log = self.instrumentTestedLoggerForError( log.StreamLog(mock.Mock(name='master'), 'stdio', 's', 101, str)) correct_error_raised = False try: yield tested_log.addStdout('msg\n') except Exception as e: correct_error_raised = 'DB has gone away' in str(e) self.assertTrue(correct_error_raised) @defer.inlineCallbacks def testErrorOnPlainLog(self): tested_log = self.instrumentTestedLoggerForError( log.PlainLog(mock.Mock(name='master'), 'stdio', 's', 101, str)) correct_error_raised = False try: yield tested_log.addContent('msg\n') except Exception as e: correct_error_raised = 'DB has gone away' in str(e) self.assertTrue(correct_error_raised) @defer.inlineCallbacks def testErrorOnPlainLogFlush(self): tested_log = self.instrumentTestedLoggerForError( log.PlainLog(mock.Mock(name='master'), 'stdio', 's', 101, str)) correct_error_raised = False try: yield tested_log.addContent('msg') yield tested_log.finish() except Exception as e: correct_error_raised = 'DB has gone away' in str(e) self.assertTrue(correct_error_raised) buildbot-3.4.0/master/buildbot/test/unit/process/test_logobserver.py000066400000000000000000000165451413250514000260050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process import log from buildbot.process import logobserver from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class MyLogObserver(logobserver.LogObserver): def __init__(self): self.obs = [] def outReceived(self, data): self.obs.append(('out', data)) def errReceived(self, data): self.obs.append(('err', data)) def headerReceived(self, data): self.obs.append(('hdr', data)) def finishReceived(self): self.obs.append(('fin',)) class TestLogObserver(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def test_sequence(self): logid = yield self.master.data.updates.addLog(1, 'mine', 's') _log = log.Log.new(self.master, 'mine', 's', logid, 'utf-8') lo = MyLogObserver() lo.setLog(_log) yield _log.addStdout('hello\n') yield _log.addStderr('cruel\n') yield _log.addStdout('world\n') yield _log.addStdout('multi\nline\nchunk\n') yield _log.addHeader('HDR\n') yield _log.finish() self.assertEqual(lo.obs, [ ('out', 'hello\n'), ('err', 'cruel\n'), ('out', 'world\n'), ('out', 'multi\nline\nchunk\n'), ('hdr', 'HDR\n'), ('fin',), ]) class MyLogLineObserver(logobserver.LogLineObserver): def __init__(self): super().__init__() self.obs = [] def outLineReceived(self, data): self.obs.append(('out', data)) def errLineReceived(self, data): self.obs.append(('err', 
data)) def headerLineReceived(self, data): self.obs.append(('hdr', data)) def finishReceived(self): self.obs.append(('fin',)) class TestLineConsumerLogObesrver(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def do_test_sequence(self, consumer): logid = yield self.master.data.updates.addLog(1, 'mine', 's') _log = log.Log.new(self.master, 'mine', 's', logid, 'utf-8') lo = logobserver.LineConsumerLogObserver(consumer) lo.setLog(_log) yield _log.addStdout('hello\n') yield _log.addStderr('cruel\n') yield _log.addStdout('multi\nline\nchunk\n') yield _log.addHeader('H1\nH2\n') yield _log.finish() @defer.inlineCallbacks def test_sequence_finish(self): results = [] def consumer(): while True: try: stream, line = yield results.append((stream, line)) except GeneratorExit: results.append('finish') raise yield self.do_test_sequence(consumer) self.assertEqual(results, [ ('o', 'hello'), ('e', 'cruel'), ('o', 'multi'), ('o', 'line'), ('o', 'chunk'), ('h', 'H1'), ('h', 'H2'), 'finish', ]) @defer.inlineCallbacks def test_sequence_no_finish(self): results = [] def consumer(): while True: stream, line = yield results.append((stream, line)) yield self.do_test_sequence(consumer) self.assertEqual(results, [ ('o', 'hello'), ('e', 'cruel'), ('o', 'multi'), ('o', 'line'), ('o', 'chunk'), ('h', 'H1'), ('h', 'H2'), ]) class TestLogLineObserver(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def test_sequence(self): logid = yield self.master.data.updates.addLog(1, 'mine', 's') _log = log.Log.new(self.master, 'mine', 's', logid, 'utf-8') lo = MyLogLineObserver() lo.setLog(_log) yield _log.addStdout('hello\n') yield _log.addStderr('cruel\n') yield _log.addStdout('multi\nline\nchunk\n') yield _log.addHeader('H1\nH2\n') yield _log.finish() self.assertEqual(lo.obs, [ ('out', 
'hello'), ('err', 'cruel'), ('out', 'multi'), ('out', 'line'), ('out', 'chunk'), ('hdr', 'H1'), ('hdr', 'H2'), ('fin',), ]) def test_old_setMaxLineLength(self): # this method is gone, but used to be documented, so it's still # callable. Just don't fail. lo = MyLogLineObserver() lo.setMaxLineLength(120939403) class TestOutputProgressObserver(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def test_sequence(self): logid = yield self.master.data.updates.addLog(1, 'mine', 's') _log = log.Log.new(self.master, 'mine', 's', logid, 'utf-8') lo = logobserver.OutputProgressObserver('stdio') step = mock.Mock() lo.setStep(step) lo.setLog(_log) yield _log.addStdout('hello\n') step.setProgress.assert_called_with('stdio', 6) yield _log.finish() class TestBufferObserver(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def do_test_sequence(self, lo): logid = yield self.master.data.updates.addLog(1, 'mine', 's') _log = log.Log.new(self.master, 'mine', 's', logid, 'utf-8') lo.setLog(_log) yield _log.addStdout('hello\n') yield _log.addStderr('cruel\n') yield _log.addStdout('multi\nline\nchunk\n') yield _log.addHeader('H1\nH2\n') yield _log.finish() @defer.inlineCallbacks def test_stdout_only(self): lo = logobserver.BufferLogObserver(wantStdout=True, wantStderr=False) yield self.do_test_sequence(lo) self.assertEqual(lo.getStdout(), 'hello\nmulti\nline\nchunk\n') self.assertEqual(lo.getStderr(), '') @defer.inlineCallbacks def test_both(self): lo = logobserver.BufferLogObserver(wantStdout=True, wantStderr=True) yield self.do_test_sequence(lo) self.assertEqual(lo.getStdout(), 'hello\nmulti\nline\nchunk\n') self.assertEqual(lo.getStderr(), 'cruel\n') 
buildbot-3.4.0/master/buildbot/test/unit/process/test_metrics.py000066400000000000000000000205121413250514000251070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import gc import sys from twisted.internet import task from twisted.trial import unittest from buildbot.process import metrics from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class TestMetricBase(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.observer = metrics.MetricLogObserver() self.observer.parent = self.master = fakemaster.make_master(self) self.master.config.metrics = dict(log_interval=0, periodic_interval=0) self.observer._reactor = self.reactor self.observer.startService() self.observer.reconfigServiceWithBuildbotConfig(self.master.config) def tearDown(self): if self.observer.running: self.observer.stopService() class TestMetricCountEvent(TestMetricBase): def testIncrement(self): metrics.MetricCountEvent.log('num_widgets', 1) report = self.observer.asDict() self.assertEqual(report['counters']['num_widgets'], 1) metrics.MetricCountEvent.log('num_widgets', 1) report = self.observer.asDict() self.assertEqual(report['counters']['num_widgets'], 2) def testDecrement(self): metrics.MetricCountEvent.log('num_widgets', 1) report = self.observer.asDict() 
self.assertEqual(report['counters']['num_widgets'], 1) metrics.MetricCountEvent.log('num_widgets', -1) report = self.observer.asDict() self.assertEqual(report['counters']['num_widgets'], 0) def testAbsolute(self): metrics.MetricCountEvent.log('num_widgets', 10, absolute=True) report = self.observer.asDict() self.assertEqual(report['counters']['num_widgets'], 10) def testCountMethod(self): @metrics.countMethod('foo_called') def foo(): return "foo!" for i in range(10): foo() report = self.observer.asDict() self.assertEqual(report['counters']['foo_called'], 10) class TestMetricTimeEvent(TestMetricBase): def testManualEvent(self): metrics.MetricTimeEvent.log('foo_time', 0.001) report = self.observer.asDict() self.assertEqual(report['timers']['foo_time'], 0.001) def testTimer(self): clock = task.Clock() t = metrics.Timer('foo_time') t._reactor = clock t.start() clock.advance(5) t.stop() report = self.observer.asDict() self.assertEqual(report['timers']['foo_time'], 5) def testStartStopDecorators(self): clock = task.Clock() t = metrics.Timer('foo_time') t._reactor = clock @t.startTimer def foo(): clock.advance(5) return "foo!" @t.stopTimer def bar(): clock.advance(5) return "bar!" foo() bar() report = self.observer.asDict() self.assertEqual(report['timers']['foo_time'], 10) def testTimeMethod(self): clock = task.Clock() @metrics.timeMethod('foo_time', _reactor=clock) def foo(): clock.advance(5) return "foo!" 
foo() report = self.observer.asDict() self.assertEqual(report['timers']['foo_time'], 5) def testAverages(self): data = list(range(10)) for i in data: metrics.MetricTimeEvent.log('foo_time', i) report = self.observer.asDict() self.assertEqual( report['timers']['foo_time'], sum(data) / float(len(data))) class TestPeriodicChecks(TestMetricBase): def testPeriodicCheck(self): # fake out that there's no garbage (since we can't rely on Python # not having any garbage while running tests) self.patch(gc, 'garbage', []) clock = task.Clock() metrics.periodicCheck(_reactor=clock) clock.pump([0.1, 0.1, 0.1]) # We should have 0 reactor delay since we're using a fake clock report = self.observer.asDict() self.assertEqual(report['timers']['reactorDelay'], 0) self.assertEqual(report['counters']['gc.garbage'], 0) self.assertEqual(report['alarms']['gc.garbage'][0], 'OK') def testUncollectable(self): # make some fake garbage self.patch(gc, 'garbage', [1, 2]) clock = task.Clock() metrics.periodicCheck(_reactor=clock) clock.pump([0.1, 0.1, 0.1]) # We should have 0 reactor delay since we're using a fake clock report = self.observer.asDict() self.assertEqual(report['timers']['reactorDelay'], 0) self.assertEqual(report['counters']['gc.garbage'], 2) self.assertEqual(report['alarms']['gc.garbage'][0], 'WARN') def testGetRSS(self): self.assertTrue(metrics._get_rss() > 0) if sys.platform != 'linux': testGetRSS.skip = "only available on linux platforms" class TestReconfig(TestMetricBase): def testReconfig(self): observer = self.observer new_config = self.master.config # starts up without running tasks self.assertEqual(observer.log_task, None) self.assertEqual(observer.periodic_task, None) # enable log_interval new_config.metrics = dict(log_interval=10, periodic_interval=0) observer.reconfigServiceWithBuildbotConfig(new_config) self.assertTrue(observer.log_task) self.assertEqual(observer.periodic_task, None) # disable that and enable periodic_interval new_config.metrics = 
dict(periodic_interval=10, log_interval=0) observer.reconfigServiceWithBuildbotConfig(new_config) self.assertTrue(observer.periodic_task) self.assertEqual(observer.log_task, None) # Make the periodic check run self.reactor.pump([0.1]) # disable the whole listener new_config.metrics = None observer.reconfigServiceWithBuildbotConfig(new_config) self.assertFalse(observer.enabled) self.assertEqual(observer.log_task, None) self.assertEqual(observer.periodic_task, None) # disable both new_config.metrics = dict(periodic_interval=0, log_interval=0) observer.reconfigServiceWithBuildbotConfig(new_config) self.assertEqual(observer.log_task, None) self.assertEqual(observer.periodic_task, None) # enable both new_config.metrics = dict(periodic_interval=10, log_interval=10) observer.reconfigServiceWithBuildbotConfig(new_config) self.assertTrue(observer.log_task) self.assertTrue(observer.periodic_task) # (service will be stopped by tearDown) class _LogObserver: def __init__(self): self.events = [] def gotEvent(self, event): self.events.append(event) class TestReports(unittest.TestCase): def testMetricCountReport(self): handler = metrics.MetricCountHandler(None) handler.handle({}, metrics.MetricCountEvent('num_foo', 1)) self.assertEqual("Counter num_foo: 1", handler.report()) self.assertEqual({"counters": {"num_foo": 1}}, handler.asDict()) def testMetricTimeReport(self): handler = metrics.MetricTimeHandler(None) handler.handle({}, metrics.MetricTimeEvent('time_foo', 1)) self.assertEqual("Timer time_foo: 1", handler.report()) self.assertEqual({"timers": {"time_foo": 1}}, handler.asDict()) def testMetricAlarmReport(self): handler = metrics.MetricAlarmHandler(None) handler.handle({}, metrics.MetricAlarmEvent( 'alarm_foo', msg='Uh oh', level=metrics.ALARM_WARN)) self.assertEqual("WARN alarm_foo: Uh oh", handler.report()) self.assertEqual( {"alarms": {"alarm_foo": ("WARN", "Uh oh")}}, handler.asDict()) 
buildbot-3.4.0/master/buildbot/test/unit/process/test_properties.py000066400000000000000000002033131413250514000256370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from copy import deepcopy import mock from twisted.internet import defer from twisted.trial import unittest from zope.interface import implementer from buildbot.interfaces import IRenderable from buildbot.process.buildrequest import TempChange from buildbot.process.buildrequest import TempSourceStamp from buildbot.process.properties import FlattenList from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.properties import PropertiesMixin from buildbot.process.properties import Property from buildbot.process.properties import Transform from buildbot.process.properties import WithProperties from buildbot.process.properties import _Lazy from buildbot.process.properties import _Lookup from buildbot.process.properties import _SourceStampDict from buildbot.process.properties import renderer from buildbot.test.fake.fakebuild import FakeBuild from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.properties import ConstantRenderable class FakeSource: def __init__(self): self.branch = None self.codebase = '' self.project = '' self.repository = '' 
self.revision = None def asDict(self): ds = { 'branch': self.branch, 'codebase': self.codebase, 'project': self.project, 'repository': self.repository, 'revision': self.revision } return ds @implementer(IRenderable) class DeferredRenderable: def __init__(self): self.d = defer.Deferred() def getRenderingFor(self, build): return self.d def callback(self, value): self.d.callback(value) class TestPropertyMap(unittest.TestCase): """ Test the behavior of PropertyMap, using the external interface provided by WithProperties. """ def setUp(self): self.props = Properties( prop_str='a-string', prop_none=None, prop_list=['a', 'b'], prop_zero=0, prop_one=1, prop_false=False, prop_true=True, prop_empty='', ) self.build = FakeBuild(props=self.props) @defer.inlineCallbacks def doTestSimpleWithProperties(self, fmtstring, expect, **kwargs): res = yield self.build.render(WithProperties(fmtstring, **kwargs)) self.assertEqual(res, "{}".format(expect)) def testSimpleStr(self): return self.doTestSimpleWithProperties('%(prop_str)s', 'a-string') def testSimpleNone(self): # None is special-cased to become an empty string return self.doTestSimpleWithProperties('%(prop_none)s', '') def testSimpleList(self): return self.doTestSimpleWithProperties('%(prop_list)s', ['a', 'b']) def testSimpleZero(self): return self.doTestSimpleWithProperties('%(prop_zero)s', 0) def testSimpleOne(self): return self.doTestSimpleWithProperties('%(prop_one)s', 1) def testSimpleFalse(self): return self.doTestSimpleWithProperties('%(prop_false)s', False) def testSimpleTrue(self): return self.doTestSimpleWithProperties('%(prop_true)s', True) def testSimpleEmpty(self): return self.doTestSimpleWithProperties('%(prop_empty)s', '') def testSimpleUnset(self): d = self.build.render(WithProperties('%(prop_nosuch)s')) return self.assertFailure(d, KeyError) def testColonMinusSet(self): return self.doTestSimpleWithProperties('%(prop_str:-missing)s', 'a-string') def testColonMinusNone(self): # None is special-cased here, too 
return self.doTestSimpleWithProperties('%(prop_none:-missing)s', '') def testColonMinusZero(self): return self.doTestSimpleWithProperties('%(prop_zero:-missing)s', 0) def testColonMinusOne(self): return self.doTestSimpleWithProperties('%(prop_one:-missing)s', 1) def testColonMinusFalse(self): return self.doTestSimpleWithProperties('%(prop_false:-missing)s', False) def testColonMinusTrue(self): return self.doTestSimpleWithProperties('%(prop_true:-missing)s', True) def testColonMinusEmpty(self): return self.doTestSimpleWithProperties('%(prop_empty:-missing)s', '') def testColonMinusUnset(self): return self.doTestSimpleWithProperties('%(prop_nosuch:-missing)s', 'missing') def testColonTildeSet(self): return self.doTestSimpleWithProperties('%(prop_str:~missing)s', 'a-string') def testColonTildeNone(self): # None is special-cased *differently* for ~: return self.doTestSimpleWithProperties('%(prop_none:~missing)s', 'missing') def testColonTildeZero(self): return self.doTestSimpleWithProperties('%(prop_zero:~missing)s', 'missing') def testColonTildeOne(self): return self.doTestSimpleWithProperties('%(prop_one:~missing)s', 1) def testColonTildeFalse(self): return self.doTestSimpleWithProperties('%(prop_false:~missing)s', 'missing') def testColonTildeTrue(self): return self.doTestSimpleWithProperties('%(prop_true:~missing)s', True) def testColonTildeEmpty(self): return self.doTestSimpleWithProperties('%(prop_empty:~missing)s', 'missing') def testColonTildeUnset(self): return self.doTestSimpleWithProperties('%(prop_nosuch:~missing)s', 'missing') def testColonPlusSet(self): return self.doTestSimpleWithProperties('%(prop_str:+present)s', 'present') def testColonPlusNone(self): return self.doTestSimpleWithProperties('%(prop_none:+present)s', 'present') def testColonPlusZero(self): return self.doTestSimpleWithProperties('%(prop_zero:+present)s', 'present') def testColonPlusOne(self): return self.doTestSimpleWithProperties('%(prop_one:+present)s', 'present') def 
testColonPlusFalse(self): return self.doTestSimpleWithProperties('%(prop_false:+present)s', 'present') def testColonPlusTrue(self): return self.doTestSimpleWithProperties('%(prop_true:+present)s', 'present') def testColonPlusEmpty(self): return self.doTestSimpleWithProperties('%(prop_empty:+present)s', 'present') def testColonPlusUnset(self): return self.doTestSimpleWithProperties('%(prop_nosuch:+present)s', '') @defer.inlineCallbacks def testClearTempValues(self): yield self.doTestSimpleWithProperties('', '', prop_temp=lambda b: 'present') yield self.doTestSimpleWithProperties('%(prop_temp:+present)s', '') def testTempValue(self): return self.doTestSimpleWithProperties('%(prop_temp)s', 'present', prop_temp=lambda b: 'present') def testTempValueOverrides(self): return self.doTestSimpleWithProperties('%(prop_one)s', 2, prop_one=lambda b: 2) def testTempValueColonMinusSet(self): return self.doTestSimpleWithProperties('%(prop_one:-missing)s', 2, prop_one=lambda b: 2) def testTempValueColonMinusUnset(self): return self.doTestSimpleWithProperties('%(prop_nosuch:-missing)s', 'temp', prop_nosuch=lambda b: 'temp') def testTempValueColonTildeTrueSet(self): return self.doTestSimpleWithProperties('%(prop_false:~nontrue)s', 'temp', prop_false=lambda b: 'temp') def testTempValueColonTildeTrueUnset(self): return self.doTestSimpleWithProperties('%(prop_nosuch:~nontrue)s', 'temp', prop_nosuch=lambda b: 'temp') def testTempValueColonTildeFalseFalse(self): return self.doTestSimpleWithProperties('%(prop_false:~nontrue)s', 'nontrue', prop_false=lambda b: False) def testTempValueColonTildeTrueFalse(self): return self.doTestSimpleWithProperties('%(prop_true:~nontrue)s', True, prop_true=lambda b: False) def testTempValueColonTildeNoneFalse(self): return self.doTestSimpleWithProperties('%(prop_nosuch:~nontrue)s', 'nontrue', prop_nosuch=lambda b: False) def testTempValueColonTildeFalseZero(self): return self.doTestSimpleWithProperties('%(prop_false:~nontrue)s', 'nontrue', prop_false=lambda 
b: 0) def testTempValueColonTildeTrueZero(self): return self.doTestSimpleWithProperties('%(prop_true:~nontrue)s', True, prop_true=lambda b: 0) def testTempValueColonTildeNoneZero(self): return self.doTestSimpleWithProperties('%(prop_nosuch:~nontrue)s', 'nontrue', prop_nosuch=lambda b: 0) def testTempValueColonTildeFalseBlank(self): return self.doTestSimpleWithProperties('%(prop_false:~nontrue)s', 'nontrue', prop_false=lambda b: '') def testTempValueColonTildeTrueBlank(self): return self.doTestSimpleWithProperties('%(prop_true:~nontrue)s', True, prop_true=lambda b: '') def testTempValueColonTildeNoneBlank(self): return self.doTestSimpleWithProperties('%(prop_nosuch:~nontrue)s', 'nontrue', prop_nosuch=lambda b: '') def testTempValuePlusSetSet(self): return self.doTestSimpleWithProperties('%(prop_one:+set)s', 'set', prop_one=lambda b: 2) def testTempValuePlusUnsetSet(self): return self.doTestSimpleWithProperties('%(prop_nosuch:+set)s', 'set', prop_nosuch=lambda b: 1) class TestInterpolateConfigure(unittest.TestCase, ConfigErrorsMixin): """ Test that Interpolate reports errors in the interpolation string at configure time. 
""" def test_invalid_args_and_kwargs(self): with self.assertRaisesConfigError("Interpolate takes either positional"): Interpolate("%s %(foo)s", 1, foo=2) def test_invalid_selector(self): with self.assertRaisesConfigError( "invalid Interpolate selector 'garbage'"): Interpolate("%(garbage:test)s") def test_no_selector(self): with self.assertRaisesConfigError( "invalid Interpolate substitution without selector 'garbage'"): Interpolate("%(garbage)s") def test_invalid_default_type(self): with self.assertRaisesConfigError( "invalid Interpolate default type '@'"): Interpolate("%(prop:some_prop:@wacky)s") def test_nested_invalid_selector(self): with self.assertRaisesConfigError( "invalid Interpolate selector 'garbage'"): Interpolate("%(prop:some_prop:~%(garbage:test)s)s") def test_colon_ternary_missing_delimeter(self): with self.assertRaisesConfigError( "invalid Interpolate ternary expression 'one' with delimiter ':'"): Interpolate("echo '%(prop:P:?:one)s'") def test_colon_ternary_paren_delimiter(self): with self.assertRaisesConfigError( "invalid Interpolate ternary expression 'one(:)' with delimiter ':'"): Interpolate("echo '%(prop:P:?:one(:))s'") def test_colon_ternary_hash_bad_delimeter(self): with self.assertRaisesConfigError( "invalid Interpolate ternary expression 'one' with delimiter '|'"): Interpolate("echo '%(prop:P:#?|one)s'") def test_prop_invalid_character(self): with self.assertRaisesConfigError( "Property name must be alphanumeric for prop Interpolation 'a+a'"): Interpolate("echo '%(prop:a+a)s'") def test_kw_invalid_character(self): with self.assertRaisesConfigError( "Keyword must be alphanumeric for kw Interpolation 'a+a'"): Interpolate("echo '%(kw:a+a)s'") def test_src_codebase_invalid_character(self): with self.assertRaisesConfigError( "Codebase must be alphanumeric for src Interpolation 'a+a:a'"): Interpolate("echo '%(src:a+a:a)s'") def test_src_attr_invalid_character(self): with self.assertRaisesConfigError( "Attribute must be alphanumeric for src 
Interpolation 'a:a+a'"): Interpolate("echo '%(src:a:a+a)s'") def test_src_missing_attr(self): with self.assertRaisesConfigError( "Must specify both codebase and attr"): Interpolate("echo '%(src:a)s'") class TestInterpolatePositional(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) @defer.inlineCallbacks def test_string(self): command = Interpolate("test %s", "one fish") rendered = yield self.build.render(command) self.assertEqual(rendered, "test one fish") @defer.inlineCallbacks def test_twoString(self): command = Interpolate("test %s, %s", "one fish", "two fish") rendered = yield self.build.render(command) self.assertEqual(rendered, "test one fish, two fish") def test_deferred(self): renderable = DeferredRenderable() command = Interpolate("echo '%s'", renderable) d = self.build.render(command) d.addCallback(self.assertEqual, "echo 'red fish'") renderable.callback("red fish") return d @defer.inlineCallbacks def test_renderable(self): self.props.setProperty("buildername", "blue fish", "test") command = Interpolate("echo '%s'", Property("buildername")) rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'blue fish'") class TestInterpolateProperties(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) @defer.inlineCallbacks def test_properties(self): self.props.setProperty("buildername", "winbld", "test") command = Interpolate("echo buildby-%(prop:buildername)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-winbld") @defer.inlineCallbacks def test_properties_newline(self): self.props.setProperty("buildername", "winbld", "test") command = Interpolate("aa\n%(prop:buildername)s\nbb") rendered = yield self.build.render(command) self.assertEqual(rendered, "aa\nwinbld\nbb") @defer.inlineCallbacks def test_property_not_set(self): command = Interpolate("echo buildby-%(prop:buildername)s") rendered = yield 
self.build.render(command) self.assertEqual(rendered, "echo buildby-") @defer.inlineCallbacks def test_property_colon_minus(self): command = Interpolate("echo buildby-%(prop:buildername:-blddef)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-blddef") @defer.inlineCallbacks def test_deepcopy(self): # After a deepcopy, Interpolate instances used to lose track # that they didn't have a ``hasKey`` value # see http://trac.buildbot.net/ticket/3505 self.props.setProperty("buildername", "linux4", "test") command = deepcopy( Interpolate("echo buildby-%(prop:buildername:-blddef)s")) rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-linux4") @defer.inlineCallbacks def test_property_colon_tilde_true(self): self.props.setProperty("buildername", "winbld", "test") command = Interpolate("echo buildby-%(prop:buildername:~blddef)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-winbld") @defer.inlineCallbacks def test_property_colon_tilde_false(self): self.props.setProperty("buildername", "", "test") command = Interpolate("echo buildby-%(prop:buildername:~blddef)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-blddef") @defer.inlineCallbacks def test_property_colon_plus(self): self.props.setProperty("project", "proj1", "test") command = Interpolate("echo %(prop:project:+projectdefined)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo projectdefined") @defer.inlineCallbacks def test_nested_property(self): self.props.setProperty("project", "so long!", "test") command = Interpolate("echo '%(prop:missing:~%(prop:project)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'so long!'") @defer.inlineCallbacks def test_property_substitute_recursively(self): self.props.setProperty("project", "proj1", "test") command = Interpolate("echo '%(prop:no_such:-%(prop:project)s)s'") 
rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'proj1'") @defer.inlineCallbacks def test_property_colon_ternary_present(self): self.props.setProperty("project", "proj1", "test") command = Interpolate("echo %(prop:project:?:defined:missing)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo defined") @defer.inlineCallbacks def test_property_colon_ternary_missing(self): command = Interpolate("echo %(prop:project:?|defined|missing)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo missing") @defer.inlineCallbacks def test_property_colon_ternary_hash_true(self): self.props.setProperty("project", "winbld", "test") command = Interpolate("echo buildby-%(prop:project:#?:T:F)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-T") @defer.inlineCallbacks def test_property_colon_ternary_hash_false(self): self.props.setProperty("project", "", "test") command = Interpolate("echo buildby-%(prop:project:#?|T|F)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-F") @defer.inlineCallbacks def test_property_colon_ternary_substitute_recursively_true(self): self.props.setProperty("P", "present", "test") self.props.setProperty("one", "proj1", "test") self.props.setProperty("two", "proj2", "test") command = Interpolate("echo '%(prop:P:?|%(prop:one)s|%(prop:two)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'proj1'") @defer.inlineCallbacks def test_property_colon_ternary_substitute_recursively_false(self): self.props.setProperty("one", "proj1", "test") self.props.setProperty("two", "proj2", "test") command = Interpolate("echo '%(prop:P:?|%(prop:one)s|%(prop:two)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'proj2'") @defer.inlineCallbacks def test_property_colon_ternary_substitute_recursively_delimited_true(self): self.props.setProperty("P", "present", 
"test") self.props.setProperty("one", "proj1", "test") self.props.setProperty("two", "proj2", "test") command = Interpolate( "echo '%(prop:P:?|%(prop:one:?|true|false)s|%(prop:two:?|false|true)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'true'") @defer.inlineCallbacks def test_property_colon_ternary_substitute_recursively_delimited_false(self): self.props.setProperty("one", "proj1", "test") self.props.setProperty("two", "proj2", "test") command = Interpolate( "echo '%(prop:P:?|%(prop:one:?|true|false)s|%(prop:two:?|false|true)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'false'") class TestInterpolateSrc(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) sa = FakeSource() wfb = FakeSource() sc = FakeSource() sa.repository = 'cvs://A..' sa.codebase = 'cbA' sa.project = "Project" self.build.sources['cbA'] = sa wfb.repository = 'cvs://B..' wfb.codebase = 'cbB' wfb.project = "Project" self.build.sources['cbB'] = wfb sc.repository = 'cvs://C..' sc.codebase = 'cbC' sc.project = None self.build.sources['cbC'] = sc @defer.inlineCallbacks def test_src(self): command = Interpolate("echo %(src:cbB:repository)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B..") @defer.inlineCallbacks def test_src_src(self): command = Interpolate( "echo %(src:cbB:repository)s %(src:cbB:project)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B.. 
Project") @defer.inlineCallbacks def test_src_attr_empty(self): command = Interpolate("echo %(src:cbC:project)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ") @defer.inlineCallbacks def test_src_attr_codebase_notfound(self): command = Interpolate("echo %(src:unknown_codebase:project)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ") @defer.inlineCallbacks def test_src_colon_plus_false(self): command = Interpolate("echo '%(src:cbD:project:+defaultrepo)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ''") @defer.inlineCallbacks def test_src_colon_plus_true(self): command = Interpolate("echo '%(src:cbB:project:+defaultrepo)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'defaultrepo'") @defer.inlineCallbacks def test_src_colon_minus(self): command = Interpolate("echo %(src:cbB:nonattr:-defaultrepo)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo defaultrepo") @defer.inlineCallbacks def test_src_colon_minus_false(self): command = Interpolate("echo '%(src:cbC:project:-noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ''") @defer.inlineCallbacks def test_src_colon_minus_true(self): command = Interpolate("echo '%(src:cbB:project:-noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'Project'") @defer.inlineCallbacks def test_src_colon_minus_codebase_notfound(self): command = Interpolate( "echo '%(src:unknown_codebase:project:-noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'noproject'") @defer.inlineCallbacks def test_src_colon_tilde_true(self): command = Interpolate("echo '%(src:cbB:project:~noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'Project'") @defer.inlineCallbacks def test_src_colon_tilde_false(self): command = Interpolate("echo 
'%(src:cbC:project:~noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'noproject'") @defer.inlineCallbacks def test_src_colon_tilde_false_src_as_replacement(self): command = Interpolate( "echo '%(src:cbC:project:~%(src:cbA:project)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'Project'") @defer.inlineCallbacks def test_src_colon_tilde_codebase_notfound(self): command = Interpolate( "echo '%(src:unknown_codebase:project:~noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'noproject'") class TestInterpolateKwargs(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) sa = FakeSource() sa.repository = 'cvs://A..' sa.codebase = 'cbA' sa.project = None sa.branch = "default" self.build.sources['cbA'] = sa @defer.inlineCallbacks def test_kwarg(self): command = Interpolate("echo %(kw:repository)s", repository="cvs://A..") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://A..") @defer.inlineCallbacks def test_kwarg_kwarg(self): command = Interpolate("echo %(kw:repository)s %(kw:branch)s", repository="cvs://A..", branch="default") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://A.. 
default") @defer.inlineCallbacks def test_kwarg_not_mapped(self): command = Interpolate("echo %(kw:repository)s", project="projectA") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ") @defer.inlineCallbacks def test_kwarg_colon_minus_not_available(self): command = Interpolate("echo %(kw:repository)s", project="projectA") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ") @defer.inlineCallbacks def test_kwarg_colon_minus_not_available_default(self): command = Interpolate( "echo %(kw:repository:-cvs://A..)s", project="projectA") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://A..") @defer.inlineCallbacks def test_kwarg_colon_minus_available(self): command = Interpolate( "echo %(kw:repository:-cvs://A..)s", repository="cvs://B..") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B..") @defer.inlineCallbacks def test_kwarg_colon_tilde_true(self): command = Interpolate( "echo %(kw:repository:~cvs://B..)s", repository="cvs://A..") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://A..") @defer.inlineCallbacks def test_kwarg_colon_tilde_false(self): command = Interpolate( "echo %(kw:repository:~cvs://B..)s", repository="") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B..") @defer.inlineCallbacks def test_kwarg_colon_tilde_none(self): command = Interpolate( "echo %(kw:repository:~cvs://B..)s", repository=None) rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B..") @defer.inlineCallbacks def test_kwarg_colon_plus_false(self): command = Interpolate( "echo %(kw:repository:+cvs://B..)s", project="project") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ") @defer.inlineCallbacks def test_kwarg_colon_plus_true(self): command = Interpolate( "echo %(kw:repository:+cvs://B..)s", repository=None) rendered = yield 
self.build.render(command) self.assertEqual(rendered, "echo cvs://B..") @defer.inlineCallbacks def test_kwargs_colon_minus_false_src_as_replacement(self): command = Interpolate( "echo '%(kw:text:-%(src:cbA:branch)s)s'", notext='ddd') rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'default'") @defer.inlineCallbacks def test_kwargs_renderable(self): command = Interpolate( "echo '%(kw:test)s'", test=ConstantRenderable('testing')) rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'testing'") def test_kwargs_deferred(self): renderable = DeferredRenderable() command = Interpolate("echo '%(kw:test)s'", test=renderable) d = self.build.render(command) d.addCallback(self.assertEqual, "echo 'testing'") renderable.callback('testing') def test_kwarg_deferred(self): renderable = DeferredRenderable() command = Interpolate("echo '%(kw:project)s'", project=renderable) d = self.build.render(command) d.addCallback(self.assertEqual, "echo 'testing'") renderable.callback('testing') def test_nested_kwarg_deferred(self): renderable = DeferredRenderable() command = Interpolate( "echo '%(kw:missing:~%(kw:fishy)s)s'", missing=renderable, fishy="so long!") d = self.build.render(command) d.addCallback(self.assertEqual, "echo 'so long!'") renderable.callback(False) return d class TestWithProperties(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) def testInvalidParams(self): with self.assertRaises(ValueError): WithProperties("%s %(foo)s", 1, foo=2) @defer.inlineCallbacks def testBasic(self): # test basic substitution with WithProperties self.props.setProperty("revision", "47", "test") command = WithProperties("build-%s.tar.gz", "revision") res = yield self.build.render(command) self.assertEqual(res, "build-47.tar.gz") @defer.inlineCallbacks def testDict(self): # test dict-style substitution with WithProperties self.props.setProperty("other", "foo", "test") command = 
WithProperties("build-%(other)s.tar.gz") res = yield self.build.render(command) self.assertEqual(res, "build-foo.tar.gz") @defer.inlineCallbacks def testDictColonMinus(self): # test dict-style substitution with WithProperties self.props.setProperty("prop1", "foo", "test") command = WithProperties( "build-%(prop1:-empty)s-%(prop2:-empty)s.tar.gz") res = yield self.build.render(command) self.assertEqual(res, "build-foo-empty.tar.gz") @defer.inlineCallbacks def testDictColonPlus(self): # test dict-style substitution with WithProperties self.props.setProperty("prop1", "foo", "test") command = WithProperties( "build-%(prop1:+exists)s-%(prop2:+exists)s.tar.gz") res = yield self.build.render(command) self.assertEqual(res, "build-exists-.tar.gz") @defer.inlineCallbacks def testEmpty(self): # None should render as '' self.props.setProperty("empty", None, "test") command = WithProperties("build-%(empty)s.tar.gz") res = yield self.build.render(command) self.assertEqual(res, "build-.tar.gz") @defer.inlineCallbacks def testRecursiveList(self): self.props.setProperty("x", 10, "test") self.props.setProperty("y", 20, "test") command = [WithProperties("%(x)s %(y)s"), "and", WithProperties("%(y)s %(x)s")] res = yield self.build.render(command) self.assertEqual(res, ["10 20", "and", "20 10"]) @defer.inlineCallbacks def testRecursiveTuple(self): self.props.setProperty("x", 10, "test") self.props.setProperty("y", 20, "test") command = (WithProperties("%(x)s %(y)s"), "and", WithProperties("%(y)s %(x)s")) res = yield self.build.render(command) self.assertEqual(res, ("10 20", "and", "20 10")) @defer.inlineCallbacks def testRecursiveDict(self): self.props.setProperty("x", 10, "test") self.props.setProperty("y", 20, "test") command = {WithProperties("%(x)s %(y)s"): WithProperties("%(y)s %(x)s")} res = yield self.build.render(command) self.assertEqual(res, {"10 20": "20 10"}) @defer.inlineCallbacks def testLambdaSubst(self): command = WithProperties('%(foo)s', foo=lambda _: 'bar') res = 
yield self.build.render(command) self.assertEqual(res, 'bar') @defer.inlineCallbacks def testLambdaHasattr(self): command = WithProperties('%(foo)s', foo=lambda b: b.hasProperty('x') and 'x' or 'y') res = yield self.build.render(command) self.assertEqual(res, 'y') @defer.inlineCallbacks def testLambdaOverride(self): self.props.setProperty('x', 10, 'test') command = WithProperties('%(x)s', x=lambda _: 20) res = yield self.build.render(command) self.assertEqual(res, '20') def testLambdaCallable(self): with self.assertRaises(ValueError): WithProperties('%(foo)s', foo='bar') @defer.inlineCallbacks def testLambdaUseExisting(self): self.props.setProperty('x', 10, 'test') self.props.setProperty('y', 20, 'test') command = WithProperties( '%(z)s', z=lambda props: props.getProperty('x') + props.getProperty('y')) res = yield self.build.render(command) self.assertEqual(res, '30') @defer.inlineCallbacks def testColon(self): self.props.setProperty('some:property', 10, 'test') command = WithProperties('%(some:property:-with-default)s') res = yield self.build.render(command) self.assertEqual(res, '10') @defer.inlineCallbacks def testColon_default(self): command = WithProperties('%(some:property:-with-default)s') res = yield self.build.render(command) self.assertEqual(res, 'with-default') @defer.inlineCallbacks def testColon_colon(self): command = WithProperties('%(some:property:-with:default)s') res = yield self.build.render(command) self.assertEqual(res, 'with:default') class TestProperties(unittest.TestCase): def setUp(self): self.props = Properties() def testDictBehavior(self): # note that dictionary-like behavior is deprecated and not exposed to # users! 
self.props.setProperty("do-tests", 1, "scheduler") self.props.setProperty("do-install", 2, "scheduler") self.assertTrue('do-tests' in self.props) self.assertEqual(self.props['do-tests'], 1) self.assertEqual(self.props['do-install'], 2) with self.assertRaises(KeyError): self.props['do-nothing'] self.assertEqual(self.props.getProperty('do-install'), 2) self.assertIn('do-tests', self.props) self.assertNotIn('missing-do-tests', self.props) def testAsList(self): self.props.setProperty("happiness", 7, "builder") self.props.setProperty("flames", True, "tester") self.assertEqual(sorted(self.props.asList()), [('flames', True, 'tester'), ('happiness', 7, 'builder')]) def testAsDict(self): self.props.setProperty("msi_filename", "product.msi", 'packager') self.props.setProperty("dmg_filename", "product.dmg", 'packager') self.assertEqual(self.props.asDict(), dict(msi_filename=('product.msi', 'packager'), dmg_filename=('product.dmg', 'packager'))) def testUpdate(self): self.props.setProperty("x", 24, "old") newprops = {'a': 1, 'b': 2} self.props.update(newprops, "new") self.assertEqual(self.props.getProperty('x'), 24) self.assertEqual(self.props.getPropertySource('x'), 'old') self.assertEqual(self.props.getProperty('a'), 1) self.assertEqual(self.props.getPropertySource('a'), 'new') def testUpdateRuntime(self): self.props.setProperty("x", 24, "old") newprops = {'a': 1, 'b': 2} self.props.update(newprops, "new", runtime=True) self.assertEqual(self.props.getProperty('x'), 24) self.assertEqual(self.props.getPropertySource('x'), 'old') self.assertEqual(self.props.getProperty('a'), 1) self.assertEqual(self.props.getPropertySource('a'), 'new') self.assertEqual(self.props.runtime, set(['a', 'b'])) def testUpdateFromProperties(self): self.props.setProperty("a", 94, "old") self.props.setProperty("x", 24, "old") newprops = Properties() newprops.setProperty('a', 1, "new") newprops.setProperty('b', 2, "new") self.props.updateFromProperties(newprops) 
self.assertEqual(self.props.getProperty('x'), 24) self.assertEqual(self.props.getPropertySource('x'), 'old') self.assertEqual(self.props.getProperty('a'), 1) self.assertEqual(self.props.getPropertySource('a'), 'new') def testUpdateFromPropertiesNoRuntime(self): self.props.setProperty("a", 94, "old") self.props.setProperty("b", 84, "old") self.props.setProperty("x", 24, "old") newprops = Properties() newprops.setProperty('a', 1, "new", runtime=True) newprops.setProperty('b', 2, "new", runtime=False) newprops.setProperty('c', 3, "new", runtime=True) newprops.setProperty('d', 3, "new", runtime=False) self.props.updateFromPropertiesNoRuntime(newprops) self.assertEqual(self.props.getProperty('a'), 94) self.assertEqual(self.props.getPropertySource('a'), 'old') self.assertEqual(self.props.getProperty('b'), 2) self.assertEqual(self.props.getPropertySource('b'), 'new') self.assertEqual(self.props.getProperty('c'), None) # not updated self.assertEqual(self.props.getProperty('d'), 3) self.assertEqual(self.props.getPropertySource('d'), 'new') self.assertEqual(self.props.getProperty('x'), 24) self.assertEqual(self.props.getPropertySource('x'), 'old') def test_setProperty_notJsonable(self): with self.assertRaises(TypeError): self.props.setProperty("project", object, "test") # IProperties methods def test_getProperty(self): self.props.properties['p1'] = (['p', 1], 'test') self.assertEqual(self.props.getProperty('p1'), ['p', 1]) def test_getProperty_default_None(self): self.assertEqual(self.props.getProperty('p1'), None) def test_getProperty_default(self): self.assertEqual(self.props.getProperty('p1', 2), 2) def test_hasProperty_false(self): self.assertFalse(self.props.hasProperty('x')) def test_hasProperty_true(self): self.props.properties['x'] = (False, 'test') self.assertTrue(self.props.hasProperty('x')) def test_has_key_false(self): self.assertFalse('x' in self.props) def test_setProperty(self): self.props.setProperty('x', 'y', 'test') 
self.assertEqual(self.props.properties['x'], ('y', 'test')) self.assertNotIn('x', self.props.runtime) def test_setProperty_runtime(self): self.props.setProperty('x', 'y', 'test', runtime=True) self.assertEqual(self.props.properties['x'], ('y', 'test')) self.assertIn('x', self.props.runtime) def test_setProperty_no_source(self): # pylint: disable=no-value-for-parameter with self.assertRaises(TypeError): self.props.setProperty('x', 'y') def test_getProperties(self): self.assertIdentical(self.props.getProperties(), self.props) def test_getBuild(self): self.assertIdentical(self.props.getBuild(), self.props.build) def test_unset_sourcestamps(self): with self.assertRaises(AttributeError): self.props.sourcestamps() def test_unset_changes(self): with self.assertRaises(AttributeError): self.props.changes() with self.assertRaises(AttributeError): self.props.files() def test_build_attributes(self): build = FakeBuild(self.props) change = TempChange({'author': 'me', 'files': ['main.c']}) ss = TempSourceStamp({'branch': 'master'}) ss.changes = [change] build.sources[''] = ss self.assertEqual(self.props.sourcestamps[0]['branch'], 'master') self.assertEqual(self.props.changes[0]['author'], 'me') self.assertEqual(self.props.files[0], 'main.c') def test_own_attributes(self): self.props.sourcestamps = [{'branch': 'master'}] self.props.changes = [{'author': 'me', 'files': ['main.c']}] self.assertEqual(self.props.sourcestamps[0]['branch'], 'master') self.assertEqual(self.props.changes[0]['author'], 'me') self.assertEqual(self.props.files[0], 'main.c') @defer.inlineCallbacks def test_render(self): @implementer(IRenderable) class Renderable: def getRenderingFor(self, props): return props.getProperty('x') + 'z' self.props.setProperty('x', 'y', 'test') res = yield self.props.render(Renderable()) self.assertEqual(res, 'yz') class MyPropertiesThing(PropertiesMixin): set_runtime_properties = True def getProperties(self): return self.properties class TestPropertiesMixin(unittest.TestCase): def 
setUp(self): self.mp = MyPropertiesThing() self.mp.properties = mock.Mock() def test_getProperty(self): self.mp.getProperty('abc') self.mp.properties.getProperty.assert_called_with('abc', None) def xtest_getProperty_default(self): self.mp.getProperty('abc', 'def') self.mp.properties.getProperty.assert_called_with('abc', 'def') def test_hasProperty(self): self.mp.properties.hasProperty.return_value = True self.assertTrue(self.mp.hasProperty('abc')) self.mp.properties.hasProperty.assert_called_with('abc') def test_has_key(self): self.mp.properties.hasProperty.return_value = True # getattr because pep8 doesn't like calls to has_key self.assertTrue(getattr(self.mp, 'has_key')('abc')) self.mp.properties.hasProperty.assert_called_with('abc') def test_setProperty(self): self.mp.setProperty('abc', 'def', 'src') self.mp.properties.setProperty.assert_called_with('abc', 'def', 'src', runtime=True) def test_setProperty_no_source(self): # this compatibility is maintained for old code self.mp.setProperty('abc', 'def') self.mp.properties.setProperty.assert_called_with('abc', 'def', 'Unknown', runtime=True) def test_render(self): self.mp.render([1, 2]) self.mp.properties.render.assert_called_with([1, 2]) class TestProperty(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) @defer.inlineCallbacks def testIntProperty(self): self.props.setProperty("do-tests", 1, "scheduler") value = Property("do-tests") res = yield self.build.render(value) self.assertEqual(res, 1) @defer.inlineCallbacks def testStringProperty(self): self.props.setProperty("do-tests", "string", "scheduler") value = Property("do-tests") res = yield self.build.render(value) self.assertEqual(res, "string") @defer.inlineCallbacks def testMissingProperty(self): value = Property("do-tests") res = yield self.build.render(value) self.assertEqual(res, None) @defer.inlineCallbacks def testDefaultValue(self): value = Property("do-tests", default="Hello!") res = yield 
self.build.render(value) self.assertEqual(res, "Hello!") @defer.inlineCallbacks def testDefaultValueNested(self): self.props.setProperty("xxx", 'yyy', "scheduler") value = Property("do-tests", default=WithProperties("a-%(xxx)s-b")) res = yield self.build.render(value) self.assertEqual(res, "a-yyy-b") @defer.inlineCallbacks def testIgnoreDefaultValue(self): self.props.setProperty("do-tests", "string", "scheduler") value = Property("do-tests", default="Hello!") res = yield self.build.render(value) self.assertEqual(res, "string") @defer.inlineCallbacks def testIgnoreFalseValue(self): self.props.setProperty("do-tests-string", "", "scheduler") self.props.setProperty("do-tests-int", 0, "scheduler") self.props.setProperty("do-tests-list", [], "scheduler") self.props.setProperty("do-tests-None", None, "scheduler") value = [Property("do-tests-string", default="Hello!"), Property("do-tests-int", default="Hello!"), Property("do-tests-list", default="Hello!"), Property("do-tests-None", default="Hello!")] res = yield self.build.render(value) self.assertEqual(res, ["Hello!"] * 4) @defer.inlineCallbacks def testDefaultWhenFalse(self): self.props.setProperty("do-tests-string", "", "scheduler") self.props.setProperty("do-tests-int", 0, "scheduler") self.props.setProperty("do-tests-list", [], "scheduler") self.props.setProperty("do-tests-None", None, "scheduler") value = [Property("do-tests-string", default="Hello!", defaultWhenFalse=False), Property( "do-tests-int", default="Hello!", defaultWhenFalse=False), Property( "do-tests-list", default="Hello!", defaultWhenFalse=False), Property("do-tests-None", default="Hello!", defaultWhenFalse=False)] res = yield self.build.render(value) self.assertEqual(res, ["", 0, [], None]) def testDeferredDefault(self): default = DeferredRenderable() value = Property("no-such-property", default) d = self.build.render(value) d.addCallback(self.assertEqual, "default-value") default.callback("default-value") return d @defer.inlineCallbacks def 
testFlattenList(self): self.props.setProperty("do-tests", "string", "scheduler") value = FlattenList([Property("do-tests"), ["bla"]]) res = yield self.build.render(value) self.assertEqual(res, ["string", "bla"]) @defer.inlineCallbacks def testFlattenListAdd(self): self.props.setProperty("do-tests", "string", "scheduler") value = FlattenList([Property("do-tests"), ["bla"]]) value = value + FlattenList([Property("do-tests"), ["bla"]]) res = yield self.build.render(value) self.assertEqual(res, ["string", "bla", "string", "bla"]) @defer.inlineCallbacks def testFlattenListAdd2(self): self.props.setProperty("do-tests", "string", "scheduler") value = FlattenList([Property("do-tests"), ["bla"]]) value = value + [Property("do-tests"), ["bla"]] res = yield self.build.render(value) self.assertEqual(res, ["string", "bla", "string", "bla"]) @defer.inlineCallbacks def testCompEq(self): self.props.setProperty("do-tests", "string", "scheduler") result = yield self.build.render(Property("do-tests") == "string") self.assertEqual(result, True) @defer.inlineCallbacks def testCompNe(self): self.props.setProperty("do-tests", "not-string", "scheduler") result = yield self.build.render(Property("do-tests") != "string") self.assertEqual(result, True) @defer.inlineCallbacks def testCompLt(self): self.props.setProperty("do-tests", 1, "scheduler") x = Property("do-tests") < 2 self.assertEqual(repr(x), 'Property(do-tests) < 2') result = yield self.build.render(x) self.assertEqual(result, True) @defer.inlineCallbacks def testCompLe(self): self.props.setProperty("do-tests", 1, "scheduler") result = yield self.build.render(Property("do-tests") <= 2) self.assertEqual(result, True) @defer.inlineCallbacks def testCompGt(self): self.props.setProperty("do-tests", 3, "scheduler") result = yield self.build.render(Property("do-tests") > 2) self.assertEqual(result, True) @defer.inlineCallbacks def testCompGe(self): self.props.setProperty("do-tests", 3, "scheduler") result = yield 
self.build.render(Property("do-tests") >= 2) self.assertEqual(result, True) @defer.inlineCallbacks def testStringCompEq(self): self.props.setProperty("do-tests", "string", "scheduler") test_string = "string" result = yield self.build.render(test_string == Property("do-tests")) self.assertEqual(result, True) @defer.inlineCallbacks def testIntCompLe(self): self.props.setProperty("do-tests", 1, "scheduler") test_int = 1 result = yield self.build.render(test_int <= Property("do-tests")) self.assertEqual(result, True) @defer.inlineCallbacks def testPropCompGe(self): self.props.setProperty("do-tests", 1, "scheduler") result = yield self.build.render(Property("do-tests") >= Property("do-tests")) self.assertEqual(result, True) @defer.inlineCallbacks def testPropAdd(self): self.props.setProperty("do-tests", 1, "scheduler") result = yield self.build.render(Property("do-tests") + Property("do-tests")) self.assertEqual(result, 2) @defer.inlineCallbacks def testPropSub(self): self.props.setProperty("do-tests", 1, "scheduler") result = yield self.build.render(Property("do-tests") - Property("do-tests")) self.assertEqual(result, 0) @defer.inlineCallbacks def testPropDiv(self): self.props.setProperty("do-tests", 1, "scheduler") self.props.setProperty("do-tests2", 3, "scheduler") result = yield self.build.render(Property("do-tests") / Property("do-tests2")) self.assertEqual(result, 1 / 3) @defer.inlineCallbacks def testPropFDiv(self): self.props.setProperty("do-tests", 5, "scheduler") self.props.setProperty("do-tests2", 2, "scheduler") result = yield self.build.render(Property("do-tests") // Property("do-tests2")) self.assertEqual(result, 2) @defer.inlineCallbacks def testPropMod(self): self.props.setProperty("do-tests", 5, "scheduler") self.props.setProperty("do-tests2", 3, "scheduler") result = yield self.build.render(Property("do-tests") % Property("do-tests2")) self.assertEqual(result, 2) @defer.inlineCallbacks def testPropMult(self): self.props.setProperty("do-tests", 2, 
"scheduler") result = yield self.build.render(Property("do-tests") * Interpolate("%(prop:do-tests)s")) self.assertEqual(result, '22') @defer.inlineCallbacks def testPropIn(self): self.props.setProperty("do-tests", 2, "scheduler") result = yield self.build.render(Property("do-tests").in_([1, 2])) self.assertEqual(result, True) @defer.inlineCallbacks def testPropIn2(self): self.props.setProperty("do-tests", 2, "scheduler") result = yield self.build.render(Property("do-tests").in_([1, 3])) self.assertEqual(result, False) class TestRenderableAdapters(unittest.TestCase): """ Tests for list, tuple and dict renderers. """ def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) def test_list_deferred(self): r1 = DeferredRenderable() r2 = DeferredRenderable() d = self.build.render([r1, r2]) d.addCallback(self.assertEqual, ["lispy", "lists"]) r2.callback("lists") r1.callback("lispy") return d def test_tuple_deferred(self): r1 = DeferredRenderable() r2 = DeferredRenderable() d = self.build.render((r1, r2)) d.addCallback(self.assertEqual, ("totally", "tupled")) r2.callback("tupled") r1.callback("totally") return d def test_dict(self): r1 = DeferredRenderable() r2 = DeferredRenderable() k1 = DeferredRenderable() k2 = DeferredRenderable() d = self.build.render({k1: r1, k2: r2}) d.addCallback(self.assertEqual, {"lock": "load", "dict": "lookup"}) k1.callback("lock") r1.callback("load") k2.callback("dict") r2.callback("lookup") return d class Renderer(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) @defer.inlineCallbacks def test_renderer(self): self.props.setProperty("x", "X", "test") def rend(p): return 'x{}x'.format(p.getProperty('x')) res = yield self.build.render(renderer(rend)) self.assertEqual('xXx', res) @defer.inlineCallbacks def test_renderer_called(self): # it's tempting to try to call the decorated function. Don't do that. # It's not a function anymore. 
def rend(p): return 'x' with self.assertRaises(TypeError): yield self.build.render(renderer(rend)('y')) @defer.inlineCallbacks def test_renderer_decorator(self): self.props.setProperty("x", "X", "test") @renderer def rend(p): return 'x{}x'.format(p.getProperty('x')) res = yield self.build.render(rend) self.assertEqual('xXx', res) @defer.inlineCallbacks def test_renderer_deferred(self): self.props.setProperty("x", "X", "test") def rend(p): return defer.succeed('y{}y'.format(p.getProperty('x'))) res = yield self.build.render(renderer(rend)) self.assertEqual('yXy', res) @defer.inlineCallbacks def test_renderer_fails(self): @defer.inlineCallbacks def rend(p): raise RuntimeError("oops") with self.assertRaises(RuntimeError): yield self.build.render(renderer(rend)) @defer.inlineCallbacks def test_renderer_recursive(self): self.props.setProperty("x", "X", "test") def rend(p): return Interpolate("x%(prop:x)sx") ret = yield self.build.render(renderer(rend)) self.assertEqual('xXx', ret) def test_renderer_repr(self): @renderer def myrend(p): pass self.assertIn('renderer(', repr(myrend)) # py3 and py2 do not have the same way of repr functions # but they always contain the name of function self.assertIn('myrend', repr(myrend)) @defer.inlineCallbacks def test_renderer_with_state(self): self.props.setProperty("x", "X", "test") def rend(p, arg, kwarg='y'): return 'x-{}-{}-{}'.format(p.getProperty('x'), arg, kwarg) res = yield self.build.render(renderer(rend).withArgs('a', kwarg='kw')) self.assertEqual('x-X-a-kw', res) @defer.inlineCallbacks def test_renderer_with_state_called(self): # it's tempting to try to call the decorated function. Don't do that. # It's not a function anymore. 
def rend(p, arg, kwarg='y'): return 'x' with self.assertRaises(TypeError): rend_with_args = renderer(rend).withArgs('a', kwarg='kw') yield self.build.render(rend_with_args('y')) @defer.inlineCallbacks def test_renderer_with_state_renders_args(self): self.props.setProperty("x", "X", "test") self.props.setProperty('arg', 'ARG', 'test2') self.props.setProperty('kw', 'KW', 'test3') def rend(p, arg, kwarg='y'): return 'x-{}-{}-{}'.format(p.getProperty('x'), arg, kwarg) res = yield self.build.render( renderer(rend).withArgs(Property('arg'), kwarg=Property('kw'))) self.assertEqual('x-X-ARG-KW', res) @defer.inlineCallbacks def test_renderer_decorator_with_state(self): self.props.setProperty("x", "X", "test") @renderer def rend(p, arg, kwarg='y'): return 'x-{}-{}-{}'.format(p.getProperty('x'), arg, kwarg) res = yield self.build.render(rend.withArgs('a', kwarg='kw')) self.assertEqual('x-X-a-kw', res) @defer.inlineCallbacks def test_renderer_decorator_with_state_does_not_share_state(self): self.props.setProperty("x", "X", "test") @renderer def rend(p, *args, **kwargs): return 'x-{}-{}-{}'.format(p.getProperty('x'), str(args), str(kwargs)) rend1 = rend.withArgs('a', kwarg1='kw1') rend2 = rend.withArgs('b', kwarg2='kw2') res1 = yield self.build.render(rend1) res2 = yield self.build.render(rend2) self.assertEqual('x-X-(\'a\',)-{\'kwarg1\': \'kw1\'}', res1) self.assertEqual('x-X-(\'b\',)-{\'kwarg2\': \'kw2\'}', res2) @defer.inlineCallbacks def test_renderer_deferred_with_state(self): self.props.setProperty("x", "X", "test") def rend(p, arg, kwarg='y'): return defer.succeed('x-{}-{}-{}'.format(p.getProperty('x'), arg, kwarg)) res = yield self.build.render(renderer(rend).withArgs('a', kwarg='kw')) self.assertEqual('x-X-a-kw', res) @defer.inlineCallbacks def test_renderer_fails_with_state(self): self.props.setProperty("x", "X", "test") def rend(p, arg, kwarg='y'): raise RuntimeError('oops') with self.assertRaises(RuntimeError): yield self.build.render(renderer(rend).withArgs('a', 
kwarg='kw')) @defer.inlineCallbacks def test_renderer_recursive_with_state(self): self.props.setProperty("x", "X", "test") def rend(p, arg, kwarg='y'): return Interpolate('x-%(prop:x)s-%(kw:arg)s-%(kw:kwarg)s', arg=arg, kwarg=kwarg) res = yield self.build.render(renderer(rend).withArgs('a', kwarg='kw')) self.assertEqual('x-X-a-kw', res) def test_renderer_repr_with_state(self): @renderer def rend(p): pass rend = rend.withArgs('a', kwarg='kw') # pylint: disable=assignment-from-no-return self.assertIn('renderer(', repr(rend)) # py3 and py2 do not have the same way of repr functions # but they always contain the name of function self.assertIn('args=[\'a\']', repr(rend)) self.assertIn('kwargs={\'kwarg\': \'kw\'}', repr(rend)) @defer.inlineCallbacks def test_interpolate_worker(self): self.build.workerforbuilder.worker.info.setProperty('test', 'testvalue', 'Worker') rend = yield self.build.render(Interpolate("%(worker:test)s")) self.assertEqual(rend, "testvalue") class Compare(unittest.TestCase): def test_WithProperties_lambda(self): self.assertNotEqual(WithProperties("%(key)s", key=lambda p: 'val'), WithProperties( "%(key)s", key=lambda p: 'val')) def rend(p): return "val" self.assertEqual( WithProperties("%(key)s", key=rend), WithProperties("%(key)s", key=rend)) self.assertNotEqual( WithProperties("%(key)s", key=rend), WithProperties("%(key)s", otherkey=rend)) def test_WithProperties_positional(self): self.assertNotEqual( WithProperties("%s", 'key'), WithProperties("%s", 'otherkey')) self.assertEqual( WithProperties("%s", 'key'), WithProperties("%s", 'key')) self.assertNotEqual( WithProperties("%s", 'key'), WithProperties("k%s", 'key')) def test_Interpolate_constant(self): self.assertNotEqual( Interpolate('some text here'), Interpolate('and other text there')) self.assertEqual( Interpolate('some text here'), Interpolate('some text here')) def test_Interpolate_positional(self): self.assertNotEqual( Interpolate('%s %s', "test", "text"), Interpolate('%s %s', "other", 
"text")) self.assertEqual( Interpolate('%s %s', "test", "text"), Interpolate('%s %s', "test", "text")) def test_Interpolate_kwarg(self): self.assertNotEqual( Interpolate("%(kw:test)s", test=object(), other=2), Interpolate("%(kw:test)s", test=object(), other=2)) self.assertEqual( Interpolate('testing: %(kw:test)s', test="test", other=3), Interpolate('testing: %(kw:test)s', test="test", other=3)) def test_Interpolate_worker(self): self.assertEqual( Interpolate('testing: %(worker:test)s'), Interpolate('testing: %(worker:test)s')) def test_renderer(self): self.assertNotEqual( renderer(lambda p: 'val'), renderer(lambda p: 'val')) def rend(p): return "val" self.assertEqual( renderer(rend), renderer(rend)) def test_Lookup_simple(self): self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'other'), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test'), _Lookup({'test': 5, 'other': 6}, 'test')) def test_Lookup_default(self): self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', default='default'), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test', default='default'), _Lookup({'test': 5, 'other': 6}, 'test', default='default')) def test_Lookup_defaultWhenFalse(self): self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=False), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=False), _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=True)) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=True), _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=True)) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test'), _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=True)) def test_Lookup_hasKey(self): self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', hasKey=None), _Lookup({'test': 5, 'other': 6}, 'test')) 
self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', hasKey='has-key'), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', hasKey='has-key'), _Lookup({'test': 5, 'other': 6}, 'test', hasKey='other-key')) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test', hasKey='has-key'), _Lookup({'test': 5, 'other': 6}, 'test', hasKey='has-key')) def test_Lookup_elideNoneAs(self): self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs=None), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs=''), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs='got None'), _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs='')) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs='got None'), _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs='got None')) def test_Lazy(self): self.assertNotEqual( _Lazy(5), _Lazy(6)) self.assertEqual( _Lazy(5), _Lazy(5)) def test_SourceStampDict(self): self.assertNotEqual( _SourceStampDict('binary'), _SourceStampDict('library')) self.assertEqual( _SourceStampDict('binary'), _SourceStampDict('binary')) class TestTransform(unittest.TestCase, ConfigErrorsMixin): def setUp(self): self.props = Properties(propname='propvalue') def test_invalid_first_arg(self): with self.assertRaisesConfigError( "function given to Transform neither callable nor renderable"): Transform(None) @defer.inlineCallbacks def test_argless(self): t = Transform(lambda: 'abc') res = yield self.props.render(t) self.assertEqual(res, 'abc') @defer.inlineCallbacks def test_argless_renderable(self): @renderer def function(iprops): return lambda: iprops.getProperty('propname') t = Transform(function) res = yield self.props.render(t) self.assertEqual(res, 'propvalue') @defer.inlineCallbacks def test_args(self): t = Transform(lambda x, y: x + '|' + y, 'abc', 
Property('propname')) res = yield self.props.render(t) self.assertEqual(res, 'abc|propvalue') @defer.inlineCallbacks def test_kwargs(self): t = Transform(lambda x, y: x + '|' + y, x='abc', y=Property('propname')) res = yield self.props.render(t) self.assertEqual(res, 'abc|propvalue') def test_deferred(self): function = DeferredRenderable() arg = DeferredRenderable() kwarg = DeferredRenderable() t = Transform(function, arg, y=kwarg) d = self.props.render(t) d.addCallback(self.assertEqual, 'abc|def') function.callback(lambda x, y: x + '|' + y) arg.callback('abc') kwarg.callback('def') return d buildbot-3.4.0/master/buildbot/test/unit/process/test_remotecommand.py000066400000000000000000000164461413250514000263060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.trial import unittest from buildbot.process import remotecommand from buildbot.test.fake import logfile from buildbot.test.fake import remotecommand as fakeremotecommand from buildbot.test.util import interfaces from buildbot.test.util.warnings import assertNotProducesWarnings from buildbot.warnings import DeprecatedApiWarning class TestRemoteShellCommand(unittest.TestCase): def test_obfuscated_arguments(self): command = ["echo", ("obfuscated", "real", "fake"), "test", ("obfuscated", "real2", "fake2"), ("not obfuscated", "a", "b"), ("obfuscated"), # not obfuscated ("obfuscated", "test"), # not obfuscated ("obfuscated", "1", "2", "3"), # not obfuscated) ] cmd = remotecommand.RemoteShellCommand("build", command) self.assertEqual(cmd.command, command) self.assertEqual(cmd.fake_command, ["echo", "fake", "test", "fake2", ("not obfuscated", "a", "b"), ("obfuscated"), # not obfuscated # not obfuscated ("obfuscated", "test"), # not obfuscated) ("obfuscated", "1", "2", "3"), ]) def test_not_obfuscated_arguments(self): command = "echo test" cmd = remotecommand.RemoteShellCommand("build", command) self.assertEqual(cmd.command, command) self.assertEqual(cmd.fake_command, command) # NOTE: # # This interface is considered private to Buildbot and may change without # warning in future versions. 
class Tests(interfaces.InterfaceTests): remoteCommandClass = None def makeRemoteCommand(self, stdioLogName='stdio'): return self.remoteCommandClass('ping', {'arg': 'val'}, stdioLogName=stdioLogName) def test_signature_RemoteCommand_constructor(self): @self.assertArgSpecMatches(self.remoteCommandClass.__init__) def __init__(self, remote_command, args, ignore_updates=False, collectStdout=False, collectStderr=False, decodeRC=None, stdioLogName='stdio'): pass def test_signature_RemoteShellCommand_constructor(self): @self.assertArgSpecMatches(self.remoteShellCommandClass.__init__) def __init__(self, workdir, command, env=None, want_stdout=1, want_stderr=1, timeout=20 * 60, maxTime=None, sigtermTime=None, logfiles=None, usePTY=None, logEnviron=True, collectStdout=False, collectStderr=False, interruptSignal=None, initialStdin=None, decodeRC=None, stdioLogName='stdio'): pass def test_signature_run(self): cmd = self.makeRemoteCommand() @self.assertArgSpecMatches(cmd.run) def run(self, step, conn, builder_name): pass def test_signature_useLog(self): cmd = self.makeRemoteCommand() @self.assertArgSpecMatches(cmd.useLog) def useLog(self, log_, closeWhenFinished=False, logfileName=None): pass def test_signature_useLogDelayed(self): cmd = self.makeRemoteCommand() @self.assertArgSpecMatches(cmd.useLogDelayed) def useLogDelayed(self, logfileName, activateCallBack, closeWhenFinished=False): pass def test_signature_interrupt(self): cmd = self.makeRemoteCommand() @self.assertArgSpecMatches(cmd.interrupt) def useLogDelayed(self, why): pass def test_signature_didFail(self): cmd = self.makeRemoteCommand() @self.assertArgSpecMatches(cmd.didFail) def useLogDelayed(self): pass def test_signature_logs(self): cmd = self.makeRemoteCommand() self.assertIsInstance(cmd.logs, dict) def test_signature_active(self): cmd = self.makeRemoteCommand() self.assertIsInstance(cmd.active, bool) def test_RemoteShellCommand_constructor(self): self.remoteShellCommandClass('wkdir', 'some-command') class 
TestRunCommand(unittest.TestCase, Tests): remoteCommandClass = remotecommand.RemoteCommand remoteShellCommandClass = remotecommand.RemoteShellCommand def test_notStdioLog(self): logname = 'notstdio' cmd = self.makeRemoteCommand(stdioLogName=logname) log = logfile.FakeLogFile(logname) cmd.useLog(log) cmd.addStdout('some stdout') self.assertEqual(log.stdout, 'some stdout') cmd.addStderr('some stderr') self.assertEqual(log.stderr, 'some stderr') cmd.addHeader('some header') self.assertEqual(log.header, 'some header') def test_RemoteShellCommand_usePTY_on_worker_2_16(self): cmd = remotecommand.RemoteShellCommand('workdir', 'shell') def workerVersion(command, oldversion=None): return '2.16' def workerVersionIsOlderThan(command, minversion): return ['2', '16'] < minversion.split('.') step = mock.Mock() step.workerVersionIsOlderThan = workerVersionIsOlderThan step.workerVersion = workerVersion conn = mock.Mock() conn.remoteStartCommand = mock.Mock(return_value=None) cmd.run(step, conn, 'builder') self.assertEqual(cmd.args['usePTY'], 'slave-config') class TestFakeRunCommand(unittest.TestCase, Tests): remoteCommandClass = fakeremotecommand.FakeRemoteCommand remoteShellCommandClass = fakeremotecommand.FakeRemoteShellCommand class TestWorkerTransition(unittest.TestCase): def test_RemoteShellCommand_usePTY(self): with assertNotProducesWarnings(DeprecatedApiWarning): cmd = remotecommand.RemoteShellCommand( 'workdir', 'command') self.assertTrue(cmd.args['usePTY'] is None) with assertNotProducesWarnings(DeprecatedApiWarning): cmd = remotecommand.RemoteShellCommand( 'workdir', 'command', usePTY=True) self.assertTrue(cmd.args['usePTY']) with assertNotProducesWarnings(DeprecatedApiWarning): cmd = remotecommand.RemoteShellCommand( 'workdir', 'command', usePTY=False) self.assertFalse(cmd.args['usePTY']) buildbot-3.4.0/master/buildbot/test/unit/process/test_remotetransfer.py000066400000000000000000000046761413250514000265160ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import stat import tempfile from mock import Mock from twisted.trial import unittest from buildbot.process import remotetransfer # Test buildbot.steps.remotetransfer.FileWriter class. class TestFileWriter(unittest.TestCase): # test FileWriter.__init__() method. def testInit(self): # # patch functions called in constructor # # patch os.path.exists() to always return False mockedExists = Mock(return_value=False) self.patch(os.path, "exists", mockedExists) # capture calls to os.makedirs() mockedMakedirs = Mock() self.patch(os, 'makedirs', mockedMakedirs) # capture calls to tempfile.mkstemp() mockedMkstemp = Mock(return_value=(7, "tmpname")) self.patch(tempfile, "mkstemp", mockedMkstemp) # capture calls to os.fdopen() mockedFdopen = Mock() self.patch(os, "fdopen", mockedFdopen) # # call _FileWriter constructor # destfile = os.path.join("dir", "file") remotetransfer.FileWriter(destfile, 64, stat.S_IRUSR) # # validate captured calls # absdir = os.path.dirname(os.path.abspath(os.path.join("dir", "file"))) mockedExists.assert_called_once_with(absdir) mockedMakedirs.assert_called_once_with(absdir) mockedMkstemp.assert_called_once_with(dir=absdir) mockedFdopen.assert_called_once_with(7, 'wb') class TestStringFileWriter(unittest.TestCase): def testBasic(self): sfw = remotetransfer.StringFileWriter() # 
StringFileWriter takes bytes or native string and outputs native strings sfw.remote_write(b'bytes') sfw.remote_write(' or str') self.assertEqual(sfw.buffer, 'bytes or str') buildbot-3.4.0/master/buildbot/test/unit/process/test_results.py000066400000000000000000000201501413250514000251400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.python import log from twisted.trial import unittest from buildbot.process import results class TestResults(unittest.TestCase): def test_Results(self): for r in results.Results: i = getattr(results, r.upper()) self.assertEqual(results.Results[i], r) def test_worst_status(self): self.assertEqual(results.WARNINGS, results.worst_status(results.SUCCESS, results.WARNINGS)) self.assertEqual(results.CANCELLED, results.worst_status(results.SKIPPED, results.CANCELLED)) def test_sort_worst_status(self): res = list(range(len(results.Results))) res.sort( key=lambda a: a if a != results.SKIPPED else -1) self.assertEqual(res, [ results.SKIPPED, results.SUCCESS, results.WARNINGS, results.FAILURE, results.EXCEPTION, results.RETRY, results.CANCELLED, ]) def do_test_carc(self, result, previousResult, newResult, terminate, haltOnFailure=None, flunkOnWarnings=None, flunkOnFailure=None, warnOnWarnings=None, warnOnFailure=None): if haltOnFailure is None: haltOnFailure = 
[True, False] if flunkOnWarnings is None: flunkOnWarnings = [ True, False] if flunkOnFailure is None: flunkOnFailure = [True, False] if warnOnWarnings is None: warnOnWarnings = [ True, False] if warnOnFailure is None: warnOnFailure = [True, False] for hof in haltOnFailure: for fow in flunkOnWarnings: for fof in flunkOnFailure: for wow in warnOnWarnings: for wof in warnOnFailure: self.haltOnFailure = hof self.flunkOnWarnings = fow self.flunkOnFailure = fof self.warnOnWarnings = wow self.warnOnFailure = wof nr, term = results.computeResultAndTermination( self, result, previousResult) log.msg("res=%r prevRes=%r hof=%r fow=%r fof=%r " "wow=%r wof=%r => %r %r" % (results.Results[result], results.Results[previousResult], hof, fow, fof, wow, wof, results.Results[nr], term)) self.assertEqual((nr, term), (newResult, terminate), "see test.log for details") def test_carc_success_after_success(self): self.do_test_carc(results.SUCCESS, results.SUCCESS, results.SUCCESS, False) def test_carc_success_after_warnings(self): self.do_test_carc(results.SUCCESS, results.WARNINGS, results.WARNINGS, False) def test_carc_success_after_failure(self): self.do_test_carc(results.SUCCESS, results.FAILURE, results.FAILURE, False) def test_carc_warnings_after_success(self): self.do_test_carc(results.WARNINGS, results.SUCCESS, results.WARNINGS, False, flunkOnWarnings=[False], warnOnWarnings=[True]) self.do_test_carc(results.WARNINGS, results.SUCCESS, results.SUCCESS, False, flunkOnWarnings=[False], warnOnWarnings=[False]) self.do_test_carc(results.WARNINGS, results.SUCCESS, results.FAILURE, False, flunkOnWarnings=[True], warnOnWarnings=[True]) self.do_test_carc(results.WARNINGS, results.SUCCESS, results.FAILURE, False, flunkOnWarnings=[True], warnOnWarnings=[False]) def test_carc_warnings_after_warnings(self): self.do_test_carc(results.WARNINGS, results.WARNINGS, results.WARNINGS, False, flunkOnWarnings=[False]) self.do_test_carc(results.WARNINGS, results.WARNINGS, results.FAILURE, False, 
flunkOnWarnings=[True]) def test_carc_warnings_after_failure(self): self.do_test_carc(results.WARNINGS, results.FAILURE, results.FAILURE, False, flunkOnWarnings=[False]) self.do_test_carc(results.WARNINGS, results.FAILURE, results.FAILURE, False, flunkOnWarnings=[True]) def test_carc_failure_after_success(self): for hof in False, True: self.do_test_carc(results.FAILURE, results.SUCCESS, results.FAILURE, hof, haltOnFailure=[hof], flunkOnFailure=[True], warnOnFailure=[False]) self.do_test_carc(results.FAILURE, results.SUCCESS, results.FAILURE, hof, haltOnFailure=[hof], flunkOnFailure=[True], warnOnFailure=[True]) self.do_test_carc(results.FAILURE, results.SUCCESS, results.SUCCESS, hof, haltOnFailure=[hof], flunkOnFailure=[False], warnOnFailure=[False]) self.do_test_carc(results.FAILURE, results.SUCCESS, results.WARNINGS, hof, haltOnFailure=[hof], flunkOnFailure=[False], warnOnFailure=[True]) def test_carc_failure_after_warnings(self): for hof in False, True: self.do_test_carc(results.FAILURE, results.WARNINGS, results.FAILURE, hof, haltOnFailure=[hof], flunkOnFailure=[True]) self.do_test_carc(results.FAILURE, results.WARNINGS, results.WARNINGS, hof, haltOnFailure=[hof], flunkOnFailure=[False]) def test_carc_failure_after_failure(self): for hof in False, True: self.do_test_carc(results.FAILURE, results.FAILURE, results.FAILURE, hof, haltOnFailure=[hof]) def test_carc_exception(self): for prev in results.FAILURE, results.WARNINGS, results.SUCCESS: self.do_test_carc(results.EXCEPTION, prev, results.EXCEPTION, True) def test_carc_retry(self): for prev in results.FAILURE, results.WARNINGS, results.SUCCESS: self.do_test_carc(results.RETRY, prev, results.RETRY, True) def test_carc_cancelled(self): for prev in results.FAILURE, results.WARNINGS, results.SUCCESS: self.do_test_carc(results.CANCELLED, prev, results.CANCELLED, True) def test_carc_skipped(self): for prev in results.FAILURE, results.WARNINGS, results.SUCCESS: self.do_test_carc(results.SKIPPED, prev, prev, False) 
buildbot-3.4.0/master/buildbot/test/unit/process/test_users_manager.py000066400000000000000000000033701413250514000262770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.users import manager from buildbot.util import service class FakeUserManager(service.AsyncMultiService): pass class TestUserManager(unittest.TestCase): def setUp(self): self.master = mock.Mock() self.umm = manager.UserManagerManager(self.master) self.umm.startService() self.config = config.MasterConfig() def tearDown(self): self.umm.stopService() @defer.inlineCallbacks def test_reconfigServiceWithBuildbotConfig(self): # add a user manager um1 = FakeUserManager() self.config.user_managers = [um1] yield self.umm.reconfigServiceWithBuildbotConfig(self.config) self.assertTrue(um1.running) self.assertIdentical(um1.master, self.master) # and back to nothing self.config.user_managers = [] yield self.umm.reconfigServiceWithBuildbotConfig(self.config) self.assertIdentical(um1.master, None) buildbot-3.4.0/master/buildbot/test/unit/process/test_users_manual.py000066400000000000000000000257561413250514000261560ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # this class is known to contain cruft and will be looked at later, so # no current implementation utilizes it aside from scripts.runner. import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process.users import manual from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class ManualUsersMixin: """ This class fakes out the master/db components to test the manual user managers located in process.users.manual. """ def setUpManualUsers(self): self.master = fakemaster.make_master(self, wantDb=True) class TestUsersBase(unittest.TestCase): """ Not really sure what there is to test, aside from _setUpManualUsers getting self.master set. 
""" class TestCommandlineUserManagerPerspective(TestReactorMixin, unittest.TestCase, ManualUsersMixin): def setUp(self): self.setUpTestReactor() self.setUpManualUsers() def call_perspective_commandline(self, *args): persp = manual.CommandlineUserManagerPerspective(self.master) return persp.perspective_commandline(*args) @defer.inlineCallbacks def test_perspective_commandline_add(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'git': 'x'}]) usdict = yield self.master.db.users.getUser(1) self.assertEqual(usdict, dict(uid=1, identifier='x', bb_username=None, bb_password=None, git='x')) @defer.inlineCallbacks def test_perspective_commandline_update(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) yield self.call_perspective_commandline('update', None, None, None, [{'identifier': 'x', 'svn': 'y'}]) usdict = yield self.master.db.users.getUser(1) self.assertEqual(usdict, dict(uid=1, identifier='x', bb_username=None, bb_password=None, svn='y')) @defer.inlineCallbacks def test_perspective_commandline_update_bb(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) yield self.call_perspective_commandline('update', 'bb_user', 'hashed_bb_pass', None, [{'identifier': 'x'}]) usdict = yield self.master.db.users.getUser(1) self.assertEqual(usdict, dict(uid=1, identifier='x', bb_username='bb_user', bb_password='hashed_bb_pass', svn='x')) @defer.inlineCallbacks def test_perspective_commandline_update_both(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) yield self.call_perspective_commandline('update', 'bb_user', 'hashed_bb_pass', None, [{'identifier': 'x', 'svn': 'y'}]) usdict = yield self.master.db.users.getUser(1) self.assertEqual(usdict, dict(uid=1, identifier='x', bb_username='bb_user', bb_password='hashed_bb_pass', svn='y')) @defer.inlineCallbacks def 
test_perspective_commandline_remove(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'h@c', 'git': 'hi '}]) yield self.call_perspective_commandline('remove', None, None, ['x'], None) res = yield self.master.db.users.getUser('x') self.assertEqual(res, None) @defer.inlineCallbacks def test_perspective_commandline_get(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) yield self.call_perspective_commandline('get', None, None, ['x'], None) res = yield self.master.db.users.getUser(1) self.assertEqual(res, dict(uid=1, identifier='x', bb_username=None, bb_password=None, svn='x')) @defer.inlineCallbacks def test_perspective_commandline_get_multiple_attrs(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x', 'git': 'x@c'}]) yield self.call_perspective_commandline('get', None, None, ['x'], None) res = yield self.master.db.users.getUser(1) self.assertEqual(res, dict(uid=1, identifier='x', bb_username=None, bb_password=None, svn='x', git='x@c')) @defer.inlineCallbacks def test_perspective_commandline_add_format(self): result = yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) exp_format = "user(s) added:\nidentifier: x\nuid: 1\n\n" self.assertEqual(result, exp_format) @defer.inlineCallbacks def test_perspective_commandline_update_format(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) result = yield self.call_perspective_commandline('update', None, None, None, [{'identifier': 'x', 'svn': 'y'}]) exp_format = 'user(s) updated:\nidentifier: x\n' self.assertEqual(result, exp_format) @defer.inlineCallbacks def test_perspective_commandline_remove_format(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'h@c', 'git': 'hi '}]) result = yield self.call_perspective_commandline('remove', None, None, 
['h@c'], None) exp_format = "user(s) removed:\nidentifier: h@c\n" self.assertEqual(result, exp_format) @defer.inlineCallbacks def test_perspective_commandline_get_format(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x@y', 'git': 'x '}]) result = yield self.call_perspective_commandline('get', None, None, ['x@y'], None) exp_format = ('user(s) found:\nbb_username: None\n' 'git: x \nidentifier: x@y\n' 'uid: 1\n\n') self.assertEqual(result, exp_format) @defer.inlineCallbacks def test_perspective_commandline_remove_no_match_format(self): result = yield self.call_perspective_commandline( 'remove', None, None, ['x'], None) exp_format = "user(s) removed:\n" self.assertEqual(result, exp_format) @defer.inlineCallbacks def test_perspective_commandline_get_no_match_format(self): result = yield self.call_perspective_commandline('get', None, None, ['x'], None) exp_format = "user(s) found:\nno match found\n" self.assertEqual(result, exp_format) class TestCommandlineUserManager(TestReactorMixin, unittest.TestCase, ManualUsersMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpManualUsers() self.manual_component = manual.CommandlineUserManager(username="user", passwd="userpw", port="9990") yield self.manual_component.setServiceParent(self.master) def test_no_userpass(self): d = defer.maybeDeferred(manual.CommandlineUserManager) return self.assertFailure(d, AssertionError) def test_no_port(self): d = defer.maybeDeferred(manual.CommandlineUserManager, username="x", passwd="y") return self.assertFailure(d, AssertionError) @defer.inlineCallbacks def test_service(self): # patch out the pbmanager's 'register' command both to be sure # the registration is correct and to get a copy of the factory registration = mock.Mock() registration.unregister = lambda: defer.succeed(None) self.master.pbmanager = mock.Mock() def register(portstr, user, passwd, factory): self.assertEqual([portstr, user, passwd], ['9990', 'user', 
'userpw']) self.got_factory = factory return defer.succeed(registration) self.master.pbmanager.register = register yield self.manual_component.startService() persp = self.got_factory(mock.Mock(), 'user') self.assertTrue( isinstance(persp, manual.CommandlineUserManagerPerspective)) yield self.manual_component.stopService() buildbot-3.4.0/master/buildbot/test/unit/process/test_users_users.py000066400000000000000000000144631413250514000260330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.process.users import users from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class UsersTests(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.test_sha = users.encrypt("cancer") @defer.inlineCallbacks def test_createUserObject_no_src(self): yield users.createUserObject(self.master, "Tyler Durden", None) self.assertEqual(self.db.users.users, {}) self.assertEqual(self.db.users.users_info, {}) @defer.inlineCallbacks def test_createUserObject_unrecognized_src(self): yield users.createUserObject(self.master, "Tyler Durden", 'blah') self.assertEqual(self.db.users.users, {}) self.assertEqual(self.db.users.users_info, {}) @defer.inlineCallbacks def test_createUserObject_git(self): yield users.createUserObject(self.master, "Tyler Durden ", 'git') self.assertEqual(self.db.users.users, {1: dict(identifier='Tyler Durden ', bb_username=None, bb_password=None)}) self.assertEqual(self.db.users.users_info, {1: [dict(attr_type="git", attr_data="Tyler Durden ")]}) @defer.inlineCallbacks def test_createUserObject_svn(self): yield users.createUserObject(self.master, "tdurden", 'svn') self.assertEqual(self.db.users.users, {1: dict(identifier='tdurden', bb_username=None, bb_password=None)}) self.assertEqual(self.db.users.users_info, {1: [dict(attr_type="svn", attr_data="tdurden")]}) @defer.inlineCallbacks def test_createUserObject_hg(self): yield users.createUserObject(self.master, "Tyler Durden ", 'hg') self.assertEqual(self.db.users.users, {1: dict(identifier='Tyler Durden ', bb_username=None, bb_password=None)}) self.assertEqual(self.db.users.users_info, {1: [dict(attr_type="hg", attr_data="Tyler Durden ")]}) @defer.inlineCallbacks def test_createUserObject_cvs(self): yield 
users.createUserObject(self.master, "tdurden", 'cvs') self.assertEqual(self.db.users.users, {1: dict(identifier='tdurden', bb_username=None, bb_password=None)}) self.assertEqual(self.db.users.users_info, {1: [dict(attr_type="cvs", attr_data="tdurden")]}) @defer.inlineCallbacks def test_createUserObject_darcs(self): yield users.createUserObject(self.master, "tyler@mayhem.net", 'darcs') self.assertEqual(self.db.users.users, {1: dict(identifier='tyler@mayhem.net', bb_username=None, bb_password=None)}) self.assertEqual(self.db.users.users_info, {1: [dict(attr_type="darcs", attr_data="tyler@mayhem.net")]}) @defer.inlineCallbacks def test_createUserObject_bzr(self): yield users.createUserObject(self.master, "Tyler Durden", 'bzr') self.assertEqual(self.db.users.users, {1: dict(identifier='Tyler Durden', bb_username=None, bb_password=None)}) self.assertEqual(self.db.users.users_info, {1: [dict(attr_type="bzr", attr_data="Tyler Durden")]}) @defer.inlineCallbacks def test_getUserContact_found(self): self.db.insertTestData([fakedb.User(uid=1, identifier='tdurden'), fakedb.UserInfo(uid=1, attr_type='svn', attr_data='tdurden'), fakedb.UserInfo(uid=1, attr_type='email', attr_data='tyler@mayhem.net')]) contact = yield users.getUserContact(self.master, contact_types=['email'], uid=1) self.assertEqual(contact, 'tyler@mayhem.net') @defer.inlineCallbacks def test_getUserContact_key_not_found(self): self.db.insertTestData([fakedb.User(uid=1, identifier='tdurden'), fakedb.UserInfo(uid=1, attr_type='svn', attr_data='tdurden'), fakedb.UserInfo(uid=1, attr_type='email', attr_data='tyler@mayhem.net')]) contact = yield users.getUserContact(self.master, contact_types=['blargh'], uid=1) self.assertEqual(contact, None) @defer.inlineCallbacks def test_getUserContact_uid_not_found(self): contact = yield users.getUserContact(self.master, contact_types=['email'], uid=1) self.assertEqual(contact, None) def test_check_passwd(self): res = users.check_passwd("cancer", self.test_sha) 
self.assertEqual(res, True) buildbot-3.4.0/master/buildbot/test/unit/process/test_workerforbuilder.py000066400000000000000000000046571413250514000270440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial.unittest import TestCase from buildbot.process.workerforbuilder import AbstractWorkerForBuilder from buildbot.worker.base import AbstractWorker class TestAbstractWorkerForBuilder(TestCase): """ Tests for ``AbstractWorkerForBuilder``. """ def test_buildStarted_called(self): """ If the worker associated to worker builder has a ``buildStarted`` method, calling ``buildStarted`` on the worker builder calls the method on the worker with the workerforbuilder as an argument. 
""" class ConcreteWorker(AbstractWorker): _buildStartedCalls = [] def buildStarted(self, workerforbuilder): self._buildStartedCalls.append(workerforbuilder) worker = ConcreteWorker("worker", "pass") workerforbuilder = AbstractWorkerForBuilder() # FIXME: This should call attached, instead of setting the attribute # directly workerforbuilder.worker = worker workerforbuilder.buildStarted() self.assertEqual(ConcreteWorker._buildStartedCalls, [workerforbuilder]) def test_buildStarted_missing(self): """ If the worker associated to worker builder doesn't not have a ``buildStarted`` method, calling ``buildStarted`` on the worker builder doesn't raise an exception. """ class ConcreteWorker(AbstractWorker): pass worker = ConcreteWorker("worker", "pass") workerforbuilder = AbstractWorkerForBuilder() # FIXME: This should call attached, instead of setting the attribute # directly workerforbuilder.worker = worker # The following shouldn't raise an exception. workerforbuilder.buildStarted() buildbot-3.4.0/master/buildbot/test/unit/reporters/000077500000000000000000000000001413250514000223775ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/reporters/__init__.py000066400000000000000000000000001413250514000244760ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/reporters/test_base.py000066400000000000000000000160561413250514000247320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process.results import FAILURE from buildbot.reporters.base import ReporterBase from buildbot.reporters.generators.build import BuildStatusGenerator from buildbot.reporters.generators.worker import WorkerMissingGenerator from buildbot.reporters.message import MessageFormatter from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.logging import LoggingMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestException(Exception): pass class TestReporterBase(ConfigErrorsMixin, TestReactorMixin, LoggingMixin, unittest.TestCase, ReporterTestMixin): def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.setUpLogging() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def setupNotifier(self, generators): mn = ReporterBase(generators=generators) mn.sendMessage = mock.Mock(spec=mn.sendMessage) mn.sendMessage.return_value = "" yield mn.setServiceParent(self.master) yield mn.startService() return mn @defer.inlineCallbacks def setupBuildMessage(self, **kwargs): build = yield self.insert_build_finished(FAILURE) formatter = mock.Mock(spec=MessageFormatter) formatter.format_message_for_build.return_value = { "body": "body", "type": "text", "subject": "subject" } formatter.want_properties = False formatter.want_steps = False formatter.want_logs = False formatter.want_logs_content = False generator = BuildStatusGenerator(message_formatter=formatter, **kwargs) mn = yield self.setupNotifier(generators=[generator]) yield 
mn._got_event(('builds', 20, 'finished'), build) return (mn, build, formatter) def setup_mock_generator(self, events_filter): gen = mock.Mock() gen.wanted_event_keys = events_filter gen.generate_name = lambda: '' return gen def test_check_config_raises_error_when_generators_not_list(self): with self.assertRaisesConfigError('generators argument must be a list'): ReporterBase(generators='abc') @defer.inlineCallbacks def test_buildMessage_nominal(self): mn, build, formatter = yield self.setupBuildMessage(mode=("failing",)) formatter.format_message_for_build.assert_called_with(self.master, build, mode=('failing',), users=['me@foo']) report = { 'body': 'body', 'subject': 'subject', 'type': 'text', 'results': FAILURE, 'builds': [build], 'users': ['me@foo'], 'patches': [], 'logs': [] } self.assertEqual(mn.sendMessage.call_count, 1) mn.sendMessage.assert_called_with([report]) @defer.inlineCallbacks def test_worker_missing_sends_message(self): generator = WorkerMissingGenerator(workers=['myworker']) mn = yield self.setupNotifier(generators=[generator]) worker_dict = { 'name': 'myworker', 'notify': ["workeradmin@example.org"], 'workerinfo': {"admin": "myadmin"}, 'last_connection': "yesterday" } yield mn._got_event(('workers', 98, 'missing'), worker_dict) self.assertEqual(mn.sendMessage.call_count, 1) @defer.inlineCallbacks def test_generators_subscribes_events(self): gen1 = self.setup_mock_generator([('fake1', None, None)]) yield self.setupNotifier(generators=[gen1]) self.assertEqual(len(self.master.mq.qrefs), 1) self.assertEqual(self.master.mq.qrefs[0].filter, ('fake1', None, None)) @defer.inlineCallbacks def test_generators_subscribes_equal_events_once(self): gen1 = self.setup_mock_generator([('fake1', None, None)]) gen2 = self.setup_mock_generator([('fake1', None, None)]) yield self.setupNotifier(generators=[gen1, gen2]) self.assertEqual(len(self.master.mq.qrefs), 1) self.assertEqual(self.master.mq.qrefs[0].filter, ('fake1', None, None)) @defer.inlineCallbacks def 
test_generators_subscribes_equal_different_events_once(self): gen1 = self.setup_mock_generator([('fake1', None, None)]) gen2 = self.setup_mock_generator([('fake2', None, None)]) yield self.setupNotifier(generators=[gen1, gen2]) self.assertEqual(len(self.master.mq.qrefs), 2) self.assertEqual(self.master.mq.qrefs[0].filter, ('fake1', None, None)) self.assertEqual(self.master.mq.qrefs[1].filter, ('fake2', None, None)) @defer.inlineCallbacks def test_generators_unsubscribes_on_stop_service(self): gen1 = self.setup_mock_generator([('fake1', None, None)]) notifier = yield self.setupNotifier(generators=[gen1]) yield notifier.stopService() self.assertEqual(len(self.master.mq.qrefs), 0) @defer.inlineCallbacks def test_generators_resubscribes_on_reconfig(self): gen1 = self.setup_mock_generator([('fake1', None, None)]) gen2 = self.setup_mock_generator([('fake2', None, None)]) notifier = yield self.setupNotifier(generators=[gen1]) self.assertEqual(len(self.master.mq.qrefs), 1) self.assertEqual(self.master.mq.qrefs[0].filter, ('fake1', None, None)) yield notifier.reconfigService(generators=[gen2]) self.assertEqual(len(self.master.mq.qrefs), 1) self.assertEqual(self.master.mq.qrefs[0].filter, ('fake2', None, None)) @defer.inlineCallbacks def test_generator_throw_exception_on_generate(self): gen = self.setup_mock_generator([('fake1', None, None)]) @defer.inlineCallbacks def generate_throw(*args, **kwargs): raise TestException() gen.generate = generate_throw notifier = yield self.setupNotifier(generators=[gen]) yield notifier._got_event(('fake1', None, None), None) self.assertEqual(len(self.flushLoggedErrors(TestException)), 1) self.assertLogged('Got exception when handling reporter events') buildbot-3.4.0/master/buildbot/test/unit/reporters/test_bitbucket.py000066400000000000000000000314061413250514000257700ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters.bitbucket import _BASE_URL from buildbot.reporters.bitbucket import _OAUTH_URL from buildbot.reporters.bitbucket import BitbucketStatusPush from buildbot.reporters.generators.build import BuildStartEndStatusGenerator from buildbot.reporters.message import MessageFormatter from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.logging import LoggingMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestBitbucketStatusPush(TestReactorMixin, unittest.TestCase, ConfigErrorsMixin, ReporterTestMixin, LoggingMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.reporter_test_repo = 'https://example.org/user/repo' self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, _BASE_URL, debug=None, verify=None) 
self.oauthhttp = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, _OAUTH_URL, auth=('key', 'secret'), debug=None, verify=None) self.bsp = BitbucketStatusPush(Interpolate('key'), Interpolate('secret')) yield self.bsp.setServiceParent(self.master) yield self.bsp.startService() @defer.inlineCallbacks def tearDown(self): yield self.bsp.stopService() @defer.inlineCallbacks def test_basic(self): build = yield self.insert_build_new() self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'state': 'INPROGRESS', 'key': 'Builder0', 'name': 'Builder0', 'description': '', 'url': 'http://localhost:8080/#builders/79/builds/0', }, code=201) self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'state': 'SUCCESSFUL', 'key': 'Builder0', 'name': 'Builder0', 'description': '', 'url': 'http://localhost:8080/#builders/79/builds/0', }, code=201) self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'state': 'FAILED', 'key': 'Builder0', 'name': 'Builder0', 'description': '', 'url': 'http://localhost:8080/#builders/79/builds/0', }, code=201) yield self.bsp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['results'] = SUCCESS yield self.bsp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.bsp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_success_return_codes(self): build = yield self.insert_build_finished(SUCCESS) # make sure a 201 return code does not trigger an error 
self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'state': 'SUCCESSFUL', 'key': 'Builder0', 'name': 'Builder0', 'description': '', 'url': 'http://localhost:8080/#builders/79/builds/0', }, code=201) self.setUpLogging() yield self.bsp._got_event(('builds', 20, 'finished'), build) self.assertNotLogged('201: unable to upload Bitbucket status') # make sure a 200 return code does not trigger an error self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'state': 'SUCCESSFUL', 'key': 'Builder0', 'name': 'Builder0', 'description': '', 'url': 'http://localhost:8080/#builders/79/builds/0', }, code=200) self.setUpLogging() yield self.bsp._got_event(('builds', 20, 'finished'), build) self.assertNotLogged('200: unable to upload Bitbucket status') @defer.inlineCallbacks def test_unable_to_authenticate(self): build = yield self.insert_build_new() self.oauthhttp.expect( 'post', '', data={'grant_type': 'client_credentials'}, content_json={ "error_description": "Unsupported grant type: None", "error": "invalid_grant" }, code=400) self.setUpLogging() yield self.bsp._got_event(('builds', 20, 'new'), build) self.assertLogged('400: unable to authenticate to Bitbucket') @defer.inlineCallbacks def test_unable_to_send_status(self): build = yield self.insert_build_new() self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'state': 'INPROGRESS', 'key': 'Builder0', 'name': 'Builder0', 'description': '', 'url': 'http://localhost:8080/#builders/79/builds/0', }, code=404, content_json={ 
"error_description": "This commit is unknown to us", "error": "invalid_commit"}) self.setUpLogging() yield self.bsp._got_event(('builds', 20, 'new'), build) self.assertLogged('404: unable to upload Bitbucket status') self.assertLogged('This commit is unknown to us') self.assertLogged('invalid_commit') @defer.inlineCallbacks def test_empty_repository(self): self.reporter_test_repo = '' build = yield self.insert_build_new() self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) self.setUpLogging() yield self.bsp._got_event(('builds', 20, 'new'), build) self.assertLogged('Empty repository URL for Bitbucket status') class TestBitbucketStatusPushProperties(TestReactorMixin, unittest.TestCase, ConfigErrorsMixin, ReporterTestMixin, LoggingMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.reporter_test_repo = 'https://example.org/user/repo' self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, _BASE_URL, debug=None, verify=None) self.oauthhttp = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, _OAUTH_URL, auth=('key', 'secret'), debug=None, verify=None) self.bsp = BitbucketStatusPush( Interpolate('key'), Interpolate('secret'), status_key=Interpolate("%(prop:buildername)s/%(prop:buildnumber)s"), status_name=Interpolate("%(prop:buildername)s-%(prop:buildnumber)s"), generators=[ BuildStartEndStatusGenerator( start_formatter=MessageFormatter(subject="{{ status_detected }}"), end_formatter=MessageFormatter(subject="{{ summary }}") ) ] ) yield self.bsp.setServiceParent(self.master) yield self.bsp.startService() @defer.inlineCallbacks def tearDown(self): yield self.bsp.stopService() @defer.inlineCallbacks def test_properties(self): build = yield self.insert_build_new() self.oauthhttp.expect('post', '', data={'grant_type': 
'client_credentials'}, content_json={'access_token': 'foo'}) self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'state': 'INPROGRESS', 'key': 'Builder0/0', 'name': 'Builder0-0', 'description': 'not finished build', 'url': 'http://localhost:8080/#builders/79/builds/0', }, code=201) self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'state': 'SUCCESSFUL', 'key': 'Builder0/0', 'name': 'Builder0-0', 'description': 'Build succeeded!', 'url': 'http://localhost:8080/#builders/79/builds/0', }, code=201) yield self.bsp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['results'] = SUCCESS yield self.bsp._got_event(('builds', 20, 'finished'), build) class TestBitbucketStatusPushRepoParsing(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) self.bsp = BitbucketStatusPush( Interpolate('key'), Interpolate('secret')) yield self.bsp.setServiceParent(self.master) yield self.bsp.startService() @defer.inlineCallbacks def tearDown(self): yield self.bsp.stopService() def parse(self, repourl): return tuple(self.bsp.get_owner_and_repo(repourl)) def test_parse_no_scheme(self): self.assertEqual( ('user', 'repo'), self.parse('git@bitbucket.com:user/repo.git')) self.assertEqual( ('user', 'repo'), self.parse('git@bitbucket.com:user/repo')) def test_parse_with_scheme(self): self.assertEqual(('user', 'repo'), self.parse( 'https://bitbucket.com/user/repo.git')) self.assertEqual(('user', 'repo'), self.parse( 'https://bitbucket.com/user/repo')) self.assertEqual(('user', 'repo'), self.parse( 'ssh://git@bitbucket.com/user/repo.git')) self.assertEqual(('user', 'repo'), self.parse( 'ssh://git@bitbucket.com/user/repo')) self.assertEqual(('user', 'repo'), 
self.parse( 'https://api.bitbucket.org/2.0/repositories/user/repo')) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_bitbucketserver.py000066400000000000000000000627401413250514000272240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime from dateutil.tz import tzutc from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot.plugins import util from buildbot.process.properties import Interpolate from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters.bitbucketserver import HTTP_CREATED from buildbot.reporters.bitbucketserver import HTTP_PROCESSED from buildbot.reporters.bitbucketserver import BitbucketServerCoreAPIStatusPush from buildbot.reporters.bitbucketserver import BitbucketServerPRCommentPush from buildbot.reporters.bitbucketserver import BitbucketServerStatusPush from buildbot.reporters.generators.build import BuildStartEndStatusGenerator from buildbot.reporters.generators.build import BuildStatusGenerator from buildbot.reporters.generators.buildset import BuildSetStatusGenerator from buildbot.reporters.message import MessageFormatter from buildbot.reporters.message import MessageFormatterRenderable from buildbot.test.fake import fakemaster from buildbot.test.fake 
import httpclientservice as fakehttpclientservice from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.logging import LoggingMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin HTTP_NOT_FOUND = 404 class TestException(Exception): pass class TestBitbucketServerStatusPush(TestReactorMixin, ConfigErrorsMixin, unittest.TestCase, ReporterTestMixin, LoggingMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() @defer.inlineCallbacks def setupReporter(self, **kwargs): self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, 'serv', auth=('username', 'passwd'), debug=None, verify=None) self.sp = BitbucketServerStatusPush("serv", Interpolate("username"), Interpolate("passwd"), **kwargs) yield self.sp.setServiceParent(self.master) @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def _check_start_and_finish_build(self, build): # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'INPROGRESS', 'key': 'Builder0', 'description': 'Build started.'}, code=HTTP_PROCESSED) self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'SUCCESSFUL', 'key': 'Builder0', 'description': 'Build done.'}, code=HTTP_PROCESSED) self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'FAILED', 'key': 'Builder0', 'description': 'Build done.'}) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True 
yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_basic(self): self.setupReporter() build = yield self.insert_build_finished(SUCCESS) yield self._check_start_and_finish_build(build) @defer.inlineCallbacks def test_setting_options(self): generator = BuildStartEndStatusGenerator( start_formatter=MessageFormatterRenderable('Build started.'), end_formatter=MessageFormatterRenderable('Build finished.') ) self.setupReporter(statusName='Build', generators=[generator]) build = yield self.insert_build_finished(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'INPROGRESS', 'key': 'Builder0', 'name': 'Build', 'description': 'Build started.'}, code=HTTP_PROCESSED) self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'SUCCESSFUL', 'key': 'Builder0', 'name': 'Build', 'description': 'Build finished.'}, code=HTTP_PROCESSED) self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'FAILED', 'key': 'Builder0', 'name': 'Build', 'description': 'Build finished.'}, code=HTTP_PROCESSED) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_error(self): self.setupReporter() build = yield self.insert_build_finished(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 
'http://localhost:8080/#builders/79/builds/0', 'state': 'INPROGRESS', 'key': 'Builder0', 'description': 'Build started.'}, code=HTTP_NOT_FOUND, content_json={ "error_description": "This commit is unknown to us", "error": "invalid_commit"}) build['complete'] = False self.setUpLogging() yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged('404: Unable to send Bitbucket Server status') @defer.inlineCallbacks def test_basic_with_no_revision(self): yield self.setupReporter() self.reporter_test_revision = None build = yield self.insert_build_finished(SUCCESS) self.setUpLogging() # we don't expect any request build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged("Unable to get the commit hash") build['complete'] = True yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) class TestBitbucketServerCoreAPIStatusPush(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase, ReporterTestMixin, LoggingMixin): @defer.inlineCallbacks def setupReporter(self, token=None, **kwargs): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) http_headers = {} if token is None else {'Authorization': 'Bearer tokentoken'} http_auth = ('username', 'passwd') if token is None else None self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, 'serv', auth=http_auth, headers=http_headers, debug=None, verify=None) auth = (Interpolate("username"), Interpolate("passwd")) if token is None else None self.sp = BitbucketServerCoreAPIStatusPush("serv", token=token, auth=auth, **kwargs) yield self.sp.setServiceParent(self.master) yield self.master.startService() def setUp(self): self.master = None @defer.inlineCallbacks def tearDown(self): if self.master and self.master.running: yield self.master.stopService() @defer.inlineCallbacks def 
_check_start_and_finish_build(self, build, parentPlan=False): # we make sure proper calls to txrequests have been made _name = "Builder_parent #1 \u00BB Builder0 #0" if parentPlan else "Builder0 #0" _parent = "Builder_parent" if parentPlan else "Builder0" self._http.expect( 'post', '/rest/api/1.0/projects/example.org/repos/repo/commits/d34db33fd43db33f/builds', json={'name': _name, 'description': 'Build started.', 'key': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'refs/heads/master', 'buildNumber': '0', 'state': 'INPROGRESS', 'parent': _parent, 'duration': None, 'testResults': None}, code=HTTP_PROCESSED) self._http.expect( 'post', '/rest/api/1.0/projects/example.org/repos/repo/commits/d34db33fd43db33f/builds', json={'name': _name, 'description': 'Build done.', 'key': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'refs/heads/master', 'buildNumber': '0', 'state': 'SUCCESSFUL', 'parent': _parent, 'duration': 10000, 'testResults': None}, code=HTTP_PROCESSED) self._http.expect( 'post', '/rest/api/1.0/projects/example.org/repos/repo/commits/d34db33fd43db33f/builds', json={'name': _name, 'description': 'Build done.', 'key': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'refs/heads/master', 'buildNumber': '0', 'state': 'FAILED', 'parent': _parent, 'duration': 10000, 'testResults': None}, code=HTTP_PROCESSED) build['started_at'] = datetime.datetime(2019, 4, 1, 23, 38, 33, 154354, tzinfo=tzutc()) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) build["complete_at"] = datetime.datetime(2019, 4, 1, 23, 38, 43, 154354, tzinfo=tzutc()) build['complete'] = True yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) def test_config_no_base_url(self): with self.assertRaisesConfigError("Parameter base_url has to be given"): BitbucketServerCoreAPIStatusPush(base_url=None) def 
test_config_auth_and_token_mutually_exclusive(self): with self.assertRaisesConfigError( "Only one authentication method can be given (token or auth)"): BitbucketServerCoreAPIStatusPush("serv", token="x", auth=("username", "passwd")) @defer.inlineCallbacks def test_basic(self): yield self.setupReporter() build = yield self.insert_build_finished(SUCCESS) yield self._check_start_and_finish_build(build) @defer.inlineCallbacks def test_with_parent(self): yield self.setupReporter() build = yield self.insert_build_finished(SUCCESS, parent_plan=True) yield self._check_start_and_finish_build(build, parentPlan=True) @defer.inlineCallbacks def test_with_token(self): yield self.setupReporter(token='tokentoken') build = yield self.insert_build_finished(SUCCESS) yield self._check_start_and_finish_build(build) @defer.inlineCallbacks def test_error_setup_status(self): yield self.setupReporter() @defer.inlineCallbacks def raise_deferred_exception(**kwargs): raise TestException() self.sp.createStatus = Mock(side_effect=raise_deferred_exception) build = yield self.insert_build_finished(SUCCESS) yield self.sp._got_event(('builds', 20, 'new'), build) self.assertEqual(len(self.flushLoggedErrors(TestException)), 1) @defer.inlineCallbacks def test_error(self): self.setupReporter() build = yield self.insert_build_finished(SUCCESS) self.setUpLogging() # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/rest/api/1.0/projects/example.org/repos/repo/commits/d34db33fd43db33f/builds', json={'name': 'Builder0 #0', 'description': 'Build started.', 'key': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'refs/heads/master', 'buildNumber': '0', 'state': 'INPROGRESS', 'parent': 'Builder0', 'duration': None, 'testResults': None}, code=HTTP_NOT_FOUND) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged('404: Unable to send Bitbucket Server status') @defer.inlineCallbacks def 
test_with_full_ref(self): yield self.setupReporter() self.reporter_test_branch = "refs/heads/master" build = yield self.insert_build_finished(SUCCESS) yield self._check_start_and_finish_build(build) @defer.inlineCallbacks def test_with_no_ref(self): yield self.setupReporter() self.reporter_test_branch = None build = yield self.insert_build_finished(SUCCESS) self.setUpLogging() self._http.expect( 'post', '/rest/api/1.0/projects/example.org/repos/repo/commits/d34db33fd43db33f/builds', json={'name': 'Builder0 #0', 'description': 'Build started.', 'key': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'ref': None, 'buildNumber': '0', 'state': 'INPROGRESS', 'parent': 'Builder0', 'duration': None, 'testResults': None}, code=HTTP_PROCESSED) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged("WARNING: Unable to resolve ref for SSID: 234.") @defer.inlineCallbacks def test_with_no_revision(self): yield self.setupReporter() self.reporter_test_revision = None build = yield self.insert_build_finished(SUCCESS) self.setUpLogging() # we don't expect any request build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged("Unable to get the commit hash for SSID: 234") @defer.inlineCallbacks def test_with_no_repo(self): yield self.setupReporter() self.reporter_test_repo = None build = yield self.insert_build_finished(SUCCESS) self.setUpLogging() # we don't expect any request build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged("Unable to parse repository info from 'None' for SSID: 234") @defer.inlineCallbacks def test_with_renderers(self): @util.renderer def r_testresults(props): return { "failed": props.getProperty("unittests_failed", 0), "skipped": props.getProperty("unittests_skipped", 0), "successful": props.getProperty("unittests_successful", 0), } @util.renderer def r_duration(props): return props.getProperty("unittests_runtime") 
yield self.setupReporter(statusName=Interpolate("%(prop:plan_name)s"), statusSuffix=Interpolate(" [%(prop:unittests_os)s]"), buildNumber=Interpolate('100'), ref=Interpolate("%(prop:branch)s"), parentName=Interpolate("%(prop:master_plan)s"), testResults=r_testresults, duration=r_duration) self.reporter_test_props['unittests_failed'] = 0 self.reporter_test_props['unittests_skipped'] = 2 self.reporter_test_props['unittests_successful'] = 3 self.reporter_test_props['unittests_runtime'] = 50000 self.reporter_test_props['unittests_os'] = "win10" self.reporter_test_props['plan_name'] = "Unittests" self.reporter_test_props['master_plan'] = "Unittests-master" build = yield self.insert_build_finished(SUCCESS) self._http.expect( 'post', '/rest/api/1.0/projects/example.org/repos/repo/commits/d34db33fd43db33f/builds', json={'name': 'Unittests [win10]', 'description': 'Build done.', 'key': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'ref': "refs/pull/34/merge", 'buildNumber': '100', 'state': 'SUCCESSFUL', 'parent': 'Unittests-master', 'duration': 50000, 'testResults': {'failed': 0, 'skipped': 2, 'successful': 3}}, code=HTTP_PROCESSED) build['complete'] = True yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_with_test_results(self): yield self.setupReporter() self.reporter_test_props['tests_skipped'] = 2 self.reporter_test_props['tests_successful'] = 3 build = yield self.insert_build_finished(SUCCESS) self._http.expect( 'post', '/rest/api/1.0/projects/example.org/repos/repo/commits/d34db33fd43db33f/builds', json={'name': 'Builder0 #0', 'description': 'Build done.', 'key': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'refs/heads/master', 'buildNumber': '0', 'state': 'SUCCESSFUL', 'parent': 'Builder0', 'duration': 10000, 'testResults': {'failed': 0, 'skipped': 2, 'successful': 3}}, code=HTTP_PROCESSED) build['started_at'] = datetime.datetime(2019, 4, 1, 23, 38, 33, 154354, tzinfo=tzutc()) 
build["complete_at"] = datetime.datetime(2019, 4, 1, 23, 38, 43, 154354, tzinfo=tzutc()) build['complete'] = True yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_verbose(self): yield self.setupReporter(verbose=True) build = yield self.insert_build_finished(SUCCESS) self.setUpLogging() self._http.expect( 'post', '/rest/api/1.0/projects/example.org/repos/repo/commits/d34db33fd43db33f/builds', json={'name': 'Builder0 #0', 'description': 'Build started.', 'key': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'ref': "refs/heads/master", 'buildNumber': '0', 'state': 'INPROGRESS', 'parent': 'Builder0', 'duration': None, 'testResults': None}, code=HTTP_PROCESSED) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged('Sending payload:') self.assertLogged('Status "INPROGRESS" sent for example.org/repo d34db33fd43db33f') UNICODE_BODY = "body: \u00E5\u00E4\u00F6 text" EXPECTED_API = '/rest/api/1.0/projects/PRO/repos/myrepo/pull-requests/20/comments' PR_URL = "http://example.com/projects/PRO/repos/myrepo/pull-requests/20" class TestBitbucketServerPRCommentPush(TestReactorMixin, unittest.TestCase, ReporterTestMixin, LoggingMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() @defer.inlineCallbacks def setupReporter(self, verbose=True, generator_class=BuildStatusGenerator, **kwargs): self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, 'serv', auth=('username', 'passwd'), debug=None, verify=None) formatter = Mock(spec=MessageFormatter) formatter.format_message_for_build.return_value = { "body": UNICODE_BODY, "type": "text", "subject": "subject" } formatter.want_properties = True formatter.want_steps = False formatter.want_logs = False formatter.want_logs_content = False 
generator = generator_class(message_formatter=formatter) self.cp = BitbucketServerPRCommentPush("serv", Interpolate("username"), Interpolate("passwd"), verbose=verbose, generators=[generator], **kwargs) yield self.cp.setServiceParent(self.master) @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def setupBuildResults(self, buildResults, set_pr=True): yield super().insertTestData([buildResults], buildResults) build = yield self.master.data.get(('builds', 20)) if set_pr: yield self.master.db.builds.setBuildProperty( 20, "pullrequesturl", PR_URL, "test") return build @defer.inlineCallbacks def test_reporter_basic(self): yield self.setupReporter() build = yield self.setupBuildResults(SUCCESS) self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=HTTP_CREATED) build["complete"] = True self.setUpLogging() yield self.cp._got_event(('builds', 20, 'finished'), build) self.assertLogged('Comment sent to {}'.format(PR_URL)) @defer.inlineCallbacks def test_reporter_basic_without_logging(self): yield self.setupReporter(verbose=False) build = yield self.setupBuildResults(SUCCESS) self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=HTTP_CREATED) build["complete"] = True self.setUpLogging() yield self.cp._got_event(('builds', 20, 'finished'), build) self.assertNotLogged('Comment sent to {}'.format(PR_URL)) @defer.inlineCallbacks def test_reporter_without_pullrequest(self): yield self.setupReporter() build = yield self.setupBuildResults(SUCCESS, set_pr=False) build["complete"] = True # we don't expect any request yield self.cp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_reporter_with_buildset(self): yield self.setupReporter(generator_class=BuildSetStatusGenerator) yield self.setupBuildResults(SUCCESS) buildset = yield self.master.data.get(('buildsets', 98)) self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=HTTP_CREATED) yield 
self.cp._got_event(("buildsets", 98, "complete"), buildset) @defer.inlineCallbacks def test_reporter_logs_error_code_and_content_on_invalid_return_code(self): yield self.setupReporter() build = yield self.setupBuildResults(SUCCESS) http_error_code = 500 error_body = {"errors": [ {"message": "A dataXXXbase error has occurred."}]} self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=http_error_code, content_json=error_body) self.setUpLogging() build['complete'] = True yield self.cp._got_event(('builds', 20, 'finished'), build) self.assertLogged( "^{}: Unable to send a comment: ".format(http_error_code)) self.assertLogged("A dataXXXbase error has occurred") @defer.inlineCallbacks def test_reporter_logs_error_code_without_content_on_invalid_return_code(self): yield self.setupReporter() build = yield self.setupBuildResults(SUCCESS) http_error_code = 503 self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=http_error_code) self.setUpLogging() build['complete'] = True yield self.cp._got_event(('builds', 20, 'finished'), build) self.assertLogged("^{}: Unable to send a comment: ".format( http_error_code)) @defer.inlineCallbacks def test_reporter_does_not_log_return_code_on_valid_return_code( self): yield self.setupReporter() build = yield self.setupBuildResults(SUCCESS) http_code = 201 self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=http_code) self.setUpLogging() build['complete'] = True yield self.cp._got_event(('builds', 20, 'finished'), build) self.assertNotLogged("^{}:".format(http_code)) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_generators_build.py000066400000000000000000000361101413250514000273410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process.results import SUCCESS from buildbot.reporters import utils from buildbot.reporters.generators.build import BuildStartEndStatusGenerator from buildbot.reporters.generators.build import BuildStatusGenerator from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestBuildGenerator(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase, ReporterTestMixin): def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def insert_build_finished_get_props(self, results, **kwargs): build = yield self.insert_build_finished(results, **kwargs) yield utils.getDetailsForBuild(self.master, build, want_properties=True) return build @defer.inlineCallbacks def setup_generator(self, results=SUCCESS, message=None, db_args=None, **kwargs): if message is None: message = { "body": "body", "type": "text", "subject": "subject" } if db_args is None: db_args = {} build = yield self.insert_build_finished_get_props(results, **db_args) g = BuildStatusGenerator(**kwargs) g.formatter = Mock(spec=g.formatter) g.formatter.format_message_for_build.return_value = message return (g, build) 
@defer.inlineCallbacks def build_message(self, g, build, results=SUCCESS): reporter = Mock() reporter.getResponsibleUsersForBuild.return_value = [] report = yield g.build_message(g.formatter, self.master, reporter, build) return report @defer.inlineCallbacks def generate(self, g, key, build): reporter = Mock() reporter.getResponsibleUsersForBuild.return_value = [] report = yield g.generate(self.master, reporter, key, build) return report @defer.inlineCallbacks def test_build_message_nominal(self): g, build = yield self.setup_generator(mode=("change",)) report = yield self.build_message(g, build) g.formatter.format_message_for_build.assert_called_with(self.master, build, mode=('change',), users=[]) self.assertEqual(report, { 'body': 'body', 'subject': 'subject', 'type': 'text', 'results': SUCCESS, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_build_message_no_result(self): g, build = yield self.setup_generator(results=None, mode=("change",)) report = yield self.build_message(g, build, results=None) g.formatter.format_message_for_build.assert_called_with(self.master, build, mode=('change',), users=[]) self.assertEqual(report, { 'body': 'body', 'subject': 'subject', 'type': 'text', 'results': None, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_build_message_no_result_default_subject(self): subject = 'result: %(result)s builder: %(builder)s title: %(title)s' message = { "body": "body", "type": "text", "subject": None, } g, build = yield self.setup_generator(results=None, subject=subject, message=message, mode=("change",)) report = yield self.build_message(g, build, results=None) g.formatter.format_message_for_build.assert_called_with(self.master, build, mode=('change',), users=[]) self.assertEqual(report, { 'body': 'body', 'subject': 'result: not finished builder: Builder0 title: Buildbot', 'type': 'text', 'results': None, 'builds': [build], 'users': [], 'patches': [], 'logs': 
[] }) @defer.inlineCallbacks def test_build_message_addLogs(self): g, build = yield self.setup_generator(mode=("change",), add_logs=True) report = yield self.build_message(g, build) self.assertEqual(report['logs'][0]['logid'], 60) self.assertIn("log with", report['logs'][0]['content']['content']) @defer.inlineCallbacks def test_build_message_add_patch(self): g, build = yield self.setup_generator(mode=("change",), add_patch=True, db_args={'insert_patch': True}) report = yield self.build_message(g, build) patch_dict = { 'author': 'him@foo', 'body': b'hello, world', 'comment': 'foo', 'level': 3, 'patchid': 99, 'subdir': '/foo' } self.assertEqual(report['patches'], [patch_dict]) @defer.inlineCallbacks def test_build_message_add_patch_no_patch(self): g, build = yield self.setup_generator(mode=("change",), add_patch=True, db_args={'insert_patch': False}) report = yield self.build_message(g, build) self.assertEqual(report['patches'], []) @defer.inlineCallbacks def test_generate_finished(self): g, build = yield self.setup_generator() report = yield self.generate(g, ('builds', 123, 'finished'), build) self.assertEqual(report, { 'body': 'body', 'subject': 'subject', 'type': 'text', 'results': SUCCESS, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_generate_finished_non_matching_builder(self): g, build = yield self.setup_generator(builders=['non-matched']) report = yield self.generate(g, ('builds', 123, 'finished'), build) self.assertIsNone(report) @defer.inlineCallbacks def test_generate_finished_non_matching_result(self): g, build = yield self.setup_generator(mode=('failing',)) report = yield self.generate(g, ('builds', 123, 'finished'), build) self.assertIsNone(report) @defer.inlineCallbacks def test_generate_new(self): g, build = yield self.setup_generator(results=None, mode=('failing',), report_new=True) report = yield self.generate(g, ('builds', 123, 'new'), build) self.assertEqual(report, { 'body': 'body', 'subject': 
'subject', 'type': 'text', 'results': None, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) class TestBuildStartEndGenerator(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase, ReporterTestMixin): all_messages = ('failing', 'passing', 'warnings', 'exception', 'cancelled') def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def insert_build_finished_get_props(self, results, **kwargs): build = yield self.insert_build_finished(results, **kwargs) yield utils.getDetailsForBuild(self.master, build, want_properties=True) return build @parameterized.expand([ ('tags', 'tag'), ('tags', 1), ('builders', 'builder'), ('builders', 1), ('schedulers', 'scheduler'), ('schedulers', 1), ('branches', 'branch'), ('branches', 1), ]) def test_list_params_check_raises(self, arg_name, arg_value): kwargs = {arg_name: arg_value} g = BuildStartEndStatusGenerator(**kwargs) with self.assertRaisesConfigError('must be a list or None'): g.check() def setup_generator(self, results=SUCCESS, start_message=None, end_message=None, **kwargs): if start_message is None: start_message = { "body": "start body", "type": "plain", "subject": "start subject" } if end_message is None: end_message = { "body": "end body", "type": "plain", "subject": "end subject" } g = BuildStartEndStatusGenerator(**kwargs) g.start_formatter = Mock(spec=g.start_formatter) g.start_formatter.format_message_for_build.return_value = start_message g.end_formatter = Mock(spec=g.end_formatter) g.end_formatter.format_message_for_build.return_value = end_message return g @defer.inlineCallbacks def build_message(self, g, build, results=SUCCESS): reporter = Mock() reporter.getResponsibleUsersForBuild.return_value = [] report = yield g.build_message(g.start_formatter, self.master, reporter, build) return report @defer.inlineCallbacks def generate(self, g, key, build): reporter = Mock() 
reporter.getResponsibleUsersForBuild.return_value = [] report = yield g.generate(self.master, reporter, key, build) return report @defer.inlineCallbacks def test_build_message_start(self): g = yield self.setup_generator() build = yield self.insert_build_finished_get_props(SUCCESS) report = yield self.build_message(g, build) g.start_formatter.format_message_for_build.assert_called_with(self.master, build, mode=self.all_messages, users=[]) self.assertEqual(report, { 'body': 'start body', 'subject': 'start subject', 'type': 'plain', 'results': SUCCESS, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_build_message_start_no_result(self): g = yield self.setup_generator(results=None) build = yield self.insert_build_new() report = yield self.build_message(g, build, results=None) g.start_formatter.format_message_for_build.assert_called_with(self.master, build, mode=self.all_messages, users=[]) self.assertEqual(report, { 'body': 'start body', 'subject': 'start subject', 'type': 'plain', 'results': None, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_is_message_needed_ignores_unspecified_tags(self): build = yield self.insert_build_finished_get_props(SUCCESS) # force tags build['builder']['tags'] = ['tag'] g = BuildStartEndStatusGenerator(tags=['not_existing_tag']) self.assertFalse(g.is_message_needed_by_props(build)) @defer.inlineCallbacks def test_is_message_needed_tags(self): build = yield self.insert_build_finished_get_props(SUCCESS) # force tags build['builder']['tags'] = ['tag'] g = BuildStartEndStatusGenerator(tags=['tag']) self.assertTrue(g.is_message_needed_by_props(build)) @defer.inlineCallbacks def test_build_message_add_logs(self): g = yield self.setup_generator(add_logs=True) build = yield self.insert_build_finished_get_props(SUCCESS) report = yield self.build_message(g, build) self.assertEqual(report['logs'][0]['logid'], 60) self.assertIn("log with", 
report['logs'][0]['content']['content']) @defer.inlineCallbacks def test_build_message_add_patch(self): g = yield self.setup_generator(add_patch=True) build = yield self.insert_build_finished_get_props(SUCCESS, insert_patch=True) report = yield self.build_message(g, build) patch_dict = { 'author': 'him@foo', 'body': b'hello, world', 'comment': 'foo', 'level': 3, 'patchid': 99, 'subdir': '/foo' } self.assertEqual(report['patches'], [patch_dict]) @defer.inlineCallbacks def test_build_message_add_patch_no_patch(self): g = yield self.setup_generator(add_patch=True) build = yield self.insert_build_finished_get_props(SUCCESS, insert_patch=False) report = yield self.build_message(g, build) self.assertEqual(report['patches'], []) @defer.inlineCallbacks def test_generate_new(self): g = yield self.setup_generator() build = yield self.insert_build_new() report = yield self.generate(g, ('builds', 123, 'new'), build) self.assertEqual(report, { 'body': 'start body', 'subject': 'start subject', 'type': 'plain', 'results': None, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_generate_finished(self): g = yield self.setup_generator() build = yield self.insert_build_finished_get_props(SUCCESS) report = yield self.generate(g, ('builds', 123, 'finished'), build) self.assertEqual(report, { 'body': 'end body', 'subject': 'end subject', 'type': 'plain', 'results': SUCCESS, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_generate_none(self): g = yield self.setup_generator(builders=['other builder']) build = yield self.insert_build_new() report = yield self.generate(g, ('builds', 123, 'new'), build) self.assertIsNone(report, None) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_generators_buildrequest.py000066400000000000000000000126151413250514000307560ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process.builder import Builder from buildbot.reporters.generators.buildrequest import BuildRequestGenerator from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestBuildRequestGenerator(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase, ReporterTestMixin): all_messages = ('failing', 'passing', 'warnings', 'exception', 'cancelled') def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) builder = Mock(spec=Builder) builder.master = self.master self.master.botmaster.getBuilderById = Mock(return_value=builder) @parameterized.expand([ ('tags', 'tag'), ('tags', 1), ('builders', 'builder'), ('builders', 1), ('schedulers', 'scheduler'), ('schedulers', 1), ('branches', 'branch'), ('branches', 1), ]) def test_list_params_check_raises(self, arg_name, arg_value): kwargs = {arg_name: arg_value} g = BuildRequestGenerator(**kwargs) with self.assertRaisesConfigError('must be a list or None'): g.check() def 
setup_generator(self, message=None, **kwargs): if message is None: message = { "body": "start body", "type": "plain", "subject": "start subject" } g = BuildRequestGenerator(**kwargs) g.formatter = Mock(spec=g.formatter) g.formatter.format_message_for_build.return_value = message return g @defer.inlineCallbacks def test_build_message_start_no_result(self): g = yield self.setup_generator() buildrequest = yield self.insert_buildrequest_new() build = yield g.partial_build_dict(self.master, buildrequest) report = yield g.buildrequest_message(self.master, build) g.formatter.format_message_for_build.assert_called_with(self.master, build, mode=self.all_messages, users=[]) self.assertEqual(report, { 'body': 'start body', 'subject': 'start subject', 'type': 'plain', 'results': None, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_build_message_add_patch(self): g = yield self.setup_generator(add_patch=True) buildrequest = yield self.insert_buildrequest_new(insert_patch=True) build = yield g.partial_build_dict(self.master, buildrequest) report = yield g.buildrequest_message(self.master, build) patch_dict = { 'author': 'him@foo', 'body': b'hello, world', 'comment': 'foo', 'level': 3, 'patchid': 99, 'subdir': '/foo' } self.assertEqual(report['patches'], [patch_dict]) @defer.inlineCallbacks def test_build_message_add_patch_no_patch(self): g = yield self.setup_generator(add_patch=True) buildrequest = yield self.insert_buildrequest_new(insert_patch=False) build = yield g.partial_build_dict(self.master, buildrequest) report = yield g.buildrequest_message(self.master, build) self.assertEqual(report['patches'], []) @defer.inlineCallbacks def test_generate_new(self): g = yield self.setup_generator(add_patch=True) buildrequest = yield self.insert_buildrequest_new(insert_patch=False) build = yield g.partial_build_dict(self.master, buildrequest) report = yield g.generate(self.master, None, ('buildrequests', 11, 'new'), buildrequest) 
self.assertEqual(report, { 'body': 'start body', 'subject': 'start subject', 'type': 'plain', 'results': None, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_generate_none(self): g = BuildRequestGenerator(builders=['not_existing_builder']) buildrequest = yield self.insert_buildrequest_new() report = yield g.generate(self.master, None, ('buildrequests', 11, 'new'), buildrequest) self.assertIsNone(report, None) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_generators_buildset.py000066400000000000000000000153011413250514000300540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process.results import SUCCESS from buildbot.reporters import utils from buildbot.reporters.generators.buildset import BuildSetStatusGenerator from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestBuildSetGenerator(ConfigErrorsMixin, TestReactorMixin, ReporterTestMixin, unittest.TestCase): # Note: most of the functionality of BuildSetStatusGenerator is shared with # BuildStatusGenerator and is tested there. def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def insert_build_finished_get_props(self, results, **kwargs): build = yield self.insert_build_finished(results, **kwargs) yield utils.getDetailsForBuild(self.master, build, want_properties=True) return build @defer.inlineCallbacks def setup_generator(self, results=SUCCESS, message=None, db_args=None, **kwargs): if message is None: message = { "body": "body", "type": "text", "subject": "subject" } if db_args is None: db_args = {} build = yield self.insert_build_finished_get_props(results, **db_args) buildset = yield self.master.data.get(("buildsets", 98)) g = BuildSetStatusGenerator(**kwargs) g.formatter = Mock(spec=g.formatter) g.formatter.format_message_for_build.return_value = message g.formatter.want_logs = False g.formatter.want_logs_content = False g.formatter.want_steps = False return (g, build, buildset) @defer.inlineCallbacks def buildset_message(self, g, builds, results=SUCCESS): reporter = Mock() reporter.getResponsibleUsersForBuild.return_value = [] report = yield g.buildset_message(g.formatter, self.master, reporter, builds, results) return report @defer.inlineCallbacks def 
generate(self, g, key, build): reporter = Mock() reporter.getResponsibleUsersForBuild.return_value = [] report = yield g.generate(self.master, reporter, key, build) return report @defer.inlineCallbacks def test_buildset_message_nominal(self): g, build, _ = yield self.setup_generator(mode=("change",)) report = yield self.buildset_message(g, [build]) g.formatter.format_message_for_build.assert_called_with(self.master, build, mode=('change',), users=[]) self.assertEqual(report, { 'body': 'body', 'subject': 'subject', 'type': 'text', 'results': SUCCESS, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_buildset_message_no_result(self): g, build, _ = yield self.setup_generator(results=None, mode=("change",)) report = yield self.buildset_message(g, [build], results=None) g.formatter.format_message_for_build.assert_called_with(self.master, build, mode=('change',), users=[]) self.assertEqual(report, { 'body': 'body', 'subject': 'subject', 'type': 'text', 'results': None, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_buildset_message_no_result_default_subject(self): subject = 'result: %(result)s builder: %(builder)s title: %(title)s' message = { "body": "body", "type": "text", "subject": None, } g, build, _ = yield self.setup_generator(results=None, subject=subject, message=message, mode=("change",)) report = yield self.buildset_message(g, [build], results=None) g.formatter.format_message_for_build.assert_called_with(self.master, build, mode=('change',), users=[]) self.assertEqual(report, { 'body': 'body', 'subject': 'result: not finished builder: whole buildset title: Buildbot', 'type': 'text', 'results': None, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_generate_complete(self): g, build, buildset = yield self.setup_generator() report = yield self.generate(g, ('buildsets', 98, 'complete'), buildset) # we retrieve build data differently 
when processing the buildset, so adjust it to match del build['buildrequest'] del build['parentbuild'] del build['parentbuilder'] self.assertEqual(report, { 'body': 'body', 'subject': 'subject', 'type': 'text', 'results': SUCCESS, 'builds': [build], 'users': [], 'patches': [], 'logs': [] }) @defer.inlineCallbacks def test_generate_complete_non_matching_builder(self): g, build, buildset = yield self.setup_generator(builders=['non-matched']) report = yield self.generate(g, ('buildsets', 98, 'complete'), buildset) self.assertIsNone(report) @defer.inlineCallbacks def test_generate_complete_non_matching_result(self): g, build, buildset = yield self.setup_generator(mode=('failing',)) report = yield self.generate(g, ('buildsets', 98, 'complete'), buildset) self.assertIsNone(report) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_generators_utils.py000066400000000000000000000323061413250514000274050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import copy from parameterized import parameterized from twisted.internet import defer from twisted.trial import unittest from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters import utils from buildbot.reporters.generators.utils import BuildStatusGeneratorMixin from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestBuildGenerator(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase, ReporterTestMixin): def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def insert_build_finished_get_props(self, results, **kwargs): build = yield self.insert_build_finished(results, **kwargs) yield utils.getDetailsForBuild(self.master, build, want_properties=True) return build def create_generator(self, mode=("failing", "passing", "warnings"), tags=None, builders=None, schedulers=None, branches=None, subject="Some subject", add_logs=False, add_patch=False): return BuildStatusGeneratorMixin(mode, tags, builders, schedulers, branches, subject, add_logs, add_patch) def test_generate_name(self): g = self.create_generator(tags=['tag1', 'tag2'], builders=['b1', 'b2'], schedulers=['s1', 's2'], branches=['b1', 'b2']) self.assertEqual(g.generate_name(), 'BuildStatusGeneratorMixin_tags_tag1+tag2_builders_b1+b2_' + 'schedulers_s1+s2_branches_b1+b2failing_passing_warnings') @parameterized.expand([ ('tags', 'tag'), ('tags', 1), ('builders', 'builder'), ('builders', 1), ('schedulers', 'scheduler'), ('schedulers', 1), ('branches', 'branch'), ('branches', 1), ]) def 
test_list_params_check_raises(self, arg_name, arg_value): kwargs = {arg_name: arg_value} g = self.create_generator(**kwargs) with self.assertRaisesConfigError('must be a list or None'): g.check() @parameterized.expand([ ('unknown_str', 'unknown', 'not a valid mode'), ('unknown_list', ['unknown'], 'not a valid mode'), ('unknown_list_two', ['unknown', 'failing'], 'not a valid mode'), ('all_in_list', ['all', 'failing'], 'must be passed in as a separate string'), ]) def test_tag_check_raises(self, name, mode, expected_exception): g = self.create_generator(mode=mode) with self.assertRaisesConfigError(expected_exception): g.check() def test_subject_newlines_not_allowed(self): g = self.create_generator(subject='subject\nwith\nnewline') with self.assertRaisesConfigError('Newlines are not allowed'): g.check() @defer.inlineCallbacks def test_is_message_needed_ignores_unspecified_tags(self): build = yield self.insert_build_finished_get_props(SUCCESS) # force tags build['builder']['tags'] = ['slow'] g = self.create_generator(tags=["fast"]) self.assertFalse(g.is_message_needed_by_props(build)) @defer.inlineCallbacks def test_is_message_needed_tags(self): build = yield self.insert_build_finished_get_props(SUCCESS) # force tags build['builder']['tags'] = ['fast'] g = self.create_generator(tags=["fast"]) self.assertTrue(g.is_message_needed_by_props(build)) @defer.inlineCallbacks def test_is_message_needed_schedulers_sends_mail(self): build = yield self.insert_build_finished_get_props(SUCCESS) g = self.create_generator(schedulers=['checkin']) self.assertTrue(g.is_message_needed_by_props(build)) @defer.inlineCallbacks def test_is_message_needed_schedulers_doesnt_send_mail(self): build = yield self.insert_build_finished_get_props(SUCCESS) g = self.create_generator(schedulers=['some-random-scheduler']) self.assertFalse(g.is_message_needed_by_props(build)) @defer.inlineCallbacks def test_is_message_needed_branches_sends_mail(self): build = yield 
self.insert_build_finished_get_props(SUCCESS) g = self.create_generator(branches=['refs/pull/34/merge']) self.assertTrue(g.is_message_needed_by_props(build)) @defer.inlineCallbacks def test_is_message_needed_branches_doesnt_send_mail(self): build = yield self.insert_build_finished_get_props(SUCCESS) g = self.create_generator(branches=['some-random-branch']) self.assertFalse(g.is_message_needed_by_props(build)) @defer.inlineCallbacks def run_simple_test_sends_message_for_mode(self, mode, result, should_send=True): build = yield self.insert_build_finished_get_props(result) g = self.create_generator(mode=mode) self.assertEqual(g.is_message_needed_by_results(build), should_send) def run_simple_test_ignores_message_for_mode(self, mode, result): return self.run_simple_test_sends_message_for_mode(mode, result, False) def test_is_message_needed_mode_all_for_success(self): return self.run_simple_test_sends_message_for_mode("all", SUCCESS) def test_is_message_needed_mode_all_for_failure(self): return self.run_simple_test_sends_message_for_mode("all", FAILURE) def test_is_message_needed_mode_all_for_warnings(self): return self.run_simple_test_sends_message_for_mode("all", WARNINGS) def test_is_message_needed_mode_all_for_exception(self): return self.run_simple_test_sends_message_for_mode("all", EXCEPTION) def test_is_message_needed_mode_all_for_cancelled(self): return self.run_simple_test_sends_message_for_mode("all", CANCELLED) def test_is_message_needed_mode_failing_for_success(self): return self.run_simple_test_ignores_message_for_mode("failing", SUCCESS) def test_is_message_needed_mode_failing_for_failure(self): return self.run_simple_test_sends_message_for_mode("failing", FAILURE) def test_is_message_needed_mode_failing_for_warnings(self): return self.run_simple_test_ignores_message_for_mode("failing", WARNINGS) def test_is_message_needed_mode_failing_for_exception(self): return self.run_simple_test_ignores_message_for_mode("failing", EXCEPTION) def 
test_is_message_needed_mode_exception_for_success(self): return self.run_simple_test_ignores_message_for_mode("exception", SUCCESS) def test_is_message_needed_mode_exception_for_failure(self): return self.run_simple_test_ignores_message_for_mode("exception", FAILURE) def test_is_message_needed_mode_exception_for_warnings(self): return self.run_simple_test_ignores_message_for_mode("exception", WARNINGS) def test_is_message_needed_mode_exception_for_exception(self): return self.run_simple_test_sends_message_for_mode("exception", EXCEPTION) def test_is_message_needed_mode_warnings_for_success(self): return self.run_simple_test_ignores_message_for_mode("warnings", SUCCESS) def test_is_message_needed_mode_warnings_for_failure(self): return self.run_simple_test_sends_message_for_mode("warnings", FAILURE) def test_is_message_needed_mode_warnings_for_warnings(self): return self.run_simple_test_sends_message_for_mode("warnings", WARNINGS) def test_is_message_needed_mode_warnings_for_exception(self): return self.run_simple_test_ignores_message_for_mode("warnings", EXCEPTION) def test_is_message_needed_mode_passing_for_success(self): return self.run_simple_test_sends_message_for_mode("passing", SUCCESS) def test_is_message_needed_mode_passing_for_failure(self): return self.run_simple_test_ignores_message_for_mode("passing", FAILURE) def test_is_message_needed_mode_passing_for_warnings(self): return self.run_simple_test_ignores_message_for_mode("passing", WARNINGS) def test_is_message_needed_mode_passing_for_exception(self): return self.run_simple_test_ignores_message_for_mode("passing", EXCEPTION) @defer.inlineCallbacks def run_sends_message_for_problems(self, mode, results1, results2, should_send=True): build = yield self.insert_build_finished_get_props(results2) g = self.create_generator(mode=mode) if results1 is not None: build['prev_build'] = copy.deepcopy(build) build['prev_build']['results'] = results1 else: build['prev_build'] = None 
self.assertEqual(g.is_message_needed_by_results(build), should_send) def test_is_message_needed_mode_problem_sends_on_problem(self): return self.run_sends_message_for_problems("problem", SUCCESS, FAILURE, True) def test_is_message_needed_mode_problem_ignores_successful_build(self): return self.run_sends_message_for_problems("problem", SUCCESS, SUCCESS, False) def test_is_message_needed_mode_problem_ignores_two_failed_builds_in_sequence(self): return self.run_sends_message_for_problems("problem", FAILURE, FAILURE, False) def test_is_message_needed_mode_change_sends_on_change(self): return self.run_sends_message_for_problems("change", FAILURE, SUCCESS, True) def test_is_message_needed_mode_change_sends_on_failure(self): return self.run_sends_message_for_problems("change", SUCCESS, FAILURE, True) def test_is_message_needed_mode_change_ignores_first_build(self): return self.run_sends_message_for_problems("change", None, FAILURE, False) def test_is_message_needed_mode_change_ignores_first_build2(self): return self.run_sends_message_for_problems("change", None, SUCCESS, False) def test_is_message_needed_mode_change_ignores_same_result_in_sequence(self): return self.run_sends_message_for_problems("change", SUCCESS, SUCCESS, False) def test_is_message_needed_mode_change_ignores_same_result_in_sequence2(self): return self.run_sends_message_for_problems("change", FAILURE, FAILURE, False) @parameterized.expand([ ('bool_true', True, 'step', 'log', True), ('bool_false', False, 'step', 'log', False), ('match_by_log_name', ['log'], 'step', 'log', True), ('no_match_by_log_name', ['not_existing'], 'step', 'log', False), ('match_by_log_step_name', ['step.log'], 'step', 'log', True), ('no_match_by_log_step_name', ['step1.log1'], 'step', 'log', False), ]) def test_should_attach_log(self, name, add_logs, log_step_name, log_name, expected_result): g = self.create_generator(add_logs=add_logs) log = {'stepname': log_step_name, 'name': log_name} self.assertEqual(g._should_attach_log(log), 
expected_result) @parameterized.expand([ ('both_none', None, None, (None, False)), ('old_none', None, 'type', ('type', True)), ('new_none', 'type', None, ('type', False)), ('same', 'type', 'type', ('type', True)), ('different', 'type1', 'type2', ('type1', False)), ]) def test_merge_msgtype(self, name, old, new, expected_result): g = self.create_generator() self.assertEqual(g._merge_msgtype(old, new), expected_result) @parameterized.expand([ ('both_none', None, None, None), ('old_none', None, 'sub', 'sub'), ('new_none', 'sub', None, 'sub'), ('same', 'sub', 'sub', 'sub'), ('different', 'sub1', 'sub2', 'sub1'), ]) def test_merge_subject(self, name, old, new, expected_result): g = self.create_generator() self.assertEqual(g._merge_subject(old, new), expected_result) @parameterized.expand([ ('both_none', None, None, (None, True)), ('old_none', None, 'body', ('body', True)), ('new_none', 'body', None, ('body', True)), ('both_str', 'body1\n', 'body2\n', ('body1\nbody2\n', True)), ('both_list', ['body1'], ['body2'], (['body1', 'body2'], True)), ('both_dict', {'v': 'body1'}, {'v': 'body2'}, ({'v': 'body1'}, False)), ('str_list', ['body1'], 'body2', (['body1'], False)), ]) def test_merge_body(self, name, old, new, expected_result): g = self.create_generator() self.assertEqual(g._merge_body(old, new), expected_result) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_generators_worker.py000066400000000000000000000051461413250514000275600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from twisted.trial import unittest from buildbot.reporters.generators.worker import WorkerMissingGenerator from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin class TestWorkerMissingGenerator(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) def _get_worker_dict(self, worker_name): return { 'name': worker_name, 'notify': ["workeradmin@example.org"], 'workerinfo': {"admin": "myadmin"}, 'last_connection': "yesterday" } @parameterized.expand([ (['myworker'],), ('all',), ]) @defer.inlineCallbacks def test_report_matched_worker(self, worker_filter): g = WorkerMissingGenerator(workers=worker_filter) report = yield g.generate(self.master, None, 'worker.98.complete', self._get_worker_dict('myworker')) self.assertEqual(report['users'], ['workeradmin@example.org']) self.assertIn(b"has noticed that the worker named myworker went away", report['body']) @defer.inlineCallbacks def test_report_not_matched_worker(self): g = WorkerMissingGenerator(workers=['other']) report = yield g.generate(self.master, None, 'worker.98.complete', self._get_worker_dict('myworker')) self.assertIsNone(report) def test_unsupported_workers(self): g = WorkerMissingGenerator(workers='string worker') with self.assertRaisesConfigError("workers must be 'all', or list of worker names"): g.check() buildbot-3.4.0/master/buildbot/test/unit/reporters/test_gerrit.py000066400000000000000000000561711413250514000253160ustar00rootroot00000000000000# This file is part of 
Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import warnings from pkg_resources import parse_version from mock import Mock from mock import call from twisted.internet import defer from twisted.internet import error from twisted.internet import reactor from twisted.python import failure from twisted.trial import unittest from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.reporters import utils from buildbot.reporters.gerrit import GERRIT_LABEL_REVIEWED from buildbot.reporters.gerrit import GERRIT_LABEL_VERIFIED from buildbot.reporters.gerrit import GerritStatusPush from buildbot.reporters.gerrit import defaultReviewCB from buildbot.reporters.gerrit import defaultSummaryCB from buildbot.reporters.gerrit import makeReviewResult from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin warnings.filterwarnings('error', message='.*Gerrit status') def sampleReviewCB(builderName, build, result, status, arg): verified = 1 if result == SUCCESS else -1 return makeReviewResult(str({'name': builderName, 'result': result}), (GERRIT_LABEL_VERIFIED, verified)) @defer.inlineCallbacks def sampleReviewCBDeferred(builderName, build, result, status, arg): verified = 1 if 
result == SUCCESS else -1 result = yield makeReviewResult(str({'name': builderName, 'result': result}), (GERRIT_LABEL_VERIFIED, verified)) return result def sampleStartCB(builderName, build, arg): return makeReviewResult(str({'name': builderName}), (GERRIT_LABEL_REVIEWED, 0)) @defer.inlineCallbacks def sampleStartCBDeferred(builderName, build, arg): result = yield makeReviewResult(str({'name': builderName}), (GERRIT_LABEL_REVIEWED, 0)) return result def sampleSummaryCB(buildInfoList, results, status, arg): success = False failure = False for buildInfo in buildInfoList: if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement success = True else: failure = True if failure: verified = -1 elif success: verified = 1 else: verified = 0 return makeReviewResult(str(buildInfoList), (GERRIT_LABEL_VERIFIED, verified)) @defer.inlineCallbacks def sampleSummaryCBDeferred(buildInfoList, results, master, arg): success = False failure = False for buildInfo in buildInfoList: if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement success = True else: failure = True if failure: verified = -1 elif success: verified = 1 else: verified = 0 result = yield makeReviewResult(str(buildInfoList), (GERRIT_LABEL_VERIFIED, verified)) return result def legacyTestReviewCB(builderName, build, result, status, arg): msg = str({'name': builderName, 'result': result}) return (msg, 1 if result == SUCCESS else -1, 0) def legacyTestSummaryCB(buildInfoList, results, status, arg): success = False failure = False for buildInfo in buildInfoList: if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement success = True else: failure = True if failure: verified = -1 elif success: verified = 1 else: verified = 0 return (str(buildInfoList), verified, 0) class TestGerritStatusPush(TestReactorMixin, unittest.TestCase, ReporterTestMixin): def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, 
wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def setupGerritStatusPushSimple(self, *args, **kwargs): serv = kwargs.pop("server", "serv") username = kwargs.pop("username", "user") gsp = GerritStatusPush(serv, username, *args, **kwargs) yield gsp.setServiceParent(self.master) yield gsp.startService() return gsp @defer.inlineCallbacks def setupGerritStatusPush(self, *args, **kwargs): gsp = yield self.setupGerritStatusPushSimple(*args, **kwargs) gsp.sendCodeReview = Mock() return gsp @defer.inlineCallbacks def setupBuildResults(self, buildResults, finalResult): self.insertTestData(buildResults, finalResult) res = yield utils.getDetailsForBuildset(self.master, 98, want_properties=True) builds = res['builds'] buildset = res['buildset'] @defer.inlineCallbacks def getChangesForBuild(buildid): assert buildid == 20 ch = yield self.master.db.changes.getChange(13) return [ch] self.master.db.changes.getChangesForBuild = getChangesForBuild return (buildset, builds) def makeBuildInfo(self, buildResults, resultText, builds): info = [] for i, buildResult in enumerate(buildResults): info.append({'name': "Builder%d" % i, 'result': buildResults[i], 'resultText': resultText[i], 'text': 'buildText', 'url': "http://localhost:8080/#builders/%d/builds/%d" % (79 + i, i), 'build': builds[i]}) return info @defer.inlineCallbacks def run_fake_summary_build(self, gsp, buildResults, finalResult, resultText, expWarning=False): buildset, builds = yield self.setupBuildResults(buildResults, finalResult) yield gsp.buildsetComplete('buildset.98.complete'.split("."), buildset) info = self.makeBuildInfo(buildResults, resultText, builds) if expWarning: self.assertEqual([w['message'] for w in self.flushWarnings()], ['The Gerrit status callback uses the old ' 'way to communicate results. 
The outcome ' 'might be not what is expected.']) return str(info) # check_summary_build and check_summary_build_legacy differ in two things: # * the callback used # * the expected result @defer.inlineCallbacks def check_summary_build_deferred(self, buildResults, finalResult, resultText, verifiedScore): gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCBDeferred) msg = yield self.run_fake_summary_build(gsp, buildResults, finalResult, resultText) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore)) gsp.sendCodeReview.assert_called_once_with(self.reporter_test_project, self.reporter_test_revision, result) @defer.inlineCallbacks def check_summary_build(self, buildResults, finalResult, resultText, verifiedScore): gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCB) msg = yield self.run_fake_summary_build(gsp, buildResults, finalResult, resultText) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore)) gsp.sendCodeReview.assert_called_once_with(self.reporter_test_project, self.reporter_test_revision, result) @defer.inlineCallbacks def check_summary_build_legacy(self, buildResults, finalResult, resultText, verifiedScore): gsp = yield self.setupGerritStatusPush(summaryCB=legacyTestSummaryCB) msg = yield self.run_fake_summary_build(gsp, buildResults, finalResult, resultText, expWarning=True) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore), (GERRIT_LABEL_REVIEWED, 0)) gsp.sendCodeReview.assert_called_once_with(self.reporter_test_project, self.reporter_test_revision, result) @defer.inlineCallbacks def test_gerrit_ssh_cmd(self): kwargs = { 'server': 'example.com', 'username': 'buildbot', } without_identity = yield self.setupGerritStatusPush(**kwargs) expected1 = [ 'ssh', '-o', 'BatchMode=yes', 'buildbot@example.com', '-p', '29418', 'gerrit', 'foo'] self.assertEqual(expected1, without_identity._gerritCmd('foo')) yield without_identity.disownServiceParent() with_identity = yield 
self.setupGerritStatusPush( identity_file='/path/to/id_rsa', **kwargs) expected2 = [ 'ssh', '-o', 'BatchMode=yes', '-i', '/path/to/id_rsa', 'buildbot@example.com', '-p', '29418', 'gerrit', 'foo', ] self.assertEqual(expected2, with_identity._gerritCmd('foo')) def test_buildsetComplete_success_sends_summary_review_deferred(self): d = self.check_summary_build_deferred(buildResults=[SUCCESS, SUCCESS], finalResult=SUCCESS, resultText=[ "succeeded", "succeeded"], verifiedScore=1) return d def test_buildsetComplete_success_sends_summary_review(self): d = self.check_summary_build(buildResults=[SUCCESS, SUCCESS], finalResult=SUCCESS, resultText=["succeeded", "succeeded"], verifiedScore=1) return d def test_buildsetComplete_failure_sends_summary_review(self): d = self.check_summary_build(buildResults=[FAILURE, FAILURE], finalResult=FAILURE, resultText=["failed", "failed"], verifiedScore=-1) return d def test_buildsetComplete_mixed_sends_summary_review(self): d = self.check_summary_build(buildResults=[SUCCESS, FAILURE], finalResult=FAILURE, resultText=["succeeded", "failed"], verifiedScore=-1) return d def test_buildsetComplete_success_sends_summary_review_legacy(self): d = self.check_summary_build_legacy(buildResults=[SUCCESS, SUCCESS], finalResult=SUCCESS, resultText=[ "succeeded", "succeeded"], verifiedScore=1) return d def test_buildsetComplete_failure_sends_summary_review_legacy(self): d = self.check_summary_build_legacy(buildResults=[FAILURE, FAILURE], finalResult=FAILURE, resultText=["failed", "failed"], verifiedScore=-1) return d def test_buildsetComplete_mixed_sends_summary_review_legacy(self): d = self.check_summary_build_legacy(buildResults=[SUCCESS, FAILURE], finalResult=FAILURE, resultText=["succeeded", "failed"], verifiedScore=-1) return d @defer.inlineCallbacks def test_buildsetComplete_filtered_builder(self): gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCB) gsp.builders = ["foo"] yield self.run_fake_summary_build(gsp, [FAILURE, FAILURE], 
FAILURE, ["failed", "failed"]) self.assertFalse( gsp.sendCodeReview.called, "sendCodeReview should not be called") @defer.inlineCallbacks def test_buildsetComplete_filtered_matching_builder(self): gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCB) gsp.builders = ["Builder1"] yield self.run_fake_summary_build(gsp, [FAILURE, FAILURE], FAILURE, ["failed", "failed"]) self.assertTrue( gsp.sendCodeReview.called, "sendCodeReview should be called") @defer.inlineCallbacks def run_fake_single_build(self, gsp, buildResult, expWarning=False): buildset, builds = yield self.setupBuildResults([buildResult], buildResult) yield gsp._got_event(('builds', builds[0]['buildid'], 'new'), builds[0]) yield gsp._got_event(('builds', builds[0]['buildid'], 'finished'), builds[0]) if expWarning: self.assertEqual([w['message'] for w in self.flushWarnings()], ['The Gerrit status callback uses the old ' 'way to communicate results. The outcome ' 'might be not what is expected.']) return str({'name': 'Builder0', 'result': buildResult}) # same goes for check_single_build and check_single_build_legacy @defer.inlineCallbacks def check_single_build(self, buildResult, verifiedScore): gsp = yield self.setupGerritStatusPush(reviewCB=sampleReviewCB, startCB=sampleStartCB) msg = yield self.run_fake_single_build(gsp, buildResult) start = makeReviewResult(str({'name': self.reporter_test_builder_name}), (GERRIT_LABEL_REVIEWED, 0)) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore)) calls = [call(self.reporter_test_project, self.reporter_test_revision, start), call(self.reporter_test_project, self.reporter_test_revision, result)] gsp.sendCodeReview.assert_has_calls(calls) # same goes for check_single_build and check_single_build_legacy @defer.inlineCallbacks def check_single_build_deferred(self, buildResult, verifiedScore): gsp = yield self.setupGerritStatusPush(reviewCB=sampleReviewCBDeferred, startCB=sampleStartCBDeferred) msg = yield self.run_fake_single_build(gsp, 
buildResult) start = makeReviewResult(str({'name': self.reporter_test_builder_name}), (GERRIT_LABEL_REVIEWED, 0)) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore)) calls = [call(self.reporter_test_project, self.reporter_test_revision, start), call(self.reporter_test_project, self.reporter_test_revision, result)] gsp.sendCodeReview.assert_has_calls(calls) @defer.inlineCallbacks def check_single_build_legacy(self, buildResult, verifiedScore): gsp = yield self.setupGerritStatusPush(reviewCB=legacyTestReviewCB, startCB=sampleStartCB) msg = yield self.run_fake_single_build(gsp, buildResult, expWarning=True) start = makeReviewResult(str({'name': self.reporter_test_builder_name}), (GERRIT_LABEL_REVIEWED, 0)) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore), (GERRIT_LABEL_REVIEWED, 0)) calls = [call(self.reporter_test_project, self.reporter_test_revision, start), call(self.reporter_test_project, self.reporter_test_revision, result)] gsp.sendCodeReview.assert_has_calls(calls) def test_buildComplete_success_sends_review(self): return self.check_single_build(SUCCESS, 1) def test_buildComplete_failure_sends_review(self): return self.check_single_build(FAILURE, -1) def test_buildComplete_success_sends_review_legacy(self): return self.check_single_build_legacy(SUCCESS, 1) def test_buildComplete_failure_sends_review_legacy(self): return self.check_single_build_legacy(FAILURE, -1) # same goes for check_single_build and check_single_build_legacy @defer.inlineCallbacks def test_single_build_filtered(self): gsp = yield self.setupGerritStatusPush(reviewCB=sampleReviewCB, startCB=sampleStartCB) gsp.builders = ["Builder0"] yield self.run_fake_single_build(gsp, SUCCESS) self.assertTrue( gsp.sendCodeReview.called, "sendCodeReview should be called") gsp.sendCodeReview = Mock() gsp.builders = ["foo"] yield self.run_fake_single_build(gsp, SUCCESS) self.assertFalse( gsp.sendCodeReview.called, "sendCodeReview should not be called") def 
test_defaultReviewCBSuccess(self): res = defaultReviewCB("builderName", {}, SUCCESS, None, None) self.assertEqual(res['labels'], {'Verified': 1}) res = defaultReviewCB("builderName", {}, RETRY, None, None) self.assertEqual(res['labels'], {}) def test_defaultSummaryCB(self): info = self.makeBuildInfo( [SUCCESS, FAILURE], ["yes", "no"], [None, None]) res = defaultSummaryCB(info, SUCCESS, None, None) self.assertEqual(res['labels'], {'Verified': -1}) info = self.makeBuildInfo( [SUCCESS, SUCCESS], ["yes", "yes"], [None, None]) res = defaultSummaryCB(info, SUCCESS, None, None) self.assertEqual(res['labels'], {'Verified': 1}) @defer.inlineCallbacks def testBuildGerritCommand(self): gsp = yield self.setupGerritStatusPushSimple() spawnSkipFirstArg = Mock() gsp.spawnProcess = lambda _, *a, **k: spawnSkipFirstArg(*a, **k) yield gsp.sendCodeReview("project", "revision", {"message": "bla", "labels": {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', '-o', 'BatchMode=yes', 'user@serv', '-p', '29418', 'gerrit', 'version'], env=None) gsp.processVersion(parse_version("2.6"), lambda: None) spawnSkipFirstArg = Mock() yield gsp.sendCodeReview("project", "revision", {"message": "bla", "labels": {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', '-o', 'BatchMode=yes', 'user@serv', '-p', '29418', 'gerrit', 'review', '--project project', "--message 'bla'", '--label Verified=1', 'revision'], env=None) # <=2.5 uses other syntax gsp.processVersion(parse_version("2.4"), lambda: None) spawnSkipFirstArg = Mock() yield gsp.sendCodeReview("project", "revision", {"message": "bla", "labels": {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', '-o', 'BatchMode=yes', 'user@serv', '-p', '29418', 'gerrit', 'review', '--project project', "--message 'bla'", '--verified 1', 'revision'], env=None) # now test the notify argument, even though _gerrit_notify # is private, work around that gsp._gerrit_notify = 'OWNER' 
gsp.processVersion(parse_version('2.6'), lambda: None) spawnSkipFirstArg = Mock() yield gsp.sendCodeReview('project', 'revision', {'message': 'bla', 'labels': {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', '-o', 'BatchMode=yes', 'user@serv', '-p', '29418', 'gerrit', 'review', '--project project', '--notify OWNER', "--message 'bla'", '--label Verified=1', 'revision'], env=None) # gerrit versions <= 2.5 uses other syntax gsp.processVersion(parse_version('2.4'), lambda: None) spawnSkipFirstArg = Mock() yield gsp.sendCodeReview('project', 'revision', {'message': 'bla', 'labels': {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', '-o', 'BatchMode=yes', 'user@serv', '-p', '29418', 'gerrit', 'review', '--project project', '--notify OWNER', "--message 'bla'", '--verified 1', 'revision'], env=None) gsp.processVersion(parse_version("2.13"), lambda: None) spawnSkipFirstArg = Mock() yield gsp.sendCodeReview("project", "revision", {"message": "bla", "labels": {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', '-o', 'BatchMode=yes', 'user@serv', '-p', '29418', 'gerrit', 'review', '--project project', '--tag autogenerated:buildbot', '--notify OWNER', "--message 'bla'", '--label Verified=1', 'revision'], env=None) @defer.inlineCallbacks def test_callWithVersion_bytes_output(self): gsp = yield self.setupGerritStatusPushSimple() exp_argv = ['ssh', '-o', 'BatchMode=yes', 'user@serv', '-p', '29418', 'gerrit', 'version'] def spawnProcess(pp, cmd, argv, env): self.assertEqual([cmd, argv], [exp_argv[0], exp_argv]) pp.errReceived(b'test stderr\n') pp.outReceived(b'gerrit version 2.14\n') pp.outReceived(b'(garbage that should not cause a crash)\n') so = error.ProcessDone(None) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) gsp.callWithVersion(lambda: self.assertEqual( gsp.gerrit_version, parse_version('2.14'))) def test_name_as_class_attribute(self): class 
FooStatusPush(GerritStatusPush): name = 'foo' reporter = FooStatusPush('gerrit.server.com', 'password') self.assertEqual(reporter.name, 'foo') def test_name_as_kwarg(self): reporter = GerritStatusPush('gerrit.server.com', 'password', name='foo') self.assertEqual(reporter.name, 'foo') def test_default_name(self): reporter = GerritStatusPush('gerrit.server.com', 'password') self.assertEqual(reporter.name, 'GerritStatusPush') buildbot-3.4.0/master/buildbot/test/unit/reporters/test_gerrit_verify_status.py000066400000000000000000000364561413250514000303110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import datetime from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.properties import renderer from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters.generators.build import BuildStartEndStatusGenerator from buildbot.reporters.gerrit_verify_status import GerritVerifyStatusPush from buildbot.reporters.message import MessageFormatterRenderable from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util import logging from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestGerritVerifyStatusPush(TestReactorMixin, ReporterTestMixin, ConfigErrorsMixin, logging.LoggingMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.reporter_test_props = { 'gerrit_changes': [{'change_id': 12, 'revision_id': 2}] } self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() @defer.inlineCallbacks def createGerritStatus(self, **kwargs): auth = kwargs.pop('auth', ('log', Interpolate('pass'))) self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, "gerrit", auth=('log', 'pass'), debug=None, verify=None) self.sp = GerritVerifyStatusPush("gerrit", auth=auth, **kwargs) yield self.sp.setServiceParent(self.master) def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def test_basic(self): yield self.createGerritStatus() build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( method='post', 
ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': -1, 'duration': '2h 1m 4s' }) yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_custom_description(self): start_formatter = MessageFormatterRenderable(Interpolate("started %(prop:buildername)s")) end_formatter = MessageFormatterRenderable(Interpolate("finished %(prop:buildername)s")) generator = BuildStartEndStatusGenerator(start_formatter=start_formatter, end_formatter=end_formatter) yield self.createGerritStatus(generators=[generator]) build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'started Builder0', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', 
ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'finished Builder0', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_custom_name(self): yield self.createGerritStatus( verification_name=Interpolate("builder %(prop:buildername)s")) build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'name': 'builder Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': False, 'name': 'builder Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_custom_abstain(self): yield self.createGerritStatus( abstain=renderer(lambda p: p.getProperty("buildername") == 'Builder0')) build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': True, 
'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': True, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_custom_category(self): yield self.createGerritStatus( category=renderer(lambda p: p.getProperty("buildername"))) build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'category': 'Builder0', 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': False, 'category': 'Builder0', 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_custom_reporter(self): yield self.createGerritStatus( reporter=renderer(lambda p: p.getProperty("buildername"))) build = yield self.insert_build_new() # we make sure proper 
calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'name': 'Builder0', 'reporter': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': False, 'name': 'Builder0', 'reporter': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_verbose(self): yield self.createGerritStatus(verbose=True) build = yield self.insert_build_new() self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self.setUpLogging() yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged("Sending Gerrit status for") @defer.inlineCallbacks def test_not_verbose(self): yield self.createGerritStatus(verbose=False) build = yield self.insert_build_new() self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self.setUpLogging() self._http.quiet = True yield self.sp._got_event(('builds', 20, 'new'), build) self.assertWasQuiet() @defer.inlineCallbacks def test_format_duration(self): yield 
self.createGerritStatus(verbose=False) self.assertEqual( self.sp.formatDuration(datetime.timedelta(seconds=1)), "0m 1s") self.assertEqual( self.sp.formatDuration(datetime.timedelta(hours=1, seconds=1)), "1h 0m 1s") self.assertEqual( self.sp.formatDuration(datetime.timedelta(days=1, seconds=1)), "1 day 0h 0m 1s") self.assertEqual( self.sp.formatDuration(datetime.timedelta(days=2, seconds=1)), "2 days 0h 0m 1s") @defer.inlineCallbacks def test_gerrit_changes(self): yield self.createGerritStatus() # from chdict: change_props = { 'event.change.owner.email': 'dustin@mozilla.com', 'event.change.subject': 'fix 1234', 'event.change.project': 'pr', 'event.change.owner.name': 'Dustin', 'event.change.number': '4321', 'event.change.url': 'http://buildbot.net', 'event.change.branch': 'br', 'event.type': 'patchset-created', 'event.patchSet.revision': 'abcdef', 'event.patchSet.number': '12', 'event.source': 'GerritChangeSource' } props = Properties.fromDict({k: (v, 'change') for k, v in change_props.items()}) changes = self.sp.getGerritChanges(props) self.assertEqual(changes, [ {'change_id': '4321', 'revision_id': '12'} ]) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_github.py000066400000000000000000000455561413250514000253110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters.github import HOSTED_BASE_URL from buildbot.reporters.github import GitHubCommentPush from buildbot.reporters.github import GitHubStatusPush from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestGitHubStatusPush(TestReactorMixin, unittest.TestCase, ConfigErrorsMixin, ReporterTestMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_reporter_test() # project must be in the form / self.reporter_test_project = 'buildbot/buildbot' self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, HOSTED_BASE_URL, headers={ 'Authorization': 'token XXYYZZ', 'User-Agent': 'Buildbot' }, debug=None, verify=None) self.sp = self.createService() yield self.sp.setServiceParent(self.master) def createService(self): return GitHubStatusPush(Interpolate('XXYYZZ')) def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def test_basic(self): build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/repos/buildbot/buildbot/statuses/d34db33fd43db33f', json={'state': 'pending', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build started.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/buildbot/buildbot/statuses/d34db33fd43db33f', json={'state': 
'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/buildbot/buildbot/statuses/d34db33fd43db33f', json={'state': 'failure', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) build['complete'] = False build['results'] = None yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_empty(self): build = yield self.insert_build_new(insert_ss=False) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_source_stamp_no_props_nightly_scheduler(self): # no status updates are expected self.master.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Builder(id=79, name='Builder0'), fakedb.Buildset(id=98, results=SUCCESS, reason="test_reason1"), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=234), fakedb.SourceStamp(id=234, project=None, branch=None, revision=None, repository=None, codebase=None), fakedb.BuildRequest(id=11, buildsetid=98, builderid=79), fakedb.Build(id=20, number=0, builderid=79, buildrequestid=11, workerid=13, masterid=92, results=SUCCESS, state_string="build_text"), fakedb.BuildProperty(buildid=20, name="workername", value="wrk"), fakedb.BuildProperty(buildid=20, name="reason", value="because"), fakedb.BuildProperty(buildid=20, name="buildername", value="Builder0"), fakedb.BuildProperty(buildid=20, name="branch", value=None), fakedb.BuildProperty(buildid=20, name="codebase", 
value=""), fakedb.BuildProperty(buildid=20, name="project", value=""), fakedb.BuildProperty(buildid=20, name="repository", value=""), fakedb.BuildProperty(buildid=20, name="revision", value=None), ]) self.setup_fake_get_changes_for_build(has_change=False) build = yield self.master.data.get(("builds", 20)) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_multiple_source_stamps_no_props(self): repository = 'http://test_repo' project = 'test_user/test_project' codebase1 = 'test_codebase1' codebase2 = 'test_codebase2' codebase3 = 'test_codebase3' self._http.expect( 'post', '/repos/test_user/test_project/statuses/rev1', json={'state': 'pending', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build started.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/test_user/test_project/statuses/rev3', json={'state': 'pending', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build started.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/test_user/test_project/statuses/rev1', json={'state': 'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/test_user/test_project/statuses/rev3', json={'state': 'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/test_user/test_project/statuses/rev1', json={'state': 'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/test_user/test_project/statuses/rev3', json={'state': 
'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) # note that the first sourcestamp only has revision, second only branch and only the third # has both self.master.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Builder(id=79, name='Builder0'), fakedb.Buildset(id=98, results=SUCCESS, reason="test_reason1"), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=234), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=235), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=236), fakedb.SourceStamp(id=234, project=project, branch=None, revision='rev1', repository=repository, codebase=codebase1), fakedb.SourceStamp(id=235, project=project, branch='branch2', revision=None, repository=repository, codebase=codebase2), fakedb.SourceStamp(id=236, project=project, branch='branch3', revision='rev3', repository=repository, codebase=codebase3), fakedb.BuildRequest(id=11, buildsetid=98, builderid=79), fakedb.Build(id=20, number=0, builderid=79, buildrequestid=11, workerid=13, masterid=92, results=SUCCESS, state_string="build_text"), fakedb.BuildProperty(buildid=20, name="buildername", value="Builder0"), ]) self.setup_fake_get_changes_for_build(has_change=False) build = yield self.master.data.get(("builds", 20)) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) class TestGitHubStatusPushURL(TestReactorMixin, unittest.TestCase, ReporterTestMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_reporter_test() # project must be in the form / self.reporter_test_project = 'buildbot' self.reporter_test_repo = 'https://github.com/buildbot1/buildbot1.git' self.master = fakemaster.make_master(self, wantData=True, wantDb=True, 
wantMq=True) yield self.master.startService() self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, HOSTED_BASE_URL, headers={ 'Authorization': 'token XXYYZZ', 'User-Agent': 'Buildbot' }, debug=None, verify=None) self.sp = self.createService() yield self.sp.setServiceParent(self.master) def createService(self): return GitHubStatusPush('XXYYZZ') def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def test_ssh(self): self.reporter_test_repo = 'git@github.com:buildbot2/buildbot2.git' build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/repos/buildbot2/buildbot2/statuses/d34db33fd43db33f', json={'state': 'pending', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build started.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/buildbot2/buildbot2/statuses/d34db33fd43db33f', json={'state': 'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/buildbot2/buildbot2/statuses/d34db33fd43db33f', json={'state': 'failure', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_https(self): build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/repos/buildbot1/buildbot1/statuses/d34db33fd43db33f', json={'state': 'pending', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build started.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', 
'/repos/buildbot1/buildbot1/statuses/d34db33fd43db33f', json={'state': 'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/buildbot1/buildbot1/statuses/d34db33fd43db33f', json={'state': 'failure', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) class TestGitHubCommentPush(TestGitHubStatusPush): def createService(self): return GitHubCommentPush('XXYYZZ') @defer.inlineCallbacks def test_basic(self): build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/repos/buildbot/buildbot/issues/34/comments', json={'body': 'Build done.'}) self._http.expect( 'post', '/repos/buildbot/buildbot/issues/34/comments', json={'body': 'Build done.'}) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_empty(self): build = yield self.insert_build_new(insert_ss=False) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_basic_branch_head(self): self.reporter_test_props['branch'] = 'refs/pull/13/head' build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made 
self._http.expect( 'post', '/repos/buildbot/buildbot/issues/13/comments', json={'body': 'Build done.'}) self._http.expect( 'post', '/repos/buildbot/buildbot/issues/13/comments', json={'body': 'Build done.'}) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_multiple_source_stamps_no_props(self): repository = 'http://test_repo' project = 'test_user/test_project' codebase1 = 'test_codebase1' codebase2 = 'test_codebase2' codebase3 = 'test_codebase3' branch2 = 'refs/pull/4192/merge' branch3 = 'refs/pull/4193/merge' self._http.expect( 'post', '/repos/test_user/test_project/issues/4192/comments', json={'body': 'Build done.'}) self._http.expect( 'post', '/repos/test_user/test_project/issues/4192/comments', json={'body': 'Build done.'}) self._http.expect( 'post', '/repos/test_user/test_project/issues/4192/comments', json={'body': 'Build done.'}) self._http.expect( 'post', '/repos/test_user/test_project/issues/4192/comments', json={'body': 'Build done.'}) # note that the first sourcestamp only has revision, second only branch and only the third # has both self.master.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Builder(id=79, name='Builder0'), fakedb.Buildset(id=98, results=SUCCESS, reason="test_reason1"), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=234), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=235), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=236), fakedb.SourceStamp(id=234, project=project, branch=None, revision='rev1', repository=repository, codebase=codebase1), fakedb.SourceStamp(id=235, project=project, branch=branch2, revision=None, repository=repository, codebase=codebase2), fakedb.SourceStamp(id=236, project=project, branch=branch3, 
revision='rev3', repository=repository, codebase=codebase3), fakedb.BuildRequest(id=11, buildsetid=98, builderid=79), fakedb.Build(id=20, number=0, builderid=79, buildrequestid=11, workerid=13, masterid=92, results=SUCCESS, state_string="build_text"), fakedb.BuildProperty(buildid=20, name="buildername", value="Builder0"), fakedb.BuildProperty(buildid=20, name="branch", value=branch2), ]) self.setup_fake_get_changes_for_build(has_change=False) build = yield self.master.data.get(("builds", 20)) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_gitlab.py000066400000000000000000000202631413250514000252550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters.gitlab import HOSTED_BASE_URL from buildbot.reporters.gitlab import GitLabStatusPush from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util import logging from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestGitLabStatusPush(TestReactorMixin, ConfigErrorsMixin, unittest.TestCase, ReporterTestMixin, logging.LoggingMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_reporter_test() # repository must be in the form http://gitlab// self.reporter_test_repo = 'http://gitlab/buildbot/buildbot' self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, HOSTED_BASE_URL, headers={'PRIVATE-TOKEN': 'XXYYZZ'}, debug=None, verify=None) self.sp = GitLabStatusPush(Interpolate('XXYYZZ')) yield self.sp.setServiceParent(self.master) def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def test_basic(self): build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( 'get', '/api/v4/projects/buildbot%2Fbuildbot', content_json={ "id": 1 }) self._http.expect( 'post', '/api/v4/projects/1/statuses/d34db33fd43db33f', json={'state': 'running', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build started.', 'name': 'buildbot/Builder0'}) self._http.expect( 'post', '/api/v4/projects/1/statuses/d34db33fd43db33f', 
json={'state': 'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build done.', 'name': 'buildbot/Builder0'}) self._http.expect( 'post', '/api/v4/projects/1/statuses/d34db33fd43db33f', json={'state': 'failed', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build done.', 'name': 'buildbot/Builder0'}) yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) build['results'] = FAILURE yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_sshurl(self): self.reporter_test_repo = 'git@gitlab:buildbot/buildbot.git' build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( 'get', '/api/v4/projects/buildbot%2Fbuildbot', content_json={ "id": 1 }) self._http.expect( 'post', '/api/v4/projects/1/statuses/d34db33fd43db33f', json={'state': 'running', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build started.', 'name': 'buildbot/Builder0'}) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) @defer.inlineCallbacks def test_merge_request_forked(self): self.reporter_test_repo = 'git@gitlab:buildbot/buildbot.git' self.reporter_test_props['source_project_id'] = 20922342342 build = yield self.insert_build_new() self._http.expect( 'post', '/api/v4/projects/20922342342/statuses/d34db33fd43db33f', json={'state': 'running', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build started.', 'name': 'buildbot/Builder0'}) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) # Don't run these tests in parallel! 
del self.reporter_test_props['source_project_id'] @defer.inlineCallbacks def test_noproject(self): self.reporter_test_repo = 'git@gitlab:buildbot/buildbot.git' self.setUpLogging() build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( 'get', '/api/v4/projects/buildbot%2Fbuildbot', content_json={ "message": 'project not found' }, code=404) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged(r"Unknown \(or hidden\) gitlab projectbuildbot%2Fbuildbot:" r" project not found") @defer.inlineCallbacks def test_nourl(self): self.reporter_test_repo = '' build = yield self.insert_build_new() build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) # implicit check that no http request is done # nothing is logged as well @defer.inlineCallbacks def test_senderror(self): self.setUpLogging() build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( 'get', '/api/v4/projects/buildbot%2Fbuildbot', content_json={ "id": 1 }) self._http.expect( 'post', '/api/v4/projects/1/statuses/d34db33fd43db33f', json={'state': 'running', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build started.', 'name': 'buildbot/Builder0'}, content_json={'message': 'sha1 not found for branch master'}, code=404) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged("Could not send status \"running\" for" " http://gitlab/buildbot/buildbot at d34db33fd43db33f:" " sha1 not found for branch master") @defer.inlineCallbacks def test_badchange(self): self.setUpLogging() build = yield self.insert_build_new() # we make sure proper calls to txrequests have been made self._http.expect( 'get', '/api/v4/projects/buildbot%2Fbuildbot', content_json={ "id": 1 }) build['complete'] = False yield self.sp._got_event(('builds', 20, 'new'), build) 
self.assertLogged("Failed to send status \"running\" for" " http://gitlab/buildbot/buildbot at d34db33fd43db33f\n" "Traceback") self.flushLoggedErrors(AssertionError) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_http.py000066400000000000000000000104751413250514000247760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.reporters.http import HttpStatusPush from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import BuildDictLookAlike from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestHttpStatusPush(TestReactorMixin, unittest.TestCase, ReporterTestMixin, ConfigErrorsMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() @defer.inlineCallbacks def createReporter(self, auth=("username", "passwd"), headers=None, **kwargs): self._http = yield 
fakehttpclientservice.HTTPClientService.getService( self.master, self, "serv", auth=auth, headers=headers, debug=None, verify=None) interpolated_auth = None if auth is not None: username, passwd = auth passwd = Interpolate(passwd) interpolated_auth = (username, passwd) self.sp = HttpStatusPush("serv", auth=interpolated_auth, headers=headers, **kwargs) yield self.sp.setServiceParent(self.master) @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def test_basic(self): yield self.createReporter() self._http.expect("post", "", json=BuildDictLookAlike(complete=False)) self._http.expect("post", "", json=BuildDictLookAlike(complete=True)) build = yield self.insert_build_new() yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_basic_noauth(self): yield self.createReporter(auth=None) self._http.expect("post", "", json=BuildDictLookAlike(complete=False)) self._http.expect("post", "", json=BuildDictLookAlike(complete=True)) build = yield self.insert_build_new() yield self.sp._got_event(('builds', 20, 'new'), build) build['complete'] = True build['results'] = SUCCESS yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_header(self): yield self.createReporter(headers={'Custom header': 'On'}) self._http.expect("post", "", json=BuildDictLookAlike()) build = yield self.insert_build_finished(SUCCESS) yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def http2XX(self, code, content): yield self.createReporter() self._http.expect('post', '', code=code, content=content, json=BuildDictLookAlike()) build = yield self.insert_build_finished(SUCCESS) yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_http200(self): yield self.http2XX(code=200, content="OK") @defer.inlineCallbacks def 
test_http201(self): # e.g. GitHub returns 201 yield self.http2XX(code=201, content="Created") @defer.inlineCallbacks def test_http202(self): yield self.http2XX(code=202, content="Accepted") buildbot-3.4.0/master/buildbot/test/unit/reporters/test_irc.py000066400000000000000000000472521413250514000245770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sys import mock from twisted.application import internet from twisted.internet import defer from twisted.trial import unittest from buildbot.config import ConfigErrors from buildbot.process.properties import Interpolate from buildbot.process.results import ALL_RESULTS from buildbot.process.results import SUCCESS from buildbot.reporters import irc from buildbot.reporters import words from buildbot.test.unit.reporters.test_words import ContactMixin from buildbot.test.util import config from buildbot.util import service class TestIrcContact(ContactMixin, unittest.TestCase): channelClass = irc.IRCChannel contactClass = irc.IRCContact def patch_act(self): self.actions = [] def act(msg): self.actions.append(msg) self.contact.act = act @defer.inlineCallbacks def test_op_required_authz(self): self.bot.authz = self.bot.expand_authz({ ('mute', 'unmute'): [self.USER] }) self.bot.getChannelOps = lambda channel: ['channelop'] self.assertFalse((yield 
self.contact.op_required('mute'))) @defer.inlineCallbacks def test_op_required_operator(self): self.bot.getChannelOps = lambda channel: [self.USER] self.assertFalse((yield self.contact.op_required('command'))) @defer.inlineCallbacks def test_op_required_unauthorized(self): self.bot.getChannelOps = lambda channel: ['channelop'] self.assertTrue((yield self.contact.op_required('command'))) @defer.inlineCallbacks def test_command_mute(self): self.bot.getChannelOps = lambda channel: [self.USER] yield self.do_test_command('mute') self.assertTrue(self.contact.channel.muted) @defer.inlineCallbacks def test_command_mute_unauthorized(self): self.bot.getChannelOps = lambda channel: [] yield self.do_test_command('mute') self.assertFalse(self.contact.channel.muted) self.assertIn("blah, blah", self.sent[0]) @defer.inlineCallbacks def test_command_unmute(self): self.bot.getChannelOps = lambda channel: [self.USER] self.contact.channel.muted = True yield self.do_test_command('unmute') self.assertFalse(self.contact.channel.muted) @defer.inlineCallbacks def test_command_unmute_unauthorized(self): self.bot.getChannelOps = lambda channel: [] self.contact.channel.muted = True yield self.do_test_command('unmute') self.assertTrue(self.contact.channel.muted) @defer.inlineCallbacks def test_command_unmute_not_muted(self): self.bot.getChannelOps = lambda channel: [self.USER] yield self.do_test_command('unmute') self.assertFalse(self.contact.channel.muted) self.assertIn("No one had told me to be quiet", self.sent[0]) @defer.inlineCallbacks def test_command_notify(self): self.bot.getChannelOps = lambda channel: [self.USER] self.assertNotIn('success', self.contact.channel.notify_events) yield self.do_test_command('notify', 'on success') self.assertIn('success', self.contact.channel.notify_events) @defer.inlineCallbacks def test_command_notify_unauthorized(self): self.bot.getChannelOps = lambda channel: [] self.assertNotIn('success', self.contact.channel.notify_events) yield 
self.do_test_command('notify', 'on success') self.assertNotIn('success', self.contact.channel.notify_events) @defer.inlineCallbacks def test_command_destroy(self): self.patch_act() yield self.do_test_command('destroy', exp_usage=False) self.assertEqual(self.actions, ['readies phasers']) @defer.inlineCallbacks def test_command_dance(self): yield self.do_test_command('dance', clock_ticks=[1.0] * 10, exp_usage=False) self.assertTrue(self.sent) # doesn't matter what it sent @defer.inlineCallbacks def test_command_hustle(self): self.patch_act() yield self.do_test_command('hustle', clock_ticks=[1.0] * 2, exp_usage=False) self.assertEqual(self.actions, ['does the hustle']) def test_send(self): events = [] def groupChat(dest, msg): events.append((dest, msg)) self.contact.bot.groupSend = groupChat self.contact.send("unmuted") self.contact.send("unmuted, unicode \N{SNOWMAN}") self.contact.channel.muted = True self.contact.send("muted") self.assertEqual(events, [ ('#buildbot', 'unmuted'), ('#buildbot', 'unmuted, unicode \u2603'), ]) def test_handleAction_ignored(self): self.patch_act() self.contact.handleAction('waves hi') self.assertEqual(self.actions, []) def test_handleAction_kick(self): self.patch_act() self.contact.handleAction('kicks nick') self.assertEqual(self.actions, ['kicks back']) def test_handleAction_stupid(self): self.patch_act() self.contact.handleAction('stupids nick') self.assertEqual(self.actions, ['stupids me too']) def test_act(self): events = [] def groupDescribe(dest, msg): events.append((dest, msg)) self.contact.bot.groupDescribe = groupDescribe self.contact.act("unmuted") self.contact.act("unmuted, unicode \N{SNOWMAN}") self.contact.channel.muted = True self.contact.act("muted") self.assertEqual(events, [ ('#buildbot', 'unmuted'), ('#buildbot', 'unmuted, unicode \u2603'), ]) class FakeContact(service.AsyncService): def __init__(self, user, channel=None): super().__init__() self.user_id = user self.channel = mock.Mock() self.messages = [] self.actions 
= [] def handleMessage(self, message): self.messages.append(message) def handleAction(self, data): self.actions.append(data) class TestIrcStatusBot(unittest.TestCase): def makeBot(self, *args, **kwargs): if not args: args = ('nick', 'pass', ['#ch'], [], False) bot = irc.IrcStatusBot(*args, **kwargs) bot.parent = mock.Mock() bot.parent.master.db.state.getState = lambda *args, **kwargs: None return bot def test_groupDescribe(self): b = self.makeBot() b.describe = lambda d, m: events.append(('n', d, m)) events = [] b.groupDescribe('#chan', 'hi') self.assertEqual(events, [('n', '#chan', 'hi')]) def test_groupChat(self): b = self.makeBot() b.msg = lambda d, m: events.append(('n', d, m)) events = [] b.groupSend('#chan', 'hi') self.assertEqual(events, [('n', '#chan', 'hi')]) def test_groupChat_notice(self): b = self.makeBot('nick', 'pass', ['#ch'], [], True) b.notice = lambda d, m: events.append(('n', d, m)) events = [] b.groupSend('#chan', 'hi') self.assertEqual(events, [('n', '#chan', 'hi')]) def test_msg(self): b = self.makeBot() b.msg = lambda d, m: events.append(('m', d, m)) events = [] b.msg('nick', 'hi') self.assertEqual(events, [('m', 'nick', 'hi')]) def test_getContact(self): b = self.makeBot() c1 = b.getContact(user='u1', channel='c1') c2 = b.getContact(user='u1', channel='c2') c1b = b.getContact(user='u1', channel='c1') self.assertIdentical(c1, c1b) self.assertIsInstance(c2, words.Contact) def test_getContact_case_insensitive(self): b = self.makeBot() c1 = b.getContact(user='u1') c1b = b.getContact(user='U1') self.assertIdentical(c1, c1b) def test_getContact_invalid(self): b = self.makeBot() b.authz = {'': None} u = b.getContact(user='u0', channel='c0') self.assertNotIn(('c0', 'u0'), b.contacts) self.assertNotIn('c0', b.channels) self.assertEqual(sys.getrefcount(u), 2) # local, sys c = u.channel self.assertEqual(sys.getrefcount(c), 3) # local, contact, sys del u self.assertEqual(sys.getrefcount(c), 2) # local, sys def test_getContact_valid(self): b = 
self.makeBot() b.authz = {'': None, 'command': ['u0']} b.getContact(user='u0', channel='c0') self.assertIn(('c0', 'u0'), b.contacts) def test_privmsg_user(self): b = self.makeBot() b.contactClass = FakeContact b.privmsg('jimmy!~foo@bar', 'nick', 'hello') c = b.getContact('jimmy') self.assertEqual(c.messages, ['hello']) def test_privmsg_user_uppercase(self): b = self.makeBot('NICK', 'pass', ['#ch'], [], False) b.contactClass = FakeContact b.privmsg('jimmy!~foo@bar', 'NICK', 'hello') c = b.getContact('jimmy') self.assertEqual(c.messages, ['hello']) def test_privmsg_channel_unrelated(self): b = self.makeBot() b.contactClass = FakeContact b.privmsg('jimmy!~foo@bar', '#ch', 'hello') c = b.getContact('jimmy', '#ch') self.assertEqual(c.messages, []) def test_privmsg_channel_related(self): b = self.makeBot() b.contactClass = FakeContact b.privmsg('jimmy!~foo@bar', '#ch', 'nick: hello') c = b.getContact('jimmy', '#ch') self.assertEqual(c.messages, [' hello']) def test_action_unrelated(self): b = self.makeBot() b.contactClass = FakeContact b.action('jimmy!~foo@bar', '#ch', 'waves') c = b.getContact('jimmy', '#ch') self.assertEqual(c.actions, []) def test_action_unrelated_buildbot(self): b = self.makeBot() b.contactClass = FakeContact # b.nickname is not 'buildbot' b.action('jimmy!~foo@bar', '#ch', 'waves at buildbot') c = b.getContact('jimmy', '#ch') self.assertEqual(c.actions, []) def test_action_related(self): b = self.makeBot() b.contactClass = FakeContact b.action('jimmy!~foo@bar', '#ch', 'waves at nick') c = b.getContact('jimmy', '#ch') self.assertEqual(c.actions, ['waves at nick']) def test_signedOn(self): b = self.makeBot('nick', 'pass', ['#ch1', dict(channel='#ch2', password='sekrits')], ['jimmy', 'bobby'], False) events = [] def msg(d, m): events.append(('m', d, m)) b.msg = msg def join(channel, key): events.append(('k', channel, key)) b.join = join b.contactClass = FakeContact b.signedOn() self.assertEqual(sorted(events), [ ('k', '#ch1', None), ('k', '#ch2', 
'sekrits'), ('m', 'Nickserv', 'IDENTIFY pass'), ]) self.assertEqual(sorted(b.contacts.keys()), # channels don't get added until joined() is called sorted([('jimmy', 'jimmy'), ('bobby', 'bobby')])) def test_register_SASL(self): b = self.makeBot('nick', 'pass', ['#ch1'], ['jimmy'], False, useSASL=True) events = [] def sendLine(line): events.append(('l', line)) if line == "AUTHENTICATE PLAIN": events.append(('s', "AUTHENTICATE")) b.irc_AUTHENTICATE(None, None) b.sendLine = sendLine b.register("bot") self.assertEqual(events, [ ('l', 'CAP REQ :sasl'), ('l', 'NICK bot'), ('l', 'USER bot foo bar :None'), ('l', 'AUTHENTICATE PLAIN'), ('s', 'AUTHENTICATE'), ('l', 'AUTHENTICATE bmljawBuaWNrAHBhc3M='), ('l', 'CAP END') ]) def test_register_legacy(self): b = self.makeBot('nick', 'pass', ['#ch1'], ['jimmy'], False, useSASL=False) events = [] def sendLine(line): events.append(('l', line)) b.sendLine = sendLine b.register("bot") self.assertEqual(events, [ ('l', 'PASS pass'), ('l', 'NICK bot'), ('l', 'USER bot foo bar :None') ]) def test_joined(self): b = self.makeBot() b.joined('#ch1') b.joined('#ch2') self.assertEqual(sorted(b.channels.keys()), sorted(['#ch1', '#ch2'])) def test_userLeft_or_userKicked(self): b = self.makeBot() b.getContact(channel='c', user='u') self.assertIn(('c', 'u'), b.contacts) b.userKicked('u', 'c', 'k', 'm') self.assertNotIn(('c', 'u'), b.contacts) def test_userQuit(self): b = self.makeBot() b.getContact(channel='c1', user='u') b.getContact(channel='c2', user='u') b.getContact(user='u') self.assertEquals(len(b.contacts), 3) b.userQuit('u', 'm') self.assertEquals(len(b.contacts), 0) def test_other(self): # these methods just log, but let's get them covered anyway b = self.makeBot() b.left('#ch1') b.kickedFrom('#ch1', 'dustin', 'go away!') def test_format_build_status(self): b = self.makeBot() self.assertEquals(b.format_build_status({'results': SUCCESS}), "completed successfully") def test_format_build_status_short(self): b = self.makeBot() 
self.assertEquals(b.format_build_status({'results': SUCCESS}, True), ", Success") def test_format_build_status_colors(self): b = self.makeBot() b.useColors = True self.assertEqual(b.format_build_status({'results': SUCCESS}), "\x033completed successfully\x0f") colors_used = set() status_texts = set() for result in ALL_RESULTS: status = b.format_build_status({'results': result}) self.assertTrue(status.startswith('\x03')) self.assertTrue(status.endswith('\x0f')) for i, c in enumerate(status[1:-1], start=2): if c.isnumeric(): continue break colors_used.add(status[1:i]) status_texts.add(status[i:-1]) self.assertEqual(len(colors_used), len(ALL_RESULTS)) self.assertEqual(len(status_texts), len(ALL_RESULTS)) def test_getNames(self): b = self.makeBot() b.sendLine = lambda *args: None d = b.getNames('#channel') names = [] def cb(n): names.extend(n) d.addCallback(cb) b.irc_RPL_NAMREPLY('', ('test', '=', '#channel', 'user1 user2')) b.irc_RPL_ENDOFNAMES('', ('test', '#channel')) self.assertEqual(names, ['user1', 'user2']) def test_getChannelOps(self): b = self.makeBot() b.sendLine = lambda *args: None d = b.getChannelOps('#channel') names = [] def cb(n): names.extend(n) d.addCallback(cb) b.irc_RPL_NAMREPLY('', ('test', '=', '#channel', 'user1 @user2')) b.irc_RPL_ENDOFNAMES('', ('test', '#channel')) self.assertEqual(names, ['user2']) class TestIrcStatusFactory(unittest.TestCase): def makeFactory(self, *args, **kwargs): if not args: args = ('nick', 'pass', ['ch'], [], [], {}, {}) return irc.IrcStatusFactory(*args, **kwargs) def test_shutdown(self): # this is kinda lame, but the factory would be better tested # in an integration-test environment f = self.makeFactory() self.assertFalse(f.shuttingDown) f.shutdown() self.assertTrue(f.shuttingDown) class TestIRC(config.ConfigErrorsMixin, unittest.TestCase): def makeIRC(self, **kwargs): kwargs.setdefault('host', 'localhost') kwargs.setdefault('nick', 'russo') kwargs.setdefault('channels', ['#buildbot']) self.factory = None def 
TCPClient(host, port, factory): client = mock.Mock(name='tcp-client') client.host = host client.port = port client.factory = factory # keep for later self.factory = factory self.client = client return client self.patch(internet, 'TCPClient', TCPClient) return irc.IRC(**kwargs) @defer.inlineCallbacks def test_constr(self): ircStatus = self.makeIRC(host='foo', port=123) yield ircStatus.startService() self.client.setServiceParent.assert_called_with(ircStatus) self.assertEqual(self.client.host, 'foo') self.assertEqual(self.client.port, 123) self.assertIsInstance(self.client.factory, irc.IrcStatusFactory) @defer.inlineCallbacks def test_constr_args(self): # test that the args to IRC(..) make it all the way down to # the IrcStatusBot class s = self.makeIRC( host='host', nick='nick', channels=['channels'], pm_to_nicks=['pm', 'to', 'nicks'], noticeOnChannel=True, port=1234, tags=['tags'], password=Interpolate('pass'), notify_events={'successToFailure': 1, }, showBlameList=False, useRevisions=True, useSSL=False, useSASL=False, lostDelay=10, failedDelay=20, useColors=False) yield s.startService() # patch it up factory = self.factory proto_obj = mock.Mock(name='proto_obj') factory.protocol = mock.Mock(name='protocol', return_value=proto_obj) # run it p = factory.buildProtocol('address') self.assertIdentical(p, proto_obj) factory.protocol.assert_called_with( 'nick', 'pass', ['channels'], ['pm', 'to', 'nicks'], True, {}, ['tags'], {'successToFailure': 1}, useColors=False, useSASL=False, useRevisions=True, showBlameList=False) def test_service(self): irc = self.makeIRC() # just put it through its paces irc.startService() return irc.stopService() # deprecated @defer.inlineCallbacks def test_allowForce_allowShutdown(self): s = self.makeIRC( host='host', nick='nick', channels=['channels'], allowForce=True, allowShutdown=False) yield s.startService() self.assertEqual(words.StatusBot.expand_authz(s.authz), {'FORCE': True, 'STOP': True, 'SHUTDOWN': False}) # deprecated def 
test_allowForce_with_authz(self): with self.assertRaises(ConfigErrors): self.makeIRC( host='host', nick='nick', channels=['channels'], allowForce=True, authz={'force': [12345]}) # deprecated def test_allowShutdown_with_authz(self): with self.assertRaises(ConfigErrors): self.makeIRC( host='host', nick='nick', channels=['channels'], allowForce=True, authz={'': [12345]}) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_mail.py000066400000000000000000000420101413250514000247270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import base64 import copy from email import charset from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot.config import ConfigErrors from buildbot.process import properties from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.reporters import mail from buildbot.reporters import utils from buildbot.reporters.generators.build import BuildStatusGenerator from buildbot.reporters.mail import MailNotifier from buildbot.reporters.message import MessageFormatter from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin from buildbot.util import bytes2unicode from buildbot.util import ssl class TestMailNotifier(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase, ReporterTestMixin): def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def setupMailNotifier(self, *args, **kwargs): mn = MailNotifier(*args, **kwargs) yield mn.setServiceParent(self.master) yield mn.startService() return mn @defer.inlineCallbacks def test_change_name(self): mn = yield self.setupMailNotifier('from@example.org', name="custom_name") self.assertEqual(mn.name, "custom_name") @defer.inlineCallbacks def do_test_createEmail_cte(self, funnyChars, expEncoding): build = yield self.insert_build_finished(SUCCESS) yield utils.getDetailsForBuild(self.master, build, want_properties=True) msgdict = create_msgdict(funnyChars) mn = yield self.setupMailNotifier('from@example.org') m = yield mn.createEmail(msgdict, 'project-name', SUCCESS, [build]) cte_lines = [l for l in m.as_string().split("\n") if l.startswith('Content-Transfer-Encoding:')] self.assertEqual(cte_lines, ['Content-Transfer-Encoding: 
{}'.format(expEncoding)], repr(m.as_string())) def test_createEmail_message_content_transfer_encoding_7bit(self): # buildbot.reporters.mail.ENCODING is 'utf8' # On Python 3, the body_encoding for 'utf8' is base64. # On Python 2, the body_encoding for 'utf8' is None. # If the body_encoding is None, the email package # will try to deduce the 'Content-Transfer-Encoding' # by calling email.encoders.encode_7or8bit(). # If the foo.encode('ascii') works on the body, it # is assumed '7bit'. If it fails, it is assumed '8bit'. input_charset = charset.Charset(mail.ENCODING) if input_charset.body_encoding == charset.BASE64: expEncoding = 'base64' elif input_charset.body_encoding is None: expEncoding = '7bit' return self.do_test_createEmail_cte("old fashioned ascii", expEncoding) def test_createEmail_message_content_transfer_encoding_8bit(self): # buildbot.reporters.mail.ENCODING is 'utf8' # On Python 3, the body_encoding for 'utf8' is base64. # On Python 2, the body_encoding for 'utf8' is None. # If the body_encoding is None, the email package # will try to deduce the 'Content-Transfer-Encoding' # by calling email.encoders.encode_7or8bit(). 
# If the foo.encode('ascii') works on the body, it input_charset = charset.Charset(mail.ENCODING) if input_charset.body_encoding == charset.BASE64: expEncoding = 'base64' elif input_charset.body_encoding is None: expEncoding = '8bit' return self.do_test_createEmail_cte("\U0001F4A7", expEncoding) @defer.inlineCallbacks def test_createEmail_message_without_patch_and_log_contains_unicode(self): build = yield self.insert_build_finished(SUCCESS) msgdict = create_msgdict() mn = yield self.setupMailNotifier('from@example.org') m = yield mn.createEmail(msgdict, 'project-n\u00E5me', SUCCESS, [build]) try: m.as_string() except UnicodeEncodeError: self.fail('Failed to call as_string() on email message.') @defer.inlineCallbacks def test_createEmail_extraHeaders_one_build(self): build = yield self.insert_build_finished(SUCCESS) build['properties']['hhh'] = ('vvv', 'fake') msgdict = create_msgdict() mn = yield self.setupMailNotifier('from@example.org', extraHeaders=dict(hhh=properties.Property('hhh'))) # add some Unicode to detect encoding problems m = yield mn.createEmail(msgdict, 'project-n\u00E5me', SUCCESS, [build]) txt = m.as_string() # note that the headers *are* rendered self.assertIn('hhh: vvv', txt) @defer.inlineCallbacks def test_createEmail_extraHeaders_two_builds(self): build = yield self.insert_build_finished(SUCCESS) yield utils.getDetailsForBuild(self.master, build, want_properties=True) builds = [build, copy.deepcopy(build)] builds[1]['builder']['name'] = 'builder2' msgdict = create_msgdict() mn = yield self.setupMailNotifier('from@example.org', extraHeaders=dict(hhh='vvv')) m = yield mn.createEmail(msgdict, 'project-n\u00E5me', SUCCESS, builds) txt = m.as_string() # note that the headers are *not* rendered self.assertIn('hhh: vvv', txt) @defer.inlineCallbacks def test_createEmail_message_with_patch_and_log_containing_unicode(self): build = yield self.insert_build_finished(SUCCESS) msgdict = create_msgdict() patches = [{'body': '\u00E5\u00E4\u00F6'}] logs = yield 
self.master.data.get(("steps", 50, 'logs')) for l in logs: l['stepname'] = "fakestep" l['content'] = yield self.master.data.get(("logs", l['logid'], 'contents')) mn = yield self.setupMailNotifier('from@example.org', generators=[BuildStatusGenerator(add_logs=True)]) m = yield mn.createEmail(msgdict, 'project-n\u00E5me', SUCCESS, [build], patches, logs) try: s = m.as_string() # python 2.6 default transfer in base64 for utf-8 if "base64" not in s: self.assertIn("Unicode log", s) else: # b64encode and remove '=' padding (hence [:-1]) logStr = bytes2unicode(base64.b64encode(b"Unicode log")[:-1]) self.assertIn(logStr, s) self.assertIn( 'Content-Disposition: attachment; filename="fakestep.stdio"', s) except UnicodeEncodeError: self.fail('Failed to call as_string() on email message.') @defer.inlineCallbacks def setupBuildMessage(self, **generator_kwargs): build = yield self.insert_build_finished(SUCCESS) formatter = Mock(spec=MessageFormatter) formatter.format_message_for_build.return_value = { "body": "body", "type": "text", "subject": "subject" } formatter.want_properties = False formatter.want_steps = False formatter.want_logs = False formatter.want_logs_content = False generator = BuildStatusGenerator(message_formatter=formatter, **generator_kwargs) mn = yield self.setupMailNotifier('from@example.org', generators=[generator]) mn.findInterrestedUsersEmails = Mock( spec=mn.findInterrestedUsersEmails) mn.findInterrestedUsersEmails.return_value = "" mn.processRecipients = Mock(spec=mn.processRecipients) mn.processRecipients.return_value = "" mn.createEmail = Mock(spec=mn.createEmail) mn.createEmail.return_value = "" mn.sendMail = Mock(spec=mn.sendMail) yield mn._got_event(('builds', 10, 'finished'), build) return (mn, build, formatter) @defer.inlineCallbacks def test_buildMessage(self): mn, build, formatter = yield self.setupBuildMessage(mode=("passing",)) formatter.format_message_for_build.assert_called_with(self.master, build, mode=('passing',), users=['me@foo']) 
mn.findInterrestedUsersEmails.assert_called_with(['me@foo']) mn.processRecipients.assert_called_with('', '') mn.sendMail.assert_called_with('', '') self.assertEqual(mn.createEmail.call_count, 1) @defer.inlineCallbacks def do_test_sendToInterestedUsers(self, lookup=None, extraRecipients=None, sendToInterestedUsers=True, exp_called_with=None, exp_TO=None, exp_CC=None): if extraRecipients is None: extraRecipients = [] _ = yield self.insert_build_finished(SUCCESS) mn = yield self.setupMailNotifier('from@example.org', lookup=lookup, extraRecipients=extraRecipients, sendToInterestedUsers=sendToInterestedUsers) recipients = yield mn.findInterrestedUsersEmails(['Big Bob ', 'narrator']) m = {'To': None, 'CC': None} all_recipients = mn.processRecipients(recipients, m) self.assertEqual(sorted(all_recipients), sorted(exp_called_with)) self.assertEqual(m['To'], exp_TO) self.assertEqual(m['CC'], exp_CC) def test_sendToInterestedUsers_lookup(self): return self.do_test_sendToInterestedUsers( lookup="example.org", exp_called_with=['Big Bob ', 'narrator@example.org'], exp_TO='"=?utf-8?q?Big_Bob?=" , ' 'narrator@example.org') def test_buildMessage_sendToInterestedUsers_no_lookup(self): return self.do_test_sendToInterestedUsers( exp_called_with=['Big Bob '], exp_TO='"=?utf-8?q?Big_Bob?=" ') def test_buildMessage_sendToInterestedUsers_extraRecipients(self): return self.do_test_sendToInterestedUsers( extraRecipients=["marla@mayhem.net"], exp_called_with=['Big Bob ', 'marla@mayhem.net'], exp_TO='"=?utf-8?q?Big_Bob?=" ', exp_CC="marla@mayhem.net") def test_sendToInterestedUsers_False(self): return self.do_test_sendToInterestedUsers( extraRecipients=["marla@mayhem.net"], sendToInterestedUsers=False, exp_called_with=['marla@mayhem.net'], exp_TO="marla@mayhem.net") def test_valid_emails(self): valid_emails = [ 'foo+bar@example.com', # + comment in local part 'nobody@example.com.', # root dot 'My Name ', # With full name '', # With <> 'My Name ', # With full name (root dot) 
'egypt@example.xn--wgbh1c'] # IDN TLD (.misr, Egypt) # If any of these email addresses fail, the test fails by # yield self.setupMailNotifier raising a ConfigErrors exception. MailNotifier('foo@example.com', extraRecipients=valid_emails) def test_invalid_email(self): for invalid in ['@', 'foo', 'foo@', '@example.com', 'foo@invalid', 'foobar@ex+ample.com', # + in domain part # whitespace in local part 'foo bar@example.net', 'Foo\nBar ', # newline in name 'test@example..invalid']: # empty label (..) with self.assertRaises(ConfigErrors): MailNotifier('foo@example.com', extraRecipients=[invalid]) @defer.inlineCallbacks def test_sendMail_real_name_addresses(self): fakeSenderFactory = Mock() fakeSenderFactory.side_effect = lambda *args, **kwargs: args[ 5].callback(True) self.patch(mail, 'ESMTPSenderFactory', fakeSenderFactory) self.patch(mail, 'reactor', Mock()) msg = Mock() msg.as_string = Mock(return_value='') mn = yield self.setupMailNotifier('John Doe ') yield mn.sendMail(msg, ['Jane Doe ']) self.assertIsInstance(fakeSenderFactory.call_args, tuple) self.assertTrue(len(fakeSenderFactory.call_args) > 0) self.assertTrue(len(fakeSenderFactory.call_args[0]) > 3) self.assertEquals(fakeSenderFactory.call_args[0][2], 'john.doe@domain.tld') self.assertEquals(fakeSenderFactory.call_args[0][3], ['jane.doe@domain.tld']) @defer.inlineCallbacks def do_test_sendMessage(self, **mn_kwargs): fakeSenderFactory = Mock() fakeSenderFactory.side_effect = lambda *args, **kwargs: args[ 5].callback(True) self.patch(mail, 'ESMTPSenderFactory', fakeSenderFactory) build = yield self.insert_build_finished(SUCCESS) formatter = Mock(spec=MessageFormatter) formatter.format_message_for_build.return_value = { "body": "body", "type": "text", "subject": "subject" } formatter.want_properties = False formatter.want_steps = False formatter.want_logs = False formatter.want_logs_content = False generator = BuildStatusGenerator(message_formatter=formatter) mn = yield self.setupMailNotifier('from@example.org', 
generators=[generator], **mn_kwargs) mn.findInterrestedUsersEmails = Mock( spec=mn.findInterrestedUsersEmails) mn.findInterrestedUsersEmails.return_value = list("") mn.processRecipients = Mock(spec=mn.processRecipients) mn.processRecipients.return_value = list("") mn.createEmail = Mock(spec=mn.createEmail) mn.createEmail.return_value.as_string = Mock(return_value="") yield mn._got_event(('builds', 10, 'finished'), build) return (mn, build) @defer.inlineCallbacks def test_sendMessageOverTcp(self): fakereactor = Mock() self.patch(mail, 'reactor', fakereactor) mn, build = yield self.do_test_sendMessage() self.assertEqual(1, len(fakereactor.method_calls)) self.assertIn(('connectTCP', ('localhost', 25, None), {}), fakereactor.method_calls) @defer.inlineCallbacks def test_sendMessageWithInterpolatedConfig(self): """Test that the secrets parameters are properly interpolated at reconfig stage Note: in the unit test, we don't test that it is interpolated with secret. That would require setting up secret manager. We just test that the interpolation works. 
""" fakereactor = Mock() self.patch(mail, 'reactor', fakereactor) mn, build = yield self.do_test_sendMessage(smtpUser=Interpolate("u$er"), smtpPassword=Interpolate("pa$$word")) self.assertEqual(mn.smtpUser, "u$er") self.assertEqual(mn.smtpPassword, "pa$$word") self.assertEqual(1, len(fakereactor.method_calls)) self.assertIn(('connectTCP', ('localhost', 25, None), {}), fakereactor.method_calls) @ssl.skipUnless @defer.inlineCallbacks def test_sendMessageOverSsl(self): fakereactor = Mock() self.patch(mail, 'reactor', fakereactor) mn, build = yield self.do_test_sendMessage(useSmtps=True) self.assertEqual(1, len(fakereactor.method_calls)) self.assertIn(('connectSSL', ('localhost', 25, None, fakereactor.connectSSL.call_args[ 0][3]), {}), fakereactor.method_calls) def create_msgdict(funny_chars='\u00E5\u00E4\u00F6'): unibody = 'Unicode body with non-ascii ({}).'.format(funny_chars) msg_dict = { "body": unibody, "subject": "testsubject", "type": 'plain' } return msg_dict buildbot-3.4.0/master/buildbot/test/unit/reporters/test_message.py000066400000000000000000000347721413250514000254510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import textwrap import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters import message from buildbot.reporters import utils from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import BuildDictLookAlike from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.warnings import assertProducesWarning from buildbot.warnings import DeprecatedApiWarning class TestMessageFormatting(unittest.TestCase): def test_get_detected_status_text_failure(self): self.assertEqual(message.get_detected_status_text(['change'], FAILURE, FAILURE), 'failed build') self.assertEqual(message.get_detected_status_text(['change'], FAILURE, SUCCESS), 'new failure') self.assertEqual(message.get_detected_status_text(['change'], FAILURE, None), 'failed build') self.assertEqual(message.get_detected_status_text(['problem'], FAILURE, FAILURE), 'failed build') self.assertEqual(message.get_detected_status_text(['problem'], FAILURE, SUCCESS), 'new failure') self.assertEqual(message.get_detected_status_text(['problem'], FAILURE, None), 'failed build') def test_get_detected_status_text_warnings(self): self.assertEqual(message.get_detected_status_text(['change'], WARNINGS, SUCCESS), 'problem in the build') self.assertEqual(message.get_detected_status_text(['change'], WARNINGS, None), 'problem in the build') def test_get_detected_status_text_success(self): self.assertEqual(message.get_detected_status_text(['change'], SUCCESS, FAILURE), 'restored build') 
self.assertEqual(message.get_detected_status_text(['change'], SUCCESS, SUCCESS), 'passing build') self.assertEqual(message.get_detected_status_text(['change'], SUCCESS, None), 'passing build') self.assertEqual(message.get_detected_status_text(['problem'], SUCCESS, FAILURE), 'passing build') self.assertEqual(message.get_detected_status_text(['problem'], SUCCESS, SUCCESS), 'passing build') self.assertEqual(message.get_detected_status_text(['problem'], SUCCESS, None), 'passing build') def test_get_detected_status_text_exception(self): self.assertEqual(message.get_detected_status_text(['problem'], EXCEPTION, FAILURE), 'build exception') self.assertEqual(message.get_detected_status_text(['problem'], EXCEPTION, SUCCESS), 'build exception') self.assertEqual(message.get_detected_status_text(['problem'], EXCEPTION, None), 'build exception') def test_get_detected_status_text_other(self): self.assertEqual(message.get_detected_status_text(['problem'], SKIPPED, None), 'skipped build') self.assertEqual(message.get_detected_status_text(['problem'], RETRY, None), 'retry build') self.assertEqual(message.get_detected_status_text(['problem'], CANCELLED, None), 'cancelled build') def test_get_message_summary_text_success(self): self.assertEqual(message.get_message_summary_text({'state_string': 'mywarning'}, SUCCESS), 'Build succeeded!') def test_get_message_summary_text_warnings(self): self.assertEqual(message.get_message_summary_text({'state_string': 'mywarning'}, WARNINGS), 'Build Had Warnings: mywarning') self.assertEqual(message.get_message_summary_text({'state_string': None}, WARNINGS), 'Build Had Warnings') def test_get_message_summary_text_cancelled(self): self.assertEqual(message.get_message_summary_text({'state_string': 'mywarning'}, CANCELLED), 'Build was cancelled') def test_get_message_summary_text_skipped(self): self.assertEqual(message.get_message_summary_text({'state_string': 'mywarning'}, SKIPPED), 'BUILD FAILED: mywarning') 
self.assertEqual(message.get_message_summary_text({'state_string': None}, SKIPPED), 'BUILD FAILED') def test_get_message_source_stamp_text_empty(self): self.assertEqual(message.get_message_source_stamp_text([]), '') def test_get_message_source_stamp_text_multiple(self): stamps = [ {'codebase': 'a', 'branch': None, 'revision': None, 'patch': None}, {'codebase': 'b', 'branch': None, 'revision': None, 'patch': None}, ] self.assertEqual(message.get_message_source_stamp_text(stamps), "Build Source Stamp 'a': HEAD\n" "Build Source Stamp 'b': HEAD\n") def test_get_message_source_stamp_text_with_props(self): stamps = [ {'codebase': 'a', 'branch': 'br', 'revision': 'abc', 'patch': 'patch'} ] self.assertEqual(message.get_message_source_stamp_text(stamps), "Build Source Stamp 'a': [branch br] abc (plus patch)\n") class MessageFormatterTestBase(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) def setupDb(self, results1, results2): self.db = self.master.db self.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrkr'), fakedb.Buildset(id=98, results=results1, reason="testReason1"), fakedb.Buildset(id=99, results=results2, reason="testReason2"), fakedb.Builder(id=80, name='Builder1'), fakedb.BuildRequest(id=11, buildsetid=98, builderid=80), fakedb.BuildRequest(id=12, buildsetid=99, builderid=80), fakedb.Build(id=20, number=0, builderid=80, buildrequestid=11, workerid=13, masterid=92, results=results1), fakedb.Build(id=21, number=1, builderid=80, buildrequestid=12, workerid=13, masterid=92, results=results2), ]) for _id in (20, 21): self.db.insertTestData([ fakedb.BuildProperty( buildid=_id, name="workername", value="wrkr"), fakedb.BuildProperty( buildid=_id, name="reason", value="because"), ]) @defer.inlineCallbacks def do_one_test(self, formatter, lastresults, results, mode="all"): self.setupDb(lastresults, results) res = yield 
utils.getDetailsForBuildset(self.master, 99, want_properties=True, want_previous_build=True) build = res['builds'][0] res = yield formatter.format_message_for_build(self.master, build, mode=mode, users=["him@bar", "me@foo"]) return res class TestMessageFormatter(MessageFormatterTestBase): def test_want_properties_deprecated(self): with assertProducesWarning(DeprecatedApiWarning, "wantProperties has been deprecated"): formatter = message.MessageFormatter(wantProperties=True) self.assertEqual(formatter.want_properties, True) def test_want_steps_deprecated(self): with assertProducesWarning(DeprecatedApiWarning, "wantSteps has been deprecated"): formatter = message.MessageFormatter(wantSteps=True) self.assertEqual(formatter.want_steps, True) def test_want_logs_deprecated(self): with assertProducesWarning(DeprecatedApiWarning, "wantLogs has been deprecated"): formatter = message.MessageFormatter(wantLogs=True) self.assertEqual(formatter.want_logs, True) self.assertEqual(formatter.want_logs_content, True) @defer.inlineCallbacks def test_message_success(self): formatter = message.MessageFormatter() res = yield self.do_one_test(formatter, SUCCESS, SUCCESS) self.assertEqual(res['type'], "plain") self.assertEqual(res['body'], textwrap.dedent('''\ The Buildbot has detected a passing build on builder Builder1 while building Buildbot. Full details are available at: http://localhost:8080/#builders/80/builds/1 Buildbot URL: http://localhost:8080/ Worker for this Build: wrkr Build Reason: because Blamelist: him@bar, me@foo Build succeeded! 
Sincerely, -The Buildbot''')) self.assertIsNone(res['subject']) @defer.inlineCallbacks def test_inline_template(self): formatter = message.MessageFormatter(template="URL: {{ build_url }} -- {{ summary }}") res = yield self.do_one_test(formatter, SUCCESS, SUCCESS) self.assertEqual(res['type'], "plain") self.assertEqual(res['body'], "URL: http://localhost:8080/#builders/80/builds/1 -- Build succeeded!") @defer.inlineCallbacks def test_inline_subject(self): formatter = message.MessageFormatter(subject="subject") res = yield self.do_one_test(formatter, SUCCESS, SUCCESS) self.assertEqual(res['subject'], "subject") @defer.inlineCallbacks def test_message_failure(self): formatter = message.MessageFormatter() res = yield self.do_one_test(formatter, SUCCESS, FAILURE) self.assertIn( "The Buildbot has detected a failed build on builder", res['body']) @defer.inlineCallbacks def test_message_failure_change(self): formatter = message.MessageFormatter() res = yield self.do_one_test(formatter, SUCCESS, FAILURE, "change") self.assertIn( "The Buildbot has detected a new failure on builder", res['body']) @defer.inlineCallbacks def test_message_success_change(self): formatter = message.MessageFormatter() res = yield self.do_one_test(formatter, FAILURE, SUCCESS, "change") self.assertIn( "The Buildbot has detected a restored build on builder", res['body']) @defer.inlineCallbacks def test_message_success_nochange(self): formatter = message.MessageFormatter() res = yield self.do_one_test(formatter, SUCCESS, SUCCESS, "change") self.assertIn( "The Buildbot has detected a passing build on builder", res['body']) @defer.inlineCallbacks def test_message_failure_nochange(self): formatter = message.MessageFormatter() res = yield self.do_one_test(formatter, FAILURE, FAILURE, "change") self.assertIn( "The Buildbot has detected a failed build on builder", res['body']) class TestMessageFormatterRenderable(MessageFormatterTestBase): @defer.inlineCallbacks def test_basic(self): template = 
Interpolate('templ_%(prop:workername)s/%(prop:reason)s') subject = Interpolate('subj_%(prop:workername)s/%(prop:reason)s') formatter = message.MessageFormatterRenderable(template, subject) res = yield self.do_one_test(formatter, SUCCESS, SUCCESS) self.assertEqual(res, { 'body': 'templ_wrkr/because', 'type': 'plain', 'subject': 'subj_wrkr/because', }) class TestMessageFormatterFunction(MessageFormatterTestBase): @defer.inlineCallbacks def test_basic(self): function = mock.Mock(side_effect=lambda x: {'key': 'value'}) formatter = message.MessageFormatterFunction(function, 'json') res = yield self.do_one_test(formatter, SUCCESS, SUCCESS) function.assert_called_with({ 'build': BuildDictLookAlike(extra_keys=['prev_build'], expected_missing_keys=['parentbuilder', 'buildrequest', 'parentbuild']) }) self.assertEqual(res, { 'body': {'key': 'value'}, 'type': 'json', 'subject': None, }) @defer.inlineCallbacks def test_renderable(self): function = mock.Mock(side_effect=lambda x: {'key': 'value'}) def renderable_function(context): return defer.succeed(function(context)) formatter = message.MessageFormatterFunction(function, 'json') res = yield self.do_one_test(formatter, SUCCESS, SUCCESS) function.assert_called_with({ 'build': BuildDictLookAlike(extra_keys=['prev_build'], expected_missing_keys=['parentbuilder', 'buildrequest', 'parentbuild']) }) self.assertEqual(res, { 'body': {'key': 'value'}, 'type': 'json', 'subject': None, }) class TestMessageFormatterMissingWorker(MessageFormatterTestBase): @defer.inlineCallbacks def test_basic(self): formatter = message.MessageFormatterMissingWorker() self.setupDb(SUCCESS, SUCCESS) workers = yield self.master.data.get(('workers',)) worker = workers[0] worker['notify'] = ['e@mail'] worker['last_connection'] = ['yesterday'] res = yield formatter.formatMessageForMissingWorker(self.master, worker) text = res['body'] self.assertIn("has noticed that the worker named wrkr went away", text) 
buildbot-3.4.0/master/buildbot/test/unit/reporters/test_pushjet.py000066400000000000000000000074471413250514000255060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from unittest import SkipTest from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.reporters.pushjet import PushjetNotifier from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.util import httpclientservice class TestPushjetNotifier(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) # returns a Deferred def setupFakeHttp(self, base_url='https://api.pushjet.io'): return fakehttpclientservice.HTTPClientService.getService(self.master, self, base_url) @defer.inlineCallbacks def setupPushjetNotifier(self, secret=Interpolate("1234"), **kwargs): pn = PushjetNotifier(secret, **kwargs) yield pn.setServiceParent(self.master) yield pn.startService() return pn @defer.inlineCallbacks def 
test_sendMessage(self): _http = yield self.setupFakeHttp() pn = yield self.setupPushjetNotifier(levels={'passing': 2}) _http.expect("post", "/message", data={'secret': "1234", 'level': 2, 'message': "Test", 'title': "Tee"}, content_json={'status': 'ok'}) n = yield pn.sendMessage([{ "body": "Test", "subject": "Tee", "results": SUCCESS }]) j = yield n.json() self.assertEqual(j['status'], 'ok') @defer.inlineCallbacks def test_sendNotification(self): _http = yield self.setupFakeHttp('https://tests.io') pn = yield self.setupPushjetNotifier(base_url='https://tests.io') _http.expect("post", "/message", data={'secret': "1234", 'message': "Test"}, content_json={'status': 'ok'}) n = yield pn.sendNotification({'message': "Test"}) j = yield n.json() self.assertEqual(j['status'], 'ok') @defer.inlineCallbacks def test_sendRealNotification(self): secret = os.environ.get('TEST_PUSHJET_SECRET') if secret is None: raise SkipTest("real pushjet test runs only if the variable " "TEST_PUSHJET_SECRET is defined") _http = yield httpclientservice.HTTPClientService.getService( self.master, 'https://api.pushjet.io') yield _http.startService() pn = yield self.setupPushjetNotifier(secret=secret) n = yield pn.sendNotification({'message': "Buildbot Pushjet test passed!"}) j = yield n.json() self.assertEqual(j['status'], 'ok') # Test with: # TEST_PUSHJET_SECRET=edcfaf21ab1bbad7b12bd7602447e6cb # https://api.pushjet.io/message?uuid=b8b8b8b8-0000-b8b8-0000-b8b8b8b8b8b8 buildbot-3.4.0/master/buildbot/test/unit/reporters/test_pushover.py000066400000000000000000000077341413250514000256760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from unittest import SkipTest from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.reporters.pushover import PushoverNotifier from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.util import httpclientservice class TestPushoverNotifier(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) # returns a Deferred def setupFakeHttp(self): return fakehttpclientservice.HTTPClientService.getService(self.master, self, 'https://api.pushover.net') @defer.inlineCallbacks def setupPushoverNotifier(self, user_key="1234", api_token=Interpolate("abcd"), **kwargs): pn = PushoverNotifier(user_key, api_token, **kwargs) yield pn.setServiceParent(self.master) yield pn.startService() return pn @defer.inlineCallbacks def test_sendMessage(self): _http = yield self.setupFakeHttp() pn = yield self.setupPushoverNotifier(priorities={'passing': 2}) _http.expect("post", "/1/messages.json", params={'user': "1234", 'token': "abcd", 'message': "Test", 'title': "Tee", 'priority': 2}, content_json={'status': 1, 'request': '98765'}) n = yield pn.sendMessage([{ "body": "Test", 
"subject": "Tee", "results": SUCCESS }]) j = yield n.json() self.assertEqual(j['status'], 1) self.assertEqual(j['request'], '98765') @defer.inlineCallbacks def test_sendNotification(self): _http = yield self.setupFakeHttp() pn = yield self.setupPushoverNotifier(otherParams={'sound': "silent"}) _http.expect("post", "/1/messages.json", params={'user': "1234", 'token': "abcd", 'sound': "silent", 'message': "Test"}, content_json={'status': 1, 'request': '98765'}) n = yield pn.sendNotification({'message': "Test"}) j = yield n.json() self.assertEqual(j['status'], 1) self.assertEqual(j['request'], '98765') @defer.inlineCallbacks def test_sendRealNotification(self): creds = os.environ.get('TEST_PUSHOVER_CREDENTIALS') if creds is None: raise SkipTest("real pushover test runs only if the variable " "TEST_PUSHOVER_CREDENTIALS is defined") user, token = creds.split(':') _http = yield httpclientservice.HTTPClientService.getService( self.master, 'https://api.pushover.net') yield _http.startService() pn = yield self.setupPushoverNotifier(user_key=user, api_token=token) n = yield pn.sendNotification({'message': "Buildbot Pushover test passed!"}) j = yield n.json() self.assertEqual(j['status'], 1) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_telegram.py000066400000000000000000001107051413250514000256140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import sys from unittest.case import SkipTest from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.plugins.db import get_plugins from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters import telegram from buildbot.reporters import words from buildbot.schedulers import forcesched from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.fake.web import FakeRequest from buildbot.test.unit.reporters.test_words import ContactMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.util import service from buildbot.util import unicode2bytes class FakeChannel(service.AsyncService): pass class FakeContact: def __init__(self, user=None, channel=None): super().__init__() self.user_id = user['id'] self.user_info = user self.channel = FakeChannel self.channel.chat_info = channel.chat_info self.template = None self.messages = [] def handleMessage(self, message, **kwargs): self.messages.append(message) return defer.succeed(message) class TestTelegramContact(ContactMixin, unittest.TestCase): channelClass = telegram.TelegramChannel contactClass = telegram.TelegramContact class botClass(words.StatusBot): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.query_cache = {} def send_message(self, chat, message, **kwargs): return {'message_id': 123} def edit_message(bot, chat, msgid, message, **kwargs): return {'message_id': 123} def delete_message(bot, chat, msgid): pass def send_sticker(bot, chat, sticker, **kwargs): pass def edit_keyboard(self, chat, 
msg, keyboard=None): pass def getChannel(self, channel): return self.channelClass(self, channel) USER = { "id": 123456789, "first_name": "Harry", "last_name": "Potter", "username": "harrypotter", } CHANNEL = { "id": -12345678, "title": "Hogwards", "type": "group" } PRIVATE = { "id": 123456789, "type": "private" } def patch_send(self): self.sent = [] self.stickers = 0 def send_message(chat, message, **kwargs): self.sent.append((chat, message, kwargs)) return {'message_id': 123} self.bot.send_message = send_message def send_sticker(chat, sticker, **kwargs): self.stickers += 1 self.bot.send_sticker = send_sticker @defer.inlineCallbacks def setUp(self): ContactMixin.setUp(self) self.contact1 = self.contactClass(user=self.USER, channel=self.channelClass(self.bot, self.PRIVATE)) yield self.contact1.channel.setServiceParent(self.master) @defer.inlineCallbacks def test_list_notified_events(self): self.patch_send() channel = telegram.TelegramChannel(self.bot, self.CHANNEL) channel.notify_events = {'success'} yield channel.list_notified_events() self.assertEquals(self.sent[0][1], "The following events are being notified:\n🔔 **success**") @defer.inlineCallbacks def test_list_notified_events_empty(self): self.patch_send() channel = telegram.TelegramChannel(self.bot, self.CHANNEL) channel.notify_events = set() yield channel.list_notified_events() self.assertEquals(self.sent[0][1], "🔕 No events are being notified.") def testDescribeUser(self): self.assertEquals(self.contact1.describeUser(), "Harry Potter (@harrypotter)") def testDescribeUserInGroup(self): self.assertEquals(self.contact.describeUser(), "Harry Potter (@harrypotter) on 'Hogwards'") @defer.inlineCallbacks def test_access_denied(self): self.patch_send() self.contact1.ACCESS_DENIED_MESSAGES = ["ACCESS DENIED"] yield self.contact1.access_denied(tmessage={'message_id': 123}) self.assertEqual("ACCESS DENIED", self.sent[0][1]) @defer.inlineCallbacks def test_access_denied_group(self): self.patch_send() 
self.contact.ACCESS_DENIED_MESSAGES = ["ACCESS DENIED"] yield self.contact.access_denied(tmessage={'message_id': 123}) self.assertEqual("ACCESS DENIED", self.sent[0][1]) def test_query_button_short(self): result = self.contact.query_button("Hello", "hello") self.assertEquals(result, {'text': "Hello", 'callback_data': "hello"}) def test_query_button_long(self): payload = 16 * "1234567890" key = hash(repr(payload)) result = self.contact.query_button("Hello", payload) self.assertEquals(result, {'text': "Hello", 'callback_data': key}) self.assertEquals(self.bot.query_cache[key], payload) def test_query_button_non_str(self): payload = {'data': "good"} key = hash(repr(payload)) result = self.contact.query_button("Hello", payload) self.assertEquals(result, {'text': "Hello", 'callback_data': key}) self.assertEquals(self.bot.query_cache[key], payload) def test_query_button_cache(self): payload = 16 * "1234567890" key = hash(repr(payload)) self.bot.query_cache[key] = payload result = self.contact.query_button("Hello", payload) self.assertEquals(result, {'text': "Hello", 'callback_data': key}) self.assertEquals(len(self.bot.query_cache), 1) def test_query_button_cache_conflict(self): payload = 16 * "1234567890" key = hash(repr(payload)) self.bot.query_cache[key] = "something other" result = self.contact.query_button("Hello", payload) self.assertEquals(result, {'text': "Hello", 'callback_data': key + 1}) self.assertEquals(self.bot.query_cache[key + 1], payload) @defer.inlineCallbacks def test_command_start(self): yield self.do_test_command('start', exp_usage=False) self.assertEqual(self.sent[0][0], self.CHANNEL['id']) @defer.inlineCallbacks def test_command_nay(self): yield self.do_test_command('nay', contact=self.contact1, tmessage={}) @defer.inlineCallbacks def test_command_nay_reply_markup(self): yield self.do_test_command('nay', tmessage={ 'reply_to_message': { 'message_id': 1234, 'reply_markup': {}, }}) @defer.inlineCallbacks def test_commmand_commands(self): yield 
self.do_test_command('commands') self.assertEqual(self.sent[0][0], self.CHANNEL['id']) @defer.inlineCallbacks def test_commmand_commands_botfather(self): yield self.do_test_command('commands', 'botfather') self.assertEqual(self.sent[0][0], self.CHANNEL['id']) self.assertRegex(self.sent[0][1], r"^\w+ - \S+") @defer.inlineCallbacks def test_command_getid_private(self): yield self.do_test_command('getid', contact=self.contact1) self.assertEqual(len(self.sent), 1) self.assertIn(str(self.USER['id']), self.sent[0][1]) @defer.inlineCallbacks def test_command_getid_group(self): yield self.do_test_command('getid') self.assertIn(str(self.USER['id']), self.sent[0][1]) self.assertIn(str(self.CHANNEL['id']), self.sent[1][1]) def assertButton(self, data, pos=None, sent=0): keyboard = self.sent[sent][2]['reply_markup']['inline_keyboard'] if pos is not None: r, c = pos self.assertEquals(keyboard[r][c]['callback_data'], data) else: dataset = [b['callback_data'] for row in keyboard for b in row] self.assertIn(data, dataset) @defer.inlineCallbacks def test_command_list(self): yield self.do_test_command('list') self.assertButton('/list builders') self.assertButton('/list workers') self.assertButton('/list changes') @defer.inlineCallbacks def test_command_list_builders(self): yield self.do_test_command('list', 'all builders') self.assertEqual(len(self.sent), 1) for builder in self.BUILDER_NAMES: self.assertIn('`{}` ❌'.format(builder), self.sent[0][1]) @defer.inlineCallbacks def test_command_list_workers(self): workers = ['worker1', 'worker2'] for worker in workers: self.master.db.workers.db.insertTestData([ fakedb.Worker(name=worker) ]) yield self.do_test_command('list', args='all workers') self.assertEqual(len(self.sent), 1) for worker in workers: self.assertIn('`{}` ❌'.format(worker), self.sent[0][1]) @defer.inlineCallbacks def test_command_list_workers_online(self): self.setup_multi_builders() # Also set the connectedness: self.master.db.insertTestData([ 
fakedb.ConnectedWorker(id=113, masterid=13, workerid=1) ]) yield self.do_test_command('list', args='all workers') self.assertEqual(len(self.sent), 1) self.assertNotIn('`linux1` ⚠️', self.sent[0][1]) self.assertIn('`linux2` ⚠️', self.sent[0][1]) @defer.inlineCallbacks def test_command_list_changes(self): self.master.db.workers.db.insertTestData([ fakedb.Change() ]) yield self.do_test_command('list', args='2 changes') self.assertEqual(len(self.sent), 2) @defer.inlineCallbacks def test_command_list_changes_long(self): self.master.db.workers.db.insertTestData([ fakedb.Change() for i in range(200) ]) yield self.do_test_command('list', args='all changes') self.assertIn('reply_markup', self.sent[1][2]) @defer.inlineCallbacks def test_command_watch(self): self.setupSomeBuilds() yield self.do_test_command('watch') self.assertButton('/watch builder1') @defer.inlineCallbacks def test_command_watch_no_builds(self): yield self.do_test_command('watch') @defer.inlineCallbacks def test_command_stop_no_args(self): self.setupSomeBuilds() yield self.do_test_command('stop') self.assertButton('/stop build builder1') @defer.inlineCallbacks def test_command_stop_ask_reason(self): self.patch_send() self.setupSomeBuilds() yield self.do_test_command('stop', 'build builder1') self.assertIn("give me the reason", self.sent[0][1]) self.assertEquals(self.contact.template, "/stop build builder1 {}") def test_ask_reply_group(self): self.patch_send() self.contact.ask_for_reply("test") self.assertEqual(self.sent[0][1], "Ok @harrypotter, now test...") def test_ask_reply_group_no_username(self): self.patch_send() self.contact.user_info = self.USER.copy() del self.contact.user_info['username'] self.contact.ask_for_reply("test") self.assertEqual(self.sent[0][1], "Ok, now reply to this message and test...") def test_ask_reply_group_no_username_no_greeting(self): self.patch_send() self.contact.user_info = self.USER.copy() del self.contact.user_info['username'] self.contact.ask_for_reply("test", None) 
self.assertEqual(self.sent[0][1], "Reply to this message and test...") def test_ask_reply_private_no_greeting(self): self.patch_send() self.contact1.ask_for_reply("test", None) self.assertEqual(self.sent[0][1], "Test...") @defer.inlineCallbacks def test_command_notify_no_args(self): self.patch_send() self.contact.channel.notify_events = {'success', 'failure'} yield self.do_test_command('notify') self.assertButton('/notify on-quiet finished') self.assertButton('/notify off-quiet success') self.assertButton('/notify list') @defer.inlineCallbacks def test_command_notify_list_with_query(self): self.patch_send() def delete_message(chat, msg): delete_message.msg = msg delete_message.msg = None self.bot.delete_message = delete_message yield self.do_test_command('notify', 'list', tquery={ 'message': {'message_id': 2345} }) self.assertEqual(delete_message.msg, 2345) @defer.inlineCallbacks def test_command_notify_toggle(self): self.patch_send() def edit_keyboard(chat, msg, keyboard): self.sent.append((chat, None, { 'reply_markup': {'inline_keyboard': keyboard}})) self.bot.edit_keyboard = edit_keyboard self.contact.channel.notify_events = {'success', 'failure'} yield self.do_test_command('notify', 'on-quiet finished', tquery={ 'message': {'message_id': 2345} }) self.assertIn('finished', self.contact.channel.notify_events) self.assertButton('/notify off-quiet finished') @defer.inlineCallbacks def test_command_shutdown(self): yield self.do_test_command('shutdown') self.assertButton('/shutdown start') self.assertButton('/shutdown now') @defer.inlineCallbacks def test_command_shutdown_shutting_down(self): yield self.do_test_command('shutdown', shuttingDown=True) self.assertButton('/shutdown stop') self.assertButton('/shutdown now') def allSchedulers(self): return self.schedulers def make_forcescheduler(self, two=False): scheduler = forcesched.ForceScheduler( name='force1', builderNames=['builder1', 'builder2'], codebases=[ forcesched.CodebaseParameter('', 
branch=forcesched.StringParameter( name='branch', default="master"), repository=forcesched.FixedParameter( name="repository", default="repository.git")), forcesched.CodebaseParameter('second', branch=forcesched.StringParameter( name='branch', default="master"), repository=forcesched.FixedParameter( name="repository", default="repository2.git"))], reason=forcesched.StringParameter( name='reason', required=True)) self.schedulers = [scheduler] if two: scheduler2 = forcesched.ForceScheduler( name='force2', builderNames=['builder2']) self.schedulers.append(scheduler2) self.bot.master.allSchedulers = self.allSchedulers @defer.inlineCallbacks def test_command_force_no_schedulers(self): yield self.do_test_command('force', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_noargs_multiple_schedulers(self): self.make_forcescheduler(two=True) yield self.do_test_command('force') self.assertButton('/force force1') self.assertButton('/force force2') @defer.inlineCallbacks def test_command_force_noargs(self): self.make_forcescheduler() yield self.do_test_command('force') self.assertButton('/force force1 config builder1') self.assertButton('/force force1 config builder2') @defer.inlineCallbacks def test_command_force_only_scheduler(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1') self.assertButton('/force force1 config builder1') self.assertButton('/force force1 config builder2') @defer.inlineCallbacks def test_command_force_bad_scheduler(self): self.make_forcescheduler(two=True) yield self.do_test_command('force', 'force3', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_bad_builder(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 config builder0', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_bad_command(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 bad builder1', exp_UsageError=True) @defer.inlineCallbacks def 
test_command_force_only_bad_command(self): self.make_forcescheduler() yield self.do_test_command('force', 'bad builder1', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_config(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 config builder1') self.assertButton('/force force1 ask reason builder1 ') self.assertButton('/force force1 ask branch builder1 ') self.assertButton('/force force1 ask project builder1 ') self.assertButton('/force force1 ask revision builder1 ') self.assertButton('/force force1 ask second_branch builder1 ') self.assertButton('/force force1 ask second_project builder1 ') self.assertButton('/force force1 ask second_revision builder1 ') @defer.inlineCallbacks def test_command_force_config_more(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 config builder1 branch=master') self.assertButton('/force force1 ask reason builder1 branch=master') @defer.inlineCallbacks def test_command_force_config_nothing_missing(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 config builder1 reason=Ok') self.assertButton('/force force1 build builder1 reason=Ok') @defer.inlineCallbacks def test_command_force_ask(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 ask reason builder1 branch=master') self.assertEqual(self.contact.template, '/force force1 config builder1 branch=master reason={}') @defer.inlineCallbacks def test_command_force_build_missing(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 build builder1') self.assertButton('/force force1 ask reason builder1 ') @defer.inlineCallbacks def test_command_force_build(self): self.make_forcescheduler() force_args = {} def force(**kwargs): force_args.update(kwargs) self.schedulers[0].force = force yield self.do_test_command('force', 'force1 build builder1 reason=Good') self.assertEqual(self.sent[0][1], "Force build successfully requested.") expected = { 
'builderid': 23, 'owner': "Harry Potter (@harrypotter) on 'Hogwards'", 'reason': 'Good', 'repository': 'repository.git', # fixed param 'second_repository': 'repository2.git' # fixed param } self.assertEqual(force_args, expected) class TestPollingBot(telegram.TelegramPollingBot): def __init__(self, updates, *args, **kwargs): self.__updates = updates super().__init__(*args, **kwargs) def process_update(self, update): self.__updates -= 1 if not self.__updates: self._polling_continue = False return super().process_update(update) class TestTelegramService(TestReactorMixin, unittest.TestCase): USER = TestTelegramContact.USER CHANNEL = TestTelegramContact.CHANNEL PRIVATE = TestTelegramContact.PRIVATE def setUp(self): self.setUpTestReactor() self.patch(reactor, 'callLater', self.reactor.callLater) self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) # returns a Deferred def setupFakeHttp(self): url = 'https://api.telegram.org/bot12345:secret' return fakehttpclientservice.HTTPClientService.getService(self.master, self, url) @defer.inlineCallbacks def makeBot(self, chat_ids=None, authz=None, *args, **kwargs): if chat_ids is None: chat_ids = [] http = yield self.setupFakeHttp() www = get_plugins('www', None, load_now=True) if 'base' not in www: raise SkipTest('telegram tests need buildbot-www installed') return telegram.TelegramWebhookBot('12345:secret', http, chat_ids, authz, *args, **kwargs) @defer.inlineCallbacks def test_getContact(self): bot = yield self.makeBot() c1 = bot.getContact(self.USER, self.PRIVATE) c2 = bot.getContact(self.USER, self.CHANNEL) c1b = bot.getContact(self.USER, self.PRIVATE) self.assertIs(c1, c1b) self.assertIsInstance(c2, words.Contact) self.assertIn((-12345678, 123456789), bot.contacts) self.assertEqual({123456789, -12345678}, set(bot.channels.keys())) @defer.inlineCallbacks def test_getContact_update(self): try: bot = yield self.makeBot() contact = bot.getContact(self.USER, self.CHANNEL) updated_user = 
self.USER.copy() updated_user['username'] = "dirtyharry" self.assertEquals(contact.user_info['username'], "harrypotter") bot.getContact(updated_user, self.CHANNEL) self.assertEquals(contact.user_info['username'], "dirtyharry") finally: self.USER['username'] = "harrypotter" @defer.inlineCallbacks def test_getContact_invalid(self): bot = yield self.makeBot() bot.authz = {'': None} u = bot.getContact(user=self.USER, channel=self.CHANNEL) self.assertNotIn((-12345678, 123456789), bot.contacts) self.assertNotIn(-12345678, bot.channels) self.assertEqual(sys.getrefcount(u), 2) # local, sys c = u.channel self.assertEqual(sys.getrefcount(c), 3) # local, contact, sys del u self.assertEqual(sys.getrefcount(c), 2) # local, sys @defer.inlineCallbacks def test_getContact_valid(self): bot = yield self.makeBot() bot.authz = {'': None, 'command': 123456789} bot.getContact(user=self.USER, channel=self.CHANNEL) self.assertIn((-12345678, 123456789), bot.contacts) @defer.inlineCallbacks def test_set_webhook(self): bot = yield self.makeBot() bot.http_client.expect("post", "/setWebhook", json={'url': 'our.webhook'}, content_json={'ok': 1}) yield bot.set_webhook('our.webhook') @defer.inlineCallbacks def test_set_webhook_cert(self): bot = yield self.makeBot() bot.http_client.expect("post", "/setWebhook", data={'url': 'our.webhook'}, files={'certificate': b"this is certificate"}, content_json={'ok': 1}) yield bot.set_webhook('our.webhook', "this is certificate") @defer.inlineCallbacks def test_send_message(self): bot = yield self.makeBot() bot.http_client.expect("post", "/sendMessage", json={'chat_id': 1234, 'text': 'Hello', 'parse_mode': 'Markdown'}, content_json={'ok': 1, 'result': {'message_id': 9876}}) m = yield bot.send_message(1234, 'Hello') self.assertEqual(m['message_id'], 9876) @defer.inlineCallbacks def test_send_message_long(self): bot = yield self.makeBot() text1 = '\n'.join("{:039d}".format(i + 1) for i in range(102)) text2 = '\n'.join("{:039d}".format(i + 1) for i in range(102, 
204)) text3 = '\n'.join("{:039d}".format(i + 1) for i in range(204, 250)) bot.http_client.expect("post", "/sendMessage", json={'chat_id': 1234, 'text': text1, 'parse_mode': 'Markdown', 'reply_to_message_id': 1000}, content_json={'ok': 1, 'result': {'message_id': 1001}}) bot.http_client.expect("post", "/sendMessage", json={'chat_id': 1234, 'text': text2, 'parse_mode': 'Markdown'}, content_json={'ok': 1, 'result': {'message_id': 1002}}) bot.http_client.expect("post", "/sendMessage", json={'chat_id': 1234, 'text': text3, 'parse_mode': 'Markdown', 'reply_markup': {'inline_keyboard': 'keyboard'}}, content_json={'ok': 1, 'result': {'message_id': 1003}}) text = '\n'.join("{:039d}".format(i + 1) for i in range(250)) m = yield bot.send_message(1234, text, reply_markup={'inline_keyboard': 'keyboard'}, reply_to_message_id=1000) self.assertEqual(m['message_id'], 1003) @defer.inlineCallbacks def test_edit_message(self): bot = yield self.makeBot() bot.http_client.expect("post", "/editMessageText", json={'chat_id': 1234, 'message_id': 9876, 'text': 'Hello', 'parse_mode': 'Markdown'}, content_json={'ok': 1, 'result': {'message_id': 9876}}) m = yield bot.edit_message(1234, 9876, 'Hello') self.assertEqual(m['message_id'], 9876) @defer.inlineCallbacks def test_delete_message(self): bot = yield self.makeBot() bot.http_client.expect("post", "/deleteMessage", json={'chat_id': 1234, 'message_id': 9876}, content_json={'ok': 1}) yield bot.delete_message(1234, 9876) @defer.inlineCallbacks def test_send_sticker(self): bot = yield self.makeBot() bot.http_client.expect("post", "/sendSticker", json={'chat_id': 1234, 'sticker': 'xxxxx'}, content_json={'ok': 1, 'result': {'message_id': 9876}}) m = yield bot.send_sticker(1234, 'xxxxx') self.assertEqual(m['message_id'], 9876) @defer.inlineCallbacks def test_set_nickname(self): bot = yield self.makeBot() self.assertIsNone(bot.nickname) bot.http_client.expect("post", "/getMe", content_json={'ok': 1, 'result': {'username': 'testbot'}}) yield 
bot.set_nickname() self.assertEqual(bot.nickname, 'testbot') def prepare_request(self, **kwargs): payload = {"update_id": 12345} payload.update(kwargs) content = unicode2bytes(json.dumps(payload)) request = FakeRequest(content=content) request.uri = b"/bot12345:secret" request.method = b"POST" request.received_headers[b'Content-Type'] = b"application/json" return request def request_message(self, text): return self.prepare_request(message={ "message_id": 123, "from": self.USER, "chat": self.CHANNEL, "date": 1566688888, "text": text, }) def request_query(self, data): return self.prepare_request(callback_query={ "id": 123456, "from": self.USER, "data": data, "message": { "message_id": 12345, "from": self.USER, "chat": self.CHANNEL, "date": 1566688888, }}) @defer.inlineCallbacks def test_get_update(self): bot = yield self.makeBot() request = self.request_message("test") update = bot.get_update(request) self.assertEquals(update['message']['from'], self.USER) self.assertEquals(update['message']['chat'], self.CHANNEL) @defer.inlineCallbacks def test_get_update_bad_content_type(self): bot = yield self.makeBot() request = self.request_message("test") request.received_headers[b'Content-Type'] = b"application/data" with self.assertRaises(ValueError): bot.get_update(request) @defer.inlineCallbacks def test_render_POST(self): # This actually also tests process_incoming bot = yield self.makeBot() bot.contactClass = FakeContact request = self.request_message("test") bot.webhook.render_POST(request) contact = bot.getContact(self.USER, self.CHANNEL) self.assertEquals(contact.messages, ["test"]) @defer.inlineCallbacks def test_parse_query_cached(self): bot = yield self.makeBot() bot.contactClass = FakeContact bot.query_cache.update({ 100: "good" }) bot.http_client.expect("post", "/answerCallbackQuery", json={'callback_query_id': 123456}, content_json={'ok': 1}) request = self.request_query("100") bot.process_webhook(request) self.assertEquals(bot.getContact(self.USER, 
self.CHANNEL).messages, ["good"]) @defer.inlineCallbacks def test_parse_query_cached_dict(self): bot = yield self.makeBot() bot.contactClass = FakeContact bot.query_cache = { 100: {'command': "good", 'notify': "hello"} } bot.http_client.expect("post", "/answerCallbackQuery", json={'callback_query_id': 123456, 'text': "hello"}, content_json={'ok': 1}) request = self.request_query("100") bot.process_webhook(request) self.assertEquals(bot.getContact(self.USER, self.CHANNEL).messages, ["good"]) @defer.inlineCallbacks def test_parse_query_explicit(self): bot = yield self.makeBot() bot.contactClass = FakeContact bot.query_cache = { 100: "bad" } bot.http_client.expect("post", "/answerCallbackQuery", json={'callback_query_id': 123456}, content_json={'ok': 1}) request = self.request_query("good") bot.process_webhook(request) self.assertEquals(bot.getContact(self.USER, self.CHANNEL).messages, ["good"]) @defer.inlineCallbacks def test_parse_query_bad(self): bot = yield self.makeBot() bot.contactClass = FakeContact bot.query_cache.update({ 100: "bad" }) bot.http_client.expect("post", "/editMessageReplyMarkup", json={'chat_id': -12345678, 'message_id': 12345}, content_json={'ok': 1}) bot.http_client.expect("post", "/answerCallbackQuery", json={'callback_query_id': 123456, 'text': "Sorry, button is no longer valid!"}, content_json={'ok': 1}) request = self.request_query("101") bot.process_webhook(request) @defer.inlineCallbacks def makePollingBot(self, updates, chat_ids=None, authz=None, *args, **kwargs): if chat_ids is None: chat_ids = [] http = yield self.setupFakeHttp() return TestPollingBot(updates, '12345:secret', http, chat_ids, authz, *args, **kwargs) @defer.inlineCallbacks def test_polling(self): bot = yield self.makePollingBot(2) bot._polling_continue = True bot.http_client.expect("post", "/deleteWebhook", content_json={"ok": 1}) bot.http_client.expect( "post", "/getUpdates", json={'timeout': bot.poll_timeout}, content_json={ 'ok': 1, 'result': [{ "update_id": 10000, 
"message": { "message_id": 123, "from": self.USER, "chat": self.CHANNEL, "date": 1566688888, "text": "ignore"}}]}) bot.http_client.expect( "post", "/getUpdates", json={'timeout': bot.poll_timeout, "offset": 10001}, content_json={ 'ok': 1, 'result': [{ "update_id": 10001, "message": { "message_id": 124, "from": self.USER, "chat": self.CHANNEL, "date": 1566688889, "text": "/nay"}}]}) bot.http_client.expect( "post", "/sendMessage", json={'chat_id': -12345678, 'text': 'Never mind, Harry...', 'parse_mode': 'Markdown'}, content_json={'ok': 1, 'result': {'message_id': 125}}) yield bot.do_polling() @defer.inlineCallbacks def test_format_build_status(self): bot = yield self.makeBot() build = {'results': SUCCESS} self.assertEqual(bot.format_build_status(build), "completed successfully ✅") @defer.inlineCallbacks def test_format_build_status_short(self): bot = yield self.makeBot() build = {'results': WARNINGS} self.assertEqual(bot.format_build_status(build, short=True), " ⚠️") class HttpServiceWithErrors(fakehttpclientservice.HTTPClientService): def __init__(self, skip, errs, *args, **kwargs): self.__skip = skip self.__errs = errs self.succeeded = False super().__init__(*args, **kwargs) def post(self, ep, **kwargs): if self.__skip: self.__skip -= 1 else: if self.__errs: self.__errs -= 1 raise RuntimeError("{}".format(self.__errs + 1)) self.succeeded = True return super().post(ep, **kwargs) # returns a Deferred def setupFakeHttpWithErrors(self, skip, errs): url = 'https://api.telegram.org/bot12345:secret' return self.HttpServiceWithErrors.getService(self.master, self, skip, errs, url) @defer.inlineCallbacks def test_post_not_ok(self): bot = yield self.makeBot() bot.http_client.expect( "post", "/post", content_json={'ok': 0}) def log(msg): logs.append(msg) logs = [] bot.log = log yield bot.post("/post") self.assertIn("ERROR", logs[0]) @defer.inlineCallbacks def test_post_need_repeat(self): bot = yield self.makeBot() bot.http_client = yield self.setupFakeHttpWithErrors(0, 2) 
bot.http_client.expect( "post", "/post", content_json={'ok': 1}) def log(msg): logs.append(msg) logs = [] bot.log = log bot.post("/post") self.assertIn("ERROR", logs[0]) self.reactor.pump(3 * [30.]) self.assertTrue(bot.http_client.succeeded) @defer.inlineCallbacks def test_polling_need_repeat(self): bot = yield self.makePollingBot(1) bot.reactor = self.reactor bot.http_client = yield self.setupFakeHttpWithErrors(1, 2) bot._polling_continue = True bot.http_client.expect("post", "/deleteWebhook", content_json={"ok": 1}) bot.http_client.expect( "post", "/getUpdates", json={'timeout': bot.poll_timeout}, content_json={ 'ok': 1, 'result': [{ "update_id": 10000, "message": { "message_id": 123, "from": self.USER, "chat": self.CHANNEL, "date": 1566688888, "text": "ignore"}}]}) def log(msg): logs.append(msg) logs = [] bot.log = log bot.do_polling() self.assertIn("ERROR", logs[0]) self.reactor.pump(3 * [30.]) self.assertTrue(bot.http_client.succeeded) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_utils.py000066400000000000000000000450731413250514000251610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import datetime import textwrap from dateutil.tz import tzutc from twisted.internet import defer from twisted.trial import unittest from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.reporters import utils from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import logging from buildbot.test.util.misc import TestReactorMixin class TestDataUtils(TestReactorMixin, unittest.TestCase, logging.LoggingMixin): LOGCONTENT = textwrap.dedent("""\ line zero line 1 """) def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) def setupDb(self): self.db = self.master.db self.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=98, results=SUCCESS, reason="testReason1"), fakedb.Buildset(id=99, results=SUCCESS, reason="testReason2", parent_buildid=21), fakedb.Builder(id=80, name='Builder1'), fakedb.Builder(id=81, name='Builder2'), fakedb.BuildRequest(id=9, buildsetid=97, builderid=80), fakedb.BuildRequest(id=10, buildsetid=97, builderid=80), fakedb.BuildRequest(id=11, buildsetid=98, builderid=80), fakedb.BuildRequest(id=12, buildsetid=98, builderid=80), fakedb.BuildRequest(id=13, buildsetid=99, builderid=81), fakedb.Build(id=18, number=0, builderid=80, buildrequestid=9, workerid=13, masterid=92, results=FAILURE), fakedb.Build(id=19, number=1, builderid=80, buildrequestid=10, workerid=13, masterid=92, results=RETRY), fakedb.Build(id=20, number=2, builderid=80, buildrequestid=11, workerid=13, masterid=92, results=SUCCESS), fakedb.Build(id=21, number=3, builderid=80, buildrequestid=12, workerid=13, masterid=92, results=SUCCESS), fakedb.Build(id=22, number=1, builderid=81, buildrequestid=13, workerid=13, masterid=92, results=SUCCESS), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=234), 
fakedb.SourceStamp(id=234), fakedb.Change(changeid=13, branch='trunk', revision='9283', author='me@foo', repository='svn://...', codebase='cbsvn', project='world-domination', sourcestampid=234), fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='him@foo', patch_comment='foo', subdir='/foo', patchlevel=3), fakedb.SourceStamp(id=235, patchid=99), ]) for _id in (20, 21): self.db.insertTestData([ fakedb.BuildProperty( buildid=_id, name="workername", value="wrk"), fakedb.BuildProperty( buildid=_id, name="reason", value="because"), fakedb.BuildProperty( buildid=_id, name="owner", value="him"), fakedb.Step(id=100 + _id, buildid=_id, name="step1"), fakedb.Step(id=200 + _id, buildid=_id, name="step2"), fakedb.Log(id=60 + _id, stepid=100 + _id, name='stdio', slug='stdio', type='s', num_lines=2), fakedb.LogChunk(logid=60 + _id, first_line=0, last_line=1, compressed=0, content=self.LOGCONTENT), ]) @defer.inlineCallbacks def getChangesForBuild(buildid): assert buildid == 20 ch = yield self.master.db.changes.getChange(13) return [ch] self.master.db.changes.getChangesForBuild = getChangesForBuild @defer.inlineCallbacks def test_getDetailsForBuildset(self): self.setupDb() res = yield utils.getDetailsForBuildset(self.master, 98, want_properties=True, want_steps=True, want_previous_build=True) self.assertEqual(len(res['builds']), 2) build1 = res['builds'][0] build2 = res['builds'][1] buildset = res['buildset'] self.assertEqual(build1['properties'], {'reason': ('because', 'fakedb'), 'owner': ('him', 'fakedb'), 'workername': ('wrk', 'fakedb')}) self.assertEqual(len(build1['steps']), 2) self.assertEqual(build1['buildid'], 20) self.assertEqual(build2['buildid'], 21) self.assertEqual(buildset['bsid'], 98) # make sure prev_build was computed self.assertEqual(build1['prev_build']['buildid'], 18) self.assertEqual(build2['prev_build']['buildid'], 20) @defer.inlineCallbacks def test_getDetailsForBuild(self): self.setupDb() build = yield self.master.data.get(("builds", 21)) 
yield utils.getDetailsForBuild(self.master, build, want_properties=False, want_steps=False, want_previous_build=False, want_logs=False) self.assertEqual(build['parentbuild'], None) self.assertEqual(build['parentbuilder'], None) @defer.inlineCallbacks def test_getDetailsForBuildWithParent(self): self.setupDb() build = yield self.master.data.get(("builds", 22)) yield utils.getDetailsForBuild(self.master, build, want_properties=False, want_steps=False, want_previous_build=False, want_logs=False) self.assertEqual(build['parentbuild']['buildid'], 21) self.assertEqual(build['parentbuilder']['name'], "Builder1") @defer.inlineCallbacks def test_getDetailsForBuildsetWithLogs(self): self.setupDb() res = yield utils.getDetailsForBuildset(self.master, 98, want_properties=True, want_steps=True, want_previous_build=True, want_logs=True, want_logs_content=True) build1 = res['builds'][0] self.assertEqual(build1['steps'][0]['logs'][0]['content']['content'], self.LOGCONTENT) self.assertEqual(build1['steps'][0]['logs'][0]['url'], 'http://localhost:8080/#builders/80/builds/2/steps/29/logs/stdio') @defer.inlineCallbacks def test_get_details_for_buildset_all(self): self.setupDb() res = yield utils.getDetailsForBuildset(self.master, 98, want_properties=True, want_steps=True, want_previous_build=True, want_logs=True, want_logs_content=True) self.assertEqual(res, { 'builds': [{ 'builder': { 'builderid': 80, 'description': None, 'masterids': [], 'name': 'Builder1', 'tags': [] }, 'builderid': 80, 'buildid': 20, 'buildrequestid': 11, 'buildset': { 'bsid': 98, 'complete': False, 'complete_at': None, 'external_idstring': 'extid', 'parent_buildid': None, 'parent_relationship': None, 'reason': 'testReason1', 'results': 0, 'sourcestamps': [{ 'branch': 'master', 'codebase': '', 'created_at': datetime.datetime(1972, 11, 5, 18, 7, 14, tzinfo=tzutc()), 'patch': None, 'project': 'proj', 'repository': 'repo', 'revision': 'abcd', 'ssid': 234 }], 'submitted_at': 12345678 }, 'complete': False, 
'complete_at': None, 'masterid': 92, 'number': 2, 'prev_build': { 'builderid': 80, 'buildid': 18, 'buildrequestid': 9, 'complete': False, 'complete_at': None, 'masterid': 92, 'number': 0, 'properties': {}, 'results': 2, 'started_at': datetime.datetime(2011, 5, 1, 15, 3, 42, tzinfo=tzutc()), 'state_string': 'test', 'workerid': 13 }, 'properties': { 'owner': ('him', 'fakedb'), 'reason': ('because', 'fakedb'), 'workername': ('wrk', 'fakedb') }, 'results': 0, 'started_at': datetime.datetime(2011, 5, 1, 15, 3, 42, tzinfo=tzutc()), 'state_string': 'test', 'steps': [{ 'buildid': 20, 'complete': False, 'complete_at': None, 'hidden': False, 'logs': [{ 'complete': False, 'content': { 'content': 'line zero\nline 1\n', 'firstline': 0, 'logid': 80 }, 'logid': 80, 'name': 'stdio', 'num_lines': 2, 'slug': 'stdio', 'stepid': 120, 'type': 's', 'url': 'http://localhost:8080/#builders/80/builds/2/steps/29/logs/stdio' }], 'name': 'step1', 'number': 29, 'results': None, 'started_at': datetime.datetime(2011, 5, 1, 15, 3, 42, tzinfo=tzutc()), 'state_string': '', 'stepid': 120, 'urls': [] }, { 'buildid': 20, 'complete': False, 'complete_at': None, 'hidden': False, 'logs': [], 'name': 'step2', 'number': 29, 'results': None, 'started_at': datetime.datetime(2011, 5, 1, 15, 3, 42, tzinfo=tzutc()), 'state_string': '', 'stepid': 220, 'urls': [] }], 'url': 'http://localhost:8080/#builders/80/builds/2', 'workerid': 13 }, { 'builder': { 'builderid': 80, 'description': None, 'masterids': [], 'name': 'Builder1', 'tags': [] }, 'builderid': 80, 'buildid': 21, 'buildrequestid': 12, 'buildset': { 'bsid': 98, 'complete': False, 'complete_at': None, 'external_idstring': 'extid', 'parent_buildid': None, 'parent_relationship': None, 'reason': 'testReason1', 'results': 0, 'sourcestamps': [{ 'branch': 'master', 'codebase': '', 'created_at': datetime.datetime(1972, 11, 5, 18, 7, 14, tzinfo=tzutc()), 'patch': None, 'project': 'proj', 'repository': 'repo', 'revision': 'abcd', 'ssid': 234 }], 'submitted_at': 
12345678 }, 'complete': False, 'complete_at': None, 'masterid': 92, 'number': 3, 'prev_build': { 'builderid': 80, 'buildid': 20, 'buildrequestid': 11, 'complete': False, 'complete_at': None, 'masterid': 92, 'number': 2, 'properties': {}, 'results': 0, 'started_at': datetime.datetime(2011, 5, 1, 15, 3, 42, tzinfo=tzutc()), 'state_string': 'test', 'workerid': 13 }, 'properties': { 'owner': ('him', 'fakedb'), 'reason': ('because', 'fakedb'), 'workername': ('wrk', 'fakedb') }, 'results': 0, 'started_at': datetime.datetime(2011, 5, 1, 15, 3, 42, tzinfo=tzutc()), 'state_string': 'test', 'steps': [{ 'buildid': 21, 'complete': False, 'complete_at': None, 'hidden': False, 'logs': [{ 'complete': False, 'content': {'content': 'line zero\nline 1\n', 'firstline': 0, 'logid': 81}, 'logid': 81, 'name': 'stdio', 'num_lines': 2, 'slug': 'stdio', 'stepid': 121, 'type': 's', 'url': 'http://localhost:8080/#builders/80/builds/3/steps/29/logs/stdio' }], 'name': 'step1', 'number': 29, 'results': None, 'started_at': datetime.datetime(2011, 5, 1, 15, 3, 42, tzinfo=tzutc()), 'state_string': '', 'stepid': 121, 'urls': [] }, { 'buildid': 21, 'complete': False, 'complete_at': None, 'hidden': False, 'logs': [], 'name': 'step2', 'number': 29, 'results': None, 'started_at': datetime.datetime(2011, 5, 1, 15, 3, 42, tzinfo=tzutc()), 'state_string': '', 'stepid': 221, 'urls': [] }], 'url': 'http://localhost:8080/#builders/80/builds/3', 'workerid': 13 }], 'buildset': { 'bsid': 98, 'complete': False, 'complete_at': None, 'external_idstring': 'extid', 'parent_buildid': None, 'parent_relationship': None, 'reason': 'testReason1', 'results': 0, 'sourcestamps': [{ 'branch': 'master', 'codebase': '', 'created_at': datetime.datetime(1972, 11, 5, 18, 7, 14, tzinfo=tzutc()), 'patch': None, 'project': 'proj', 'repository': 'repo', 'revision': 'abcd', 'ssid': 234 }], 'submitted_at': 12345678 } }) @defer.inlineCallbacks def test_getResponsibleUsers(self): self.setupDb() res = yield 
utils.getResponsibleUsersForSourceStamp(self.master, 234) self.assertEqual(res, ["me@foo"]) @defer.inlineCallbacks def test_getResponsibleUsersFromPatch(self): self.setupDb() res = yield utils.getResponsibleUsersForSourceStamp(self.master, 235) self.assertEqual(res, ["him@foo"]) @defer.inlineCallbacks def test_getResponsibleUsersForBuild(self): self.setupDb() res = yield utils.getResponsibleUsersForBuild(self.master, 20) self.assertEqual(sorted(res), sorted(["me@foo", "him"])) @defer.inlineCallbacks def test_getResponsibleUsersForBuildWithBadOwner(self): self.setUpLogging() self.setupDb() self.db.insertTestData([ fakedb.BuildProperty( buildid=20, name="owner", value=["him"]), ]) res = yield utils.getResponsibleUsersForBuild(self.master, 20) self.assertLogged("Please report a bug") self.assertEqual(sorted(res), sorted(["me@foo", "him"])) @defer.inlineCallbacks def test_getResponsibleUsersForBuildWithOwners(self): self.setupDb() self.db.insertTestData([ fakedb.BuildProperty( buildid=20, name="owners", value=["him", "her"]), ]) res = yield utils.getResponsibleUsersForBuild(self.master, 20) self.assertEqual(sorted(res), sorted(["me@foo", "him", "her"])) @defer.inlineCallbacks def test_getPreviousBuild(self): self.setupDb() build = yield self.master.data.get(("builds", 21)) res = yield utils.getPreviousBuild(self.master, build) self.assertEqual(res['buildid'], 20) @defer.inlineCallbacks def test_getPreviousBuildWithRetry(self): self.setupDb() build = yield self.master.data.get(("builds", 20)) res = yield utils.getPreviousBuild(self.master, build) self.assertEqual(res['buildid'], 18) class TestURLUtils(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) def test_UrlForBuild(self): self.assertEqual(utils.getURLForBuild(self.master, 1, 3), 'http://localhost:8080/#builders/1/builds/3') 
buildbot-3.4.0/master/buildbot/test/unit/reporters/test_words.py000066400000000000000000000747441413250514000251660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re import mock from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters import words from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util import datetime2epoch class ContactMixin(TestReactorMixin): botClass = words.StatusBot channelClass = words.Channel contactClass = words.Contact USER = "me" CHANNEL = "#buildbot" BUILDER_NAMES = ['builder1', 'builder2'] BUILDER_IDS = [23, 45] @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.patch(reactor, 'callLater', self.reactor.callLater) self.patch(reactor, 'seconds', self.reactor.seconds) self.patch(reactor, 'stop', self.reactor.stop) self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) for builderid, name in zip(self.BUILDER_IDS, self.BUILDER_NAMES): self.master.db.builders.addTestBuilder( builderid=builderid, name=name) self.bot = self.botClass(notify_events={'success': 1, 
'failure': 1}) self.bot.channelClass = self.channelClass self.bot.contactClass = self.contactClass self.bot.nickname = 'nick' self.missing_workers = set() # fake out subscription/unsubscription self.subscribed = False # fake out clean shutdown self.bot.parent = self self.bot.master.botmaster = mock.Mock( name='StatusBot-instance.master.botmaster') self.bot.master.botmaster.shuttingDown = False def cleanShutdown(): self.bot.master.botmaster.shuttingDown = True self.bot.master.botmaster.cleanShutdown = cleanShutdown def cancelCleanShutdown(): self.bot.master.botmaster.shuttingDown = False self.bot.master.botmaster.cancelCleanShutdown = cancelCleanShutdown self.contact = self.contactClass(user=self.USER, channel=self.bot.getChannel(self.CHANNEL)) yield self.contact.channel.setServiceParent(self.master) yield self.master.startService() def patch_send(self): self.sent = [] def send(msg): if not isinstance(msg, (list, tuple)): msg = (msg,) for m in msg: self.sent.append(m) self.contact.channel.send = send @defer.inlineCallbacks def do_test_command(self, command, args='', contact=None, clock_ticks=None, exp_usage=True, exp_UsageError=False, shuttingDown=False, **kwargs): if contact is None: contact = self.contact cmd = getattr(contact, 'command_' + command.upper()) if exp_usage: self.assertTrue(hasattr(cmd, 'usage')) self.patch_send() self.bot.master.botmaster.shuttingDown = shuttingDown if exp_UsageError: try: yield cmd(args, **kwargs) except words.UsageError: return else: self.fail("no UsageError") else: yield cmd(args, **kwargs) if clock_ticks: self.reactor.pump(clock_ticks) def setupSomeBuilds(self): self.master.db.insertTestData([ # Three builds on builder#0, One build on builder#1 fakedb.Build(id=13, masterid=88, workerid=13, builderid=self.BUILDER_IDS[0], buildrequestid=82, number=3), fakedb.Build(id=14, masterid=88, workerid=13, builderid=self.BUILDER_IDS[0], buildrequestid=83, number=4), fakedb.Build(id=15, masterid=88, workerid=13, builderid=self.BUILDER_IDS[1], 
buildrequestid=84, number=5), fakedb.Build(id=16, masterid=88, workerid=13, builderid=self.BUILDER_IDS[0], buildrequestid=85, number=6), ]) self.master.db.builds.finishBuild(buildid=14, results=SUCCESS) def setup_multi_builders(self): # Make first builder configured, but not connected # Make second builder configured and connected self.master.db.insertTestData([ fakedb.Worker(id=1, name='linux1', info={}), # connected one fakedb.Worker(id=2, name='linux2', info={}), # disconnected one fakedb.BuilderMaster( id=4012, masterid=13, builderid=self.BUILDER_IDS[0]), fakedb.BuilderMaster( id=4013, masterid=13, builderid=self.BUILDER_IDS[1]), fakedb.ConfiguredWorker(id=14013, workerid=2, buildermasterid=4012), fakedb.ConfiguredWorker(id=14013, workerid=1, buildermasterid=4013), ]) class TestContact(ContactMixin, unittest.TestCase): def test_channel_service(self): self.assertTrue(self.contact.channel.running) self.contact.channel.stopService() @defer.inlineCallbacks def test_command_notify0(self): yield self.do_test_command('notify', exp_UsageError=True) yield self.do_test_command('notify', args="invalid arg", exp_UsageError=True) yield self.do_test_command('notify', args="on") self.assertEqual( self.sent, ["The following events are being notified: finished, started."]) yield self.do_test_command('notify', args="off") self.assertEqual( self.sent, ['No events are being notified.']) yield self.do_test_command('notify', args="on started") self.assertEqual( self.sent, ["The following events are being notified: started."]) yield self.do_test_command('notify', args="off started") self.assertEqual( self.sent, ['No events are being notified.']) yield self.assertFailure( self.do_test_command('notify', args="off finished"), KeyError) yield self.do_test_command('notify', args="list") self.assertEqual( self.sent, ['No events are being notified.']) @defer.inlineCallbacks def notify_build_test(self, notify_args): self.bot.tags = None yield self.test_command_watch_builder0() yield 
self.do_test_command('notify', args=notify_args) buildStarted = self.contact.channel.subscribed[0].callback buildFinished = self.contact.channel.subscribed[1].callback for buildid in (13, 14, 16): self.master.db.builds.finishBuild(buildid=buildid, results=SUCCESS) build = yield self.master.db.builds.getBuild(buildid) buildStarted("somekey", build) buildFinished("somekey", build) def test_command_notify_build_started(self): self.notify_build_test("on started") def test_command_notify_build_finished(self): self.notify_build_test("on finished") def test_command_notify_build_better(self): self.notify_build_test("on better") def test_command_notify_build_worse(self): self.notify_build_test("on worse") def test_command_notify_build_problem(self): self.notify_build_test("on problem") def test_command_notify_build_recovery(self): self.notify_build_test("on recovery") def test_command_notify_build_started_finished(self): self.notify_build_test("on") @defer.inlineCallbacks def test_notify_missing_worker(self): self.patch_send() yield self.do_test_command('notify', args='on worker') missing_worker = self.contact.channel.subscribed[2].callback missing_worker((None, None, 'missing'), dict(workerid=1, name="work", last_connection="sometime")) self.assertEquals(self.sent[1], "Worker `work` is missing. 
It was seen last on sometime.") self.assertIn(1, self.contact.channel.missing_workers) @defer.inlineCallbacks def test_notify_worker_is_back(self): self.patch_send() yield self.do_test_command('notify', args='on worker') self.contact.channel.missing_workers.add(1) missing_worker = self.contact.channel.subscribed[2].callback missing_worker((None, None, 'connected'), dict(workerid=1, name="work", last_connection="sometime")) self.assertEquals(self.sent[1], "Worker `work` is back online.") self.assertNotIn(1, self.contact.channel.missing_workers) @defer.inlineCallbacks def test_command_help_noargs(self): yield self.do_test_command('help') self.assertIn('help - ', '\n'.join(self.sent)) @defer.inlineCallbacks def test_command_help_arg(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = 'foo - bar' yield self.do_test_command('help', args='foo') self.assertIn('Usage: foo - bar', self.sent[0]) @defer.inlineCallbacks def test_command_help_no_usage(self): self.contact.command_FOO = lambda: None yield self.do_test_command('help', args='foo') self.assertIn('No usage info for', self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = { None: 'foo - bar' } yield self.do_test_command('help', args='foo') self.assertIn('Usage: foo - bar', self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command_no_usage(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = {} yield self.do_test_command('help', args='foo') self.assertIn("No usage info for 'foo'", self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command_arg(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = { 'this': 'foo this - bar' } yield self.do_test_command('help', args='foo this') self.assertIn('Usage: foo this - bar', self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command_arg_no_usage(self): 
self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = { # nothing for arg 'this' ('this', 'first'): 'foo this first - bar' } yield self.do_test_command('help', args='foo this') self.assertIn("No usage info for 'foo' 'this'", self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command_arg_subarg(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = { ('this', 'first'): 'foo this first - bar' } yield self.do_test_command('help', args='foo this first') self.assertIn('Usage: foo this first - bar', self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command_arg_subarg_no_usage(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = { None: 'foo - bar', 'this': 'foo this - bar', ('this', 'first'): 'foo this first - bar' # nothing for subarg 'missing' } yield self.do_test_command('help', args='foo this missing') self.assertIn("No usage info for 'foo' 'this' 'missing'", self.sent[0]) @defer.inlineCallbacks def test_command_help_nosuch(self): yield self.do_test_command('help', args='foo', exp_UsageError=True) @defer.inlineCallbacks def test_command_shutdown(self): yield self.do_test_command('shutdown', exp_UsageError=True) self.assertEqual(self.bot.master.botmaster.shuttingDown, False) @defer.inlineCallbacks def test_command_shutdown_check_running(self): yield self.do_test_command('shutdown', args='check', shuttingDown=False) self.assertEqual(self.bot.master.botmaster.shuttingDown, False) self.assertIn('buildbot is running', self.sent[0]) @defer.inlineCallbacks def test_command_shutdown_check_shutting_down(self): yield self.do_test_command('shutdown', args='check', shuttingDown=True) self.assertEqual(self.bot.master.botmaster.shuttingDown, True) self.assertIn('buildbot is shutting down', self.sent[0]) @defer.inlineCallbacks def test_command_shutdown_start(self): yield self.do_test_command('shutdown', args='start', shuttingDown=False) 
self.assertEqual(self.bot.master.botmaster.shuttingDown, True) @defer.inlineCallbacks def test_command_shutdown_stop(self): yield self.do_test_command('shutdown', args='stop', shuttingDown=True) self.assertEqual(self.bot.master.botmaster.shuttingDown, False) @defer.inlineCallbacks def test_command_shutdown_now(self): yield self.do_test_command('shutdown', args='now') self.assertEqual(self.bot.master.botmaster.shuttingDown, False) self.assertTrue(self.reactor.stop_called) @defer.inlineCallbacks def test_command_source(self): yield self.do_test_command('source') self.assertIn('My source', self.sent[0]) @defer.inlineCallbacks def test_command_commands(self): yield self.do_test_command('commands') self.assertIn('Buildbot commands', self.sent[0]) @defer.inlineCallbacks def test_command_hello(self): yield self.do_test_command('hello', exp_usage=False) self.assertIn(self.sent[0], words.GREETINGS) @defer.inlineCallbacks def test_command_list(self): yield self.do_test_command('list', exp_UsageError=True) @defer.inlineCallbacks def test_command_list_builders(self): yield self.do_test_command('list', args='all builders') self.assertEqual(len(self.sent), 1) for builder in self.BUILDER_NAMES: self.assertIn('{} [offline]'.format(builder), self.sent[0]) @defer.inlineCallbacks def test_command_list_workers(self): workers = ['worker1', 'worker2'] for worker in workers: self.master.db.workers.db.insertTestData([ fakedb.Worker(name=worker) ]) yield self.do_test_command('list', args='all workers') self.assertEqual(len(self.sent), 1) for worker in workers: self.assertIn('{} [offline]'.format(worker), self.sent[0]) @defer.inlineCallbacks def test_command_list_workers_online(self): self.setup_multi_builders() # Also set the connectedness: self.master.db.insertTestData([ fakedb.ConnectedWorker(id=113, masterid=13, workerid=1) ]) yield self.do_test_command('list', args='all workers') self.assertEqual(len(self.sent), 1) self.assertNotIn('linux1 [disconnected]', self.sent[0]) 
self.assertIn('linux2 [disconnected]', self.sent[0]) @defer.inlineCallbacks def test_command_list_changes(self): self.master.db.workers.db.insertTestData([ fakedb.Change() ]) yield self.do_test_command('list', args='2 changes') self.assertEqual(len(self.sent), 1) @defer.inlineCallbacks def test_command_list_builders_not_connected(self): self.setup_multi_builders() yield self.do_test_command('list', args='all builders') self.assertEqual(len(self.sent), 1) self.assertIn('{} [offline]'.format(self.BUILDER_NAMES[0]), self.sent[0]) self.assertIn('{} [offline]'.format(self.BUILDER_NAMES[1]), self.sent[0]) @defer.inlineCallbacks def test_command_list_builders_connected(self): self.setup_multi_builders() # Also set the connectedness: self.master.db.insertTestData([ fakedb.ConnectedWorker(id=113, masterid=13, workerid=1) ]) yield self.do_test_command('list', args='all builders') self.assertEqual(len(self.sent), 1) self.assertIn('{} [offline]'.format(self.BUILDER_NAMES[0]), self.sent[0]) self.assertNotIn('{} [offline]'.format(self.BUILDER_NAMES[1]), self.sent[0]) @defer.inlineCallbacks def test_command_status(self): yield self.do_test_command('status') @defer.inlineCallbacks def test_command_status_online(self): # we are online and we have some finished builds self.setup_multi_builders() self.master.db.insertTestData([ fakedb.ConfiguredWorker(id=14012, workerid=1, buildermasterid=4013), fakedb.ConnectedWorker(id=114, masterid=13, workerid=1) ]) self.setupSomeBuilds() self.master.db.builds.finishBuild(buildid=13, results=FAILURE) self.master.db.builds.finishBuild(buildid=15, results=SUCCESS) self.master.db.builds.finishBuild(buildid=16, results=FAILURE) yield self.do_test_command('status') @defer.inlineCallbacks def test_command_status_all(self): yield self.do_test_command('status', args='all') @defer.inlineCallbacks def test_command_status_builder0_offline(self): yield self.do_test_command('status', args=self.BUILDER_NAMES[0]) self.assertEqual(self.sent, ['`{}`: 
offline'.format(self.BUILDER_NAMES[0])]) @defer.inlineCallbacks def test_command_status_builder0_running(self): self.setupSomeBuilds() yield self.do_test_command('status', args=self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 1) self.assertIn('`builder1`: running', self.sent[0]) self.assertRegex(self.sent[0], r' build \[#3\].* \(no current step\)') self.assertRegex(self.sent[0], r' build \[#6\].* \(no current step\)') @defer.inlineCallbacks def test_command_status_bogus(self): yield self.do_test_command('status', args='bogus_builder', exp_UsageError=True) def sub_seconds(self, strings): # sometimes timing works out wrong, so just call it "n seconds" return [re.sub(r'\d seconds|a moment', 'N seconds', s) for s in strings] @defer.inlineCallbacks def test_command_last(self): self.setupSomeBuilds() self.setup_multi_builders() # Also set the connectedness: self.master.db.insertTestData([ fakedb.ConnectedWorker(id=113, masterid=13, workerid=2) ]) yield self.do_test_command('last') self.assertEqual(len(self.sent), 1) sent = self.sub_seconds(self.sent) self.assertIn( '`builder1`: last build completed successfully (N seconds ago)', sent) @defer.inlineCallbacks def test_command_last_all(self): self.setupSomeBuilds() yield self.do_test_command('last', args='all') self.assertEqual(len(self.sent), 1) sent = self.sub_seconds(self.sent) self.assertIn( '`builder1`: last build completed successfully (N seconds ago)', sent[0]) self.assertIn( '`builder2`: no builds run since last restart', sent[0]) @defer.inlineCallbacks def test_command_last_builder_bogus(self): yield self.do_test_command('last', args="BOGUS", exp_UsageError=True) @defer.inlineCallbacks def test_command_last_builder0(self): self.setupSomeBuilds() yield self.do_test_command('last', args=self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 1) sent = self.sub_seconds(self.sent) self.assertIn( '`builder1`: last build completed successfully (N seconds ago)', sent) @defer.inlineCallbacks def 
test_command_last_builder1(self): self.setupSomeBuilds() yield self.do_test_command('last', args=self.BUILDER_NAMES[1]) self.assertEqual(len(self.sent), 1) self.assertIn( '`builder2`: no builds run since last restart', self.sent) @defer.inlineCallbacks def test_command_watch(self): yield self.do_test_command('watch', exp_UsageError=True) @defer.inlineCallbacks def test_command_watch_builder0_no_builds(self): yield self.do_test_command('watch', args=self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 1) self.assertIn('There are no currently running builds.', self.sent[0]) @defer.inlineCallbacks def test_command_watch_builder0(self): self.setupSomeBuilds() yield self.do_test_command('watch', args=self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 2) self.assertIn('Watching build [#3](http://localhost:8080/#builders/23/builds/3) of ' '`builder1` until it finishes...', self.sent) self.assertIn('Watching build [#6](http://localhost:8080/#builders/23/builds/6) of ' '`builder1` until it finishes...', self.sent) @defer.inlineCallbacks def test_command_watch_builder0_get_notifications(self): # (continue from the prior test) self.bot.tags = None yield self.test_command_watch_builder0() del self.sent[:] yield self.sendBuildFinishedMessage(16) self.assertEqual(len(self.sent), 1) self.assertIn("Build [#6](http://localhost:8080/#builders/23/builds/6) of " "`builder1` completed successfully.", self.sent) @defer.inlineCallbacks def test_command_watch_builder1(self): self.setupSomeBuilds() yield self.do_test_command('watch', args=self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 2) self.assertIn('Watching build [#3](http://localhost:8080/#builders/23/builds/3) of ' '`builder1` until it finishes...', self.sent) self.assertIn('Watching build [#6](http://localhost:8080/#builders/23/builds/6) of ' '`builder1` until it finishes...', self.sent) @defer.inlineCallbacks def sendBuildFinishedMessage(self, buildid, results=0): self.master.db.builds.finishBuild(buildid=buildid, 
results=SUCCESS) build = yield self.master.db.builds.getBuild(buildid) self.master.mq.callConsumer(('builds', str(buildid), 'complete'), dict( buildid=buildid, number=build['number'], builderid=build['builderid'], buildrequestid=build['buildrequestid'], workerid=build['workerid'], masterid=build['masterid'], started_at=datetime2epoch( build['started_at']), complete=True, complete_at=datetime2epoch( build['complete_at']), state_string='', results=results, )) @defer.inlineCallbacks def test_command_stop(self): yield self.do_test_command('stop', exp_UsageError=True) @defer.inlineCallbacks def test_command_stop_bogus_builder(self): yield self.do_test_command('stop', args="build BOGUS 'i have a reason'", exp_UsageError=True) @defer.inlineCallbacks def test_command_stop_builder0_no_builds(self): yield self.do_test_command('stop', args="build {} 'i have a reason'".format( self.BUILDER_NAMES[0])) self.assertEqual(len(self.sent), 1) self.assertIn('no build is', self.sent[0]) @defer.inlineCallbacks def test_command_stop_builder0_1_builds(self): self.setupSomeBuilds() yield self.do_test_command('stop', args="build {} 'i have a reason'".format( self.BUILDER_NAMES[0])) self.assertEqual(len(self.sent), 2) self.assertRegex(self.sent[0], r'Build \[#[36]\].* of `builder1` interrupted') self.assertRegex(self.sent[1], r'Build \[#[63]\].* of `builder1` interrupted') @defer.inlineCallbacks def test_command_force_no_args(self): yield self.do_test_command('force', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_wrong_first_arg(self): yield self.do_test_command('force', args='notbuild', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_build_no_args(self): yield self.do_test_command('force', args='build', exp_UsageError=True) # TODO: missing tests for: # - bad args # - arg validation failure (self.master.config.validation) @defer.inlineCallbacks def test_command_force(self): yield self.do_test_command( 'force', args='build --branch BRANCH1 --revision 
REV1 --props=PROP1=VALUE1 {} REASON' .format(self.BUILDER_NAMES[0])) @defer.inlineCallbacks def test_handleMessage_short_command(self): self.contact.command_TESTY = mock.Mock() yield self.contact.handleMessage('testy') self.contact.command_TESTY.assert_called_with('') @defer.inlineCallbacks def test_handleMessage_long_command(self): self.contact.command_TESTY = mock.Mock() yield self.contact.handleMessage('testy westy boo') self.contact.command_TESTY.assert_called_with('westy boo') @defer.inlineCallbacks def test_handleMessage_excited(self): self.patch_send() yield self.contact.handleMessage('hi!') self.assertEqual(len(self.sent), 1) # who cares what it says.. @defer.inlineCallbacks def test_handleMessage_exception(self): self.patch_send() def command_TESTY(msg): raise RuntimeError("FAIL") self.contact.command_TESTY = command_TESTY yield self.contact.handleMessage('testy boom') self.assertEqual(self.sent, ["Something bad happened (see logs)"]) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_handleMessage_UsageError(self): self.patch_send() def command_TESTY(msg): raise words.UsageError("oh noes") self.contact.command_TESTY = command_TESTY yield self.contact.handleMessage('testy boom') self.assertEqual(self.sent, ["oh noes"]) @defer.inlineCallbacks def test_unclosed_quote(self): yield self.do_test_command('list', args='args\'', exp_UsageError=True) yield self.do_test_command('status', args='args\'', exp_UsageError=True) yield self.do_test_command('notify', args='args\'', exp_UsageError=True) yield self.do_test_command('watch', args='args\'', exp_UsageError=True) yield self.do_test_command('force', args='args\'', exp_UsageError=True) yield self.do_test_command('stop', args='args\'', exp_UsageError=True) yield self.do_test_command('last', args='args\'', exp_UsageError=True) yield self.do_test_command('help', args='args\'', exp_UsageError=True) @defer.inlineCallbacks def test_buildStarted(self): self.setupSomeBuilds() 
self.patch_send() build = yield self.master.db.builds.getBuild(13) self.bot.tags = None self.contact.channel.notify_for = lambda _: True self.contact.useRevisions = False self.contact.channel.buildStarted(build) self.assertEqual( self.sent.pop(), "Build [#3](http://localhost:8080/#builders/23/builds/3) of `builder1` started.") def test_getCommandMethod_authz_default(self): self.bot.authz = words.StatusBot.expand_authz(None) meth = self.contact.getCommandMethod('shutdown') self.assertEqual(meth, self.contact.access_denied) authz1 = { 'force': ['me'], 'shutdown': ['notme', 'someone'], ('dance', 'notify'): True, '': False} def test_getCommandMethod_explicit_allow(self): self.bot.authz = words.StatusBot.expand_authz(self.authz1) meth = self.contact.getCommandMethod('force') self.assertNotEqual(meth, self.contact.access_denied) def test_getCommandMethod_explicit_disallow(self): self.bot.authz = words.StatusBot.expand_authz(self.authz1) meth = self.contact.getCommandMethod('shutdown') self.assertEqual(meth, self.contact.access_denied) def test_getCommandMethod_explicit_multi(self): self.bot.authz = words.StatusBot.expand_authz(self.authz1) self.assertIn('DANCE', self.bot.authz) meth = self.contact.getCommandMethod('dance') self.assertNotEqual(meth, self.contact.access_denied) def test_getCommandMethod_explicit_default(self): self.bot.authz = words.StatusBot.expand_authz(self.authz1) meth = self.contact.getCommandMethod('help') self.assertEqual(meth, self.contact.access_denied) authz2 = { 'shutdown': False, '': False, '*': True} def test_getCommandMethod_exclamation(self): self.bot.authz = words.StatusBot.expand_authz(self.authz2) meth = self.contact.getCommandMethod('help') self.assertNotEqual(meth, self.contact.access_denied) def test_getCommandMethod_exclamation_override(self): self.bot.authz = words.StatusBot.expand_authz(self.authz2) meth = self.contact.getCommandMethod('shutdown') self.assertEqual(meth, self.contact.access_denied) def test_access_denied(self): 
self.patch_send() self.contact.access_denied() self.assertIn("not pass", self.sent[0]) @defer.inlineCallbacks def test_bot_loadState(self): boid = yield self.bot._get_object_id() self.master.db.insertTestData([ fakedb.ObjectState(objectid=boid, name='notify_events', value_json='[["#channel1", ["warnings"]]]'), ]) yield self.bot.loadState() self.assertEqual(self.bot.channels['#channel1'].notify_events, {'warnings'}) buildbot-3.4.0/master/buildbot/test/unit/reporters/test_zulip.py000066400000000000000000000151401413250514000251540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.reporters.zulip import ZulipStatusPush from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.logging import LoggingMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestZulipStatusPush(unittest.TestCase, ReporterTestMixin, LoggingMixin, ConfigErrorsMixin, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.setup_reporter_test() self.master = fakemaster.make_master( testcase=self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def tearDown(self): if self.master.running: yield self.master.stopService() @defer.inlineCallbacks def setupZulipStatusPush(self, endpoint="http://example.com", token="123", stream=None): self.sp = ZulipStatusPush( endpoint=endpoint, token=token, stream=stream) self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, endpoint, debug=None, verify=None) yield self.sp.setServiceParent(self.master) yield self.master.startService() @defer.inlineCallbacks def test_build_started(self): yield self.setupZulipStatusPush(stream="xyz") build = yield self.insert_build_new() self._http.expect( 'post', '/api/v1/external/buildbot?api_key=123&stream=xyz', json={ "event": 'new', "buildid": 20, "buildername": "Builder0", "url": "http://localhost:8080/#builders/79/builds/0", "project": "testProject", "timestamp": 10000001 }) yield self.sp._got_event(('builds', 20, 'new'), build) @defer.inlineCallbacks def test_build_finished(self): yield self.setupZulipStatusPush(stream="xyz") build = yield self.insert_build_finished() self._http.expect( 'post', '/api/v1/external/buildbot?api_key=123&stream=xyz', json={ "event": "finished", "buildid": 20, "buildername": "Builder0", "url": 
"http://localhost:8080/#builders/79/builds/0", "project": "testProject", "timestamp": 10000005, "results": 0 }) yield self.sp._got_event(('builds', 20, 'finished'), build) @defer.inlineCallbacks def test_stream_none(self): yield self.setupZulipStatusPush(stream=None) build = yield self.insert_build_finished() self._http.expect( 'post', '/api/v1/external/buildbot?api_key=123', json={ "event": "finished", "buildid": 20, "buildername": "Builder0", "url": "http://localhost:8080/#builders/79/builds/0", "project": "testProject", "timestamp": 10000005, "results": 0 }) yield self.sp._got_event(('builds', 20, 'finished'), build) def test_endpoint_string(self): with self.assertRaisesConfigError( "Endpoint must be a string"): ZulipStatusPush(endpoint=1234, token="abcd") def test_token_string(self): with self.assertRaisesConfigError( "Token must be a string"): ZulipStatusPush(endpoint="http://example.com", token=1234) @defer.inlineCallbacks def test_invalid_json_data(self): yield self.setupZulipStatusPush(stream="xyz") build = yield self.insert_build_new() self._http.expect( 'post', '/api/v1/external/buildbot?api_key=123&stream=xyz', json={ "event": 'new', "buildid": 20, "buildername": "Builder0", "url": "http://localhost:8080/#builders/79/builds/0", "project": "testProject", "timestamp": 10000001 }, code=500) self.setUpLogging() yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged('500: Error pushing build status to Zulip') @defer.inlineCallbacks def test_invalid_url(self): yield self.setupZulipStatusPush(stream="xyz") build = yield self.insert_build_new() self._http.expect( 'post', '/api/v1/external/buildbot?api_key=123&stream=xyz', json={ "event": 'new', "buildid": 20, "buildername": "Builder0", "url": "http://localhost:8080/#builders/79/builds/0", "project": "testProject", "timestamp": 10000001 }, code=404) self.setUpLogging() yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged('404: Error pushing build status to Zulip') 
@defer.inlineCallbacks def test_invalid_token(self): yield self.setupZulipStatusPush(stream="xyz") build = yield self.insert_build_new() self._http.expect( 'post', '/api/v1/external/buildbot?api_key=123&stream=xyz', json={ "event": 'new', "buildid": 20, "buildername": "Builder0", "url": "http://localhost:8080/#builders/79/builds/0", "project": "testProject", "timestamp": 10000001 }, code=401, content_json={"result": "error", "msg": "Invalid API key", "code": "INVALID_API_KEY"}) self.setUpLogging() yield self.sp._got_event(('builds', 20, 'new'), build) self.assertLogged('401: Error pushing build status to Zulip') buildbot-3.4.0/master/buildbot/test/unit/schedulers/000077500000000000000000000000001413250514000225135ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/schedulers/__init__.py000066400000000000000000000000001413250514000246120ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_base.py000066400000000000000000000731421413250514000250450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import task from twisted.trial import unittest from buildbot import config from buildbot.changes import changes from buildbot.process import properties from buildbot.process.properties import Interpolate from buildbot.schedulers import base from buildbot.test import fakedb from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class BaseScheduler(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 19 SCHEDULERID = 9 exp_bsid_brids = (123, {'b': 456}) def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, name='testsched', builderNames=None, properties=None, codebases=None): if builderNames is None: builderNames = ['a', 'b'] if properties is None: properties = {} if codebases is None: codebases = {'': {}} if isinstance(builderNames, list): dbBuilder = list() builderid = 0 for builderName in builderNames: builderid += 1 dbBuilder.append(fakedb.Builder(id=builderid, name=builderName)) self.master.db.insertTestData(dbBuilder) sched = self.attachScheduler( base.BaseScheduler(name=name, builderNames=builderNames, properties=properties, codebases=codebases), self.OBJECTID, self.SCHEDULERID) self.master.data.updates.addBuildset = mock.Mock( name='data.addBuildset', side_effect=lambda *args, **kwargs: defer.succeed(self.exp_bsid_brids)) return sched # tests def test_constructor_builderNames(self): with self.assertRaises(config.ConfigErrors): self.makeScheduler(builderNames='xxx') def test_constructor_builderNames_unicode(self): self.makeScheduler(builderNames=['a']) def test_constructor_builderNames_renderable(self): @properties.renderer def names(props): return ['a'] self.makeScheduler(builderNames=names) def test_constructor_codebases_valid(self): codebases = {"codebase1": {"repository": "", "branch": "", "revision": ""}} 
self.makeScheduler(codebases=codebases) def test_constructor_codebases_valid_list(self): codebases = ['codebase1'] self.makeScheduler(codebases=codebases) def test_constructor_codebases_invalid(self): # scheduler only accepts codebases with at least repository set codebases = {"codebase1": {"dictionary": "", "that": "", "fails": ""}} with self.assertRaises(config.ConfigErrors): self.makeScheduler(codebases=codebases) @defer.inlineCallbacks def test_getCodebaseDict(self): sched = self.makeScheduler( codebases={'lib': {'repository': 'librepo'}}) cbd = yield sched.getCodebaseDict('lib') self.assertEqual(cbd, {'repository': 'librepo'}) @defer.inlineCallbacks def test_getCodebaseDict_constructedFromList(self): sched = self.makeScheduler(codebases=['lib', 'lib2']) cbd = yield sched.getCodebaseDict('lib') self.assertEqual(cbd, {}) def test_getCodebaseDict_not_found(self): sched = self.makeScheduler( codebases={'lib': {'repository': 'librepo'}}) return self.assertFailure(sched.getCodebaseDict('app'), KeyError) def test_listBuilderNames(self): sched = self.makeScheduler(builderNames=['x', 'y']) self.assertEqual(sched.listBuilderNames(), ['x', 'y']) @defer.inlineCallbacks def test_startConsumingChanges_fileIsImportant_check(self): sched = self.makeScheduler() try: yield sched.startConsumingChanges(fileIsImportant="maybe") except AssertionError: pass else: self.fail("didn't assert") @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler() expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def do_test_change_consumption(self, kwargs, expected_result): # (expected_result should be True (important), False (unimportant), or # None (ignore the change)) sched = self.makeScheduler() 
sched.startService() self.addCleanup(sched.stopService) # set up a change message, a changedict, a change, and convince # getChange and fromChdict to convert one to the other msg = dict(changeid=12934) chdict = dict(changeid=12934, is_chdict=True) def getChange(changeid): assert changeid == 12934 return defer.succeed(chdict) self.db.changes.getChange = getChange change = self.makeFakeChange() change.number = 12934 def fromChdict(cls, master, chdict): assert chdict['changeid'] == 12934 and chdict['is_chdict'] return defer.succeed(change) self.patch(changes.Change, 'fromChdict', classmethod(fromChdict)) change_received = [None] def gotChange(got_change, got_important): # check that we got the expected change object self.assertIdentical(got_change, change) change_received[0] = got_important return defer.succeed(None) sched.gotChange = gotChange yield sched.startConsumingChanges(**kwargs) # check that it registered callbacks self.assertEqual(len(self.mq.qrefs), 2) qref = self.mq.qrefs[1] self.assertEqual(qref.filter, ('changes', None, 'new')) # invoke the callback with the change, and check the result qref.callback('change.12934.new', msg) self.assertEqual(change_received[0], expected_result) def test_change_consumption_defaults(self): # all changes are important by default return self.do_test_change_consumption( dict(), True) def test_change_consumption_fileIsImportant_True(self): return self.do_test_change_consumption( dict(fileIsImportant=lambda c: True), True) def test_change_consumption_fileIsImportant_False(self): return self.do_test_change_consumption( dict(fileIsImportant=lambda c: False), False) @defer.inlineCallbacks def test_change_consumption_fileIsImportant_exception(self): yield self.do_test_change_consumption( dict(fileIsImportant=lambda c: 1 / 0), None) self.assertEqual(1, len(self.flushLoggedErrors(ZeroDivisionError))) def test_change_consumption_change_filter_True(self): cf = mock.Mock() cf.filter_change = lambda c: True return 
self.do_test_change_consumption( dict(change_filter=cf), True) def test_change_consumption_change_filter_False(self): cf = mock.Mock() cf.filter_change = lambda c: False return self.do_test_change_consumption( dict(change_filter=cf), None) def test_change_consumption_fileIsImportant_False_onlyImportant(self): return self.do_test_change_consumption( dict(fileIsImportant=lambda c: False, onlyImportant=True), None) def test_change_consumption_fileIsImportant_True_onlyImportant(self): return self.do_test_change_consumption( dict(fileIsImportant=lambda c: True, onlyImportant=True), True) @defer.inlineCallbacks def test_activation(self): sched = self.makeScheduler(name='n', builderNames=['a']) sched.clock = task.Clock() sched.activate = mock.Mock(return_value=defer.succeed(None)) sched.deactivate = mock.Mock(return_value=defer.succeed(None)) # set the schedulerid, and claim the scheduler on another master yield self.setSchedulerToMaster(self.OTHER_MASTER_ID) yield sched.startService() sched.clock.advance(sched.POLL_INTERVAL_SEC / 2) sched.clock.advance(sched.POLL_INTERVAL_SEC / 5) sched.clock.advance(sched.POLL_INTERVAL_SEC / 5) self.assertFalse(sched.activate.called) self.assertFalse(sched.deactivate.called) self.assertFalse(sched.isActive()) # objectid is attached by the test helper self.assertEqual(sched.serviceid, self.SCHEDULERID) # clear that masterid yield sched.stopService() self.setSchedulerToMaster(None) yield sched.startService() sched.clock.advance(sched.POLL_INTERVAL_SEC) self.assertTrue(sched.activate.called) self.assertFalse(sched.deactivate.called) self.assertTrue(sched.isActive()) # stop the service and see that deactivate is called yield sched.stopService() self.assertTrue(sched.activate.called) self.assertTrue(sched.deactivate.called) self.assertFalse(sched.isActive()) def test_activation_claim_raises(self): sched = self.makeScheduler(name='n', builderNames=['a']) sched.clock = task.Clock() # set the schedulerid, and claim the scheduler on another 
master self.setSchedulerToMaster(RuntimeError()) sched.startService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) self.assertFalse(sched.isActive()) def test_activation_activate_fails(self): sched = self.makeScheduler(name='n', builderNames=['a']) sched.clock = task.Clock() def activate(): raise RuntimeError('oh noes') sched.activate = activate sched.startService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) @defer.inlineCallbacks def do_addBuildsetForSourceStampsWithDefaults(self, codebases, sourcestamps, exp_sourcestamps): sched = self.makeScheduler(name='n', builderNames=['b'], codebases=codebases) bsid, brids = yield sched.addBuildsetForSourceStampsWithDefaults( reason='power', sourcestamps=sourcestamps, waited_for=False) self.assertEqual((bsid, brids), self.exp_bsid_brids) call = self.master.data.updates.addBuildset.mock_calls[0] def sourceStampKey(sourceStamp): repository = sourceStamp.get('repository', '') if repository is None: repository = '' branch = sourceStamp.get('branch', '') if not None else '' if branch is None: branch = '' return (repository, branch) self.assertEqual(sorted(call[2]['sourcestamps'], key=sourceStampKey), sorted(exp_sourcestamps, key=sourceStampKey)) def test_addBuildsetForSourceStampsWithDefaults(self): codebases = { 'cbA': dict(repository='svn://A..', branch='stable', revision='13579'), 'cbB': dict(repository='svn://B..', branch='stable', revision='24680') } sourcestamps = [ {'codebase': 'cbA', 'branch': 'AA'}, {'codebase': 'cbB', 'revision': 'BB'}, ] exp_sourcestamps = [ {'repository': 'svn://B..', 'branch': 'stable', 'revision': 'BB', 'codebase': 'cbB', 'project': ''}, {'repository': 'svn://A..', 'branch': 'AA', 'project': '', 'revision': '13579', 'codebase': 'cbA'}, ] return self.do_addBuildsetForSourceStampsWithDefaults( codebases, sourcestamps, exp_sourcestamps) def test_addBuildsetForSourceStampsWithDefaults_fill_in_codebases(self): codebases = { 'cbA': dict(repository='svn://A..', 
branch='stable', revision='13579'), 'cbB': dict(repository='svn://B..', branch='stable', revision='24680') } sourcestamps = [ {'codebase': 'cbA', 'branch': 'AA'}, ] exp_sourcestamps = [ {'repository': 'svn://B..', 'branch': 'stable', 'revision': '24680', 'codebase': 'cbB', 'project': ''}, {'repository': 'svn://A..', 'branch': 'AA', 'project': '', 'revision': '13579', 'codebase': 'cbA'}, ] return self.do_addBuildsetForSourceStampsWithDefaults( codebases, sourcestamps, exp_sourcestamps) def test_addBuildsetForSourceStampsWithDefaults_no_repository(self): exp_sourcestamps = [ {'repository': '', 'branch': None, 'revision': None, 'codebase': '', 'project': ''}, ] return self.do_addBuildsetForSourceStampsWithDefaults( {'': {}}, [], exp_sourcestamps) def test_addBuildsetForSourceStamps_unknown_codbases(self): codebases = {} sourcestamps = [ {'codebase': 'cbA', 'branch': 'AA'}, {'codebase': 'cbB', 'revision': 'BB'}, ] exp_sourcestamps = [ {'branch': None, 'revision': 'BB', 'codebase': 'cbB', 'project': '', 'repository': ''}, {'branch': 'AA', 'revision': None, 'codebase': 'cbA', 'project': '', 'repository': ''}, ] return self.do_addBuildsetForSourceStampsWithDefaults( codebases, sourcestamps, exp_sourcestamps) @defer.inlineCallbacks def test_addBuildsetForChanges_one_change(self): sched = self.makeScheduler(name='n', builderNames=['b']) self.db.insertTestData([ fakedb.Change(changeid=13, sourcestampid=234), ]) bsid, brids = yield sched.addBuildsetForChanges(reason='power', waited_for=False, changeids=[13]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler'), }, reason='power', scheduler='n', sourcestamps=[234]) @defer.inlineCallbacks def test_addBuildsetForChanges_properties(self): sched = self.makeScheduler(name='n', builderNames=['c']) self.db.insertTestData([ fakedb.Change(changeid=14, sourcestampid=234), ]) 
bsid, brids = yield sched.addBuildsetForChanges(reason='downstream', waited_for=False, changeids=[14]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler'), }, reason='downstream', scheduler='n', sourcestamps=[234]) @defer.inlineCallbacks def test_addBuildsetForChanges_properties_with_virtual_builders(self): sched = self.makeScheduler(name='n', builderNames=['c'], properties={ 'virtual_builder_name': Interpolate("myproject-%(src::branch)s") }) self.db.insertTestData([ fakedb.SourceStamp(id=234, branch='dev1', project="linux"), fakedb.Change(changeid=14, sourcestampid=234, branch="dev1"), ]) bsid, brids = yield sched.addBuildsetForChanges(reason='downstream', waited_for=False, changeids=[14]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1], external_idstring=None, properties={ 'virtual_builder_name': ("myproject-dev1", "Scheduler"), 'scheduler': ('n', 'Scheduler'), }, reason='downstream', scheduler='n', sourcestamps=[234]) @defer.inlineCallbacks def test_addBuildsetForChanges_multiple_changes_same_codebase(self): # This is a test for backwards compatibility # Changes from different repositories come together in one build sched = self.makeScheduler(name='n', builderNames=['b', 'c'], codebases={'cb': {'repository': 'http://repo'}}) # No codebaseGenerator means all changes have codebase == '' self.db.insertTestData([ fakedb.Change(changeid=13, codebase='cb', sourcestampid=12), fakedb.Change(changeid=14, codebase='cb', sourcestampid=11), fakedb.Change(changeid=15, codebase='cb', sourcestampid=10), ]) # note that the changeids are given out of order here; it should still # use the most recent bsid, brids = yield sched.addBuildsetForChanges(reason='power', waited_for=False, changeids=[14, 15, 13]) self.assertEqual((bsid, 
brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1, 2], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler'), }, reason='power', scheduler='n', sourcestamps=[10]) # sourcestampid from greatest changeid @defer.inlineCallbacks def test_addBuildsetForChanges_codebases_set_multiple_codebases(self): codebases = {'cbA': dict(repository='svn://A..', branch='stable', revision='13579'), 'cbB': dict( repository='svn://B..', branch='stable', revision='24680'), 'cbC': dict( repository='svn://C..', branch='stable', revision='12345'), 'cbD': dict( repository='svn://D..')} # Scheduler gets codebases that can be used to create extra sourcestamps # for repositories that have no changes sched = self.makeScheduler(name='n', builderNames=['b', 'c'], codebases=codebases) self.db.insertTestData([ fakedb.Change(changeid=12, codebase='cbA', sourcestampid=912), fakedb.Change(changeid=13, codebase='cbA', sourcestampid=913), fakedb.Change(changeid=14, codebase='cbA', sourcestampid=914), fakedb.Change(changeid=15, codebase='cbB', sourcestampid=915), fakedb.Change(changeid=16, codebase='cbB', sourcestampid=916), fakedb.Change(changeid=17, codebase='cbB', sourcestampid=917), # note: no changes for cbC or cbD ]) # note that the changeids are given out of order here; it should still # use the most recent for each codebase bsid, brids = yield sched.addBuildsetForChanges(reason='power', waited_for=True, changeids=[14, 12, 17, 16, 13, 15]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=True, builderids=[1, 2], external_idstring=None, reason='power', scheduler='n', properties={ 'scheduler': ('n', 'Scheduler'), }, sourcestamps=[914, 917, dict(branch='stable', repository='svn://C..', codebase='cbC', project='', revision='12345'), dict(branch=None, repository='svn://D..', codebase='cbD', project='', revision=None) ] ) @defer.inlineCallbacks def 
test_addBuildsetForSourceStamp(self): sched = self.makeScheduler(name='n', builderNames=['b']) sourcestamps = [91, {'sourcestamp': True}] bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=False, sourcestamps=sourcestamps) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1], external_idstring=None, reason='whynot', scheduler='n', properties={ 'scheduler': ('n', 'Scheduler'), }, sourcestamps=[91, {'sourcestamp': True}]) @defer.inlineCallbacks def test_addBuildsetForSourceStamp_explicit_builderNames(self): sched = self.makeScheduler(name='n', builderNames=['b', 'x', 'y']) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=True, sourcestamps=[ 91, {'sourcestamp': True}], builderNames=['x', 'y']) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=True, builderids=[2, 3], external_idstring=None, reason='whynot', scheduler='n', properties={ 'scheduler': ('n', 'Scheduler'), }, sourcestamps=[91, {'sourcestamp': True}]) @defer.inlineCallbacks def test_addBuildsetForSourceStamp_properties(self): props = properties.Properties(xxx="yyy") sched = self.makeScheduler(name='n', builderNames=['b']) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=False, sourcestamps=[91], properties=props) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1], external_idstring=None, properties={ 'xxx': ('yyy', 'TEST'), 'scheduler': ('n', 'Scheduler')}, reason='whynot', scheduler='n', sourcestamps=[91]) @defer.inlineCallbacks def test_addBuildsetForSourceStamp_combine_change_properties(self): sched = self.makeScheduler() self.master.db.insertTestData([ fakedb.SourceStamp(id=98, branch='stable'), fakedb.Change(changeid=25, sourcestampid=98, branch='stable'), 
fakedb.ChangeProperty(changeid=25, property_name='color', property_value='["pink","Change"]'), ]) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=False, sourcestamps=[98]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1, 2], external_idstring=None, properties={ 'scheduler': ('testsched', 'Scheduler'), 'color': ('pink', 'Change')}, reason='whynot', scheduler='testsched', sourcestamps=[98]) @defer.inlineCallbacks def test_addBuildsetForSourceStamp_renderable_builderNames(self): @properties.renderer def names(props): if props.changes[0]['branch'] == 'stable': return ['c'] elif props.changes[0]['branch'] == 'unstable': return ['a', 'b'] return None sched = self.makeScheduler(name='n', builderNames=names) self.master.db.insertTestData([ fakedb.Builder(id=1, name='a'), fakedb.Builder(id=2, name='b'), fakedb.Builder(id=3, name='c'), fakedb.SourceStamp(id=98, branch='stable'), fakedb.SourceStamp(id=99, branch='unstable'), fakedb.Change(changeid=25, sourcestampid=98, branch='stable'), fakedb.Change(changeid=26, sourcestampid=99, branch='unstable'), ]) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=False, sourcestamps=[98]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[3], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler')}, reason='whynot', scheduler='n', sourcestamps=[98]) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='because', waited_for=False, sourcestamps=[99]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1, 2], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler')}, reason='because', scheduler='n', sourcestamps=[99]) @defer.inlineCallbacks def 
test_addBuildsetForSourceStamp_list_of_renderable_builderNames(self): names = ['a', 'b', properties.Interpolate('%(prop:extra_builder)s')] sched = self.makeScheduler(name='n', builderNames=names) self.master.db.insertTestData([ fakedb.Builder(id=1, name='a'), fakedb.Builder(id=2, name='b'), fakedb.Builder(id=3, name='c'), fakedb.SourceStamp(id=98, branch='stable'), fakedb.Change(changeid=25, sourcestampid=98, branch='stable'), fakedb.ChangeProperty(changeid=25, property_name='extra_builder', property_value='["c","Change"]'), ]) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=False, sourcestamps=[98]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1, 2, 3], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler'), 'extra_builder': ('c', 'Change')}, reason='whynot', scheduler='n', sourcestamps=[98]) def test_signature_addBuildsetForChanges(self): sched = self.makeScheduler(builderNames=['xxx']) @self.assertArgSpecMatches( sched.addBuildsetForChanges, # Real self.fake_addBuildsetForChanges, # Real ) def addBuildsetForChanges(self, waited_for=False, reason='', external_idstring=None, changeids=None, builderNames=None, properties=None, **kw): pass def test_signature_addBuildsetForSourceStamps(self): sched = self.makeScheduler(builderNames=['xxx']) @self.assertArgSpecMatches( sched.addBuildsetForSourceStamps, # Real self.fake_addBuildsetForSourceStamps, # Fake ) def addBuildsetForSourceStamps(self, waited_for=False, sourcestamps=None, reason='', external_idstring=None, properties=None, builderNames=None, **kw): pass def test_signature_addBuildsetForSourceStampsWithDefaults(self): sched = self.makeScheduler(builderNames=['xxx']) @self.assertArgSpecMatches( sched.addBuildsetForSourceStampsWithDefaults, # Real self.fake_addBuildsetForSourceStampsWithDefaults, # Fake ) def addBuildsetForSourceStampsWithDefaults(self, reason, 
sourcestamps=None, waited_for=False, properties=None, builderNames=None, **kw): pass buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_basic.py000066400000000000000000000574171413250514000252230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import task from twisted.trial import unittest from buildbot import config from buildbot.schedulers import basic from buildbot.test import fakedb from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class CommonStuffMixin: def makeScheduler(self, klass, **kwargs_override): kwargs = dict(name="tsched", treeStableTimer=60, builderNames=['tbuild']) kwargs.update(kwargs_override) self.master.db.insertTestData( [fakedb.Builder(name=builderName) for builderName in kwargs['builderNames']]) sched = self.attachScheduler( klass(**kwargs), self.OBJECTID, self.SCHEDULERID) # add a Clock to help checking timing issues self.clock = sched._reactor = task.Clock() # keep track of builds in self.events self.events = [] @self.assertArgSpecMatches(sched.addBuildsetForChanges) def addBuildsetForChanges( waited_for=False, reason='', external_idstring=None, changeids=None, builderNames=None, properties=None, **kw): self.assertEqual(external_idstring, None) 
self.assertEqual(reason, sched.reason) self.events.append('B{}@{}'.format(repr(changeids).replace(' ', ''), int(self.clock.seconds()))) return defer.succeed(None) sched.addBuildsetForChanges = addBuildsetForChanges # see self.assertConsumingChanges self.consumingChanges = None def startConsumingChanges(**kwargs): self.consumingChanges = kwargs return defer.succeed(None) sched.startConsumingChanges = startConsumingChanges return sched def assertConsumingChanges(self, **kwargs): self.assertEqual(self.consumingChanges, kwargs) class BaseBasicScheduler(CommonStuffMixin, scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 244 SCHEDULERID = 4 # a custom subclass since we're testing the base class. This basically # re-implements SingleBranchScheduler, but with more asserts class Subclass(basic.BaseBasicScheduler): timer_started = False def getChangeFilter(self, *args, **kwargs): return kwargs.get('change_filter') def getTimerNameForChange(self, change): self.timer_started = True return "xxx" def getChangeClassificationsForTimer(self, schedulerid, timer_name): assert timer_name == "xxx" assert schedulerid == BaseBasicScheduler.SCHEDULERID return self.master.db.schedulers.getChangeClassifications(schedulerid) def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() # tests def test_constructor_positional_exception(self): with self.assertRaises(config.ConfigErrors): self.Subclass("tsched", "master", 60) @defer.inlineCallbacks def test_activate_no_treeStableTimer(self): cf = mock.Mock('cf') fII = mock.Mock('fII') sched = self.makeScheduler(self.Subclass, treeStableTimer=None, change_filter=cf, fileIsImportant=fII) self.db.schedulers.fakeClassifications(self.SCHEDULERID, {20: True}) yield sched.activate() # check that the scheduler has started to consume changes, and the # classifications *have* been flushed, since they will not be used self.assertConsumingChanges(fileIsImportant=fII, change_filter=cf, 
onlyImportant=False) self.db.schedulers.assertClassifications(self.SCHEDULERID, {}) yield sched.deactivate() def test_subclass_fileIsImportant(self): class Subclass(self.Subclass): def fileIsImportant(self, change): return False sched = self.makeScheduler(Subclass, onlyImportant=True) self.assertEqual( Subclass.fileIsImportant.__get__(sched), sched.fileIsImportant) @defer.inlineCallbacks def test_activate_treeStableTimer(self): cf = mock.Mock() sched = self.makeScheduler( self.Subclass, treeStableTimer=10, change_filter=cf) self.db.schedulers.fakeClassifications(self.SCHEDULERID, {20: True}) self.master.db.insertTestData([ fakedb.Change(changeid=20), fakedb.SchedulerChange(schedulerid=self.SCHEDULERID, changeid=20, important=1) ]) yield sched.activate() # check that the scheduler has started to consume changes, and no # classifications have been flushed. Furthermore, the existing # classification should have been acted on, so the timer should be # running self.assertConsumingChanges(fileIsImportant=None, change_filter=cf, onlyImportant=False) self.db.schedulers.assertClassifications( self.SCHEDULERID, {20: True}) self.assertTrue(sched.timer_started) self.clock.advance(10) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_no_treeStableTimer_unimportant(self): sched = self.makeScheduler( self.Subclass, treeStableTimer=None, branch='master') sched.activate() yield sched.gotChange( self.makeFakeChange(branch='master', number=13), False) self.assertEqual(self.events, []) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_no_treeStableTimer_important(self): sched = self.makeScheduler( self.Subclass, treeStableTimer=None, branch='master') sched.activate() yield sched.gotChange( self.makeFakeChange(branch='master', number=13), True) self.assertEqual(self.events, ['B[13]@0']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_unimportant(self): sched = self.makeScheduler( self.Subclass, treeStableTimer=10, 
branch='master') sched.activate() yield sched.gotChange( self.makeFakeChange(branch='master', number=13), False) self.assertEqual(self.events, []) self.clock.advance(10) self.assertEqual(self.events, []) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_important(self): sched = self.makeScheduler( self.Subclass, treeStableTimer=10, branch='master') sched.activate() yield sched.gotChange( self.makeFakeChange(branch='master', number=13), True) self.clock.advance(10) self.assertEqual(self.events, ['B[13]@10']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_sequence(self): sched = self.makeScheduler( self.Subclass, treeStableTimer=9, branch='master') self.master.db.insertTestData([ fakedb.Change(changeid=1, branch='master', when_timestamp=1110), fakedb.ChangeFile(changeid=1, filename='readme.txt'), fakedb.Change(changeid=2, branch='master', when_timestamp=2220), fakedb.ChangeFile(changeid=2, filename='readme.txt'), fakedb.Change(changeid=3, branch='master', when_timestamp=3330), fakedb.ChangeFile(changeid=3, filename='readme.txt'), fakedb.Change(changeid=4, branch='master', when_timestamp=4440), fakedb.ChangeFile(changeid=4, filename='readme.txt'), ]) sched.activate() self.clock.advance(2220) # this important change arrives at 2220, so the stable timer will last # until 2229 yield sched.gotChange( self.makeFakeChange(branch='master', number=1, when=2220), True) self.assertEqual(self.events, []) self.db.schedulers.assertClassifications(self.SCHEDULERID, {1: True}) # but another (unimportant) change arrives before then self.clock.advance(6) # to 2226 self.assertEqual(self.events, []) yield sched.gotChange( self.makeFakeChange(branch='master', number=2, when=2226), False) self.assertEqual(self.events, []) self.db.schedulers.assertClassifications( self.SCHEDULERID, {1: True, 2: False}) self.clock.advance(3) # to 2229 self.assertEqual(self.events, []) self.clock.advance(3) # to 2232 
self.assertEqual(self.events, []) # another important change arrives at 2232 yield sched.gotChange( self.makeFakeChange(branch='master', number=3, when=2232), True) self.assertEqual(self.events, []) self.db.schedulers.assertClassifications( self.SCHEDULERID, {1: True, 2: False, 3: True}) self.clock.advance(3) # to 2235 self.assertEqual(self.events, []) # finally, time to start the build! self.clock.advance(6) # to 2241 self.assertEqual(self.events, ['B[1,2,3]@2241']) self.db.schedulers.assertClassifications(self.SCHEDULERID, {}) yield sched.deactivate() @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler(self.Subclass) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def test_disabled_activate(self): sched = self.makeScheduler(self.Subclass) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.activate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_deactivate(self): sched = self.makeScheduler(self.Subclass) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.deactivate() self.assertEqual(r, None) class SingleBranchScheduler(CommonStuffMixin, scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): SCHEDULERID = 245 OBJECTID = 224455 codebases = {'a': {'repository': "", 'branch': 'master'}, 'b': {'repository': "", 'branch': 'master'}} def makeFullScheduler(self, **kwargs): self.master.db.insertTestData( [fakedb.Builder(name=builderName) for builderName in kwargs['builderNames']]) sched = self.attachScheduler(basic.SingleBranchScheduler(**kwargs), self.OBJECTID, self.SCHEDULERID, 
overrideBuildsetMethods=True) # add a Clock to help checking timing issues self.clock = sched._reactor = task.Clock() return sched def mkbs(self, **kwargs): # create buildset for expected_buildset in assertBuildset. bs = dict(reason=self.sched.reason, external_idstring=None, sourcestampsetid=100, properties=[('scheduler', ('test', 'Scheduler'))]) bs.update(kwargs) return bs def mkss(self, **kwargs): # create sourcestamp for expected_sourcestamps in assertBuildset. ss = dict( branch='master', project='', repository='', sourcestampsetid=100) ss.update(kwargs) return ss def mkch(self, **kwargs): # create changeset and insert in database. chd = dict(branch='master', project='', repository='') chd.update(kwargs) ch = self.makeFakeChange(**chd) # fakedb.Change requires changeid instead of number chd['changeid'] = chd['number'] del chd['number'] self.db.insertTestData([fakedb.Change(**chd)]) return ch def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def test_constructor_no_reason(self): sched = self.makeScheduler( basic.SingleBranchScheduler, branch="master") self.assertEqual( sched.reason, "The SingleBranchScheduler scheduler named 'tsched' triggered this build") def test_constructor_reason(self): sched = self.makeScheduler( basic.SingleBranchScheduler, branch="master", reason="Changeset") self.assertEqual(sched.reason, "Changeset") def test_constructor_branch_mandatory(self): with self.assertRaises(config.ConfigErrors): basic.SingleBranchScheduler(name="tsched", treeStableTimer=60) def test_constructor_no_branch_but_filter(self): # this shouldn't fail basic.SingleBranchScheduler(name="tsched", treeStableTimer=60, builderNames=['a', 'b'], change_filter=mock.Mock()) def test_constructor_branches_forbidden(self): with self.assertRaises(config.ConfigErrors): basic.SingleBranchScheduler(name="tsched", treeStableTimer=60, branches='x') @defer.inlineCallbacks def test_gotChange_treeStableTimer_important(self): # this 
looks suspiciously like the same test above, because SingleBranchScheduler # is about the same as the test subclass used above sched = self.makeScheduler(basic.SingleBranchScheduler, treeStableTimer=10, branch='master') sched.activate() yield sched.gotChange( self.makeFakeChange(branch='master', number=13), True) self.clock.advance(10) self.assertEqual(self.events, ['B[13]@10']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_createAbsoluteSourceStamps_saveCodebase(self): # check codebase is stored after receiving change. sched = self.makeFullScheduler(name='test', builderNames=['test'], treeStableTimer=None, branch='master', codebases=self.codebases, createAbsoluteSourceStamps=True) self.db.insertTestData([ fakedb.Object(id=self.OBJECTID, name='test', class_name='SingleBranchScheduler')]) yield sched.activate() yield sched.gotChange(self.mkch(codebase='a', revision='1234:abc', repository='A', number=0), True) yield sched.gotChange(self.mkch(codebase='b', revision='2345:bcd', repository='B', number=1), True) self.db.state.assertState(self.OBJECTID, lastCodebases={ 'a': dict(branch='master', repository='A', revision='1234:abc', lastChange=0), 'b': dict(branch='master', repository='B', revision='2345:bcd', lastChange=1)}) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_createAbsoluteSourceStamps_older_change(self): # check codebase is not stored if it's older than the most recent sched = self.makeFullScheduler(name='test', builderNames=['test'], treeStableTimer=None, branch='master', codebases=self.codebases, createAbsoluteSourceStamps=True) self.db.insertTestData([ fakedb.Object(id=self.OBJECTID, name='test', class_name='SingleBranchScheduler'), fakedb.ObjectState(objectid=self.OBJECTID, name='lastCodebases', value_json='{"a": {"branch": "master", "repository": "A", ' '"revision": "5555:def", "lastChange": 20}}')]) yield sched.activate() # this change is not recorded, since it's older than # change 20 yield 
sched.gotChange(self.mkch(codebase='a', revision='1234:abc', repository='A', number=10), True) self.db.state.assertState(self.OBJECTID, lastCodebases={ 'a': dict(branch='master', repository='A', revision='5555:def', lastChange=20)}) yield sched.deactivate() @defer.inlineCallbacks def test_getCodebaseDict(self): sched = self.makeFullScheduler(name='test', builderNames=['test'], treeStableTimer=None, branch='master', codebases=self.codebases, createAbsoluteSourceStamps=True) sched._lastCodebases = {'a': dict(branch='master', repository='A', revision='5555:def', lastChange=20)} cbd = yield sched.getCodebaseDict('a') self.assertEqual(cbd, dict(branch='master', repository='A', revision='5555:def', lastChange=20)) @defer.inlineCallbacks def test_getCodebaseDict_no_createAbsoluteSourceStamps(self): sched = self.makeFullScheduler(name='test', builderNames=['test'], treeStableTimer=None, branch='master', codebases=self.codebases, createAbsoluteSourceStamps=False) sched._lastCodebases = {'a': dict(branch='master', repository='A', revision='5555:def', lastChange=20)} cbd = yield sched.getCodebaseDict('a') # _lastCodebases is ignored self.assertEqual(cbd, {'branch': 'master', 'repository': ''}) class AnyBranchScheduler(CommonStuffMixin, scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): SCHEDULERID = 6 OBJECTID = 246 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def test_constructor_branch_forbidden(self): with self.assertRaises(config.ConfigErrors): basic.SingleBranchScheduler(name="tsched", treeStableTimer=60, branch='x') @defer.inlineCallbacks def test_gotChange_treeStableTimer_multiple_branches(self): """Two changes with different branches get different treeStableTimers""" sched = self.makeScheduler(basic.AnyBranchScheduler, treeStableTimer=10, branches=['master', 'devel', 'boring']) sched.activate() def mkch(**kwargs): ch = self.makeFakeChange(**kwargs) self.db.changes.fakeAddChangeInstance(ch) 
return ch yield sched.gotChange(mkch(branch='master', number=13), True) yield self.clock.advance(1) # time is now 1 yield sched.gotChange(mkch(branch='master', number=14), False) yield sched.gotChange(mkch(branch='boring', number=15), False) yield self.clock.pump([1] * 4) # time is now 5 yield sched.gotChange(mkch(branch='devel', number=16), True) yield self.clock.pump([1] * 10) # time is now 15 self.assertEqual(self.events, ['B[13,14]@11', 'B[16]@15']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_multiple_repositories(self): """Two repositories, even with the same branch name, have different treeStableTimers""" sched = self.makeScheduler(basic.AnyBranchScheduler, treeStableTimer=10, branches=['master']) yield sched.activate() def mkch(**kwargs): ch = self.makeFakeChange(**kwargs) self.db.changes.fakeAddChangeInstance(ch) return ch yield sched.gotChange(mkch(branch='master', repository="repo", number=13), True) yield self.clock.advance(1) # time is now 1 yield sched.gotChange(mkch(branch='master', repository="repo", number=14), False) yield sched.gotChange(mkch(branch='master', repository="other_repo", number=15), False) yield self.clock.pump([1] * 4) # time is now 5 yield sched.gotChange(mkch(branch='master', repository="other_repo", number=17), True) yield self.clock.pump([1] * 10) # time is now 15 self.assertEqual(self.events, ['B[13,14]@11', 'B[15,17]@15']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_multiple_projects(self): """Two projects, even with the same branch name, have different treeStableTimers""" sched = self.makeScheduler(basic.AnyBranchScheduler, treeStableTimer=10, branches=['master']) sched.startService() def mkch(**kwargs): ch = self.makeFakeChange(**kwargs) self.db.changes.fakeAddChangeInstance(ch) return ch yield sched.gotChange(mkch(branch='master', project="proj", number=13), True) yield self.clock.advance(1) # time is now 1 yield 
sched.gotChange(mkch(branch='master', project="proj", number=14), False) yield sched.gotChange(mkch(branch='master', project="other_proj", number=15), False) yield self.clock.pump([1] * 4) # time is now 5 yield sched.gotChange(mkch(branch='master', project="other_proj", number=17), True) yield self.clock.pump([1] * 10) # time is now 15 self.assertEqual(self.events, ['B[13,14]@11', 'B[15,17]@15']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_multiple_codebases(self): """Two codebases, even with the same branch name, have different treeStableTimers""" sched = self.makeScheduler(basic.AnyBranchScheduler, treeStableTimer=10, branches=['master']) sched.startService() def mkch(**kwargs): ch = self.makeFakeChange(**kwargs) self.db.changes.fakeAddChangeInstance(ch) return ch yield sched.gotChange(mkch(branch='master', codebase="base", number=13), True) self.clock.advance(1) # time is now 1 yield sched.gotChange(mkch(branch='master', codebase="base", number=14), False) yield sched.gotChange(mkch(branch='master', codebase="other_base", number=15), False) self.clock.pump([1] * 4) # time is now 5 yield sched.gotChange(mkch(branch='master', codebase="other_base", number=17), True) self.clock.pump([1] * 10) # time is now 15 self.assertEqual(self.events, ['B[13,14]@11', 'B[15,17]@15']) yield sched.deactivate() buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_canceller.py000066400000000000000000000507361413250514000260670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from twisted.trial import unittest from buildbot.schedulers.canceller import OldBuildCanceller from buildbot.schedulers.canceller import _OldBuildFilterSet from buildbot.schedulers.canceller import _OldBuildTracker from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.util.ssfilter import SourceStampFilter class TestFilterSet(unittest.TestCase): def test_empty_filter(self): filter = _OldBuildFilterSet() self.assertFalse(filter.is_matched('builder', {'prop': 'value'})) @parameterized.expand([ ('other_builder', 'builder2', {'project': 'p', 'repository': 'r'}, False), ('nothing', 'builder1', {'project': 'value_other', 'repository': 'value_other'}, False), ('single1', 'builder1', {'project': 'p', 'repository': 'value_other'}, True), ('single2', 'builder1', {'project': 'value_other', 'repository': 'r'}, True), ('all', 'builder1', {'project': 'p', 'repository': 'r'}, True), ]) def test_multiple_filters_on_builder(self, name, builder, props, expected): filter = _OldBuildFilterSet() filter.add_filter(['builder1'], SourceStampFilter(project_eq='p')) filter.add_filter(['builder1'], SourceStampFilter(repository_eq='r')) self.assertEqual(filter.is_matched(builder, props), expected) class TestOldBuildTracker(unittest.TestCase): def setUp(self): filter = _OldBuildFilterSet() ss_filter = SourceStampFilter(codebase_eq=['cb1', 'cb2'], repository_eq=['rp1', 'rp2'], branch_eq=['br1', 'br2']) filter.add_filter(['bldr1', 'bldr2'], ss_filter) self.cancellations = [] self.tracker = _OldBuildTracker(filter, self.on_cancel) def 
on_cancel(self, id_tuple): is_build, id = id_tuple self.cancellations.append(('build' if is_build else 'breq', id)) def assert_cancelled(self, cancellations): self.assertEqual(self.cancellations, cancellations) self.cancellations = [] def create_ss_dict(self, project, codebase, repository, branch): # Changes have the same structure for the attributes that we're using, so we reuse this # function for changes. return { 'project': project, 'codebase': codebase, 'repository': repository, 'branch': branch, } def test_unknown_branch_not_tracked(self): ss_dicts = [self.create_ss_dict('pr1', 'cb1', 'rp1', None)] self.tracker.on_new_build(1, 'bldr1', ss_dicts) self.assertFalse(self.tracker.is_build_tracked(1)) self.tracker.on_new_buildrequest(10, 'bldr1', ss_dicts) self.assertFalse(self.tracker.is_buildrequest_tracked(10)) def test_multi_codebase_unknown_branch_not_tracked(self): ss_dicts = [self.create_ss_dict('pr1', 'cb1', 'rp1', None), self.create_ss_dict('pr2', 'cb2', 'rp2', 'br2')] self.tracker.on_new_build(1, 'bldr1', ss_dicts) self.assertFalse(self.tracker.is_build_tracked(1)) self.tracker.on_new_buildrequest(10, 'bldr1', ss_dicts) self.assertFalse(self.tracker.is_buildrequest_tracked(10)) def test_unmatched_ss_not_tracked(self): ss_dicts = [self.create_ss_dict('pr1', 'cb1', 'rp1', 'untracked')] self.tracker.on_new_build(1, 'bldr1', ss_dicts) self.assertFalse(self.tracker.is_build_tracked(1)) self.tracker.on_new_buildrequest(10, 'bldr1', ss_dicts) self.assertFalse(self.tracker.is_buildrequest_tracked(10)) def test_multi_codebase_unmatched_ss_not_tracked(self): ss_dicts = [self.create_ss_dict('pr1', 'cb1', 'rp1', 'untracked'), self.create_ss_dict('pr2', 'cb2', 'rp2', 'untracked')] self.tracker.on_new_build(1, 'bldr1', ss_dicts) self.assertFalse(self.tracker.is_build_tracked(1)) self.tracker.on_new_buildrequest(10, 'bldr1', ss_dicts) self.assertFalse(self.tracker.is_buildrequest_tracked(10)) def test_multi_codebase_tracks_if_at_least_one_ss_match(self): ss_dicts = 
[self.create_ss_dict('pr1', 'cb1', 'rp1', 'untracked'), self.create_ss_dict('pr2', 'cb2', 'rp2', 'br2')] self.tracker.on_new_build(1, 'bldr1', ss_dicts) self.assertTrue(self.tracker.is_build_tracked(1)) self.tracker.on_new_buildrequest(10, 'bldr1', ss_dicts) self.assertTrue(self.tracker.is_buildrequest_tracked(10)) def test_cancel_build(self): ss_dict = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br1') not_matching_ss_dict = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br2') self.tracker.on_new_build(1, 'bldr1', [ss_dict]) self.assertTrue(self.tracker.is_build_tracked(1)) self.tracker.on_change(not_matching_ss_dict) self.assert_cancelled([]) self.assertTrue(self.tracker.is_build_tracked(1)) self.tracker.on_change(ss_dict) self.assert_cancelled([('build', 1)]) self.assertFalse(self.tracker.is_build_tracked(1)) self.tracker.on_change(ss_dict) self.assert_cancelled([]) def test_not_cancel_finished_build(self): ss_dict = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br1') self.tracker.on_new_build(1, 'bldr1', [ss_dict]) self.assertTrue(self.tracker.is_build_tracked(1)) self.tracker.on_finished_build(1) self.assertFalse(self.tracker.is_build_tracked(1)) self.tracker.on_change(ss_dict) self.assert_cancelled([]) self.assertFalse(self.tracker.is_build_tracked(1)) def test_cancel_buildrequest(self): ss_dict = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br1') not_matching_ss_dict = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br2') self.tracker.on_new_buildrequest(1, 'bldr1', [ss_dict]) self.assertTrue(self.tracker.is_buildrequest_tracked(1)) self.tracker.on_change(not_matching_ss_dict) self.assert_cancelled([]) self.assertTrue(self.tracker.is_buildrequest_tracked(1)) self.tracker.on_change(ss_dict) self.assert_cancelled([('breq', 1)]) self.assertFalse(self.tracker.is_buildrequest_tracked(1)) self.tracker.on_change(ss_dict) self.assert_cancelled([]) def test_not_cancel_finished_buildrequest(self): ss_dict = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br1') 
self.tracker.on_new_buildrequest(1, 'bldr1', [ss_dict]) self.assertTrue(self.tracker.is_buildrequest_tracked(1)) self.tracker.on_complete_buildrequest(1) self.assertFalse(self.tracker.is_buildrequest_tracked(1)) self.tracker.on_change(ss_dict) self.assert_cancelled([]) self.assertFalse(self.tracker.is_buildrequest_tracked(1)) @parameterized.expand([ ('first', True), ('second', False), ]) def test_cancel_multi_codebase_build(self, name, cancel_first_ss): ss_dict1 = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br1') ss_dict2 = self.create_ss_dict('pr2', 'cb2', 'rp2', 'br2') not_matching_ss_dict = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br2') self.tracker.on_new_build(1, 'bldr1', [ss_dict1, ss_dict2]) self.assertTrue(self.tracker.is_build_tracked(1)) self.tracker.on_change(not_matching_ss_dict) self.assert_cancelled([]) self.assertTrue(self.tracker.is_build_tracked(1)) self.tracker.on_change(ss_dict1 if cancel_first_ss else ss_dict2) self.assert_cancelled([('build', 1)]) self.assertFalse(self.tracker.is_build_tracked(1)) self.tracker.on_change(ss_dict1) self.tracker.on_change(ss_dict2) self.assert_cancelled([]) def test_cancel_multi_codebase_build_ignores_non_matching_change_in_tracked_build(self): ss_dict1 = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br1') non_matched_ss_dict = self.create_ss_dict('pr1', 'cb1', 'rp1', 'brZ') self.tracker.on_new_build(1, 'bldr1', [ss_dict1, non_matched_ss_dict]) self.assertTrue(self.tracker.is_build_tracked(1)) self.tracker.on_change(non_matched_ss_dict) self.assert_cancelled([]) self.assertTrue(self.tracker.is_build_tracked(1)) def test_cancel_multiple_builds(self): ss_dict = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br1') not_matching_ss_dict = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br2') self.tracker.on_new_build(1, 'bldr1', [ss_dict]) self.tracker.on_new_build(2, 'bldr1', [ss_dict]) self.assertTrue(self.tracker.is_build_tracked(1)) self.assertTrue(self.tracker.is_build_tracked(2)) self.tracker.on_change(not_matching_ss_dict) 
self.assert_cancelled([]) self.assertTrue(self.tracker.is_build_tracked(1)) self.assertTrue(self.tracker.is_build_tracked(2)) self.tracker.on_change(ss_dict) self.assert_cancelled([('build', 1), ('build', 2)]) self.assertFalse(self.tracker.is_build_tracked(1)) self.assertFalse(self.tracker.is_build_tracked(2)) self.tracker.on_change(ss_dict) self.assert_cancelled([]) def test_cancel_multi_codebase_multiple_builds(self): ss_dict1 = self.create_ss_dict('pr1', 'cb1', 'rp1', 'br1') ss_dict2 = self.create_ss_dict('pr2', 'cb2', 'rp2', 'br2') ss_dict3 = self.create_ss_dict('pr3', 'cb3', 'rp3', 'br3') self.tracker.on_new_build(1, 'bldr1', [ss_dict1, ss_dict2]) self.tracker.on_new_build(2, 'bldr1', [ss_dict1, ss_dict3]) self.tracker.on_new_build(3, 'bldr1', [ss_dict2, ss_dict3]) self.assertTrue(self.tracker.is_build_tracked(1)) self.assertTrue(self.tracker.is_build_tracked(2)) self.assertTrue(self.tracker.is_build_tracked(3)) self.assert_cancelled([]) self.tracker.on_change(ss_dict1) self.assert_cancelled([('build', 1), ('build', 2)]) self.assertFalse(self.tracker.is_build_tracked(1)) self.assertFalse(self.tracker.is_build_tracked(2)) self.assertTrue(self.tracker.is_build_tracked(3)) self.tracker.on_change(ss_dict1) self.assert_cancelled([]) class TestOldBuildCancellerUtils(ConfigErrorsMixin, unittest.TestCase): @parameterized.expand([ ('only_builder', [(['bldr'], SourceStampFilter())]), ('with_codebase', [(['bldr'], SourceStampFilter(codebase_eq=['value']))]), ('with_repository', [(['bldr'], SourceStampFilter(repository_eq=['value']))]), ('with_branch', [(['bldr'], SourceStampFilter(branch_eq=['value']))]), ('all', [(['bldr'], SourceStampFilter(codebase_eq=['v1', 'v2'], repository_eq=['v1', 'v2'], branch_eq=['v1', 'v2']))]), ]) def test_check_filters_valid(self, name, filters): OldBuildCanceller.check_filters(filters) @parameterized.expand([ ('dict', {}), ('list_list', [[]]), ]) def test_check_filters_not_dict(self, name, value): with self.assertRaisesConfigError('The 
filters argument must be a list of tuples'): OldBuildCanceller.check_filters(value) def test_check_filters_invalid_uple(self): with self.assertRaisesConfigError('must be a list of tuples each of which'): OldBuildCanceller.check_filters([('a', 'b', 'c')]) with self.assertRaisesConfigError('must be a list of tuples each of which'): OldBuildCanceller.check_filters([('a',)]) @parameterized.expand([ ('dict', {}, 'filter builders must be list of strings or a string'), ('list_int', [1], 'Value of filter builders must be string'), ]) def test_check_builders_keys_not_list(self, name, value, error): with self.assertRaisesConfigError(error): OldBuildCanceller.check_filters([(value, SourceStampFilter())]) class TestOldBuildCanceller(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) self.master.mq.verifyMessages = False self.insert_test_data() self._cancelled_build_ids = [] yield self.master.startService() def tearDown(self): return self.master.stopService() def create_ss_dict(self, project, codebase, repository, branch): # Changes have the same structure for the attributes that we're using, so we reuse this # function for changes. 
return { 'project': project, 'codebase': codebase, 'repository': repository, 'branch': branch, } def insert_test_data(self): self.master.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Builder(id=79, name='builder1'), fakedb.Builder(id=80, name='builder2'), fakedb.Buildset(id=98, results=None, reason="reason98"), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=234), fakedb.SourceStamp(id=234, revision='revision1', project='project1', codebase='codebase1', repository='repository1', branch='branch1'), fakedb.BuildRequest(id=10, buildsetid=98, builderid=79), fakedb.Build(id=19, number=1, builderid=79, buildrequestid=10, workerid=13, masterid=92, results=None, state_string="state1"), fakedb.Buildset(id=99, results=None, reason="reason99"), fakedb.BuildsetSourceStamp(buildsetid=99, sourcestampid=235), fakedb.SourceStamp(id=235, revision='revision2', project='project2', codebase='codebase2', repository='repository2', branch='branch2'), fakedb.BuildRequest(id=11, buildsetid=99, builderid=80), fakedb.Build(id=20, number=1, builderid=80, buildrequestid=11, workerid=13, masterid=92, results=None, state_string="state2"), ]) @defer.inlineCallbacks def setup_canceller_with_filters(self): self.canceller = OldBuildCanceller('canceller', [ (['builder1'], SourceStampFilter(branch_eq=['branch1'])), (['builder2'], SourceStampFilter(branch_eq=['branch2'])), ]) yield self.canceller.setServiceParent(self.master) @defer.inlineCallbacks def setup_canceller_with_no_filters(self): self.canceller = OldBuildCanceller('canceller', []) yield self.canceller.setServiceParent(self.master) def assert_cancelled(self, cancellations): expected_productions = [] for kind, id in cancellations: if kind == 'build': expected_productions.append( (('control', 'builds', str(id), 'stop'), {'reason': 'Build has been obsoleted by a newer commit'})) elif kind == 'breq': expected_productions.append( (('control', 'buildrequests', str(id), 'cancel'), {'reason': 'Build 
request has been obsoleted by a newer commit'})) else: raise Exception(f"Unknown cancellation type {kind}") self.master.mq.assertProductions(expected_productions) @defer.inlineCallbacks def test_cancel_build_after_new_commit(self): yield self.setup_canceller_with_filters() ss_dict = self.create_ss_dict('project1', 'codebase1', 'repository1', 'branch1') self.master.mq.callConsumer(('changes', '123', 'new'), ss_dict) self.assert_cancelled([('build', 19)]) self.master.mq.callConsumer(('changes', '124', 'new'), ss_dict) self.assert_cancelled([]) @defer.inlineCallbacks def test_build_finished_then_new_commit_no_cancel(self): yield self.setup_canceller_with_filters() ss_dict = self.create_ss_dict('project1', 'codebase1', 'repository1', 'branch1') self.master.mq.callConsumer(('builds', '19', 'finished'), {'buildid': 19}) self.master.mq.callConsumer(('changes', '123', 'new'), ss_dict) self.assert_cancelled([]) @defer.inlineCallbacks def test_reconfig_no_longer_matched_tracked_build_cancelled(self): yield self.setup_canceller_with_filters() ss_dict = self.create_ss_dict('project1', 'codebase1', 'repository1', 'branch1') yield self.canceller.reconfigService('canceller', []) self.master.mq.callConsumer(('changes', '123', 'new'), ss_dict) self.assert_cancelled([('build', 19)]) self.master.mq.callConsumer(('changes', '124', 'new'), ss_dict) self.assert_cancelled([]) @defer.inlineCallbacks def test_reconfig_defers_finished_builds_to_after_registration(self): # We need to make sure that during reconfiguration any finished build messages are not # acted before the build is tracked yield self.setup_canceller_with_no_filters() ss_dict1 = self.create_ss_dict('project1', 'codebase1', 'repository1', 'branch1') ss_dict2 = self.create_ss_dict('project2', 'codebase2', 'repository2', 'branch2') # Setup controllable blocking wait on canceller._on_build_new, _on_buildrequest_new on_build_new_d = defer.Deferred() on_build_new_original = self.canceller._on_build_new on_build_new_build_ids = [] 
on_buildrequest_new_d = defer.Deferred() on_buildrequest_new_original = self.canceller._on_buildrequest_new on_buildrequest_new_breq_ids = [] @defer.inlineCallbacks def waiting_on_build_new(key, build): on_build_new_build_ids.append(build['buildid']) if not on_build_new_d.called: yield on_build_new_d yield on_build_new_original(key, build) self.canceller._on_build_new = waiting_on_build_new @defer.inlineCallbacks def waiting_on_buildrequest_new(key, breq): on_buildrequest_new_breq_ids.append(breq['buildrequestid']) if not on_buildrequest_new_d.called: yield on_buildrequest_new_d yield on_buildrequest_new_original(key, breq) self.canceller._on_buildrequest_new = waiting_on_buildrequest_new # Start reconfig. We verify that we actually blocked in on_build_new d = self.canceller.reconfigService('canceller', [ {'builders': ['builder1'], 'branch_eq': ['branch1']}, {'builders': ['builder2'], 'branch_eq': ['branch2']}, ]) self.assertEqual(on_build_new_build_ids, []) self.assertEqual(on_buildrequest_new_breq_ids, [10]) self.assertFalse(d.called) # The build finish messages should be queued self.master.mq.callConsumer(('builds', '19', 'finished'), {'buildid': 19}) self.master.mq.callConsumer(('builds', '20', 'finished'), {'buildid': 20}) self.master.mq.callConsumer(('buildrequests', '10', 'complete'), {'buildrequestid': 10}) self.master.mq.callConsumer(('buildrequests', '11', 'complete'), {'buildrequestid': 11}) # Unblock reconfigService on_build_new_d.callback(None) on_buildrequest_new_d.callback(None) yield d self.assertEqual(on_build_new_build_ids, [19, 20]) self.assertEqual(on_buildrequest_new_breq_ids, [10, 11]) self.assertFalse(self.canceller._build_tracker.is_build_tracked(19)) self.assertFalse(self.canceller._build_tracker.is_build_tracked(20)) self.assertFalse(self.canceller._build_tracker.is_buildrequest_tracked(10)) self.assertFalse(self.canceller._build_tracker.is_buildrequest_tracked(11)) self.master.mq.callConsumer(('changes', '123', 'new'), ss_dict1) 
self.master.mq.callConsumer(('changes', '124', 'new'), ss_dict2) self.assert_cancelled([]) buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_canceller_buildset.py000066400000000000000000000135501413250514000277530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.schedulers.canceller_buildset import FailingBuildsetCanceller from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util.ssfilter import SourceStampFilter class TestOldBuildCanceller(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) self.master.mq.verifyMessages = False self.insert_test_data() self._cancelled_build_ids = [] yield self.master.startService() def tearDown(self): return self.master.stopService() def insert_test_data(self): self.master.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Builder(id=100, name='builder1'), fakedb.Builder(id=101, name='builder2'), fakedb.Builder(id=102, 
name='builder3'), fakedb.Buildset(id=200, results=None, reason="reason98"), fakedb.BuildsetSourceStamp(buildsetid=200, sourcestampid=300), fakedb.SourceStamp(id=300, revision='revision1', project='project1', codebase='codebase1', repository='repository1', branch='branch1'), fakedb.BuildRequest(id=400, buildsetid=200, builderid=100), fakedb.BuildRequestClaim(brid=400, masterid=92, claimed_at=1), fakedb.Build(id=500, number=1, builderid=100, buildrequestid=400, workerid=13, masterid=92, results=None, state_string="state1"), fakedb.BuildRequest(id=401, buildsetid=200, builderid=101), fakedb.BuildRequestClaim(brid=401, masterid=92, claimed_at=1), fakedb.Build(id=501, number=1, builderid=101, buildrequestid=401, workerid=13, masterid=92, results=None, state_string="state2"), fakedb.BuildRequest(id=402, buildsetid=200, builderid=102), fakedb.BuildRequestClaim(brid=402, masterid=92, claimed_at=1), fakedb.Build(id=502, number=1, builderid=102, buildrequestid=402, workerid=13, masterid=92, results=None, state_string="state3"), ]) def assert_cancelled(self, cancellations): expected_productions = [] for build_id in cancellations: reason = 'Build has been cancelled because another build in the same buildset failed' expected_productions.append( (('control', 'builds', str(build_id), 'stop'), {'reason': reason})) self.master.mq.assertProductions(expected_productions) @defer.inlineCallbacks def send_build_finished(self, id, results): build = yield self.master.data.get(('builds', str(id))) build['results'] = results self.master.mq.callConsumer(('builds', str(id), 'finished'), build) @defer.inlineCallbacks def test_cancel_buildrequests_ss_filter_does_not_match(self): self.canceller = FailingBuildsetCanceller('canceller', [ (['builder1'], ['builder1', 'builder2', 'builder3'], SourceStampFilter(branch_eq=['branch_other'])), ]) yield self.canceller.setServiceParent(self.master) yield self.send_build_finished(500, FAILURE) self.assert_cancelled([]) @defer.inlineCallbacks def 
test_cancel_buildrequests_builder_filter_does_not_match(self): self.canceller = FailingBuildsetCanceller('canceller', [ (['builder2'], ['builder1', 'builder2', 'builder3'], SourceStampFilter(branch_eq=['branch1'])), ]) yield self.canceller.setServiceParent(self.master) yield self.send_build_finished(500, FAILURE) self.assert_cancelled([]) @defer.inlineCallbacks def test_cancel_buildrequests_not_failure(self): self.canceller = FailingBuildsetCanceller('canceller', [ (['builder1'], ['builder1', 'builder2', 'builder3'], SourceStampFilter(branch_eq=['branch1'])), ]) yield self.canceller.setServiceParent(self.master) yield self.send_build_finished(500, SUCCESS) self.assert_cancelled([]) @defer.inlineCallbacks def test_cancel_buildrequests_matches(self): self.canceller = FailingBuildsetCanceller('canceller', [ (['builder1'], ['builder1', 'builder2', 'builder3'], SourceStampFilter(branch_eq=['branch1'])), ]) yield self.canceller.setServiceParent(self.master) yield self.send_build_finished(500, FAILURE) self.assert_cancelled([501, 502]) @defer.inlineCallbacks def test_cancel_buildrequests_matches_any_builder(self): self.canceller = FailingBuildsetCanceller('canceller', [ (['builder1'], None, SourceStampFilter(branch_eq=['branch1'])), ]) yield self.canceller.setServiceParent(self.master) yield self.send_build_finished(500, FAILURE) self.assert_cancelled([501, 502]) buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_dependent.py000066400000000000000000000206751413250514000261040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.schedulers import base from buildbot.schedulers import dependent from buildbot.test import fakedb from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin SUBMITTED_AT_TIME = 111111111 COMPLETE_AT_TIME = 222222222 OBJECTID = 33 SCHEDULERID = 133 UPSTREAM_NAME = 'uppy' class Dependent(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, upstream=None): # build a fake upstream scheduler class Upstream(base.BaseScheduler): def __init__(self, name): self.name = name if not upstream: upstream = Upstream(UPSTREAM_NAME) sched = dependent.Dependent(name='n', builderNames=['b'], upstream=upstream) self.attachScheduler(sched, OBJECTID, SCHEDULERID, overrideBuildsetMethods=True, createBuilderDB=True) return sched def assertBuildsetSubscriptions(self, bsids=None): self.db.state.assertState(OBJECTID, upstream_bsids=bsids) # tests # NOTE: these tests take advantage of the fact that all of the fake # scheduler operations are synchronous, and thus do not return a Deferred. # The Deferred from trigger() is completely processed before this test # method returns. 
def test_constructor_string_arg(self): with self.assertRaises(config.ConfigErrors): self.makeScheduler(upstream='foo') @defer.inlineCallbacks def test_activate(self): sched = self.makeScheduler() sched.activate() self.assertEqual( sorted([q.filter for q in sched.master.mq.qrefs]), [('buildsets', None, 'complete',), ('buildsets', None, 'new',), ('schedulers', '133', 'updated')]) yield sched.deactivate() self.assertEqual([q.filter for q in sched.master.mq.qrefs], [('schedulers', '133', 'updated')]) def sendBuildsetMessage(self, scheduler_name=None, results=-1, complete=False): """Call callConsumer with a buildset message. Most of the values here are hard-coded to correspond to those in do_test.""" msg = dict( bsid=44, sourcestamps=[], # blah blah blah submitted_at=SUBMITTED_AT_TIME, complete=complete, complete_at=COMPLETE_AT_TIME if complete else None, external_idstring=None, reason='Because', results=results if complete else -1, parent_buildid=None, parent_relationship=None, ) if not complete: msg['scheduler'] = scheduler_name self.master.mq.callConsumer( ('buildsets', '44', 'complete' if complete else 'new'), msg) def do_test(self, scheduler_name, expect_subscription, results, expect_buildset): """Test the dependent scheduler by faking a buildset and subsequent completion from an upstream scheduler. @param scheduler_name: upstream scheduler's name @param expect_subscription: whether to expect the dependent to subscribe to the buildset @param results: results of the upstream scheduler's buildset @param expect_buidlset: whether to expect the dependent to generate a new buildset in response """ sched = self.makeScheduler() sched.activate() # announce a buildset with a matching name.. 
self.db.insertTestData([ fakedb.SourceStamp(id=93, revision='555', branch='master', project='proj', repository='repo', codebase='cb'), fakedb.Buildset( id=44, submitted_at=SUBMITTED_AT_TIME, complete=False, complete_at=None, external_idstring=None, reason='Because', results=-1, ), fakedb.BuildsetSourceStamp(buildsetid=44, sourcestampid=93), ]) self.sendBuildsetMessage(scheduler_name=scheduler_name, complete=False) # check whether scheduler is subscribed to that buildset if expect_subscription: self.assertBuildsetSubscriptions([44]) else: self.assertBuildsetSubscriptions([]) # pretend that the buildset is finished self.db.buildsets.fakeBuildsetCompletion(bsid=44, result=results) self.sendBuildsetMessage(results=results, complete=True) # and check whether a buildset was added in response if expect_buildset: self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=None, # defaults external_idstring=None, properties=None, reason='downstream', sourcestamps=[93])), ]) else: self.assertEqual(self.addBuildsetCalls, []) def test_related_buildset_SUCCESS(self): return self.do_test(UPSTREAM_NAME, True, SUCCESS, True) def test_related_buildset_WARNINGS(self): return self.do_test(UPSTREAM_NAME, True, WARNINGS, True) def test_related_buildset_FAILURE(self): return self.do_test(UPSTREAM_NAME, True, FAILURE, False) def test_unrelated_buildset(self): return self.do_test('unrelated', False, SUCCESS, False) @defer.inlineCallbacks def test_getUpstreamBuildsets_missing(self): sched = self.makeScheduler() # insert some state, with more bsids than exist self.db.insertTestData([ fakedb.SourceStamp(id=1234), fakedb.Buildset(id=11), fakedb.Buildset(id=13), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=1234), fakedb.Object(id=OBJECTID), fakedb.ObjectState(objectid=OBJECTID, name='upstream_bsids', value_json='[11,12,13]'), ]) # check return value (missing 12) self.assertEqual((yield sched._getUpstreamBuildsets()), [(11, [], False, -1), (13, [1234], 
False, -1)]) # and check that it wrote the correct value back to the state self.db.state.assertState(OBJECTID, upstream_bsids=[11, 13]) @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler() expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def test_disabled_activate(self): sched = self.makeScheduler() yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.activate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_deactivate(self): sched = self.makeScheduler() yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.deactivate() self.assertEqual(r, None) buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_forcesched.py000066400000000000000000001051451413250514000262370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.schedulers.forcesched import AnyPropertyParameter from buildbot.schedulers.forcesched import BaseParameter from buildbot.schedulers.forcesched import BooleanParameter from buildbot.schedulers.forcesched import ChoiceStringParameter from buildbot.schedulers.forcesched import CodebaseParameter from buildbot.schedulers.forcesched import CollectedValidationError from buildbot.schedulers.forcesched import FileParameter from buildbot.schedulers.forcesched import FixedParameter from buildbot.schedulers.forcesched import ForceScheduler from buildbot.schedulers.forcesched import IntParameter from buildbot.schedulers.forcesched import NestedParameter from buildbot.schedulers.forcesched import PatchParameter from buildbot.schedulers.forcesched import StringParameter from buildbot.schedulers.forcesched import UserNameParameter from buildbot.schedulers.forcesched import oneCodebase from buildbot.test.util import scheduler from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin class TestForceScheduler(scheduler.SchedulerMixin, ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 19 SCHEDULERID = 9 maxDiff = None def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, name='testsched', builderNames=None, **kw): if builderNames is None: builderNames = ['a', 'b'] sched = self.attachScheduler( ForceScheduler(name=name, builderNames=builderNames, **kw), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True, createBuilderDB=True) sched.master.config = config.MasterConfig() self.assertEqual(sched.name, name) return sched # tests def test_compare_branch(self): self.assertNotEqual( ForceScheduler(name="testched", builderNames=[]), ForceScheduler( name="testched", builderNames=[], 
codebases=oneCodebase( branch=FixedParameter("branch", "fishing/pole")))) def test_compare_reason(self): self.assertNotEqual( ForceScheduler(name="testched", builderNames=[], reason=FixedParameter("reason", "no fish for you!")), ForceScheduler(name="testched", builderNames=[], reason=FixedParameter("reason", "thanks for the fish!"))) def test_compare_revision(self): self.assertNotEqual( ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( revision=FixedParameter("revision", "fish-v1"))), ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( revision=FixedParameter("revision", "fish-v2")))) def test_compare_repository(self): self.assertNotEqual( ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( repository=FixedParameter("repository", "git://pond.org/fisher.git"))), ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( repository=FixedParameter("repository", "svn://ocean.com/trawler/")))) def test_compare_project(self): self.assertNotEqual( ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( project=FixedParameter("project", "fisher"))), ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( project=FixedParameter("project", "trawler")))) def test_compare_username(self): self.assertNotEqual( ForceScheduler(name="testched", builderNames=[]), ForceScheduler(name="testched", builderNames=[], username=FixedParameter("username", "The Fisher King "))) def test_compare_properties(self): self.assertNotEqual( ForceScheduler(name="testched", builderNames=[], properties=[]), ForceScheduler(name="testched", builderNames=[], properties=[FixedParameter("prop", "thanks for the fish!")])) def test_compare_codebases(self): self.assertNotEqual( ForceScheduler(name="testched", builderNames=[], codebases=['bar']), ForceScheduler(name="testched", builderNames=[], codebases=['foo'])) @defer.inlineCallbacks def test_basicForce(self): sched = self.makeScheduler() res = 
yield sched.force('user', builderNames=['a'], branch='a', reason='because', revision='c', repository='d', project='p') # only one builder forced, so there should only be one brid self.assertEqual(res, (500, {1000: 100})) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a'], waited_for=False, properties={ 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), }, reason="A build was forced by 'user': because", sourcestamps=[ {'codebase': '', 'branch': 'a', 'revision': 'c', 'repository': 'd', 'project': 'p'}, ])), ]) @defer.inlineCallbacks def test_basicForce_reasonString(self): """Same as above, but with a reasonString""" sched = self.makeScheduler( reasonString='%(owner)s wants it %(reason)s') res = yield sched.force('user', builderNames=['a'], branch='a', reason='because', revision='c', repository='d', project='p') bsid, brids = res # only one builder forced, so there should only be one brid self.assertEqual(len(brids), 1) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': ['a'], 'properties': {'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form')}, 'reason': 'user wants it because', 'sourcestamps': [{'branch': 'a', 'codebase': '', 'project': 'p', 'repository': 'd', 'revision': 'c'}], 'waited_for': False}), ]) (bsid, dict(reason="user wants it because", brids=brids, external_idstring=None, properties=[('owner', ('user', 'Force Build Form')), ('reason', ('because', 'Force Build Form')), ('scheduler', ('testsched', 'Scheduler')), ], sourcestampsetid=100), {'': dict(branch='a', revision='c', repository='d', codebase='', project='p', sourcestampsetid=100) }) @defer.inlineCallbacks def test_force_allBuilders(self): sched = self.makeScheduler() res = yield sched.force('user', branch='a', reason='because', revision='c', repository='d', project='p', ) self.assertEqual(res, (500, {1000: 100, 1001: 101})) 
self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a', 'b'], waited_for=False, properties={ 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), }, reason="A build was forced by 'user': because", sourcestamps=[ {'codebase': '', 'branch': 'a', 'revision': 'c', 'repository': 'd', 'project': 'p'}, ])), ]) @defer.inlineCallbacks def test_force_someBuilders(self): sched = self.makeScheduler(builderNames=['a', 'b', 'c']) res = yield sched.force('user', builderNames=['a', 'b'], branch='a', reason='because', revision='c', repository='d', project='p', ) self.assertEqual(res, (500, {1000: 100, 1001: 101})) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a', 'b'], waited_for=False, properties={ 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), }, reason="A build was forced by 'user': because", sourcestamps=[ {'codebase': '', 'branch': 'a', 'revision': 'c', 'repository': 'd', 'project': 'p'}, ])), ]) def test_bad_codebases(self): # codebases must be a list of either string or BaseParameter types with self.assertRaisesConfigError( "ForceScheduler 'foo': 'codebases' must be a " "list of strings or CodebaseParameter objects:"): ForceScheduler(name='foo', builderNames=['bar'], codebases=[123],) with self.assertRaisesConfigError( "ForceScheduler 'foo': 'codebases' must be a " "list of strings or CodebaseParameter objects:"): ForceScheduler(name='foo', builderNames=['bar'], codebases=[IntParameter('foo')]) # codebases cannot be empty with self.assertRaisesConfigError( "ForceScheduler 'foo': 'codebases' cannot be " "empty; use [CodebaseParameter(codebase='', hide=True)] if needed:"): ForceScheduler(name='foo', builderNames=['bar'], codebases=[]) # codebases cannot be a dictionary # dictType on Python 3 is: "" # dictType on Python 2 is: "" dictType = str(type({})) errMsg = ("ForceScheduler 'foo': 'codebases' 
should be a list " "of strings or CodebaseParameter, " "not {}".format(dictType)) with self.assertRaisesConfigError(errMsg): ForceScheduler(name='foo', builderNames=['bar'], codebases={'cb': {'branch': 'trunk'}}) @defer.inlineCallbacks def test_good_codebases(self): sched = self.makeScheduler(codebases=['foo', CodebaseParameter('bar')]) res = yield sched.force('user', builderNames=['a'], reason='because', foo_branch='a', foo_revision='c', foo_repository='d', foo_project='p', bar_branch='a2', bar_revision='c2', bar_repository='d2', bar_project='p2' ) bsid, brids = res expProperties = { 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), } self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a'], waited_for=False, properties=expProperties, reason="A build was forced by 'user': because", sourcestamps=[ {'branch': 'a2', 'project': 'p2', 'repository': 'd2', 'revision': 'c2', 'codebase': 'bar'}, {'branch': 'a', 'project': 'p', 'repository': 'd', 'revision': 'c', 'codebase': 'foo'}, ])), ]) @defer.inlineCallbacks def test_codebase_with_patch(self): sched = self.makeScheduler(codebases=['foo', CodebaseParameter('bar', patch=PatchParameter())]) res = yield sched.force('user', builderNames=['a'], reason='because', foo_branch='a', foo_revision='c', foo_repository='d', foo_project='p', bar_branch='a2', bar_revision='c2', bar_repository='d2', bar_project='p2', bar_patch_body=b"xxx") bsid, brids = res expProperties = { 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), } self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a'], waited_for=False, properties=expProperties, reason="A build was forced by 'user': because", sourcestamps=[ {'branch': 'a2', 'project': 'p2', 'repository': 'd2', 'revision': 'c2', 'codebase': 'bar', 'patch_body': b'xxx', 'patch_author': '', 'patch_subdir': '.', 'patch_comment': '', 
'patch_level': 1}, {'branch': 'a', 'project': 'p', 'repository': 'd', 'revision': 'c', 'codebase': 'foo'}, ])), ]) def formatJsonForTest(self, gotJson): ret = "" linestart = "expectJson='" spaces = 7 * 4 + 2 while len(gotJson) > (90 - spaces): gotJson = " " * spaces + linestart + gotJson pos = gotJson[:100].rfind(",") if pos > 0: pos += 2 ret += gotJson[:pos] + "'\n" gotJson = gotJson[pos:] linestart = "'" ret += " " * spaces + linestart + gotJson + "')\n" return ret # value = the value to be sent with the parameter (ignored if req is set) # expect = the expected result (can be an exception type) # klass = the parameter class type # req = use this request instead of the auto-generated one based on value @defer.inlineCallbacks def do_ParameterTest(self, expect, klass, # None=one prop, Exception=exception, dict=many props expectKind=None, owner='user', value=None, req=None, expectJson=None, **kwargs): name = kwargs.setdefault('name', 'p1') # construct one if needed if isinstance(klass, type): prop = klass(**kwargs) else: prop = klass self.assertEqual(prop.name, name) self.assertEqual(prop.label, kwargs.get('label', prop.name)) if expectJson is not None: gotSpec = prop.getSpec() gotJson = json.dumps(gotSpec) expectSpec = json.loads(expectJson) if gotSpec != expectSpec: try: import xerox # pylint: disable=import-outside-toplevel formatted = self.formatJsonForTest(gotJson) print( "You may update the test with (copied to clipboard):\n" + formatted) xerox.copy(formatted) input() except ImportError: print("Note: for quick fix, pip install xerox") self.assertEqual(gotSpec, expectSpec) sched = self.makeScheduler(properties=[prop]) if not req: req = {name: value, 'reason': 'because'} try: bsid, brids = yield sched.force(owner, builderNames=['a'], **req) except Exception as e: if expectKind is not Exception: # an exception is not expected raise if not isinstance(e, expect): # the exception is the wrong kind raise return None # success expect_props = { 'owner': ('user', 'Force 
Build Form'), 'reason': ('because', 'Force Build Form'), } if expectKind is None: expect_props[name] = (expect, 'Force Build Form') elif expectKind is dict: for k, v in expect.items(): expect_props[k] = (v, 'Force Build Form') else: self.fail("expectKind is wrong type!") # only forced on 'a' self.assertEqual((bsid, brids), (500, {1000: 100})) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a'], waited_for=False, properties=expect_props, reason="A build was forced by 'user': because", sourcestamps=[ {'branch': '', 'project': '', 'repository': '', 'revision': '', 'codebase': ''}, ])), ]) return None def test_StringParameter(self): self.do_ParameterTest(value="testedvalue", expect="testedvalue", klass=StringParameter, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "text", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, "maxsize": null, ' '"size": 10, "autopopulate": null}') def test_StringParameter_Required(self): self.do_ParameterTest(value=" ", expect=CollectedValidationError, expectKind=Exception, klass=StringParameter, required=True) def test_StringParameter_maxsize(self): self.do_ParameterTest(value="xx" * 20, expect=CollectedValidationError, expectKind=Exception, klass=StringParameter, maxsize=10) def test_FileParameter_maxsize(self): self.do_ParameterTest(value="xx" * 20, expect=CollectedValidationError, expectKind=Exception, klass=FileParameter, maxsize=10) def test_FileParameter(self): self.do_ParameterTest(value="xx", expect="xx", klass=FileParameter, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "file", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, ' '"maxsize": 10485760, "autopopulate": null}') def test_PatchParameter(self): expect_json = ( '{"name": "p1", "fullName": "p1", "label": "p1", "autopopulate": null, ' '"tablabel": "p1", "type": 
"nested", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, "maxsize": null, ' '"layout": "vertical", "columns": 1, "fields": [{"name": "body", ' '"fullName": "p1_body", "label": "body", "tablabel": "body", "autopopulate": null, ' '"type": "file", "default": "", "required": false, "multiple": false, ' '"regex": null, "hide": false, "maxsize": 10485760}, {"name": "level", ' '"fullName": "p1_level", "label": "level", "tablabel": "level", ' '"type": "int", "default": 1, "required": false, "multiple": false, ' '"regex": null, "hide": false, "maxsize": null, "size": 10, "autopopulate": null}, ' '{"name": "author", "fullName": "p1_author", "label": "author", ' '"tablabel": "author", "type": "text", "default": "", "autopopulate": null, ' '"required": false, "multiple": false, "regex": null, "hide": false, ' '"maxsize": null, "size": 10}, {"name": "comment", "autopopulate": null, ' '"fullName": "p1_comment", "label": "comment", "tablabel": "comment", ' '"type": "text", "default": "", "required": false, "multiple": false, ' '"regex": null, "hide": false, "maxsize": null, "size": 10}, ' '{"name": "subdir", "fullName": "p1_subdir", "label": "subdir", ' '"tablabel": "subdir", "type": "text", "default": ".", "autopopulate": null, ' '"required": false, "multiple": false, "regex": null, "hide": false, ' '"maxsize": null, "size": 10}]}' ) self.do_ParameterTest(req=dict(p1_author='me', reason="because"), expect={ 'author': 'me', 'body': '', 'comment': '', 'level': 1, 'subdir': '.'}, klass=PatchParameter, expectJson=expect_json) def test_IntParameter(self): self.do_ParameterTest(value="123", expect=123, klass=IntParameter, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "int", "default": 0, "required": false, ' '"multiple": false, "regex": null, "hide": false, "maxsize": null, ' '"size": 10, "autopopulate": null}') def test_FixedParameter(self): self.do_ParameterTest(value="123", expect="321", 
klass=FixedParameter, default="321", expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "fixed", "default": "321", ' '"required": false, "multiple": false, "regex": null, "hide": true, ' '"maxsize": null, "autopopulate": null}') def test_BooleanParameter_True(self): req = dict(p1=True, reason='because') self.do_ParameterTest(value="123", expect=True, klass=BooleanParameter, req=req, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "bool", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, ' '"maxsize": null, "autopopulate": null}') def test_BooleanParameter_False(self): req = dict(p2=True, reason='because') self.do_ParameterTest(value="123", expect=False, klass=BooleanParameter, req=req) def test_UserNameParameter(self): email = "test " expect_json = ( '{"name": "username", "fullName": "username", ' '"label": "Your name:", "tablabel": "Your name:", "type": "username", ' '"default": "", "required": false, "multiple": false, "regex": null, ' '"hide": false, "maxsize": null, "size": 30, ' '"need_email": true, "autopopulate": null}' ) self.do_ParameterTest(value=email, expect=email, klass=UserNameParameter(), name="username", label="Your name:", expectJson=expect_json) def test_UserNameParameterIsValidMail(self): email = "test@buildbot.net" expect_json = ( '{"name": "username", "fullName": "username", ' '"label": "Your name:", "tablabel": "Your name:", "type": "username", ' '"default": "", "required": false, "multiple": false, "regex": null, ' '"hide": false, "maxsize": null, "size": 30, ' '"need_email": true, "autopopulate": null}' ) self.do_ParameterTest(value=email, expect=email, klass=UserNameParameter(), name="username", label="Your name:", expectJson=expect_json) def test_UserNameParameterIsValidMailBis(self): email = "" expect_json = ( '{"name": "username", "fullName": "username", ' '"label": "Your name:", "tablabel": "Your name:", "type": "username", 
' '"default": "", "required": false, "multiple": false, "regex": null, ' '"hide": false, "maxsize": null, "size": 30, ' '"need_email": true, "autopopulate": null}' ) self.do_ParameterTest(value=email, expect=email, klass=UserNameParameter(), name="username", label="Your name:", expectJson=expect_json) def test_ChoiceParameter(self): self.do_ParameterTest(value='t1', expect='t1', klass=ChoiceStringParameter, choices=[ 't1', 't2'], expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "list", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, "maxsize": null, ' '"choices": ["t1", "t2"], "strict": true, "autopopulate": null}') def test_ChoiceParameterError(self): self.do_ParameterTest(value='t3', expect=CollectedValidationError, expectKind=Exception, klass=ChoiceStringParameter, choices=[ 't1', 't2'], debug=False) def test_ChoiceParameterError_notStrict(self): self.do_ParameterTest(value='t1', expect='t1', strict=False, klass=ChoiceStringParameter, choices=['t1', 't2']) def test_ChoiceParameterMultiple(self): self.do_ParameterTest(value=['t1', 't2'], expect=['t1', 't2'], klass=ChoiceStringParameter, choices=['t1', 't2'], multiple=True, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "list", "default": "", "required": false, ' '"multiple": true, "regex": null, "hide": false, "maxsize": null, ' '"choices": ["t1", "t2"], "strict": true, "autopopulate": null}') def test_ChoiceParameterMultipleError(self): self.do_ParameterTest(value=['t1', 't3'], expect=CollectedValidationError, expectKind=Exception, klass=ChoiceStringParameter, choices=[ 't1', 't2'], multiple=True, debug=False) def test_NestedParameter(self): fields = [ IntParameter(name="foo") ] expect_json = ( '{"name": "p1", "fullName": "p1", "label": "p1", "autopopulate": null, ' '"tablabel": "p1", "type": "nested", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, 
"maxsize": null, ' '"layout": "vertical", "columns": 1, "fields": [{"name": "foo", ' '"fullName": "p1_foo", "label": "foo", "tablabel": "foo", "autopopulate": null, ' '"type": "int", "default": 0, "required": false, "multiple": false, ' '"regex": null, "hide": false, "maxsize": null, "size": 10}]}' ) self.do_ParameterTest(req=dict(p1_foo='123', reason="because"), expect=dict(foo=123), klass=NestedParameter, fields=fields, expectJson=expect_json) def test_NestedNestedParameter(self): fields = [ NestedParameter(name="inner", fields=[ StringParameter(name='str'), AnyPropertyParameter(name='any') ]), IntParameter(name="foo") ] self.do_ParameterTest(req=dict(p1_foo='123', p1_inner_str="bar", p1_inner_any_name="hello", p1_inner_any_value="world", reason="because"), expect=dict( foo=123, inner=dict(str="bar", hello="world")), klass=NestedParameter, fields=fields) def test_NestedParameter_nullname(self): # same as above except "p1" and "any" are skipped fields = [ NestedParameter(name="inner", fields=[ StringParameter(name='str'), AnyPropertyParameter(name='') ]), IntParameter(name="foo"), NestedParameter(name='bar', fields=[ NestedParameter( name='', fields=[AnyPropertyParameter(name='a')]), NestedParameter( name='', fields=[AnyPropertyParameter(name='b')]) ]) ] self.do_ParameterTest(req=dict(foo='123', inner_str="bar", inner_name="hello", inner_value="world", reason="because", bar_a_name="a", bar_a_value="7", bar_b_name="b", bar_b_value="8"), expect=dict(foo=123, inner=dict(str="bar", hello="world"), bar={'a': '7', 'b': '8'}), expectKind=dict, klass=NestedParameter, fields=fields, name='') def test_bad_reason(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': reason must be a StringParameter"): ForceScheduler(name='testsched', builderNames=[], codebases=['bar'], reason="foo") def test_bad_username(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': username must be a StringParameter"): ForceScheduler(name='testsched', 
builderNames=[], codebases=['bar'], username="foo") def test_notstring_name(self): with self.assertRaisesConfigError( "ForceScheduler name must be a unicode string:"): ForceScheduler(name=1234, builderNames=[], codebases=['bar'], username="foo") def test_notidentifier_name(self): # FIXME: this test should be removed eventually when bug 3460 gets a # real fix with self.assertRaisesConfigError( "ForceScheduler name must be an identifier: 'my scheduler'"): ForceScheduler(name='my scheduler', builderNames=[], codebases=['bar'], username="foo") def test_emptystring_name(self): with self.assertRaisesConfigError( "ForceScheduler name must not be empty:"): ForceScheduler(name='', builderNames=[], codebases=['bar'], username="foo") def test_integer_builderNames(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': builderNames must be a list of strings:"): ForceScheduler(name='testsched', builderNames=1234, codebases=['bar'], username="foo") def test_listofints_builderNames(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': builderNames must be a list of strings:"): ForceScheduler(name='testsched', builderNames=[1234], codebases=['bar'], username="foo") def test_listofunicode_builderNames(self): ForceScheduler(name='testsched', builderNames=['a', 'b']) def test_listofmixed_builderNames(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': builderNames must be a list of strings:"): ForceScheduler(name='testsched', builderNames=['test', 1234], codebases=['bar'], username="foo") def test_integer_properties(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': properties must be a list of BaseParameters:"): ForceScheduler(name='testsched', builderNames=[], codebases=['bar'], username="foo", properties=1234) def test_listofints_properties(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': properties must be a list of BaseParameters:"): ForceScheduler(name='testsched', builderNames=[], 
codebases=['bar'], username="foo", properties=[1234, 2345]) def test_listofmixed_properties(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': properties must be a list of BaseParameters:"): ForceScheduler(name='testsched', builderNames=[], codebases=['bar'], username="foo", properties=[BaseParameter(name="test",), 4567]) def test_novalue_to_parameter(self): with self.assertRaisesConfigError( "Use default='1234' instead of value=... to give a default Parameter value"): BaseParameter(name="test", value="1234") buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_manager.py000066400000000000000000000160141413250514000255400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.schedulers import base from buildbot.schedulers import manager class SchedulerManager(unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.next_objectid = 13 self.objectids = {} self.master = mock.Mock() self.master.master = self.master def getObjectId(sched_name, class_name): k = (sched_name, class_name) try: rv = self.objectids[k] except KeyError: rv = self.objectids[k] = self.next_objectid self.next_objectid += 1 return defer.succeed(rv) self.master.db.state.getObjectId = getObjectId def getScheduler(sched_id): return defer.succeed(dict(enabled=True)) self.master.db.schedulers.getScheduler = getScheduler self.new_config = mock.Mock() self.sm = manager.SchedulerManager() yield self.sm.setServiceParent(self.master) yield self.sm.startService() def tearDown(self): if self.sm.running: return self.sm.stopService() return None class Sched(base.BaseScheduler): # changing sch.attr should make a scheduler look "updated" compare_attrs = ('attr', ) already_started = False reconfig_count = 0 def startService(self): assert not self.already_started assert self.master is not None assert self.objectid is not None self.already_started = True return super().startService() @defer.inlineCallbacks def stopService(self): yield super().stopService() assert self.master is not None assert self.objectid is not None def __repr__(self): return "{}(attr={})".format(self.__class__.__name__, self.attr) class ReconfigSched(Sched): def reconfigServiceWithSibling(self, new_config): self.reconfig_count += 1 self.attr = new_config.attr return super().reconfigServiceWithSibling(new_config) class ReconfigSched2(ReconfigSched): pass def makeSched(self, cls, name, attr='alpha'): sch = cls(name=name, builderNames=['x'], properties={}) sch.attr = attr return sch # tests @defer.inlineCallbacks def test_reconfigService_add_and_change_and_remove(self): sch1 = 
self.makeSched(self.ReconfigSched, 'sch1', attr='alpha') self.new_config.schedulers = dict(sch1=sch1) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(sch1.parent, self.sm) self.assertIdentical(sch1.master, self.master) self.assertEqual(sch1.reconfig_count, 1) sch1_new = self.makeSched(self.ReconfigSched, 'sch1', attr='beta') sch2 = self.makeSched(self.ReconfigSched, 'sch2', attr='alpha') self.new_config.schedulers = dict(sch1=sch1_new, sch2=sch2) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) # sch1 is still the active scheduler, and has been reconfig'd, # and has the correct attribute self.assertIdentical(sch1.parent, self.sm) self.assertIdentical(sch1.master, self.master) self.assertEqual(sch1.attr, 'beta') self.assertEqual(sch1.reconfig_count, 2) self.assertIdentical(sch1_new.parent, None) self.assertIdentical(sch1_new.master, None) self.assertIdentical(sch2.parent, self.sm) self.assertIdentical(sch2.master, self.master) self.new_config.schedulers = {} self.assertEqual(sch1.running, True) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertEqual(sch1.running, False) @defer.inlineCallbacks def test_reconfigService_class_name_change(self): sch1 = self.makeSched(self.ReconfigSched, 'sch1') self.new_config.schedulers = dict(sch1=sch1) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(sch1.parent, self.sm) self.assertIdentical(sch1.master, self.master) self.assertEqual(sch1.reconfig_count, 1) sch1_new = self.makeSched(self.ReconfigSched2, 'sch1') self.new_config.schedulers = dict(sch1=sch1_new) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) # sch1 had its class name change, so sch1_new is now the active # instance self.assertIdentical(sch1_new.parent, self.sm) self.assertIdentical(sch1_new.master, self.master) @defer.inlineCallbacks def test_reconfigService_not_reconfigurable(self): sch1 = self.makeSched(self.Sched, 'sch1', 
attr='beta') self.new_config.schedulers = dict(sch1=sch1) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(sch1.parent, self.sm) self.assertIdentical(sch1.master, self.master) sch1_new = self.makeSched(self.Sched, 'sch1', attr='alpha') self.new_config.schedulers = dict(sch1=sch1_new) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) # sch1 had parameter change but is not reconfigurable, so sch1_new is now the active # instance self.assertEqual(sch1_new.running, True) self.assertEqual(sch1.running, False) self.assertIdentical(sch1_new.parent, self.sm) self.assertIdentical(sch1_new.master, self.master) @defer.inlineCallbacks def test_reconfigService_not_reconfigurable_no_change(self): sch1 = self.makeSched(self.Sched, 'sch1', attr='beta') self.new_config.schedulers = dict(sch1=sch1) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(sch1.parent, self.sm) self.assertIdentical(sch1.master, self.master) sch1_new = self.makeSched(self.Sched, 'sch1', attr='beta') self.new_config.schedulers = dict(sch1=sch1_new) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) # sch1 had its class name change, so sch1_new is now the active # instance self.assertIdentical(sch1_new.parent, None) self.assertEqual(sch1_new.running, False) self.assertIdentical(sch1_new.master, None) self.assertEqual(sch1.running, True) buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_timed_Nightly.py000066400000000000000000000460761413250514000267410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import time import mock from twisted.internet import defer from twisted.python import log from twisted.trial import unittest from buildbot.changes import filter from buildbot.schedulers import timed from buildbot.test import fakedb from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class Nightly(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): try: datetime.datetime.fromtimestamp(1) except OSError: skip = ("Python 3.6 bug on Windows: " "https://bugs.python.org/issue29097") OBJECTID = 132 SCHEDULERID = 32 # not all timezones are even multiples of 1h from GMT. This variable # holds the number of seconds ahead of the hour for the current timezone. # This is then added to the clock before each test is run (to get to 0 # minutes past the hour) and subtracted before the time offset is reported. 
localtime_offset = time.timezone % 3600 def makeScheduler(self, **kwargs): sched = self.attachScheduler(timed.Nightly(**kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True) self.master.db.insertTestData( [fakedb.Builder(name=bname) for bname in kwargs.get("builderNames", [])]) # add a Clock to help checking timing issues sched._reactor = self.reactor self.reactor.advance(self.localtime_offset) # get to 0 min past the hour self.addBuildsetCallTimes = [] def recordTimes(timeList, method): def timedMethod(**kw): timeList.append(self.reactor.seconds() - self.localtime_offset) return method(**kw) return timedMethod sched.addBuildsetForSourceStampsWithDefaults = recordTimes( self.addBuildsetCallTimes, sched.addBuildsetForSourceStampsWithDefaults) sched.addBuildsetForChanges = recordTimes( self.addBuildsetCallTimes, sched.addBuildsetForChanges) # see self.assertConsumingChanges self.consumingChanges = None def startConsumingChanges(**kwargs): self.consumingChanges = kwargs return defer.succeed(None) sched.startConsumingChanges = startConsumingChanges return sched def mkbs(self, **kwargs): # create buildset for expected_buildset in assertBuildset. bs = dict(reason="The Nightly scheduler named 'test' triggered this build", external_idstring='', sourcestampsetid=100, properties=[('scheduler', ('test', 'Scheduler'))]) bs.update(kwargs) return bs def mkss(self, **kwargs): # create sourcestamp for expected_sourcestamps in assertBuildset. ss = dict( branch='master', project='', repository='', sourcestampsetid=100) ss.update(kwargs) return ss def mkch(self, **kwargs): # create changeset and insert in database. 
chd = dict(branch='master', project='', repository='') chd.update(kwargs) ch = self.makeFakeChange(**chd) # fakedb.Change requires changeid instead of number chd['changeid'] = chd['number'] del chd['number'] self.db.insertTestData([fakedb.Change(**chd)]) return ch def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def assertConsumingChanges(self, **kwargs): self.assertEqual(self.consumingChanges, kwargs) # Tests def test_constructor_no_reason(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default') self.assertEqual( sched.reason, "The Nightly scheduler named 'test' triggered this build") def test_constructor_reason(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default', reason="hourly") self.assertEqual(sched.reason, "hourly") def test_constructor_change_filter(self): sched = self.makeScheduler(name='test', builderNames=['test'], branch=None, change_filter=filter.ChangeFilter(category_re="fo+o")) assert sched.change_filter def test_constructor_month(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default', month='1') self.assertEqual(sched.month, "1") @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default') expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def test_disabled_activate(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default') yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.activate() self.assertEqual(r, None) @defer.inlineCallbacks def 
test_disabled_deactivate(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default') yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.deactivate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_start_build(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default') yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.startBuild() self.assertEqual(r, None) # end-to-end tests: let's see the scheduler in action @defer.inlineCallbacks def test_iterations_simple(self): # note that Nightly works in local time, but the TestReactor always # starts at midnight UTC, so be careful not to use times that are # timezone dependent -- stick to minutes-past-the-half-hour, as some # timezones are multiples of 30 minutes off from UTC sched = self.makeScheduler(name='test', builderNames=['test'], branch=None, minute=[10, 20, 21, 40, 50, 51]) # add a change classification self.db.schedulers.fakeClassifications(self.SCHEDULERID, {19: True}) yield sched.activate() # check that the classification has been flushed, since this # invocation has not requested onlyIfChanged self.db.schedulers.assertClassifications(self.SCHEDULERID, {}) self.reactor.advance(0) while self.reactor.seconds() < self.localtime_offset + 30 * 60: self.reactor.advance(60) self.assertEqual(self.addBuildsetCallTimes, [600, 1200, 1260]) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'sourcestamps': [{'codebase': ''}], 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False}), ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'sourcestamps': [{'codebase': ''}], 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False}), 
('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'sourcestamps': [{'codebase': ''}], 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', last_build=1260 + self.localtime_offset) yield sched.deactivate() def test_iterations_simple_with_branch(self): # see timezone warning above sched = self.makeScheduler(name='test', builderNames=['test'], branch='master', minute=[5, 35]) sched.activate() self.reactor.advance(0) while self.reactor.seconds() < self.localtime_offset + 10 * 60: self.reactor.advance(60) self.assertEqual(self.addBuildsetCallTimes, [300]) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'sourcestamps': [{'codebase': ''}], 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', last_build=300 + self.localtime_offset) d = sched.deactivate() return d def do_test_iterations_onlyIfChanged(self, *changes_at, **kwargs): fII = mock.Mock(name='fII') self.makeScheduler(name='test', builderNames=['test'], branch=None, minute=[5, 25, 45], onlyIfChanged=True, fileIsImportant=fII, **kwargs) return self.do_test_iterations_onlyIfChanged_test(fII, *changes_at) @defer.inlineCallbacks def do_test_iterations_onlyIfChanged_test(self, fII, *changes_at): yield self.sched.activate() # check that the scheduler has started to consume changes self.assertConsumingChanges(fileIsImportant=fII, change_filter=None, onlyImportant=False) # manually run the clock forward through a half-hour, allowing any # excitement to take place changes_at = list(changes_at) self.reactor.advance(0) # let it trigger the first build while self.reactor.seconds() < self.localtime_offset + 30 * 60: # inject any new changes.. 
while (changes_at and self.reactor.seconds() >= self.localtime_offset + changes_at[0][0]): when, newchange, important = changes_at.pop(0) self.db.changes.fakeAddChangeInstance(newchange) yield self.sched.gotChange(newchange, important).addErrback(log.err) # and advance the clock by a minute self.reactor.advance(60) @defer.inlineCallbacks def test_iterations_onlyIfChanged_no_changes(self): yield self.do_test_iterations_onlyIfChanged() self.assertEqual(self.addBuildsetCalls, []) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_unimp_changes(self): yield self.do_test_iterations_onlyIfChanged( (60, mock.Mock(), False), (600, mock.Mock(), False)) self.assertEqual(self.addBuildsetCalls, []) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_off_branch_changes(self): yield self.do_test_iterations_onlyIfChanged( (60, self.makeFakeChange(number=1, branch='testing'), True), (1700, self.makeFakeChange(number=2, branch='staging'), True)) self.assertEqual(self.addBuildsetCalls, []) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_mixed_changes(self): yield self.do_test_iterations_onlyIfChanged( (120, self.makeFakeChange(number=3, branch=None), False), (130, self.makeFakeChange(number=4, branch='offbranch'), True), (1200, self.makeFakeChange(number=5, branch=None), True), (1201, self.makeFakeChange(number=6, branch=None), False), (1202, self.makeFakeChange(number=7, branch='offbranch'), True)) # note that the changeid list includes the unimportant changes, but not the # off-branch changes, and note that no build took place at 300s, as no important # changes had yet arrived 
self.assertEqual(self.addBuildsetCallTimes, [1500]) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForChanges', { 'builderNames': None, 'changeids': [3, 5, 6], 'external_idstring': None, 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_createAbsoluteSourceStamps_oneChanged(self): # Test createAbsoluteSourceStamps=True when only one codebase has # changed yield self.do_test_iterations_onlyIfChanged( (120, self.makeFakeChange( number=3, codebase='a', revision='2345:bcd'), True), codebases={'a': {'repository': "", 'branch': 'master'}, 'b': {'repository': "", 'branch': 'master'}}, createAbsoluteSourceStamps=True) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) # addBuildsetForChanges calls getCodebase, so this isn't too # interesting self.assertEqual(self.addBuildsetCallTimes, [300]) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForChanges', { 'builderNames': None, 'changeids': [3], 'external_idstring': None, 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', lastCodebases={ 'a': dict(revision='2345:bcd', branch=None, repository='', lastChange=3)}) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_createAbsoluteSourceStamps_oneChanged_loadOther(self): # Test createAbsoluteSourceStamps=True when only one codebase has changed, # but the other was previously changed fII = mock.Mock(name='fII') self.makeScheduler(name='test', builderNames=['test'], branch=None, minute=[5, 25, 45], onlyIfChanged=True, fileIsImportant=fII, codebases={'a': {'repository': "", 'branch': 'master'}, 'b': {'repository': "", 'branch': 'master'}}, 
createAbsoluteSourceStamps=True) self.db.insertTestData([ fakedb.Object(id=self.OBJECTID, name='test', class_name='Nightly'), fakedb.ObjectState(objectid=self.OBJECTID, name='lastCodebases', value_json='{"b": {"branch": "master", "repository": "B", "revision": "1234:abc", "lastChange": 2}}')]) # noqa pylint: disable=line-too-long change = self.makeFakeChange(number=3, codebase='a', revision='2345:bcd') yield self.do_test_iterations_onlyIfChanged_test(fII, (120, change, True)) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) # addBuildsetForChanges calls getCodebase, so this isn't too # interesting self.assertEqual(self.addBuildsetCallTimes, [300]) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForChanges', { 'builderNames': None, 'changeids': [3], 'external_idstring': None, 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', lastCodebases={ 'a': dict(revision='2345:bcd', branch=None, repository='', lastChange=3), 'b': dict(revision='1234:abc', branch="master", repository='B', lastChange=2)}) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_createAbsoluteSourceStamps_bothChanged(self): # Test createAbsoluteSourceStamps=True when both codebases have changed yield self.do_test_iterations_onlyIfChanged( (120, self.makeFakeChange( number=3, codebase='a', revision='2345:bcd'), True), (122, self.makeFakeChange( number=4, codebase='b', revision='1234:abc'), True), codebases={'a': {'repository': "", 'branch': 'master'}, 'b': {'repository': "", 'branch': 'master'}}, createAbsoluteSourceStamps=True) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) # addBuildsetForChanges calls getCodebase, so this isn't too # interesting self.assertEqual(self.addBuildsetCallTimes, [300]) self.assertEqual(self.addBuildsetCalls, [ 
('addBuildsetForChanges', { 'builderNames': None, 'changeids': [3, 4], 'external_idstring': None, 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', lastCodebases={ 'a': dict(revision='2345:bcd', branch=None, repository='', lastChange=3), 'b': dict(revision='1234:abc', branch=None, repository='', lastChange=4)}) yield self.sched.deactivate() buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_timed_NightlyBase.py000066400000000000000000000363751413250514000275350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import time from twisted.internet import defer from twisted.trial import unittest from buildbot.schedulers import timed from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin try: from multiprocessing import Process assert Process except ImportError: Process = None class NightlyBase(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): """detailed getNextBuildTime tests""" OBJECTID = 133 SCHEDULERID = 33 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def makeScheduler(self, firstBuildDuration=0, **kwargs): return self.attachScheduler(timed.NightlyBase(**kwargs), self.OBJECTID, self.SCHEDULERID) @defer.inlineCallbacks def do_getNextBuildTime_test(self, sched, *expectations): for lastActuated, expected in expectations: # convert from tuples to epoch time (in local timezone) lastActuated_ep, expected_ep = [ time.mktime(t + (0,) * (8 - len(t)) + (-1,)) for t in (lastActuated, expected)] got_ep = yield sched.getNextBuildTime(lastActuated_ep) self.assertEqual(got_ep, expected_ep, "{} -> {} != {}".format(lastActuated, time.localtime(got_ep), expected)) def test_getNextBuildTime_hourly(self): sched = self.makeScheduler(name='test', builderNames=['test']) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 0, 0), (2011, 1, 1, 4, 0, 0)), ((2011, 1, 1, 3, 15, 0), (2011, 1, 1, 4, 0, 0)), ((2011, 1, 1, 3, 15, 1), (2011, 1, 1, 4, 0, 0)), ((2011, 1, 1, 3, 59, 1), (2011, 1, 1, 4, 0, 0)), ((2011, 1, 1, 3, 59, 59), (2011, 1, 1, 4, 0, 0)), ((2011, 1, 1, 23, 22, 22), (2011, 1, 2, 0, 0, 0)), ((2011, 1, 1, 23, 59, 0), (2011, 1, 2, 0, 0, 0)), ) def test_getNextBuildTime_minutes_single(self): # basically the same as .._hourly sched = self.makeScheduler(name='test', builderNames=['test'], minute=4) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 0, 0), (2011, 1, 1, 3, 4, 0)), ((2011, 1, 1, 3, 15, 0), (2011, 1, 1, 4, 4, 0)), ) def 
test_getNextBuildTime_minutes_multiple(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[4, 34]) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 0, 0), (2011, 1, 1, 3, 4, 0)), ((2011, 1, 1, 3, 15, 0), (2011, 1, 1, 3, 34, 0)), ((2011, 1, 1, 3, 34, 0), (2011, 1, 1, 4, 4, 0)), ((2011, 1, 1, 3, 59, 1), (2011, 1, 1, 4, 4, 0)), ) def test_getNextBuildTime_minutes_star(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute='*') return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 11, 30), (2011, 1, 1, 3, 12, 0)), ((2011, 1, 1, 3, 12, 0), (2011, 1, 1, 3, 13, 0)), ((2011, 1, 1, 3, 59, 0), (2011, 1, 1, 4, 0, 0)), ) def test_getNextBuildTime_hours_single(self): sched = self.makeScheduler(name='test', builderNames=['test'], hour=4) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 0), (2011, 1, 1, 4, 0)), ((2011, 1, 1, 13, 0), (2011, 1, 2, 4, 0)), ) def test_getNextBuildTime_hours_multiple(self): sched = self.makeScheduler(name='test', builderNames=['test'], hour=[7, 19]) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 0), (2011, 1, 1, 7, 0)), ((2011, 1, 1, 7, 1), (2011, 1, 1, 19, 0)), ((2011, 1, 1, 18, 59), (2011, 1, 1, 19, 0)), ((2011, 1, 1, 19, 59), (2011, 1, 2, 7, 0)), ) def test_getNextBuildTime_hours_minutes(self): sched = self.makeScheduler(name='test', builderNames=['test'], hour=13, minute=19) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 11), (2011, 1, 1, 13, 19)), ((2011, 1, 1, 13, 19), (2011, 1, 2, 13, 19)), ((2011, 1, 1, 23, 59), (2011, 1, 2, 13, 19)), ) def test_getNextBuildTime_month_single(self): sched = self.makeScheduler(name='test', builderNames=['test'], month=3) return self.do_getNextBuildTime_test(sched, ((2011, 2, 27, 3, 11), (2011, 3, 1, 0, 0)), # still hourly! 
((2011, 3, 1, 1, 11), (2011, 3, 1, 2, 0)), ) def test_getNextBuildTime_month_multiple(self): sched = self.makeScheduler(name='test', builderNames=['test'], month=[4, 6]) return self.do_getNextBuildTime_test(sched, ((2011, 3, 30, 3, 11), (2011, 4, 1, 0, 0)), # still hourly! ((2011, 4, 1, 1, 11), (2011, 4, 1, 2, 0)), ((2011, 5, 29, 3, 11), (2011, 6, 1, 0, 0)), ) def test_getNextBuildTime_month_dayOfMonth(self): sched = self.makeScheduler(name='test', builderNames=['test'], month=[3, 6], dayOfMonth=[15]) return self.do_getNextBuildTime_test(sched, ((2011, 2, 12, 3, 11), (2011, 3, 15, 0, 0)), ((2011, 3, 12, 3, 11), (2011, 3, 15, 0, 0)), ) def test_getNextBuildTime_dayOfMonth_single(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfMonth=10) return self.do_getNextBuildTime_test(sched, ((2011, 1, 9, 3, 0), (2011, 1, 10, 0, 0)), # still hourly! ((2011, 1, 10, 3, 0), (2011, 1, 10, 4, 0)), ((2011, 1, 30, 3, 0), (2011, 2, 10, 0, 0)), ((2011, 12, 30, 11, 0), (2012, 1, 10, 0, 0)), ) def test_getNextBuildTime_dayOfMonth_multiple(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfMonth=[10, 20, 30]) return self.do_getNextBuildTime_test(sched, ((2011, 1, 9, 22, 0), (2011, 1, 10, 0, 0)), ((2011, 1, 19, 22, 0), (2011, 1, 20, 0, 0)), ((2011, 1, 29, 22, 0), (2011, 1, 30, 0, 0)), # no Feb 30! 
((2011, 2, 29, 22, 0), (2011, 3, 10, 0, 0)), ) def test_getNextBuildTime_dayOfMonth_hours_minutes(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfMonth=15, hour=20, minute=30) return self.do_getNextBuildTime_test(sched, ((2011, 1, 13, 22, 19), (2011, 1, 15, 20, 30)), ((2011, 1, 15, 19, 19), (2011, 1, 15, 20, 30)), ((2011, 1, 15, 20, 29), (2011, 1, 15, 20, 30)), ) def test_getNextBuildTime_dayOfWeek_single(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfWeek=1) # Tuesday (2011-1-1 was a Saturday) return self.do_getNextBuildTime_test(sched, ((2011, 1, 3, 22, 19), (2011, 1, 4, 0, 0)), # still hourly! ((2011, 1, 4, 19, 19), (2011, 1, 4, 20, 0)), ) def test_getNextBuildTime_dayOfWeek_single_as_string(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfWeek="1") # Tuesday (2011-1-1 was a Saturday) return self.do_getNextBuildTime_test(sched, ((2011, 1, 3, 22, 19), (2011, 1, 4, 0, 0)), # still hourly! ((2011, 1, 4, 19, 19), (2011, 1, 4, 20, 0)), ) def test_getNextBuildTime_dayOfWeek_multiple_as_string(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfWeek="tue,3") # Tuesday, Thursday (2011-1-1 was a Saturday) return self.do_getNextBuildTime_test(sched, ((2011, 1, 3, 22, 19), (2011, 1, 4, 0, 0)), # still hourly! ((2011, 1, 4, 19, 19), (2011, 1, 4, 20, 0)), ((2011, 1, 5, 22, 19), (2011, 1, 6, 0, 0)), # still hourly! 
((2011, 1, 6, 19, 19), (2011, 1, 6, 20, 0)), ) def test_getNextBuildTime_dayOfWeek_multiple_hours(self): # Tuesday, Thursday (2011-1-1 was a Saturday) sched = self.makeScheduler(name='test', builderNames=['test'], dayOfWeek=[1, 3], hour=1) return self.do_getNextBuildTime_test(sched, ((2011, 1, 3, 22, 19), (2011, 1, 4, 1, 0)), ((2011, 1, 4, 22, 19), (2011, 1, 6, 1, 0)), ) def test_getNextBuildTime_dayOfWeek_dayOfMonth(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfWeek=[1, 4], dayOfMonth=5, hour=1) return self.do_getNextBuildTime_test(sched, # Tues ((2011, 1, 3, 22, 19), (2011, 1, 4, 1, 0)), # 5th ((2011, 1, 4, 22, 19), (2011, 1, 5, 1, 0)), # Thurs ((2011, 1, 5, 22, 19), (2011, 1, 7, 1, 0)), ) buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_timed_NightlyTriggerable.py000066400000000000000000000303061413250514000310760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import datetime from twisted.internet import task from twisted.trial import unittest from buildbot.process import properties from buildbot.schedulers import timed from buildbot.test import fakedb from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class NightlyTriggerable(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): try: datetime.datetime.fromtimestamp(1) except OSError: skip = ("Python 3.6 bug on Windows: " "https://bugs.python.org/issue29097") SCHEDULERID = 327 OBJECTID = 1327 def makeScheduler(self, firstBuildDuration=0, **kwargs): sched = self.attachScheduler(timed.NightlyTriggerable(**kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True, createBuilderDB=True) # add a Clock to help checking timing issues self.clock = sched._reactor = task.Clock() return sched def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() # utilities def assertBuildsetAdded(self, sourcestamps=None, properties=None): if sourcestamps is None: sourcestamps = {} if properties is None: properties = {} properties['scheduler'] = ('test', 'Scheduler') self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=None, # uses the default properties=properties, reason="The NightlyTriggerable scheduler named 'test' " "triggered this build", sourcestamps=sourcestamps, waited_for=False)), ]) self.addBuildsetCalls = [] def assertNoBuildsetAdded(self): self.assertEqual(self.addBuildsetCalls, []) # tests def test_constructor_no_reason(self): sched = self.makeScheduler(name='test', builderNames=['test']) self.assertEqual( sched.reason, "The NightlyTriggerable scheduler named 'test' triggered this build") def test_constructor_reason(self): sched = self.makeScheduler( name='test', builderNames=['test'], reason="hourlytriggerable") self.assertEqual(sched.reason, "hourlytriggerable") def 
test_constructor_month(self): sched = self.makeScheduler( name='test', builderNames=['test'], month='1') self.assertEqual(sched.month, "1") def test_timer_noBuilds(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5]) sched.activate() self.clock.advance(60 * 60) # Run for 1h self.assertEqual(self.addBuildsetCalls, []) def test_timer_oneTrigger(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) sched.activate() sched.trigger(False, [ dict(revision='myrev', branch='br', project='p', repository='r', codebase='cb'), ], set_props=None) self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev'), ]) def test_timer_twoTriggers(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) sched.activate() sched.trigger(False, [ dict(codebase='cb', revision='myrev1', branch='br', project='p', repository='r') ], set_props=None) sched.trigger(False, [ dict(codebase='cb', revision='myrev2', branch='br', project='p', repository='r') ], set_props=None) self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', # builds the second trigger's revision revision='myrev2'), ]) def test_timer_oneTrigger_then_noBuild(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) sched.activate() sched.trigger(False, [ dict(codebase='cb', revision='myrev', branch='br', project='p', repository='r') ], set_props=None) self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev'), ]) self.clock.advance(60 * 60) # Run for 1h # no trigger, so the second did not build 
self.assertNoBuildsetAdded() def test_timer_oneTriggers_then_oneTrigger(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) sched.activate() sched.trigger(False, [ dict(codebase='cb', revision='myrev1', branch='br', project='p', repository='r') ], set_props=None) self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev1'), ]) sched.trigger(False, [ dict(codebase='cb', revision='myrev2', branch='br', project='p', repository='r') ], set_props=None) self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev2'), ]) def test_savedTrigger(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) value_json = \ '[ [ {"codebase": "cb", "project": "p", "repository": "r", ' \ '"branch": "br", "revision": "myrev"} ], {}, null, null ]' self.db.insertTestData([ fakedb.Object( id=self.SCHEDULERID, name='test', class_name='NightlyTriggerable'), fakedb.ObjectState(objectid=self.SCHEDULERID, name='lastTrigger', value_json=value_json), ]) sched.activate() self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev'), ]) def test_savedTrigger_dict(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) value_json = \ '[ { "cb": {"codebase": "cb", "project": "p", "repository": "r", ' \ '"branch": "br", "revision": "myrev"} }, {}, null, null ]' self.db.insertTestData([ fakedb.Object( id=self.SCHEDULERID, name='test', class_name='NightlyTriggerable'), fakedb.ObjectState(objectid=self.SCHEDULERID, name='lastTrigger', value_json=value_json), ]) sched.activate() 
self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev'), ]) def test_saveTrigger(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) self.db.insertTestData([ fakedb.Object( id=self.SCHEDULERID, name='test', class_name='NightlyTriggerable'), ]) sched.activate() (idsDeferred, d) = sched.trigger(False, [ dict(codebase='cb', revision='myrev', branch='br', project='p', repository='r'), ], set_props=None) @d.addCallback def cb(_): self.db.state.assertState(self.SCHEDULERID, lastTrigger=[[ dict(codebase='cb', revision='myrev', branch='br', project='p', repository='r'), ], {}, None, None]) return d def test_saveTrigger_noTrigger(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) self.db.insertTestData([ fakedb.Object( id=self.SCHEDULERID, name='test', class_name='NightlyTriggerable'), ]) sched.activate() (idsDeferre, d) = sched.trigger(False, [ dict(codebase='cb', revision='myrev', branch='br', project='p', repository='r'), ], set_props=None) self.clock.advance(60 * 60) # Run for 1h @d.addCallback def cb(_): self.db.state.assertState(self.SCHEDULERID, lastTrigger=None) return d def test_triggerProperties(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) self.db.insertTestData([ fakedb.Object( id=self.SCHEDULERID, name='test', class_name='NightlyTriggerable'), ]) sched.activate() sched.trigger(False, [ dict(codebase='cb', revision='myrev', branch='br', project='p', repository='r'), ], properties.Properties(testprop='test')) self.db.state.assertState(self.SCHEDULERID, lastTrigger=[[ dict(codebase='cb', revision='myrev', branch='br', project='p', repository='r'), ], {'testprop': ['test', 'TEST']}, None, None]) self.clock.advance(60 * 60) # Run 
for 1h self.assertBuildsetAdded( properties=dict(testprop=('test', 'TEST')), sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev'), ]) def test_savedProperties(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) value_json = \ '[ [ {"codebase": "cb", "project": "p", "repository": "r", ' \ '"branch": "br", "revision": "myrev"} ], ' \ '{"testprop": ["test", "TEST"]}, null, null ]' self.db.insertTestData([ fakedb.Object( id=self.SCHEDULERID, name='test', class_name='NightlyTriggerable'), fakedb.ObjectState(objectid=self.SCHEDULERID, name='lastTrigger', value_json=value_json), ]) sched.activate() self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded( properties={'testprop': ('test', 'TEST')}, sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev'), ]) buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_timed_Periodic.py000066400000000000000000000230221413250514000270430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.schedulers import timed from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class TestException(Exception): pass class Periodic(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 23 SCHEDULERID = 3 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def makeScheduler(self, firstBuildDuration=0, firstBuildError=False, exp_branch=None, **kwargs): self.sched = sched = timed.Periodic(**kwargs) sched._reactor = self.reactor self.attachScheduler(self.sched, self.OBJECTID, self.SCHEDULERID) # keep track of builds in self.events self.events = [] def addBuildsetForSourceStampsWithDefaults(reason, sourcestamps, waited_for=False, properties=None, builderNames=None, **kw): self.assertIn('Periodic scheduler named', reason) # TODO: check branch isFirst = (self.events == []) if self.reactor.seconds() == 0 and firstBuildError: raise TestException() self.events.append('B@%d' % self.reactor.seconds()) if isFirst and firstBuildDuration: d = defer.Deferred() self.reactor.callLater(firstBuildDuration, d.callback, None) return d return defer.succeed(None) sched.addBuildsetForSourceStampsWithDefaults = addBuildsetForSourceStampsWithDefaults # handle state locally self.state = {} def getState(k, default): return defer.succeed(self.state.get(k, default)) sched.getState = getState def setState(k, v): self.state[k] = v return defer.succeed(None) sched.setState = setState return sched # tests def test_constructor_invalid(self): with self.assertRaises(config.ConfigErrors): timed.Periodic(name='test', builderNames=['test'], periodicBuildTimer=-2) def test_constructor_no_reason(self): sched = self.makeScheduler( name='test', builderNames=['test'], periodicBuildTimer=10) self.assertEqual( sched.reason, "The Periodic scheduler named 'test' triggered this build") def 
test_constructor_reason(self): sched = self.makeScheduler( name='test', builderNames=['test'], periodicBuildTimer=10, reason="periodic") self.assertEqual(sched.reason, "periodic") def test_iterations_simple(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) sched.activate() self.reactor.advance(0) # let it trigger the first build while self.reactor.seconds() < 30: self.reactor.advance(1) self.assertEqual(self.events, ['B@0', 'B@13', 'B@26']) self.assertEqual(self.state.get('last_build'), 26) d = sched.deactivate() return d def test_iterations_simple_branch(self): sched = self.makeScheduler(exp_branch='newfeature', name='test', builderNames=['test'], periodicBuildTimer=13, branch='newfeature') sched.activate() self.reactor.advance(0) # let it trigger the first build while self.reactor.seconds() < 30: self.reactor.advance(1) self.assertEqual(self.events, ['B@0', 'B@13', 'B@26']) self.assertEqual(self.state.get('last_build'), 26) d = sched.deactivate() return d def test_iterations_long(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=10, firstBuildDuration=15) # takes a while to start a build sched.activate() self.reactor.advance(0) # let it trigger the first (longer) build while self.reactor.seconds() < 40: self.reactor.advance(1) self.assertEqual(self.events, ['B@0', 'B@15', 'B@25', 'B@35']) self.assertEqual(self.state.get('last_build'), 35) d = sched.deactivate() return d @defer.inlineCallbacks def test_start_build_error(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=10, firstBuildError=True) # error during first build start yield sched.activate() self.reactor.advance(0) # let it trigger the first (error) build while self.reactor.seconds() < 40: self.reactor.advance(1) self.assertEqual(self.events, ['B@10', 'B@20', 'B@30', 'B@40']) self.assertEqual(self.state.get('last_build'), 40) self.assertEqual(1, 
len(self.flushLoggedErrors(TestException))) yield sched.deactivate() def test_iterations_stop_while_starting_build(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13, firstBuildDuration=6) # takes a while to start a build sched.activate() self.reactor.advance(0) # let it trigger the first (longer) build self.reactor.advance(3) # get partway into that build d = sched.deactivate() # begin stopping the service d.addCallback( lambda _: self.events.append('STOP@%d' % self.reactor.seconds())) # run the clock out while self.reactor.seconds() < 40: self.reactor.advance(1) # note that the deactivate completes after the first build completes, and no # subsequent builds occur self.assertEqual(self.events, ['B@0', 'STOP@6']) self.assertEqual(self.state.get('last_build'), 0) return d def test_iterations_with_initial_state(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) # so next build should start in 6s self.state['last_build'] = self.reactor.seconds() - 7 sched.activate() self.reactor.advance(0) # let it trigger the first build while self.reactor.seconds() < 30: self.reactor.advance(1) self.assertEqual(self.events, ['B@6', 'B@19']) self.assertEqual(self.state.get('last_build'), 19) d = sched.deactivate() return d @defer.inlineCallbacks def test_getNextBuildTime_None(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) # given None, build right away t = yield sched.getNextBuildTime(None) self.assertEqual(t, 0) @defer.inlineCallbacks def test_getNextBuildTime_given(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) # given a time, add the periodicBuildTimer to it t = yield sched.getNextBuildTime(20) self.assertEqual(t, 33) @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) expectedValue = not sched.enabled yield 
sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def test_disabled_activate(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.activate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_deactivate(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.deactivate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_start_build(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.startBuild() self.assertEqual(r, None) buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_timed_Timed.py000066400000000000000000000035121413250514000263510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import task from twisted.trial import unittest from buildbot.schedulers import timed from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class Timed(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 928754 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() class Subclass(timed.Timed): def getNextBuildTime(self, lastActuation): self.got_lastActuation = lastActuation return defer.succeed((lastActuation or 1000) + 60) def startBuild(self): self.started_build = True return defer.succeed(None) def makeScheduler(self, firstBuildDuration=0, **kwargs): sched = self.attachScheduler(self.Subclass(**kwargs), self.OBJECTID) self.clock = sched._reactor = task.Clock() return sched # tests # note that most of the heavy-lifting for testing this class is handled by # the subclasses' tests, as that's the more natural place for it buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_triggerable.py000066400000000000000000000324111413250514000264140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from twisted.trial import unittest from buildbot.process import properties from buildbot.schedulers import triggerable from buildbot.test import fakedb from buildbot.test.util import interfaces from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class TriggerableInterfaceTest(unittest.TestCase, interfaces.InterfaceTests): def test_interface(self): self.assertInterfacesImplemented(triggerable.Triggerable) class Triggerable(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 33 SCHEDULERID = 13 def setUp(self): self.setUpTestReactor() # Necessary to get an assertable submitted_at time. self.reactor.advance(946684799) self.setUpScheduler() self.subscription = None def tearDown(self): self.tearDownScheduler() def makeScheduler(self, overrideBuildsetMethods=False, **kwargs): self.master.db.insertTestData([fakedb.Builder(id=77, name='b')]) sched = self.attachScheduler( triggerable.Triggerable(name='n', builderNames=['b'], **kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=overrideBuildsetMethods) return sched @defer.inlineCallbacks def assertTriggeredBuildset(self, idsDeferred, waited_for, properties=None, sourcestamps=None): if properties is None: properties = {} bsid, brids = yield idsDeferred properties.update({'scheduler': ('n', 'Scheduler')}) self.assertEqual( self.master.db.buildsets.buildsets[bsid]['properties'], properties, ) buildset = yield self.master.db.buildsets.getBuildset(bsid) from datetime import datetime from buildbot.util import UTC ssids = buildset.pop('sourcestamps') self.assertEqual( buildset, { 'bsid': bsid, 'complete': False, 'complete_at': None, 'external_idstring': None, 'reason': "The Triggerable scheduler named 'n' triggered this build", 'results': -1, 'submitted_at': datetime(1999, 12, 31, 23, 59, 59, tzinfo=UTC), 'parent_buildid': None, 'parent_relationship': 
None, } ) actual_sourcestamps = yield defer.gatherResults([ self.master.db.sourcestamps.getSourceStamp(ssid) for ssid in ssids ]) self.assertEqual(len(sourcestamps), len(actual_sourcestamps)) for expected_ss, actual_ss in zip(sourcestamps, actual_sourcestamps): actual_ss = actual_ss.copy() # We don't care if the actual sourcestamp has *more* attributes # than expected. for key in list(actual_ss.keys()): if key not in expected_ss: del actual_ss[key] self.assertEqual(expected_ss, actual_ss) for brid in brids.values(): buildrequest = yield self.master.db.buildrequests.getBuildRequest(brid) self.assertEqual( buildrequest, { 'buildrequestid': brid, 'buildername': 'b', 'builderid': 77, 'buildsetid': bsid, 'claimed': False, 'claimed_at': None, 'complete': False, 'complete_at': None, 'claimed_by_masterid': None, 'priority': 0, 'results': -1, 'submitted_at': datetime(1999, 12, 31, 23, 59, 59, tzinfo=UTC), 'waited_for': waited_for } ) def sendCompletionMessage(self, bsid, results=3): self.master.mq.callConsumer(('buildsets', str(bsid), 'complete'), dict( bsid=bsid, submitted_at=100, complete=True, complete_at=200, external_idstring=None, reason='triggering', results=results, sourcestamps=[], parent_buildid=None, parent_relationship=None, )) # tests # NOTE: these tests take advantage of the fact that all of the fake # scheduler operations are synchronous, and thus do not return a Deferred. # The Deferred from trigger() is completely processed before this test # method returns. 
def test_constructor_no_reason(self): sched = self.makeScheduler() self.assertEqual( sched.reason, None) # default reason is dynamic def test_constructor_explicit_reason(self): sched = self.makeScheduler(reason="Because I said so") self.assertEqual(sched.reason, "Because I said so") def test_trigger(self): sched = self.makeScheduler(codebases={'cb': {'repository': 'r'}}) # no subscription should be in place yet self.assertEqual(sched.master.mq.qrefs, []) # trigger the scheduler, exercising properties while we're at it waited_for = True set_props = properties.Properties() set_props.setProperty('pr', 'op', 'test') ss = {'revision': 'myrev', 'branch': 'br', 'project': 'p', 'repository': 'r', 'codebase': 'cb'} idsDeferred, d = sched.trigger( waited_for, sourcestamps=[ss], set_props=set_props) self.reactor.advance(0) # let the debounced function fire self.assertTriggeredBuildset( idsDeferred, waited_for, properties={'pr': ('op', 'test')}, sourcestamps=[ dict(branch='br', project='p', repository='r', codebase='cb', revision='myrev'), ]) # set up a boolean so that we can know when the deferred fires self.fired = False @d.addCallback def fired(xxx_todo_changeme): (result, brids) = xxx_todo_changeme self.assertEqual(result, 3) # from sendCompletionMessage self.assertEqual(brids, {77: 1000}) self.fired = True d.addErrback(log.err) # check that the scheduler has subscribed to buildset changes, but # not fired yet self.assertEqual( [q.filter for q in sched.master.mq.qrefs], [('buildsets', None, 'complete',)]) self.assertFalse(self.fired) # pretend a non-matching buildset is complete self.sendCompletionMessage(27) # scheduler should not have reacted self.assertEqual( [q.filter for q in sched.master.mq.qrefs], [('buildsets', None, 'complete',)]) self.assertFalse(self.fired) # pretend the matching buildset is complete self.sendCompletionMessage(200) self.reactor.advance(0) # let the debounced function fire # scheduler should have reacted self.assertEqual( [q.filter for q in 
sched.master.mq.qrefs], []) self.assertTrue(self.fired) return d def test_trigger_overlapping(self): sched = self.makeScheduler(codebases={'cb': {'repository': 'r'}}) # no subscription should be in place yet self.assertEqual(sched.master.mq.qrefs, []) waited_for = False def makeSS(rev): return {'revision': rev, 'branch': 'br', 'project': 'p', 'repository': 'r', 'codebase': 'cb'} # trigger the scheduler the first time idsDeferred, d = sched.trigger( waited_for, [makeSS('myrev1')]) # triggers bsid 200 self.assertTriggeredBuildset( idsDeferred, waited_for, sourcestamps=[ dict(branch='br', project='p', repository='r', codebase='cb', revision='myrev1'), ]) d.addCallback(lambda res_brids: self.assertEqual(res_brids[0], 11) and self.assertEqual(res_brids[1], {77: 1000})) waited_for = True # and the second time idsDeferred, d = sched.trigger( waited_for, [makeSS('myrev2')]) # triggers bsid 201 self.reactor.advance(0) # let the debounced function fire self.assertTriggeredBuildset( idsDeferred, waited_for, sourcestamps=[ dict(branch='br', project='p', repository='r', codebase='cb', revision='myrev2'), ]) d.addCallback(lambda res_brids1: self.assertEqual(res_brids1[0], 22) and self.assertEqual(res_brids1[1], {77: 1001})) # check that the scheduler has subscribed to buildset changes self.assertEqual( [q.filter for q in sched.master.mq.qrefs], [('buildsets', None, 'complete',)]) # let a few buildsets complete self.sendCompletionMessage(29, results=3) self.sendCompletionMessage(201, results=22) self.sendCompletionMessage(9, results=3) self.sendCompletionMessage(200, results=11) self.reactor.advance(0) # let the debounced function fire # both should have triggered with appropriate results, and the # subscription should be cancelled self.assertEqual(sched.master.mq.qrefs, []) @defer.inlineCallbacks def test_trigger_with_sourcestamp(self): # Test triggering a scheduler with a sourcestamp, and see that # sourcestamp handed to addBuildsetForSourceStampsWithDefaults. 
sched = self.makeScheduler(overrideBuildsetMethods=True) waited_for = False ss = {'repository': 'r3', 'codebase': 'cb3', 'revision': 'fixrev3', 'branch': 'default', 'project': 'p'} idsDeferred = sched.trigger(waited_for, sourcestamps=[ss])[0] yield idsDeferred self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'properties': {'scheduler': ('n', 'Scheduler')}, 'reason': "The Triggerable scheduler named 'n' triggered " "this build", 'sourcestamps': [{ 'branch': 'default', 'codebase': 'cb3', 'project': 'p', 'repository': 'r3', 'revision': 'fixrev3'}, ], 'waited_for': False}), ]) @defer.inlineCallbacks def test_trigger_without_sourcestamps(self): # Test triggering *without* sourcestamps, and see that nothing is passed # to addBuildsetForSourceStampsWithDefaults waited_for = True sched = self.makeScheduler(overrideBuildsetMethods=True) idsDeferred = sched.trigger(waited_for, sourcestamps=[])[0] yield idsDeferred self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'properties': {'scheduler': ('n', 'Scheduler')}, 'reason': "The Triggerable scheduler named 'n' triggered " "this build", 'sourcestamps': [], 'waited_for': True}), ]) @defer.inlineCallbacks def test_trigger_with_reason(self): # Test triggering with a reason, and make sure the buildset's reason is updated accordingly # (and not the default) waited_for = True sched = self.makeScheduler(overrideBuildsetMethods=True) set_props = properties.Properties() set_props.setProperty('reason', 'test1', 'test') idsDeferred, d = sched.trigger( waited_for, sourcestamps=[], set_props=set_props) yield idsDeferred self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'properties': {'scheduler': ('n', 'Scheduler'), 'reason': ('test1', 'test')}, 'reason': "test1", 'sourcestamps': [], 'waited_for': True}), ]) @defer.inlineCallbacks def 
test_startService_stopService(self): sched = self.makeScheduler() yield sched.startService() yield sched.stopService() buildbot-3.4.0/master/buildbot/test/unit/schedulers/test_trysched.py000066400000000000000000001012261413250514000257530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import os import shutil import sys from io import StringIO import mock import twisted from twisted.internet import defer from twisted.protocols import basic from twisted.trial import unittest from buildbot.schedulers import trysched from buildbot.test.util import dirs from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class TryBase(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 26 SCHEDULERID = 6 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, **kwargs): return self.attachScheduler(trysched.Try_Userpass(**kwargs), self.OBJECTID, self.SCHEDULERID) def test_filterBuilderList_ok(self): sched = trysched.TryBase( name='tsched', builderNames=['a', 'b', 'c'], properties={}) self.assertEqual(sched.filterBuilderList(['b', 'c']), ['b', 'c']) def test_filterBuilderList_bad(self): sched = trysched.TryBase( name='tsched', builderNames=['a', 'b'], properties={}) 
self.assertEqual(sched.filterBuilderList(['b', 'c']), []) def test_filterBuilderList_empty(self): sched = trysched.TryBase( name='tsched', builderNames=['a', 'b'], properties={}) self.assertEqual(sched.filterBuilderList([]), ['a', 'b']) @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler(name='tsched', builderNames=['a'], port='tcp:9999', userpass=[('fred', 'derf')]) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def test_disabled_activate(self): sched = self.makeScheduler(name='tsched', builderNames=['a'], port='tcp:9999', userpass=[('fred', 'derf')]) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.activate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_deactivate(self): sched = self.makeScheduler(name='tsched', builderNames=['a'], port='tcp:9999', userpass=[('fred', 'derf')]) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.deactivate() self.assertEqual(r, None) class JobdirService(dirs.DirsMixin, unittest.TestCase): def setUp(self): self.jobdir = 'jobdir' self.newdir = os.path.join(self.jobdir, 'new') self.curdir = os.path.join(self.jobdir, 'cur') self.tmpdir = os.path.join(self.jobdir, 'tmp') self.setUpDirs(self.jobdir, self.newdir, self.curdir, self.tmpdir) def tearDown(self): self.tearDownDirs() def test_messageReceived(self): # stub out svc.scheduler.handleJobFile and .jobdir scheduler = mock.Mock() def handleJobFile(filename, f): self.assertEqual(filename, 'jobdata') self.assertEqual(f.read(), 'JOBDATA') scheduler.handleJobFile = handleJobFile scheduler.jobdir = self.jobdir svc = 
trysched.JobdirService(scheduler=scheduler, basedir=self.jobdir) # create some new data to process jobdata = os.path.join(self.newdir, 'jobdata') with open(jobdata, "w") as f: f.write('JOBDATA') # run it svc.messageReceived('jobdata') class Try_Jobdir(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 23 SCHEDULERID = 3 def setUp(self): self.setUpTestReactor() self.setUpScheduler() self.jobdir = None def tearDown(self): self.tearDownScheduler() if self.jobdir: shutil.rmtree(self.jobdir) # tests def setup_test_startService(self, jobdir, exp_jobdir): # set up jobdir self.jobdir = os.path.abspath('jobdir') if os.path.exists(self.jobdir): shutil.rmtree(self.jobdir) os.mkdir(self.jobdir) # build scheduler kwargs = dict(name="tsched", builderNames=['a'], jobdir=self.jobdir) sched = self.attachScheduler( trysched.Try_Jobdir(**kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True) # watch interaction with the watcher service sched.watcher.startService = mock.Mock() sched.watcher.stopService = mock.Mock() @defer.inlineCallbacks def do_test_startService(self): # start it yield self.sched.startService() # check that it has set the basedir correctly self.assertEqual(self.sched.watcher.basedir, self.jobdir) self.assertEqual(1, self.sched.watcher.startService.call_count) self.assertEqual(0, self.sched.watcher.stopService.call_count) yield self.sched.stopService() self.assertEqual(1, self.sched.watcher.startService.call_count) self.assertEqual(1, self.sched.watcher.stopService.call_count) def test_startService_reldir(self): self.setup_test_startService( 'jobdir', os.path.abspath('basedir/jobdir')) return self.do_test_startService() def test_startService_reldir_subdir(self): self.setup_test_startService( 'jobdir', os.path.abspath('basedir/jobdir/cur')) return self.do_test_startService() def test_startService_absdir(self): self.setup_test_startService( os.path.abspath('jobdir'), os.path.abspath('jobdir')) return self.do_test_startService() 
@defer.inlineCallbacks def do_test_startService_but_not_active(self, jobdir, exp_jobdir): """Same as do_test_startService, but the master wont activate this service""" self.setup_test_startService( 'jobdir', os.path.abspath('basedir/jobdir')) self.setSchedulerToMaster(self.OTHER_MASTER_ID) # start it self.sched.startService() # check that it has set the basedir correctly, even if it doesn't start self.assertEqual(self.sched.watcher.basedir, self.jobdir) yield self.sched.stopService() self.assertEqual(0, self.sched.watcher.startService.call_count) self.assertEqual(0, self.sched.watcher.stopService.call_count) # parseJob def test_parseJob_empty(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['a'], jobdir='foo') with self.assertRaises(trysched.BadJobfile): sched.parseJob(StringIO('')) def test_parseJob_longer_than_netstring_MAXLENGTH(self): self.patch(basic.NetstringReceiver, 'MAX_LENGTH', 100) sched = trysched.Try_Jobdir( name='tsched', builderNames=['a'], jobdir='foo') jobstr = self.makeNetstring( '1', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'buildera', 'builderc' ) jobstr += 'x' * 200 test_temp_file = StringIO(jobstr) with self.assertRaises(trysched.BadJobfile): sched.parseJob(test_temp_file) def test_parseJob_invalid(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['a'], jobdir='foo') with self.assertRaises(trysched.BadJobfile): sched.parseJob(StringIO('this is not a netstring')) def test_parseJob_invalid_version(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['a'], jobdir='foo') with self.assertRaises(trysched.BadJobfile): sched.parseJob(StringIO('1:9,')) def makeNetstring(self, *strings): return ''.join(['{}:{},'.format(len(s), s) for s in strings]) def test_parseJob_v1(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '1', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'buildera', 'builderc' 
) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob, { 'baserev': '1234', 'branch': 'trunk', 'builderNames': ['buildera', 'builderc'], 'jobid': 'extid', 'patch_body': b'this is my diff, -- ++, etc.', 'patch_level': 1, 'project': '', 'who': '', 'comment': '', 'repository': '', 'properties': {}, }) def test_parseJob_v1_empty_branch_rev(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( # blank branch, rev are turned to None '1', 'extid', '', '', '1', 'this is my diff, -- ++, etc.', 'buildera', 'builderc' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['branch'], None) self.assertEqual(parsedjob['baserev'], None) def test_parseJob_v1_no_builders(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '1', 'extid', '', '', '1', 'this is my diff, -- ++, etc.' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['builderNames'], []) def test_parseJob_v1_no_properties(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '1', 'extid', '', '', '1', 'this is my diff, -- ++, etc.' 
) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['properties'], {}) def test_parseJob_v2(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '2', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'buildera', 'builderc' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob, { 'baserev': '1234', 'branch': 'trunk', 'builderNames': ['buildera', 'builderc'], 'jobid': 'extid', 'patch_body': b'this is my diff, -- ++, etc.', 'patch_level': 1, 'project': 'proj', 'who': '', 'comment': '', 'repository': 'repo', 'properties': {}, }) def test_parseJob_v2_empty_branch_rev(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( # blank branch, rev are turned to None '2', 'extid', '', '', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'buildera', 'builderc' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['branch'], None) self.assertEqual(parsedjob['baserev'], None) def test_parseJob_v2_no_builders(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '2', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['builderNames'], []) def test_parseJob_v2_no_properties(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '2', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['properties'], {}) def test_parseJob_v3(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '3', 'extid', 'trunk', 
'1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'buildera', 'builderc' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob, { 'baserev': '1234', 'branch': 'trunk', 'builderNames': ['buildera', 'builderc'], 'jobid': 'extid', 'patch_body': b'this is my diff, -- ++, etc.', 'patch_level': 1, 'project': 'proj', 'who': 'who', 'comment': '', 'repository': 'repo', 'properties': {}, }) def test_parseJob_v3_empty_branch_rev(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( # blank branch, rev are turned to None '3', 'extid', '', '', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'buildera', 'builderc' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['branch'], None) self.assertEqual(parsedjob['baserev'], None) def test_parseJob_v3_no_builders(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '3', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['builderNames'], []) def test_parseJob_v3_no_properties(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '3', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['properties'], {}) def test_parseJob_v4(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '4', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'comment', 'buildera', 'builderc' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob, { 'baserev': '1234', 'branch': 'trunk', 'builderNames': ['buildera', 
'builderc'], 'jobid': 'extid', 'patch_body': b'this is my diff, -- ++, etc.', 'patch_level': 1, 'project': 'proj', 'who': 'who', 'comment': 'comment', 'repository': 'repo', 'properties': {}, }) def test_parseJob_v4_empty_branch_rev(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( # blank branch, rev are turned to None '4', 'extid', '', '', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'comment', 'buildera', 'builderc' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['branch'], None) self.assertEqual(parsedjob['baserev'], None) def test_parseJob_v4_no_builders(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '4', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'comment' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['builderNames'], []) def test_parseJob_v4_no_properties(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '4', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'comment' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['properties'], {}) def test_parseJob_v5(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '5', json.dumps({ 'jobid': 'extid', 'branch': 'trunk', 'baserev': '1234', 'patch_level': 1, 'patch_body': 'this is my diff, -- ++, etc.', 'repository': 'repo', 'project': 'proj', 'who': 'who', 'comment': 'comment', 'builderNames': ['buildera', 'builderc'], 'properties': {'foo': 'bar'}, })) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob, { 'baserev': '1234', 'branch': 'trunk', 'builderNames': ['buildera', 'builderc'], 'jobid': 'extid', 
'patch_body': b'this is my diff, -- ++, etc.', 'patch_level': 1, 'project': 'proj', 'who': 'who', 'comment': 'comment', 'repository': 'repo', 'properties': {'foo': 'bar'}, }) def test_parseJob_v5_empty_branch_rev(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( # blank branch, rev are turned to None '4', 'extid', '', '', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'comment', 'buildera', 'builderc' ) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['branch'], None) self.assertEqual(parsedjob['baserev'], None) def test_parseJob_v5_no_builders(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '5', json.dumps({ 'jobid': 'extid', 'branch': 'trunk', 'baserev': '1234', 'patch_level': '1', 'patch_body': 'this is my diff, -- ++, etc.', 'repository': 'repo', 'project': 'proj', 'who': 'who', 'comment': 'comment', 'builderNames': [], 'properties': {'foo': 'bar'}, })) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['builderNames'], []) def test_parseJob_v5_no_properties(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '5', json.dumps({ 'jobid': 'extid', 'branch': 'trunk', 'baserev': '1234', 'patch_level': '1', 'patch_body': 'this is my diff, -- ++, etc.', 'repository': 'repo', 'project': 'proj', 'who': 'who', 'comment': 'comment', 'builderNames': ['buildera', 'builderb'], 'properties': {}, })) parsedjob = sched.parseJob(StringIO(jobstr)) self.assertEqual(parsedjob['properties'], {}) def test_parseJob_v5_invalid_json(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring('5', '{"comment": "com}') with self.assertRaises(trysched.BadJobfile): sched.parseJob(StringIO(jobstr)) # handleJobFile def 
call_handleJobFile(self, parseJob): sched = self.attachScheduler( trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo'), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True, createBuilderDB=True) fakefile = mock.Mock() def parseJob_(f): assert f is fakefile return parseJob(f) sched.parseJob = parseJob_ return defer.maybeDeferred(sched.handleJobFile, 'fakefile', fakefile) def makeSampleParsedJob(self, **overrides): pj = dict(baserev='1234', branch='trunk', builderNames=['buildera', 'builderb'], jobid='extid', patch_body=b'this is my diff, -- ++, etc.', patch_level=1, project='proj', repository='repo', who='who', comment='comment', properties={}) pj.update(overrides) return pj @defer.inlineCallbacks def test_handleJobFile(self): yield self.call_handleJobFile(lambda f: self.makeSampleParsedJob()) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=['buildera', 'builderb'], external_idstring='extid', properties={}, reason="'try' job by user who", sourcestamps=[ dict( branch='trunk', codebase='', patch_author='who', patch_body=b'this is my diff, -- ++, etc.', patch_comment='comment', patch_level=1, patch_subdir='', project='proj', repository='repo', revision='1234'), ])), ]) @defer.inlineCallbacks def test_handleJobFile_exception(self): def parseJob(f): raise trysched.BadJobfile yield self.call_handleJobFile(parseJob) self.assertEqual(self.addBuildsetCalls, []) self.assertEqual( 1, len(self.flushLoggedErrors(trysched.BadJobfile))) if twisted.version.major <= 9 and sys.version_info[:2] >= (2, 7): test_handleJobFile_exception.skip = ( "flushLoggedErrors does not work correctly on 9.0.0 " "and earlier with Python-2.7") @defer.inlineCallbacks def test_handleJobFile_bad_builders(self): yield self.call_handleJobFile( lambda f: self.makeSampleParsedJob(builderNames=['xxx'])) self.assertEqual(self.addBuildsetCalls, []) @defer.inlineCallbacks def test_handleJobFile_subset_builders(self): yield 
self.call_handleJobFile( lambda f: self.makeSampleParsedJob(builderNames=['buildera'])) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=['buildera'], external_idstring='extid', properties={}, reason="'try' job by user who", sourcestamps=[ dict( branch='trunk', codebase='', patch_author='who', patch_body=b'this is my diff, -- ++, etc.', patch_comment='comment', patch_level=1, patch_subdir='', project='proj', repository='repo', revision='1234'), ])), ]) @defer.inlineCallbacks def test_handleJobFile_with_try_properties(self): yield self.call_handleJobFile( lambda f: self.makeSampleParsedJob(properties={'foo': 'bar'})) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=['buildera', 'builderb'], external_idstring='extid', properties={'foo': ('bar', 'try build')}, reason="'try' job by user who", sourcestamps=[ dict( branch='trunk', codebase='', patch_author='who', patch_body=b'this is my diff, -- ++, etc.', patch_comment='comment', patch_level=1, patch_subdir='', project='proj', repository='repo', revision='1234'), ])), ]) def test_handleJobFile_with_invalid_try_properties(self): d = self.call_handleJobFile( lambda f: self.makeSampleParsedJob(properties=['foo', 'bar'])) return self.assertFailure(d, AttributeError) class Try_Userpass_Perspective(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 26 SCHEDULERID = 6 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, **kwargs): return self.attachScheduler(trysched.Try_Userpass(**kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True, createBuilderDB=True) @defer.inlineCallbacks def call_perspective_try(self, *args, **kwargs): sched = self.makeScheduler(name='tsched', builderNames=['a', 'b'], port='xxx', userpass=[('a', 'b')], properties=dict(frm='schd')) persp = trysched.Try_Userpass_Perspective(sched, 'a') # patch out all 
of the handling after addBuildsetForSourceStamp def getBuildset(bsid): return dict(bsid=bsid) self.db.buildsets.getBuildset = getBuildset rbss = yield persp.perspective_try(*args, **kwargs) if rbss is None: return self.assertIsInstance(rbss, trysched.RemoteBuildSetStatus) @defer.inlineCallbacks def test_perspective_try(self): yield self.call_perspective_try( 'default', 'abcdef', (1, '-- ++'), 'repo', 'proj', ['a'], properties={'pr': 'op'}) self.maxDiff = None self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=['a'], external_idstring=None, properties={'pr': ('op', 'try build')}, reason="'try' job", sourcestamps=[ dict( branch='default', codebase='', patch_author='', patch_body=b'-- ++', patch_comment='', patch_level=1, patch_subdir='', project='proj', repository='repo', revision='abcdef'), ])), ]) @defer.inlineCallbacks def test_perspective_try_bytes(self): yield self.call_perspective_try( 'default', 'abcdef', (1, b'-- ++\xf8'), 'repo', 'proj', ['a'], properties={'pr': 'op'}) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', { 'builderNames': ['a'], 'external_idstring': None, 'properties': {'pr': ('op', 'try build')}, 'reason': "'try' job", 'sourcestamps': [ { 'branch': 'default', 'codebase': '', 'patch_author': '', 'patch_body': b'-- ++\xf8', 'patch_comment': '', 'patch_level': 1, 'patch_subdir': '', 'project': 'proj', 'repository': 'repo', 'revision': 'abcdef', } ] }), ]) @defer.inlineCallbacks def test_perspective_try_who(self): yield self.call_perspective_try( 'default', 'abcdef', (1, '-- ++'), 'repo', 'proj', ['a'], who='who', comment='comment', properties={'pr': 'op'}) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=['a'], external_idstring=None, properties={'pr': ('op', 'try build')}, reason="'try' job by user who (comment)", sourcestamps=[ dict( branch='default', codebase='', patch_author='who', patch_body=b'-- ++', patch_comment='comment', 
patch_level=1, patch_subdir='', project='proj', repository='repo', revision='abcdef'), ])), ]) @defer.inlineCallbacks def test_perspective_try_bad_builders(self): yield self.call_perspective_try( 'default', 'abcdef', (1, '-- ++'), 'repo', 'proj', ['xxx'], properties={'pr': 'op'}) self.assertEqual(self.addBuildsetCalls, []) @defer.inlineCallbacks def test_getAvailableBuilderNames(self): sched = self.makeScheduler(name='tsched', builderNames=['a', 'b'], port='xxx', userpass=[('a', 'b')]) persp = trysched.Try_Userpass_Perspective(sched, 'a') buildernames = yield persp.perspective_getAvailableBuilderNames() self.assertEqual(buildernames, ['a', 'b']) class Try_Userpass(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 25 SCHEDULERID = 5 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, **kwargs): sched = self.attachScheduler(trysched.Try_Userpass(**kwargs), self.OBJECTID, self.SCHEDULERID) return sched @defer.inlineCallbacks def test_service(self): sched = self.makeScheduler(name='tsched', builderNames=['a'], port='tcp:9999', userpass=[('fred', 'derf')]) # patch out the pbmanager's 'register' command both to be sure # the registration is correct and to get a copy of the factory registration = mock.Mock() registration.unregister = lambda: defer.succeed(None) sched.master.pbmanager = mock.Mock() def register(portstr, user, passwd, factory): self.assertEqual([portstr, user, passwd], ['tcp:9999', 'fred', 'derf']) self.got_factory = factory return defer.succeed(registration) sched.master.pbmanager.register = register # start it yield sched.startService() # make a fake connection by invoking the factory, and check that we # get the correct perspective persp = self.got_factory(mock.Mock(), 'fred') self.assertTrue(isinstance(persp, trysched.Try_Userpass_Perspective)) yield sched.stopService() @defer.inlineCallbacks def test_service_but_not_active(self): sched = 
self.makeScheduler(name='tsched', builderNames=['a'], port='tcp:9999', userpass=[('fred', 'derf')]) self.setSchedulerToMaster(self.OTHER_MASTER_ID) sched.master.pbmanager = mock.Mock() sched.startService() yield sched.stopService() self.assertFalse(sched.master.pbmanager.register.called) buildbot-3.4.0/master/buildbot/test/unit/scripts/000077500000000000000000000000001413250514000220415ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/scripts/__init__.py000066400000000000000000000000001413250514000241400ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/scripts/test_base.py000066400000000000000000000354531413250514000243760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import errno import os import string import textwrap from io import StringIO from twisted.python import runtime from twisted.python import usage from twisted.trial import unittest from buildbot import config as config_module from buildbot.scripts import base from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util.decorators import skipUnlessPlatformIs class TestIBD(dirs.DirsMixin, misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('test') self.stdout = StringIO() self.setUpStdoutAssertions() def test_isBuildmasterDir_no_dir(self): self.assertFalse(base.isBuildmasterDir(os.path.abspath('test/nosuch'))) self.assertInStdout('error reading') self.assertInStdout('invalid buildmaster directory') def test_isBuildmasterDir_no_file(self): self.assertFalse(base.isBuildmasterDir(os.path.abspath('test'))) self.assertInStdout('error reading') self.assertInStdout('invalid buildmaster directory') def test_isBuildmasterDir_no_Application(self): # Loading of pre-0.9.0 buildbot.tac file should fail. with open(os.path.join('test', 'buildbot.tac'), 'w') as f: f.write("foo\nx = Application('buildslave')\nbar") self.assertFalse(base.isBuildmasterDir(os.path.abspath('test'))) self.assertInStdout('unexpected content') self.assertInStdout('invalid buildmaster directory') def test_isBuildmasterDir_matches(self): with open(os.path.join('test', 'buildbot.tac'), 'w') as f: f.write("foo\nx = Application('buildmaster')\nbar") self.assertTrue(base.isBuildmasterDir(os.path.abspath('test'))) self.assertWasQuiet() class TestTacFallback(dirs.DirsMixin, unittest.TestCase): """ Tests for L{base.getConfigFileFromTac}. """ def setUp(self): """ Create a base directory. """ self.basedir = os.path.abspath('basedir') return self.setUpDirs('basedir') def _createBuildbotTac(self, contents=None): """ Create a C{buildbot.tac} that points to a given C{configfile} and create that file. 
@param configfile: Config file to point at and create. @type configfile: L{str} """ if contents is None: contents = '#dummy' tacfile = os.path.join(self.basedir, "buildbot.tac") with open(tacfile, "wt") as f: f.write(contents) return tacfile def test_getConfigFileFromTac(self): """ When L{getConfigFileFromTac} is passed a C{basedir} containing a C{buildbot.tac}, it reads the location of the config file from there. """ self._createBuildbotTac("configfile='other.cfg'") foundConfigFile = base.getConfigFileFromTac( basedir=self.basedir) self.assertEqual(foundConfigFile, "other.cfg") def test_getConfigFileFromTac_fallback(self): """ When L{getConfigFileFromTac} is passed a C{basedir} which doesn't contain a C{buildbot.tac}, it returns C{master.cfg} """ foundConfigFile = base.getConfigFileFromTac( basedir=self.basedir) self.assertEqual(foundConfigFile, 'master.cfg') def test_getConfigFileFromTac_tacWithoutConfigFile(self): """ When L{getConfigFileFromTac} is passed a C{basedir} containing a C{buildbot.tac}, but C{buildbot.tac} doesn't define C{configfile}, L{getConfigFileFromTac} returns C{master.cfg} """ self._createBuildbotTac() foundConfigFile = base.getConfigFileFromTac( basedir=self.basedir) self.assertEqual(foundConfigFile, 'master.cfg') def test_getConfigFileFromTac_usingFile(self): """ When L{getConfigFileFromTac} is passed a C{basedir} containing a C{buildbot.tac} which references C{__file__}, that reference points to C{buildbot.tac}. 
""" self._createBuildbotTac(textwrap.dedent(""" from twisted.python.util import sibpath configfile = sibpath(__file__, "relative.cfg") """)) foundConfigFile = base.getConfigFileFromTac(basedir=self.basedir) self.assertEqual( foundConfigFile, os.path.join(self.basedir, "relative.cfg")) class TestSubcommandOptions(unittest.TestCase): def fakeOptionsFile(self, **kwargs): self.patch(base.SubcommandOptions, 'loadOptionsFile', lambda self: kwargs.copy()) def parse(self, cls, *args): self.opts = cls() self.opts.parseOptions(args) return self.opts class Bare(base.SubcommandOptions): optFlags = [['foo', 'f', 'Foo!']] def test_bare_subclass(self): self.fakeOptionsFile() opts = self.parse(self.Bare, '-f') self.assertTrue(opts['foo']) class ParamsAndOptions(base.SubcommandOptions): optParameters = [['volume', 'v', '5', 'How Loud?']] buildbotOptions = [['volcfg', 'volume']] def test_buildbotOptions(self): self.fakeOptionsFile() opts = self.parse(self.ParamsAndOptions) self.assertEqual(opts['volume'], '5') def test_buildbotOptions_options(self): self.fakeOptionsFile(volcfg='3') opts = self.parse(self.ParamsAndOptions) self.assertEqual(opts['volume'], '3') def test_buildbotOptions_override(self): self.fakeOptionsFile(volcfg='3') opts = self.parse(self.ParamsAndOptions, '--volume', '7') self.assertEqual(opts['volume'], '7') class RequiredOptions(base.SubcommandOptions): optParameters = [['volume', 'v', None, 'How Loud?']] requiredOptions = ['volume'] def test_requiredOptions(self): self.fakeOptionsFile() with self.assertRaises(usage.UsageError): self.parse(self.RequiredOptions) class TestLoadOptionsFile(dirs.DirsMixin, misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('test', 'home') self.opts = base.SubcommandOptions() self.dir = os.path.abspath('test') self.home = os.path.abspath('home') self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() def do_loadOptionsFile(self, _here, exp): # only patch these os.path functions briefly, to # 
avoid breaking other parts of the test system patches = [] if runtime.platformType == 'win32': from win32com.shell import shell patches.append(self.patch(shell, 'SHGetFolderPath', lambda *args: self.home)) else: def expanduser(p): return p.replace('~/', self.home + '/') patches.append(self.patch(os.path, 'expanduser', expanduser)) old_dirname = os.path.dirname def dirname(p): # bottom out at self.dir, rather than / if p == self.dir: return p return old_dirname(p) patches.append(self.patch(os.path, 'dirname', dirname)) try: self.assertEqual(self.opts.loadOptionsFile(_here=_here), exp) finally: for p in patches: p.restore() def writeOptionsFile(self, dir, content, bbdir='.buildbot'): os.makedirs(os.path.join(dir, bbdir)) with open(os.path.join(dir, bbdir, 'options'), 'w') as f: f.write(content) def test_loadOptionsFile_subdirs_not_found(self): subdir = os.path.join(self.dir, 'a', 'b') os.makedirs(subdir) self.do_loadOptionsFile(_here=subdir, exp={}) def test_loadOptionsFile_subdirs_at_root(self): subdir = os.path.join(self.dir, 'a', 'b') os.makedirs(subdir) self.writeOptionsFile(self.dir, 'abc="def"') self.writeOptionsFile(self.home, 'abc=123') # not seen self.do_loadOptionsFile(_here=subdir, exp={'abc': 'def'}) def test_loadOptionsFile_subdirs_at_tip(self): subdir = os.path.join(self.dir, 'a', 'b') os.makedirs(subdir) self.writeOptionsFile(os.path.join(self.dir, 'a', 'b'), 'abc="def"') self.writeOptionsFile(self.dir, 'abc=123') # not seen self.do_loadOptionsFile(_here=subdir, exp={'abc': 'def'}) def test_loadOptionsFile_subdirs_at_homedir(self): subdir = os.path.join(self.dir, 'a', 'b') os.makedirs(subdir) # on windows, the subdir of the home (well, appdata) dir # is 'buildbot', not '.buildbot' self.writeOptionsFile(self.home, 'abc=123', 'buildbot' if runtime.platformType == 'win32' else '.buildbot') self.do_loadOptionsFile(_here=subdir, exp={'abc': 123}) def test_loadOptionsFile_syntax_error(self): self.writeOptionsFile(self.dir, 'abc=abc') with 
self.assertRaises(NameError): self.do_loadOptionsFile(_here=self.dir, exp={}) self.assertInStdout('error while reading') def test_loadOptionsFile_toomany(self): subdir = os.path.join(self.dir, *tuple(string.ascii_lowercase)) os.makedirs(subdir) self.do_loadOptionsFile(_here=subdir, exp={}) self.assertInStdout('infinite glories') # NOTE: testing the ownership check requires patching os.stat, which causes # other problems since it is so heavily used. def mkconfig(**kwargs): config = dict(quiet=False, replace=False, basedir='test') config.update(kwargs) return config class TestLoadConfig(dirs.DirsMixin, misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('test') self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() def activeBasedir(self, extra_lines=()): with open(os.path.join('test', 'buildbot.tac'), 'wt') as f: f.write("from twisted.application import service\n") f.write("service.Application('buildmaster')\n") f.write("\n".join(extra_lines)) def test_checkBasedir(self): self.activeBasedir() rv = base.checkBasedir(mkconfig()) self.assertTrue(rv) self.assertInStdout('checking basedir') def test_checkBasedir_quiet(self): self.activeBasedir() rv = base.checkBasedir(mkconfig(quiet=True)) self.assertTrue(rv) self.assertWasQuiet() def test_checkBasedir_no_dir(self): rv = base.checkBasedir(mkconfig(basedir='doesntexist')) self.assertFalse(rv) self.assertInStdout('invalid buildmaster directory') @skipUnlessPlatformIs('posix') def test_checkBasedir_active_pidfile(self): """ active PID file is giving error. """ self.activeBasedir() # write our own pid in the file with open(os.path.join('test', 'twistd.pid'), 'w') as f: f.write(str(os.getpid())) rv = base.checkBasedir(mkconfig()) self.assertFalse(rv) self.assertInStdout('still running') @skipUnlessPlatformIs('posix') def test_checkBasedir_bad_pidfile(self): """ corrupted PID file is giving error. 
""" self.activeBasedir() with open(os.path.join('test', 'twistd.pid'), 'w') as f: f.write("xxx") rv = base.checkBasedir(mkconfig()) self.assertFalse(rv) self.assertInStdout('twistd.pid contains non-numeric value') @skipUnlessPlatformIs('posix') def test_checkBasedir_stale_pidfile(self): """ Stale PID file is removed without causing a system exit. """ self.activeBasedir() pidfile = os.path.join('test', 'twistd.pid') with open(pidfile, 'w') as f: f.write(str(os.getpid() + 1)) def kill(pid, sig): raise OSError(errno.ESRCH, "fake") self.patch(os, "kill", kill) rv = base.checkBasedir(mkconfig()) self.assertTrue(rv) self.assertInStdout('Removing stale pidfile test') self.assertFalse(os.path.exists(pidfile)) @skipUnlessPlatformIs('posix') def test_checkBasedir_pidfile_kill_error(self): """ if ping-killing the PID file does not work, we should error out. """ self.activeBasedir() # write our own pid in the file pidfile = os.path.join('test', 'twistd.pid') with open(pidfile, 'w') as f: f.write(str(os.getpid() + 1)) def kill(pid, sig): raise OSError(errno.EPERM, "fake") self.patch(os, "kill", kill) rv = base.checkBasedir(mkconfig()) self.assertFalse(rv) self.assertInStdout('Can\'t check status of PID') self.assertTrue(os.path.exists(pidfile)) def test_checkBasedir_invalid_rotateLength(self): self.activeBasedir(extra_lines=['rotateLength="32"']) rv = base.checkBasedir(mkconfig()) self.assertFalse(rv) self.assertInStdout('ERROR') self.assertInStdout('rotateLength') def test_checkBasedir_invalid_maxRotatedFiles(self): self.activeBasedir(extra_lines=['maxRotatedFiles="64"']) rv = base.checkBasedir(mkconfig()) self.assertFalse(rv) self.assertInStdout('ERROR') self.assertInStdout('maxRotatedFiles') def test_loadConfig(self): @classmethod def loadConfig(cls): return config_module.MasterConfig() self.patch(config_module.FileLoader, 'loadConfig', loadConfig) cfg = base.loadConfig(mkconfig()) self.assertIsInstance(cfg, config_module.MasterConfig) self.assertInStdout('checking') def 
test_loadConfig_ConfigErrors(self): @classmethod def loadConfig(cls): raise config_module.ConfigErrors(['oh noes']) self.patch(config_module.FileLoader, 'loadConfig', loadConfig) cfg = base.loadConfig(mkconfig()) self.assertIdentical(cfg, None) self.assertInStdout('oh noes') def test_loadConfig_exception(self): @classmethod def loadConfig(cls): raise RuntimeError() self.patch(config_module.FileLoader, 'loadConfig', loadConfig) cfg = base.loadConfig(mkconfig()) self.assertIdentical(cfg, None) self.assertInStdout('RuntimeError') buildbot-3.4.0/master/buildbot/test/unit/scripts/test_checkconfig.py000066400000000000000000000166501413250514000257250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import re import sys import textwrap from io import StringIO import mock from twisted.trial import unittest from buildbot.scripts import base from buildbot.scripts import checkconfig from buildbot.test.util import dirs class TestConfigLoader(dirs.DirsMixin, unittest.TestCase): def setUp(self): # config dir must be unique so that the python runtime does not optimize its list of module self.configdir = self.mktemp() return self.setUpDirs(self.configdir) def tearDown(self): return self.tearDownDirs() # tests def do_test_load(self, config='', other_files=None, stdout_re=None, stderr_re=None): if other_files is None: other_files = {} configFile = os.path.join(self.configdir, 'master.cfg') with open(configFile, "w") as f: f.write(config) for filename, contents in other_files.items(): if isinstance(filename, type(())): fn = os.path.join(self.configdir, *filename) dn = os.path.dirname(fn) if not os.path.isdir(dn): os.makedirs(dn) else: fn = os.path.join(self.configdir, filename) with open(fn, "w") as f: f.write(contents) old_stdout, old_stderr = sys.stdout, sys.stderr stdout = sys.stdout = StringIO() stderr = sys.stderr = StringIO() try: checkconfig._loadConfig( basedir=self.configdir, configFile="master.cfg", quiet=False) finally: sys.stdout, sys.stderr = old_stdout, old_stderr if stdout_re: stdout = stdout.getvalue() self.assertTrue(stdout_re.search(stdout), stdout) if stderr_re: stderr = stderr.getvalue() self.assertTrue(stderr_re.search(stderr), stderr) def test_success(self): len_sys_path = len(sys.path) config = textwrap.dedent("""\ c = BuildmasterConfig = {} c['multiMaster'] = True c['schedulers'] = [] from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory c['builders'] = [ BuilderConfig('testbuilder', factory=BuildFactory(), workername='worker'), ] from buildbot.worker import Worker c['workers'] = [ Worker('worker', 'pass'), ] c['protocols'] = {'pb': {'port': 9989}} """) 
self.do_test_load(config=config, stdout_re=re.compile('Config file is good!')) # (regression) check that sys.path hasn't changed self.assertEqual(len(sys.path), len_sys_path) def test_failure_ImportError(self): config = textwrap.dedent("""\ import test_scripts_checkconfig_does_not_exist """) # Python 3 displays this error: # No module named 'test_scripts_checkconfig_does_not_exist' # # Python 2 displays this error: # No module named test_scripts_checkconfig_does_not_exist # # We need a regexp that matches both. self.do_test_load(config=config, stderr_re=re.compile( "No module named '?test_scripts_checkconfig_does_not_exist'?")) self.flushLoggedErrors() def test_failure_no_workers(self): config = textwrap.dedent("""\ BuildmasterConfig={} """) self.do_test_load(config=config, stderr_re=re.compile('no workers')) self.flushLoggedErrors() def test_success_imports(self): config = textwrap.dedent("""\ from othermodule import port c = BuildmasterConfig = {} c['schedulers'] = [] c['builders'] = [] c['workers'] = [] c['protocols'] = {'pb': {'port': port}} """) other_files = {'othermodule.py': 'port = 9989'} self.do_test_load(config=config, other_files=other_files) def test_success_import_package(self): config = textwrap.dedent("""\ from otherpackage.othermodule import port c = BuildmasterConfig = {} c['schedulers'] = [] c['builders'] = [] c['workers'] = [] c['protocols'] = {'pb': {'port': 9989}} """) other_files = { ('otherpackage', '__init__.py'): '', ('otherpackage', 'othermodule.py'): 'port = 9989', } self.do_test_load(config=config, other_files=other_files) class TestCheckconfig(unittest.TestCase): def setUp(self): self.loadConfig = mock.Mock( spec=checkconfig._loadConfig, return_value=3) # checkconfig is decorated with @in_reactor, so strip that decoration # since the reactor is already running self.patch(checkconfig, 'checkconfig', checkconfig.checkconfig._orig) self.patch(checkconfig, '_loadConfig', self.loadConfig) def test_checkconfig_default(self): 
self.assertEqual(checkconfig.checkconfig(dict()), 3) self.loadConfig.assert_called_with(basedir=os.getcwd(), configFile='master.cfg', quiet=None) def test_checkconfig_given_dir(self): self.assertEqual(checkconfig.checkconfig(dict(configFile='.')), 3) self.loadConfig.assert_called_with(basedir='.', configFile='master.cfg', quiet=None) def test_checkconfig_given_file(self): config = dict(configFile='master.cfg') self.assertEqual(checkconfig.checkconfig(config), 3) self.loadConfig.assert_called_with(basedir=os.getcwd(), configFile='master.cfg', quiet=None) def test_checkconfig_quiet(self): config = dict(configFile='master.cfg', quiet=True) self.assertEqual(checkconfig.checkconfig(config), 3) self.loadConfig.assert_called_with(basedir=os.getcwd(), configFile='master.cfg', quiet=True) def test_checkconfig_syntaxError_quiet(self): """ When C{base.getConfigFileFromTac} raises L{SyntaxError}, C{checkconfig.checkconfig} return an error. """ mockGetConfig = mock.Mock(spec=base.getConfigFileFromTac, side_effect=SyntaxError) self.patch(checkconfig, 'getConfigFileFromTac', mockGetConfig) config = dict(configFile='.', quiet=True) self.assertEqual(checkconfig.checkconfig(config), 1) buildbot-3.4.0/master/buildbot/test/unit/scripts/test_cleanupdb.py000066400000000000000000000151341413250514000254130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import textwrap import sqlalchemy as sa from twisted.internet import defer from twisted.trial import unittest from buildbot.scripts import cleanupdb from buildbot.test.fake import fakemaster from buildbot.test.unit.db import test_logs from buildbot.test.util import db from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util.misc import TestReactorMixin try: import lz4 [lz4] hasLz4 = True except ImportError: hasLz4 = False def mkconfig(**kwargs): config = dict(quiet=False, basedir=os.path.abspath('basedir'), force=True) config.update(kwargs) return config def patch_environ(case, key, value): """ Add an environment variable for the duration of a test. """ old_environ = os.environ.copy() def cleanup(): os.environ.clear() os.environ.update(old_environ) os.environ[key] = value case.addCleanup(cleanup) class TestCleanupDb(misc.StdoutAssertionsMixin, dirs.DirsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.origcwd = os.getcwd() self.setUpDirs('basedir') with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f: f.write(textwrap.dedent(""" from twisted.application import service application = service.Application('buildmaster') """)) self.setUpStdoutAssertions() self.ensureNoSqliteMemory() def tearDown(self): os.chdir(self.origcwd) self.tearDownDirs() def ensureNoSqliteMemory(self): # test may use mysql or pg if configured in env envkey = "BUILDBOT_TEST_DB_URL" if envkey not in os.environ or os.environ[envkey] == 'sqlite://': patch_environ(self, envkey, "sqlite:///" + os.path.join( self.origcwd, "basedir", "state.sqlite")) def createMasterCfg(self, extraconfig=""): os.chdir(self.origcwd) with open(os.path.join('basedir', 'master.cfg'), 'wt') as f: f.write(textwrap.dedent(""" from buildbot.plugins import * c = BuildmasterConfig = dict() c['db_url'] = {dburl} c['buildbotNetUsageData'] = None c['multiMaster'] = True # don't complain for no 
builders {extraconfig} """.format(dburl=repr(os.environ["BUILDBOT_TEST_DB_URL"]), extraconfig=extraconfig))) @defer.inlineCallbacks def test_cleanup_not_basedir(self): res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='doesntexist')) self.assertEqual(res, 1) self.assertInStdout('invalid buildmaster directory') @defer.inlineCallbacks def test_cleanup_bad_config(self): res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir')) self.assertEqual(res, 1) self.assertInStdout("master.cfg' does not exist") @defer.inlineCallbacks def test_cleanup_bad_config2(self): self.createMasterCfg(extraconfig="++++ # syntaxerror") res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir')) self.assertEqual(res, 1) self.assertInStdout( "encountered a SyntaxError while parsing config file:") # config logs an error via log.err, we must eat it or trial will # complain self.flushLoggedErrors() def assertDictAlmostEqual(self, d1, d2): # The test shows each methods return different size # but we still make a fuzzy comparison to resist if underlying libraries # improve efficiency self.assertEqual(len(d1), len(d2)) for k in d2.keys(): self.assertApproximates(d1[k], d2[k], 10) class TestCleanupDbRealDb(db.RealDatabaseWithConnectorMixin, TestCleanupDb): @defer.inlineCallbacks def setUp(self): yield super().setUp() table_names = [ 'logs', 'logchunks', 'steps', 'builds', 'builders', 'masters', 'buildrequests', 'buildsets', 'workers' ] self.master = fakemaster.make_master(self, wantRealReactor=True) yield self.setUpRealDatabaseWithConnector(self.master, table_names=table_names) @defer.inlineCallbacks def tearDown(self): yield self.tearDownRealDatabaseWithConnector() @defer.inlineCallbacks def test_cleanup(self): # we reuse the fake db background data from db.logs unit tests yield self.insertTestData(test_logs.Tests.backgroundData) # insert a log with lots of redundancy LOGDATA = "xx\n" * 2000 logid = yield self.master.db.logs.addLog(102, "x", "x", "s") yield 
self.master.db.logs.appendLog(logid, LOGDATA) # test all methods lengths = {} for mode in self.master.db.logs.COMPRESSION_MODE: if mode == "lz4" and not hasLz4: # ok.. lz4 is not installed, don't fail lengths["lz4"] = 40 continue # create a master.cfg with different compression method self.createMasterCfg("c['logCompressionMethod'] = '{}'".format(mode)) res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir')) self.assertEqual(res, 0) # make sure the compression don't change the data we can retrieve # via api res = yield self.master.db.logs.getLogLines(logid, 0, 2000) self.assertEqual(res, LOGDATA) # retrieve the actual data size in db using raw sqlalchemy def thd(conn): tbl = self.master.db.model.logchunks q = sa.select([sa.func.sum(sa.func.length(tbl.c.content))]) q = q.where(tbl.c.logid == logid) return conn.execute(q).fetchone()[0] lengths[mode] = yield self.master.db.pool.do(thd) self.assertDictAlmostEqual( lengths, {'raw': 5999, 'bz2': 44, 'lz4': 40, 'gz': 31}) buildbot-3.4.0/master/buildbot/test/unit/scripts/test_create_master.py000066400000000000000000000220651413250514000262750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.db import connector from buildbot.db import model from buildbot.scripts import create_master from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin def mkconfig(**kwargs): config = dict(force=False, relocatable=False, config='master.cfg', db='sqlite:///state.sqlite', basedir=os.path.abspath('basedir'), quiet=False, **{'no-logrotate': False, 'log-size': 10000000, 'log-count': 10}) config.update(kwargs) return config class TestCreateMaster(misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): # createMaster is decorated with @in_reactor, so strip that decoration # since the master is already running self.patch(create_master, 'createMaster', create_master.createMaster._orig) self.setUpStdoutAssertions() # tests @defer.inlineCallbacks def do_test_createMaster(self, config): # mock out everything that createMaster calls, then check that # they are called, in order functions = ['makeBasedir', 'makeTAC', 'makeSampleConfig', 'createDB'] repls = {} calls = [] for fn in functions: repl = repls[fn] = mock.Mock(name=fn) repl.side_effect = lambda config, fn=fn: calls.append(fn) self.patch(create_master, fn, repl) repls['createDB'].side_effect = (lambda config: calls.append(fn) or defer.succeed(None)) rc = yield create_master.createMaster(config) self.assertEqual(rc, 0) self.assertEqual(calls, functions) for repl in repls.values(): repl.assert_called_with(config) @defer.inlineCallbacks def test_createMaster_quiet(self): yield self.do_test_createMaster(mkconfig(quiet=True)) self.assertWasQuiet() @defer.inlineCallbacks def test_createMaster_loud(self): yield self.do_test_createMaster(mkconfig(quiet=False)) self.assertInStdout('buildmaster configured in') class TestCreateMasterFunctions(www.WwwTestMixin, dirs.DirsMixin, 
misc.StdoutAssertionsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpDirs('test') self.basedir = os.path.abspath(os.path.join('test', 'basedir')) self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() def assertInTacFile(self, str): with open(os.path.join('test', 'buildbot.tac'), 'rt') as f: content = f.read() self.assertIn(str, content) def assertNotInTacFile(self, str): with open(os.path.join('test', 'buildbot.tac'), 'rt') as f: content = f.read() self.assertNotIn(str, content) def assertDBSetup(self, basedir=None, db_url='sqlite:///state.sqlite', verbose=True): # mock out the database setup self.db = mock.Mock() self.db.setup.side_effect = lambda *a, **k: defer.succeed(None) self.DBConnector = mock.Mock() self.DBConnector.return_value = self.db self.patch(connector, 'DBConnector', self.DBConnector) basedir = basedir or self.basedir # pylint: disable=unsubscriptable-object self.assertEqual( dict(basedir=self.DBConnector.call_args[0][1], db_url=self.DBConnector.call_args[0][0].mkconfig.db['db_url'], verbose=self.db.setup.call_args[1]['verbose'], check_version=self.db.setup.call_args[1]['check_version'], ), dict(basedir=self.basedir, db_url=db_url, verbose=True, check_version=False)) # tests def test_makeBasedir(self): self.assertFalse(os.path.exists(self.basedir)) create_master.makeBasedir(mkconfig(basedir=self.basedir)) self.assertTrue(os.path.exists(self.basedir)) self.assertInStdout('mkdir {}'.format(self.basedir)) def test_makeBasedir_quiet(self): self.assertFalse(os.path.exists(self.basedir)) create_master.makeBasedir(mkconfig(basedir=self.basedir, quiet=True)) self.assertTrue(os.path.exists(self.basedir)) self.assertWasQuiet() def test_makeBasedir_existing(self): os.mkdir(self.basedir) create_master.makeBasedir(mkconfig(basedir=self.basedir)) self.assertInStdout('updating existing installation') def test_makeTAC(self): create_master.makeTAC(mkconfig(basedir='test')) 
self.assertInTacFile("Application('buildmaster')") self.assertWasQuiet() def test_makeTAC_relocatable(self): create_master.makeTAC(mkconfig(basedir='test', relocatable=True)) self.assertInTacFile("basedir = '.'") # repr() prefers '' self.assertWasQuiet() def test_makeTAC_no_logrotate(self): create_master.makeTAC( mkconfig(basedir='test', **{'no-logrotate': True})) self.assertNotInTacFile("import Log") self.assertWasQuiet() def test_makeTAC_int_log_count(self): create_master.makeTAC(mkconfig(basedir='test', **{'log-count': 30})) self.assertInTacFile("\nmaxRotatedFiles = 30\n") self.assertWasQuiet() def test_makeTAC_str_log_count(self): with self.assertRaises(TypeError): create_master.makeTAC(mkconfig(basedir='test', **{'log-count': '30'})) def test_makeTAC_none_log_count(self): create_master.makeTAC(mkconfig(basedir='test', **{'log-count': None})) self.assertInTacFile("\nmaxRotatedFiles = None\n") self.assertWasQuiet() def test_makeTAC_int_log_size(self): create_master.makeTAC(mkconfig(basedir='test', **{'log-size': 3000})) self.assertInTacFile("\nrotateLength = 3000\n") self.assertWasQuiet() def test_makeTAC_str_log_size(self): with self.assertRaises(TypeError): create_master.makeTAC(mkconfig(basedir='test', **{'log-size': '3000'})) def test_makeTAC_existing_incorrect(self): with open(os.path.join('test', 'buildbot.tac'), 'wt') as f: f.write('WRONG') create_master.makeTAC(mkconfig(basedir='test')) self.assertInTacFile("WRONG") self.assertTrue(os.path.exists( os.path.join('test', 'buildbot.tac.new'))) self.assertInStdout('not touching existing buildbot.tac') def test_makeTAC_existing_incorrect_quiet(self): with open(os.path.join('test', 'buildbot.tac'), 'wt') as f: f.write('WRONG') create_master.makeTAC(mkconfig(basedir='test', quiet=True)) self.assertInTacFile("WRONG") self.assertWasQuiet() def test_makeTAC_existing_correct(self): create_master.makeTAC(mkconfig(basedir='test', quiet=True)) create_master.makeTAC(mkconfig(basedir='test')) 
self.assertFalse(os.path.exists( os.path.join('test', 'buildbot.tac.new'))) self.assertInStdout('and is correct') def test_makeSampleConfig(self): create_master.makeSampleConfig(mkconfig(basedir='test')) self.assertTrue(os.path.exists( os.path.join('test', 'master.cfg.sample'))) self.assertInStdout('creating ') def test_makeSampleConfig_db(self): create_master.makeSampleConfig(mkconfig(basedir='test', db='XXYYZZ', quiet=True)) with open(os.path.join('test', 'master.cfg.sample'), 'rt') as f: self.assertIn("XXYYZZ", f.read()) self.assertWasQuiet() @defer.inlineCallbacks def test_createDB(self): setup = mock.Mock(side_effect=lambda **kwargs: defer.succeed(None)) self.patch(connector.DBConnector, 'setup', setup) upgrade = mock.Mock(side_effect=lambda **kwargs: defer.succeed(None)) self.patch(model.Model, 'upgrade', upgrade) yield create_master.createDB( mkconfig(basedir='test', quiet=True), _noMonkey=True) setup.asset_called_with(check_version=False, verbose=False) upgrade.assert_called_with() self.assertWasQuiet() buildbot-3.4.0/master/buildbot/test/unit/scripts/test_logwatcher.py000066400000000000000000000134231413250514000256140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.scripts.logwatcher import BuildmasterStartupError from buildbot.scripts.logwatcher import BuildmasterTimeoutError from buildbot.scripts.logwatcher import LogWatcher from buildbot.scripts.logwatcher import ReconfigError from buildbot.test.util import dirs from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes class MockedLogWatcher(LogWatcher): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.printed_output = [] self.created_paths = [] def create_logfile(self, path): self.created_paths.append(path) def print_output(self, output): self.printed_output.append(output) class TestLogWatcher(unittest.TestCase, dirs.DirsMixin, TestReactorMixin): delimiter = unicode2bytes(os.linesep) def setUp(self): self.setUpDirs('workdir') self.addCleanup(self.tearDownDirs) self.setUpTestReactor() self.spawned_process = mock.Mock() self.reactor.spawnProcess = mock.Mock(return_value=self.spawned_process) def test_start(self): lw = MockedLogWatcher('workdir/test.log', _reactor=self.reactor) lw._start = mock.Mock() lw.start() self.reactor.spawnProcess.assert_called() self.assertEqual(lw.created_paths, ['workdir/test.log']) self.assertTrue(lw.running) @defer.inlineCallbacks def test_success_before_timeout(self): lw = MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() self.reactor.advance(4.9) lw.lineReceived(b'BuildMaster is running') res = yield d self.assertEqual(res, 'buildmaster') @defer.inlineCallbacks def test_failure_after_timeout(self): lw = MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() self.reactor.advance(5.1) lw.lineReceived(b'BuildMaster is running') with self.assertRaises(BuildmasterTimeoutError): yield d @defer.inlineCallbacks def test_progress_restarts_timeout(self): lw = 
MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() self.reactor.advance(4.9) lw.lineReceived(b'added builder') self.reactor.advance(4.9) lw.lineReceived(b'BuildMaster is running') res = yield d self.assertEqual(res, 'buildmaster') @defer.inlineCallbacks def test_handles_very_long_lines(self): lw = MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() lw.dataReceived(b't' * lw.MAX_LENGTH * 2 + self.delimiter + b'BuildMaster is running' + self.delimiter) res = yield d self.assertEqual(lw.printed_output, [ 'Got an a very long line in the log (length 32768 bytes), ignoring' ]) self.assertEqual(res, 'buildmaster') @defer.inlineCallbacks def test_handles_very_long_lines_separate_packet(self): lw = MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() lw.dataReceived(b't' * lw.MAX_LENGTH * 2) lw.dataReceived(self.delimiter + b'BuildMaster is running' + self.delimiter) res = yield d self.assertEqual(lw.printed_output, [ 'Got an a very long line in the log (length 32768 bytes), ignoring' ]) self.assertEqual(res, 'buildmaster') @defer.inlineCallbacks def test_handles_very_long_lines_separate_packet_with_newline(self): lw = MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() lw.dataReceived(b't' * lw.MAX_LENGTH * 2 + self.delimiter) lw.dataReceived(b'BuildMaster is running' + self.delimiter) res = yield d self.assertEqual(lw.printed_output, [ 'Got an a very long line in the log (length 32768 bytes), ignoring' ]) self.assertEqual(res, 'buildmaster') @defer.inlineCallbacks def test_matches_lines(self): lines_and_expected = [ (b'configuration update aborted without making any changes', ReconfigError()), (b'WARNING: configuration update partially applied; master may malfunction', ReconfigError()), (b'Server Shut Down', ReconfigError()), (b'BuildMaster startup failed', BuildmasterStartupError()), (b'message from master: attached', 'worker'), 
(b'configuration update complete', 'buildmaster'), (b'BuildMaster is running', 'buildmaster'), ] for line, expected in lines_and_expected: lw = MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() lw.lineReceived(line) if isinstance(expected, Exception): with self.assertRaises(type(expected)): yield d else: res = yield d self.assertEqual(res, expected) buildbot-3.4.0/master/buildbot/test/unit/scripts/test_restart.py000066400000000000000000000052651413250514000251460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from twisted.trial import unittest from buildbot.scripts import restart from buildbot.scripts import start from buildbot.scripts import stop from buildbot.test.util import dirs from buildbot.test.util import misc def mkconfig(**kwargs): config = dict(quiet=False, basedir=os.path.abspath('basedir')) config.update(kwargs) return config class TestStop(misc.StdoutAssertionsMixin, dirs.DirsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('basedir') with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f: f.write("Application('buildmaster')") self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() # tests def test_restart_not_basedir(self): self.assertEqual(restart.restart(mkconfig(basedir='doesntexist')), 1) self.assertInStdout('invalid buildmaster directory') def test_restart_stop_fails(self): self.patch(stop, 'stop', lambda config, wait: 1) self.assertEqual(restart.restart(mkconfig()), 1) def test_restart_stop_succeeds_start_fails(self): self.patch(stop, 'stop', lambda config, wait: 0) self.patch(start, 'start', lambda config: 1) self.assertEqual(restart.restart(mkconfig()), 1) def test_restart_succeeds(self): self.patch(stop, 'stop', lambda config, wait: 0) self.patch(start, 'start', lambda config: 0) self.assertEqual(restart.restart(mkconfig()), 0) self.assertInStdout('now restarting') def test_restart_succeeds_quiet(self): self.patch(stop, 'stop', lambda config, wait: 0) self.patch(start, 'start', lambda config: 0) self.assertEqual(restart.restart(mkconfig(quiet=True)), 0) self.assertWasQuiet() def test_restart_clean(self): self.patch(stop, 'stop', lambda config, wait: 0) self.patch(start, 'start', lambda config: 0) self.assertEqual(restart.restart(mkconfig(quiet=True, clean=True)), 0) self.assertWasQuiet() buildbot-3.4.0/master/buildbot/test/unit/scripts/test_runner.py000066400000000000000000000726151413250514000247760ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import getpass import os import sys from io import StringIO import mock from twisted.python import log from twisted.python import runtime from twisted.python import usage from twisted.trial import unittest from buildbot.scripts import base from buildbot.scripts import runner from buildbot.test.util import misc class OptionsMixin: def setUpOptions(self): self.options_file = {} self.patch(base.SubcommandOptions, 'loadOptionsFile', lambda other_self: self.options_file) def assertOptions(self, opts, exp): got = {k: opts[k] for k in exp} if got != exp: msg = [] for k in exp: if opts[k] != exp[k]: msg.append(" {}: expected {}, got {}".format(k, repr(exp[k]), repr(opts[k]))) self.fail("did not get expected options\n" + ("\n".join(msg))) class TestUpgradeMasterOptions(OptionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.UpgradeMasterOptions() self.opts.parseOptions(args) return self.opts def test_synopsis(self): opts = runner.UpgradeMasterOptions() self.assertIn('buildbot upgrade-master', opts.getSynopsis()) def test_defaults(self): opts = self.parse() exp = dict(quiet=False, replace=False) self.assertOptions(opts, exp) def test_short(self): opts = self.parse('-q', '-r') exp = dict(quiet=True, replace=True) self.assertOptions(opts, exp) def 
test_long(self): opts = self.parse('--quiet', '--replace') exp = dict(quiet=True, replace=True) self.assertOptions(opts, exp) class TestCreateMasterOptions(OptionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.CreateMasterOptions() self.opts.parseOptions(args) return self.opts def defaults_and(self, **kwargs): defaults = dict(force=False, relocatable=False, config='master.cfg', db='sqlite:///state.sqlite', basedir=os.getcwd(), quiet=False, **{'no-logrotate': False, 'log-size': 10000000, 'log-count': 10}) unk_keys = set(kwargs.keys()) - set(defaults.keys()) assert not unk_keys, "invalid keys {}".format(unk_keys) opts = defaults.copy() opts.update(kwargs) return opts def test_synopsis(self): opts = runner.CreateMasterOptions() self.assertIn('buildbot create-master', opts.getSynopsis()) def test_defaults(self): opts = self.parse() exp = self.defaults_and() self.assertOptions(opts, exp) def test_db_quiet(self): opts = self.parse('-q') exp = self.defaults_and(quiet=True) self.assertOptions(opts, exp) def test_db_quiet_long(self): opts = self.parse('--quiet') exp = self.defaults_and(quiet=True) self.assertOptions(opts, exp) def test_force(self): opts = self.parse('-f') exp = self.defaults_and(force=True) self.assertOptions(opts, exp) def test_force_long(self): opts = self.parse('--force') exp = self.defaults_and(force=True) self.assertOptions(opts, exp) def test_relocatable(self): opts = self.parse('-r') exp = self.defaults_and(relocatable=True) self.assertOptions(opts, exp) def test_relocatable_long(self): opts = self.parse('--relocatable') exp = self.defaults_and(relocatable=True) self.assertOptions(opts, exp) def test_no_logrotate(self): opts = self.parse('-n') exp = self.defaults_and(**{'no-logrotate': True}) self.assertOptions(opts, exp) def test_no_logrotate_long(self): opts = self.parse('--no-logrotate') exp = self.defaults_and(**{'no-logrotate': True}) self.assertOptions(opts, exp) def test_config(self): 
opts = self.parse('-cxyz') exp = self.defaults_and(config='xyz') self.assertOptions(opts, exp) def test_config_long(self): opts = self.parse('--config=xyz') exp = self.defaults_and(config='xyz') self.assertOptions(opts, exp) def test_log_size(self): opts = self.parse('-s124') exp = self.defaults_and(**{'log-size': 124}) self.assertOptions(opts, exp) def test_log_size_long(self): opts = self.parse('--log-size=124') exp = self.defaults_and(**{'log-size': 124}) self.assertOptions(opts, exp) def test_log_size_noninteger(self): with self.assertRaises(usage.UsageError): self.parse('--log-size=1M') def test_log_count(self): opts = self.parse('-l124') exp = self.defaults_and(**{'log-count': 124}) self.assertOptions(opts, exp) def test_log_count_long(self): opts = self.parse('--log-count=124') exp = self.defaults_and(**{'log-count': 124}) self.assertOptions(opts, exp) def test_log_count_none(self): opts = self.parse('--log-count=None') exp = self.defaults_and(**{'log-count': None}) self.assertOptions(opts, exp) def test_log_count_noninteger(self): with self.assertRaises(usage.UsageError): self.parse('--log-count=M') def test_db_long(self): opts = self.parse('--db=foo://bar') exp = self.defaults_and(db='foo://bar') self.assertOptions(opts, exp) def test_db_invalid(self): with self.assertRaisesRegex(usage.UsageError, "could not parse database URL 'inv_db_url'"): self.parse("--db=inv_db_url") def test_db_basedir(self): path = r'c:\foo\bar' if runtime.platformType == "win32" else '/foo/bar' opts = self.parse('-f', path) exp = self.defaults_and(force=True, basedir=path) self.assertOptions(opts, exp) class BaseTestSimpleOptions(OptionsMixin): # tests for options with just --quiet and a usage message commandName = None optionsClass = None def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = self.optionsClass() self.opts.parseOptions(args) return self.opts def test_synopsis(self): opts = self.optionsClass() self.assertIn('buildbot {}'.format(self.commandName), 
opts.getSynopsis()) def test_defaults(self): opts = self.parse() exp = dict(quiet=False) self.assertOptions(opts, exp) def test_quiet(self): opts = self.parse('--quiet') exp = dict(quiet=True) self.assertOptions(opts, exp) class TestStopOptions(BaseTestSimpleOptions, unittest.TestCase): commandName = 'stop' optionsClass = runner.StopOptions class TestResetartOptions(BaseTestSimpleOptions, unittest.TestCase): commandName = 'restart' optionsClass = runner.RestartOptions def test_nodaemon(self): opts = self.parse('--nodaemon') exp = dict(nodaemon=True) self.assertOptions(opts, exp) class TestStartOptions(BaseTestSimpleOptions, unittest.TestCase): commandName = 'start' optionsClass = runner.StartOptions def test_nodaemon(self): opts = self.parse('--nodaemon') exp = dict(nodaemon=True) self.assertOptions(opts, exp) class TestReconfigOptions(BaseTestSimpleOptions, unittest.TestCase): commandName = 'reconfig' optionsClass = runner.ReconfigOptions class TestTryOptions(OptionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.TryOptions() self.opts.parseOptions(args) return self.opts def defaults_and(self, **kwargs): defaults = dict(connect=None, host=None, jobdir=None, username=None, master=None, passwd=None, who=None, comment=None, diff=None, patchlevel=0, baserev=None, vc=None, branch=None, repository=None, topfile=None, topdir=None, wait=False, dryrun=False, quiet=False, builders=[], properties={}, buildbotbin='buildbot') # dashes make python syntax hard.. 
defaults['get-builder-names'] = False if 'get_builder_names' in kwargs: kwargs['get-builder-names'] = kwargs['get_builder_names'] del kwargs['get_builder_names'] assert set(kwargs.keys()) <= set(defaults.keys()), "invalid keys" opts = defaults.copy() opts.update(kwargs) return opts def test_synopsis(self): opts = runner.TryOptions() self.assertIn('buildbot try', opts.getSynopsis()) def test_defaults(self): opts = self.parse() exp = self.defaults_and() self.assertOptions(opts, exp) def test_properties(self): opts = self.parse('--properties=a=b') exp = self.defaults_and(properties=dict(a='b')) self.assertOptions(opts, exp) def test_properties_multiple_opts(self): opts = self.parse('--properties=X=1', '--properties=Y=2') exp = self.defaults_and(properties=dict(X='1', Y='2')) self.assertOptions(opts, exp) def test_properties_equals(self): opts = self.parse('--properties=X=2+2=4') exp = self.defaults_and(properties=dict(X='2+2=4')) self.assertOptions(opts, exp) def test_properties_commas(self): opts = self.parse('--properties=a=b,c=d') exp = self.defaults_and(properties=dict(a='b', c='d')) self.assertOptions(opts, exp) def test_property(self): opts = self.parse('--property=a=b') exp = self.defaults_and(properties=dict(a='b')) self.assertOptions(opts, exp) def test_property_multiple_opts(self): opts = self.parse('--property=X=1', '--property=Y=2') exp = self.defaults_and(properties=dict(X='1', Y='2')) self.assertOptions(opts, exp) def test_property_equals(self): opts = self.parse('--property=X=2+2=4') exp = self.defaults_and(properties=dict(X='2+2=4')) self.assertOptions(opts, exp) def test_property_commas(self): opts = self.parse('--property=a=b,c=d') exp = self.defaults_and(properties=dict(a='b,c=d')) self.assertOptions(opts, exp) def test_property_and_properties(self): opts = self.parse('--property=X=1', '--properties=Y=2') exp = self.defaults_and(properties=dict(X='1', Y='2')) self.assertOptions(opts, exp) def test_properties_builders_multiple(self): opts = 
self.parse('--builder=aa', '--builder=bb') exp = self.defaults_and(builders=['aa', 'bb']) self.assertOptions(opts, exp) def test_options_short(self): opts = self.parse( *'-n -q -c pb -u me -m mr:7 -w you -C comm -p 2 -b bb'.split()) exp = self.defaults_and(dryrun=True, quiet=True, connect='pb', username='me', master='mr:7', who='you', comment='comm', patchlevel=2, builders=['bb']) self.assertOptions(opts, exp) def test_options_long(self): opts = self.parse( *"""--wait --dryrun --get-builder-names --quiet --connect=pb --host=h --jobdir=j --username=u --master=m:1234 --passwd=p --who=w --comment=comm --diff=d --patchlevel=7 --baserev=br --vc=cvs --branch=br --repository=rep --builder=bl --properties=a=b --topfile=Makefile --topdir=. --buildbotbin=.virtualenvs/buildbot/bin/buildbot""".split()) exp = self.defaults_and(wait=True, dryrun=True, get_builder_names=True, quiet=True, connect='pb', host='h', jobdir='j', username='u', master='m:1234', passwd='p', who='w', comment='comm', diff='d', patchlevel=7, baserev='br', vc='cvs', branch='br', repository='rep', builders=['bl'], properties=dict(a='b'), topfile='Makefile', topdir='.', buildbotbin='.virtualenvs/buildbot/bin/buildbot') self.assertOptions(opts, exp) def test_patchlevel_inval(self): with self.assertRaises(ValueError): self.parse('-p', 'a') def test_config_builders(self): self.options_file['try_builders'] = ['a', 'b'] opts = self.parse() self.assertOptions(opts, dict(builders=['a', 'b'])) def test_config_builders_override(self): self.options_file['try_builders'] = ['a', 'b'] opts = self.parse('-b', 'd') # overrides a, b self.assertOptions(opts, dict(builders=['d'])) def test_config_old_names(self): self.options_file['try_masterstatus'] = 'ms' self.options_file['try_dir'] = 'td' self.options_file['try_password'] = 'pw' opts = self.parse() self.assertOptions(opts, dict(master='ms', jobdir='td', passwd='pw')) def test_config_masterstatus(self): self.options_file['masterstatus'] = 'ms' opts = self.parse() 
self.assertOptions(opts, dict(master='ms')) def test_config_masterstatus_override(self): self.options_file['masterstatus'] = 'ms' opts = self.parse('-m', 'mm') self.assertOptions(opts, dict(master='mm')) def test_config_options(self): self.options_file.update(dict(try_connect='pb', try_vc='cvs', try_branch='br', try_repository='rep', try_topdir='.', try_topfile='Makefile', try_host='h', try_username='u', try_jobdir='j', try_password='p', try_master='m:8', try_who='w', try_comment='comm', try_quiet='y', try_wait='y', try_buildbotbin='.virtualenvs/buildbot/bin/buildbot')) opts = self.parse() exp = self.defaults_and(wait=True, quiet=True, connect='pb', host='h', jobdir='j', username='u', master='m:8', passwd='p', who='w', comment='comm', vc='cvs', branch='br', repository='rep', topfile='Makefile', topdir='.', buildbotbin='.virtualenvs/buildbot/bin/buildbot') self.assertOptions(opts, exp) def test_pb_withNoMaster(self): """ When 'builbot try' is asked to connect via pb, but no master is specified, a usage error is raised. """ with self.assertRaises(usage.UsageError): self.parse('--connect=pb') def test_pb_withInvalidMaster(self): """ When 'buildbot try' is asked to connect via pb, but an invalid master is specified, a usage error is raised. 
""" with self.assertRaises(usage.UsageError): self.parse('--connect=pb', '--master=foo') class TestSendChangeOptions(OptionsMixin, unittest.TestCase): master_and_who = ['-m', 'm:1', '-W', 'w'] def setUp(self): self.setUpOptions() self.getpass_response = 'typed-password' self.patch(getpass, 'getpass', lambda prompt: self.getpass_response) def parse(self, *args): self.opts = runner.SendChangeOptions() self.opts.parseOptions(args) return self.opts def test_synopsis(self): opts = runner.SendChangeOptions() self.assertIn('buildbot sendchange', opts.getSynopsis()) def test_defaults(self): opts = self.parse('-m', 'm:1', '-W', 'me') exp = dict(master='m:1', auth=('change', 'changepw'), who='me', vc=None, repository='', project='', branch=None, category=None, revision=None, revision_file=None, property=None, comments='', logfile=None, when=None, revlink='', encoding='utf8', files=()) self.assertOptions(opts, exp) def test_files(self): opts = self.parse(*self.master_and_who + ['a', 'b', 'c']) self.assertEqual(opts['files'], ('a', 'b', 'c')) def test_properties(self): opts = self.parse('--property', 'x:y', '--property', 'a:b', *self.master_and_who) self.assertEqual(opts['properties'], dict(x="y", a="b")) def test_properties_with_colon(self): opts = self.parse('--property', 'x:http://foo', *self.master_and_who) self.assertEqual(opts['properties'], dict(x='http://foo')) def test_config_file(self): self.options_file['master'] = 'MMM:123' self.options_file['who'] = 'WWW' self.options_file['branch'] = 'BBB' self.options_file['category'] = 'CCC' self.options_file['vc'] = 'svn' opts = self.parse() exp = dict(master='MMM:123', who='WWW', branch='BBB', category='CCC', vc='svn') self.assertOptions(opts, exp) def test_short_args(self): opts = self.parse(*('-m m:1 -a a:b -W W -R r -P p -b b -s git ' + '-C c -r r -p pn:pv -c c -F f -w 123 -l l -e e').split()) exp = dict(master='m:1', auth=('a', 'b'), who='W', repository='r', project='p', branch='b', category='c', revision='r', vc='git', 
properties=dict(pn='pv'), comments='c', logfile='f', when=123.0, revlink='l', encoding='e') self.assertOptions(opts, exp) def test_long_args(self): opts = self.parse(*('--master m:1 --auth a:b --who w --repository r ' + '--project p --branch b --category c --revision r --vc git ' + '--property pn:pv --comments c --logfile f ' + '--when 123 --revlink l --encoding e').split()) exp = dict(master='m:1', auth=('a', 'b'), who='w', repository='r', project='p', branch='b', category='c', revision='r', vc='git', properties=dict(pn='pv'), comments='c', logfile='f', when=123.0, revlink='l', encoding='e') self.assertOptions(opts, exp) def test_revision_file(self): with open('revfile', 'wt') as f: f.write('my-rev') self.addCleanup(lambda: os.unlink('revfile')) opts = self.parse('--revision_file', 'revfile', *self.master_and_who) self.assertOptions(opts, dict(revision='my-rev')) def test_invalid_when(self): with self.assertRaises(usage.UsageError): self.parse('--when=foo', *self.master_and_who) def test_comments_overrides_logfile(self): opts = self.parse('--logfile', 'logs', '--comments', 'foo', *self.master_and_who) self.assertOptions(opts, dict(comments='foo')) def test_logfile(self): with open('comments', 'wt') as f: f.write('hi') self.addCleanup(lambda: os.unlink('comments')) opts = self.parse('--logfile', 'comments', *self.master_and_who) self.assertOptions(opts, dict(comments='hi')) def test_logfile_stdin(self): stdin = mock.Mock() stdin.read = lambda: 'hi' self.patch(sys, 'stdin', stdin) opts = self.parse('--logfile', '-', *self.master_and_who) self.assertOptions(opts, dict(comments='hi')) def test_auth_getpass(self): opts = self.parse('--auth=dustin', *self.master_and_who) self.assertOptions(opts, dict(auth=('dustin', 'typed-password'))) def test_invalid_vcs(self): with self.assertRaises(usage.UsageError): self.parse('--vc=foo', *self.master_and_who) def test_invalid_master(self): with self.assertRaises(usage.UsageError): self.parse("--who=test", "-m foo") class 
TestTryServerOptions(OptionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.TryServerOptions() self.opts.parseOptions(args) return self.opts def test_synopsis(self): opts = runner.TryServerOptions() self.assertIn('buildbot tryserver', opts.getSynopsis()) def test_defaults(self): with self.assertRaises(usage.UsageError): self.parse() def test_with_jobdir(self): opts = self.parse('--jobdir', 'xyz') exp = dict(jobdir='xyz') self.assertOptions(opts, exp) class TestCheckConfigOptions(OptionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.CheckConfigOptions() self.opts.parseOptions(args) return self.opts def test_synopsis(self): opts = runner.CheckConfigOptions() self.assertIn('buildbot checkconfig', opts.getSynopsis()) def test_defaults(self): opts = self.parse() exp = dict(quiet=False) self.assertOptions(opts, exp) def test_configfile(self): opts = self.parse('foo.cfg') exp = dict(quiet=False, configFile='foo.cfg') self.assertOptions(opts, exp) def test_quiet(self): opts = self.parse('-q') exp = dict(quiet=True) self.assertOptions(opts, exp) class TestUserOptions(OptionsMixin, unittest.TestCase): # mandatory arguments extra_args = ['--master', 'a:1', '--username', 'u', '--passwd', 'p'] def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.UserOptions() self.opts.parseOptions(args) return self.opts def test_defaults(self): with self.assertRaises(usage.UsageError): self.parse() def test_synopsis(self): opts = runner.UserOptions() self.assertIn('buildbot user', opts.getSynopsis()) def test_master(self): opts = self.parse("--master", "abcd:1234", '--op=get', '--ids=x', '--username=u', '--passwd=p') self.assertOptions(opts, dict(master="abcd:1234")) def test_ids(self): opts = self.parse("--ids", "id1,id2,id3", '--op', 'get', *self.extra_args) self.assertEqual(opts['ids'], ['id1', 'id2', 'id3']) def test_info(self): opts = 
self.parse("--info", "git=Tyler Durden ", '--op', 'add', *self.extra_args) self.assertEqual(opts['info'], [dict(git='Tyler Durden ')]) def test_info_only_id(self): opts = self.parse("--info", "tdurden", '--op', 'update', *self.extra_args) self.assertEqual(opts['info'], [dict(identifier='tdurden')]) def test_info_with_id(self): opts = self.parse("--info", "tdurden:svn=marla", '--op', 'update', *self.extra_args) self.assertEqual( opts['info'], [dict(identifier='tdurden', svn='marla')]) def test_info_multiple(self): opts = self.parse("--info", "git=Tyler Durden ", "--info", "git=Narrator ", '--op', 'add', *self.extra_args) self.assertEqual(opts['info'], [dict(git='Tyler Durden '), dict(git='Narrator ')]) def test_config_user_params(self): self.options_file['user_master'] = 'mm:99' self.options_file['user_username'] = 'un' self.options_file['user_passwd'] = 'pw' opts = self.parse('--op', 'get', '--ids', 'x') self.assertOptions( opts, dict(master='mm:99', username='un', passwd='pw')) def test_config_master(self): self.options_file['master'] = 'mm:99' opts = self.parse('--op', 'get', '--ids', 'x', '--username=u', '--passwd=p') self.assertOptions(opts, dict(master='mm:99')) def test_config_master_override(self): self.options_file['master'] = 'not seen' self.options_file['user_master'] = 'mm:99' opts = self.parse('--op', 'get', '--ids', 'x', '--username=u', '--passwd=p') self.assertOptions(opts, dict(master='mm:99')) def test_invalid_info(self): with self.assertRaises(usage.UsageError): self.parse("--info", "foo=bar", '--op', 'add', *self.extra_args) def test_no_master(self): with self.assertRaises(usage.UsageError): self.parse('-op=foo') def test_invalid_master(self): with self.assertRaises(usage.UsageError): self.parse('-m', 'foo') def test_no_operation(self): with self.assertRaises(usage.UsageError): self.parse('-m', 'a:1') def test_bad_operation(self): with self.assertRaises(usage.UsageError): self.parse('-m', 'a:1', '--op=mayhem') def test_no_username(self): with 
self.assertRaises(usage.UsageError): self.parse('-m', 'a:1', '--op=add') def test_no_password(self): with self.assertRaises(usage.UsageError): self.parse('--op=add', '-m', 'a:1', '-u', 'tdurden') def test_invalid_bb_username(self): with self.assertRaises(usage.UsageError): self.parse('--op=add', '--bb_username=tdurden', *self.extra_args) def test_invalid_bb_password(self): with self.assertRaises(usage.UsageError): self.parse('--op=add', '--bb_password=marla', *self.extra_args) def test_update_no_bb_username(self): with self.assertRaises(usage.UsageError): self.parse('--op=update', '--bb_password=marla', *self.extra_args) def test_update_no_bb_password(self): with self.assertRaises(usage.UsageError): self.parse('--op=update', '--bb_username=tdurden', *self.extra_args) def test_no_ids_info(self): with self.assertRaises(usage.UsageError): self.parse('--op=add', *self.extra_args) def test_ids_with_add(self): with self.assertRaises(usage.UsageError): self.parse('--op=add', '--ids=id1', *self.extra_args) def test_ids_with_update(self): with self.assertRaises(usage.UsageError): self.parse('--op=update', '--ids=id1', *self.extra_args) def test_no_ids_found_update(self): with self.assertRaises(usage.UsageError): self.parse("--op=update", "--info=svn=x", *self.extra_args) def test_id_with_add(self): with self.assertRaises(usage.UsageError): self.parse("--op=add", "--info=id:x", *self.extra_args) def test_info_with_remove(self): with self.assertRaises(usage.UsageError): self.parse('--op=remove', '--info=x=v', *self.extra_args) def test_info_with_get(self): with self.assertRaises(usage.UsageError): self.parse('--op=get', '--info=x=v', *self.extra_args) class TestOptions(OptionsMixin, misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() self.setUpStdoutAssertions() def parse(self, *args): self.opts = runner.Options() self.opts.parseOptions(args) return self.opts def test_defaults(self): with self.assertRaises(usage.UsageError): self.parse() def 
test_version(self): try: self.parse('--version') except SystemExit as e: self.assertEqual(e.args[0], 0) self.assertInStdout('Buildbot version:') def test_verbose(self): self.patch(log, 'startLogging', mock.Mock()) with self.assertRaises(usage.UsageError): self.parse("--verbose") log.startLogging.assert_called_once_with(sys.stderr) class TestRun(unittest.TestCase): class MySubCommand(usage.Options): subcommandFunction = 'buildbot.test.unit.scripts.test_runner.subcommandFunction' optFlags = [ ['loud', 'l', 'be noisy'] ] def postOptions(self): if self['loud']: raise usage.UsageError('THIS IS ME BEING LOUD') def setUp(self): # patch our subcommand in self.patch(runner.Options, 'subCommands', [['my', None, self.MySubCommand, 'my, my']]) # and patch in the callback for it global subcommandFunction subcommandFunction = mock.Mock(name='subcommandFunction', return_value=3) def test_run_good(self): self.patch(sys, 'argv', ['buildbot', 'my']) try: runner.run() except SystemExit as e: self.assertEqual(e.args[0], 3) else: self.fail("didn't exit") def test_run_bad(self): self.patch(sys, 'argv', ['buildbot', 'my', '-l']) stdout = StringIO() self.patch(sys, 'stdout', stdout) try: runner.run() except SystemExit as e: self.assertEqual(e.args[0], 1) else: self.fail("didn't exit") self.assertIn('THIS IS ME', stdout.getvalue()) buildbot-3.4.0/master/buildbot/test/unit/scripts/test_sendchange.py000066400000000000000000000122671413250514000255610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.clients import sendchange as sendchange_client from buildbot.scripts import sendchange from buildbot.test.util import misc class TestSendChange(misc.StdoutAssertionsMixin, unittest.TestCase): class FakeSender: def __init__(self, testcase, master, auth, encoding=None): self.master = master self.auth = auth self.encoding = encoding self.testcase = testcase def send(self, branch, revision, comments, files, **kwargs): kwargs['branch'] = branch kwargs['revision'] = revision kwargs['comments'] = comments kwargs['files'] = files self.send_kwargs = kwargs d = defer.Deferred() if self.testcase.fail: reactor.callLater(0, d.errback, RuntimeError("oh noes")) else: reactor.callLater(0, d.callback, None) return d def setUp(self): self.fail = False # set to true to get Sender.send to fail def Sender_constr(*args, **kwargs): self.sender = self.FakeSender(self, *args, **kwargs) return self.sender self.patch(sendchange_client, 'Sender', Sender_constr) # undo the effects of @in_reactor self.patch(sendchange, 'sendchange', sendchange.sendchange._orig) self.setUpStdoutAssertions() @defer.inlineCallbacks def test_sendchange_config(self): rc = yield sendchange.sendchange(dict(encoding='utf16', who='me', auth=['a', 'b'], master='m', branch='br', category='cat', revision='rr', properties={'a': 'b'}, repository='rep', project='prj', vc='git', revlink='rl', when=1234.0, comments='comm', files=('a', 'b'), codebase='cb')) self.assertEqual((self.sender.master, self.sender.auth, self.sender.encoding, self.sender.send_kwargs, self.getStdout(), rc), ('m', ['a', 'b'], 'utf16', { 'branch': 'br', 'category': 'cat', 'codebase': 'cb', 
'comments': 'comm', 'files': ('a', 'b'), 'project': 'prj', 'properties': {'a': 'b'}, 'repository': 'rep', 'revision': 'rr', 'revlink': 'rl', 'when': 1234.0, 'who': 'me', 'vc': 'git'}, 'change sent successfully', 0)) @defer.inlineCallbacks def test_sendchange_config_no_codebase(self): rc = yield sendchange.sendchange(dict(encoding='utf16', who='me', auth=['a', 'b'], master='m', branch='br', category='cat', revision='rr', properties={'a': 'b'}, repository='rep', project='prj', vc='git', revlink='rl', when=1234.0, comments='comm', files=('a', 'b'))) self.assertEqual((self.sender.master, self.sender.auth, self.sender.encoding, self.sender.send_kwargs, self.getStdout(), rc), ('m', ['a', 'b'], 'utf16', { 'branch': 'br', 'category': 'cat', 'codebase': None, 'comments': 'comm', 'files': ('a', 'b'), 'project': 'prj', 'properties': {'a': 'b'}, 'repository': 'rep', 'revision': 'rr', 'revlink': 'rl', 'when': 1234.0, 'who': 'me', 'vc': 'git'}, 'change sent successfully', 0)) @defer.inlineCallbacks def test_sendchange_fail(self): self.fail = True rc = yield sendchange.sendchange({}) self.assertEqual((self.getStdout().split('\n')[0], rc), ('change not sent:', 1)) buildbot-3.4.0/master/buildbot/test/unit/scripts/test_start.py000066400000000000000000000112011413250514000246020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sys import time import mock import twisted from twisted.internet import defer from twisted.internet.utils import getProcessOutputAndValue from twisted.python import versions from twisted.trial import unittest from buildbot.scripts import start from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util.decorators import skipUnlessPlatformIs def mkconfig(**kwargs): config = { 'quiet': False, 'basedir': os.path.abspath('basedir'), 'nodaemon': False, } config.update(kwargs) return config fake_master_tac = """\ from twisted.application import service from twisted.internet import reactor from twisted.python import log application = service.Application('highscore') class App(service.Service): def startService(self): super().startService() log.msg("BuildMaster is running") # heh heh heh reactor.callLater(0, reactor.stop) app = App() app.setServiceParent(application) # isBuildmasterDir wants to see this -> Application('buildmaster') """ class TestStart(misc.StdoutAssertionsMixin, dirs.DirsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('basedir') with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f: f.write(fake_master_tac) self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() # tests def test_start_not_basedir(self): self.assertEqual(start.start(mkconfig(basedir='doesntexist')), 1) self.assertInStdout('invalid buildmaster directory') def runStart(self, **config): args = [ '-c', 'from buildbot.scripts.start import start; import sys; ' 'sys.exit(start(%r))' % ( mkconfig(**config),), ] env = os.environ.copy() env['PYTHONPATH'] = os.pathsep.join(sys.path) return getProcessOutputAndValue(sys.executable, args=args, env=env) @defer.inlineCallbacks def test_start_no_daemon(self): (_, err, rc) = yield self.runStart(nodaemon=True) # on python 3.5, cryptography loudly complains to upgrade if sys.version_info[:2] != (3, 5): self.assertEqual((err, rc), (b'', 0)) 
@defer.inlineCallbacks def test_start_quiet(self): res = yield self.runStart(quiet=True) # on python 3.5, cryptography loudly complains to upgrade if sys.version_info[:2] != (3, 5): self.assertEqual(res, (b'', b'', 0)) @skipUnlessPlatformIs('posix') @defer.inlineCallbacks def test_start_timeout_nonnumber(self): (out, err, rc) = yield self.runStart(start_timeout='a') self.assertEqual((rc, err), (1, b'')) self.assertSubstring(b'Start timeout must be a number\n', out) @skipUnlessPlatformIs('posix') @defer.inlineCallbacks def test_start_timeout_number_string(self): # integer values from command-line options come in as strings res = yield self.runStart(start_timeout='10') self.assertEqual(res, (mock.ANY, b'', 0)) @skipUnlessPlatformIs('posix') @defer.inlineCallbacks def test_start(self): try: (out, err, rc) = yield self.runStart() self.assertEqual((rc, err), (0, b'')) self.assertSubstring(b'buildmaster appears to have (re)started correctly', out) finally: # wait for the pidfile to go away after the reactor.stop # in buildbot.tac takes effect pidfile = os.path.join('basedir', 'twistd.pid') while os.path.exists(pidfile): time.sleep(0.01) if twisted.version <= versions.Version('twisted', 9, 0, 0): test_start.skip = test_start_quiet.skip = "Skipping due to suprious PotentialZombieWarning." # the remainder of this script does obscene things: # - forks # - shells out to tail # - starts and stops the reactor # so testing it will be *far* more pain than is worthwhile buildbot-3.4.0/master/buildbot/test/unit/scripts/test_stop.py000066400000000000000000000123071413250514000244420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import signal import time from twisted.trial import unittest from buildbot.scripts import stop from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util.decorators import skipUnlessPlatformIs def mkconfig(**kwargs): config = dict(quiet=False, clean=False, basedir=os.path.abspath('basedir')) config['no-wait'] = kwargs.pop('no_wait', False) config.update(kwargs) return config class TestStop(misc.StdoutAssertionsMixin, dirs.DirsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('basedir') self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() # tests def do_test_stop(self, config, kill_sequence, is_running=True, **kwargs): with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f: f.write("Application('buildmaster')") if is_running: with open("basedir/twistd.pid", 'wt') as f: f.write('1234') def sleep(t): self.assertTrue(kill_sequence, "unexpected sleep: %d" % t) what, exp_t = kill_sequence.pop(0) self.assertEqual((what, exp_t), ('sleep', t)) self.patch(time, 'sleep', sleep) def kill(pid, signal): self.assertTrue(kill_sequence, "unexpected signal: %d" % signal) exp_sig, result = kill_sequence.pop(0) self.assertEqual((pid, signal), (1234, exp_sig)) if isinstance(result, Exception): raise result return result self.patch(os, 'kill', kill) rv = stop.stop(config, **kwargs) self.assertEqual(kill_sequence, []) return rv @skipUnlessPlatformIs('posix') def test_stop_not_running(self): rv = 
self.do_test_stop(mkconfig(no_wait=True), [], is_running=False) self.assertInStdout('not running') self.assertEqual(rv, 0) @skipUnlessPlatformIs('posix') def test_stop_dead_but_pidfile_remains(self): rv = self.do_test_stop(mkconfig(no_wait=True), [(signal.SIGTERM, OSError(3, 'No such process'))]) self.assertEqual(rv, 0) self.assertFalse(os.path.exists(os.path.join('basedir', 'twistd.pid'))) self.assertInStdout('not running') @skipUnlessPlatformIs('posix') def test_stop_dead_but_pidfile_remains_quiet(self): rv = self.do_test_stop(mkconfig(quiet=True, no_wait=True), [(signal.SIGTERM, OSError(3, 'No such process'))],) self.assertEqual(rv, 0) self.assertFalse(os.path.exists(os.path.join('basedir', 'twistd.pid'))) self.assertWasQuiet() @skipUnlessPlatformIs('posix') def test_stop_dead_but_pidfile_remains_wait(self): rv = self.do_test_stop(mkconfig(no_wait=True), [(signal.SIGTERM, OSError(3, 'No such process')) ], wait=True) self.assertEqual(rv, 0) self.assertFalse(os.path.exists(os.path.join('basedir', 'twistd.pid'))) @skipUnlessPlatformIs('posix') def test_stop_slow_death_wait(self): rv = self.do_test_stop(mkconfig(no_wait=True), [ (signal.SIGTERM, None), ('sleep', 0.1), (0, None), # polling.. 
('sleep', 1), (0, None), ('sleep', 1), (0, None), ('sleep', 1), (0, OSError(3, 'No such process')), ], wait=True) self.assertInStdout('is dead') self.assertEqual(rv, 0) @skipUnlessPlatformIs('posix') def test_stop_slow_death_wait_timeout(self): rv = self.do_test_stop(mkconfig(no_wait=True), [ (signal.SIGTERM, None), ('sleep', 0.1), ] + [(0, None), ('sleep', 1), ] * 10, wait=True) self.assertInStdout('never saw process') self.assertEqual(rv, 1) @skipUnlessPlatformIs('posix') def test_stop_slow_death_config_wait_timeout(self): rv = self.do_test_stop(mkconfig(), [ (signal.SIGTERM, None), ('sleep', 0.1), ] + [(0, None), ('sleep', 1), ] * 10, ) self.assertInStdout('never saw process') self.assertEqual(rv, 1) @skipUnlessPlatformIs('posix') def test_stop_clean(self): rv = self.do_test_stop(mkconfig(clean=True, no_wait=True), [ (signal.SIGUSR1, None), ], wait=False) self.assertInStdout('sent SIGUSR1 to process') self.assertEqual(rv, 0) buildbot-3.4.0/master/buildbot/test/unit/scripts/test_trycmd.py000066400000000000000000000022451413250514000247570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.trial import unittest from buildbot.clients import tryclient from buildbot.scripts import trycmd class TestStatusLog(unittest.TestCase): def test_trycmd(self): Try = mock.Mock() self.patch(tryclient, 'Try', Try) inst = Try.return_value = mock.Mock(name='Try-instance') rc = trycmd.trycmd(dict(cfg=1)) Try.assert_called_with(dict(cfg=1)) inst.run.assert_called_with() self.assertEqual(rc, 0) buildbot-3.4.0/master/buildbot/test/unit/scripts/test_tryserver.py000066400000000000000000000031631413250514000255220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sys from io import StringIO from twisted.trial import unittest from buildbot.scripts import tryserver from buildbot.test.util import dirs class TestStatusLog(dirs.DirsMixin, unittest.TestCase): def setUp(self): self.newdir = os.path.join('jobdir', 'new') self.tmpdir = os.path.join('jobdir', 'tmp') self.setUpDirs("jobdir", self.newdir, self.tmpdir) def test_trycmd(self): config = dict(jobdir='jobdir') inputfile = StringIO('this is my try job') self.patch(sys, 'stdin', inputfile) rc = tryserver.tryserver(config) self.assertEqual(rc, 0) newfiles = os.listdir(self.newdir) tmpfiles = os.listdir(self.tmpdir) self.assertEqual((len(newfiles), len(tmpfiles)), (1, 0)) with open(os.path.join(self.newdir, newfiles[0]), 'rt') as f: self.assertEqual(f.read(), 'this is my try job') buildbot-3.4.0/master/buildbot/test/unit/scripts/test_upgrade_master.py000066400000000000000000000202271413250514000264570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sys from io import StringIO import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config as config_module from buildbot.db import connector from buildbot.db import masters from buildbot.db import model from buildbot.scripts import base from buildbot.scripts import upgrade_master from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin def mkconfig(**kwargs): config = dict(quiet=False, replace=False, basedir='test') config.update(kwargs) return config class TestUpgradeMaster(dirs.DirsMixin, misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): # createMaster is decorated with @in_reactor, so strip that decoration # since the master is already running self.patch(upgrade_master, 'upgradeMaster', upgrade_master.upgradeMaster._orig) self.setUpDirs('test') self.setUpStdoutAssertions() def patchFunctions(self, basedirOk=True, configOk=True): self.calls = [] def checkBasedir(config): self.calls.append('checkBasedir') return basedirOk self.patch(base, 'checkBasedir', checkBasedir) def loadConfig(config, configFileName='master.cfg'): self.calls.append('loadConfig') return config_module.MasterConfig() if configOk else False self.patch(base, 'loadConfig', loadConfig) def upgradeFiles(config): self.calls.append('upgradeFiles') self.patch(upgrade_master, 'upgradeFiles', upgradeFiles) def upgradeDatabase(config, master_cfg): self.assertIsInstance(master_cfg, config_module.MasterConfig) self.calls.append('upgradeDatabase') self.patch(upgrade_master, 'upgradeDatabase', upgradeDatabase) # tests @defer.inlineCallbacks def test_upgradeMaster_success(self): self.patchFunctions() rv = yield upgrade_master.upgradeMaster(mkconfig(), _noMonkey=True) self.assertEqual(rv, 0) self.assertInStdout('upgrade complete') @defer.inlineCallbacks def test_upgradeMaster_quiet(self): 
self.patchFunctions() rv = yield upgrade_master.upgradeMaster(mkconfig(quiet=True), _noMonkey=True) self.assertEqual(rv, 0) self.assertWasQuiet() @defer.inlineCallbacks def test_upgradeMaster_bad_basedir(self): self.patchFunctions(basedirOk=False) rv = yield upgrade_master.upgradeMaster(mkconfig(), _noMonkey=True) self.assertEqual(rv, 1) @defer.inlineCallbacks def test_upgradeMaster_bad_config(self): self.patchFunctions(configOk=False) rv = yield upgrade_master.upgradeMaster(mkconfig(), _noMonkey=True) self.assertEqual(rv, 1) class TestUpgradeMasterFunctions(www.WwwTestMixin, dirs.DirsMixin, misc.StdoutAssertionsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpDirs('test') self.basedir = os.path.abspath(os.path.join('test', 'basedir')) self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() def writeFile(self, path, contents): with open(path, 'wt') as f: f.write(contents) def readFile(self, path): with open(path, 'rt') as f: return f.read() # tests def test_installFile(self): self.writeFile('test/srcfile', 'source data') upgrade_master.installFile(mkconfig(), 'test/destfile', 'test/srcfile') self.assertEqual(self.readFile('test/destfile'), 'source data') self.assertInStdout('creating test/destfile') def test_installFile_existing_differing(self): self.writeFile('test/srcfile', 'source data') self.writeFile('test/destfile', 'dest data') upgrade_master.installFile(mkconfig(), 'test/destfile', 'test/srcfile') self.assertEqual(self.readFile('test/destfile'), 'dest data') self.assertEqual(self.readFile('test/destfile.new'), 'source data') self.assertInStdout('writing new contents to') def test_installFile_existing_differing_overwrite(self): self.writeFile('test/srcfile', 'source data') self.writeFile('test/destfile', 'dest data') upgrade_master.installFile(mkconfig(), 'test/destfile', 'test/srcfile', overwrite=True) self.assertEqual(self.readFile('test/destfile'), 'source data') 
self.assertFalse(os.path.exists('test/destfile.new')) self.assertInStdout('overwriting') def test_installFile_existing_same(self): self.writeFile('test/srcfile', 'source data') self.writeFile('test/destfile', 'source data') upgrade_master.installFile(mkconfig(), 'test/destfile', 'test/srcfile') self.assertEqual(self.readFile('test/destfile'), 'source data') self.assertFalse(os.path.exists('test/destfile.new')) self.assertWasQuiet() def test_installFile_quiet(self): self.writeFile('test/srcfile', 'source data') upgrade_master.installFile(mkconfig(quiet=True), 'test/destfile', 'test/srcfile') self.assertWasQuiet() def test_upgradeFiles(self): upgrade_master.upgradeFiles(mkconfig()) for f in [ 'test/master.cfg.sample', ]: self.assertTrue(os.path.exists(f), "{} not found".format(f)) self.assertInStdout('upgrading basedir') def test_upgradeFiles_notice_about_unused_public_html(self): os.mkdir('test/public_html') self.writeFile('test/public_html/index.html', 'INDEX') upgrade_master.upgradeFiles(mkconfig()) self.assertInStdout('public_html is not used') @defer.inlineCallbacks def test_upgradeDatabase(self): setup = mock.Mock(side_effect=lambda **kwargs: defer.succeed(None)) self.patch(connector.DBConnector, 'setup', setup) upgrade = mock.Mock(side_effect=lambda **kwargs: defer.succeed(None)) self.patch(model.Model, 'upgrade', upgrade) setAllMastersActiveLongTimeAgo = mock.Mock( side_effect=lambda **kwargs: defer.succeed(None)) self.patch(masters.MastersConnectorComponent, 'setAllMastersActiveLongTimeAgo', setAllMastersActiveLongTimeAgo) yield upgrade_master.upgradeDatabase( mkconfig(basedir='test', quiet=True), config_module.MasterConfig()) setup.asset_called_with(check_version=False, verbose=False) upgrade.assert_called_with() self.assertWasQuiet() @defer.inlineCallbacks def test_upgradeDatabaseFail(self): setup = mock.Mock(side_effect=lambda **kwargs: defer.succeed(None)) self.patch(connector.DBConnector, 'setup', setup) self.patch(sys, 'stderr', StringIO()) upgrade = 
mock.Mock( side_effect=lambda **kwargs: defer.fail(Exception("o noz"))) self.patch(model.Model, 'upgrade', upgrade) ret = yield upgrade_master._upgradeMaster( mkconfig(basedir='test', quiet=True), config_module.MasterConfig()) self.assertEqual(ret, 1) self.assertIn("problem while upgrading!:\nTraceback (most recent call last):\n", sys.stderr.getvalue()) self.assertIn("o noz", sys.stderr.getvalue()) buildbot-3.4.0/master/buildbot/test/unit/scripts/test_user.py000066400000000000000000000077241413250514000244420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.clients import usersclient from buildbot.process.users import users from buildbot.scripts import user class TestUsersClient(unittest.TestCase): class FakeUsersClient: def __init__(self, master, username="user", passwd="userpw", port=0): self.master = master self.port = port self.username = username self.passwd = passwd self.fail = False def send(self, op, bb_username, bb_password, ids, info): self.op = op self.bb_username = bb_username self.bb_password = bb_password self.ids = ids self.info = info d = defer.Deferred() if self.fail: reactor.callLater(0, d.errback, RuntimeError("oh noes")) else: reactor.callLater(0, d.callback, None) return d def setUp(self): def fake_UsersClient(*args): self.usersclient = self.FakeUsersClient(*args) return self.usersclient self.patch(usersclient, 'UsersClient', fake_UsersClient) # un-do the effects of @in_reactor self.patch(user, 'user', user.user._orig) @defer.inlineCallbacks def test_usersclient_send_ids(self): yield user.user(dict(master='a:9990', username="x", passwd="y", op='get', bb_username=None, bb_password=None, ids=['me', 'you'], info=None)) c = self.usersclient self.assertEqual((c.master, c.port, c.username, c.passwd, c.op, c.ids, c.info), ('a', 9990, "x", "y", 'get', ['me', 'you'], None)) @defer.inlineCallbacks def test_usersclient_send_update_info(self): def _fake_encrypt(passwd): assert passwd == 'day' return 'ENCRY' self.patch(users, 'encrypt', _fake_encrypt) yield user.user(dict(master='a:9990', username="x", passwd="y", op='update', bb_username='bud', bb_password='day', ids=None, info=[{'identifier': 'x', 'svn': 'x'}])) c = self.usersclient self.assertEqual((c.master, c.port, c.username, c.passwd, c.op, c.bb_username, c.bb_password, c.ids, c.info), ('a', 9990, "x", "y", 'update', 'bud', 'ENCRY', None, [{'identifier': 'x', 'svn': 'x'}])) @defer.inlineCallbacks 
def test_usersclient_send_add_info(self): yield user.user(dict(master='a:9990', username="x", passwd="y", op='add', bb_username=None, bb_password=None, ids=None, info=[{'git': 'x ', 'irc': 'aaa'}])) c = self.usersclient self.assertEqual((c.master, c.port, c.username, c.passwd, c.op, c.bb_username, c.bb_password, c.ids, c.info), ('a', 9990, "x", "y", 'add', None, None, None, [{'identifier': 'aaa', 'git': 'x ', 'irc': 'aaa'}])) buildbot-3.4.0/master/buildbot/test/unit/steps/000077500000000000000000000000001413250514000215105ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/steps/__init__.py000066400000000000000000000000001413250514000236070ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/steps/test_cmake.py000066400000000000000000000117711413250514000242100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.config import ConfigErrors from buildbot.process.properties import Property from buildbot.process.results import SUCCESS from buildbot.steps.cmake import CMake from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.steps import BuildStepMixin class TestCMake(BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpBuildStep() def tearDown(self): self.tearDownBuildStep() def expect_and_run_command(self, *params): command = [CMake.DEFAULT_CMAKE] + list(params) self.expectCommands( ExpectShell(command=command, workdir='wkdir') + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_definitions_type(self): with self.assertRaises(ConfigErrors): CMake(definitions='hello') def test_options_type(self): with self.assertRaises(ConfigErrors): CMake(options='hello') def test_plain(self): self.setupStep(CMake()) self.expectCommands( ExpectShell(command=[CMake.DEFAULT_CMAKE], workdir='wkdir') + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_cmake(self): cmake_bin = 'something/else/cmake' self.setupStep(CMake(cmake=cmake_bin)) self.expectCommands( ExpectShell(command=[cmake_bin], workdir='wkdir') + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_cmake_interpolation(self): prop = 'CMAKE' value = 'Real_CMAKE' self.setupStep(CMake(cmake=Property(prop))) self.properties.setProperty(prop, value, source='test') self.expectCommands( ExpectShell(command=[value], workdir='wkdir') + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_definitions(self): definition = { 'a': 'b' } self.setupStep(CMake(definitions=definition)) self.expect_and_run_command('-D%s=%s' % list(definition.items())[0]) def test_environment(self): command = [CMake.DEFAULT_CMAKE] environment = {'a': 'b'} 
self.setupStep(CMake(env=environment)) self.expectCommands( ExpectShell( command=command, workdir='wkdir', env={'a': 'b'}) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_definitions_interpolation(self): b_value = 'real_b' definitions = { 'a': Property('b') } self.setupStep(CMake(definitions=definitions)) self.properties.setProperty('b', b_value, source='test') self.expect_and_run_command('-D%s=%s' % ('a', b_value)) def test_definitions_renderable(self): b_value = 'real_b' definitions = Property('b') self.setupStep(CMake(definitions=definitions)) self.properties.setProperty('b', {'a': b_value}, source='test') self.expect_and_run_command('-D%s=%s' % ('a', b_value)) def test_generator(self): generator = 'Ninja' self.setupStep(CMake(generator=generator)) self.expect_and_run_command('-G', generator) def test_generator_interpolation(self): value = 'Our_GENERATOR' self.setupStep(CMake(generator=Property('GENERATOR'))) self.properties.setProperty('GENERATOR', value, source='test') self.expect_and_run_command('-G', value) def test_options(self): options = ('A', 'B') self.setupStep(CMake(options=options)) self.expect_and_run_command(*options) def test_options_interpolation(self): prop = 'option' value = 'value' self.setupStep(CMake(options=(Property(prop),))) self.properties.setProperty(prop, value, source='test') self.expect_and_run_command(value) def test_path(self): path = 'some/path' self.setupStep(CMake(path=path)) self.expect_and_run_command(path) def test_path_interpolation(self): prop = 'path' value = 'some/path' self.setupStep(CMake(path=Property(prop))) self.properties.setProperty(prop, value, source='test') self.expect_and_run_command(value) buildbot-3.4.0/master/buildbot/test/unit/steps/test_cppcheck.py000066400000000000000000000105251413250514000247040ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process.properties import WithProperties from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import cppcheck from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class Cppcheck(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(cppcheck.Cppcheck(enable=['all'], inconclusive=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=[ 'cppcheck', '.', '--enable=all', '--inconclusive']) + ExpectShell.log('stdio', stdout='Checking file1.c...') + 0) self.expectOutcome(result=SUCCESS, state_string="cppcheck") return self.runStep() def test_command_failure(self): self.setupStep(cppcheck.Cppcheck(enable=['all'], inconclusive=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=[ 'cppcheck', '.', '--enable=all', '--inconclusive']) + ExpectShell.log('stdio', stdout='Checking file1.c...') + 1) self.expectOutcome(result=FAILURE, state_string="cppcheck (failure)") return self.runStep() def 
test_warnings(self): self.setupStep( cppcheck.Cppcheck(source=['file1.c'], enable=['warning', 'performance'])) self.expectCommands( ExpectShell(workdir='wkdir', command=[ 'cppcheck', 'file1.c', '--enable=warning,performance']) + ExpectShell.log( 'stdio', stdout=('Checking file1.c...\n' '[file1.c:3]: (warning) Logical disjunction always ' 'evaluates to true: t >= 0 || t < 65.\n' '(information) Cppcheck cannot find all the include files ' '(use --check-config for details)')) + 0) self.expectOutcome(result=WARNINGS, state_string="cppcheck warning=1 information=1 (warnings)") return self.runStep() def test_errors(self): self.setupStep(cppcheck.Cppcheck(extra_args=['--my-param=5'])) self.expectCommands( ExpectShell(workdir='wkdir', command=[ 'cppcheck', '.', '--my-param=5']) + ExpectShell.log( 'stdio', stdout=('Checking file1.c...\n' '[file1.c:3]: (error) Possible null pointer dereference: filter\n' '[file1.c:4]: (error) Memory leak: columns\n' "[file1.c:7]: (style) The scope of the variable 'pid' can be reduced")) + 0) self.expectOutcome(result=FAILURE, state_string="cppcheck error=2 style=1 (failure)") return self.runStep() def test_renderables(self): P = WithProperties self.setupStep(cppcheck.Cppcheck( binary=P('a'), source=[P('.'), P('f.c')], extra_args=[P('--p'), P('--p')])) self.expectCommands( ExpectShell(workdir='wkdir', command=[ 'a', '.', 'f.c', '--p', '--p']) + ExpectShell.log( 'stdio', stdout='Checking file1.c...') + 0) self.expectOutcome(result=SUCCESS, state_string="cppcheck") return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_http.py000066400000000000000000000173761413250514000241160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from twisted.web.resource import Resource from twisted.web.server import Site from twisted.web.util import redirectTo from buildbot.process import properties from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps import http from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin try: import txrequests assert txrequests import requests assert requests except ImportError: txrequests = requests = None # We use twisted's internal webserver instead of mocking requests # to be sure we use the correct requests interfaces class TestPage(Resource): isLeaf = True def render_GET(self, request): if request.uri == b"/404": request.setResponseCode(404) return b"404" elif request.uri == b'/redirect': return redirectTo(b'/redirected-path', request) elif request.uri == b"/header": return b"".join(request.requestHeaders.getRawHeaders(b"X-Test")) return b"OK" def render_POST(self, request): if request.uri == b"/404": request.setResponseCode(404) return b"404" return b"OK:" + request.content.read() class TestHTTPStep(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): timeout = 3 # those tests should not run long def setUp(self): self.setUpTestReactor() if txrequests is None: raise unittest.SkipTest( "Need to install txrequests to test http steps") # ignore 'http_proxy' environment variable 
when running tests session = http.getSession() session.trust_env = False # port 0 means random unused port self.listener = reactor.listenTCP(0, Site(TestPage())) self.port = self.listener.getHost().port return self.setUpBuildStep() @defer.inlineCallbacks def tearDown(self): http.closeSession() try: yield self.listener.stopListening() finally: yield self.tearDownBuildStep() def get_connection_string(self): return "http://127.0.0.1:{}".format(self.port) def getURL(self, path=""): return '{}/{}'.format(self.get_connection_string(), path) def test_get(self): url = self.getURL() self.setupStep(http.GET(url)) self.expectLogfile('log', "URL: {}\nStatus: 200\n ------ Content ------\nOK".format(url)) self.expectLogfile('content', "OK") self.expectOutcome(result=SUCCESS, state_string="Status code: 200") return self.runStep() def test_connection_error(self): def throwing_request(*args, **kwargs): raise requests.exceptions.ConnectionError("failed to connect") with mock.patch.object(http.getSession(), 'request', throwing_request): url = self.getURL("path") self.setupStep(http.GET(url)) self.expectOutcome(result=FAILURE, state_string="Requested (failure)") return self.runStep() def test_redirect(self): url = self.getURL("redirect") self.setupStep(http.GET(url)) expected_log = ''' Redirected 1 times: URL: {0}/redirect ------ Content ------ click here ============================================================ URL: {0}/redirected-path Status: 200 ------ Content ------ OK'''.format(self.get_connection_string()) self.expectLogfile('log', expected_log) self.expectLogfile('content', "OK") self.expectOutcome(result=SUCCESS, state_string="Status code: 200") return self.runStep() def test_404(self): url = self.getURL("404") self.setupStep(http.GET(url)) self.expectLogfile('log', "URL: {}\n ------ Content ------\n404".format(url)) self.expectLogfile('content', "404") self.expectOutcome(result=FAILURE, state_string="Status code: 404 (failure)") return self.runStep() def 
test_method_not_allowed(self): url = self.getURL("path") self.setupStep(http.PUT(url)) self.expectOutcome(result=FAILURE, state_string="Status code: 501 (failure)") return self.runStep() def test_post(self): url = self.getURL("path") self.setupStep(http.POST(url)) self.expectOutcome(result=SUCCESS, state_string="Status code: 200") self.expectLogfile('log', "URL: {}\nStatus: 200\n ------ Content ------\nOK:".format(url)) self.expectLogfile('content', "OK:") return self.runStep() def test_post_data(self): url = self.getURL("path") self.setupStep(http.POST(url, data='mydata')) self.expectOutcome(result=SUCCESS, state_string="Status code: 200") self.expectLogfile('log', "URL: {}\nStatus: 200\n ------ Content ------\nOK:mydata".format(url)) self.expectLogfile('content', "OK:mydata") return self.runStep() def test_post_data_dict(self): url = self.getURL("path") self.setupStep(http.POST(url, data={'key1': 'value1'})) self.expectOutcome(result=SUCCESS, state_string="Status code: 200") self.expectLogfile('log', '''\ URL: {} Status: 200 ------ Content ------ OK:key1=value1'''.format(url)) self.expectLogfile('content', "OK:key1=value1") return self.runStep() def test_header(self): url = self.getURL("header") self.setupStep(http.GET(url, headers={"X-Test": "True"})) self.expectLogfile('log', "URL: {}\nStatus: 200\n ------ Content ------\nTrue".format(url)) self.expectOutcome(result=SUCCESS, state_string="Status code: 200") return self.runStep() @defer.inlineCallbacks def test_hidden_header(self): url = self.getURL("header") self.setupStep(http.GET(url, headers={"X-Test": "True"}, hide_request_headers=["X-Test"], hide_response_headers=["Content-Length"])) self.expectLogfile('log', "URL: {}\nStatus: 200\n ------ Content ------\nTrue".format(url)) self.expectOutcome(result=SUCCESS, state_string="Status code: 200") yield self.runStep() self.assertIn("X-Test: ", self.step.logs['log'].header) self.assertIn("Content-Length: ", self.step.logs['log'].header) def 
test_params_renderable(self): url = self.getURL() self.setupStep(http.GET(url, params=properties.Property("x"))) self.properties.setProperty( 'x', {'param_1': 'param_1', 'param_2': 2}, 'here') self.expectLogfile('log', ("URL: {}?param_1=param_1¶m_2=2\nStatus: 200\n ------ Content ------\nOK" ).format(url)) self.expectLogfile('content', "OK") self.expectOutcome(result=SUCCESS, state_string="Status code: 200") return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_master.py000066400000000000000000000301271413250514000244170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import pprint import sys from twisted.internet import error from twisted.internet import reactor from twisted.python import failure from twisted.python import runtime from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.process.properties import Property from buildbot.process.properties import WithProperties from buildbot.process.properties import renderer from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps import master from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin _COMSPEC_ENV = 'COMSPEC' class TestMasterShellCommand(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() if runtime.platformType == 'win32': self.comspec = os.environ.get(_COMSPEC_ENV) os.environ[_COMSPEC_ENV] = r'C:\WINDOWS\system32\cmd.exe' return self.setUpBuildStep() def tearDown(self): if runtime.platformType == 'win32': if self.comspec: os.environ[_COMSPEC_ENV] = self.comspec else: del os.environ[_COMSPEC_ENV] return self.tearDownBuildStep() def patchSpawnProcess(self, exp_cmd, exp_argv, exp_path, exp_usePTY, exp_env, outputs): def spawnProcess(pp, cmd, argv, path, usePTY, env): self.assertEqual([cmd, argv, path, usePTY, env], [exp_cmd, exp_argv, exp_path, exp_usePTY, exp_env]) for output in outputs: if output[0] == 'out': pp.outReceived(output[1]) elif output[0] == 'err': pp.errReceived(output[1]) elif output[0] == 'rc': if output[1] != 0: so = error.ProcessTerminated(exitCode=output[1]) else: so = error.ProcessDone(None) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) def test_real_cmd(self): cmd = [sys.executable, '-c', 'print("hello")'] self.setupStep(master.MasterShellCommand(command=cmd)) if runtime.platformType == 'win32': self.expectLogfile('stdio', "hello\r\n") 
else: self.expectLogfile('stdio', "hello\n") self.expectOutcome(result=SUCCESS, state_string="Ran") return self.runStep() def test_real_cmd_interrupted(self): cmd = [sys.executable, '-c', 'while True: pass'] self.setupStep(master.MasterShellCommand(command=cmd)) self.expectLogfile('stdio', "") if runtime.platformType == 'win32': # windows doesn't have signals, so we don't get 'killed', # but the "exception" part still works. self.expectOutcome(result=EXCEPTION, state_string="failed (1) (exception)") else: self.expectOutcome(result=EXCEPTION, state_string="killed (9) (exception)") d = self.runStep() self.step.interrupt("KILL") return d def test_real_cmd_fails(self): cmd = [sys.executable, '-c', 'import sys; sys.exit(1)'] self.setupStep( master.MasterShellCommand(command=cmd)) self.expectLogfile('stdio', "") self.expectOutcome(result=FAILURE, state_string="failed (1) (failure)") return self.runStep() def test_constr_args(self): self.setupStep( master.MasterShellCommand(description='x', descriptionDone='y', env={'a': 'b'}, workdir='build', usePTY=True, command='true')) if runtime.platformType == 'win32': exp_argv = [r'C:\WINDOWS\system32\cmd.exe', '/c', 'true'] else: exp_argv = ['/bin/sh', '-c', 'true'] self.patchSpawnProcess( exp_cmd=exp_argv[0], exp_argv=exp_argv, exp_path='build', exp_usePTY=True, exp_env={'a': 'b'}, outputs=[ ('out', 'hello!\n'), ('err', 'world\n'), ('rc', 0), ]) self.expectOutcome(result=SUCCESS, state_string='y') return self.runStep() def test_env_subst(self): cmd = [sys.executable, '-c', 'import os; print(os.environ["HELLO"])'] os.environ['WORLD'] = 'hello' self.setupStep( master.MasterShellCommand(command=cmd, env={'HELLO': '${WORLD}'})) if runtime.platformType == 'win32': self.expectLogfile('stdio', "hello\r\n") else: self.expectLogfile('stdio', "hello\n") self.expectOutcome(result=SUCCESS) d = self.runStep() @d.addBoth def _restore_env(res): del os.environ['WORLD'] return res return d def test_env_list_subst(self): cmd = [sys.executable, 
'-c', 'import os; print(os.environ["HELLO"])'] os.environ['WORLD'] = 'hello' os.environ['LIST'] = 'world' self.setupStep(master.MasterShellCommand(command=cmd, env={'HELLO': ['${WORLD}', '${LIST}']})) if runtime.platformType == 'win32': self.expectLogfile('stdio', "hello;world\r\n") else: self.expectLogfile('stdio', "hello:world\n") self.expectOutcome(result=SUCCESS) d = self.runStep() @d.addBoth def _restore_env(res): del os.environ['WORLD'] del os.environ['LIST'] return res return d def test_prop_rendering(self): cmd = [sys.executable, '-c', WithProperties( 'import os; print("%s"); print(os.environ[\"BUILD\"])', 'project')] self.setupStep(master.MasterShellCommand(command=cmd, env={'BUILD': WithProperties('%s', "project")})) self.properties.setProperty("project", "BUILDBOT-TEST", "TEST") if runtime.platformType == 'win32': self.expectLogfile('stdio', "BUILDBOT-TEST\r\nBUILDBOT-TEST\r\n") else: self.expectLogfile('stdio', "BUILDBOT-TEST\nBUILDBOT-TEST\n") self.expectOutcome(result=SUCCESS) return self.runStep() def test_constr_args_descriptionSuffix(self): self.setupStep(master.MasterShellCommand(description='x', descriptionDone='y', descriptionSuffix='z', env={'a': 'b'}, workdir='build', usePTY=True, command='true')) if runtime.platformType == 'win32': exp_argv = [r'C:\WINDOWS\system32\cmd.exe', '/c', 'true'] else: exp_argv = ['/bin/sh', '-c', 'true'] self.patchSpawnProcess( exp_cmd=exp_argv[0], exp_argv=exp_argv, exp_path='build', exp_usePTY=True, exp_env={'a': 'b'}, outputs=[ ('out', 'hello!\n'), ('err', 'world\n'), ('rc', 0), ]) self.expectOutcome(result=SUCCESS, state_string='y z') return self.runStep() class TestSetProperty(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_simple(self): self.setupStep(master.SetProperty(property="testProperty", value=Interpolate( "sch=%(prop:scheduler)s, worker=%(prop:workername)s"))) 
self.properties.setProperty( 'scheduler', 'force', source='SetProperty', runtime=True) self.properties.setProperty( 'workername', 'testWorker', source='SetProperty', runtime=True) self.expectOutcome(result=SUCCESS, state_string="Set") self.expectProperty( 'testProperty', 'sch=force, worker=testWorker', source='SetProperty') return self.runStep() class TestLogRenderable(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_simple(self): self.setupStep(master.LogRenderable( content=Interpolate('sch=%(prop:scheduler)s, worker=%(prop:workername)s'))) self.properties.setProperty( 'scheduler', 'force', source='TestSetProperty', runtime=True) self.properties.setProperty( 'workername', 'testWorker', source='TestSetProperty', runtime=True) self.expectOutcome(result=SUCCESS, state_string='Logged') self.expectLogfile( 'Output', pprint.pformat('sch=force, worker=testWorker')) return self.runStep() class TestsSetProperties(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def doOneTest(self, **kwargs): # all three tests should create a 'a' property with 'b' value, all with different # more or less dynamic methods self.setupStep( master.SetProperties(name="my-step", **kwargs)) self.expectProperty('a', 'b', 'my-step') self.expectOutcome(result=SUCCESS, state_string='Properties Set') return self.runStep() def test_basic(self): return self.doOneTest(properties={'a': 'b'}) def test_renderable(self): return self.doOneTest(properties={'a': Interpolate("b")}) def test_renderer(self): @renderer def manipulate(props): # the renderer returns renderable! 
return {'a': Interpolate('b')} return self.doOneTest(properties=manipulate) class TestAssert(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_eq_pass(self): self.setupStep(master.Assert( Property("test_prop") == "foo")) self.properties.setProperty("test_prop", "foo", "bar") self.expectOutcome(result=SUCCESS) return self.runStep() def test_eq_fail(self): self.setupStep(master.Assert( Property("test_prop") == "bar")) self.properties.setProperty("test_prop", "foo", "bar") self.expectOutcome(result=FAILURE) return self.runStep() def test_renderable_pass(self): @renderer def test_renderer(props): return props.getProperty("test_prop") == "foo" self.setupStep(master.Assert(test_renderer)) self.properties.setProperty("test_prop", "foo", "bar") self.expectOutcome(result=SUCCESS) return self.runStep() def test_renderable_fail(self): @renderer def test_renderer(props): return props.getProperty("test_prop") == "bar" self.setupStep(master.Assert(test_renderer)) self.properties.setProperty("test_prop", "foo", "bar") self.expectOutcome(result=FAILURE) return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_maxq.py000066400000000000000000000051451413250514000240740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot import config from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps import maxq from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestShellCommandExecution(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_testdir_required(self): with self.assertRaises(config.ConfigErrors): maxq.MaxQ() def test_success(self): self.setupStep( maxq.MaxQ(testdir='x')) self.expectCommands( ExpectShell(workdir='wkdir', command=["run_maxq.py", "x"]) + ExpectShell.log('stdio', stdout='no failures\n') + 0 ) self.expectOutcome(result=SUCCESS, state_string='success') return self.runStep() def test_nonzero_rc_no_failures(self): self.setupStep( maxq.MaxQ(testdir='x')) self.expectCommands( ExpectShell(workdir='wkdir', command=["run_maxq.py", "x"]) + ExpectShell.log('stdio', stdout='no failures\n') + 2 ) self.expectOutcome(result=FAILURE, state_string='1 maxq failures') return self.runStep() def test_failures(self): self.setupStep( maxq.MaxQ(testdir='x')) self.expectCommands( ExpectShell(workdir='wkdir', command=["run_maxq.py", "x"]) + ExpectShell.log('stdio', stdout='\nTEST FAILURE: foo\n' * 10) + 2 ) self.expectOutcome(result=FAILURE, state_string='10 maxq failures') return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_mswin.py000066400000000000000000000123031413250514000242550ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.steps import mswin from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestRobocopySimple(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): """ Test L{Robocopy} command building. 
""" def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def _run_simple_test(self, source, destination, expected_args=None, expected_code=0, expected_res=SUCCESS, **kwargs): s = mswin.Robocopy(source, destination, **kwargs) self.setupStep(s) s.rendered = True command = ['robocopy', source, destination] if expected_args: command += expected_args command += ['/TEE', '/NP'] self.expectCommands( ExpectShell( workdir='wkdir', command=command, ) + expected_code ) state_string = "'robocopy {} ...'".format(source) if expected_res != SUCCESS: state_string += ' ({})'.format(Results[expected_res]) self.expectOutcome(result=expected_res, state_string=state_string) return self.runStep() def test_copy(self): return self._run_simple_test(r'D:\source', r'E:\dest') def test_copy_files(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['a.txt', 'b.txt', '*.log'], expected_args=['a.txt', 'b.txt', '*.log'] ) def test_copy_recursive(self): return self._run_simple_test( r'D:\source', r'E:\dest', recursive=True, expected_args=['/E'] ) def test_mirror_files(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['*.foo'], mirror=True, expected_args=['*.foo', '/MIR'] ) def test_move_files(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['*.foo'], move=True, expected_args=['*.foo', '/MOVE'] ) def test_exclude(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['blah*'], exclude=['*.foo', '*.bar'], expected_args=['blah*', '/XF', '*.foo', '*.bar'] ) def test_exclude_files(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['blah*'], exclude_files=['*.foo', '*.bar'], expected_args=['blah*', '/XF', '*.foo', '*.bar'] ) def test_exclude_dirs(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['blah*'], exclude_dirs=['foo', 'bar'], expected_args=['blah*', '/XD', 'foo', 'bar'] ) def test_custom_opts(self): return 
self._run_simple_test( r'D:\source', r'E:\dest', files=['*.foo'], custom_opts=['/R:10', '/W:60'], expected_args=['*.foo', '/R:10', '/W:60'] ) def test_verbose_output(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['*.foo'], verbose=True, expected_args=['*.foo', '/V', '/TS', '/FP'] ) @defer.inlineCallbacks def test_codes(self): # Codes that mean uneventful copies (including no copy at all). for i in [0, 1]: yield self._run_simple_test( r'D:\source', r'E:\dest', expected_code=i, expected_res=SUCCESS ) # Codes that mean some mismatched or extra files were found. for i in range(2, 8): yield self._run_simple_test( r'D:\source', r'E:\dest', expected_code=i, expected_res=WARNINGS ) # Codes that mean errors have been encountered. for i in range(8, 32): yield self._run_simple_test( r'D:\source', r'E:\dest', expected_code=i, expected_res=FAILURE ) # bit 32 is meaningless yield self._run_simple_test( r'D:\source', r'E:\dest', expected_code=32, expected_res=EXCEPTION ) buildbot-3.4.0/master/buildbot/test/unit/steps/test_package_deb_lintian.py000066400000000000000000000043371413250514000270530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot import config from buildbot.process.results import SUCCESS from buildbot.steps.package.deb import lintian from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestDebLintian(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_fileloc(self): with self.assertRaises(config.ConfigErrors): lintian.DebLintian() def test_success(self): self.setupStep(lintian.DebLintian('foo_0.23_i386.changes')) self.expectCommands( ExpectShell(workdir='wkdir', command=['lintian', '-v', 'foo_0.23_i386.changes']) + 0) self.expectOutcome(result=SUCCESS, state_string="Lintian") return self.runStep() def test_success_suppressTags(self): self.setupStep(lintian.DebLintian('foo_0.23_i386.changes', suppressTags=['bad-distribution-in-changes-file'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['lintian', '-v', 'foo_0.23_i386.changes', '--suppress-tags', 'bad-distribution-in-changes-file']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_package_deb_pbuilder.py000066400000000000000000000521441413250514000272220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import stat import time from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps.package.deb import pbuilder from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestDebPbuilder(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_new(self): self.setupStep(pbuilder.DebPbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS, state_string='built') return self.runStep() def test_update(self): self.setupStep(pbuilder.DebPbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + Expect.update('stat', [stat.S_IFREG, 99, 99, 1, 0, 0, 99, 0, 0, 0]) + 0, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--update', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', ]) + 0, 
ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_buildonly_and_property(self): self.setupStep(pbuilder.DebPbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + Expect.update( 'stat', [stat.S_IFREG, 99, 99, 1, 0, 0, 99, 0, int(time.time()), 0]) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + ExpectShell.log( 'stdio', stdout='blah\ndpkg-genchanges >../somefilename.changes\foo\n') + 0) self.expectOutcome(result=SUCCESS) self.expectProperty('deb-changes', 'somefilename.changes', 'DebPbuilder') return self.runStep() def test_architecture(self): self.setupStep(pbuilder.DebPbuilder(architecture='amd64')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-amd64-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-amd64-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/', '--architecture', 'amd64']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--architecture', 'amd64', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-amd64-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_architecture_renderable(self): self.setupStep(pbuilder.DebPbuilder(architecture=Interpolate('amd64'))) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-amd64-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', 
'/var/cache/pbuilder/stable-amd64-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/', '--architecture', 'amd64']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--architecture', 'amd64', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-amd64-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_distribution(self): self.setupStep(pbuilder.DebPbuilder(distribution='woody')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/woody-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/woody-local-buildbot.tgz', '--distribution', 'woody', '--mirror', 'http://cdn.debian.net/debian/']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/woody-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_basetgz(self): self.setupStep(pbuilder.DebPbuilder(basetgz='/buildbot/stable-local.tgz')) self.expectCommands( Expect('stat', {'file': '/buildbot/stable-local.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/buildbot/stable-local.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/buildbot/stable-local.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mirror(self): self.setupStep(pbuilder.DebPbuilder(mirror='http://apt:9999/debian')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', 
'--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://apt:9999/debian']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_extrapackages(self): self.setupStep(pbuilder.DebPbuilder(extrapackages=['buildbot'])) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/', '--extrapackages', 'buildbot']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--extrapackages', 'buildbot']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_keyring(self): self.setupStep(pbuilder.DebPbuilder(keyring='/builbot/buildbot.gpg')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/', '--debootstrapopts', '--keyring=/builbot/buildbot.gpg']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_components(self): self.setupStep(pbuilder.DebPbuilder(components='main universe')) self.expectCommands( 
Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/', '--components', 'main universe']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_othermirror(self): self.setupStep(pbuilder.DebPbuilder(othermirror=['http://apt:9999/debian'])) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/', '--othermirror', 'http://apt:9999/debian']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() class TestDebCowbuilder(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_new(self): self.setupStep(pbuilder.DebCowbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.cow/'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/cowbuilder', '--create', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow/', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', 
'--pbuilder', '/usr/sbin/cowbuilder', '--', '--buildresult', '.', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow/']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_update(self): self.setupStep(pbuilder.DebCowbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.cow/'}) + Expect.update('stat', [stat.S_IFDIR, 99, 99, 1, 0, 0, 99, 0, 0, 0]) + 0, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/cowbuilder', '--update', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow/', ]) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/cowbuilder', '--', '--buildresult', '.', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow/']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_buildonly(self): self.setupStep(pbuilder.DebCowbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.cow/'}) + Expect.update( 'stat', [stat.S_IFDIR, 99, 99, 1, 0, 0, 99, 0, int(time.time()), 0]) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/cowbuilder', '--', '--buildresult', '.', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow/']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_update_reg(self): self.setupStep(pbuilder.DebCowbuilder( basetgz='/var/cache/pbuilder/stable-local-buildbot.cow')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.cow'}) + Expect.update('stat', [stat.S_IFREG, 99, 99, 1, 0, 0, 99, 0, 0, 0]) + 0, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/cowbuilder', '--update', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow']) + 1) self.expectOutcome(result=FAILURE, state_string='built (failure)') return self.runStep() def test_buildonly_reg(self): self.setupStep(pbuilder.DebCowbuilder( 
basetgz='/var/cache/pbuilder/stable-local-buildbot.cow')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.cow'}) + Expect.update( 'stat', [stat.S_IFREG, 99, 99, 1, 0, 0, 99, 0, int(time.time()), 0]) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/cowbuilder', '--', '--buildresult', '.', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow']) + 1) self.expectOutcome(result=FAILURE, state_string='built (failure)') return self.runStep() class TestUbuPbuilder(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_distribution(self): with self.assertRaises(config.ConfigErrors): pbuilder.UbuPbuilder() def test_new(self): self.setupStep(pbuilder.UbuPbuilder(distribution='oneiric')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/oneiric-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/oneiric-local-buildbot.tgz', '--distribution', 'oneiric', '--mirror', 'http://archive.ubuntu.com/ubuntu/', '--components', 'main universe']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/oneiric-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS, state_string='built') return self.runStep() class TestUbuCowbuilder(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_distribution(self): with self.assertRaises(config.ConfigErrors): pbuilder.UbuCowbuilder() def test_new(self): self.setupStep(pbuilder.UbuCowbuilder(distribution='oneiric')) self.expectCommands( Expect( 'stat', 
{'file': '/var/cache/pbuilder/oneiric-local-buildbot.cow/'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/cowbuilder', '--create', '--basepath', '/var/cache/pbuilder/oneiric-local-buildbot.cow/', '--distribution', 'oneiric', '--mirror', 'http://archive.ubuntu.com/ubuntu/', '--components', 'main universe']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/cowbuilder', '--', '--buildresult', '.', '--basepath', '/var/cache/pbuilder/oneiric-local-buildbot.cow/']) + 0) self.expectOutcome(result=SUCCESS, state_string='built') return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_package_rpm_mock.py000066400000000000000000000147221413250514000264110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.steps.package.rpm import mock from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestMock(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_root(self): with self.assertRaises(config.ConfigErrors): mock.Mock() def test_class_attrs(self): step = self.setupStep(mock.Mock(root='TESTROOT')) self.assertEqual(step.command, ['mock', '--root', 'TESTROOT']) def test_success(self): self.setupStep(mock.Mock(root='TESTROOT')) self.expectCommands( Expect('rmdir', {'dir': ['build/build.log', 'build/root.log', 'build/state.log'], 'logEnviron': False}) + 0, ExpectShell(workdir='wkdir', command=['mock', '--root', 'TESTROOT'], logfiles={'build.log': 'build.log', 'root.log': 'root.log', 'state.log': 'state.log'}) + 0) self.expectOutcome(result=SUCCESS, state_string="'mock --root ...'") return self.runStep() def test_resultdir_success(self): self.setupStep(mock.Mock(root='TESTROOT', resultdir='RESULT')) self.expectCommands( Expect('rmdir', {'dir': ['build/RESULT/build.log', 'build/RESULT/root.log', 'build/RESULT/state.log'], 'logEnviron': False}) + 0, ExpectShell(workdir='wkdir', command=['mock', '--root', 'TESTROOT', '--resultdir', 'RESULT'], logfiles={'build.log': 'RESULT/build.log', 'root.log': 'RESULT/root.log', 'state.log': 'RESULT/state.log'}) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_resultdir_renderable(self): resultdir_text = "RESULT" self.setupStep(mock.Mock(root='TESTROOT', resultdir=Interpolate( '%(kw:resultdir)s', resultdir=resultdir_text))) 
self.expectCommands( Expect('rmdir', {'dir': ['build/RESULT/build.log', 'build/RESULT/root.log', 'build/RESULT/state.log'], 'logEnviron': False}) + 0, ExpectShell(workdir='wkdir', command=['mock', '--root', 'TESTROOT', '--resultdir', 'RESULT'], logfiles={'build.log': 'RESULT/build.log', 'root.log': 'RESULT/root.log', 'state.log': 'RESULT/state.log'}) + 0) self.expectOutcome(result=SUCCESS, state_string="'mock --root ...'") return self.runStep() class TestMockBuildSRPM(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_spec(self): with self.assertRaises(config.ConfigErrors): mock.MockBuildSRPM(root='TESTROOT') def test_success(self): self.setupStep(mock.MockBuildSRPM(root='TESTROOT', spec="foo.spec")) self.expectCommands( Expect('rmdir', {'dir': ['build/build.log', 'build/root.log', 'build/state.log'], 'logEnviron': False}) + 0, ExpectShell(workdir='wkdir', command=['mock', '--root', 'TESTROOT', '--buildsrpm', '--spec', 'foo.spec', '--sources', '.'], logfiles={'build.log': 'build.log', 'root.log': 'root.log', 'state.log': 'state.log'},) + 0) self.expectOutcome(result=SUCCESS, state_string='mock buildsrpm') return self.runStep() class TestMockRebuild(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_srpm(self): with self.assertRaises(config.ConfigErrors): mock.MockRebuild(root='TESTROOT') def test_success(self): self.setupStep(mock.MockRebuild(root='TESTROOT', srpm="foo.src.rpm")) self.expectCommands( Expect('rmdir', {'dir': ['build/build.log', 'build/root.log', 'build/state.log'], 'logEnviron': False}) + 0, ExpectShell(workdir='wkdir', command=['mock', '--root', 'TESTROOT', '--rebuild', 'foo.src.rpm'], logfiles={'build.log': 'build.log', 'root.log': 'root.log', 'state.log': 
'state.log'},) + 0) self.expectOutcome(result=SUCCESS, state_string='mock rebuild srpm') return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_package_rpm_rpmbuild.py000066400000000000000000000124551413250514000272770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from collections import OrderedDict from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.steps.package.rpm import rpmbuild from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class RpmBuild(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_specfile(self): with self.assertRaises(config.ConfigErrors): rpmbuild.RpmBuild() def test_success(self): self.setupStep(rpmbuild.RpmBuild(specfile="foo.spec", dist=".el5")) self.expectCommands( ExpectShell(workdir='wkdir', command='rpmbuild --define "_topdir ' '`pwd`" --define "_builddir `pwd`" --define "_rpmdir ' '`pwd`" --define "_sourcedir `pwd`" --define "_specdir ' '`pwd`" --define 
"_srcrpmdir `pwd`" --define "dist .el5" ' '-ba foo.spec') + ExpectShell.log('stdio', stdout='lalala') + 0) self.expectOutcome(result=SUCCESS, state_string='RPMBUILD') return self.runStep() def test_autoRelease(self): self.setupStep(rpmbuild.RpmBuild(specfile="foo.spec", autoRelease=True)) self.expectCommands( ExpectShell(workdir='wkdir', command='rpmbuild --define "_topdir ' '`pwd`" --define "_builddir `pwd`" --define "_rpmdir `pwd`" ' '--define "_sourcedir `pwd`" --define "_specdir `pwd`" ' '--define "_srcrpmdir `pwd`" --define "_release 0" ' '--define "dist .el6" -ba foo.spec') + ExpectShell.log('stdio', stdout='Your code has been rated at 10/10') + 0) self.expectOutcome(result=SUCCESS, state_string='RPMBUILD') return self.runStep() def test_define(self): defines = [("a", "1"), ("b", "2")] self.setupStep(rpmbuild.RpmBuild(specfile="foo.spec", define=OrderedDict(defines))) self.expectCommands( ExpectShell(workdir='wkdir', command='rpmbuild --define "_topdir ' '`pwd`" --define "_builddir `pwd`" --define "_rpmdir ' '`pwd`" --define "_sourcedir `pwd`" --define ' '"_specdir `pwd`" --define "_srcrpmdir `pwd`" ' '--define "a 1" --define "b 2" --define "dist .el6" ' '-ba foo.spec') + ExpectShell.log('stdio', stdout='Your code has been rated at 10/10') + 0) self.expectOutcome(result=SUCCESS, state_string='RPMBUILD') return self.runStep() def test_define_none(self): self.setupStep(rpmbuild.RpmBuild(specfile="foo.spec", define=None)) self.expectCommands( ExpectShell(workdir='wkdir', command='rpmbuild --define "_topdir ' '`pwd`" --define "_builddir `pwd`" --define "_rpmdir ' '`pwd`" --define "_sourcedir `pwd`" --define ' '"_specdir `pwd`" --define "_srcrpmdir `pwd`" ' '--define "dist .el6" -ba foo.spec') + ExpectShell.log('stdio', stdout='Your code has been rated at 10/10') + 0) self.expectOutcome(result=SUCCESS, state_string='RPMBUILD') return self.runStep() @defer.inlineCallbacks def test_renderable_dist(self): self.setupStep(rpmbuild.RpmBuild(specfile="foo.spec", 
dist=Interpolate('%(prop:renderable_dist)s'))) self.properties.setProperty('renderable_dist', '.el7', 'test') self.expectCommands( ExpectShell(workdir='wkdir', command='rpmbuild --define "_topdir ' '`pwd`" --define "_builddir `pwd`" --define "_rpmdir ' '`pwd`" --define "_sourcedir `pwd`" --define "_specdir ' '`pwd`" --define "_srcrpmdir `pwd`" --define "dist .el7" ' '-ba foo.spec') + ExpectShell.log('stdio', stdout='lalala') + 0) self.expectOutcome(result=SUCCESS, state_string='RPMBUILD') yield self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_package_rpm_rpmlint.py000066400000000000000000000042421413250514000271410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process.results import SUCCESS from buildbot.steps.package.rpm import rpmlint from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestRpmLint(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(rpmlint.RpmLint()) self.expectCommands( ExpectShell(workdir='wkdir', command=['rpmlint', '-i', '.']) + 0) self.expectOutcome( result=SUCCESS, state_string='Finished checking RPM/SPEC issues') return self.runStep() def test_fileloc_success(self): self.setupStep(rpmlint.RpmLint(fileloc='RESULT')) self.expectCommands( ExpectShell(workdir='wkdir', command=['rpmlint', '-i', 'RESULT']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_config_success(self): self.setupStep(rpmlint.RpmLint(config='foo.cfg')) self.expectCommands( ExpectShell(workdir='wkdir', command=['rpmlint', '-i', '-f', 'foo.cfg', '.']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_python.py000066400000000000000000000640421413250514000244500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import python from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin log_output_success = '''\ Making output directory... Running Sphinx v1.0.7 loading pickled environment... not yet created No builder selected, using default: html building [html]: targets for 24 source files that are out of date updating environment: 24 added, 0 changed, 0 removed reading sources... [ 4%] index reading sources... [ 8%] manual/cfg-builders ... copying static files... done dumping search index... done dumping object inventory... done build succeeded. ''' log_output_nochange = '''\ Running Sphinx v1.0.7 loading pickled environment... done No builder selected, using default: html building [html]: targets for 0 source files that are out of date updating environment: 0 added, 0 changed, 0 removed looking for now-outdated files... none found no targets are out of date. ''' log_output_warnings = '''\ Running Sphinx v1.0.7 loading pickled environment... done building [html]: targets for 1 source files that are out of date updating environment: 0 added, 1 changed, 0 removed reading sources... [100%] file file.rst:18: (WARNING/2) Literal block expected; none found. looking for now-outdated files... none found pickling environment... done checking consistency... done preparing documents... done writing output... [ 50%] index writing output... 
[100%] file index.rst:: WARNING: toctree contains reference to document 'preamble' that \ doesn't have a title: no link will be generated writing additional files... search copying static files... done dumping search index... done dumping object inventory... done build succeeded, 2 warnings.''' log_output_warnings_strict = '''\ Running Sphinx v1.0.7 loading pickled environment... done building [html]: targets for 1 source files that are out of date updating environment: 0 added, 1 changed, 0 removed reading sources... [100%] file Warning, treated as error: file.rst:18:Literal block expected; none found. ''' warnings = '''\ file.rst:18: (WARNING/2) Literal block expected; none found. index.rst:: WARNING: toctree contains reference to document 'preamble' that \ doesn't have a title: no link will be generated\ ''' # this is from a run of epydoc against the buildbot source.. epydoc_output = '''\ [............... +--------------------------------------------------------------------- | In /home/dustin/code/buildbot/t/buildbot/master/buildbot/ | ec2.py: | Import failed (but source code parsing was successful). | Error: ImportError: No module named boto (line 19) | [.... Warning: Unable to extract the base list for twisted.web.resource.EncodingResourceWrapper: Bad dotted name [...... +--------------------------------------------------------------------- | In /home/dustin/code/buildbot/t/buildbot/master/buildbot/worker/ | ec2.py: | Import failed (but source code parsing was successful). | Error: ImportError: No module named boto (line 28) | [........... +--------------------------------------------------------------------- | In /home/dustin/code/buildbot/t/buildbot/master/buildbot/status/ | status_push.py: | Import failed (but source code parsing was successful). 
| Error: ImportError: No module named status_json (line 40) | [....................Special descriptor for class __provides__ ''' class BuildEPYDoc(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_sample(self): self.setupStep(python.BuildEPYDoc()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'epydocs']) + ExpectShell.log('stdio', stdout=epydoc_output) + 1, ) self.expectOutcome(result=FAILURE, state_string='epydoc warn=1 err=3 (failure)') return self.runStep() class PyLint(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() @parameterized.expand([ ('no_results', True), ('with_results', False) ]) def test_success(self, name, store_results): self.setupStep(python.PyLint(command=['pylint'], store_results=store_results)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log('stdio', stdout='Your code has been rated at 10/10') + python.PyLint.RC_OK) self.expectOutcome(result=SUCCESS, state_string='pylint') if store_results: self.expectTestResultSets([('Pylint warnings', 'code_issue', 'message')]) self.expectTestResults([]) return self.runStep() @parameterized.expand([ ('no_results', True), ('with_results', False) ]) def test_error(self, name, store_results): self.setupStep(python.PyLint(command=['pylint'], store_results=store_results)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11: Bad indentation. 
Found 6 spaces, expected 4\n' 'E: 12: Undefined variable \'foo\'\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_ERROR)) self.expectOutcome(result=FAILURE, state_string='pylint error=1 warning=1 (failure)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-error', 1) if store_results: self.expectTestResultSets([('Pylint warnings', 'code_issue', 'message')]) # note that no results are submitted for tests where we don't know the location return self.runStep() def test_header_output(self): self.setupStep(python.PyLint(command=['pylint'], store_results=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', header='W: 11: Bad indentation. Found 6 spaces, expected 4\n') + 0) self.expectOutcome(result=SUCCESS, state_string='pylint') return self.runStep() def test_failure(self): self.setupStep(python.PyLint(command=['pylint'], store_results=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11: Bad indentation. Found 6 spaces, expected 4\n' 'F: 13: something really strange happened\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_FATAL)) self.expectOutcome(result=FAILURE, state_string='pylint fatal=1 warning=1 (failure)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-fatal', 1) return self.runStep() def test_failure_zero_returncode(self): # Make sure that errors result in a failed step when pylint's # return code is 0, e.g. when run through a wrapper script. self.setupStep(python.PyLint(command=['pylint'], store_results=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11: Bad indentation. 
Found 6 spaces, expected 4\n' 'E: 12: Undefined variable \'foo\'\n')) + 0) self.expectOutcome(result=FAILURE, state_string='pylint error=1 warning=1 (failure)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-error', 1) return self.runStep() def test_regex_text(self): self.setupStep(python.PyLint(command=['pylint'], store_results=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11: Bad indentation. Found 6 spaces, expected 4\n' 'C: 1:foo123: Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_text_0_24(self): # pylint >= 0.24.0 prints out column offsets when using text format self.setupStep(python.PyLint(command=['pylint'], store_results=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11,0: Bad indentation. Found 6 spaces, expected 4\n' 'C: 3,10:foo123: Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_text_1_3_1(self): # at least pylint 1.3.1 prints out space padded column offsets when # using text format self.setupStep(python.PyLint(command=['pylint'], store_results=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11, 0: Bad indentation. 
Found 6 spaces, expected 4\n' 'C: 3,10:foo123: Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() @parameterized.expand([ ('no_results', True), ('with_results', False) ]) def test_regex_text_2_0_0(self, name, store_results): # pylint 2.0.0 changed default format to include file path self.setupStep(python.PyLint(command=['pylint'], store_results=store_results)) stdout = ( 'test.py:9:4: W0311: Bad indentation. Found 6 spaces, expected 4 (bad-indentation)\n' + 'test.py:1:0: C0114: Missing module docstring (missing-module-docstring)\n' ) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log('stdio', stdout=stdout) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) if store_results: self.expectTestResultSets([('Pylint warnings', 'code_issue', 'message')]) self.expectTestResults([ (1000, 'test.py:9:4: W0311: Bad indentation. 
Found 6 spaces, expected 4 ' + '(bad-indentation)', None, 'test.py', 9, None), (1000, 'test.py:1:0: C0114: Missing module docstring (missing-module-docstring)', None, 'test.py', 1, None), ]) return self.runStep() def test_regex_text_2_0_0_invalid_line(self): self.setupStep(python.PyLint(command=['pylint'], store_results=False)) stdout = ( 'test.py:abc:0: C0114: Missing module docstring (missing-module-docstring)\n' ) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log('stdio', stdout=stdout) + python.PyLint.RC_CONVENTION) self.expectOutcome(result=SUCCESS, state_string='pylint') self.expectProperty('pylint-warning', 0) self.expectProperty('pylint-convention', 0) self.expectProperty('pylint-total', 0) return self.runStep() def test_regex_text_ids(self): self.setupStep(python.PyLint(command=['pylint'], store_results=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W0311: 11: Bad indentation.\n' 'C0111: 1:funcName: Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_text_ids_0_24(self): # pylint >= 0.24.0 prints out column offsets when using text format self.setupStep(python.PyLint(command=['pylint'], store_results=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W0311: 11,0: Bad indentation.\n' 'C0111: 3,10:foo123: Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return 
self.runStep() @parameterized.expand([ ('no_results', True), ('with_results', False) ]) def test_regex_parseable_ids(self, name, store_results): self.setupStep(python.PyLint(command=['pylint'], store_results=store_results)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('test.py:9: [W0311] Bad indentation.\n' 'test.py:3: [C0111, foo123] Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) if store_results: self.expectTestResultSets([('Pylint warnings', 'code_issue', 'message')]) self.expectTestResults([ (1000, 'test.py:9: [W0311] Bad indentation.', None, 'test.py', 9, None), (1000, 'test.py:3: [C0111, foo123] Missing docstring', None, 'test.py', 3, None), ]) return self.runStep() def test_regex_parseable(self): self.setupStep(python.PyLint(command=['pylint'], store_results=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('test.py:9: [W] Bad indentation.\n' 'test.py:3: [C, foo123] Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_parseable_1_3_1(self): """ In pylint 1.3.1, output parseable is deprecated, but looks like that, this is also the new recommended format string: --msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg} """ self.setupStep(python.PyLint(command=['pylint'], store_results=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log('stdio', 
stdout=('test.py:9: [W0311(bad-indentation), ] Bad indentation. Found 6 ' 'spaces, expected 4\n' 'test.py:3: [C0111(missing-docstring), myFunc] Missing ' 'function docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() class PyFlakes(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + 0) self.expectOutcome(result=SUCCESS, state_string='pyflakes') return self.runStep() def test_content_in_header(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', # don't match pyflakes-like output in the header header="foo.py:1: 'bar' imported but unused\n") + 0) self.expectOutcome(result=0, state_string='pyflakes') return self.runStep() def test_unused(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', stdout="foo.py:1: 'bar' imported but unused\n") + 1) self.expectOutcome(result=WARNINGS, state_string='pyflakes unused=1 (warnings)') self.expectProperty('pyflakes-unused', 1) self.expectProperty('pyflakes-total', 1) return self.runStep() def test_undefined(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', stdout="foo.py:1: undefined name 'bar'\n") + 1) self.expectOutcome(result=FAILURE, state_string='pyflakes undefined=1 (failure)') self.expectProperty('pyflakes-undefined', 
1) self.expectProperty('pyflakes-total', 1) return self.runStep() def test_redefs(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', stdout="foo.py:2: redefinition of unused 'foo' from line 1\n") + 1) self.expectOutcome(result=WARNINGS, state_string='pyflakes redefs=1 (warnings)') self.expectProperty('pyflakes-redefs', 1) self.expectProperty('pyflakes-total', 1) return self.runStep() def test_importstar(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', stdout="foo.py:1: 'from module import *' used; unable to detect undefined names\n") + 1) self.expectOutcome(result=WARNINGS, state_string='pyflakes import*=1 (warnings)') self.expectProperty('pyflakes-import*', 1) self.expectProperty('pyflakes-total', 1) return self.runStep() def test_misc(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', stdout="foo.py:2: redefinition of function 'bar' from line 1\n") + 1) self.expectOutcome(result=WARNINGS, state_string='pyflakes misc=1 (warnings)') self.expectProperty('pyflakes-misc', 1) self.expectProperty('pyflakes-total', 1) return self.runStep() class TestSphinx(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_builddir_required(self): with self.assertRaises(config.ConfigErrors): python.Sphinx() def test_bad_mode(self): with self.assertRaises(config.ConfigErrors): python.Sphinx(sphinx_builddir="_build", mode="don't care") def test_success(self): self.setupStep(python.Sphinx(sphinx_builddir="_build")) self.expectCommands( ExpectShell(workdir='wkdir', command=['sphinx-build', '.', '_build']) + ExpectShell.log('stdio', stdout=log_output_success) + 0 ) 
self.expectOutcome(result=SUCCESS, state_string="sphinx 0 warnings") return self.runStep() def test_failure(self): self.setupStep(python.Sphinx(sphinx_builddir="_build")) self.expectCommands( ExpectShell(workdir='wkdir', command=['sphinx-build', '.', '_build']) + ExpectShell.log('stdio', stdout='oh noes!') + 1 ) self.expectOutcome(result=FAILURE, state_string="sphinx 0 warnings (failure)") return self.runStep() def test_strict_warnings(self): self.setupStep(python.Sphinx(sphinx_builddir="_build", strict_warnings=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['sphinx-build', '-W', '.', '_build']) + ExpectShell.log('stdio', stdout=log_output_warnings_strict) + 1 ) self.expectOutcome(result=FAILURE, state_string="sphinx 1 warnings (failure)") return self.runStep() def test_nochange(self): self.setupStep(python.Sphinx(sphinx_builddir="_build")) self.expectCommands( ExpectShell(workdir='wkdir', command=['sphinx-build', '.', '_build']) + ExpectShell.log('stdio', stdout=log_output_nochange) + 0 ) self.expectOutcome(result=SUCCESS, state_string="sphinx 0 warnings") return self.runStep() @defer.inlineCallbacks def test_warnings(self): self.setupStep(python.Sphinx(sphinx_builddir="_build")) self.expectCommands( ExpectShell(workdir='wkdir', command=['sphinx-build', '.', '_build']) + ExpectShell.log('stdio', stdout=log_output_warnings) + 0 ) self.expectOutcome(result=WARNINGS, state_string="sphinx 2 warnings (warnings)") self.expectLogfile("warnings", warnings) yield self.runStep() self.assertEqual(self.step.statistics, {'warnings': 2}) def test_constr_args(self): self.setupStep(python.Sphinx(sphinx_sourcedir='src', sphinx_builddir="bld", sphinx_builder='css', sphinx="/path/to/sphinx-build", tags=['a', 'b'], strict_warnings=True, defines=dict( empty=None, t=True, f=False, s="str"), mode='full')) self.expectCommands( ExpectShell(workdir='wkdir', command=['/path/to/sphinx-build', '-b', 'css', '-t', 'a', '-t', 'b', '-D', 'empty', '-D', 'f=0', '-D', 's=str', 
'-D', 't=1', '-E', '-W', 'src', 'bld']) + ExpectShell.log('stdio', stdout=log_output_success) + 0 ) self.expectOutcome(result=SUCCESS, state_string="sphinx 0 warnings") return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_python_twisted.py000066400000000000000000000475551413250514000262250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import textwrap from twisted.trial import unittest from buildbot.process.properties import Property from buildbot.process.results import FAILURE from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import python_twisted from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin failureLog = '''\ buildbot.test.unit.test_steps_python_twisted.Trial.testProperties ... [FAILURE] buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env ... [FAILURE] buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_nodupe ... 
[FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_supplement ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobs ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobsProperties ... [FAILURE] buildbot.test.unit.test_steps_python_twisted.Trial.test_run_plural ... [FAILURE] buildbot.test.unit.test_steps_python_twisted.Trial.test_run_singular ... [FAILURE] =============================================================================== [FAIL] Traceback (most recent call last): File "/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/util/steps.py", line 244, in check "expected step outcome") File "/home/dustin/code/buildbot/t/buildbot/sandbox/lib/python2.7/site-packages/twisted/trial/_synctest.py", line 356, in assertEqual % (msg, pformat(first), pformat(second))) twisted.trial.unittest.FailTest: expected step outcome not equal: a = {'result': 3, 'status_text': ['2 tests', 'passed']} b = {'result': 0, 'status_text': ['2 tests', 'passed']} buildbot.test.unit.test_steps_python_twisted.Trial.testProperties buildbot.test.unit.test_steps_python_twisted.Trial.test_run_plural =============================================================================== [FAIL] Traceback (most recent call last): File "/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/util/steps.py", line 244, in check "expected step outcome") File "/home/dustin/code/buildbot/t/buildbot/sandbox/lib/python2.7/site-packages/twisted/trial/_synctest.py", line 356, in assertEqual % (msg, pformat(first), pformat(second))) 
twisted.trial.unittest.FailTest: expected step outcome not equal: a = {'result': 3, 'status_text': ['no tests', 'run']} b = {'result': 0, 'status_text': ['no tests', 'run']} buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_nodupe buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_supplement =============================================================================== [FAIL] Traceback (most recent call last): File "/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/util/steps.py", line 244, in check "expected step outcome") File "/home/dustin/code/buildbot/t/buildbot/sandbox/lib/python2.7/site-packages/twisted/trial/_synctest.py", line 356, in assertEqual % (msg, pformat(first), pformat(second))) twisted.trial.unittest.FailTest: expected step outcome not equal: a = {'result': 3, 'status_text': ['1 test', 'passed']} b = {'result': 0, 'status_text': ['1 test', 'passed']} buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobs buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobsProperties buildbot.test.unit.test_steps_python_twisted.Trial.test_run_singular ------------------------------------------------------------------------------- Ran 8 tests in 0.101s FAILED (failures=8) ''' # noqa pylint: disable=line-too-long class Trial(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_run_env(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath=None, env={'PYTHONPATH': 'somepath'})) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}, env=dict(PYTHONPATH='somepath')) + ExpectShell.log('stdio', stdout="Ran 0 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='no tests run') 
return self.runStep() def test_run_env_supplement(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath='path1', env={'PYTHONPATH': ['path2', 'path3']})) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}, env=dict(PYTHONPATH=['path1', 'path2', 'path3'])) + ExpectShell.log('stdio', stdout="Ran 0 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='no tests run') return self.runStep() def test_run_env_nodupe(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath='path2', env={'PYTHONPATH': ['path1', 'path2']})) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}, env=dict(PYTHONPATH=['path1', 'path2'])) + ExpectShell.log('stdio', stdout="Ran 0 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='no tests run') return self.runStep() def test_run_singular(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath=None)) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + ExpectShell.log('stdio', stdout="Ran 1 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='1 test passed') return self.runStep() def test_run_plural(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath=None)) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + ExpectShell.log('stdio', stdout="Ran 2 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='2 tests passed') return self.runStep() def test_run_failure(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath=None)) self.expectCommands( 
ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + ExpectShell.log('stdio', stdout=failureLog) + 1 ) self.expectOutcome( result=FAILURE, state_string='tests 8 failures (failure)') self.expectLogfile('problems', failureLog.split('\n\n', 1)[1][:-1]) self.expectLogfile('warnings', textwrap.dedent('''\ buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_nodupe ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_supplement ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobs ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobsProperties ... 
[FAILURE] ''')) # noqa pylint: disable=line-too-long return self.runStep() def test_renderable_properties(self): self.setupStep(python_twisted.Trial(workdir='build', tests=Property('test_list'), testpath=None)) self.properties.setProperty('test_list', ['testname'], 'Test') self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + ExpectShell.log('stdio', stdout="Ran 2 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='2 tests passed') return self.runStep() def test_build_changed_files(self): self.setupStep(python_twisted.Trial(workdir='build', testChanges=True, testpath=None), buildFiles=['my/test/file.py', 'my/test/file2.py']) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', '--testmodule=my/test/file.py', '--testmodule=my/test/file2.py'], logfiles={'test.log': '_trial_temp/test.log'}) + ExpectShell.log('stdio', stdout="Ran 2 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='2 tests passed') return self.runStep() def test_test_path_env_python_path(self): self.setupStep(python_twisted.Trial(workdir='build', tests='testname', testpath='custom/test/path', env={'PYTHONPATH': '/existing/pypath'})) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}, env={'PYTHONPATH': ['custom/test/path', '/existing/pypath']}) + Expect.log('stdio', stdout="Ran 2 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='2 tests passed') return self.runStep() def test_custom_reactor(self): self.setupStep(python_twisted.Trial(workdir='build', reactor='customreactor', tests='testname', testpath=None)) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', '--reactor=customreactor', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + Expect.log('stdio', stdout="Ran 2 
tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='2 tests passed (custom)') return self.runStep() def test_custom_python(self): self.setupStep(python_twisted.Trial(workdir='build', tests='testname', python='/bin/mypython', testpath=None)) self.expectCommands( ExpectShell(workdir='build', command=['/bin/mypython', 'trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + Expect.log('stdio', stdout="Ran 2 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='2 tests passed') return self.runStep() def test_randomly(self): self.setupStep(python_twisted.Trial(workdir='build', randomly=True, tests='testname', testpath=None)) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', '--random=0', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + Expect.log('stdio', stdout="Ran 2 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='2 tests passed') return self.runStep() def test_run_jobs(self): """ The C{jobs} kwarg should correspond to trial's -j option ( included since Twisted 12.3.0), and make corresponding changes to logfiles. 
""" self.setupStep(python_twisted.Trial(workdir='build', tests='testname', testpath=None, jobs=2)) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', '--jobs=2', 'testname'], logfiles={ 'test.0.log': '_trial_temp/0/test.log', 'err.0.log': '_trial_temp/0/err.log', 'out.0.log': '_trial_temp/0/out.log', 'test.1.log': '_trial_temp/1/test.log', 'err.1.log': '_trial_temp/1/err.log', 'out.1.log': '_trial_temp/1/out.log', }) + ExpectShell.log('stdio', stdout="Ran 1 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='1 test passed') return self.runStep() def test_run_jobsProperties(self): """ C{jobs} should accept Properties """ self.setupStep(python_twisted.Trial(workdir='build', tests='testname', jobs=Property('jobs_count'), testpath=None)) self.properties.setProperty('jobs_count', '2', 'Test') self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', '--jobs=2', 'testname'], logfiles={ 'test.0.log': '_trial_temp/0/test.log', 'err.0.log': '_trial_temp/0/err.log', 'out.0.log': '_trial_temp/0/out.log', 'test.1.log': '_trial_temp/1/test.log', 'err.1.log': '_trial_temp/1/err.log', 'out.1.log': '_trial_temp/1/out.log', }) + ExpectShell.log('stdio', stdout="Ran 1 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='1 test passed') return self.runStep() class HLint(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_run_ok(self): self.setupStep(python_twisted.HLint(workdir='build'), buildFiles=['foo.xhtml']) self.expectCommands( ExpectShell(workdir='build', command=[ 'bin/lore', '-p', '--output', 'lint', 'foo.xhtml'],) + ExpectShell.log( 'stdio', stdout="dunno what hlint output looks like..\n") + 0 ) self.expectLogfile('files', 'foo.xhtml\n') self.expectOutcome(result=SUCCESS, state_string='0 hlints') return self.runStep() def 
test_custom_python(self): self.setupStep(python_twisted.HLint(workdir='build', python='/bin/mypython'), buildFiles=['foo.xhtml']) self.expectCommands( ExpectShell(workdir='build', command=['/bin/mypython', 'bin/lore', '-p', '--output', 'lint', 'foo.xhtml']) + 0 ) self.expectLogfile('files', 'foo.xhtml\n') self.expectOutcome(result=SUCCESS, state_string='0 hlints') return self.runStep() def test_command_failure(self): self.setupStep(python_twisted.HLint(workdir='build'), buildFiles=['foo.xhtml']) self.expectCommands( ExpectShell(workdir='build', command=['bin/lore', '-p', '--output', 'lint', 'foo.xhtml'],) + 1 ) self.expectLogfile('files', 'foo.xhtml\n') self.expectOutcome(result=FAILURE, state_string='hlint (failure)') return self.runStep() def test_no_build_files(self): self.setupStep(python_twisted.HLint(workdir='build')) self.expectOutcome(result=SKIPPED, state_string='hlint (skipped)') return self.runStep() def test_run_warnings(self): self.setupStep(python_twisted.HLint(workdir='build'), buildFiles=['foo.xhtml']) self.expectCommands( ExpectShell(workdir='build', command=[ 'bin/lore', '-p', '--output', 'lint', 'foo.xhtml']) + ExpectShell.log('stdio', stdout="colon: meaning warning\n") + 0 ) self.expectLogfile('warnings', 'colon: meaning warning') self.expectOutcome(result=WARNINGS, state_string='1 hlint (warnings)') return self.runStep() class RemovePYCs(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_run_ok(self): self.setupStep(python_twisted.RemovePYCs()) self.expectCommands( ExpectShell(workdir='wkdir', command=['find', '.', '-name', '\'*.pyc\'', '-exec', 'rm', '{}', ';']) + 0 ) self.expectOutcome(result=SUCCESS, state_string='remove .pycs') return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_renderable.py000066400000000000000000000032451413250514000252300ustar00rootroot00000000000000# This file 
is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process.buildstep import BuildStep from buildbot.process.properties import Interpolate from buildbot.test.util import config as configmixin from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestBuildStep(BuildStep): def run(self): self.setProperty('name', self.name) return 0 class TestBuildStepNameIsRenderable(steps.BuildStepMixin, unittest.TestCase, TestReactorMixin, configmixin.ConfigErrorsMixin): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_name_is_renderable(self): step = TestBuildStep(name=Interpolate('%(kw:foo)s', foo='bar')) self.setupStep(step) self.expectProperty('name', 'bar') self.expectOutcome(0) return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_shell.py000066400000000000000000001142501413250514000242330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re import textwrap from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process import properties from buildbot.process import remotetransfer from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import shell from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config as configmixin from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestShellCommandExecution(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_doStepIf_False(self): self.setupStep(shell.ShellCommand(command="echo hello", doStepIf=False)) self.expectOutcome(result=SKIPPED, state_string="'echo hello' (skipped)") return self.runStep() def test_constructor_args_validity(self): # this checks that an exception is raised for invalid arguments with self.assertRaisesConfigError( "Invalid argument(s) passed to ShellCommand: "): shell.ShellCommand(workdir='build', command="echo Hello World", wrongArg1=1, 
wrongArg2='two') def test_run_simple(self): self.setupStep(shell.ShellCommand(workdir='build', command="echo hello")) self.expectCommands( ExpectShell(workdir='build', command='echo hello') + 0 ) self.expectOutcome(result=SUCCESS, state_string="'echo hello'") return self.runStep() def test_run_list(self): self.setupStep(shell.ShellCommand(workdir='build', command=['trial', '-b', '-B', 'buildbot.test'])) self.expectCommands( ExpectShell(workdir='build', command=['trial', '-b', '-B', 'buildbot.test']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="'trial -b ...'") return self.runStep() def test_run_nested_description(self): self.setupStep(shell.ShellCommand( workdir='build', command=properties.FlattenList(['trial', ['-b', '-B'], 'buildbot.test']), descriptionDone=properties.FlattenList(['test', ['done']]), descriptionSuffix=properties.FlattenList(['suff', ['ix']]))) self.expectCommands( ExpectShell(workdir='build', command=['trial', '-b', '-B', 'buildbot.test']) + 0 ) self.expectOutcome(result=SUCCESS, state_string='test done suff ix') return self.runStep() def test_run_nested_command(self): self.setupStep(shell.ShellCommand(workdir='build', command=['trial', ['-b', '-B'], 'buildbot.test'])) self.expectCommands( ExpectShell(workdir='build', command=['trial', '-b', '-B', 'buildbot.test']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="'trial -b ...'") return self.runStep() def test_run_nested_deeply_command(self): self.setupStep(shell.ShellCommand(workdir='build', command=[['trial', ['-b', ['-B']]], 'buildbot.test'])) self.expectCommands( ExpectShell(workdir='build', command=['trial', '-b', '-B', 'buildbot.test']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="'trial -b ...'") return self.runStep() def test_run_nested_empty_command(self): self.setupStep(shell.ShellCommand(workdir='build', command=['trial', [], '-b', [], 'buildbot.test'])) self.expectCommands( ExpectShell(workdir='build', command=['trial', '-b', 'buildbot.test']) + 0 ) 
self.expectOutcome(result=SUCCESS, state_string="'trial -b ...'") return self.runStep() def test_run_env(self): self.setupStep(shell.ShellCommand(workdir='build', command="echo hello"), worker_env=dict(DEF='HERE')) self.expectCommands( ExpectShell(workdir='build', command='echo hello', env=dict(DEF='HERE')) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_run_env_override(self): self.setupStep(shell.ShellCommand(workdir='build', env={'ABC': '123'}, command="echo hello"), worker_env=dict(ABC='XXX', DEF='HERE')) self.expectCommands( ExpectShell(workdir='build', command='echo hello', env=dict(ABC='123', DEF='HERE')) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_run_usePTY(self): self.setupStep(shell.ShellCommand(workdir='build', command="echo hello", usePTY=False)) self.expectCommands( ExpectShell(workdir='build', command='echo hello', usePTY=False) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_run_usePTY_old_worker(self): self.setupStep( shell.ShellCommand(workdir='build', command="echo hello", usePTY=True), worker_version=dict(shell='1.1')) self.expectCommands( ExpectShell(workdir='build', command='echo hello') + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_run_decodeRC(self, rc=1, results=WARNINGS, extra_text=" (warnings)"): self.setupStep(shell.ShellCommand(workdir='build', command="echo hello", decodeRC={1: WARNINGS})) self.expectCommands( ExpectShell(workdir='build', command='echo hello') + rc ) self.expectOutcome( result=results, state_string="'echo hello'" + extra_text) return self.runStep() def test_run_decodeRC_defaults(self): return self.test_run_decodeRC(2, FAILURE, extra_text=" (failure)") def test_run_decodeRC_defaults_0_is_failure(self): return self.test_run_decodeRC(0, FAILURE, extra_text=" (failure)") def test_missing_command_error(self): # this checks that an exception is raised for invalid arguments with self.assertRaisesConfigError( "ShellCommand's 
`command' argument is not specified"): shell.ShellCommand() class TreeSize(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_run_success(self): self.setupStep(shell.TreeSize()) self.expectCommands( ExpectShell(workdir='wkdir', command=['du', '-s', '-k', '.']) + ExpectShell.log('stdio', stdout='9292 .\n') + 0 ) self.expectOutcome(result=SUCCESS, state_string="treesize 9292 KiB") self.expectProperty('tree-size-KiB', 9292) return self.runStep() def test_run_misparsed(self): self.setupStep(shell.TreeSize()) self.expectCommands( ExpectShell(workdir='wkdir', command=['du', '-s', '-k', '.']) + ExpectShell.log('stdio', stdout='abcdef\n') + 0 ) self.expectOutcome(result=WARNINGS, state_string="treesize unknown (warnings)") return self.runStep() def test_run_failed(self): self.setupStep(shell.TreeSize()) self.expectCommands( ExpectShell(workdir='wkdir', command=['du', '-s', '-k', '.']) + ExpectShell.log('stdio', stderr='abcdef\n') + 1 ) self.expectOutcome(result=FAILURE, state_string="treesize unknown (failure)") return self.runStep() class SetPropertyFromCommand(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_constructor_conflict(self): with self.assertRaises(config.ConfigErrors): shell.SetPropertyFromCommand(property='foo', extract_fn=lambda: None) def test_run_property(self): self.setupStep( shell.SetPropertyFromCommand(property="res", command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout='\n\nabcdef\n') + 0 ) self.expectOutcome(result=SUCCESS, state_string="property 'res' set") self.expectProperty("res", "abcdef") # note: stripped self.expectLogfile('property changes', r"res: " + repr('abcdef')) return self.runStep() def 
test_renderable_workdir(self): self.setupStep( shell.SetPropertyFromCommand(property="res", command="cmd", workdir=properties.Interpolate('wkdir'))) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout='\n\nabcdef\n') + 0 ) self.expectOutcome(result=SUCCESS, state_string="property 'res' set") self.expectProperty("res", "abcdef") # note: stripped self.expectLogfile('property changes', r"res: " + repr('abcdef')) return self.runStep() def test_run_property_no_strip(self): self.setupStep(shell.SetPropertyFromCommand(property="res", command="cmd", strip=False)) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout='\n\nabcdef\n') + 0 ) self.expectOutcome(result=SUCCESS, state_string="property 'res' set") self.expectProperty("res", "\n\nabcdef\n") self.expectLogfile('property changes', r"res: " + repr('\n\nabcdef\n')) return self.runStep() def test_run_failure(self): self.setupStep( shell.SetPropertyFromCommand(property="res", command="blarg")) self.expectCommands( ExpectShell(workdir='wkdir', command="blarg") + ExpectShell.log('stdio', stderr='cannot blarg: File not found') + 1 ) self.expectOutcome(result=FAILURE, state_string="'blarg' (failure)") self.expectNoProperty("res") return self.runStep() def test_run_extract_fn(self): def extract_fn(rc, stdout, stderr): self.assertEqual( (rc, stdout, stderr), (0, 'startend\n', 'STARTEND\n')) return dict(a=1, b=2) self.setupStep( shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout='start', stderr='START') + ExpectShell.log('stdio', stdout='end') + ExpectShell.log('stdio', stderr='END') + 0 ) self.expectOutcome(result=SUCCESS, state_string="2 properties set") self.expectLogfile('property changes', 'a: 1\nb: 2') self.expectProperty("a", 1) self.expectProperty("b", 2) return self.runStep() def test_run_extract_fn_cmdfail(self): 
def extract_fn(rc, stdout, stderr): self.assertEqual((rc, stdout, stderr), (3, '', '')) return dict(a=1, b=2) self.setupStep( shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + 3 ) # note that extract_fn *is* called anyway self.expectOutcome(result=FAILURE, state_string="2 properties set (failure)") self.expectLogfile('property changes', 'a: 1\nb: 2') return self.runStep() def test_run_extract_fn_cmdfail_empty(self): def extract_fn(rc, stdout, stderr): self.assertEqual((rc, stdout, stderr), (3, '', '')) return dict() self.setupStep( shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + 3 ) # note that extract_fn *is* called anyway, but returns no properties self.expectOutcome(result=FAILURE, state_string="'cmd' (failure)") return self.runStep() @defer.inlineCallbacks def test_run_extract_fn_exception(self): def extract_fn(rc, stdout, stderr): raise RuntimeError("oh noes") self.setupStep( shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + 0 ) # note that extract_fn *is* called anyway, but returns no properties self.expectOutcome(result=EXCEPTION, state_string="'cmd' (exception)") yield self.runStep() self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) def test_error_both_set(self): """ If both ``extract_fn`` and ``property`` are defined, ``SetPropertyFromCommand`` reports a config error. """ with self.assertRaises(config.ConfigErrors): shell.SetPropertyFromCommand(command=["echo", "value"], property="propname", extract_fn=lambda x: {"propname": "hello"}) def test_error_none_set(self): """ If neither ``extract_fn`` and ``property`` are defined, ``SetPropertyFromCommand`` reports a config error. 
""" with self.assertRaises(config.ConfigErrors): shell.SetPropertyFromCommand(command=["echo", "value"]) class PerlModuleTest(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_new_version_success(self): self.setupStep(shell.PerlModuleTest(command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ This junk ignored Test Summary Report Result: PASS Tests: 10 Failed: 0 Tests: 10 Failed: 0 Files=93, Tests=20""")) + 0 ) self.expectOutcome(result=SUCCESS, state_string='20 tests 20 passed') return self.runStep() def test_new_version_warnings(self): self.setupStep(shell.PerlModuleTest(command="cmd", warningPattern='^OHNOES')) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ This junk ignored Test Summary Report ------------------- foo.pl (Wstat: 0 Tests: 10 Failed: 0) Failed test: 0 OHNOES 1 OHNOES 2 Files=93, Tests=20, 0 wallclock secs ... Result: PASS""")) + 0 ) self.expectOutcome( result=WARNINGS, state_string='20 tests 20 passed 2 warnings (warnings)') return self.runStep() def test_new_version_failed(self): self.setupStep(shell.PerlModuleTest(command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ foo.pl .. 1/4""")) + ExpectShell.log('stdio', stderr=textwrap.dedent("""\ # Failed test 2 in foo.pl at line 6 # foo.pl line 6 is: ok(0);""")) + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ foo.pl .. Failed 1/4 subtests Test Summary Report ------------------- foo.pl (Wstat: 0 Tests: 4 Failed: 1) Failed test: 0 Files=1, Tests=4, 0 wallclock secs ( 0.06 usr 0.01 sys + 0.03 cusr 0.01 csys = 0.11 CPU) Result: FAIL""")) + ExpectShell.log('stdio', stderr=textwrap.dedent("""\ Failed 1/1 test programs. 
1/4 subtests failed.""")) + 1 ) self.expectOutcome(result=FAILURE, state_string='4 tests 3 passed 1 failed (failure)') return self.runStep() def test_old_version_success(self): self.setupStep(shell.PerlModuleTest(command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ This junk ignored All tests successful Files=10, Tests=20, 100 wall blah blah""")) + 0 ) self.expectOutcome(result=SUCCESS, state_string='20 tests 20 passed') return self.runStep() def test_old_version_failed(self): self.setupStep(shell.PerlModuleTest(command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ This junk ignored Failed 1/1 test programs, 3/20 subtests failed.""")) + 1 ) self.expectOutcome(result=FAILURE, state_string='20 tests 17 passed 3 failed (failure)') return self.runStep() class SetPropertyDeprecation(unittest.TestCase): """ Tests for L{shell.SetProperty} """ def test_deprecated(self): """ Accessing L{shell.SetProperty} reports a deprecation error. 
""" shell.SetProperty warnings = self.flushWarnings([self.test_deprecated]) self.assertEqual(len(warnings), 1) self.assertIdentical(warnings[0]['category'], DeprecationWarning) self.assertEqual(warnings[0]['message'], "buildbot.steps.shell.SetProperty was deprecated in Buildbot 0.8.8: " "It has been renamed to SetPropertyFromCommand" ) class Configure(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_class_attrs(self): step = shell.Configure() self.assertEqual(step.command, ['./configure']) def test_run(self): self.setupStep(shell.Configure()) self.expectCommands( ExpectShell(workdir='wkdir', command=["./configure"]) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() class WarningCountingShellCommand(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_warnings(self): self.setupStep(shell.WarningCountingShellCommand(workdir='w', command=['make'])) self.expectCommands( ExpectShell(workdir='w', command=["make"]) + ExpectShell.log('stdio', stdout='blarg success!') + 0 ) self.expectOutcome(result=SUCCESS) self.expectProperty("warnings-count", 0) return self.runStep() def test_default_pattern(self): self.setupStep(shell.WarningCountingShellCommand(command=['make'])) self.expectCommands( ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout='normal: foo\nwarning: blarg!\n' 'also normal\nWARNING: blarg!\n') + 0 ) self.expectOutcome(result=WARNINGS) self.expectProperty("warnings-count", 2) self.expectLogfile("warnings (2)", "warning: blarg!\nWARNING: blarg!\n") return self.runStep() def test_custom_pattern(self): self.setupStep(shell.WarningCountingShellCommand(command=['make'], warningPattern=r"scary:.*")) self.expectCommands( 
ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout='scary: foo\nwarning: bar\nscary: bar') + 0 ) self.expectOutcome(result=WARNINGS) self.expectProperty("warnings-count", 2) self.expectLogfile("warnings (2)", "scary: foo\nscary: bar\n") return self.runStep() def test_maxWarnCount(self): self.setupStep(shell.WarningCountingShellCommand(command=['make'], maxWarnCount=9)) self.expectCommands( ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout='warning: noo!\n' * 10) + 0 ) self.expectOutcome(result=FAILURE) self.expectProperty("warnings-count", 10) return self.runStep() def test_fail_with_warnings(self): self.setupStep(shell.WarningCountingShellCommand(command=['make'])) self.expectCommands( ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout='warning: I might fail') + 3 ) self.expectOutcome(result=FAILURE) self.expectProperty("warnings-count", 1) self.expectLogfile("warnings (1)", "warning: I might fail\n") return self.runStep() def test_warn_with_decoderc(self): self.setupStep(shell.WarningCountingShellCommand(command=['make'], decodeRC={3: WARNINGS})) self.expectCommands( ExpectShell(workdir='wkdir', command=["make"], ) + ExpectShell.log('stdio', stdout='I might fail with rc') + 3 ) self.expectOutcome(result=WARNINGS) self.expectProperty("warnings-count", 0) return self.runStep() def do_test_suppressions(self, step, supps_file='', stdout='', exp_warning_count=0, exp_warning_log='', exp_exception=False, props=None): self.setupStep(step) if props is not None: for key in props: self.build.setProperty(key, props[key], "") # Invoke the expected callbacks for the suppression file upload. 
Note # that this assumes all of the remote_* are synchronous, but can be # easily adapted to suit if that changes (using inlineCallbacks) def upload_behavior(command): writer = command.args['writer'] writer.remote_write(supps_file) writer.remote_close() command.rc = 0 if supps_file is not None: self.expectCommands( # step will first get the remote suppressions file Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='supps', workdir='wkdir', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(upload_behavior), # and then run the command ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout=stdout) + 0 ) else: self.expectCommands( ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout=stdout) + 0 ) if exp_exception: self.expectOutcome(result=EXCEPTION, state_string="'make' (exception)") else: if exp_warning_count != 0: self.expectOutcome(result=WARNINGS, state_string="'make' (warnings)") self.expectLogfile("warnings (%d)" % exp_warning_count, exp_warning_log) else: self.expectOutcome(result=SUCCESS, state_string="'make'") self.expectProperty("warnings-count", exp_warning_count) return self.runStep() def test_suppressions(self): step = shell.WarningCountingShellCommand(command=['make'], suppressionFile='supps') supps_file = textwrap.dedent("""\ # example suppressions file amar.c : .*unused variable.* holding.c : .*invalid access to non-static.* """).strip() stdout = textwrap.dedent("""\ /bin/sh ../libtool --tag=CC --silent --mode=link gcc blah /bin/sh ../libtool --tag=CC --silent --mode=link gcc blah amar.c: In function 'write_record': amar.c:164: warning: unused variable 'x' amar.c:164: warning: this should show up /bin/sh ../libtool --tag=CC --silent --mode=link gcc blah /bin/sh ../libtool --tag=CC --silent --mode=link gcc blah holding.c: In function 'holding_thing': holding.c:984: warning: invalid access to non-static 'y' """) exp_warning_log = textwrap.dedent("""\ 
amar.c:164: warning: this should show up """) return self.do_test_suppressions(step, supps_file, stdout, 1, exp_warning_log) def test_suppressions_directories(self): def warningExtractor(step, line, match): return line.split(':', 2) step = shell.WarningCountingShellCommand(command=['make'], suppressionFile='supps', warningExtractor=warningExtractor) supps_file = textwrap.dedent("""\ # these should be suppressed: amar-src/amar.c : XXX .*/server-src/.* : AAA # these should not, as the dirs do not match: amar.c : YYY server-src.* : BBB """).strip() # note that this uses the unicode smart-quotes that gcc loves so much stdout = textwrap.dedent("""\ make: Entering directory \u2019amar-src\u2019 amar.c:164: warning: XXX amar.c:165: warning: YYY make: Leaving directory 'amar-src' make: Entering directory "subdir" make: Entering directory 'server-src' make: Entering directory `one-more-dir` holding.c:999: warning: BBB holding.c:1000: warning: AAA """) exp_warning_log = textwrap.dedent("""\ amar.c:165: warning: YYY holding.c:999: warning: BBB """) return self.do_test_suppressions(step, supps_file, stdout, 2, exp_warning_log) def test_suppressions_directories_custom(self): def warningExtractor(step, line, match): return line.split(':', 2) step = shell.WarningCountingShellCommand(command=['make'], suppressionFile='supps', warningExtractor=warningExtractor, directoryEnterPattern="^IN: (.*)", directoryLeavePattern="^OUT:") supps_file = "dir1/dir2/abc.c : .*" stdout = textwrap.dedent("""\ IN: dir1 IN: decoy OUT: decoy IN: dir2 abc.c:123: warning: hello """) return self.do_test_suppressions(step, supps_file, stdout, 0, '') def test_suppressions_linenos(self): def warningExtractor(step, line, match): return line.split(':', 2) step = shell.WarningCountingShellCommand(command=['make'], suppressionFile='supps', warningExtractor=warningExtractor) supps_file = "abc.c:.*:100-199\ndef.c:.*:22" stdout = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:150: warning: unseen def.c:22: 
warning: unseen abc.c:200: warning: seen 2 """) exp_warning_log = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:200: warning: seen 2 """) return self.do_test_suppressions(step, supps_file, stdout, 2, exp_warning_log) @defer.inlineCallbacks def test_suppressions_warningExtractor_exc(self): def warningExtractor(step, line, match): raise RuntimeError("oh noes") step = shell.WarningCountingShellCommand(command=['make'], suppressionFile='supps', warningExtractor=warningExtractor) # need at least one supp to trigger warningExtractor supps_file = 'x:y' stdout = "abc.c:99: warning: seen 1" yield self.do_test_suppressions(step, supps_file, stdout, exp_exception=True) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) def test_suppressions_addSuppression(self): # call addSuppression "manually" from a subclass class MyWCSC(shell.WarningCountingShellCommand): def run(self): self.addSuppression([('.*', '.*unseen.*', None, None)]) return super().run() def warningExtractor(step, line, match): return line.split(':', 2) step = MyWCSC(command=['make'], suppressionFile='supps', warningExtractor=warningExtractor) stdout = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:150: warning: unseen abc.c:200: warning: seen 2 """) exp_warning_log = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:200: warning: seen 2 """) return self.do_test_suppressions(step, '', stdout, 2, exp_warning_log) def test_suppressions_suppressionsParameter(self): def warningExtractor(step, line, match): return line.split(':', 2) supps = ( ("abc.c", ".*", 100, 199), ("def.c", ".*", 22, 22), ) step = shell.WarningCountingShellCommand(command=['make'], suppressionList=supps, warningExtractor=warningExtractor) stdout = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:150: warning: unseen def.c:22: warning: unseen abc.c:200: warning: seen 2 """) exp_warning_log = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:200: warning: seen 2 """) return self.do_test_suppressions(step, None, 
stdout, 2, exp_warning_log) def test_suppressions_suppressionsRenderableParameter(self): def warningExtractor(step, line, match): return line.split(':', 2) supps = ( ("abc.c", ".*", 100, 199), ("def.c", ".*", 22, 22), ) step = shell.WarningCountingShellCommand( command=['make'], suppressionList=properties.Property("suppressionsList"), warningExtractor=warningExtractor) stdout = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:150: warning: unseen def.c:22: warning: unseen abc.c:200: warning: seen 2 """) exp_warning_log = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:200: warning: seen 2 """) return self.do_test_suppressions(step, None, stdout, 2, exp_warning_log, props={"suppressionsList": supps}) def test_warnExtractFromRegexpGroups(self): step = shell.WarningCountingShellCommand(command=['make']) we = shell.WarningCountingShellCommand.warnExtractFromRegexpGroups line, pat, exp_file, exp_lineNo, exp_text = \ ('foo:123:text', '(.*):(.*):(.*)', 'foo', 123, 'text') self.assertEqual(we(step, line, re.match(pat, line)), (exp_file, exp_lineNo, exp_text)) def test_missing_command_error(self): # this checks that an exception is raised for invalid arguments with self.assertRaisesConfigError( "WarningCountingShellCommand's 'command' argument is not specified"): shell.WarningCountingShellCommand() class Compile(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_class_args(self): # since this step is just a pre-configured WarningCountingShellCommand, # there' not much to test! 
step = self.setupStep(shell.Compile()) self.assertEqual(step.name, "compile") self.assertTrue(step.haltOnFailure) self.assertTrue(step.flunkOnFailure) self.assertEqual(step.description, ["compiling"]) self.assertEqual(step.descriptionDone, ["compile"]) self.assertEqual(step.command, ["make", "all"]) class Test(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpBuildStep() def tearDown(self): self.tearDownBuildStep() def test_setTestResults(self): step = self.setupStep(shell.Test()) step.setTestResults(total=10, failed=3, passed=5, warnings=3) self.assertEqual(step.statistics, { 'tests-total': 10, 'tests-failed': 3, 'tests-passed': 5, 'tests-warnings': 3, }) # ensure that they're additive step.setTestResults(total=1, failed=2, passed=3, warnings=4) self.assertEqual(step.statistics, { 'tests-total': 11, 'tests-failed': 5, 'tests-passed': 8, 'tests-warnings': 7, }) def test_describe_not_done(self): step = self.setupStep(shell.Test()) step.results = SUCCESS step.rendered = True self.assertEqual(step.getResultSummary(), {'step': 'test'}) def test_describe_done(self): step = self.setupStep(shell.Test()) step.rendered = True step.results = SUCCESS step.statistics['tests-total'] = 93 step.statistics['tests-failed'] = 10 step.statistics['tests-passed'] = 20 step.statistics['tests-warnings'] = 30 self.assertEqual(step.getResultSummary(), {'step': '93 tests 20 passed 30 warnings 10 failed'}) def test_describe_done_no_total(self): step = self.setupStep(shell.Test()) step.rendered = True step.results = SUCCESS step.statistics['tests-total'] = 0 step.statistics['tests-failed'] = 10 step.statistics['tests-passed'] = 20 step.statistics['tests-warnings'] = 30 # describe calculates 60 = 10+20+30 self.assertEqual(step.getResultSummary(), {'step': '60 tests 20 passed 30 warnings 10 failed'}) 
buildbot-3.4.0/master/buildbot/test/unit/steps/test_shellsequence.py000066400000000000000000000202431413250514000257620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process.properties import WithProperties from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import shellsequence from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config as configmixin from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.warnings import assertProducesWarnings from buildbot.warnings import DeprecatedApiWarning class DynamicRun(shellsequence.ShellSequence): def run(self): return self.runShellSequence(self.dynamicCommands) class TestOneShellCommand(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_shell_arg_warn_deprecated_logfile(self): with assertProducesWarnings(DeprecatedApiWarning, message_pattern="logfile is deprecated, use logname"): 
shellsequence.ShellArg(command="command", logfile="logfile") def test_shell_arg_error_logfile_and_logname(self): with assertProducesWarnings(DeprecatedApiWarning, message_pattern="logfile is deprecated, use logname"): with self.assertRaisesConfigError( "the 'logfile' parameter must not be specified when 'logname' is set"): shellsequence.ShellArg(command="command", logname="logname", logfile="logfile") def testShellArgInput(self): with self.assertRaisesConfigError( "the 'command' parameter of ShellArg must not be None"): shellsequence.ShellArg(command=None) arg1 = shellsequence.ShellArg(command=1) with self.assertRaisesConfigError( "1 is an invalid command, it must be a string or a list"): arg1.validateAttributes() arg2 = shellsequence.ShellArg(command=["make", 1]) with self.assertRaisesConfigError( "['make', 1] must only have strings in it"): arg2.validateAttributes() for goodcmd in ["make p1", ["make", "p1"]]: arg = shellsequence.ShellArg(command=goodcmd) arg.validateAttributes() def testShellArgsAreRendered(self): arg1 = shellsequence.ShellArg(command=WithProperties('make %s', 'project')) self.setupStep( shellsequence.ShellSequence(commands=[arg1], workdir='build')) self.properties.setProperty("project", "BUILDBOT-TEST", "TEST") self.expectCommands(ExpectShell(workdir='build', command='make BUILDBOT-TEST') + 0) # TODO: need to factor command-summary stuff into a utility method and # use it here self.expectOutcome(result=SUCCESS, state_string="'make BUILDBOT-TEST'") return self.runStep() def createDynamicRun(self, commands): DynamicRun.dynamicCommands = commands return DynamicRun() def testSanityChecksAreDoneInRuntimeWhenDynamicCmdIsNone(self): self.setupStep(self.createDynamicRun(None)) self.expectOutcome(result=EXCEPTION, state_string="finished (exception)") return self.runStep() def testSanityChecksAreDoneInRuntimeWhenDynamicCmdIsString(self): self.setupStep(self.createDynamicRun(["one command"])) self.expectOutcome(result=EXCEPTION, state_string='finished 
(exception)') return self.runStep() def testSanityChecksAreDoneInRuntimeWhenDynamicCmdIsInvalidShellArg(self): self.setupStep( self.createDynamicRun([shellsequence.ShellArg(command=1)])) self.expectOutcome(result=EXCEPTION, state_string='finished (exception)') return self.runStep() def testMultipleCommandsAreRun(self): arg1 = shellsequence.ShellArg(command='make p1') arg2 = shellsequence.ShellArg(command='deploy p1') self.setupStep( shellsequence.ShellSequence(commands=[arg1, arg2], workdir='build')) self.expectCommands(ExpectShell(workdir='build', command='make p1') + 0, ExpectShell(workdir='build', command='deploy p1') + 0) self.expectOutcome(result=SUCCESS, state_string="'deploy p1'") return self.runStep() def testSkipWorks(self): arg1 = shellsequence.ShellArg(command='make p1') arg2 = shellsequence.ShellArg(command='') arg3 = shellsequence.ShellArg(command='deploy p1') self.setupStep( shellsequence.ShellSequence(commands=[arg1, arg2, arg3], workdir='build')) self.expectCommands(ExpectShell(workdir='build', command='make p1') + 0, ExpectShell(workdir='build', command='deploy p1') + 0) self.expectOutcome(result=SUCCESS, state_string="'deploy p1'") return self.runStep() def testWarningWins(self): arg1 = shellsequence.ShellArg(command='make p1', warnOnFailure=True, flunkOnFailure=False) arg2 = shellsequence.ShellArg(command='deploy p1') self.setupStep( shellsequence.ShellSequence(commands=[arg1, arg2], workdir='build')) self.expectCommands(ExpectShell(workdir='build', command='make p1') + 1, ExpectShell(workdir='build', command='deploy p1') + 0) self.expectOutcome(result=WARNINGS, state_string="'deploy p1' (warnings)") return self.runStep() def testSequenceStopsOnHaltOnFailure(self): arg1 = shellsequence.ShellArg(command='make p1', haltOnFailure=True) arg2 = shellsequence.ShellArg(command='deploy p1') self.setupStep( shellsequence.ShellSequence(commands=[arg1, arg2], workdir='build')) self.expectCommands(ExpectShell(workdir='build', command='make p1') + 1) 
self.expectOutcome(result=FAILURE, state_string="'make p1' (failure)") return self.runStep() def testShellArgsAreRenderedAnewAtEachBuild(self): """Unit test to ensure that ShellArg instances are properly re-rendered. This unit test makes sure that ShellArg instances are rendered anew at each new build. """ arg = shellsequence.ShellArg(command=WithProperties('make %s', 'project')) step = shellsequence.ShellSequence(commands=[arg], workdir='build') # First "build" self.setupStep(step) self.properties.setProperty("project", "BUILDBOT-TEST-1", "TEST") self.expectCommands(ExpectShell(workdir='build', command='make BUILDBOT-TEST-1') + 0) self.expectOutcome(result=SUCCESS, state_string="'make BUILDBOT-TEST-1'") self.runStep() # Second "build" self.setupStep(step) self.properties.setProperty("project", "BUILDBOT-TEST-2", "TEST") self.expectCommands(ExpectShell(workdir='build', command='make BUILDBOT-TEST-2') + 0) self.expectOutcome(result=SUCCESS, state_string="'make BUILDBOT-TEST-2'") return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_base_Source.py000066400000000000000000000175471413250514000267510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process import results from buildbot.steps.source import Source from buildbot.test.util import sourcesteps from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class OldStyleSourceStep(Source): def startVC(self): self.finished(results.SUCCESS) class TestSource(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def setup_deferred_mock(self): m = mock.Mock() def wrapper(*args, **kwargs): m(*args, **kwargs) return results.SUCCESS wrapper.mock = m return wrapper def test_start_alwaysUseLatest_True(self): step = self.setupStep(Source(alwaysUseLatest=True), { 'branch': 'other-branch', 'revision': 'revision', }, patch='patch' ) step.branch = 'branch' step.run_vc = self.setup_deferred_mock() step.startStep(mock.Mock()) self.assertEqual(step.run_vc.mock.call_args, (('branch', None, None), {})) def test_start_alwaysUseLatest_False(self): step = self.setupStep(Source(), { 'branch': 'other-branch', 'revision': 'revision', }, patch='patch' ) step.branch = 'branch' step.run_vc = self.setup_deferred_mock() step.startStep(mock.Mock()) self.assertEqual(step.run_vc.mock.call_args, (('other-branch', 'revision', 'patch'), {})) def test_start_alwaysUseLatest_False_binary_patch(self): args = { 'branch': 'other-branch', 'revision': 'revision', } step = self.setupStep(Source(), args, patch=(1, b'patch\xf8')) step.branch = 'branch' step.run_vc = self.setup_deferred_mock() step.startStep(mock.Mock()) self.assertEqual(step.run_vc.mock.call_args, (('other-branch', 'revision', (1, b'patch\xf8')), {})) def test_start_alwaysUseLatest_False_no_branch(self): step = self.setupStep(Source()) step.branch = 'branch' step.run_vc = self.setup_deferred_mock() step.startStep(mock.Mock()) 
self.assertEqual(step.run_vc.mock.call_args, (('branch', None, None), {})) def test_start_no_codebase(self): step = self.setupStep(Source()) step.branch = 'branch' step.run_vc = self.setup_deferred_mock() step.build.getSourceStamp = mock.Mock() step.build.getSourceStamp.return_value = None self.assertEqual(step.getCurrentSummary(), {'step': 'updating'}) self.assertEqual(step.name, Source.name) step.startStep(mock.Mock()) self.assertEqual(step.build.getSourceStamp.call_args[0], ('',)) self.assertEqual(step.getCurrentSummary(), {'step': 'updating'}) @defer.inlineCallbacks def test_start_with_codebase(self): step = self.setupStep(Source(codebase='codebase')) step.branch = 'branch' step.run_vc = self.setup_deferred_mock() step.build.getSourceStamp = mock.Mock() step.build.getSourceStamp.return_value = None self.assertEqual(step.getCurrentSummary(), {'step': 'updating codebase'}) step.name = yield step.build.render(step.name) self.assertEqual(step.name, Source.name + "-codebase") step.startStep(mock.Mock()) self.assertEqual(step.build.getSourceStamp.call_args[0], ('codebase',)) self.assertEqual(step.getResultSummary(), {'step': 'Codebase codebase not in build codebase (failure)'}) @defer.inlineCallbacks def test_start_with_codebase_and_descriptionSuffix(self): step = self.setupStep(Source(codebase='my-code', descriptionSuffix='suffix')) step.branch = 'branch' step.run_vc = self.setup_deferred_mock() step.build.getSourceStamp = mock.Mock() step.build.getSourceStamp.return_value = None self.assertEqual(step.getCurrentSummary(), {'step': 'updating suffix'}) step.name = yield step.build.render(step.name) self.assertEqual(step.name, Source.name + "-my-code") step.startStep(mock.Mock()) self.assertEqual(step.build.getSourceStamp.call_args[0], ('my-code',)) self.assertEqual(step.getResultSummary(), {'step': 'Codebase my-code not in build suffix (failure)'}) def test_old_style_source_step_throws_exception(self): step = self.setupStep(OldStyleSourceStep()) 
step.startStep(mock.Mock()) self.expectOutcome(result=results.EXCEPTION) self.flushLoggedErrors(NotImplementedError) class TestSourceDescription(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_constructor_args_strings(self): step = Source(workdir='build', description='svn update (running)', descriptionDone='svn update') self.assertEqual(step.description, ['svn update (running)']) self.assertEqual(step.descriptionDone, ['svn update']) def test_constructor_args_lists(self): step = Source(workdir='build', description=['svn', 'update', '(running)'], descriptionDone=['svn', 'update']) self.assertEqual(step.description, ['svn', 'update', '(running)']) self.assertEqual(step.descriptionDone, ['svn', 'update']) class AttrGroup(Source): def other_method(self): pass def mode_full(self): pass def mode_incremental(self): pass class TestSourceAttrGroup(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_attrgroup_hasattr(self): step = AttrGroup() self.assertTrue(step._hasAttrGroupMember('mode', 'full')) self.assertTrue(step._hasAttrGroupMember('mode', 'incremental')) self.assertFalse(step._hasAttrGroupMember('mode', 'nothing')) def test_attrgroup_getattr(self): step = AttrGroup() self.assertEqual(step._getAttrGroupMember('mode', 'full'), step.mode_full) self.assertEqual(step._getAttrGroupMember('mode', 'incremental'), step.mode_incremental) with self.assertRaises(AttributeError): step._getAttrGroupMember('mode', 'nothing') def test_attrgroup_listattr(self): step = AttrGroup() self.assertEqual(sorted(step._listAttrGroupMembers('mode')), ['full', 'incremental']) 
buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_bzr.py000066400000000000000000000755411413250514000253120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.internet import error from twisted.python.reflect import namedModule from twisted.trial import unittest from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import bzr from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestBzr(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def test_mode_full(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', 
logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS, state_string="update") self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_win32path(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh')) self.build.path_module = namedModule('ntpath') self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file=r'wkdir\.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file=r'wkdir\.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_timeout(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh', timeout=1)) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['bzr', 'clean-tree', '--force']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() 
def test_mode_full_revision(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh'), args=dict(revision='3730')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update', '-r', '3730']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clean(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clean_patched(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, # clean up the applied patch ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, 
Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, # this clean is from 'mode=clean' ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clean_patch(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clean'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.FileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.FileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clean_patch_worker_2_16(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', 
method='clean'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.FileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.FileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clean_revision(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clean'), args=dict(revision='2345')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update', '-r', '2345']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) 
self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_fresh(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clobber(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clobber_retry(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clobber', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 
'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clobber_revision(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clobber'), args=dict(revision='3730')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.', '-r', '3730']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clobber_baseurl(self): self.setupStep( bzr.Bzr(baseURL='http://bzr.squid-cache.org/bzr/squid3', defaultBranch='trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', os.path.join('http://bzr.squid-cache.org/bzr/squid3', 'trunk'), '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 
'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clobber_baseurl_nodefault(self): self.setupStep( bzr.Bzr(baseURL='http://bzr.squid-cache.org/bzr/squid3', defaultBranch='trunk', mode='full', method='clobber'), args=dict(branch='branches/SQUID_3_0')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', os.path.join('http://bzr.squid-cache.org/bzr/squid3', 'branches/SQUID_3_0'), '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_copy(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='build', logEnviron=True)) + 0, Expect('stat', dict(file='source/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['bzr', 'update']) + 0, Expect('cpdir', {'fromdir': 'source', 'logEnviron': True, 'todir': 'build'}) + 0, ExpectShell(workdir='source', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_incremental(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental')) 
self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_incremental_revision(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental'), args=dict(revision='9384')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update', '-r', '9384']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_incremental_no_existing_repo(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100\n') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100\n', 'Bzr') return self.runStep() 
def test_mode_incremental_retry(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental', retry=(0, 1))) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100\n') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100\n', 'Bzr') return self.runStep() def test_bad_revparse(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='oiasdfj010laksjfd') + 0, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_bad_checkout(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['bzr', 
'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + ExpectShell.log('stdio', stderr='failed\n') + 128, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_worker_connection_lost(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_cvs.py000066400000000000000000001675761413250514000253220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import time from twisted.internet import error from twisted.trial import unittest from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import cvs from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin def uploadString(cvsroot): def behavior(command): writer = command.args['writer'] writer.remote_write(cvsroot + "\n") writer.remote_close() return behavior class TestCVS(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def setupStep(self, step, *args, **kwargs): super().setupStep(step, *args, **kwargs) # make parseGotRevision return something consistent, patching the class # instead of the object since a new object is constructed by runTest. 
def parseGotRevision(self): self.updateSourceProperty('got_revision', '2012-09-09 12:00:39 +0000') self.patch(cvs.CVS, 'parseGotRevision', parseGotRevision) def test_parseGotRevision(self): def gmtime(): return time.struct_time((2012, 9, 9, 12, 9, 33, 6, 253, 0)) self.patch(time, 'gmtime', gmtime) step = cvs.CVS(cvsroot="x", cvsmodule="m", mode='full', method='clean') props = [] def updateSourceProperty(prop, name): props.append((prop, name)) step.updateSourceProperty = updateSourceProperty step.parseGotRevision() self.assertEqual(props, [('got_revision', '2012-09-09 12:09:33 +0000')]) def test_cvsEntriesContainStickyDates(self): step = cvs.CVS(cvsroot="x", cvsmodule="m", mode='full', method='clean') self.assertEqual(step._cvsEntriesContainStickyDates('D'), False) self.assertEqual(step._cvsEntriesContainStickyDates( '/file/1.1/Fri May 17 23:20:00//TMOZILLA_1_0_0_BRANCH\nD'), False) self.assertEqual(step._cvsEntriesContainStickyDates( '/file/1.1/Fri May 17 23:20:00//D2013.10.08.11.20.33\nD'), True) self.assertEqual(step._cvsEntriesContainStickyDates( '/file1/1.1/Fri May 17 23:20:00//\n' '/file2/1.1.2.3/Fri May 17 23:20:00//D2013.10.08.11.20.33\nD'), True) def test_mode_full_clean_and_login(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean', login="a password")) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', 'login'], initialStdin="a password\n") + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, 
workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS, state_string="update") self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_and_login_worker_2_16(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean', login="a password"), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', 'login'], initialStdin="a password\n") + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, 
ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS, state_string="update") self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_patch(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', 
dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_patch_worker_2_16(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', 
dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_timeout(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean', timeout=1)) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_branch(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean', branch='branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', 
writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP', '-r', 'branch']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_branch_sourcestamp(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean'), args={'branch': 'my_branch'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', 
command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP', '-r', 'my_branch']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_fresh(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard', '--ignore']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clobber(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clobber') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, 
ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clobber_retry(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clobber', retry=(0, 2)) self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_copy(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='copy') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, 
workersrc='Root', workdir='source/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='source/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='source/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='source', command=['cvs', '-z3', 'update', '-dP']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True, 'timeout': step.timeout}) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_copy_wrong_repo(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='copy') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='source/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('the-end-of-the-universe')) + 0, Expect('rmdir', dict(dir='source', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'source', 'mozilla/browser/']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True, 'timeout': step.timeout}) + 0, ) self.expectOutcome(result=SUCCESS) 
self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_sticky_date(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, 
workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString('/file/1.1/Fri May 17 23:20:00//D2013.10.08.11.20.33\nD')) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_password_windows(self): self.setupStep( cvs.CVS(cvsroot=":pserver:dustin:secrets@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) # on Windows, this file does not contain the password, per # http://trac.buildbot.net/ticket/2355 + Expect.behavior( uploadString(':pserver:dustin@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, 
) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_branch(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental', branch='my_branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP', '-r', 'my_branch']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_special_case(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental', branch='HEAD'), args=dict(revision='2012-08-16 16:05:16 +0000')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, 
Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP', # note, no -r HEAD here - that's the special # case '-D', '2012-08-16 16:05:16 +0000']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_branch_sourcestamp(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental'), args={'branch': 'my_branch'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP', '-r', 'my_branch']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def 
test_mode_incremental_not_loggedin(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_no_existing_repo(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) 
self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_retry(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental', retry=(0, 1)) self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_wrong_repo(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('the-end-of-the-universe')) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', 
':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_wrong_module(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('the-end-of-the-universe')) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_no_existing_repo(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 1, ExpectShell(workdir='', command=['cvs', '-d', 
':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_wrong_repo(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('the-end-of-the-universe')) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_no_method(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', 
writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard', '--ignore']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_with_options(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental', global_options=['-q'], extra_options=['-l']) self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-q', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', '-l', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_with_env_logEnviron(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental', env={'abc': '123'}, logEnviron=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version'], env={'abc': '123'}, logEnviron=False) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=False)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( 
uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP'], env={'abc': '123'}, logEnviron=False) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_command_fails(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 128, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_cvsdiscard_fails(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', 
writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard', '--ignore']) + ExpectShell.log('stdio', stderr='FAIL!\n') + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_worker_connection_lost(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_darcs.py000066400000000000000000000440011413250514000255740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import error from twisted.trial import unittest from buildbot import config from buildbot.process import remotetransfer from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import darcs from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestDarcs(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def test_no_empty_step_config(self): with self.assertRaises(config.ConfigErrors): darcs.Darcs() def test_incorrect_method(self): with self.assertRaises(config.ConfigErrors): darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='fresh') def test_incremental_invalid_method(self): with self.assertRaises(config.ConfigErrors): darcs.Darcs(repourl='http://localhost/darcs', mode='incremental', method='fresh') def test_no_repo_url(self): with self.assertRaises(config.ConfigErrors): darcs.Darcs(mode='full', method='fresh') def test_mode_full_clobber(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', 'http://localhost/darcs']) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) 
self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_full_copy(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['darcs', 'pull', '--all', '--verbose']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='build', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_full_no_method(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['darcs', 'pull', '--all', '--verbose']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='build', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_incremental(self): self.setupStep( 
darcs.Darcs(repourl='http://localhost/darcs', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'pull', '--all', '--verbose']) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_incremental_patched(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['darcs', 'pull', '--all', '--verbose']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': True, 'timeout': 1200}) + 0, Expect('stat', dict(file='build/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='build', command=['darcs', 'pull', '--all', '--verbose']) + 0, ExpectShell(workdir='build', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_incremental_patch(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='incremental'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', 
dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'pull', '--all', '--verbose']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_full_clobber_retry(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='clobber', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', 'http://localhost/darcs']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', 'http://localhost/darcs']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', 'http://localhost/darcs']) + 0, ExpectShell(workdir='wkdir', 
command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_full_clobber_revision(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='clobber'), dict(revision='abcdef01')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.darcs-context', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', '--context', '.darcs-context', 'http://localhost/darcs']) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_full_clobber_revision_worker_2_16(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='clobber'), dict(revision='abcdef01'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.darcs-context', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', 
'--repo-name', 'wkdir', '--context', '.darcs-context', 'http://localhost/darcs']) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_incremental_no_existing_repo(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_darcs', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', 'http://localhost/darcs']) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_worker_connection_lost(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_gerrit.py000066400000000000000000000214531413250514000260020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process.results import SUCCESS from buildbot.steps.source import gerrit from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestGerrit(sourcesteps.SourceStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def test_mode_full_clean(self): self.setupStep( gerrit.Gerrit(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.build.setProperty("event.change.project", "buildbot") self.sourcestamp.project = 'buildbot' self.build.setProperty("event.patchSet.ref", "gerrit_branch") self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'gerrit_branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, 
ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'gerrit_branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', 'Gerrit') return self.runStep() def test_mode_full_clean_force_build(self): self.setupStep( gerrit.Gerrit(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.build.setProperty("event.change.project", "buildbot") self.sourcestamp.project = 'buildbot' self.build.setProperty("gerrit_change", "1234/567") self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'refs/changes/34/1234/567', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'refs/changes/34/1234/567']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', 'Gerrit') return self.runStep() def test_mode_full_clean_force_same_project(self): self.setupStep( gerrit.Gerrit(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', codebase='buildbot')) self.build.setProperty("event.change.project", "buildbot") self.sourcestamp.project = 'buildbot' 
self.build.setProperty("gerrit_change", "1234/567") self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'refs/changes/34/1234/567', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'refs/changes/34/1234/567']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', {'buildbot': 'f6ad368298bd941e934a41f3babc827b2aa95a1d'}, 'Gerrit') return self.runStep() def test_mode_full_clean_different_project(self): self.setupStep( gerrit.Gerrit(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', codebase='buildbot')) self.build.setProperty("event.change.project", "buildbot") self.sourcestamp.project = 'not_buildbot' self.build.setProperty("gerrit_change", "1234/567") self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, 
ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_git.py000066400000000000000000005454341413250514000253030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from twisted.internet import error from twisted.trial import unittest from buildbot import config as bbconfig from buildbot.interfaces import WorkerSetupError from buildbot.process import remotetransfer from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import git from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.unit.steps.test_transfer import downloadString from buildbot.test.util import config from buildbot.test.util import sourcesteps from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes class TestGit(sourcesteps.SourceStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): stepClass = git.Git def setUp(self): self.setUpTestReactor() self.sourceName = self.stepClass.__name__ return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def test_mode_full_filters_2_26(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', filters=['tree:0'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.26.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 
0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_filters_2_27(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', filters=['tree:0'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.27.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--filter', 'tree:0', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_progress_False(self): self.setupStep( 
self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', progress=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_key_2_10(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_command_config = \ 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', 
command=['git', '-c', ssh_command_config, 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_key_2_3(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_command = 'ssh -o "BatchMode=yes" -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.3.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress'], env={'GIT_SSH_COMMAND': ssh_command}) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 
0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() @defer.inlineCallbacks def test_mode_full_clean_ssh_key_1_7(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_wrapper_path = '/wrk/.bldr.wkdir.buildbot/ssh-wrapper.sh' # A place to store what gets read read = [] self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + Expect.behavior(downloadString(read.append)) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD'], env={'GIT_SSH': ssh_wrapper_path}) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) yield 
self.runStep() expected = '#!/bin/sh\nssh -o "BatchMode=yes" -i "{0}" "$@"\n'.format(ssh_key_path) self.assertEqual(b''.join(read), unicode2bytes(expected)) @parameterized.expand([ ('host_key', dict(sshHostKey='sshhostkey')), ('known_hosts', dict(sshKnownHosts='known_hosts')), ]) def test_mode_full_clean_ssh_host_key_2_10(self, name, class_params): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey', **class_params)) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_known_hosts_path = '/wrk/.bldr.wkdir.buildbot/ssh-known-hosts' ssh_command_config = \ 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}" ' \ '-o "UserKnownHostsFile={1}"'.format(ssh_key_path, ssh_known_hosts_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, 
Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_host_key_2_3(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey', sshHostKey='sshhostkey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_known_hosts_path = '/wrk/.bldr.wkdir.buildbot/ssh-known-hosts' ssh_command = \ 'ssh -o "BatchMode=yes" -i "{0}" ' \ '-o "UserKnownHostsFile={1}"'.format(ssh_key_path, ssh_known_hosts_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.3.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress'], env={'GIT_SSH_COMMAND': ssh_command}) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) 
self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() @defer.inlineCallbacks def test_mode_full_clean_ssh_host_key_1_7(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey', sshHostKey='sshhostkey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_wrapper_path = '/wrk/.bldr.wkdir.buildbot/ssh-wrapper.sh' ssh_known_hosts_path = '/wrk/.bldr.wkdir.buildbot/ssh-known-hosts' # A place to store what gets read read = [] self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + Expect.behavior(downloadString(read.append)) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD'], env={'GIT_SSH': ssh_wrapper_path}) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + 
ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) yield self.runStep() expected = ( '#!/bin/sh\n' 'ssh -o "BatchMode=yes" -i "{0}" -o "UserKnownHostsFile={1}" "$@"\n' ).format(ssh_key_path, ssh_known_hosts_path) self.assertEqual(b''.join(read), unicode2bytes(expected)) def test_mode_full_clean_ssh_host_key_1_7_progress(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey', sshHostKey='sshhostkey', progress=True)) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_wrapper_path = '/wrk/.bldr.wkdir.buildbot/ssh-wrapper.sh' ssh_known_hosts_path = '/wrk/.bldr.wkdir.buildbot/ssh-known-hosts' self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 
'http://github.com/buildbot/buildbot.git', 'HEAD'], env={'GIT_SSH': ssh_wrapper_path}) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_host_key_2_10_abs_workdir(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey', sshHostKey='sshhostkey'), wantDefaultWorkdir=False) workdir = '/myworkdir/workdir' self.build.workdir = workdir ssh_workdir = '/myworkdir/.bldr.workdir.buildbot' ssh_key_path = '/myworkdir/.bldr.workdir.buildbot/ssh-key' ssh_known_hosts_path = '/myworkdir/.bldr.workdir.buildbot/ssh-known-hosts' ssh_command_config = \ 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}" ' \ '-o "UserKnownHostsFile={1}"'.format(ssh_key_path, ssh_known_hosts_path) self.expectCommands( ExpectShell(workdir=workdir, command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='/myworkdir/workdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir=workdir, mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir=workdir, mode=0o400)) + 0, Expect('listdir', {'dir': workdir}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir=workdir, command=['git', 'clean', '-f', '-f', '-d']) + 0, 
ExpectShell(workdir=workdir, command=['git', '-c', ssh_command_config, 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir=workdir, command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir=workdir, command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_win32path(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.changeWorkerSystem('win32') self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file=r'wkdir\.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_win32path_ssh_key_2_10(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) self.changeWorkerSystem('win32') ssh_workdir = '\\wrk\\.bldr.wkdir.buildbot' ssh_key_path = 
'\\wrk\\.bldr.wkdir.buildbot\\ssh-key' ssh_command_config = 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='wkdir\\.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_win32path_ssh_key_2_3(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) self.changeWorkerSystem('win32') ssh_workdir = '\\wrk\\.bldr.wkdir.buildbot' ssh_key_path = '\\wrk\\.bldr.wkdir.buildbot\\ssh-key' ssh_command = 'ssh -o "BatchMode=yes" -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.3.0') + 0, Expect('stat', dict(file='wkdir\\.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, 
logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress'], env={'GIT_SSH_COMMAND': ssh_command}) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_win32path_ssh_key_1_7(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) self.changeWorkerSystem('win32') ssh_workdir = '\\wrk\\.bldr.wkdir.buildbot' ssh_key_path = '\\wrk\\.bldr.wkdir.buildbot\\ssh-key' ssh_wrapper_path = '\\wrk\\.bldr.wkdir.buildbot\\ssh-wrapper.sh' self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('stat', dict(file='wkdir\\.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + 
0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD'], env={'GIT_SSH': ssh_wrapper_path}) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_timeout(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', timeout=1, mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_patch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', 
mode='full', method='clean'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x'], logEnviron=True) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['git', 'update-index', '--refresh']) + 0, ExpectShell(workdir='wkdir', command=['git', 'apply', '--index', '-p', '1'], initialStdin='patch') + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_patch_worker_2_16(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git 
version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x'], logEnviron=True) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['git', 'update-index', '--refresh']) + 0, ExpectShell(workdir='wkdir', command=['git', 'apply', '--index', '-p', '1'], initialStdin='patch') + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_patch_fail(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', 
command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['git', 'update-index', '--refresh']) + 0, ExpectShell(workdir='wkdir', command=['git', 'apply', '--index', '-p', '1'], initialStdin='patch') + 1, ) self.expectOutcome(result=FAILURE) self.expectNoProperty('got_revision') return self.runStep() def test_mode_full_clean_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'test-branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 
'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_non_empty_builddir(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['file1', 'file2']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--branch', 'test-branch', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_parsefail(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + ExpectShell.log('stdio', stderr="fatal: Could not parse object " "'b08076bc71c7813038f2cefedff9c5b678d225a8'.\n") + 128, ) self.expectOutcome(result=FAILURE) 
self.expectNoProperty('got_revision') return self.runStep() def test_mode_full_clean_no_existing_repo(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_no_existing_repo_with_reference(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', reference='path/to/reference/repo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--reference', 'path/to/reference/repo', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_no_existing_repo_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + 
ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--branch', 'test-branch', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_no_existing_repo_with_origin(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', origin='foo', progress=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--origin', 'foo', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_submodule(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', submodules=True, progress=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 
'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'sync']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'foreach', '--recursive', 'git clean -f -f -d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_submodule_remotes(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', submodules=True, progress=True, remoteSubmodules=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'sync']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive', '--remote']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'foreach', '--recursive', 'git clean -f -f -d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', 
'-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_submodule_remotes(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', submodules=True, progress=True, remoteSubmodules=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive', '--remote']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0 ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 
'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clone_fails(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 1, # clone fails ) self.expectOutcome(result=FAILURE, state_string="update (failure)") self.expectNoProperty('got_revision') return self.runStep() def test_mode_full_clobber_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True, branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--branch', 'test-branch', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_no_branch_support(self): self.setupStep( 
self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.5.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'test-branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_oldworker(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', progress=True)) self.step.build.getWorkerCommandVersion = lambda cmd, oldversion: "2.15" self.expectCommands( ExpectShell(workdir='wkdir', interruptSignal='TERM', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.git', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', interruptSignal='TERM', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', interruptSignal='TERM', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', interruptSignal='TERM', command=['git', 
'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', progress=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_version_format(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5.1') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', 
stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_retry(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', retry=(0, 1))) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', 
command=['git', 'checkout', '-B', 'test-branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_branch_ssh_key_2_10(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', branch='test-branch', sshPrivateKey='ssh-key', progress=True)) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_command_config = \ 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'test-branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return 
self.runStep() def test_mode_full_fresh(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_clean_fails(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 1, # clean fails -> clobber Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) 
self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_given_revision(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental'), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'cat-file', '-e', 'abcdef01']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'abcdef01']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_given_revision_not_exists(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental'), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'cat-file', '-e', 'abcdef01']) + 1, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'abcdef01']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) 
self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_submodule(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', submodules=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'sync']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'foreach', '--recursive', 'git clean -f -f -d -x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS, state_string="update") self.expectProperty('got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_submodule_git_newer_1_7_6(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', submodules=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.6') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', 
logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'sync']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive', '--force']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'foreach', '--recursive', 'git clean -f -f -d -x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_submodule_v1_7_8(self): """This tests the same as test_mode_full_fresh_submodule, but the "submodule update" command should be different for Git v1.7.8+.""" self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', submodules=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.8') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, 
ExpectShell(workdir='wkdir', command=['git', 'submodule', 'sync']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive', '--force', '--checkout']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'foreach', '--recursive', 'git clean -f -f -d -x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_shallow(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', shallow=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--depth', '1', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_shallow_depth(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', shallow="100")) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, 
Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--depth', '100', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_no_shallow(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_retryFetch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', retryFetch=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, 
ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 1, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_retryFetch_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', retryFetch=True, branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 1, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'test-branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_clobberOnFailure(self): 
self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', clobberOnFailure=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_clobberOnFailure_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', clobberOnFailure=True, branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--branch', 
'test-branch', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_copy(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)), Expect('listdir', {'dir': 'source'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='source', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='source', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_copy_ssh_key_2_10(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='copy', sshPrivateKey='sshkey')) ssh_workdir = '/wrk/.bldr.source.buildbot' ssh_key_path = '/wrk/.bldr.source.buildbot/ssh-key' ssh_command_config = \ 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', 
stdout='git version 2.10.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='source', mode=0o400)) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)), Expect('listdir', {'dir': 'source'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='source', command=['git', '-c', ssh_command_config, 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='source', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_copy_shallow(self): with self.assertRaisesConfigError( "shallow only possible with mode 'full' and method 'clobber'"): self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='copy', shallow=True) def test_mode_incremental_no_existing_repo(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', 
'--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_no_existing_repo_oldworker(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental')) self.step.build.getWorkerCommandVersion = lambda cmd, oldversion: "2.15" self.expectCommands( ExpectShell(workdir='wkdir', interruptSignal='TERM', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.git', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', interruptSignal='TERM', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', interruptSignal='TERM', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_given_revision(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', 
command=['git', 'checkout', '-f', 'abcdef01']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_revparse_failure(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'abcdef01']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ada95a1d') # too short + 0, ) self.expectOutcome(result=FAILURE) self.expectNoProperty('got_revision') return self.runStep() def test_mode_full_clobber_submodule(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', submodules=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive']) + 0, ExpectShell(workdir='wkdir', command=['git', 
'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_repourl(self): with self.assertRaisesConfigError("must provide repourl"): self.stepClass(mode="full") def test_mode_full_fresh_revision(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', progress=True), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'abcdef01']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_retry(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, 
timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_clobberOnFailure(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', clobberOnFailure=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_no_method(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') 
+ 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_with_env(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', env={'abc': '123'})) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_logEnviron(self): self.setupStep( 
self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', logEnviron=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version'], logEnviron=False) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=False)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD'], logEnviron=False) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_wkdir_doesnt_exist(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + 1, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_getDescription(self): # clone of: test_mode_incremental # only difference is to set 
the getDescription property self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', getDescription=True)) self.expectCommands( # copied from test_mode_incremental: ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, # plus this to test describe: ExpectShell(workdir='wkdir', command=['git', 'describe', 'HEAD']) + ExpectShell.log('stdio', stdout='Tag-1234') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) self.expectProperty('commit-description', 'Tag-1234', self.sourceName) return self.runStep() def test_getDescription_failed(self): # clone of: test_mode_incremental # only difference is to set the getDescription property # this tests when 'git describe' fails; for example, there are no # tags in the repository self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', getDescription=True)) self.expectCommands( # copied from test_mode_incremental: ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 
'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, # plus this to test describe: ExpectShell(workdir='wkdir', command=['git', 'describe', 'HEAD']) + ExpectShell.log('stdio', stdout='') + 128, # error, but it's suppressed ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) self.expectNoProperty('commit-description') return self.runStep() def setup_getDescription_test(self, setup_args, output_args, expect_head=True, codebase=None): # clone of: test_mode_full_clobber # only difference is to set the getDescription property kwargs = {} if codebase is not None: kwargs.update(codebase=codebase) self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True, getDescription=setup_args, **kwargs)) self.expectCommands( # copied from test_mode_full_clobber: ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, # plus this to test describe: ExpectShell(workdir='wkdir', command=['git', 'describe'] + output_args + (['HEAD'] if expect_head else [])) + ExpectShell.log('stdio', stdout='Tag-1234') + 0, ) if codebase: self.expectOutcome(result=SUCCESS, state_string="update " + codebase) 
self.expectProperty('got_revision', {codebase: 'f6ad368298bd941e934a41f3babc827b2aa95a1d'}, self.sourceName) self.expectProperty( 'commit-description', {codebase: 'Tag-1234'}, self.sourceName) else: self.expectOutcome(result=SUCCESS, state_string="update") self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) self.expectProperty('commit-description', 'Tag-1234', self.sourceName) def test_getDescription_empty_dict(self): self.setup_getDescription_test( setup_args={}, output_args=[] ) return self.runStep() def test_getDescription_empty_dict_with_codebase(self): self.setup_getDescription_test( setup_args={}, output_args=[], codebase='baz' ) return self.runStep() def test_getDescription_match(self): self.setup_getDescription_test( setup_args={'match': 'stuff-*'}, output_args=['--match', 'stuff-*'] ) return self.runStep() def test_getDescription_match_false(self): self.setup_getDescription_test( setup_args={'match': None}, output_args=[] ) return self.runStep() def test_getDescription_tags(self): self.setup_getDescription_test( setup_args={'tags': True}, output_args=['--tags'] ) return self.runStep() def test_getDescription_tags_false(self): self.setup_getDescription_test( setup_args={'tags': False}, output_args=[] ) return self.runStep() def test_getDescription_all(self): self.setup_getDescription_test( setup_args={'all': True}, output_args=['--all'] ) return self.runStep() def test_getDescription_all_false(self): self.setup_getDescription_test( setup_args={'all': False}, output_args=[] ) return self.runStep() def test_getDescription_abbrev(self): self.setup_getDescription_test( setup_args={'abbrev': 7}, output_args=['--abbrev=7'] ) return self.runStep() def test_getDescription_abbrev_zero(self): self.setup_getDescription_test( setup_args={'abbrev': 0}, output_args=['--abbrev=0'] ) return self.runStep() def test_getDescription_abbrev_false(self): self.setup_getDescription_test( setup_args={'abbrev': False}, output_args=[] ) 
return self.runStep() def test_getDescription_dirty(self): self.setup_getDescription_test( setup_args={'dirty': True}, output_args=['--dirty'], expect_head=False ) return self.runStep() def test_getDescription_dirty_empty_str(self): self.setup_getDescription_test( setup_args={'dirty': ''}, output_args=['--dirty'], expect_head=False ) return self.runStep() def test_getDescription_dirty_str(self): self.setup_getDescription_test( setup_args={'dirty': 'foo'}, output_args=['--dirty=foo'], expect_head=False ) return self.runStep() def test_getDescription_dirty_false(self): self.setup_getDescription_test( setup_args={'dirty': False}, output_args=[], expect_head=True ) return self.runStep() def test_getDescription_dirty_none(self): self.setup_getDescription_test( setup_args={'dirty': None}, output_args=[], expect_head=True ) return self.runStep() def test_getDescription_contains(self): self.setup_getDescription_test( setup_args={'contains': True}, output_args=['--contains'] ) return self.runStep() def test_getDescription_contains_false(self): self.setup_getDescription_test( setup_args={'contains': False}, output_args=[] ) return self.runStep() def test_getDescription_candidates(self): self.setup_getDescription_test( setup_args={'candidates': 7}, output_args=['--candidates=7'] ) return self.runStep() def test_getDescription_candidates_zero(self): self.setup_getDescription_test( setup_args={'candidates': 0}, output_args=['--candidates=0'] ) return self.runStep() def test_getDescription_candidates_false(self): self.setup_getDescription_test( setup_args={'candidates': False}, output_args=[] ) return self.runStep() def test_getDescription_exact_match(self): self.setup_getDescription_test( setup_args={'exact-match': True}, output_args=['--exact-match'] ) return self.runStep() def test_getDescription_exact_match_false(self): self.setup_getDescription_test( setup_args={'exact-match': False}, output_args=[] ) return self.runStep() def test_getDescription_debug(self): 
self.setup_getDescription_test( setup_args={'debug': True}, output_args=['--debug'] ) return self.runStep() def test_getDescription_debug_false(self): self.setup_getDescription_test( setup_args={'debug': False}, output_args=[] ) return self.runStep() def test_getDescription_long(self): self.setup_getDescription_test( setup_args={'long': True}, output_args=['--long'] ) def test_getDescription_long_false(self): self.setup_getDescription_test( setup_args={'long': False}, output_args=[] ) return self.runStep() def test_getDescription_always(self): self.setup_getDescription_test( setup_args={'always': True}, output_args=['--always'] ) def test_getDescription_always_false(self): self.setup_getDescription_test( setup_args={'always': False}, output_args=[] ) return self.runStep() def test_getDescription_lotsa_stuff(self): self.setup_getDescription_test( setup_args={'match': 'stuff-*', 'abbrev': 6, 'exact-match': True}, output_args=['--exact-match', '--match', 'stuff-*', '--abbrev=6'], codebase='baz' ) return self.runStep() def test_config_option(self): name = 'url.http://github.com.insteadOf' value = 'blahblah' self.setupStep( self.stepClass(repourl='{}/buildbot/buildbot.git'.format(value), mode='full', method='clean', config={name: value})) prefix = ['git', '-c', '{}={}'.format(name, value)] self.expectCommands( ExpectShell(workdir='wkdir', command=prefix + ['--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=prefix + ['clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=prefix + ['fetch', '-f', '-t', '{}/buildbot/buildbot.git'.format(value), 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=prefix + ['checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=prefix + ['rev-parse', 'HEAD']) + ExpectShell.log('stdio', 
stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_worker_connection_lost(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + ('err', error.ConnectionLost()) ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() @defer.inlineCallbacks def _test_WorkerSetupError(self, _dovccmd, step, msg): self.patch(self.stepClass, "_dovccmd", _dovccmd) gitStep = self.setupStep(step) with self.assertRaisesRegex(WorkerSetupError, msg): yield gitStep.run_vc("branch", "revision", "patch") def test_noGitCommandInstalled(self): @defer.inlineCallbacks def _dovccmd(command, abandonOnFailure=True, collectStdout=False, initialStdin=None): """ Simulate the case where there is no git command. """ yield return "command not found:" step = self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean') msg = 'git is not installed on worker' return self._test_WorkerSetupError(_dovccmd, step, msg) def test_gitCommandOutputShowsNoVersion(self): @defer.inlineCallbacks def _dovccmd(command, abandonOnFailure=True, collectStdout=False, initialStdin=None): """ Instead of outputting something like "git version 2.11", simulate truncated output which has no version string, to exercise error handling. 
""" yield return "git " step = self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean') msg = 'git is not installed on worker' return self._test_WorkerSetupError(_dovccmd, step, msg) def test_config_get_description_not_dict_or_boolean(self): with self.assertRaisesConfigError("Git: getDescription must be a boolean or a dict."): self.stepClass(repourl="http://github.com/buildbot/buildbot.git", getDescription=["list"]) def test_config_invalid_method_with_full(self): with self.assertRaisesConfigError("Git: invalid method for mode 'full'."): self.stepClass(repourl="http://github.com/buildbot/buildbot.git", mode='full', method='unknown') class TestGitPush(steps.BuildStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): stepClass = git.GitPush def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_push_simple(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_force(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', force=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch', '--force']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_fail(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', force=True)) self.expectCommands( 
ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch', '--force']) + ExpectShell.log('stdio', stderr="error: failed to push some refs to \n") + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() def test_push_ssh_key_2_10(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshKey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_command_config = \ 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'push', url, 'testbranch']) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_ssh_key_2_3(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshKey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_command = 'ssh -o "BatchMode=yes" -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.3.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, 
workdir='wkdir', mode=0o400)) + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch'], env={'GIT_SSH_COMMAND': ssh_command}) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_ssh_key_1_7(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshKey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_wrapper_path = '/wrk/.bldr.wkdir.buildbot/ssh-wrapper.sh' self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch'], env={'GIT_SSH': ssh_wrapper_path}) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_ssh_host_key_2_10(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshkey', sshHostKey='sshhostkey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_known_hosts_path = '/wrk/.bldr.wkdir.buildbot/ssh-known-hosts' ssh_command_config = \ 'core.sshCommand=ssh -o "BatchMode=yes" -i "{0}" ' \ '-o "UserKnownHostsFile={1}"'.format(ssh_key_path, ssh_known_hosts_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + 
ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'push', url, 'testbranch']) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_ssh_host_key_2_3(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshkey', sshHostKey='sshhostkey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_known_hosts_path = '/wrk/.bldr.wkdir.buildbot/ssh-known-hosts' ssh_command = \ 'ssh -o "BatchMode=yes" -i "{0}" ' \ '-o "UserKnownHostsFile={1}"'.format(ssh_key_path, ssh_known_hosts_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.3.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch'], env={'GIT_SSH_COMMAND': ssh_command}) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() 
def test_push_ssh_host_key_1_7(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshkey', sshHostKey='sshhostkey')) ssh_workdir = '/wrk/.bldr.wkdir.buildbot' ssh_key_path = '/wrk/.bldr.wkdir.buildbot/ssh-key' ssh_wrapper_path = '/wrk/.bldr.wkdir.buildbot/ssh-wrapper.sh' ssh_known_hosts_path = '/wrk/.bldr.wkdir.buildbot/ssh-known-hosts' self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch'], env={'GIT_SSH': ssh_wrapper_path}) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_raise_no_git(self): @defer.inlineCallbacks def _checkFeatureSupport(self): yield return False url = 'ssh://github.com/test/test.git' step = self.stepClass(workdir='wkdir', repourl=url, branch='testbranch') self.patch(self.stepClass, "checkFeatureSupport", _checkFeatureSupport) self.setupStep(step) self.expectOutcome(result=EXCEPTION) self.runStep() self.flushLoggedErrors(WorkerSetupError) def test_config_fail_no_branch(self): with self.assertRaisesConfigError("GitPush: must provide branch"): self.stepClass(workdir='wkdir', repourl="url") class TestGitTag(steps.BuildStepMixin, 
config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): stepClass = git.GitTag def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_tag_annotated(self): messages = ['msg1', 'msg2'] self.setupStep( self.stepClass(workdir='wkdir', tagName='myTag', annotated=True, messages=messages)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'tag', '-a', 'myTag', '-m', 'msg1', '-m', 'msg2']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_tag_simple(self): self.setupStep( self.stepClass(workdir='wkdir', tagName='myTag')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'tag', 'myTag']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_tag_force(self): self.setupStep( self.stepClass(workdir='wkdir', tagName='myTag', force=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'tag', 'myTag', '--force']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_tag_fail_already_exist(self): self.setupStep( self.stepClass(workdir='wkdir', tagName='myTag')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'tag', 'myTag']) + ExpectShell.log('stdio', stderr="fatal: tag \'%s\' already exist\n") + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() def test_config_annotated_no_messages(self): with self.assertRaises(bbconfig.ConfigErrors): self.setupStep( self.stepClass(workdir='wkdir', 
tagName='myTag', annotated=True)) def test_config_no_tag_name(self): with self.assertRaises(bbconfig.ConfigErrors): self.setupStep( self.stepClass(workdir='wkdir')) def test_config_not_annotated_but_meessages(self): with self.assertRaises(bbconfig.ConfigErrors): self.setupStep( self.stepClass(workdir='wkdir', tagName='myTag', messages=['msg'])) def test_config_annotated_message_not_list(self): with self.assertRaises(bbconfig.ConfigErrors): self.setupStep( self.stepClass(workdir='wkdir', tagName='myTag', annotated=True, messages="msg")) def test_raise_no_git(self): @defer.inlineCallbacks def _checkFeatureSupport(self): yield return False step = self.stepClass(workdir='wdir', tagName='myTag') self.patch(self.stepClass, "checkFeatureSupport", _checkFeatureSupport) self.setupStep(step) self.expectOutcome(result=EXCEPTION) self.runStep() self.flushLoggedErrors(WorkerSetupError) class TestGitCommit(steps.BuildStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): stepClass = git.GitCommit def setUp(self): self.setUpTestReactor() self.message_list = ['my commit', '42'] self.path_list = ['file1.txt', 'file2.txt'] return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_add_fail(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_commit(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', 
stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'commit', '-m', 'my commit', '-m', '42']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_commit_empty_disallow(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list, emptyCommits='disallow')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'commit', '-m', 'my commit', '-m', '42']) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_commit_empty_allow(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list, emptyCommits='create-empty-commit')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'commit', '-m', 'my commit', '-m', '42', '--allow-empty']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_commit_empty_ignore_withcommit(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list, emptyCommits='ignore')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', 
'--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'status', '--porcelain=v1']) + ExpectShell.log('stdio', stdout='MM file2.txt\n?? file3.txt') + 0, ExpectShell(workdir='wkdir', command=['git', 'commit', '-m', 'my commit', '-m', '42']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_commit_empty_ignore_withoutcommit(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list, emptyCommits='ignore')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'status', '--porcelain=v1']) + ExpectShell.log('stdio', stdout='?? 
file3.txt') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_commit_empty_ignore_witherror(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list, emptyCommits='ignore')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'status', '--porcelain=v1']) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_detached_head(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='') + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_config_no_files_arg(self): with self.assertRaisesConfigError( "GitCommit: must provide paths"): self.stepClass(workdir='wkdir', messages=self.message_list) def test_config_files_not_a_list(self): with self.assertRaisesConfigError( "GitCommit: paths must be a list"): self.stepClass(workdir='wkdir', paths="test.txt", messages=self.message_list) def test_config_no_messages_arg(self): with self.assertRaisesConfigError( "GitCommit: must provide messages"): self.stepClass(workdir='wkdir', paths=self.path_list) def test_config_messages_not_a_list(self): with self.assertRaisesConfigError( "GitCommit: messages must be a list"): self.stepClass(workdir='wkdir', paths=self.path_list, messages="my message") def test_raise_no_git(self): @defer.inlineCallbacks def _checkFeatureSupport(self): yield return False step 
= self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list) self.patch(self.stepClass, "checkFeatureSupport", _checkFeatureSupport) self.setupStep(step) self.expectOutcome(result=EXCEPTION) self.runStep() self.flushLoggedErrors(WorkerSetupError) buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_github.py000066400000000000000000000114051413250514000257640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.process.results import SUCCESS from buildbot.steps.source import github from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.unit.steps import test_source_git # GitHub step shall behave exactly like Git, and thus is inheriting its tests class TestGitHub(test_source_git.TestGit): stepClass = github.GitHub def test_with_merge_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean'), dict(branch='refs/pull/1234/merge', revision='12345678')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, # here we always ignore revision, and fetch the merge branch ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'http://github.com/buildbot/buildbot.git', 'refs/pull/1234/merge', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'refs/pull/1234/merge']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', 'GitHub') return self.runStep() def test_with_head_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean'), dict(branch='refs/pull/1234/head', revision='12345678')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + 
ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, # in the case of the head, we try to find if the head is already present # and reset to that without fetching ExpectShell(workdir='wkdir', command=['git', 'cat-file', '-e', '12345678']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', '12345678']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'refs/pull/1234/head']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', 'GitHub') return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_gitlab.py000066400000000000000000000101721413250514000257440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process.results import SUCCESS from buildbot.steps.source import gitlab from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestGitLab(sourcesteps.SourceStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): stepClass = gitlab.GitLab def setUp(self): self.setUpTestReactor() self.sourceName = self.stepClass.__name__ return self.setUpSourceStep() def setupStep(self, step, args, **kwargs): step = super().setupStep(step, args, **kwargs) step.build.properties.setProperty("source_branch", "ms-viewport", "gitlab source branch") step.build.properties.setProperty("source_git_ssh_url", "git@gitlab.example.com:build/awesome_project.git", "gitlab source git ssh url") step.build.properties.setProperty("source_project_id", 2337, "gitlab source project ID") step.build.properties.setProperty("target_branch", "master", "gitlab target branch") step.build.properties.setProperty("target_git_ssh_url", "git@gitlab.example.com:mmusterman/awesome_project.git", "gitlab target git ssh url") step.build.properties.setProperty("target_project_id", 239, "gitlab target project ID") return step def tearDown(self): return self.tearDownSourceStep() def test_with_merge_branch(self): self.setupStep( self.stepClass(repourl='git@gitlab.example.com:mmusterman/awesome_project.git', mode='full', method='clean'), dict(branch='master', revision='12345678')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir'}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', 
'-d']) + 0, # here we always ignore revision, and fetch the merge branch ExpectShell(workdir='wkdir', command=['git', 'fetch', '-f', '-t', 'git@gitlab.example.com:build/awesome_project.git', 'ms-viewport', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-f', 'FETCH_HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'ms-viewport']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', 'GitLab') return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_mercurial.py000066400000000000000000001423001413250514000264640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import error from twisted.python.reflect import namedModule from twisted.trial import unittest from buildbot import config from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import mercurial from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestMercurial(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def patch_workerVersionIsOlderThan(self, result): self.patch( mercurial.Mercurial, 'workerVersionIsOlderThan', lambda x, y, z: result) def test_no_repourl(self): with self.assertRaises(config.ConfigErrors): mercurial.Mercurial(mode="full") def test_incorrect_mode(self): with self.assertRaises(config.ConfigErrors): mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='invalid') def test_incorrect_method(self): with self.assertRaises(config.ConfigErrors): mercurial.Mercurial(repourl='http://hg.mozilla.org', method='invalid') def test_incorrect_branchType(self): with self.assertRaises(config.ConfigErrors): mercurial.Mercurial(repourl='http://hg.mozilla.org', branchType='invalid') def test_mode_full_clean(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', 
'--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_win32path(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo')) self.build.path_module = namedModule('ntpath') self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file=r'wkdir\.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file=r'wkdir\.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) 
self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_timeout(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', timeout=1, mode='full', method='clean', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_patch(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, 
ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=[ 'hg', '--verbose', 'import', '--no-commit', '-p', '1', '-'], initialStdin='patch') + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_patch_worker_2_16(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', 
command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=[ 'hg', '--verbose', 'import', '--no-commit', '-p', '1', '-'], initialStdin='patch') + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_patch_fail(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, Expect('downloadFile', 
dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=[ 'hg', '--verbose', 'import', '--no-commit', '-p', '1', '-'], initialStdin='patch') + 1, ) self.expectOutcome(result=FAILURE, state_string="update (failure)") return self.runStep() def test_mode_full_clean_no_existing_repo(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default'], logEnviron=True) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clobber(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clobber', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, 
ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_fresh(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='fresh', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge', '--all']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_fresh_no_existing_repo(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='fresh', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', 
command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default'], logEnviron=True) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_fresh_retry(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='fresh', branchType='inrepo', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default'], logEnviron=True) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_no_existing_repo_dirname(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='dirname'), ) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', 
'--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, # does not exist ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_retry(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='dirname', retry=(0, 1)), ) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, # does not exist ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_branch_change_dirname(self): self.setupStep( 
mercurial.Mercurial(repourl='http://hg.mozilla.org/', mode='incremental', branchType='dirname', defaultBranch='devel'), dict(branch='stable') ) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org/stable']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org/stable', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_no_existing_repo_inrepo(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, # does not exist ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + 
ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_existing_repo(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, # directory exists ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_existing_repo_added_files(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, # directory exists ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, 
ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + ExpectShell.log('stdio', stdout='foo\nbar/baz\n') + 1, Expect('rmdir', dict(dir=['wkdir/foo', 'wkdir/bar/baz'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_existing_repo_added_files_old_rmdir(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo')) self.patch_workerVersionIsOlderThan(True) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, # directory exists ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + ExpectShell.log('stdio', stdout='foo\nbar/baz\n') + 1, Expect('rmdir', dict(dir='wkdir/foo', logEnviron=True)) + 0, Expect('rmdir', dict(dir='wkdir/bar/baz', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def 
test_mode_incremental_given_revision(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo'), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'abcdef01']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'abcdef01']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_branch_change(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo'), dict( branch='stable', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'stable']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, 
ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'stable']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_branch_change_no_clobberOnBranchChange(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo', clobberOnBranchChange=False), dict( branch='stable', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'stable']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'stable']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_env(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo', env={'abc': '123'})) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version'], env={'abc': '123'}) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, 
ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()'], env={'abc': '123'}) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_logEnviron(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo', logEnviron=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version'], logEnviron=False) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=False)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=False)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch'], logEnviron=False) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()'], logEnviron=False) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default'], 
logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n'], logEnviron=False) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_command_fails(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='fresh', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_worker_connection_lost(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_mtn.py000066400000000000000000001376541413250514000253170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import error from twisted.trial import unittest from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import mtn from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestMonotone(sourcesteps.SourceStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): # Just some random revision id to test. REVID = '95215e2a9a9f8b6f5c9664e3807cd34617ea928c' MTN_VER = 'monotone 1.0 (base revision: UNKNOWN_REV)' def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def test_mode_full_clean(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', 
'--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clean_patch(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', 
stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clean_patch_worker_2_16(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', 
self.REVID, 'Monotone') return self.runStep() def test_mode_full_clean_patch_fail(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 1, ) self.expectOutcome(result=FAILURE, state_string="update (failure)") return self.runStep() def test_mode_full_clean_no_existing_db(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + 
ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clean_no_existing_checkout(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', 
stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clean_from_scratch(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clobber(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clobber', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, 
ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clobber_no_existing_db(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clobber', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental_no_existing_db(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 
0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental_no_existing_checkout(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental_from_scratch(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, 
Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental_retry(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master', retry=(0, 1))) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, 
ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 1, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_fresh(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='fresh', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'ignored']) + ExpectShell.log('stdio', stdout='file3\nfile4') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2', 'wkdir/file3', 'wkdir/file4'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + 
ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental_given_revision(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master'), dict(revision='abcdef01',)) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'abcdef01', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout='abcdef019a9f8b6f5c9664e3807cd34617ea928c') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'abcdef019a9f8b6f5c9664e3807cd34617ea928c', 'Monotone') return self.runStep() def test_mode_full_copy(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='copy', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', 
dict(file='source/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='build', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_no_method(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='build', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_incorrect_method(self): with self.assertRaisesConfigError( "Invalid method for mode == full"): mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='wrongmethod', branch='master') def test_incremental_invalid_method(self): with self.assertRaisesConfigError( "Incremental mode does not require method"): 
mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', method='fresh', branch="master") def test_repourl(self): with self.assertRaisesConfigError("must provide repourl"): mtn.Monotone(mode="full", branch="master") def test_branch(self): with self.assertRaisesConfigError("must provide branch"): mtn.Monotone(repourl='mtn://localhost/monotone', mode="full",) def test_mode_incremental_patched(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_worker_connection_lost(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, 
state_string="update (retry)") return self.runStep() def test_database_migration(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='migration needed') + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'migrate', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_database_invalid(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='not a monotone database') + 0, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_database_too_new(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', 
stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='too new, cannot use') + 0, Expect('rmdir', dict(dir='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_database_empty(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='database has no tables') + 0, Expect('rmdir', dict(dir='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 
'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_p4.py000066400000000000000000001042671413250514000250360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Portions Copyright 2013 Bad Dog Consulting import platform import textwrap from twisted.internet import error from twisted.python import reflect from twisted.trial import unittest from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source.p4 import P4 from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.properties import ConstantRenderable _is_windows = (platform.system() == 'Windows') class TestP4(sourcesteps.SourceStepMixin, TestReactorMixin, ConfigErrorsMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def setupStep(self, step, args=None, patch=None, **kwargs): if 
args is None: args = {} step = super().setupStep(step, args={}, patch=None, **kwargs) self.build.getSourceStamp().revision = args.get('revision', None) # builddir property used to create absolute path required in perforce # client spec. workspace_dir = '/home/user/workspace' if _is_windows: workspace_dir = r'C:\Users\username\Workspace' self.build.path_module = reflect.namedModule("ntpath") self.properties.setProperty('builddir', workspace_dir, 'P4') def test_no_empty_step_config(self): with self.assertRaisesConfigError('You must provide p4base or p4viewspec'): P4() def test_p4base_has_whitespace(self): with self.assertRaisesConfigError( 'p4base should not end with a trailing / [p4base = //depot with space/]'): P4(p4base='//depot with space/') def test_p4branch_has_whitespace(self): with self.assertRaisesConfigError( 'p4base should not end with a trailing / [p4base = //depot/]'): P4(p4base='//depot/', p4branch='branch with space') def test_no_p4base_has_leading_slash_step_config(self): with self.assertRaisesConfigError('p4base should start with // [p4base = depot/]'): P4(p4base='depot/') def test_no_multiple_type_step_config(self): with self.assertRaisesConfigError( 'Either provide p4viewspec or p4base and p4branch (and optionally p4extra_views)'): P4(p4viewspec=('//depot/trunk', ''), p4base='//depot', p4branch='trunk', p4extra_views=['src', 'doc']) def test_no_p4viewspec_is_string_step_config(self): with self.assertRaisesConfigError( 'p4viewspec must not be a string, and should be a sequence of 2 element sequences'): P4(p4viewspec='a_bad_idea') def test_no_p4base_has_trailing_slash_step_config(self): with self.assertRaisesConfigError( 'p4base should not end with a trailing / [p4base = //depot/]'): P4(p4base='//depot/') def test_no_p4branch_has_trailing_slash_step_config(self): with self.assertRaisesConfigError( 'p4branch should not end with a trailing / [p4branch = blah/]'): P4(p4base='//depot', p4branch='blah/') def 
test_no_p4branch_with_no_p4base_step_config(self): with self.assertRaisesConfigError('You must provide p4base or p4viewspec'): P4(p4branch='blah') def test_no_p4extra_views_with_no_p4base_step_config(self): with self.assertRaisesConfigError('You must provide p4base or p4viewspec'): P4(p4extra_views='blah') def test_incorrect_mode(self): with self.assertRaisesConfigError( "mode invalid is not an IRenderable, or one of ('incremental', 'full')"): P4(p4base='//depot', mode='invalid') def test_mode_incremental_p4base_with_revision(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass'), dict(revision='100',)) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self.expectCommands( ExpectShell(workdir='wkdir', # defaults to this, only changes if it has a copy mode. 
command=['p4', '-V']) # expected remote command + 0, # expected exit status ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1', 'client', '-i'], initialStdin=client_spec) + 0, ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1', 'sync', '//depot...@100']) + 0, ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1', 'changes', '-m1', '#have']) + ExpectShell.log('stdio', stdout="Change 100 on 2013/03/21 by user@machine \'duh\'") + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'P4') return self.runStep() def _incremental(self, client_stdin='', extra_args=None, workdir='wkdir', timeout=20 * 60): if extra_args is None: extra_args = [] self.expectCommands( ExpectShell(workdir=workdir, command=['p4', '-V']) # expected remote command + 0, # expected exit status ExpectShell(workdir=workdir, timeout=timeout, command=['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1', 'client', '-i'], initialStdin=client_stdin,) + 0, ExpectShell(workdir=workdir, timeout=timeout, command=(['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1'] + extra_args + ['sync'])) + 0, ExpectShell(workdir=workdir, timeout=timeout, command=['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1', 'changes', '-m1', '#have']) + ExpectShell.log('stdio', stdout="Change 100 on 2013/03/21 by user@machine \'duh\'") + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'P4') return self.runStep() def test_mode_incremental_p4base(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', 
p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_p4base_with_no_branch(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot/trunk', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_p4base_with_p4extra_views(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4extra_views=[('-//depot/trunk/test', 'test'), ('-//depot/trunk/doc', 'doc'), ('-//depot/trunk/white space', 'white space')], p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... \t-//depot/trunk/test/... //p4_client1/test/... \t-//depot/trunk/doc/... //p4_client1/doc/... \t"-//depot/trunk/white space/..." "//p4_client1/white space/..." 
''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_p4viewspec(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4viewspec=[('//depot/trunk/', ''), ('//depot/white space/', 'white space/'), ('-//depot/white space/excluded/', 'white space/excluded/')], p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... \t"//depot/white space/..." "//p4_client1/white space/..." \t"-//depot/white space/excluded/..." "//p4_client1/white space/excluded/..." ''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_p4viewspec_suffix(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4viewspec_suffix=None, p4viewspec=[('//depot/trunk/foo.xml', 'bar.xml'), ('//depot/white space/...', 'white space/...'), ('-//depot/white space/excluded/...', 'white space/excluded/...')], p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/foo.xml //p4_client1/bar.xml \t"//depot/white space/..." "//p4_client1/white space/..." \t"-//depot/white space/excluded/..." "//p4_client1/white space/excluded/..." 
''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_p4client_spec_options(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4client_spec_options='rmdir compress', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\trmdir compress LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_parent_workdir(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', workdir='../another_wkdir')) root_dir = '/home/user/another_wkdir' if _is_windows: root_dir = r'C:\Users\username\another_wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._incremental(client_stdin=client_spec, workdir='../another_wkdir') def test_mode_incremental_p4extra_args(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', p4extra_args=['-Zproxyload'])) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... 
''' % root_dir) self._incremental(client_stdin=client_spec, extra_args=['-Zproxyload']) def test_mode_incremental_timeout(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', timeout=60 * 60)) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._incremental(client_stdin=client_spec, timeout=60 * 60) def test_mode_incremental_stream(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', stream=True)) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal Stream:\t//depot/trunk ''' % root_dir) self._incremental(client_stdin=client_spec) def _full(self, client_stdin='', p4client='p4_client1', p4user='user', workdir='wkdir', extra_args=None, obfuscated_pass=True): if extra_args is None: extra_args = [] if obfuscated_pass: expected_pass = ('obfuscated', 'pass', 'XXXXXX') else: expected_pass = 'pass' self.expectCommands( ExpectShell(workdir=workdir, command=['p4', '-V']) # expected remote command + 0, # expected exit status ExpectShell(workdir=workdir, command=['p4', '-p', 'localhost:12000', '-u', p4user, '-P', expected_pass, '-c', p4client, 'client', '-i'], initialStdin=client_stdin) + 0, ExpectShell(workdir=workdir, command=['p4', '-p', 'localhost:12000', '-u', p4user, '-P', expected_pass, '-c', p4client] + extra_args + ['sync', '#none']) + 0, Expect('rmdir', {'dir': workdir, 'logEnviron': True}) + 0, 
ExpectShell(workdir=workdir, command=['p4', '-p', 'localhost:12000', '-u', p4user, '-P', expected_pass, '-c', p4client] + extra_args + ['sync']) + 0, ExpectShell(workdir=workdir, command=['p4', '-p', 'localhost:12000', '-u', p4user, '-P', expected_pass, '-c', p4client, 'changes', '-m1', '#have']) + ExpectShell.log('stdio', stdout="Change 100 on 2013/03/21 by user@machine \'duh\'") + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'P4') return self.runStep() def test_mode_full_p4base(self): self.setupStep( P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/...\n''' % root_dir) self._full(client_stdin=client_stdin) def test_mode_full_p4base_not_obfuscated(self): self.setupStep( P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass'), worker_version={'*': '2.15'}) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... 
//p4_client1/...\n''' % root_dir) self._full(client_stdin=client_stdin, obfuscated_pass=False) def test_mode_full_p4base_with_no_branch(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base='//depot/trunk', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._full(client_stdin=client_spec) def test_mode_full_p4viewspec(self): self.setupStep( P4(p4port='localhost:12000', mode='full', p4viewspec=[('//depot/main/', ''), ('//depot/main/white space/', 'white space/'), ('-//depot/main/white space/excluded/', 'white space/excluded/')], p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/main/... //p4_client1/... \t"//depot/main/white space/..." "//p4_client1/white space/..." \t"-//depot/main/white space/excluded/..." "//p4_client1/white space/excluded/..." 
''' % root_dir) self._full(client_stdin=client_stdin) def test_mode_full_renderable_p4base(self): # Note that the config check skips checking p4base if it's a renderable self.setupStep( P4(p4port='localhost:12000', mode='full', p4base=ConstantRenderable('//depot'), p4branch='release/1.0', p4user='user', p4client='p4_client2', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client2 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/release/1.0/... //p4_client2/...\n''' % root_dir) self._full(client_stdin=client_stdin, p4client='p4_client2') def test_mode_full_renderable_p4client(self): # Note that the config check skips checking p4base if it's a renderable self.setupStep( P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client=ConstantRenderable('p4_client_render'), p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client_render Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client_render/...\n''' % root_dir) self._full(client_stdin=client_stdin, p4client='p4_client_render') def test_mode_full_renderable_p4branch(self): # Note that the config check skips checking p4base if it's a renderable self.setupStep( P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch=ConstantRenderable('render_branch'), p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/render_branch/... 
//p4_client1/...\n''' % root_dir) self._full(client_stdin=client_stdin) def test_mode_full_renderable_p4viewspec(self): self.setupStep( P4(p4port='localhost:12000', mode='full', p4viewspec=[(ConstantRenderable('//depot/render_trunk/'), '')], p4user='different_user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client1 Owner: different_user Description: \tCreated by different_user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/render_trunk/... //p4_client1/...\n''' % root_dir) self._full(client_stdin=client_stdin, p4user='different_user') def test_mode_full_p4viewspec_suffix(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4viewspec_suffix=None, p4viewspec=[('//depot/trunk/foo.xml', 'bar.xml'), ('//depot/trunk/white space/...', 'white space/...'), ('-//depot/trunk/white space/excluded/...', 'white space/excluded/...')], p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/foo.xml //p4_client1/bar.xml \t"//depot/trunk/white space/..." "//p4_client1/white space/..." \t"-//depot/trunk/white space/excluded/..." "//p4_client1/white space/excluded/..." 
''' % root_dir) self._full(client_stdin=client_spec) def test_mode_full_p4client_spec_options(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4client_spec_options='rmdir compress', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\trmdir compress LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._full(client_stdin=client_spec) def test_mode_full_parent_workdir(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', workdir='../another_wkdir')) root_dir = '/home/user/another_wkdir' if _is_windows: root_dir = r'C:\Users\username\another_wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._full(client_stdin=client_spec, workdir='../another_wkdir') def test_mode_full_p4extra_args(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', p4extra_args=['-Zproxyload'])) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... 
''' % root_dir) self._full(client_stdin=client_spec, extra_args=['-Zproxyload']) def test_mode_full_stream(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', stream=True)) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal Stream:\t//depot/trunk ''' % root_dir) self._full(client_stdin=client_spec) def test_mode_full_stream_renderable_p4base(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base=ConstantRenderable('//depot'), p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', stream=True)) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal Stream:\t//depot/trunk ''' % root_dir) self._full(client_stdin=client_spec) def test_mode_full_stream_renderable_p4branch(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch=ConstantRenderable('render_branch'), p4user='user', p4client='p4_client1', p4passwd='pass', stream=True)) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal Stream:\t//depot/render_branch ''' % root_dir) self._full(client_stdin=client_spec) def test_worker_connection_lost(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass'), dict(revision='100',)) self.expectCommands( 
ExpectShell(workdir='wkdir', command=['p4', '-V']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() def test_ticket_auth(self): self.setupStep(P4(p4port='localhost:12000', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', use_tickets=True)) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self.expectCommands( ExpectShell(workdir='wkdir', command=['p4', '-V']) + 0, # This is the extra step that gets run when using tickets, # and the password is not passed anymore after that. ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-c', 'p4_client1', 'login'], initialStdin='pass\n') + 0, ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-c', 'p4_client1', 'client', '-i'], initialStdin=client_spec) + 0, ExpectShell(workdir='wkdir', command=(['p4', '-p', 'localhost:12000', '-u', 'user', '-c', 'p4_client1', 'sync'])) + 0, ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-c', 'p4_client1', 'changes', '-m1', '#have']) + ExpectShell.log('stdio', stdout="Change 100 on 2013/03/21 by user@machine \'duh\'") + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_repo.py000066400000000000000000000660501413250514000254550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.changes.changes import Change from buildbot.process.properties import Properties from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps.source import repo from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class RepoURL(unittest.TestCase): # testcases taken from old_source/Repo test def oneTest(self, props, expected): p = Properties() p.update(props, "test") r = repo.RepoDownloadsFromProperties(list(props)) self.assertEqual(sorted(r.getRenderingFor(p)), sorted(expected)) def test_parse1(self): self.oneTest( {'a': "repo download test/bla 564/12"}, ["test/bla 564/12"]) def test_parse2(self): self.oneTest( {'a': "repo download test/bla 564/12 repo download test/bla 564/2"}, ["test/bla 564/12", "test/bla 564/2"]) self.oneTest({'a': "repo download test/bla 564/12", 'b': "repo download test/bla 564/2"}, [ "test/bla 564/12", "test/bla 564/2"]) def test_parse3(self): self.oneTest({'a': "repo download test/bla 564/12 repo download " "test/bla 564/2 test/foo 5/1"}, ["test/bla 564/12", "test/bla 564/2", "test/foo 5/1"]) self.oneTest( {'a': "repo download test/bla 564/12"}, ["test/bla 564/12"]) class TestRepo(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.shouldRetry = False 
self.logEnviron = True return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def shouldLogEnviron(self): r = self.logEnviron self.logEnviron = False return r def ExpectShell(self, **kw): if 'workdir' not in kw: kw['workdir'] = 'wkdir' if 'logEnviron' not in kw: kw['logEnviron'] = self.shouldLogEnviron() return ExpectShell(**kw) def mySetupStep(self, **kwargs): if "repoDownloads" not in kwargs: kwargs.update(dict(repoDownloads=repo.RepoDownloadsFromProperties(["repo_download", "repo_download2"]))) self.setupStep( repo.Repo(manifestURL='git://myrepo.com/manifest.git', manifestBranch="mb", manifestFile="mf", **kwargs)) self.build.allChanges = lambda x=None: [] def myRunStep(self, result=SUCCESS, state_string=None): self.expectOutcome(result=result, state_string=state_string) return self.runStep() def expectClobber(self): # stat return 1 so we clobber self.expectCommands( Expect('stat', dict(file='wkdir/.repo', logEnviron=self.logEnviron)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=self.logEnviron)) + 0, Expect('mkdir', dict(dir='wkdir', logEnviron=self.logEnviron)) + 0, ) def expectnoClobber(self): # stat return 0, so nothing self.expectCommands( Expect('stat', dict(file='wkdir/.repo', logEnviron=self.logEnviron)) + 0, ) def expectRepoSync(self, which_fail=-1, breakatfail=False, depth=0, initoptions=None, syncoptions=None, override_commands=None): if initoptions is None: initoptions = [] if syncoptions is None: syncoptions = ["-c"] if override_commands is None: override_commands = [] commands = [ self.ExpectShell( command=[ 'bash', '-c', self.step._getCleanupCommand()]), self.ExpectShell( command=['repo', 'init', '-u', 'git://myrepo.com/manifest.git', '-b', 'mb', '-m', 'mf', '--depth', str(depth)] + initoptions) ] + override_commands + [ self.ExpectShell(command=['repo', 'sync', '--force-sync'] + syncoptions), self.ExpectShell( command=['repo', 'manifest', '-r', '-o', 'manifest-original.xml']) ] for i, command in 
enumerate(commands): self.expectCommands(command + (which_fail == i and 1 or 0)) if which_fail == i and breakatfail: break def test_basic(self): """basic first time repo sync""" self.mySetupStep(repoDownloads=None) self.expectClobber() self.expectRepoSync() return self.myRunStep() def test_basic_depth(self): """basic first time repo sync""" self.mySetupStep(repoDownloads=None, depth=2) self.expectClobber() self.expectRepoSync(depth=2) return self.myRunStep() def test_basic_submodule(self): """basic first time repo sync with submodule""" self.mySetupStep(repoDownloads=None, submodules=True) self.expectClobber() self.expectRepoSync(initoptions=["--submodules"]) return self.myRunStep() def test_update(self): """basic second time repo sync""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync() return self.myRunStep() def test_jobs(self): """basic first time repo sync with jobs""" self.mySetupStep(jobs=2) self.expectClobber() self.expectRepoSync(syncoptions=["-j2", "-c"]) return self.myRunStep() def test_sync_all_branches(self): """basic first time repo sync with all branches""" self.mySetupStep(syncAllBranches=True) self.expectClobber() self.expectRepoSync(syncoptions=[]) return self.myRunStep() def test_manifest_override(self): """repo sync with manifest_override_url property set download via wget """ self.mySetupStep(manifestOverrideUrl="http://u.rl/test.manifest", syncAllBranches=True) self.expectClobber() override_commands = [ Expect( 'stat', dict(file='wkdir/http://u.rl/test.manifest', logEnviron=False)), self.ExpectShell(logEnviron=False, command=['wget', 'http://u.rl/test.manifest', '-O', 'manifest_override.xml']), self.ExpectShell( logEnviron=False, workdir='wkdir/.repo', command=['ln', '-sf', '../manifest_override.xml', 'manifest.xml']) ] self.expectRepoSync(which_fail=2, syncoptions=[], override_commands=override_commands) return self.myRunStep() def test_manifest_override_local(self): """repo sync with manifest_override_url property set copied 
from local FS """ self.mySetupStep(manifestOverrideUrl="test.manifest", syncAllBranches=True) self.expectClobber() override_commands = [ Expect('stat', dict(file='wkdir/test.manifest', logEnviron=False)), self.ExpectShell(logEnviron=False, command=[ 'cp', '-f', 'test.manifest', 'manifest_override.xml']), self.ExpectShell(logEnviron=False, workdir='wkdir/.repo', command=['ln', '-sf', '../manifest_override.xml', 'manifest.xml']) ] self.expectRepoSync( syncoptions=[], override_commands=override_commands) return self.myRunStep() def test_tarball(self): """repo sync using the tarball cache """ self.mySetupStep(tarball="/tarball.tar") self.expectClobber() self.expectCommands( self.ExpectShell(command=['tar', '-xvf', '/tarball.tar']) + 0) self.expectRepoSync() self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '/tarball.tar']) + Expect.log('stdio', stdout=str(10000)) + 0) self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '.']) + Expect.log( 'stdio', stdout=str(10000 + 7 * 24 * 3600)) + 0) return self.myRunStep() def test_create_tarball(self): """repo sync create the tarball if its not here """ self.mySetupStep(tarball="/tarball.tgz") self.expectClobber() self.expectCommands( self.ExpectShell( command=['tar', '-z', '-xvf', '/tarball.tgz']) + 1, self.ExpectShell(command=['rm', '-f', '/tarball.tgz']) + 1, Expect('rmdir', dict(dir='wkdir/.repo', logEnviron=False)) + 1) self.expectRepoSync() self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '/tarball.tgz']) + Expect.log('stdio', stderr="file not found!") + 1, self.ExpectShell(command=['tar', '-z', '-cvf', '/tarball.tgz', '.repo']) + 0) return self.myRunStep() def do_test_update_tarball(self, suffix, option): """repo sync update the tarball cache at the end (tarball older than a week) """ self.mySetupStep(tarball="/tarball." + suffix) self.expectClobber() self.expectCommands( self.ExpectShell(command=['tar'] + option + ['-xvf', '/tarball.' 
+ suffix]) + 0) self.expectRepoSync() self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '/tarball.' + suffix]) + Expect.log('stdio', stdout=str(10000)) + 0, self.ExpectShell(command=['stat', '-c%Y', '.']) + Expect.log( 'stdio', stdout=str(10001 + 7 * 24 * 3600)) + 0, self.ExpectShell(command=['tar'] + option + ['-cvf', '/tarball.' + suffix, '.repo']) + 0) return self.myRunStep() def test_update_tarball(self): self.do_test_update_tarball("tar", []) def test_update_tarball_gz(self): """tarball compression variants""" self.do_test_update_tarball("tar.gz", ["-z"]) def test_update_tarball_tgz(self): self.do_test_update_tarball("tgz", ["-z"]) def test_update_tarball_pigz(self): self.do_test_update_tarball("pigz", ["-I", "pigz"]) def test_update_tarball_bzip(self): self.do_test_update_tarball("tar.bz2", ["-j"]) def test_update_tarball_lzma(self): self.do_test_update_tarball("tar.lzma", ["--lzma"]) def test_update_tarball_lzop(self): self.do_test_update_tarball("tar.lzop", ["--lzop"]) def test_update_tarball_fail1(self, suffix="tar", option=None): """tarball extract fail -> remove the tarball + remove .repo dir """ if option is None: option = [] self.mySetupStep(tarball="/tarball." + suffix) self.expectClobber() self.expectCommands( self.ExpectShell( command=[ 'tar'] + option + ['-xvf', '/tarball.' + suffix]) + 1, self.ExpectShell( command=['rm', '-f', '/tarball.tar']) + 0, Expect( 'rmdir', dict(dir='wkdir/.repo', logEnviron=False)) + 0) self.expectRepoSync() self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '/tarball.' + suffix]) + Expect.log('stdio', stdout=str(10000)) + 0, self.ExpectShell(command=['stat', '-c%Y', '.']) + Expect.log( 'stdio', stdout=str(10001 + 7 * 24 * 3600)) + 0, self.ExpectShell(command=['tar'] + option + ['-cvf', '/tarball.' 
+ suffix, '.repo']) + 0) return self.myRunStep() def test_update_tarball_fail2(self, suffix="tar", option=None): """tarball update fail -> remove the tarball + continue repo download """ if option is None: option = [] self.mySetupStep(tarball="/tarball." + suffix) self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectClobber() self.expectCommands( self.ExpectShell(command=['tar'] + option + ['-xvf', '/tarball.' + suffix]) + 0) self.expectRepoSync() self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '/tarball.' + suffix]) + Expect.log('stdio', stdout=str(10000)) + 0, self.ExpectShell(command=['stat', '-c%Y', '.']) + Expect.log( 'stdio', stdout=str(10001 + 7 * 24 * 3600)) + 0, self.ExpectShell(command=['tar'] + option + ['-cvf', '/tarball.' + suffix, '.repo']) + 1, self.ExpectShell( command=['rm', '-f', '/tarball.tar']) + 0, self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0) return self.myRunStep() def test_repo_downloads(self): """basic repo download, and check that repo_downloaded is updated""" self.mySetupStep() self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0 + Expect.log( 'stdio', stderr="test/bla refs/changes/64/564/12 -> FETCH_HEAD\n") + Expect.log('stdio', stderr="HEAD is now at 0123456789abcdef...\n")) self.expectProperty( "repo_downloaded", "564/12 0123456789abcdef ", "Source") return self.myRunStep() def test_repo_downloads2(self): """2 repo downloads""" self.mySetupStep() self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.build.setProperty("repo_download2", "repo download test/bla2 565/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0, self.ExpectShell( command=['repo', 
'download', 'test/bla2', '565/12']) + 0) return self.myRunStep() def test_repo_download_manifest(self): """2 repo downloads, with one manifest patch""" self.mySetupStep() self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.build.setProperty("repo_download2", "repo download manifest 565/12", "test") self.expectnoClobber() self.expectCommands( self.ExpectShell( command=['bash', '-c', self.step._getCleanupCommand()]) + 0, self.ExpectShell( command=['repo', 'init', '-u', 'git://myrepo.com/manifest.git', '-b', 'mb', '-m', 'mf', '--depth', '0']) + 0, self.ExpectShell( workdir='wkdir/.repo/manifests', command=[ 'git', 'fetch', 'git://myrepo.com/manifest.git', 'refs/changes/65/565/12']) + 0, self.ExpectShell( workdir='wkdir/.repo/manifests', command=['git', 'cherry-pick', 'FETCH_HEAD']) + 0, self.ExpectShell(command=['repo', 'sync', '--force-sync', '-c']) + 0, self.ExpectShell( command=['repo', 'manifest', '-r', '-o', 'manifest-original.xml']) + 0) self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0) return self.myRunStep() def test_repo_downloads_mirror_sync(self): """repo downloads, with mirror synchronization issues""" self.mySetupStep() # we don't really want the test to wait... 
self.step.mirror_sync_sleep = 0.001 self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 1 + Expect.log( "stdio", stderr="fatal: Couldn't find remote ref \n"), self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 1 + Expect.log( "stdio", stderr="fatal: Couldn't find remote ref \n"), self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0) return self.myRunStep() def test_repo_downloads_change_missing(self): """repo downloads, with no actual mirror synchronization issues (still retries 2 times)""" self.mySetupStep() # we don't really want the test to wait... self.step.mirror_sync_sleep = 0.001 self.step.mirror_sync_retry = 1 # on retry once self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 1 + Expect.log( "stdio", stderr="fatal: Couldn't find remote ref \n"), self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 1 + Expect.log( "stdio", stderr="fatal: Couldn't find remote ref \n"), ) return self.myRunStep(result=FAILURE, state_string="repo: change test/bla 564/12 does not exist (failure)") def test_repo_downloads_fail1(self): """repo downloads, cherry-pick returns 1""" self.mySetupStep() self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 1 + Expect.log("stdio", stderr="patch \n"), self.ExpectShell( command=['repo', 'forall', '-c', 'git', 'diff', 'HEAD']) + 0 ) return self.myRunStep(result=FAILURE, state_string="download failed: test/bla 564/12 (failure)") def test_repo_downloads_fail2(self): """repo downloads, 
cherry-pick returns 0 but error in stderr""" self.mySetupStep() self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0 + Expect.log("stdio", stderr="Automatic cherry-pick failed \n"), self.ExpectShell( command=['repo', 'forall', '-c', 'git', 'diff', 'HEAD']) + 0 ) return self.myRunStep(result=FAILURE, state_string="download failed: test/bla 564/12 (failure)") def test_repo_downloads_from_change_source(self): """basic repo download from change source, and check that repo_downloaded is updated""" self.mySetupStep(repoDownloads=repo.RepoDownloadsFromChangeSource()) change = Change(None, None, None, properties={ 'event.change.owner.email': 'dustin@mozilla.com', 'event.change.subject': 'fix 1234', 'event.change.project': 'pr', 'event.change.owner.name': 'Dustin', 'event.change.number': '4321', 'event.change.url': 'http://buildbot.net', 'event.change.branch': 'br', 'event.type': 'patchset-created', 'event.patchSet.revision': 'abcdef', 'event.patchSet.number': '12', 'event.source': 'GerritChangeSource' }) self.build.allChanges = lambda x=None: [change] self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell(command=['repo', 'download', 'pr', '4321/12']) + 0 + Expect.log( 'stdio', stderr="test/bla refs/changes/64/564/12 -> FETCH_HEAD\n") + Expect.log('stdio', stderr="HEAD is now at 0123456789abcdef...\n")) self.expectProperty( "repo_downloaded", "564/12 0123456789abcdef ", "Source") return self.myRunStep() def test_repo_downloads_from_change_source_codebase(self): """basic repo download from change source, and check that repo_downloaded is updated""" self.mySetupStep( repoDownloads=repo.RepoDownloadsFromChangeSource("mycodebase")) change = Change(None, None, None, properties={ 'event.change.owner.email': 'dustin@mozilla.com', 'event.change.subject': 'fix 1234', 
'event.change.project': 'pr', 'event.change.owner.name': 'Dustin', 'event.change.number': '4321', 'event.change.url': 'http://buildbot.net', 'event.change.branch': 'br', 'event.type': 'patchset-created', 'event.patchSet.revision': 'abcdef', 'event.patchSet.number': '12', 'event.source': 'GerritChangeSource' }) # getSourceStamp is faked by SourceStepMixin ss = self.build.getSourceStamp("") ss.changes = [change] self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell(command=['repo', 'download', 'pr', '4321/12']) + 0 + Expect.log( 'stdio', stderr="test/bla refs/changes/64/564/12 -> FETCH_HEAD\n") + Expect.log('stdio', stderr="HEAD is now at 0123456789abcdef...\n")) self.expectProperty( "repo_downloaded", "564/12 0123456789abcdef ", "Source") return self.myRunStep() def test_update_fail1(self): """ fail at cleanup: ignored""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=0, breakatfail=False) return self.myRunStep() def test_update_fail2(self): """fail at repo init: clobber""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=1, breakatfail=True) self.expectClobber() self.expectRepoSync() self.shouldRetry = True return self.myRunStep() def test_update_fail3(self): """ fail at repo sync: clobber""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=2, breakatfail=True) self.expectClobber() self.expectRepoSync() self.shouldRetry = True return self.myRunStep() def test_update_fail4(self): """fail at repo manifest: clobber""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=3, breakatfail=True) self.expectClobber() self.expectRepoSync() self.shouldRetry = True return self.myRunStep() def test_update_doublefail(self): """fail at repo manifest: clobber but still fail""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=3, breakatfail=True) self.expectClobber() self.expectRepoSync(which_fail=3, breakatfail=True) self.shouldRetry = 
True return self.myRunStep(result=FAILURE, state_string="repo failed at: repo manifest (failure)") def test_update_doublefail2(self): """fail at repo sync: clobber but still fail""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=2, breakatfail=True) self.expectClobber() self.expectRepoSync(which_fail=2, breakatfail=True) self.shouldRetry = True return self.myRunStep(result=FAILURE, state_string="repo failed at: repo sync (failure)") def test_update_doublefail3(self): """fail at repo init: clobber but still fail""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=1, breakatfail=True) self.expectClobber() self.expectRepoSync(which_fail=1, breakatfail=True) self.shouldRetry = True return self.myRunStep(result=FAILURE, state_string="repo failed at: repo init (failure)") def test_basic_fail(self): """fail at repo init: no need to re-clobber but still fail""" self.mySetupStep() self.expectClobber() self.expectRepoSync(which_fail=1, breakatfail=True) self.shouldRetry = True return self.myRunStep(result=FAILURE, state_string="repo failed at: repo init (failure)") buildbot-3.4.0/master/buildbot/test/unit/steps/test_source_svn.py000066400000000000000000002624541413250514000253240ustar00rootroot00000000000000# -*- coding: utf8 -*- # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import error from twisted.python.reflect import namedModule from twisted.trial import unittest from buildbot import config from buildbot.interfaces import WorkerSetupError from buildbot.process import buildstep from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import svn from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.properties import ConstantRenderable class TestSVN(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): svn_st_xml = """ """ svn_st_xml_corrupt = """ """ svn_st_xml_empty = """ """ svn_info_stdout_xml = """ http://svn.red-bean.com/repos/test http://svn.red-bean.com/repos/test 5e7d134a-54fb-0310-bd04-b611643e5c25 normal infinity sally 2003-01-15T23:35:12.847647Z """ svn_info_stdout_xml_nonintegerrevision = """ http://svn.red-bean.com/repos/test http://svn.red-bean.com/repos/test 5e7d134a-54fb-0310-bd04-b611643e5c25 normal infinity sally 2003-01-15T23:35:12.847647Z """ def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def patch_workerVersionIsOlderThan(self, result): self.patch(svn.SVN, 'workerVersionIsOlderThan', lambda x, y, z: result) def test_no_repourl(self): with self.assertRaises(config.ConfigErrors): svn.SVN() def test_incorrect_mode(self): with self.assertRaises(config.ConfigErrors): svn.SVN(repourl='http://svn.local/app/trunk', mode='invalid') def test_incorrect_method(self): with self.assertRaises(config.ConfigErrors): svn.SVN(repourl='http://svn.local/app/trunk', 
method='invalid') def test_svn_not_installed(self): self.setupStep(svn.SVN(repourl='http://svn.local/app/trunk')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 1, ) self.expectException(WorkerSetupError) return self.runStep() def test_corrupt_xml(self): self.setupStep(svn.SVN(repourl='http://svn.local/app/trunk')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_corrupt) + 0, ) self.expectOutcome(result=FAILURE) return self.runStep() @defer.inlineCallbacks def test_revision_noninteger(self): svnTestStep = svn.SVN(repourl='http://svn.local/app/trunk') self.setupStep(svnTestStep) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml_nonintegerrevision) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', 'a10', 'SVN') yield self.runStep() revision = self.step.getProperty('got_revision') with 
self.assertRaises(ValueError): int(revision) def test_revision_missing(self): """Fail if 'revision' tag isn't there""" svn_info_stdout = self.svn_info_stdout_xml.replace('entry', 'Blah') svnTestStep = svn.SVN(repourl='http://svn.local/app/trunk') self.setupStep(svnTestStep) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=svn_info_stdout) + 0, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_mode_incremental(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') 
return self.runStep() def test_mode_incremental_timeout(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', timeout=1, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_renderable(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk'), mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) 
self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_canonical(self): self.setupStep( svn.SVN(repourl='http://svn.local/trunk/test app', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log( 'stdio', stdout='http://svn.local/trunk/test%20app') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_not_updatable(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental',)) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_retry(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental', retry=(0, 1))) self.expectCommands( ExpectShell(workdir='wkdir', 
command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_not_updatable_svninfo_mismatch(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log( 'stdio', # expecting ../trunk/app stdout='http://svn.local/branch/foo/app') + 0, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', 
mode='incremental'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_win32path(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.build.path_module = namedModule("ntpath") self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file=r'wkdir\.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file=r'wkdir\.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_preferLastChangedRev(self): """Give the last-changed rev if 'preferLastChangedRev' is set""" 
self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', preferLastChangedRev=True, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '90', 'SVN') return self.runStep() def test_mode_incremental_preferLastChangedRev_butMissing(self): """If 'preferLastChangedRev' is set, but missing, fall back to the regular revision value.""" svn_info_stdout = self.svn_info_stdout_xml.replace('commit', 'Blah') self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', preferLastChangedRev=True, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', 
'--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=svn_info_stdout) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clobber(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clobber_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', 
mode='full', method='fresh', depth='infinite')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_retry(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 
'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', depth='infinite'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_keep_on_purge(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', keep_on_purge=['svn_external_path/unversioned_file1'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, 
ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file2_uniçode'], 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean'), dict( revision='100', )) 
self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_not_updatable(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_not_updatable_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, 
Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_old_rmdir(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.patch_workerVersionIsOlderThan(True) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': 'wkdir/svn_external_path/unversioned_file1', 'logEnviron': True, 'timeout': 1200}) + 0, Expect('rmdir', {'dir': 'wkdir/svn_external_path/unversioned_file2_uniçode', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_new_rmdir(self): self.setupStep( 
svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.patch_workerVersionIsOlderThan(False) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file1', 'wkdir/svn_external_path/unversioned_file2_uniçode'], 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy', codebase='app')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/app/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source/app', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='source/app', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source/app', 'todir': 'wkdir', 'logEnviron': True}) + 0, 
ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', {'app': '100'}, 'SVN') return self.runStep() def test_mode_full_copy_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='source', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', 
'--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_patch(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', dict(dir=['wkdir/svn_external_path/unversioned_file1', 'wkdir/svn_external_path/unversioned_file2_uniçode'], logEnviron=True, timeout=1200)) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', 'source', 'wkdir']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', 
dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_patch_worker_2_16(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', dict(dir=['wkdir/svn_external_path/unversioned_file1', 'wkdir/svn_external_path/unversioned_file2_uniçode'], logEnviron=True, timeout=1200)) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', 'source', 'wkdir']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', 
logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_timeout(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', timeout=1, mode='full', method='export')) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', timeout=1, command=['svn', 'export', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, 
ExpectShell(workdir='source', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', '--revision', '100', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_auth(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export', username='svn_username', password='svn_password')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX')]) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX')]) + 0, ExpectShell(workdir='', command=['svn', 'export', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX'), 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_with_env(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'], env={'abc': '123'})) 
self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version'], env={'abc': '123'}) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_logEnviron(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'], logEnviron=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version'], logEnviron=False) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=False)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=False)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], logEnviron=False) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml'], logEnviron=False) + 
ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_command_fails(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_bogus_svnversion(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log( 'stdio', stdout='' '' 'http://svn.local/app/trunk' '') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', 
stdout='1x0y0') + 0, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_rmdir_fails_clobber(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_rmdir_fails_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_cpdir_fails_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True}) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_rmdir_fails_purge(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', keep_on_purge=['svn_external_path/unversioned_file1'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 
0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file2_uniçode'], 'logEnviron': True, 'timeout': 1200}) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_worker_connection_lost(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() def test_empty_password(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', '', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', '', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 
0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_omit_password(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--random']) + ExpectShell.log('stdio', stdout='http://svn.local/app/trunk') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() class TestGetUnversionedFiles(unittest.TestCase): def test_getUnversionedFiles_does_not_list_externals(self): svn_st_xml = """ """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEqual( ["svn_external_path/unversioned_file"], unversioned_files) def test_getUnversionedFiles_does_not_list_missing(self): svn_st_xml = """ """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEqual([], unversioned_files) def test_getUnversionedFiles_corrupted_xml(self): svn_st_xml_corrupt = """ """ with self.assertRaises(buildstep.BuildStepFailed): list(svn.SVN.getUnversionedFiles(svn_st_xml_corrupt, [])) def test_getUnversionedFiles_no_path(self): svn_st_xml = """ """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEqual([], unversioned_files) def test_getUnversionedFiles_no_item(self): svn_st_xml = """ """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEqual( ["svn_external_path/unversioned_file"], 
unversioned_files) def test_getUnversionedFiles_unicode(self): svn_st_xml = """ """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEqual( ["Path/To/Content/Developers/François"], unversioned_files) class TestSvnUriCanonicalize(unittest.TestCase): # svn.SVN.svnUriCanonicalize() test method factory # # given input string and expected result create a test method that # will call svn.SVN.svnUriCanonicalize() with the input and check # that expected result is returned # # @param input: test input # @param exp: expected result def _makeSUCTest(input, exp): return lambda self: self.assertEqual( svn.SVN.svnUriCanonicalize(input), exp) test_empty = _makeSUCTest( "", "") test_canonical = _makeSUCTest( "http://foo.com/bar", "http://foo.com/bar") test_lc_scheme = _makeSUCTest( "hTtP://foo.com/bar", "http://foo.com/bar") test_trailing_dot = _makeSUCTest( "http://foo.com./bar", "http://foo.com/bar") test_lc_hostname = _makeSUCTest( "http://foO.COm/bar", "http://foo.com/bar") test_lc_hostname_with_user = _makeSUCTest( "http://Jimmy@fOO.Com/bar", "http://Jimmy@foo.com/bar") test_lc_hostname_with_user_pass = _makeSUCTest( "http://Jimmy:Sekrit@fOO.Com/bar", "http://Jimmy:Sekrit@foo.com/bar") test_trailing_slash = _makeSUCTest( "http://foo.com/bar/", "http://foo.com/bar") test_trailing_slash_scheme = _makeSUCTest( "http://", "http://") test_trailing_slash_hostname = _makeSUCTest( "http://foo.com/", "http://foo.com") test_trailing_double_slash = _makeSUCTest( "http://foo.com/x//", "http://foo.com/x") test_double_slash = _makeSUCTest( "http://foo.com/x//y", "http://foo.com/x/y") test_slash = _makeSUCTest( "/", "/") test_dot = _makeSUCTest( "http://foo.com/x/./y", "http://foo.com/x/y") test_dot_dot = _makeSUCTest( "http://foo.com/x/../y", "http://foo.com/y") test_double_dot_dot = _makeSUCTest( "http://foo.com/x/y/../../z", "http://foo.com/z") test_dot_dot_root = _makeSUCTest( "http://foo.com/../x/y", "http://foo.com/x/y") test_quote_spaces = 
_makeSUCTest( "svn+ssh://user@host:123/My Stuff/file.doc", "svn+ssh://user@host:123/My%20Stuff/file.doc") test_remove_port_80 = _makeSUCTest( "http://foo.com:80/bar", "http://foo.com/bar") test_dont_remove_port_80 = _makeSUCTest( "https://foo.com:80/bar", "https://foo.com:80/bar") # not http test_remove_port_443 = _makeSUCTest( "https://foo.com:443/bar", "https://foo.com/bar") test_dont_remove_port_443 = _makeSUCTest( "svn://foo.com:443/bar", "svn://foo.com:443/bar") # not https test_remove_port_3690 = _makeSUCTest( "svn://foo.com:3690/bar", "svn://foo.com/bar") test_dont_remove_port_3690 = _makeSUCTest( "http://foo.com:3690/bar", "http://foo.com:3690/bar") # not svn test_dont_remove_port_other = _makeSUCTest( "https://foo.com:2093/bar", "https://foo.com:2093/bar") test_quote_funny_chars = _makeSUCTest( "http://foo.com/\x10\xe6%", "http://foo.com/%10%E6%25") test_overquoted = _makeSUCTest( "http://foo.com/%68%65%6c%6c%6f%20%77%6f%72%6c%64", "http://foo.com/hello%20world") buildbot-3.4.0/master/buildbot/test/unit/steps/test_subunit.py000066400000000000000000000142331413250514000246150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import io import re import sys from twisted.trial import unittest from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps import subunit from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin try: from subunit import TestProtocolClient except ImportError: TestProtocolClient = None class FakeTest: def __init__(self, id): self._id = id def id(self): return self._id def create_error(name): try: int('_' + name) return None except ValueError: # We don't want traceback lines with real paths in the logs exctype, value, _ = sys.exc_info() return (exctype, value, None) class TestSubUnit(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): if TestProtocolClient is None: raise unittest.SkipTest("Need to install python-subunit to test subunit step") self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_empty(self): self.setupStep(subunit.SubunitShellCommand(command='test')) self.expectCommands( ExpectShell(workdir='wkdir', command="test") + 0 ) self.expectOutcome(result=SUCCESS, state_string="shell no tests run") return self.runStep() def test_empty_error(self): self.setupStep(subunit.SubunitShellCommand(command='test', failureOnNoTests=True)) self.expectCommands( ExpectShell(workdir='wkdir', command="test") + 0 ) self.expectOutcome(result=FAILURE, state_string="shell no tests run (failure)") return self.runStep() def test_success(self): stream = io.BytesIO() client = TestProtocolClient(stream) test = FakeTest(id='test1') client.startTest(test) client.stopTest(test) self.setupStep(subunit.SubunitShellCommand(command='test')) self.expectCommands( ExpectShell(workdir='wkdir', command="test") + Expect.log('stdio', stdout=stream.getvalue()) + 0 ) 
self.expectOutcome(result=SUCCESS, state_string="shell 1 test passed") return self.runStep() def test_error(self): stream = io.BytesIO() client = TestProtocolClient(stream) test = FakeTest(id='test1') client.startTest(test) client.addError(test, create_error('error1')) client.stopTest(test) self.setupStep(subunit.SubunitShellCommand(command='test')) self.expectCommands( ExpectShell(workdir='wkdir', command="test") + Expect.log('stdio', stdout=stream.getvalue()) + 0 ) self.expectOutcome(result=FAILURE, state_string="shell Total 1 test(s) 1 error (failure)") self.expectLogfile('problems', re.compile(r'''test1 testtools.testresult.real._StringException:.*ValueError: invalid literal for int\(\) with base 10: '_error1' .*''', re.MULTILINE | re.DOTALL)) # noqa pylint: disable=line-too-long return self.runStep() def test_multiple_errors(self): stream = io.BytesIO() client = TestProtocolClient(stream) test1 = FakeTest(id='test1') test2 = FakeTest(id='test2') client.startTest(test1) client.addError(test1, create_error('error1')) client.stopTest(test1) client.startTest(test2) client.addError(test2, create_error('error2')) client.stopTest(test2) self.setupStep(subunit.SubunitShellCommand(command='test')) self.expectCommands( ExpectShell(workdir='wkdir', command="test") + Expect.log('stdio', stdout=stream.getvalue()) + 0 ) self.expectOutcome(result=FAILURE, state_string="shell Total 2 test(s) 2 errors (failure)") self.expectLogfile('problems', re.compile(r'''test1 testtools.testresult.real._StringException:.*ValueError: invalid literal for int\(\) with base 10: '_error1' test2 testtools.testresult.real._StringException:.*ValueError: invalid literal for int\(\) with base 10: '_error2' .*''', re.MULTILINE | re.DOTALL)) # noqa pylint: disable=line-too-long return self.runStep() def test_warnings(self): stream = io.BytesIO() client = TestProtocolClient(stream) test1 = FakeTest(id='test1') test2 = FakeTest(id='test2') client.startTest(test1) client.stopTest(test1) 
client.addError(test2, create_error('error2')) client.stopTest(test2) self.setupStep(subunit.SubunitShellCommand(command='test')) self.expectCommands( ExpectShell(workdir='wkdir', command="test") + Expect.log('stdio', stdout=stream.getvalue()) + 0 ) self.expectOutcome(result=SUCCESS, # N.B. not WARNINGS state_string="shell 1 test passed") # note that the warnings list is ignored.. self.expectLogfile('warnings', re.compile(r'''error: test2 \[.* ValueError: invalid literal for int\(\) with base 10: '_error2' \] ''', re.MULTILINE | re.DOTALL)) # noqa pylint: disable=line-too-long return self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_transfer.py000066400000000000000000001260461413250514000247560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import os import shutil import stat import tarfile import tempfile from io import BytesIO from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process import remotetransfer from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.steps import transfer from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes def uploadString(string, timestamp=None): def behavior(command): writer = command.args['writer'] writer.remote_write(string + "\n") writer.remote_close() if timestamp: writer.remote_utime(timestamp) return behavior def downloadString(memoizer, timestamp=None): def behavior(command): reader = command.args['reader'] read = reader.remote_read(1000) # save what we read so we can check it memoizer(read) reader.remote_close() if timestamp: reader.remote_utime(timestamp) return read return behavior def uploadTarFile(filename, **members): def behavior(command): f = BytesIO() archive = tarfile.TarFile(fileobj=f, name=filename, mode='w') for name, content in members.items(): content = unicode2bytes(content) archive.addfile(tarfile.TarInfo(name), BytesIO(content)) writer = command.args['writer'] writer.remote_write(f.getvalue()) writer.remote_unpack() return behavior class UploadError: def __init__(self, behavior): self.behavior = behavior self.writer = None def __call__(self, command): self.writer = command.args['writer'] self.writer.cancel = Mock(wraps=self.writer.cancel) self.behavior(command) raise RuntimeError('uh oh') class TestFileUpload(steps.BuildStepMixin, 
TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() fd, self.destfile = tempfile.mkstemp() os.close(fd) os.unlink(self.destfile) return self.setUpBuildStep() def tearDown(self): if os.path.exists(self.destfile): os.unlink(self.destfile) return self.tearDownBuildStep() def testConstructorModeType(self): with self.assertRaises(config.ConfigErrors): transfer.FileUpload(workersrc=__file__, masterdest='xyz', mode='g+rwx') def testBasic(self): self.setupStep( transfer.FileUpload(workersrc='srcfile', masterdest=self.destfile)) self.expectCommands( Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading srcfile") d = self.runStep() return d def testWorker2_16(self): self.setupStep( transfer.FileUpload(workersrc='srcfile', masterdest=self.destfile), worker_version={'*': '2.16'}) self.expectCommands( Expect('uploadFile', dict( slavesrc="srcfile", workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading srcfile") d = self.runStep() return d @defer.inlineCallbacks def testTimestamp(self): self.setupStep( transfer.FileUpload(workersrc=__file__, masterdest=self.destfile, keepstamp=True)) timestamp = (os.path.getatime(__file__), os.path.getmtime(__file__)) self.expectCommands( Expect('uploadFile', dict( workersrc=__file__, workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=True, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString('test', timestamp=timestamp)) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading {}".format(os.path.basename(__file__)) ) yield self.runStep() desttimestamp = 
(os.path.getatime(self.destfile), os.path.getmtime(self.destfile)) srctimestamp = [int(t) for t in timestamp] desttimestamp = [int(d) for d in desttimestamp] self.assertEqual(srctimestamp[0], desttimestamp[0]) self.assertEqual(srctimestamp[1], desttimestamp[1]) def testDescriptionDone(self): self.setupStep( transfer.FileUpload(workersrc=__file__, masterdest=self.destfile, url="http://server/file", descriptionDone="Test File Uploaded")) self.step.addURL = Mock() self.expectCommands( Expect('uploadFile', dict( workersrc=__file__, workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="Test File Uploaded") d = self.runStep() return d @defer.inlineCallbacks def testURL(self): self.setupStep(transfer.FileUpload(workersrc=__file__, masterdest=self.destfile, url="http://server/file")) self.step.addURL = Mock() self.expectCommands( Expect('uploadFile', dict( workersrc=__file__, workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading {}".format(os.path.basename(__file__)) ) yield self.runStep() self.step.addURL.assert_called_once_with( os.path.basename(self.destfile), "http://server/file") @defer.inlineCallbacks def testURLText(self): self.setupStep(transfer.FileUpload(workersrc=__file__, masterdest=self.destfile, url="http://server/file", urlText="testfile")) self.step.addURL = Mock() self.expectCommands( Expect('uploadFile', dict( workersrc=__file__, workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading {}".format(os.path.basename(__file__)) ) yield self.runStep() 
self.step.addURL.assert_called_once_with( "testfile", "http://server/file") def testFailure(self): self.setupStep( transfer.FileUpload(workersrc='srcfile', masterdest=self.destfile)) self.expectCommands( Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + 1) self.expectOutcome( result=FAILURE, state_string="uploading srcfile (failure)") d = self.runStep() return d @defer.inlineCallbacks def testException(self): self.setupStep( transfer.FileUpload(workersrc='srcfile', masterdest=self.destfile)) behavior = UploadError(uploadString("Hello world!")) self.expectCommands( Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(behavior)) self.expectOutcome( result=EXCEPTION, state_string="uploading srcfile (exception)") yield self.runStep() self.assertEqual(behavior.writer.cancel.called, True) self.assertEqual( len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_interrupt(self): self.setupStep(transfer.FileUpload(workersrc='srcfile', masterdest=self.destfile)) self.expectCommands( Expect('uploadFile', {'workersrc': 'srcfile', 'workdir': 'wkdir', 'blocksize': 262144, 'maxsize': None, 'keepstamp': False, 'writer': ExpectRemoteRef(remotetransfer.FileWriter)}, interrupted=True) + 0) self.interrupt_nth_remote_command(0) self.expectOutcome(result=CANCELLED, state_string="uploading srcfile (cancelled)") self.expectLogfile('interrupt', 'interrupt reason') yield self.runStep() def test_init_workersrc_keyword(self): step = transfer.FileUpload( workersrc='srcfile', masterdest='dstfile') self.assertEqual(step.workersrc, 'srcfile') def test_init_workersrc_positional(self): step = transfer.FileUpload('srcfile', 'dstfile') self.assertEqual(step.workersrc, 'srcfile') def test_init_positional_args(self): with 
self.assertRaises(TypeError): transfer.FileUpload() with self.assertRaises(TypeError): transfer.FileUpload('src') class TestDirectoryUpload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.destdir = os.path.abspath('destdir') if os.path.exists(self.destdir): shutil.rmtree(self.destdir) return self.setUpBuildStep() def tearDown(self): if os.path.exists(self.destdir): shutil.rmtree(self.destdir) return self.tearDownBuildStep() def testBasic(self): self.setupStep( transfer.DirectoryUpload(workersrc="srcdir", masterdest=self.destdir)) self.expectCommands( Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading srcdir") d = self.runStep() return d def testWorker2_16(self): self.setupStep( transfer.DirectoryUpload( workersrc="srcdir", masterdest=self.destdir), worker_version={'*': '2.16'}) self.expectCommands( Expect('uploadDirectory', dict( slavesrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading srcdir") d = self.runStep() return d @defer.inlineCallbacks def test_url(self): self.setupStep(transfer.DirectoryUpload(workersrc="srcdir", masterdest=self.destdir, url="http://server/dir")) self.step.addURL = Mock() self.expectCommands( Expect('uploadDirectory', {'workersrc': 'srcdir', 'workdir': 'wkdir', 'blocksize': 16384, 'compress': None, 'maxsize': None, 'writer': ExpectRemoteRef(remotetransfer.DirectoryWriter)}) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading srcdir") yield 
self.runStep() self.step.addURL.assert_called_once_with("destdir", "http://server/dir") @defer.inlineCallbacks def test_url_text(self): self.setupStep(transfer.DirectoryUpload(workersrc="srcdir", masterdest=self.destdir, url="http://server/dir", urlText='url text')) self.step.addURL = Mock() self.expectCommands( Expect('uploadDirectory', {'workersrc': 'srcdir', 'workdir': 'wkdir', 'blocksize': 16384, 'compress': None, 'maxsize': None, 'writer': ExpectRemoteRef(remotetransfer.DirectoryWriter)}) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading srcdir") yield self.runStep() self.step.addURL.assert_called_once_with("url text", "http://server/dir") @defer.inlineCallbacks def testFailure(self): self.setupStep( transfer.DirectoryUpload(workersrc="srcdir", masterdest=self.destdir)) self.expectCommands( Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + 1) self.expectOutcome(result=FAILURE, state_string="uploading srcdir (failure)") yield self.runStep() @defer.inlineCallbacks def testException(self): self.setupStep( transfer.DirectoryUpload(workersrc='srcdir', masterdest=self.destdir)) behavior = UploadError(uploadTarFile('fake.tar', test="Hello world!")) self.expectCommands( Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(behavior)) self.expectOutcome( result=EXCEPTION, state_string="uploading srcdir (exception)") yield self.runStep() self.assertEqual(behavior.writer.cancel.called, True) self.assertEqual( len(self.flushLoggedErrors(RuntimeError)), 1) def test_init_workersrc_keyword(self): step = transfer.DirectoryUpload( workersrc='srcfile', masterdest='dstfile') self.assertEqual(step.workersrc, 'srcfile') def 
test_init_workersrc_positional(self): step = transfer.DirectoryUpload('srcfile', 'dstfile') self.assertEqual(step.workersrc, 'srcfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.DirectoryUpload() with self.assertRaises(TypeError): transfer.DirectoryUpload('src') class TestMultipleFileUpload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.destdir = os.path.abspath('destdir') if os.path.exists(self.destdir): shutil.rmtree(self.destdir) return self.setUpBuildStep() def tearDown(self): if os.path.exists(self.destdir): shutil.rmtree(self.destdir) return self.tearDownBuildStep() def testEmpty(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=[], masterdest=self.destdir)) self.expectCommands() self.expectOutcome(result=SKIPPED, state_string="finished (skipped)") d = self.runStep() return d def testFile(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=["srcfile"], masterdest=self.destdir)) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading 1 file") d = self.runStep() return d def testDirectory(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=["srcdir"], masterdest=self.destdir)) self.expectCommands( Expect('stat', dict(file="srcdir", workdir='wkdir')) + Expect.update('stat', [stat.S_IFDIR, 99, 99]) + 0, Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome(result=SUCCESS, 
state_string="uploading 1 file") d = self.runStep() return d @defer.inlineCallbacks def test_not_existing_path(self): self.setupStep(transfer.MultipleFileUpload(workersrcs=["srcdir"], masterdest=self.destdir)) self.expectCommands( Expect('stat', {'file': "srcdir", 'workdir': 'wkdir'}) + 1) self.expectOutcome(result=FAILURE, state_string="uploading 1 file (failure)") self.expectLogfile('stderr', "File wkdir/srcdir not available at worker") yield self.runStep() @defer.inlineCallbacks def test_special_path(self): self.setupStep(transfer.MultipleFileUpload(workersrcs=["srcdir"], masterdest=self.destdir)) self.expectCommands( Expect('stat', {'file': "srcdir", 'workdir': 'wkdir'}) + Expect.update('stat', [0, 99, 99]) + 0) self.expectOutcome(result=FAILURE, state_string="uploading 1 file (failure)") self.expectLogfile('stderr', 'srcdir is neither a regular file, nor a directory') yield self.runStep() def testMultiple(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=["srcfile", "srcdir"], masterdest=self.destdir)) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0, Expect('stat', dict(file="srcdir", workdir='wkdir')) + Expect.update('stat', [stat.S_IFDIR, 99, 99]) + 0, Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading 2 files") d = self.runStep() return d def testMultipleString(self): self.setupStep( transfer.MultipleFileUpload(workersrcs="srcfile", masterdest=self.destdir)) self.expectCommands( Expect('stat', 
dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading 1 file") d = self.runStep() return d def testGlob(self): self.setupStep( transfer.MultipleFileUpload( workersrcs=["src*"], masterdest=self.destdir, glob=True)) self.expectCommands( Expect('glob', dict(path=os.path.join( 'wkdir', 'src*'), logEnviron=False)) + Expect.update('files', ["srcfile"]) + 0, Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0, ) self.expectOutcome( result=SUCCESS, state_string="uploading 1 file") d = self.runStep() return d def testFailedGlob(self): self.setupStep( transfer.MultipleFileUpload( workersrcs=["src*"], masterdest=self.destdir, glob=True)) self.expectCommands( Expect('glob', {'path': os.path.join( 'wkdir', 'src*'), 'logEnviron': False}) + Expect.update('files', []) + 1, ) self.expectOutcome( result=SKIPPED, state_string="uploading 0 files (skipped)") d = self.runStep() return d def testFileWorker2_16(self): self.setupStep( transfer.MultipleFileUpload( workersrcs=["srcfile"], masterdest=self.destdir), worker_version={'*': '2.16'}) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( slavesrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) 
self.expectOutcome(result=SUCCESS, state_string="uploading 1 file") d = self.runStep() return d def testDirectoryWorker2_16(self): self.setupStep( transfer.MultipleFileUpload( workersrcs=["srcdir"], masterdest=self.destdir), worker_version={'*': '2.16'}) self.expectCommands( Expect('stat', dict(file="srcdir", workdir='wkdir')) + Expect.update('stat', [stat.S_IFDIR, 99, 99]) + 0, Expect('uploadDirectory', dict( slavesrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading 1 file") d = self.runStep() return d def testMultipleWorker2_16(self): self.setupStep( transfer.MultipleFileUpload( workersrcs=["srcfile", "srcdir"], masterdest=self.destdir), worker_version={'*': '2.16'}) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( slavesrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0, Expect('stat', dict(file="srcdir", workdir='wkdir')) + Expect.update('stat', [stat.S_IFDIR, 99, 99]) + 0, Expect('uploadDirectory', dict( slavesrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading 2 files") d = self.runStep() return d @defer.inlineCallbacks def test_url(self): self.setupStep(transfer.MultipleFileUpload(workersrcs=["srcfile"], masterdest=self.destdir, url="http://server/dir")) self.step.addURL = Mock() self.expectCommands( Expect('stat', {'file': "srcfile", 'workdir': 'wkdir'}) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, 
Expect('uploadFile', {'workersrc': "srcfile", 'workdir': 'wkdir', 'blocksize': 16384, 'maxsize': None, 'keepstamp': False, 'writer': ExpectRemoteRef(remotetransfer.FileWriter)}) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading 1 file") yield self.runStep() self.step.addURL.assert_called_once_with("destdir", "http://server/dir") @defer.inlineCallbacks def test_url_text(self): self.setupStep(transfer.MultipleFileUpload(workersrcs=["srcfile"], masterdest=self.destdir, url="http://server/dir", urlText='url text')) self.step.addURL = Mock() self.expectCommands( Expect('stat', {'file': "srcfile", 'workdir': 'wkdir'}) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', {'workersrc': "srcfile", 'workdir': 'wkdir', 'blocksize': 16384, 'maxsize': None, 'keepstamp': False, 'writer': ExpectRemoteRef(remotetransfer.FileWriter)}) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading 1 file") yield self.runStep() self.step.addURL.assert_called_once_with("url text", "http://server/dir") def testFailure(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=["srcfile", "srcdir"], masterdest=self.destdir)) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + 1) self.expectOutcome( result=FAILURE, state_string="uploading 2 files (failure)") d = self.runStep() return d @defer.inlineCallbacks def testException(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=["srcfile", "srcdir"], masterdest=self.destdir)) behavior = UploadError(uploadString("Hello world!")) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, 
Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(behavior)) self.expectOutcome( result=EXCEPTION, state_string="uploading 2 files (exception)") yield self.runStep() self.assertEqual(behavior.writer.cancel.called, True) self.assertEqual( len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def testSubclass(self): class CustomStep(transfer.MultipleFileUpload): uploadDone = Mock(return_value=None) allUploadsDone = Mock(return_value=None) step = CustomStep( workersrcs=["srcfile", "srcdir"], masterdest=self.destdir) self.setupStep(step) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0, Expect('stat', dict(file="srcdir", workdir='wkdir')) + Expect.update('stat', [stat.S_IFDIR, 99, 99]) + 0, Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading 2 files") yield self.runStep() def checkCalls(res): self.assertEqual(step.uploadDone.call_count, 2) self.assertEqual(step.uploadDone.call_args_list[0], ((SUCCESS, 'srcfile', os.path.join(self.destdir, 'srcfile')), {})) self.assertEqual(step.uploadDone.call_args_list[1], ((SUCCESS, 'srcdir', os.path.join(self.destdir, 'srcdir')), {})) self.assertEqual(step.allUploadsDone.call_count, 1) self.assertEqual(step.allUploadsDone.call_args_list[0], ((SUCCESS, ['srcfile', 'srcdir'], self.destdir), {})) def test_init_workersrcs_keyword(self): step = 
transfer.MultipleFileUpload( workersrcs=['srcfile'], masterdest='dstfile') self.assertEqual(step.workersrcs, ['srcfile']) def test_init_workersrcs_positional(self): step = transfer.MultipleFileUpload(['srcfile'], 'dstfile') self.assertEqual(step.workersrcs, ['srcfile']) def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.MultipleFileUpload() with self.assertRaises(TypeError): transfer.MultipleFileUpload(['srcfile']) class TestFileDownload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() fd, self.destfile = tempfile.mkstemp() os.close(fd) os.unlink(self.destfile) return self.setUpBuildStep() def tearDown(self): if os.path.exists(self.destfile): os.unlink(self.destfile) return self.tearDownBuildStep() def test_init_workerdest_keyword(self): step = transfer.FileDownload( mastersrc='srcfile', workerdest='dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_workerdest_positional(self): step = transfer.FileDownload('srcfile', 'dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.FileDownload() with self.assertRaises(TypeError): transfer.FileDownload('srcfile') @defer.inlineCallbacks def testBasic(self): master_file = __file__ self.setupStep( transfer.FileDownload( mastersrc=master_file, workerdest=self.destfile)) # A place to store what gets read read = [] self.expectCommands( Expect('downloadFile', dict( workerdest=self.destfile, workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.FileReader))) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to {0}".format( os.path.basename(self.destfile))) yield self.runStep() with open(master_file, "rb") as f: contents = f.read() # Only first 1000 bytes transferred in downloadString() helper contents = contents[:1000] 
self.assertEqual(b''.join(read), contents) @defer.inlineCallbacks def testBasicWorker2_16(self): master_file = __file__ self.setupStep( transfer.FileDownload( mastersrc=master_file, workerdest=self.destfile), worker_version={'*': '2.16'}) # A place to store what gets read read = [] self.expectCommands( Expect('downloadFile', dict( slavedest=self.destfile, workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.FileReader))) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to {0}".format( os.path.basename(self.destfile))) yield self.runStep() def checkCalls(res): with open(master_file, "rb") as f: contents = f.read() # Only first 1000 bytes transferred in downloadString() helper contents = contents[:1000] self.assertEqual(b''.join(read), contents) @defer.inlineCallbacks def test_no_file(self): self.setupStep(transfer.FileDownload(mastersrc='not existing file', workerdest=self.destfile)) self.expectCommands() self.expectOutcome(result=FAILURE, state_string="downloading to {0} (failure)".format( os.path.basename(self.destfile))) self.expectLogfile('stderr', "File 'not existing file' not available at master") yield self.runStep() class TestStringDownload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() # check that ConfigErrors is raised on invalid 'mode' argument def testModeConfError(self): with self.assertRaisesRegex(config.ConfigErrors, "StringDownload step's mode must be an integer or None," " got 'not-a-number'"): transfer.StringDownload("string", "file", mode="not-a-number") @defer.inlineCallbacks def testBasic(self): self.setupStep(transfer.StringDownload("Hello World", "hello.txt")) self.step.worker = Mock() self.step.remote = Mock() # A place to store what gets read read = [] self.expectCommands( Expect('downloadFile', dict( 
workerdest="hello.txt", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader))) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to hello.txt") yield self.runStep() def checkCalls(res): self.assertEqual(b''.join(read), b"Hello World") @defer.inlineCallbacks def testBasicWorker2_16(self): self.setupStep( transfer.StringDownload("Hello World", "hello.txt"), worker_version={'*': '2.16'}) self.step.worker = Mock() self.step.remote = Mock() # A place to store what gets read read = [] self.expectCommands( Expect('downloadFile', dict( slavedest="hello.txt", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader))) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to hello.txt") yield self.runStep() self.assertEqual(b''.join(read), b"Hello World") def testFailure(self): self.setupStep(transfer.StringDownload("Hello World", "hello.txt")) self.expectCommands( Expect('downloadFile', dict( workerdest="hello.txt", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader))) + 1) self.expectOutcome( result=FAILURE, state_string="downloading to hello.txt (failure)") return self.runStep() def test_init_workerdest_keyword(self): step = transfer.StringDownload('srcfile', workerdest='dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_workerdest_positional(self): step = transfer.StringDownload('srcfile', 'dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.StringDownload() with self.assertRaises(TypeError): transfer.StringDownload('srcfile') class TestJSONStringDownload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return 
self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def testBasic(self): msg = dict(message="Hello World") self.setupStep(transfer.JSONStringDownload(msg, "hello.json")) self.step.worker = Mock() self.step.remote = Mock() # A place to store what gets read read = [] self.expectCommands( Expect('downloadFile', dict( workerdest="hello.json", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader)) ) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to hello.json") yield self.runStep() self.assertEqual(b''.join(read), b'{"message": "Hello World"}') def testFailure(self): msg = dict(message="Hello World") self.setupStep(transfer.JSONStringDownload(msg, "hello.json")) self.expectCommands( Expect('downloadFile', dict( workerdest="hello.json", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader))) + 1) self.expectOutcome( result=FAILURE, state_string="downloading to hello.json (failure)") return self.runStep() def test_init_workerdest_keyword(self): step = transfer.JSONStringDownload('srcfile', workerdest='dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_workerdest_positional(self): step = transfer.JSONStringDownload('srcfile', 'dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.JSONStringDownload() with self.assertRaises(TypeError): transfer.JSONStringDownload('srcfile') class TestJSONPropertiesDownload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def testBasic(self): self.setupStep(transfer.JSONPropertiesDownload("props.json")) self.step.build.setProperty('key1', 'value1', 'test') read 
= [] self.expectCommands( Expect('downloadFile', dict( workerdest="props.json", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader)) ) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to props.json") yield self.runStep() # we decode as key order is dependent of python version self.assertEqual(json.loads((b''.join(read)).decode()), { "properties": {"key1": "value1"}, "sourcestamps": []}) def test_init_workerdest_keyword(self): step = transfer.JSONPropertiesDownload(workerdest='dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_workerdest_positional(self): step = transfer.JSONPropertiesDownload('dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.JSONPropertiesDownload() buildbot-3.4.0/master/buildbot/test/unit/steps/test_trigger.py000066400000000000000000000671031413250514000245730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.internet import reactor from twisted.python import failure from twisted.trial import unittest from zope.interface import implementer from buildbot import config from buildbot import interfaces from buildbot.process import properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps import trigger from buildbot.test import fakedb from buildbot.test.util import steps from buildbot.test.util.interfaces import InterfaceTests from buildbot.test.util.misc import TestReactorMixin @implementer(interfaces.ITriggerableScheduler) class FakeTriggerable: triggered_with = None result = SUCCESS bsid = 1 brids = {} exception = False never_finish = False def __init__(self, name): self.name = name def trigger(self, waited_for, sourcestamps=None, set_props=None, parent_buildid=None, parent_relationship=None): self.triggered_with = (waited_for, sourcestamps, set_props.properties) idsDeferred = defer.Deferred() idsDeferred.callback((self.bsid, self.brids)) resultsDeferred = defer.Deferred() if not self.never_finish: if self.exception: reactor.callLater( 0, resultsDeferred.errback, RuntimeError('oh noes')) else: reactor.callLater( 0, resultsDeferred.callback, (self.result, self.brids)) return (idsDeferred, resultsDeferred) class TriggerableInterfaceTest(unittest.TestCase, InterfaceTests): def test_interface(self): self.assertInterfacesImplemented(FakeTriggerable) class FakeSourceStamp: def __init__(self, **kwargs): self.__dict__.update(kwargs) def asDict(self, includePatch=True): return self.__dict__.copy() class FakeSchedulerManager: pass # Magic numbers that relate brid to other build settings def BRID_TO_BSID(brid): return brid + 2000 def BRID_TO_BID(brid): return brid + 3000 def BRID_TO_BUILD_NUMBER(brid): return brid + 4000 
class TestTrigger(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def setupStep(self, step, sourcestampsInBuild=None, gotRevisionsInBuild=None, *args, **kwargs): sourcestamps = sourcestampsInBuild or [] got_revisions = gotRevisionsInBuild or {} yield super().setupStep(step, *args, **kwargs) # This step reaches deeply into a number of parts of Buildbot. That # should be fixed! # set up a buildmaster that knows about two fake schedulers, a and b m = self.master m.db.checkForeignKeys = True self.build.builder.botmaster = m.botmaster self.build.conn = object() m.config.buildbotURL = "baseurl/" m.scheduler_manager = FakeSchedulerManager() self.scheduler_a = a = FakeTriggerable(name='a') self.scheduler_b = b = FakeTriggerable(name='b') self.scheduler_c = c = FakeTriggerable(name='c') m.scheduler_manager.namedServices = dict(a=a, b=b, c=c) a.brids = {77: 11} b.brids = {78: 22} c.brids = {79: 33, 80: 44} def make_fake_br(brid, builderid): return fakedb.BuildRequest( id=brid, buildsetid=BRID_TO_BSID(brid), builderid=builderid) def make_fake_build(brid, builderid): return fakedb.Build( buildrequestid=brid, id=BRID_TO_BID(brid), number=BRID_TO_BUILD_NUMBER(brid), masterid=9, workerid=13, builderid=builderid) m.db.insertTestData([ fakedb.Builder(id=77, name='A'), fakedb.Builder(id=78, name='B'), fakedb.Builder(id=79, name='C1'), fakedb.Builder(id=80, name='C2'), fakedb.Master(id=9), fakedb.Buildset(id=2022), fakedb.Buildset(id=2011), fakedb.Buildset(id=2033), fakedb.Worker(id=13, name="some:worker"), make_fake_br(11, 77), make_fake_br(22, 78), fakedb.BuildRequest(id=33, buildsetid=2033, builderid=79), fakedb.BuildRequest(id=44, buildsetid=2033, builderid=80), make_fake_build(11, builderid=77), make_fake_build(22, builderid=78), make_fake_build(33, builderid=79), # builderid is 79 on purpose, changed, from the one of the 
buildrequest # to test the case of the virtual make_fake_build(44, builderid=79), ]) def getAllSourceStamps(): return sourcestamps self.build.getAllSourceStamps = getAllSourceStamps def getAllGotRevisions(): return got_revisions self.step.getAllGotRevisions = getAllGotRevisions self.exp_add_sourcestamp = None self.exp_a_trigger = None self.exp_b_trigger = None self.exp_c_trigger = None self.exp_added_urls = [] @defer.inlineCallbacks def runStep(self, results_dict=None): if results_dict is None: results_dict = {} if self.step.waitForFinish: for i in [11, 22, 33, 44]: yield self.master.db.builds.finishBuild(BRID_TO_BID(i), results_dict.get(i, SUCCESS)) d = super().runStep() # the build doesn't finish until after a callLater, so this has the # effect of checking whether the deferred has been fired already; if self.step.waitForFinish: self.assertFalse(d.called) else: self.assertTrue(d.called) yield d self.assertEqual(self.scheduler_a.triggered_with, self.exp_a_trigger) self.assertEqual(self.scheduler_b.triggered_with, self.exp_b_trigger) # check the URLs stepUrls = self.master.data.updates.stepUrls if stepUrls: got_added_urls = stepUrls[list(stepUrls)[0]] else: got_added_urls = [] self.assertEqual(sorted(got_added_urls), sorted(self.exp_added_urls)) if self.exp_add_sourcestamp: self.assertEqual(self.addSourceStamp_kwargs, self.exp_add_sourcestamp) # pause runStep's completion until after any other callLater's are done d = defer.Deferred() reactor.callLater(0, d.callback, None) yield d def expectTriggeredWith(self, a=None, b=None, c=None, d=None): self.exp_a_trigger = a if a is not None: self.expectTriggeredLinks('a_br') self.exp_b_trigger = b if b is not None: self.expectTriggeredLinks('b_br') self.exp_c_trigger = c if c is not None: self.expectTriggeredLinks('c_br') def expectAddedSourceStamp(self, **kwargs): self.exp_add_sourcestamp = kwargs def expectTriggeredLinks(self, *args): if 'a_br' in args: self.exp_added_urls.append( ('a #11', 'baseurl/#buildrequests/11')) 
if 'b_br' in args: self.exp_added_urls.append( ('b #22', 'baseurl/#buildrequests/22')) if 'c_br' in args: self.exp_added_urls.append( ('c #33', 'baseurl/#buildrequests/33')) self.exp_added_urls.append( ('c #44', 'baseurl/#buildrequests/44')) if 'a' in args: self.exp_added_urls.append( ('success: A #4011', 'baseurl/#builders/77/builds/4011')) if 'b' in args: self.exp_added_urls.append( ('success: B #4022', 'baseurl/#builders/78/builds/4022')) if 'afailed' in args: self.exp_added_urls.append( ('failure: A #4011', 'baseurl/#builders/77/builds/4011')) if 'c' in args: self.exp_added_urls.append( ('success: C1 #4033', 'baseurl/#builders/79/builds/4033')) self.exp_added_urls.append( ('success: C1 #4044', 'baseurl/#builders/79/builds/4044')) # tests def test_no_schedulerNames(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger() def test_unimportantSchedulerNames_not_in_schedulerNames(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['a'], unimportantSchedulerNames=['b']) def test_unimportantSchedulerNames_not_in_schedulerNames_but_rendered(self): # should not raise trigger.Trigger(schedulerNames=[properties.Interpolate('a')], unimportantSchedulerNames=['b']) def test_sourceStamp_and_updateSourceStamp(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['c'], sourceStamp=dict(x=1), updateSourceStamp=True) def test_sourceStamps_and_updateSourceStamp(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['c'], sourceStamps=[dict(x=1), dict(x=2)], updateSourceStamp=True) def test_updateSourceStamp_and_alwaysUseLatest(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['c'], updateSourceStamp=True, alwaysUseLatest=True) def test_sourceStamp_and_alwaysUseLatest(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['c'], sourceStamp=dict(x=1), alwaysUseLatest=True) def 
test_sourceStamps_and_alwaysUseLatest(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['c'], sourceStamps=[dict(x=1), dict(x=2)], alwaysUseLatest=True) @defer.inlineCallbacks def test_simple(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], sourceStamps={})) self.expectOutcome(result=SUCCESS, state_string='triggered a') self.expectTriggeredWith(a=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_simple_failure(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'])) self.scheduler_a.result = FAILURE # not waitForFinish, so trigger step succeeds even though the build # didn't fail self.expectOutcome(result=SUCCESS, state_string='triggered a') self.expectTriggeredWith(a=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_simple_exception(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'])) self.scheduler_a.exception = True self.expectOutcome(result=SUCCESS, state_string='triggered a') self.expectTriggeredWith(a=(False, [], {})) yield self.runStep() self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_bogus_scheduler(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'x'])) # bogus scheduler is an exception, not a failure (don't blame the patch) self.expectOutcome(result=EXCEPTION) self.expectTriggeredWith(a=None) # a is not triggered! 
yield self.runStep() self.flushLoggedErrors(ValueError) @defer.inlineCallbacks def test_updateSourceStamp(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=True), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ], gotRevisionsInBuild={'': 23456}, ) self.expectOutcome(result=SUCCESS, state_string='triggered a') self.expectTriggeredWith( a=(False, [{'codebase': '', 'repository': 'x', 'revision': 23456}], {})) yield self.runStep() @defer.inlineCallbacks def test_updateSourceStamp_no_got_revision(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=True), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ]) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith( a=(False, # uses old revision [{'codebase': '', 'repository': 'x', 'revision': 11111}], {})) yield self.runStep() @defer.inlineCallbacks def test_not_updateSourceStamp(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=False), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ], gotRevisionsInBuild={'': 23456}, ) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith( a=(False, [{'codebase': '', 'repository': 'x', 'revision': 11111}], {})) yield self.runStep() @defer.inlineCallbacks def test_updateSourceStamp_multiple_repositories(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=True), sourcestampsInBuild=[ FakeSourceStamp(codebase='cb1', revision='12345'), FakeSourceStamp(codebase='cb2', revision='12345') ], gotRevisionsInBuild={'cb1': 23456, 'cb2': 34567}, ) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith( a=(False, [{'codebase': 'cb1', 'revision': 23456}, {'codebase': 'cb2', 'revision': 34567}], {})) yield self.runStep() @defer.inlineCallbacks def test_updateSourceStamp_prop_false(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], 
updateSourceStamp=properties.Property('usess')), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ], gotRevisionsInBuild={'': 23456}, ) self.properties.setProperty('usess', False, 'me') self.expectOutcome(result=SUCCESS) # didn't use got_revision self.expectTriggeredWith( a=(False, [{'codebase': '', 'repository': 'x', 'revision': 11111}], {})) yield self.runStep() @defer.inlineCallbacks def test_updateSourceStamp_prop_true(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=properties.Property('usess')), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ], gotRevisionsInBuild={'': 23456}, ) self.properties.setProperty('usess', True, 'me') self.expectOutcome(result=SUCCESS) # didn't use got_revision self.expectTriggeredWith( a=(False, [{'codebase': '', 'repository': 'x', 'revision': 23456}], {})) yield self.runStep() @defer.inlineCallbacks def test_alwaysUseLatest(self): yield self.setupStep(trigger.Trigger(schedulerNames=['b'], alwaysUseLatest=True), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ]) self.expectOutcome(result=SUCCESS) # Do not pass setid self.expectTriggeredWith(b=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_alwaysUseLatest_prop_false(self): yield self.setupStep(trigger.Trigger(schedulerNames=['b'], alwaysUseLatest=properties.Property('aul')), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ]) self.properties.setProperty('aul', False, 'me') self.expectOutcome(result=SUCCESS) # didn't use latest self.expectTriggeredWith( b=(False, [{'codebase': '', 'repository': 'x', 'revision': 11111}], {})) yield self.runStep() @defer.inlineCallbacks def test_alwaysUseLatest_prop_true(self): yield self.setupStep(trigger.Trigger(schedulerNames=['b'], alwaysUseLatest=properties.Property('aul')), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ]) 
self.properties.setProperty('aul', True, 'me') self.expectOutcome(result=SUCCESS) # didn't use latest self.expectTriggeredWith(b=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_sourceStamp(self): ss = dict(revision=9876, branch='dev') yield self.setupStep(trigger.Trigger(schedulerNames=['b'], sourceStamp=ss)) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(b=(False, [ss], {})) yield self.runStep() @defer.inlineCallbacks def test_set_of_sourceStamps(self): ss1 = dict( codebase='cb1', repository='r1', revision=9876, branch='dev') ss2 = dict( codebase='cb2', repository='r2', revision=5432, branch='dev') yield self.setupStep(trigger.Trigger(schedulerNames=['b'], sourceStamps=[ss1, ss2])) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(b=(False, [ss1, ss2], {})) yield self.runStep() @defer.inlineCallbacks def test_set_of_sourceStamps_override_build(self): ss1 = dict( codebase='cb1', repository='r1', revision=9876, branch='dev') ss2 = dict( codebase='cb2', repository='r2', revision=5432, branch='dev') ss3 = FakeSourceStamp( codebase='cb3', repository='r3', revision=1234, branch='dev') ss4 = FakeSourceStamp( codebase='cb4', repository='r4', revision=2345, branch='dev') yield self.setupStep(trigger.Trigger(schedulerNames=['b'], sourceStamps=[ss1, ss2]), sourcestampsInBuild=[ss3, ss4]) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(b=(False, [ss1, ss2], {})) yield self.runStep() @defer.inlineCallbacks def test_sourceStamp_prop(self): ss = dict(revision=properties.Property('rev'), branch='dev') yield self.setupStep(trigger.Trigger(schedulerNames=['b'], sourceStamp=ss)) self.properties.setProperty('rev', 602, 'me') expected_ss = dict(revision=602, branch='dev') self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(b=(False, [expected_ss], {})) yield self.runStep() @defer.inlineCallbacks def test_waitForFinish(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'b'], waitForFinish=True)) 
self.expectOutcome(result=SUCCESS, state_string='triggered a, b') self.expectTriggeredWith( a=(True, [], {}), b=(True, [], {})) self.expectTriggeredLinks('a', 'b') yield self.runStep() @defer.inlineCallbacks def test_waitForFinish_failure(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], waitForFinish=True)) self.scheduler_a.result = FAILURE self.expectOutcome(result=FAILURE) self.expectTriggeredWith(a=(True, [], {})) self.expectTriggeredLinks('afailed') yield self.runStep(results_dict={11: FAILURE}) @defer.inlineCallbacks def test_waitForFinish_split_failure(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'b'], waitForFinish=True)) self.scheduler_a.result = FAILURE self.scheduler_b.result = SUCCESS self.expectOutcome(result=FAILURE, state_string='triggered a, b') self.expectTriggeredWith( a=(True, [], {}), b=(True, [], {})) self.expectTriggeredLinks('afailed', 'b') yield self.runStep(results_dict={11: FAILURE}) @defer.inlineCallbacks def test_waitForFinish_exception(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'b'], waitForFinish=True)) self.step.addCompleteLog = Mock() self.scheduler_b.exception = True self.expectOutcome(result=EXCEPTION, state_string='triggered a, b') self.expectTriggeredWith( a=(True, [], {}), b=(True, [], {})) self.expectTriggeredLinks('a') # b doesn't return a brid yield self.runStep() self.assertEqual(len(self.step.addCompleteLog.call_args_list), 1) @defer.inlineCallbacks def test_virtual_builder(self): yield self.setupStep(trigger.Trigger(schedulerNames=['c'], waitForFinish=True)) self.expectOutcome(result=SUCCESS, state_string='triggered c') self.expectTriggeredWith( c=(True, [], {})) self.expectTriggeredLinks('c') yield self.runStep() @defer.inlineCallbacks def test_set_properties(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], set_properties=dict(x=1, y=2))) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(a=(False, [], dict(x=(1, 'Trigger'), y=(2, 
'Trigger')))) yield self.runStep() @defer.inlineCallbacks def test_set_properties_prop(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], set_properties=dict(x=properties.Property('X'), y=2))) self.properties.setProperty('X', 'xxx', 'here') self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(a=(False, [], dict(x=('xxx', 'Trigger'), y=(2, 'Trigger')))) yield self.runStep() @defer.inlineCallbacks def test_copy_properties(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], copy_properties=['a', 'b'])) self.properties.setProperty('a', 'A', 'AA') self.properties.setProperty('b', 'B', 'BB') self.properties.setProperty('c', 'C', 'CC') self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(a=(False, [], dict(a=('A', 'Trigger'), b=('B', 'Trigger')))) yield self.runStep() @defer.inlineCallbacks def test_waitForFinish_interrupt(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], waitForFinish=True)) self.expectOutcome(result=CANCELLED, state_string='interrupted') self.expectTriggeredWith(a=(True, [], {})) d = self.runStep() # interrupt before the callLater representing the Triggerable # schedulers completes self.step.interrupt(failure.Failure(RuntimeError('oh noes'))) yield d @defer.inlineCallbacks def test_waitForFinish_interrupt_no_connection(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], waitForFinish=True)) self.expectOutcome(result=CANCELLED, state_string='interrupted') self.expectTriggeredWith(a=(True, [], {})) self.scheduler_a.never_finish = True d = self.runStep() # interrupt before the callLater representing the Triggerable # schedulers completes self.build.conn = None self.step.interrupt(failure.Failure(RuntimeError('oh noes'))) yield d @defer.inlineCallbacks def test_getSchedulersAndProperties_back_comp(self): class DynamicTrigger(trigger.Trigger): def getSchedulersAndProperties(self): return [("a", {}, False), ("b", {}, True)] yield self.setupStep(DynamicTrigger(schedulerNames=['a', 'b'])) 
self.scheduler_a.result = SUCCESS self.scheduler_b.result = FAILURE self.expectOutcome(result=SUCCESS, state_string='triggered a, b') self.expectTriggeredWith(a=(False, [], {}), b=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_unimportantSchedulerNames(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'b'], unimportantSchedulerNames=['b'])) self.scheduler_a.result = SUCCESS self.scheduler_b.result = FAILURE self.expectOutcome(result=SUCCESS, state_string='triggered a, b') self.expectTriggeredWith(a=(False, [], {}), b=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_unimportantSchedulerNames_with_more_brids_for_bsid(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'c'], unimportantSchedulerNames=['c'])) self.scheduler_a.result = SUCCESS self.scheduler_c.result = FAILURE self.expectOutcome(result=SUCCESS, state_string='triggered a, c') self.expectTriggeredWith(a=(False, [], {}), c=(False, [], {})) yield self.runStep() buildbot-3.4.0/master/buildbot/test/unit/steps/test_vstudio.py000066400000000000000000001163331413250514000246250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process import results from buildbot.process.properties import Property from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import vstudio from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin real_log = r""" 1>------ Build started: Project: lib1, Configuration: debug Win32 ------ 1>Compiling... 1>SystemLog.cpp 1>c:\absolute\path\to\systemlog.cpp(7) : warning C4100: 'op' : unreferenced formal parameter 1>c:\absolute\path\to\systemlog.cpp(12) : warning C4100: 'statusword' : unreferenced formal parameter 1>c:\absolute\path\to\systemlog.cpp(12) : warning C4100: 'op' : unreferenced formal parameter 1>c:\absolute\path\to\systemlog.cpp(17) : warning C4100: 'retryCounter' : unreferenced formal parameter 1>c:\absolute\path\to\systemlog.cpp(17) : warning C4100: 'op' : unreferenced formal parameter 1>c:\absolute\path\to\systemlog.cpp(22) : warning C4100: 'op' : unreferenced formal parameter 1>Creating library... 1>Build log was saved at "file://c:\another\absolute\path\to\debug\BuildLog.htm" 1>lib1 - 0 error(s), 6 warning(s) 2>------ Build started: Project: product, Configuration: debug Win32 ------ 2>Linking... 
2>LINK : fatal error LNK1168: cannot open ../../debug/directory/dllname.dll for writing 2>Build log was saved at "file://c:\another\similar\path\to\debug\BuildLog.htm" 2>product - 1 error(s), 0 warning(s) ========== Build: 1 succeeded, 1 failed, 6 up-to-date, 0 skipped ========== """ # noqa pylint: disable=line-too-long class TestAddEnvPath(unittest.TestCase): def do_test(self, initial_env, name, value, expected_env): step = vstudio.VisualStudio() step.env = initial_env step.add_env_path(name, value) self.assertEqual(step.env, expected_env) def test_new(self): self.do_test({}, 'PATH', r'C:\NOTHING', {'PATH': r'C:\NOTHING;'}) def test_new_semi(self): self.do_test({}, 'PATH', r'C:\NOTHING;', {'PATH': r'C:\NOTHING;'}) def test_existing(self): self.do_test({'PATH': '/bin'}, 'PATH', r'C:\NOTHING', {'PATH': r'/bin;C:\NOTHING;'}) def test_existing_semi(self): self.do_test({'PATH': '/bin;'}, 'PATH', r'C:\NOTHING', {'PATH': r'/bin;C:\NOTHING;'}) def test_existing_both_semi(self): self.do_test({'PATH': '/bin;'}, 'PATH', r'C:\NOTHING;', {'PATH': r'/bin;C:\NOTHING;'}) class MSLogLineObserver(unittest.TestCase): def setUp(self): self.warnings = [] lw = Mock() lw.addStdout = lambda l: self.warnings.append(l.rstrip()) self.errors = [] self.errors_stderr = [] le = Mock() le.addStdout = lambda l: self.errors.append(('o', l.rstrip())) le.addStderr = lambda l: self.errors.append(('e', l.rstrip())) self.llo = vstudio.MSLogLineObserver(lw, le) self.progress = {} self.llo.step = Mock() self.llo.step.setProgress = self.progress.__setitem__ def receiveLines(self, *lines): for line in lines: self.llo.outLineReceived(line) def assertResult(self, nbFiles=0, nbProjects=0, nbWarnings=0, nbErrors=0, errors=None, warnings=None, progress=None): if errors is None: errors = [] if warnings is None: warnings = [] if progress is None: progress = {} self.assertEqual( dict(nbFiles=self.llo.nbFiles, nbProjects=self.llo.nbProjects, nbWarnings=self.llo.nbWarnings, nbErrors=self.llo.nbErrors, 
errors=self.errors, warnings=self.warnings, progress=self.progress), dict(nbFiles=nbFiles, nbProjects=nbProjects, nbWarnings=nbWarnings, nbErrors=nbErrors, errors=errors, warnings=warnings, progress=progress)) def test_outLineReceived_empty(self): self.llo.outLineReceived('abcd\r\n') self.assertResult() def test_outLineReceived_projects(self): lines = [ "123>----- some project 1 -----", "123>----- some project 2 -----", ] self.receiveLines(*lines) self.assertResult(nbProjects=2, progress=dict(projects=2), errors=[('o', l) for l in lines], warnings=lines) def test_outLineReceived_files(self): lines = [ "123>SomeClass.cpp", "123>SomeStuff.c", "123>SomeStuff.h", # .h files not recognized ] self.receiveLines(*lines) self.assertResult(nbFiles=2, progress=dict(files=2)) def test_outLineReceived_warnings(self): lines = [ "abc: warning ABC123: xyz!", "def : warning DEF456: wxy!", ] self.receiveLines(*lines) self.assertResult(nbWarnings=2, progress=dict(warnings=2), warnings=lines) def test_outLineReceived_errors(self): lines = [ "error ABC123: foo", " error DEF456 : bar", " error : bar", " error: bar", # NOTE: not matched ] self.receiveLines(*lines) self.assertResult(nbErrors=3, # note: no progress errors=[ ('e', "error ABC123: foo"), ('e', " error DEF456 : bar"), ('e', " error : bar"), ]) def test_outLineReceived_real(self): # based on a real logfile donated by Ben Allard lines = real_log.split("\n") self.receiveLines(*lines) errors = [ ('o', '1>------ Build started: Project: lib1, Configuration: debug Win32 ------'), ('o', '2>------ Build started: Project: product, Configuration: debug Win32 ------'), ('e', '2>LINK : fatal error LNK1168: cannot open ../../debug/directory/dllname.dll for writing') # noqa pylint: disable=line-too-long ] warnings = [ '1>------ Build started: Project: lib1, Configuration: debug Win32 ------', "1>c:\\absolute\\path\\to\\systemlog.cpp(7) : warning C4100: 'op' : unreferenced formal parameter", # noqa pylint: disable=line-too-long 
"1>c:\\absolute\\path\\to\\systemlog.cpp(12) : warning C4100: 'statusword' : unreferenced formal parameter", # noqa pylint: disable=line-too-long "1>c:\\absolute\\path\\to\\systemlog.cpp(12) : warning C4100: 'op' : unreferenced formal parameter", # noqa pylint: disable=line-too-long "1>c:\\absolute\\path\\to\\systemlog.cpp(17) : warning C4100: 'retryCounter' : unreferenced formal parameter", # noqa pylint: disable=line-too-long "1>c:\\absolute\\path\\to\\systemlog.cpp(17) : warning C4100: 'op' : unreferenced formal parameter", # noqa pylint: disable=line-too-long "1>c:\\absolute\\path\\to\\systemlog.cpp(22) : warning C4100: 'op' : unreferenced formal parameter", # noqa pylint: disable=line-too-long '2>------ Build started: Project: product, Configuration: debug Win32 ------', ] self.assertResult(nbFiles=1, nbErrors=1, nbProjects=2, nbWarnings=6, progress={'files': 1, 'projects': 2, 'warnings': 6}, errors=errors, warnings=warnings) class VCx(vstudio.VisualStudio): def run(self): self.command = ["command", "here"] return super().run() class VisualStudio(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): """ Test L{VisualStudio} with a simple subclass, L{VCx}. 
""" def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_default_config(self): vs = vstudio.VisualStudio() self.assertEqual(vs.config, 'release') def test_simple(self): self.setupStep(VCx()) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() @defer.inlineCallbacks def test_installdir(self): self.setupStep(VCx(installdir=r'C:\I')) self.step.exp_installdir = r'C:\I' self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") yield self.runStep() self.assertEqual(self.step.installdir, r'C:\I') def test_evaluate_result_failure(self): self.setupStep(VCx()) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + 1 ) self.expectOutcome(result=FAILURE, state_string="compile 0 projects 0 files (failure)") return self.runStep() def test_evaluate_result_errors(self): self.setupStep(VCx()) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + ExpectShell.log('stdio', stdout='error ABC123: foo\r\n') + 0 ) self.expectOutcome(result=FAILURE, state_string="compile 0 projects 0 files 1 errors (failure)") return self.runStep() def test_evaluate_result_warnings(self): self.setupStep(VCx()) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + ExpectShell.log('stdio', stdout='foo: warning ABC123: foo\r\n') + 0 ) self.expectOutcome(result=WARNINGS, state_string="compile 0 projects 0 files 1 warnings (warnings)") return self.runStep() def test_env_setup(self): self.setupStep(VCx( INCLUDE=[r'c:\INC1', r'c:\INC2'], LIB=[r'c:\LIB1', r'C:\LIB2'], PATH=[r'c:\P1', r'C:\P2'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here'], env=dict( INCLUDE=r'c:\INC1;c:\INC2;', 
LIB=r'c:\LIB1;C:\LIB2;', PATH=r'c:\P1;C:\P2;')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_env_setup_existing(self): self.setupStep(VCx( INCLUDE=[r'c:\INC1', r'c:\INC2'], LIB=[r'c:\LIB1', r'C:\LIB2'], PATH=[r'c:\P1', r'C:\P2'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here'], env=dict( INCLUDE=r'c:\INC1;c:\INC2;', LIB=r'c:\LIB1;C:\LIB2;', PATH=r'c:\P1;C:\P2;')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() @defer.inlineCallbacks def test_rendering(self): self.setupStep(VCx( projectfile=Property('a'), config=Property('b'), project=Property('c'))) self.properties.setProperty('a', 'aa', 'Test') self.properties.setProperty('b', 'bb', 'Test') self.properties.setProperty('c', 'cc', 'Test') self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") yield self.runStep() self.assertEqual( [self.step.projectfile, self.step.config, self.step.project], ['aa', 'bb', 'cc']) class TestVC6(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def getExpectedEnv(self, installdir, LIB=None, p=None, i=None): include = [ installdir + r'\VC98\INCLUDE;', installdir + r'\VC98\ATL\INCLUDE;', installdir + r'\VC98\MFC\INCLUDE;', ] lib = [ installdir + r'\VC98\LIB;', installdir + r'\VC98\MFC\LIB;', ] path = [ installdir + r'\Common\msdev98\BIN;', installdir + r'\VC98\BIN;', installdir + r'\Common\TOOLS\WINNT;', installdir + r'\Common\TOOLS;', ] if p: path.insert(0, '{};'.format(p)) if i: include.insert(0, '{};'.format(i)) if LIB: lib.insert(0, '{};'.format(LIB)) return dict( INCLUDE=''.join(include), LIB=''.join(lib), PATH=''.join(path), ) def test_args(self): self.setupStep(vstudio.VC6(projectfile='pf', 
config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['msdev', 'pf', '/MAKE', 'pj - cfg', '/REBUILD'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_clean(self): self.setupStep(vstudio.VC6(projectfile='pf', config='cfg', project='pj', mode='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['msdev', 'pf', '/MAKE', 'pj - cfg', '/CLEAN'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_noproj_build(self): self.setupStep(vstudio.VC6(projectfile='pf', config='cfg', mode='build')) self.expectCommands( ExpectShell(workdir='wkdir', command=['msdev', 'pf', '/MAKE', 'ALL - cfg', '/BUILD'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_env_prepend(self): self.setupStep(vstudio.VC6(projectfile='pf', config='cfg', project='pj', PATH=['p'], INCLUDE=['i'], LIB=['l'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['msdev', 'pf', '/MAKE', 'pj - cfg', '/REBUILD', '/USEENV'], # note extra param env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio', LIB='l', p='p', i='i')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class TestVC7(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def getExpectedEnv(self, installdir, LIB=None, p=None, i=None): include = [ installdir + r'\VC7\INCLUDE;', installdir + r'\VC7\ATLMFC\INCLUDE;', installdir + r'\VC7\PlatformSDK\include;', installdir + r'\SDK\v1.1\include;', ] lib = 
[ installdir + r'\VC7\LIB;', installdir + r'\VC7\ATLMFC\LIB;', installdir + r'\VC7\PlatformSDK\lib;', installdir + r'\SDK\v1.1\lib;', ] path = [ installdir + r'\Common7\IDE;', installdir + r'\VC7\BIN;', installdir + r'\Common7\Tools;', installdir + r'\Common7\Tools\bin;', ] if p: path.insert(0, '{};'.format(p)) if i: include.insert(0, '{};'.format(i)) if LIB: lib.insert(0, '{};'.format(LIB)) return dict( INCLUDE=''.join(include), LIB=''.join(lib), PATH=''.join(path), ) def test_args(self): self.setupStep(vstudio.VC7(projectfile='pf', config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio .NET 2003')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_clean(self): self.setupStep(vstudio.VC7(projectfile='pf', config='cfg', project='pj', mode='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Clean', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio .NET 2003')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_noproj_build(self): self.setupStep(vstudio.VC7(projectfile='pf', config='cfg', mode='build')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Build', 'cfg'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio .NET 2003')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_env_prepend(self): self.setupStep(vstudio.VC7(projectfile='pf', config='cfg', project='pj', PATH=['p'], INCLUDE=['i'], LIB=['l'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/UseEnv', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft 
Visual Studio .NET 2003', LIB='l', p='p', i='i')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class VC8ExpectedEnvMixin: # used for VC8 and VC9Express def getExpectedEnv(self, installdir, x64=False, LIB=None, i=None, p=None): include = [ installdir + r'\VC\INCLUDE;', installdir + r'\VC\ATLMFC\include;', installdir + r'\VC\PlatformSDK\include;', ] lib = [ installdir + r'\VC\LIB;', installdir + r'\VC\ATLMFC\LIB;', installdir + r'\VC\PlatformSDK\lib;', installdir + r'\SDK\v2.0\lib;', ] path = [ installdir + r'\Common7\IDE;', installdir + r'\VC\BIN;', installdir + r'\Common7\Tools;', installdir + r'\Common7\Tools\bin;', installdir + r'\VC\PlatformSDK\bin;', installdir + r'\SDK\v2.0\bin;', installdir + r'\VC\VCPackages;', r'${PATH};', ] if x64: path.insert(1, installdir + r'\VC\BIN\x86_amd64;') lib = [lb[:-1] + r'\amd64;' for lb in lib] if LIB: lib.insert(0, '{};'.format(LIB)) if p: path.insert(0, '{};'.format(p)) if i: include.insert(0, '{};'.format(i)) return dict( INCLUDE=''.join(include), LIB=''.join(lib), PATH=''.join(path), ) class TestVC8(VC8ExpectedEnvMixin, steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_args(self): self.setupStep(vstudio.VC8(projectfile='pf', config='cfg', project='pj', arch='arch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 8')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_args_x64(self): self.setupStep(vstudio.VC8(projectfile='pf', config='cfg', project='pj', arch='x64')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program 
Files\Microsoft Visual Studio 8', x64=True)) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_clean(self): self.setupStep(vstudio.VC8(projectfile='pf', config='cfg', project='pj', mode='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Clean', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 8')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() @defer.inlineCallbacks def test_rendering(self): self.setupStep(vstudio.VC8(projectfile='pf', config='cfg', arch=Property('a'))) self.properties.setProperty('a', 'x64', 'Test') self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 8', x64=True)) # property has expected effect + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") yield self.runStep() self.assertEqual(self.step.arch, 'x64') class TestVCExpress9(VC8ExpectedEnvMixin, steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_args(self): self.setupStep(vstudio.VCExpress9(projectfile='pf', config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['vcexpress', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( # note: still uses version 8 (?!) 
r'C:\Program Files\Microsoft Visual Studio 8')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_clean(self): self.setupStep(vstudio.VCExpress9(projectfile='pf', config='cfg', project='pj', mode='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['vcexpress', 'pf', '/Clean', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( # note: still uses version 8 (?!) r'C:\Program Files\Microsoft Visual Studio 8')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_mode_build_env(self): self.setupStep(vstudio.VCExpress9(projectfile='pf', config='cfg', project='pj', mode='build', INCLUDE=['i'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['vcexpress', 'pf', '/Build', 'cfg', '/UseEnv', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 8', i='i')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class TestVC9(VC8ExpectedEnvMixin, steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_installdir(self): self.setupStep(vstudio.VC9(projectfile='pf', config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 9.0')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class TestVC10(VC8ExpectedEnvMixin, steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_installdir(self): self.setupStep(vstudio.VC10(projectfile='pf', config='cfg', project='pj')) 
self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 10.0')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class TestVC11(VC8ExpectedEnvMixin, steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_installdir(self): self.setupStep(vstudio.VC11(projectfile='pf', config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 11.0')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class TestMsBuild(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def test_no_platform(self): self.setupStep(vstudio.MsBuild(projectfile='pf', config='cfg', platform=None, project='pj')) self.expectOutcome(result=results.EXCEPTION, state_string="built pj for cfg|None") yield self.runStep() self.assertEqual(len(self.flushLoggedErrors(config.ConfigErrors)), 1) def test_rebuild_project(self): self.setupStep(vstudio.MsBuild( projectfile='pf', config='cfg', platform='Win32', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command='"%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="Win32" /maxcpucount /t:"pj"', # noqa pylint: disable=line-too-long env={'VCENV_BAT': r'${VS110COMNTOOLS}..\..\VC\vcvarsall.bat'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="built pj for cfg|Win32") return self.runStep() def test_build_project(self): self.setupStep(vstudio.MsBuild( 
projectfile='pf', config='cfg', platform='Win32', project='pj', mode='build')) self.expectCommands( ExpectShell(workdir='wkdir', command='"%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="Win32" /maxcpucount /t:"pj:Build"', # noqa pylint: disable=line-too-long env={'VCENV_BAT': r'${VS110COMNTOOLS}..\..\VC\vcvarsall.bat'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="built pj for cfg|Win32") return self.runStep() def test_clean_project(self): self.setupStep(vstudio.MsBuild( projectfile='pf', config='cfg', platform='Win32', project='pj', mode='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command='"%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="Win32" /maxcpucount /t:"pj:Clean"', # noqa pylint: disable=line-too-long env={'VCENV_BAT': r'${VS110COMNTOOLS}..\..\VC\vcvarsall.bat'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="built pj for cfg|Win32") return self.runStep() def test_rebuild_project_with_defines(self): self.setupStep(vstudio.MsBuild( projectfile='pf', config='cfg', platform='Win32', project='pj', defines=['Define1', 'Define2=42'])) self.expectCommands( ExpectShell(workdir='wkdir', command='"%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="Win32" /maxcpucount /t:"pj" /p:DefineConstants="Define1;Define2=42"', # noqa pylint: disable=line-too-long env={'VCENV_BAT': r'${VS110COMNTOOLS}..\..\VC\vcvarsall.bat'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="built pj for cfg|Win32") return self.runStep() def test_rebuild_solution(self): self.setupStep( vstudio.MsBuild(projectfile='pf', config='cfg', platform='x64')) self.expectCommands( ExpectShell(workdir='wkdir', command='"%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="x64" /maxcpucount /t:Rebuild', # noqa pylint: disable=line-too-long env={'VCENV_BAT': r'${VS110COMNTOOLS}..\..\VC\vcvarsall.bat'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="built solution for cfg|x64") return 
self.runStep() class TestMsBuild141(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def test_no_platform(self): self.setupStep(vstudio.MsBuild(projectfile='pf', config='cfg', platform=None, project='pj')) self.expectOutcome(result=results.EXCEPTION, state_string="built pj for cfg|None") yield self.runStep() self.assertEqual(len(self.flushLoggedErrors(config.ConfigErrors)), 1) def test_rebuild_project(self): self.setupStep(vstudio.MsBuild141( projectfile='pf', config='cfg', platform='Win32', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command='FOR /F "tokens=*" %%I in (\'vswhere.exe -property installationPath\') do "%%I\\%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="Win32" /maxcpucount /t:"pj"', # noqa pylint: disable=line-too-long env={'VCENV_BAT': r'\VC\Auxiliary\Build\vcvarsall.bat', 'PATH': 'C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\;${PATH};'}) # noqa pylint: disable=line-too-long + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_build_project(self): self.setupStep(vstudio.MsBuild141( projectfile='pf', config='cfg', platform='Win32', project='pj', mode='build')) self.expectCommands( ExpectShell(workdir='wkdir', command='FOR /F "tokens=*" %%I in (\'vswhere.exe -property installationPath\') do "%%I\\%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="Win32" /maxcpucount /t:"pj:Build"', # noqa pylint: disable=line-too-long env={'VCENV_BAT': r'\VC\Auxiliary\Build\vcvarsall.bat', 'PATH': 'C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\;${PATH};'}) # noqa pylint: disable=line-too-long + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_clean_project(self): self.setupStep(vstudio.MsBuild141( 
projectfile='pf', config='cfg', platform='Win32', project='pj', mode='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command='FOR /F "tokens=*" %%I in (\'vswhere.exe -property installationPath\') do "%%I\\%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="Win32" /maxcpucount /t:"pj:Clean"', # noqa pylint: disable=line-too-long env={'VCENV_BAT': r'\VC\Auxiliary\Build\vcvarsall.bat', 'PATH': 'C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\;${PATH};'}) # noqa pylint: disable=line-too-long + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_rebuild_project_with_defines(self): self.setupStep(vstudio.MsBuild141( projectfile='pf', config='cfg', platform='Win32', project='pj', defines=['Define1', 'Define2=42'])) self.expectCommands( ExpectShell(workdir='wkdir', command='FOR /F "tokens=*" %%I in (\'vswhere.exe -property installationPath\') do "%%I\\%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="Win32" /maxcpucount /t:"pj" /p:DefineConstants="Define1;Define2=42"', # noqa pylint: disable=line-too-long env={'VCENV_BAT': r'\VC\Auxiliary\Build\vcvarsall.bat', 'PATH': 'C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\;${PATH};'}) # noqa pylint: disable=line-too-long + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_rebuild_solution(self): self.setupStep( vstudio.MsBuild141(projectfile='pf', config='cfg', platform='x64')) self.expectCommands( ExpectShell(workdir='wkdir', command='FOR /F "tokens=*" %%I in (\'vswhere.exe -property installationPath\') do "%%I\\%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="x64" /maxcpucount /t:Rebuild', # noqa pylint: disable=line-too-long env={'VCENV_BAT': r'\VC\Auxiliary\Build\vcvarsall.bat', 'PATH': 'C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\;${PATH};'}) # noqa pylint: disable=line-too-long + 0 ) 
self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class Aliases(unittest.TestCase): def test_vs2003(self): self.assertIdentical(vstudio.VS2003, vstudio.VC7) def test_vs2005(self): self.assertIdentical(vstudio.VS2005, vstudio.VC8) def test_vs2008(self): self.assertIdentical(vstudio.VS2008, vstudio.VC9) def test_vs2010(self): self.assertIdentical(vstudio.VS2010, vstudio.VC10) def test_vs2012(self): self.assertIdentical(vstudio.VS2012, vstudio.VC11) buildbot-3.4.0/master/buildbot/test/unit/steps/test_worker.py000066400000000000000000000421131413250514000244330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import stat from twisted.internet import defer from twisted.trial import unittest from buildbot.interfaces import WorkerSetupError from buildbot.process import buildstep from buildbot.process import properties from buildbot.process import remotetransfer from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps import worker from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin def uploadString(string): def behavior(command): writer = command.args['writer'] writer.remote_write(string) writer.remote_close() return behavior class TestSetPropertiesFromEnv(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_simple(self): self.setupStep(worker.SetPropertiesFromEnv( variables=["one", "two", "three", "five", "six"], source="me")) self.worker.worker_environ = { "one": "1", "two": None, "six": "6", "FIVE": "555"} self.worker.worker_system = 'linux' self.properties.setProperty("four", 4, "them") self.properties.setProperty("five", 5, "them") self.properties.setProperty("six", 99, "them") self.expectOutcome(result=SUCCESS, state_string="Set") self.expectProperty('one', "1", source='me') self.expectNoProperty('two') self.expectNoProperty('three') self.expectProperty('four', 4, source='them') self.expectProperty('five', 5, source='them') self.expectProperty('six', '6', source='me') self.expectLogfile("properties", "one = '1'\nsix = '6'") return self.runStep() def test_case_folding(self): self.setupStep(worker.SetPropertiesFromEnv( variables=["eNv"], source="me")) self.worker.worker_environ = {"ENV": 'EE'} self.worker.worker_system = 'win32' 
self.expectOutcome(result=SUCCESS, state_string="Set") self.expectProperty('eNv', 'EE', source='me') self.expectLogfile("properties", "eNv = 'EE'") return self.runStep() class TestFileExists(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_found(self): self.setupStep(worker.FileExists(file="x")) self.expectCommands( Expect('stat', {'file': 'x'}) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0 ) self.expectOutcome(result=SUCCESS, state_string="File found.") return self.runStep() def test_not_found(self): self.setupStep(worker.FileExists(file="x")) self.expectCommands( Expect('stat', {'file': 'x'}) + Expect.update('stat', [0, 99, 99]) + 0 ) self.expectOutcome(result=FAILURE, state_string="Not a file. (failure)") return self.runStep() def test_failure(self): self.setupStep(worker.FileExists(file="x")) self.expectCommands( Expect('stat', {'file': 'x'}) + 1 ) self.expectOutcome(result=FAILURE, state_string="File not found. (failure)") return self.runStep() def test_render(self): self.setupStep(worker.FileExists(file=properties.Property("x"))) self.properties.setProperty('x', 'XXX', 'here') self.expectCommands( Expect('stat', {'file': 'XXX'}) + 1 ) self.expectOutcome(result=FAILURE, state_string="File not found. 
(failure)") return self.runStep() @defer.inlineCallbacks def test_old_version(self): self.setupStep(worker.FileExists(file="x"), worker_version=dict()) self.expectOutcome(result=EXCEPTION, state_string="finished (exception)") yield self.runStep() self.flushLoggedErrors(WorkerSetupError) class TestCopyDirectory(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(worker.CopyDirectory(src="s", dest="d")) self.expectCommands( Expect('cpdir', {'fromdir': 's', 'todir': 'd', 'timeout': 120}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Copied s to d") return self.runStep() def test_timeout(self): self.setupStep(worker.CopyDirectory(src="s", dest="d", timeout=300)) self.expectCommands( Expect('cpdir', {'fromdir': 's', 'todir': 'd', 'timeout': 300}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Copied s to d") return self.runStep() def test_maxTime(self): self.setupStep(worker.CopyDirectory(src="s", dest="d", maxTime=10)) self.expectCommands( Expect('cpdir', {'fromdir': 's', 'todir': 'd', 'maxTime': 10, 'timeout': 120}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Copied s to d") return self.runStep() def test_failure(self): self.setupStep(worker.CopyDirectory(src="s", dest="d")) self.expectCommands( Expect('cpdir', {'fromdir': 's', 'todir': 'd', 'timeout': 120}) + 1 ) self.expectOutcome(result=FAILURE, state_string="Copying s to d failed. 
(failure)") return self.runStep() def test_render(self): self.setupStep(worker.CopyDirectory( src=properties.Property("x"), dest=properties.Property("y"))) self.properties.setProperty('x', 'XXX', 'here') self.properties.setProperty('y', 'YYY', 'here') self.expectCommands( Expect('cpdir', {'fromdir': 'XXX', 'todir': 'YYY', 'timeout': 120}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Copied XXX to YYY") return self.runStep() class TestRemoveDirectory(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(worker.RemoveDirectory(dir="d")) self.expectCommands( Expect('rmdir', {'dir': 'd'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Deleted") return self.runStep() def test_failure(self): self.setupStep(worker.RemoveDirectory(dir="d")) self.expectCommands( Expect('rmdir', {'dir': 'd'}) + 1 ) self.expectOutcome(result=FAILURE, state_string="Delete failed. (failure)") return self.runStep() def test_render(self): self.setupStep(worker.RemoveDirectory(dir=properties.Property("x"))) self.properties.setProperty('x', 'XXX', 'here') self.expectCommands( Expect('rmdir', {'dir': 'XXX'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Deleted") return self.runStep() class TestMakeDirectory(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(worker.MakeDirectory(dir="d")) self.expectCommands( Expect('mkdir', {'dir': 'd'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Created") return self.runStep() def test_failure(self): self.setupStep(worker.MakeDirectory(dir="d")) self.expectCommands( Expect('mkdir', {'dir': 'd'}) + 1 ) self.expectOutcome(result=FAILURE, state_string="Create failed. 
(failure)") return self.runStep() def test_render(self): self.setupStep(worker.MakeDirectory(dir=properties.Property("x"))) self.properties.setProperty('x', 'XXX', 'here') self.expectCommands( Expect('mkdir', {'dir': 'XXX'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Created") return self.runStep() class CompositeUser(buildstep.BuildStep, worker.CompositeStepMixin): def __init__(self, payload): self.payload = payload self.logEnviron = False super().__init__() @defer.inlineCallbacks def run(self): yield self.addLogForRemoteCommands('stdio') res = yield self.payload(self) return FAILURE if res else SUCCESS class TestCompositeStepMixin(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_runRemoteCommand(self): cmd_args = ('foo', {'bar': False}) def testFunc(x): x.runRemoteCommand(*cmd_args) self.setupStep(CompositeUser(testFunc)) self.expectCommands(Expect(*cmd_args) + 0) self.expectOutcome(result=SUCCESS) def test_runRemoteCommandFail(self): cmd_args = ('foo', {'bar': False}) @defer.inlineCallbacks def testFunc(x): yield x.runRemoteCommand(*cmd_args) self.setupStep(CompositeUser(testFunc)) self.expectCommands(Expect(*cmd_args) + 1) self.expectOutcome(result=FAILURE) return self.runStep() @defer.inlineCallbacks def test_runRemoteCommandFailNoAbandon(self): cmd_args = ('foo', {'bar': False}) @defer.inlineCallbacks def testFunc(x): yield x.runRemoteCommand(*cmd_args, **dict(abandonOnFailure=False)) testFunc.ran = True self.setupStep(CompositeUser(testFunc)) self.expectCommands(Expect(*cmd_args) + 1) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertTrue(testFunc.ran) def test_rmfile(self): self.setupStep(CompositeUser(lambda x: x.runRmFile("d"))) self.expectCommands( Expect('rmfile', {'path': 'd', 'logEnviron': False}) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mkdir(self): 
self.setupStep(CompositeUser(lambda x: x.runMkdir("d"))) self.expectCommands( Expect('mkdir', {'dir': 'd', 'logEnviron': False}) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_rmdir(self): self.setupStep(CompositeUser(lambda x: x.runRmdir("d"))) self.expectCommands( Expect('rmdir', {'dir': 'd', 'logEnviron': False}) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mkdir_fail(self): self.setupStep(CompositeUser(lambda x: x.runMkdir("d"))) self.expectCommands( Expect('mkdir', {'dir': 'd', 'logEnviron': False}) + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() def test_glob(self): @defer.inlineCallbacks def testFunc(x): res = yield x.runGlob("*.pyc") self.assertEqual(res, ["one.pyc", "two.pyc"]) self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('glob', {'path': '*.pyc', 'logEnviron': False}) + Expect.update('files', ["one.pyc", "two.pyc"]) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_glob_fail(self): self.setupStep(CompositeUser(lambda x: x.runGlob("*.pyc"))) self.expectCommands( Expect('glob', {'path': '*.pyc', 'logEnviron': False}) + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() def test_abandonOnFailure(self): @defer.inlineCallbacks def testFunc(x): yield x.runMkdir("d") yield x.runMkdir("d") self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('mkdir', {'dir': 'd', 'logEnviron': False}) + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() def test_notAbandonOnFailure(self): @defer.inlineCallbacks def testFunc(x): yield x.runMkdir("d", abandonOnFailure=False) yield x.runMkdir("d", abandonOnFailure=False) self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('mkdir', {'dir': 'd', 'logEnviron': False}) + 1, Expect('mkdir', {'dir': 'd', 'logEnviron': False}) + 1 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_getFileContentFromWorker(self): @defer.inlineCallbacks def testFunc(x): res = yield 
x.getFileContentFromWorker("file.txt") self.assertEqual(res, "Hello world!") self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('uploadFile', dict( workersrc="file.txt", workdir='wkdir', blocksize=32 * 1024, maxsize=None, writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_getFileContentFromWorker2_16(self): @defer.inlineCallbacks def testFunc(x): res = yield x.getFileContentFromWorker("file.txt") self.assertEqual(res, "Hello world!") self.setupStep( CompositeUser(testFunc), worker_version={'*': '2.16'}) self.expectCommands( Expect('uploadFile', dict( slavesrc="file.txt", workdir='wkdir', blocksize=32 * 1024, maxsize=None, writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_downloadFileContentToWorker(self): @defer.inlineCallbacks def testFunc(x): res = yield x.downloadFileContentToWorker("/path/dest1", "file text") self.assertEqual(res, None) exp_args = {'maxsize': None, 'workdir': 'wkdir', 'mode': None, 'reader': ExpectRemoteRef(remotetransfer.FileReader), 'blocksize': 32768, 'workerdest': '/path/dest1'} self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('downloadFile', exp_args) ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_downloadFileContentToWorkerWithFilePermissions(self): @defer.inlineCallbacks def testFunc(x): res = yield x.downloadFileContentToWorker("/path/dest1", "file text", mode=stat.S_IRUSR) self.assertEqual(res, None) exp_args = {'maxsize': None, 'workdir': 'wkdir', 'mode': stat.S_IRUSR, 'reader': ExpectRemoteRef(remotetransfer.FileReader), 'blocksize': 32768, 'workerdest': '/path/dest1'} self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('downloadFile', exp_args) ) self.expectOutcome(result=SUCCESS) return self.runStep() 
buildbot-3.4.0/master/buildbot/test/unit/test_asyncio.py000066400000000000000000000066171413250514000234420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import asyncio from twisted.internet import defer from twisted.trial import unittest from buildbot import util from buildbot.asyncio import as_deferred from buildbot.test.util.misc import TestReactorMixin class TestAsyncioTestLoop(TestReactorMixin, unittest.TestCase): maxDiff = None def setUp(self): self.setUpTestReactor(use_asyncio=True) def test_coroutine_schedule(self): d1 = defer.Deferred() f1 = d1.asFuture(self.asyncio_loop) async def coro1(): await f1 f = asyncio.ensure_future(coro1()) d1.callback(None) return defer.Deferred.fromFuture(f) @defer.inlineCallbacks def test_asyncio_gather(self): self.calls = 0 async def coro1(): await asyncio.sleep(1) self.calls += 1 async def coro2(): await asyncio.sleep(1) self.calls += 1 @defer.inlineCallbacks def inlineCallbacks1(): yield util.asyncSleep(1, self.reactor) self.calls += 1 @defer.inlineCallbacks def inlineCallbacks2(): yield util.asyncSleep(1, self.reactor) self.calls += 1 async def main_coro(): dl = [] dl.append(coro1()) dl.append(coro2()) # We support directly yielding a deferred inside a asyncio coroutine # this needs a patch of Deferred.__await__ implemented in asyncio.py 
dl.append(inlineCallbacks1()) dl.append(inlineCallbacks2().asFuture(self.asyncio_loop)) await asyncio.gather(*dl) self.calls += 1 f1 = main_coro() def advance(): self.reactor.advance(1) if self.calls < 3: self.reactor.callLater(0, advance) yield advance() yield as_deferred(f1) self.assertEqual(self.calls, 5) @defer.inlineCallbacks def test_asyncio_threadsafe(self): f1 = asyncio.Future() async def coro(): self.asyncio_loop.call_soon_threadsafe(f1.set_result, "ok") res = await f1 return res res = yield as_deferred(coro()) self.assertEqual(res, "ok") @defer.inlineCallbacks def test_asyncio_negative_call_at(self): res = yield as_deferred(defer.succeed("OK")) self.assertEqual(res, "OK") @defer.inlineCallbacks def test_asyncio_as_deferred_deferred(self): d = defer.Deferred() self.asyncio_loop.call_at(-1, d.callback, "OK") res = yield d self.assertEqual(res, "OK") @defer.inlineCallbacks def test_asyncio_as_deferred_default(self): res = yield as_deferred("OK") self.assertEqual(res, "OK") buildbot-3.4.0/master/buildbot/test/unit/test_buildbot_net_usage_data.py000066400000000000000000000135511413250514000266170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import platform from unittest.case import SkipTest from urllib import request as urllib_request from twisted.internet import reactor from twisted.python.filepath import FilePath from twisted.trial import unittest import buildbot.buildbot_net_usage_data from buildbot import config from buildbot.buildbot_net_usage_data import _sendBuildbotNetUsageData from buildbot.buildbot_net_usage_data import computeUsageData from buildbot.buildbot_net_usage_data import linux_distribution from buildbot.config import BuilderConfig from buildbot.master import BuildMaster from buildbot.plugins import steps from buildbot.process.factory import BuildFactory from buildbot.schedulers.forcesched import ForceScheduler from buildbot.test.util.integration import DictLoader from buildbot.test.util.warnings import assertProducesWarning from buildbot.warnings import ConfigWarning from buildbot.worker.base import Worker class Tests(unittest.TestCase): def getMaster(self, config_dict): """ Create a started ``BuildMaster`` with the given configuration. 
""" basedir = FilePath(self.mktemp()) basedir.createDirectory() master = BuildMaster( basedir.path, reactor=reactor, config_loader=DictLoader(config_dict)) master.config = master.config_loader.loadConfig() return master def getBaseConfig(self): return { 'builders': [ BuilderConfig(name="testy", workernames=["local1", "local2"], factory=BuildFactory([steps.ShellCommand(command='echo hello')])), ], 'workers': [Worker('local' + str(i), 'pass') for i in range(3)], 'schedulers': [ ForceScheduler( name="force", builderNames=["testy"]) ], 'protocols': {'null': {}}, 'multiMaster': True, } def test_basic(self): self.patch(config, "_in_unit_tests", False) with assertProducesWarning( ConfigWarning, message_pattern=r"`buildbotNetUsageData` is not configured and defaults to basic."): master = self.getMaster(self.getBaseConfig()) data = computeUsageData(master) self.assertEqual(sorted(data.keys()), sorted(['versions', 'db', 'platform', 'installid', 'mq', 'plugins', 'www_plugins'])) self.assertEqual(data['plugins']['buildbot/worker/base/Worker'], 3) self.assertEqual(sorted(data['plugins'].keys()), sorted( ['buildbot/schedulers/forcesched/ForceScheduler', 'buildbot/worker/base/Worker', 'buildbot/steps/shell/ShellCommand', 'buildbot/config/BuilderConfig'])) def test_full(self): c = self.getBaseConfig() c['buildbotNetUsageData'] = 'full' master = self.getMaster(c) data = computeUsageData(master) self.assertEqual(sorted(data.keys()), sorted(['versions', 'db', 'installid', 'platform', 'mq', 'plugins', 'builders', 'www_plugins'])) def test_custom(self): c = self.getBaseConfig() def myCompute(data): return dict(db=data['db']) c['buildbotNetUsageData'] = myCompute master = self.getMaster(c) data = computeUsageData(master) self.assertEqual(sorted(data.keys()), sorted(['db'])) def test_urllib(self): self.patch(buildbot.buildbot_net_usage_data, '_sendWithRequests', lambda _, __: None) class FakeRequest: def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs open_url = [] 
class urlopen: def __init__(self, r): self.request = r open_url.append(self) def read(self): return "ok" def close(self): pass self.patch(urllib_request, "Request", FakeRequest) self.patch(urllib_request, "urlopen", urlopen) _sendBuildbotNetUsageData({'foo': 'bar'}) self.assertEqual(len(open_url), 1) self.assertEqual(open_url[0].request.args, ('https://events.buildbot.net/events/phone_home', b'{"foo": "bar"}', {'Content-Length': 14, 'Content-Type': 'application/json'})) def test_real(self): if "TEST_BUILDBOTNET_USAGEDATA" not in os.environ: raise SkipTest( "_sendBuildbotNetUsageData real test only run when environment variable" " TEST_BUILDBOTNET_USAGEDATA is set") _sendBuildbotNetUsageData({'foo': 'bar'}) def test_linux_distro(self): system = platform.system() if system != "Linux": raise SkipTest("test is only for linux") distro = linux_distribution() self.assertEqual(len(distro), 2) self.assertNotIn("unknown", distro[0]) # Rolling distributions like Arch Linux (arch) does not have VERSION_ID if distro[0] not in ["arch", "gentoo"]: self.assertNotIn("unknown", distro[1]) buildbot-3.4.0/master/buildbot/test/unit/test_clients_sendchange.py000066400000000000000000000224361413250514000256120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import reactor from twisted.spread import pb from twisted.trial import unittest from buildbot.clients import sendchange class Sender(unittest.TestCase): def setUp(self): # patch out some PB components and make up some mocks self.patch(pb, 'PBClientFactory', self._fake_PBClientFactory) self.patch(reactor, 'connectTCP', self._fake_connectTCP) self.factory = mock.Mock(name='PBClientFactory') self.factory.login = self._fake_login self.factory.login_d = defer.Deferred() self.remote = mock.Mock(name='PB Remote') self.remote.callRemote = self._fake_callRemote self.remote.broker.transport.loseConnection = self._fake_loseConnection # results self.creds = None self.conn_host = self.conn_port = None self.lostConnection = False self.added_changes = [] self.vc_used = None def _fake_PBClientFactory(self): return self.factory def _fake_login(self, creds): self.creds = creds return self.factory.login_d def _fake_connectTCP(self, host, port, factory): self.conn_host = host self.conn_port = port self.assertIdentical(factory, self.factory) self.factory.login_d.callback(self.remote) def _fake_callRemote(self, method, change): self.assertEqual(method, 'addChange') self.added_changes.append(change) return defer.succeed(None) def _fake_loseConnection(self): self.lostConnection = True def assertProcess(self, host, port, username, password, changes): self.assertEqual([host, port, username, password, changes], [self.conn_host, self.conn_port, self.creds.username, self.creds.password, self.added_changes]) @defer.inlineCallbacks def test_send_minimal(self): s = sendchange.Sender('localhost:1234') yield s.send('branch', 'rev', 'comm', ['a']) self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='', repository='', who=None, files=['a'], comments='comm', branch='branch', revision='rev', category=None, when=None, properties={}, revlink='', src=None)]) 
@defer.inlineCallbacks def test_send_auth(self): s = sendchange.Sender('localhost:1234', auth=('me', 'sekrit')) yield s.send('branch', 'rev', 'comm', ['a']) self.assertProcess('localhost', 1234, b'me', b'sekrit', [ dict(project='', repository='', who=None, files=['a'], comments='comm', branch='branch', revision='rev', category=None, when=None, properties={}, revlink='', src=None)]) @defer.inlineCallbacks def test_send_full(self): s = sendchange.Sender('localhost:1234') yield s.send('branch', 'rev', 'comm', ['a'], who='me', category='cats', when=1234, properties={'a': 'b'}, repository='r', vc='git', project='p', revlink='rl') self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='p', repository='r', who='me', files=['a'], comments='comm', branch='branch', revision='rev', category='cats', when=1234, properties={'a': 'b'}, revlink='rl', src='git')]) @defer.inlineCallbacks def test_send_files_tuple(self): # 'buildbot sendchange' sends files as a tuple, rather than a list.. 
s = sendchange.Sender('localhost:1234') yield s.send('branch', 'rev', 'comm', ('a', 'b')) self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='', repository='', who=None, files=['a', 'b'], comments='comm', branch='branch', revision='rev', category=None, when=None, properties={}, revlink='', src=None)]) @defer.inlineCallbacks def test_send_codebase(self): s = sendchange.Sender('localhost:1234') yield s.send('branch', 'rev', 'comm', ['a'], codebase='mycb') self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='', repository='', who=None, files=['a'], comments='comm', branch='branch', revision='rev', category=None, when=None, properties={}, revlink='', src=None, codebase='mycb')]) @defer.inlineCallbacks def test_send_unicode(self): s = sendchange.Sender('localhost:1234') yield s.send('\N{DEGREE SIGN}', '\U0001f49e', '\N{POSTAL MARK FACE}', ['\U0001F4C1'], project='\N{SKULL AND CROSSBONES}', repository='\N{SNOWMAN}', who='\N{THAI CHARACTER KHOMUT}', category='\U0001F640', when=1234, properties={'\N{LATIN SMALL LETTER A WITH MACRON}': 'b'}, revlink='\U0001F517') self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='\N{SKULL AND CROSSBONES}', repository='\N{SNOWMAN}', who='\N{THAI CHARACTER KHOMUT}', files=['\U0001F4C1'], # FILE FOLDER comments='\N{POSTAL MARK FACE}', branch='\N{DEGREE SIGN}', revision='\U0001f49e', # REVOLVING HEARTS category='\U0001F640', # WEARY CAT FACE when=1234, properties={'\N{LATIN SMALL LETTER A WITH MACRON}': 'b'}, revlink='\U0001F517', # LINK SYMBOL src=None)]) @defer.inlineCallbacks def test_send_unicode_utf8(self): s = sendchange.Sender('localhost:1234') yield s.send('\N{DEGREE SIGN}'.encode('utf8'), '\U0001f49e'.encode('utf8'), '\N{POSTAL MARK FACE}'.encode('utf8'), ['\U0001F4C1'.encode('utf8')], project='\N{SKULL AND CROSSBONES}'.encode('utf8'), repository='\N{SNOWMAN}'.encode('utf8'), who='\N{THAI CHARACTER KHOMUT}'.encode('utf8'), 
category='\U0001F640'.encode('utf8'), when=1234, properties={ '\N{LATIN SMALL LETTER A WITH MACRON}'.encode('utf8'): 'b'}, revlink='\U0001F517'.encode('utf8')) self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='\N{SKULL AND CROSSBONES}', repository='\N{SNOWMAN}', who='\N{THAI CHARACTER KHOMUT}', files=['\U0001F4C1'], # FILE FOLDER comments='\N{POSTAL MARK FACE}', branch='\N{DEGREE SIGN}', revision='\U0001f49e', # REVOLVING HEARTS category='\U0001F640', # WEARY CAT FACE when=1234, # NOTE: not decoded! properties={b'\xc4\x81': 'b'}, revlink='\U0001F517', # LINK SYMBOL src=None)]) @defer.inlineCallbacks def test_send_unicode_latin1(self): # hand send() a bunch of latin1 strings, and expect them recoded # to unicode s = sendchange.Sender('localhost:1234', encoding='latin1') yield s.send('\N{YEN SIGN}'.encode('latin1'), '\N{POUND SIGN}'.encode('latin1'), '\N{BROKEN BAR}'.encode('latin1'), ['\N{NOT SIGN}'.encode('latin1')], project='\N{DEGREE SIGN}'.encode('latin1'), repository='\N{SECTION SIGN}'.encode('latin1'), who='\N{MACRON}'.encode('latin1'), category='\N{PILCROW SIGN}'.encode('latin1'), when=1234, properties={ '\N{SUPERSCRIPT ONE}'.encode('latin1'): 'b'}, revlink='\N{INVERTED QUESTION MARK}'.encode('latin1')) self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='\N{DEGREE SIGN}', repository='\N{SECTION SIGN}', who='\N{MACRON}', files=['\N{NOT SIGN}'], comments='\N{BROKEN BAR}', branch='\N{YEN SIGN}', revision='\N{POUND SIGN}', category='\N{PILCROW SIGN}', when=1234, # NOTE: not decoded! properties={b'\xb9': 'b'}, revlink='\N{INVERTED QUESTION MARK}', src=None)]) buildbot-3.4.0/master/buildbot/test/unit/test_clients_tryclient.py000066400000000000000000000124761413250514000255330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import base64 import json import sys from twisted.trial import unittest from buildbot.clients import tryclient from buildbot.util import bytes2unicode class createJobfile(unittest.TestCase): def makeNetstring(self, *strings): return ''.join(['{}:{},'.format(len(s), s) for s in strings]) # versions 1-4 are deprecated and not produced by the try client def test_createJobfile_v5(self): jobid = '123-456' branch = 'branch' baserev = 'baserev' patch_level = 0 patch_body = b'diff...' repository = 'repo' project = 'proj' who = 'someuser' comment = 'insightful comment' builderNames = ['runtests'] properties = {'foo': 'bar'} job = tryclient.createJobfile( jobid, branch, baserev, patch_level, patch_body, repository, project, who, comment, builderNames, properties) jobstr = self.makeNetstring( '5', json.dumps({ 'jobid': jobid, 'branch': branch, 'baserev': baserev, 'patch_level': patch_level, 'repository': repository, 'project': project, 'who': who, 'comment': comment, 'builderNames': builderNames, 'properties': properties, 'patch_body': bytes2unicode(patch_body), })) self.assertEqual(job, jobstr) def test_createJobfile_v6(self): jobid = '123-456' branch = 'branch' baserev = 'baserev' patch_level = 0 patch_body = b'diff...\xff' repository = 'repo' project = 'proj' who = 'someuser' comment = 'insightful comment' builderNames = ['runtests'] properties = {'foo': 'bar'} job = tryclient.createJobfile( jobid, branch, baserev, patch_level, patch_body, repository, project, who, comment, builderNames, 
properties) jobstr = self.makeNetstring( '6', json.dumps({ 'jobid': jobid, 'branch': branch, 'baserev': baserev, 'patch_level': patch_level, 'repository': repository, 'project': project, 'who': who, 'comment': comment, 'builderNames': builderNames, 'properties': properties, 'patch_body_base64': bytes2unicode(base64.b64encode(patch_body)), })) self.assertEqual(job, jobstr) def test_SourceStampExtractor_readPatch(self): sse = tryclient.GitExtractor(None, None, None) for patchlevel, diff in enumerate((None, "", b"")): sse.readPatch(diff, patchlevel) self.assertEqual(sse.patch, (patchlevel, None)) sse.readPatch(b"diff schmiff blah blah blah", 23) self.assertEqual(sse.patch, (23, b"diff schmiff blah blah blah")) def test_GitExtractor_fixBranch(self): sse = tryclient.GitExtractor(None, "origin/master", None) self.assertEqual(sse.branch, "origin/master") sse.fixBranch(b'origi\n') self.assertEqual(sse.branch, "origin/master") sse.fixBranch(b'origin\n') self.assertEqual(sse.branch, "master") def test_GitExtractor_override_baserev(self): sse = tryclient.GitExtractor(None, None, None) sse.override_baserev(b"23ae367063327b79234e081f396ecbc\n") self.assertEqual(sse.baserev, "23ae367063327b79234e081f396ecbc") class RemoteTryPP_TestStream(object): def __init__(self): self.writes = [] self.is_open = True def write(self, data): assert self.is_open self.writes.append(data) def closeStdin(self): assert self.is_open self.is_open = False def test_RemoteTryPP_encoding(self): rmt = tryclient.RemoteTryPP("job") self.assertTrue(isinstance(rmt.job, str)) rmt.transport = self.RemoteTryPP_TestStream() rmt.connectionMade() self.assertFalse(rmt.transport.is_open) self.assertEqual(len(rmt.transport.writes), 1) self.assertFalse(isinstance(rmt.transport.writes[0], str)) for streamname in "out", "err": sys_streamattr = "std" + streamname rmt_methodattr = streamname + "Received" teststream = self.RemoteTryPP_TestStream() saved_stream = getattr(sys, sys_streamattr) try: setattr(sys, sys_streamattr, 
teststream) getattr(rmt, rmt_methodattr)(b"data") finally: setattr(sys, sys_streamattr, saved_stream) self.assertEqual(len(teststream.writes), 1) self.assertTrue(isinstance(teststream.writes[0], str)) buildbot-3.4.0/master/buildbot/test/unit/test_clients_usersclient.py000066400000000000000000000065211413250514000260500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import reactor from twisted.spread import pb from twisted.trial import unittest from buildbot.clients import usersclient class TestUsersClient(unittest.TestCase): def setUp(self): # patch out some PB components and make up some mocks self.patch(pb, 'PBClientFactory', self._fake_PBClientFactory) self.patch(reactor, 'connectTCP', self._fake_connectTCP) self.factory = mock.Mock(name='PBClientFactory') self.factory.login = self._fake_login self.factory.login_d = defer.Deferred() self.remote = mock.Mock(name='PB Remote') self.remote.callRemote = self._fake_callRemote self.remote.broker.transport.loseConnection = self._fake_loseConnection # results self.conn_host = self.conn_port = None self.lostConnection = False def _fake_PBClientFactory(self): return self.factory def _fake_login(self, creds): return self.factory.login_d def _fake_connectTCP(self, host, port, factory): self.conn_host = host self.conn_port = port self.assertIdentical(factory, self.factory) self.factory.login_d.callback(self.remote) def _fake_callRemote(self, method, op, bb_username, bb_password, ids, info): self.assertEqual(method, 'commandline') self.called_with = dict(op=op, bb_username=bb_username, bb_password=bb_password, ids=ids, info=info) return defer.succeed(None) def _fake_loseConnection(self): self.lostConnection = True def assertProcess(self, host, port, called_with): self.assertEqual([host, port, called_with], [self.conn_host, self.conn_port, self.called_with]) @defer.inlineCallbacks def test_usersclient_info(self): uc = usersclient.UsersClient('localhost', "user", "userpw", 1234) yield uc.send('update', 'bb_user', 'hashed_bb_pass', None, [{'identifier': 'x', 'svn': 'x'}]) self.assertProcess('localhost', 1234, dict(op='update', bb_username='bb_user', bb_password='hashed_bb_pass', ids=None, info=[dict(identifier='x', svn='x')])) @defer.inlineCallbacks def test_usersclient_ids(self): uc = 
usersclient.UsersClient('localhost', "user", "userpw", 1234) yield uc.send('remove', None, None, ['x'], None) self.assertProcess('localhost', 1234, dict(op='remove', bb_username=None, bb_password=None, ids=['x'], info=None)) buildbot-3.4.0/master/buildbot/test/unit/test_config.py000066400000000000000000001670001413250514000232340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # We cannot use the builtins module here from Python-Future. # We need to use the native __builtin__ module on Python 2, # and builtins module on Python 3, because we need to override # the actual native open method. 
import os import re import textwrap import mock from twisted.internet import defer from twisted.trial import unittest from zope.interface import implementer from buildbot import config from buildbot import configurators from buildbot import interfaces from buildbot import locks from buildbot import revlinks from buildbot import worker from buildbot.changes import base as changes_base from buildbot.process import factory from buildbot.process import properties from buildbot.schedulers import base as schedulers_base from buildbot.test.util import dirs from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.warnings import assertNotProducesWarnings from buildbot.test.util.warnings import assertProducesWarning from buildbot.util import service from buildbot.warnings import ConfigWarning from buildbot.warnings import DeprecatedApiWarning try: # Python 2 import __builtin__ as builtins except ImportError: # Python 3 import builtins global_defaults = dict( title='Buildbot', titleURL='http://buildbot.net', buildbotURL='http://localhost:8080/', logCompressionLimit=4096, logCompressionMethod='gz', logEncoding='utf-8', logMaxTailSize=None, logMaxSize=None, properties=properties.Properties(), collapseRequests=None, prioritizeBuilders=None, protocols={}, multiMaster=False, manhole=None, buildbotNetUsageData=None, # in unit tests we default to None, but normally defaults to 'basic' www=dict(port=None, plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http.log'), ) class FakeChangeSource(changes_base.ChangeSource): def __init__(self): super().__init__(name='FakeChangeSource') @implementer(interfaces.IScheduler) class FakeScheduler: def __init__(self, name): self.name = name class FakeBuilder: def __init__(self, **kwargs): self.__dict__.update(kwargs) @implementer(interfaces.IWorker) class FakeWorker: def __init__(self, **kwargs): self.__dict__.update(kwargs) @implementer(interfaces.IMachine) class FakeMachine: 
def __init__(self, **kwargs): self.__dict__.update(kwargs) class ConfigErrors(unittest.TestCase): def test_constr(self): ex = config.ConfigErrors(['a', 'b']) self.assertEqual(ex.errors, ['a', 'b']) def test_addError(self): ex = config.ConfigErrors(['a']) ex.addError('c') self.assertEqual(ex.errors, ['a', 'c']) def test_nonempty(self): empty = config.ConfigErrors() full = config.ConfigErrors(['a']) self.assertTrue(not empty) self.assertFalse(not full) def test_error_raises(self): e = self.assertRaises(config.ConfigErrors, config.error, "message") self.assertEqual(e.errors, ["message"]) def test_error_no_raise(self): e = config.ConfigErrors() self.patch(config, "_errors", e) config.error("message") self.assertEqual(e.errors, ["message"]) def test_str(self): ex = config.ConfigErrors() self.assertEqual(str(ex), "") ex = config.ConfigErrors(["a"]) self.assertEqual(str(ex), "a") ex = config.ConfigErrors(["a", "b"]) self.assertEqual(str(ex), "a\nb") ex = config.ConfigErrors(["a"]) ex.addError('c') self.assertEqual(str(ex), "a\nc") class ConfigLoaderTests(ConfigErrorsMixin, dirs.DirsMixin, unittest.SynchronousTestCase): def setUp(self): self.basedir = os.path.abspath('basedir') self.filename = os.path.join(self.basedir, 'test.cfg') self.patch(config, "_in_unit_tests", False) return self.setUpDirs('basedir') def tearDown(self): return self.tearDownDirs() def install_config_file(self, config_file, other_files=None): if other_files is None: other_files = {} config_file = textwrap.dedent(config_file) with open(os.path.join(self.basedir, self.filename), "w") as f: f.write(config_file) for file, contents in other_files.items(): with open(file, "w") as f: f.write(contents) def test_loadConfig_missing_file(self): with self.assertRaisesConfigError( re.compile("configuration file .* does not exist")): config.loadConfigDict(self.basedir, self.filename) def test_loadConfig_missing_basedir(self): with self.assertRaisesConfigError( re.compile("basedir .* does not exist")): 
config.loadConfigDict(os.path.join(self.basedir, 'NO'), 'test.cfg') def test_loadConfig_open_error(self): """ Check that loadConfig() raises correct ConfigError exception in cases when configure file is found, but we fail to open it. """ def raise_IOError(*args): raise IOError("error_msg") self.install_config_file('#dummy') # override build-in open() function to always rise IOError self.patch(builtins, "open", raise_IOError) # check that we got the expected ConfigError exception with self.assertRaisesConfigError( re.compile("unable to open configuration file .*: error_msg")): config.loadConfigDict(self.basedir, self.filename) def test_loadConfig_parse_error(self): self.install_config_file('def x:\nbar') with self.assertRaisesConfigError(re.compile( "encountered a SyntaxError while parsing config file:")): config.loadConfigDict(self.basedir, self.filename) def test_loadConfig_eval_ConfigError(self): self.install_config_file("""\ from buildbot import config BuildmasterConfig = { 'multiMaster': True } config.error('oh noes!')""") with self.assertRaisesConfigError("oh noes"): config.loadConfigDict(self.basedir, self.filename) def test_loadConfig_eval_otherError(self): self.install_config_file("""\ from buildbot import config BuildmasterConfig = { 'multiMaster': True } raise ValueError('oh noes')""") with self.assertRaisesConfigError( "error while parsing config file: oh noes (traceback in logfile)"): config.loadConfigDict(self.basedir, self.filename) [error] = self.flushLoggedErrors(ValueError) self.assertEqual(error.value.args, ("oh noes",)) def test_loadConfig_no_BuildmasterConfig(self): self.install_config_file('x=10') with self.assertRaisesConfigError( "does not define 'BuildmasterConfig'"): config.loadConfigDict(self.basedir, self.filename) def test_loadConfig_with_local_import(self): self.install_config_file("""\ from subsidiary_module import x BuildmasterConfig = dict(x=x) """, {'basedir/subsidiary_module.py': "x = 10"}) _, rv = 
config.loadConfigDict(self.basedir, self.filename) self.assertEqual(rv, {'x': 10}) class MasterConfig(ConfigErrorsMixin, dirs.DirsMixin, unittest.TestCase): maxDiff = None def setUp(self): self.basedir = os.path.abspath('basedir') self.filename = os.path.join(self.basedir, 'test.cfg') return self.setUpDirs('basedir') def tearDown(self): return self.tearDownDirs() # utils def patch_load_helpers(self): # patch out all of the "helpers" for loadConfig with null functions for n in dir(config.MasterConfig): if n.startswith('load_'): typ = 'loader' elif n.startswith('check_'): typ = 'checker' else: continue v = getattr(config.MasterConfig, n) if callable(v): if typ == 'loader': self.patch(config.MasterConfig, n, mock.Mock(side_effect=lambda filename, config_dict: None)) else: self.patch(config.MasterConfig, n, mock.Mock(side_effect=lambda: None)) def install_config_file(self, config_file, other_files=None): if other_files is None: other_files = {} config_file = textwrap.dedent(config_file) with open(os.path.join(self.basedir, self.filename), "w") as f: f.write(config_file) for file, contents in other_files.items(): with open(file, "w") as f: f.write(contents) # tests def test_defaults(self): cfg = config.MasterConfig() expected = dict( # validation, db=dict( db_url='sqlite:///state.sqlite'), mq=dict(type='simple'), metrics=None, caches=dict(Changes=10, Builds=15), schedulers={}, builders=[], workers=[], change_sources=[], status=[], user_managers=[], revlink=revlinks.default_revlink_matcher ) expected.update(global_defaults) expected['buildbotNetUsageData'] = 'basic' got = { attr: getattr(cfg, attr) for attr, exp in expected.items()} got = interfaces.IConfigured(got).getConfigDict() expected = interfaces.IConfigured(expected).getConfigDict() self.assertEqual(got, expected) def test_defaults_validation(self): # re's aren't comparable, but we can make sure the keys match cfg = config.MasterConfig() self.assertEqual(sorted(cfg.validation.keys()), sorted([ 'branch', 
'revision', 'property_name', 'property_value', ])) def test_loadConfig_eval_ConfigErrors(self): # We test a config that has embedded errors, as well # as semantic errors that get added later. If an exception is raised # prematurely, then the semantic errors wouldn't get reported. self.install_config_file("""\ from buildbot import config BuildmasterConfig = {} config.error('oh noes!') config.error('noes too!')""") e = self.assertRaises(config.ConfigErrors, config.FileLoader(self.basedir, self.filename).loadConfig) self.assertEqual(e.errors, ['oh noes!', 'noes too!', 'no workers are configured', 'no builders are configured']) def test_loadConfig_unknown_key(self): self.patch_load_helpers() self.install_config_file("""\ BuildmasterConfig = dict(foo=10) """) with self.assertRaisesConfigError("Unknown BuildmasterConfig key foo"): config.FileLoader(self.basedir, self.filename).loadConfig() def test_loadConfig_unknown_keys(self): self.patch_load_helpers() self.install_config_file("""\ BuildmasterConfig = dict(foo=10, bar=20) """) with self.assertRaisesConfigError( "Unknown BuildmasterConfig keys bar, foo"): config.FileLoader(self.basedir, self.filename).loadConfig() def test_loadConfig_success(self): self.patch_load_helpers() self.install_config_file("""\ BuildmasterConfig = dict() """) rv = config.FileLoader(self.basedir, self.filename).loadConfig() self.assertIsInstance(rv, config.MasterConfig) # make sure all of the loaders and checkers are called self.assertTrue(rv.load_global.called) self.assertTrue(rv.load_validation.called) self.assertTrue(rv.load_db.called) self.assertTrue(rv.load_metrics.called) self.assertTrue(rv.load_caches.called) self.assertTrue(rv.load_schedulers.called) self.assertTrue(rv.load_builders.called) self.assertTrue(rv.load_workers.called) self.assertTrue(rv.load_change_sources.called) self.assertTrue(rv.load_machines.called) self.assertTrue(rv.load_user_managers.called) self.assertTrue(rv.check_single_master.called) 
self.assertTrue(rv.check_schedulers.called) self.assertTrue(rv.check_locks.called) self.assertTrue(rv.check_builders.called) self.assertTrue(rv.check_ports.called) self.assertTrue(rv.check_machines.called) def test_preChangeGenerator(self): cfg = config.MasterConfig() self.assertEqual({ 'author': None, 'files': None, 'comments': None, 'revision': None, 'when_timestamp': None, 'branch': None, 'category': None, 'revlink': '', 'properties': {}, 'repository': '', 'project': '', 'codebase': None}, cfg.preChangeGenerator()) class MasterConfig_loaders(ConfigErrorsMixin, unittest.TestCase): filename = 'test.cfg' def setUp(self): self.cfg = config.MasterConfig() self.errors = config.ConfigErrors() self.patch(config, '_errors', self.errors) # utils def assertResults(self, **expected): self.assertFalse(self.errors, self.errors.errors) got = { attr: getattr(self.cfg, attr) for attr, exp in expected.items()} got = interfaces.IConfigured(got).getConfigDict() expected = interfaces.IConfigured(expected).getConfigDict() self.assertEqual(got, expected) # tests def test_load_global_defaults(self): self.maxDiff = None self.cfg.load_global(self.filename, {}) self.assertResults(**global_defaults) def test_load_global_string_param_not_string(self): self.cfg.load_global(self.filename, dict(title=10)) self.assertConfigError(self.errors, 'must be a string') def test_load_global_int_param_not_int(self): self.cfg.load_global(self.filename, dict(changeHorizon='yes')) self.assertConfigError(self.errors, 'must be an int') def test_load_global_protocols_not_dict(self): self.cfg.load_global(self.filename, dict(protocols="test")) self.assertConfigError(self.errors, "c['protocols'] must be dict") def test_load_global_protocols_key_int(self): self.cfg.load_global(self.filename, dict(protocols={321: {"port": 123}})) self.assertConfigError( self.errors, "c['protocols'] keys must be strings") def test_load_global_protocols_value_not_dict(self): self.cfg.load_global(self.filename, dict(protocols={"pb": 
123})) self.assertConfigError( self.errors, "c['protocols']['pb'] must be a dict") def do_test_load_global(self, config_dict, **expected): self.cfg.load_global(self.filename, config_dict) self.assertResults(**expected) def test_load_global_title(self): self.do_test_load_global(dict(title='hi'), title='hi') def test_load_global_title_too_long(self): with assertProducesWarning(ConfigWarning, message_pattern=r"Title is too long"): self.do_test_load_global(dict(title="Very very very very very long title")) def test_load_global_projectURL(self): self.do_test_load_global(dict(projectName='hey'), title='hey') def test_load_global_titleURL(self): self.do_test_load_global(dict(titleURL='hi'), titleURL='hi') def test_load_global_buildbotURL(self): self.do_test_load_global(dict(buildbotURL='hey'), buildbotURL='hey') def test_load_global_changeHorizon(self): self.do_test_load_global(dict(changeHorizon=10), changeHorizon=10) def test_load_global_changeHorizon_none(self): self.do_test_load_global(dict(changeHorizon=None), changeHorizon=None) def test_load_global_buildbotNetUsageData(self): self.patch(config, "_in_unit_tests", False) with assertProducesWarning( ConfigWarning, message_pattern=r"`buildbotNetUsageData` is not configured and defaults to basic."): self.do_test_load_global( dict()) def test_load_global_logCompressionLimit(self): self.do_test_load_global(dict(logCompressionLimit=10), logCompressionLimit=10) def test_load_global_logCompressionMethod(self): self.do_test_load_global(dict(logCompressionMethod='bz2'), logCompressionMethod='bz2') def test_load_global_logCompressionMethod_invalid(self): self.cfg.load_global(self.filename, dict(logCompressionMethod='foo')) self.assertConfigError( self.errors, "c['logCompressionMethod'] must be 'raw', 'bz2', 'gz' or 'lz4'") def test_load_global_codebaseGenerator(self): func = lambda _: "dummy" self.do_test_load_global(dict(codebaseGenerator=func), codebaseGenerator=func) def test_load_global_codebaseGenerator_invalid(self): 
self.cfg.load_global(self.filename, dict(codebaseGenerator='dummy')) self.assertConfigError(self.errors, "codebaseGenerator must be a callable " "accepting a dict and returning a str") def test_load_global_logMaxSize(self): self.do_test_load_global(dict(logMaxSize=123), logMaxSize=123) def test_load_global_logMaxTailSize(self): self.do_test_load_global(dict(logMaxTailSize=123), logMaxTailSize=123) def test_load_global_logEncoding(self): self.do_test_load_global( dict(logEncoding='latin-2'), logEncoding='latin-2') def test_load_global_properties(self): exp = properties.Properties() exp.setProperty('x', 10, self.filename) self.do_test_load_global(dict(properties=dict(x=10)), properties=exp) def test_load_global_properties_invalid(self): self.cfg.load_global(self.filename, dict(properties='yes')) self.assertConfigError(self.errors, "must be a dictionary") def test_load_global_collapseRequests_bool(self): self.do_test_load_global(dict(collapseRequests=False), collapseRequests=False) def test_load_global_collapseRequests_callable(self): callable = lambda: None self.do_test_load_global(dict(collapseRequests=callable), collapseRequests=callable) def test_load_global_collapseRequests_invalid(self): self.cfg.load_global(self.filename, dict(collapseRequests='yes')) self.assertConfigError(self.errors, "must be a callable, True, or False") def test_load_global_prioritizeBuilders_callable(self): callable = lambda: None self.do_test_load_global(dict(prioritizeBuilders=callable), prioritizeBuilders=callable) def test_load_global_prioritizeBuilders_invalid(self): self.cfg.load_global(self.filename, dict(prioritizeBuilders='yes')) self.assertConfigError(self.errors, "must be a callable") def test_load_global_protocols_str(self): self.do_test_load_global(dict(protocols={'pb': {'port': 'udp:123'}}), protocols={'pb': {'port': 'udp:123'}}) def test_load_global_multiMaster(self): self.do_test_load_global(dict(multiMaster=1), multiMaster=1) def test_load_global_manhole(self): mh = 
mock.Mock(name='manhole') self.do_test_load_global(dict(manhole=mh), manhole=mh) def test_load_global_revlink_callable(self): callable = lambda: None self.do_test_load_global(dict(revlink=callable), revlink=callable) def test_load_global_revlink_invalid(self): self.cfg.load_global(self.filename, dict(revlink='')) self.assertConfigError(self.errors, "must be a callable") def test_load_validation_defaults(self): self.cfg.load_validation(self.filename, {}) self.assertEqual(sorted(self.cfg.validation.keys()), sorted([ 'branch', 'revision', 'property_name', 'property_value', ])) def test_load_validation_invalid(self): self.cfg.load_validation(self.filename, dict(validation='plz')) self.assertConfigError(self.errors, "must be a dictionary") def test_load_validation_unk_keys(self): self.cfg.load_validation(self.filename, dict(validation=dict(users='.*'))) self.assertConfigError(self.errors, "unrecognized validation key(s)") def test_load_validation(self): r = re.compile('.*') self.cfg.load_validation(self.filename, dict(validation=dict(branch=r))) self.assertEqual(self.cfg.validation['branch'], r) # check that defaults are still around self.assertIn('revision', self.cfg.validation) def test_load_db_defaults(self): self.cfg.load_db(self.filename, {}) self.assertResults( db=dict(db_url='sqlite:///state.sqlite')) def test_load_db_db_url(self): self.cfg.load_db(self.filename, dict(db_url='abcd')) self.assertResults(db=dict(db_url='abcd')) def test_load_db_dict(self): self.cfg.load_db(self.filename, {'db': {'db_url': 'abcd'}}) self.assertResults(db=dict(db_url='abcd')) def test_load_db_unk_keys(self): self.cfg.load_db(self.filename, {'db': {'db_url': 'abcd', 'bar': 'bar'}}) self.assertConfigError(self.errors, "unrecognized keys in") def test_load_mq_defaults(self): self.cfg.load_mq(self.filename, {}) self.assertResults(mq=dict(type='simple')) def test_load_mq_explicit_type(self): self.cfg.load_mq(self.filename, dict(mq=dict(type='simple'))) 
self.assertResults(mq=dict(type='simple')) def test_load_mq_unk_type(self): self.cfg.load_mq(self.filename, dict(mq=dict(type='foo'))) self.assertConfigError(self.errors, "mq type 'foo' is not known") def test_load_mq_unk_keys(self): self.cfg.load_mq(self.filename, dict(mq=dict(bar='bar'))) self.assertConfigError(self.errors, "unrecognized keys in") def test_load_metrics_defaults(self): self.cfg.load_metrics(self.filename, {}) self.assertResults(metrics=None) def test_load_metrics_invalid(self): self.cfg.load_metrics(self.filename, dict(metrics=13)) self.assertConfigError(self.errors, "must be a dictionary") def test_load_metrics(self): self.cfg.load_metrics(self.filename, dict(metrics=dict(foo=1))) self.assertResults(metrics=dict(foo=1)) def test_load_caches_defaults(self): self.cfg.load_caches(self.filename, {}) self.assertResults(caches=dict(Changes=10, Builds=15)) def test_load_caches_invalid(self): self.cfg.load_caches(self.filename, dict(caches=13)) self.assertConfigError(self.errors, "must be a dictionary") def test_load_caches_buildCacheSize(self): self.cfg.load_caches(self.filename, dict(buildCacheSize=13)) self.assertResults(caches=dict(Builds=13, Changes=10)) def test_load_caches_buildCacheSize_and_caches(self): self.cfg.load_caches(self.filename, dict(buildCacheSize=13, caches=dict(builds=11))) self.assertConfigError(self.errors, "cannot specify") def test_load_caches_changeCacheSize(self): self.cfg.load_caches(self.filename, dict(changeCacheSize=13)) self.assertResults(caches=dict(Changes=13, Builds=15)) def test_load_caches_changeCacheSize_and_caches(self): self.cfg.load_caches(self.filename, dict(changeCacheSize=13, caches=dict(changes=11))) self.assertConfigError(self.errors, "cannot specify") def test_load_caches(self): self.cfg.load_caches(self.filename, dict(caches=dict(foo=1))) self.assertResults(caches=dict(Changes=10, Builds=15, foo=1)) def test_load_caches_not_int_err(self): """ Test that non-integer cache sizes are not allowed. 
""" self.cfg.load_caches(self.filename, dict(caches=dict(foo="1"))) self.assertConfigError(self.errors, "value for cache size 'foo' must be an integer") def test_load_caches_to_small_err(self): """ Test that cache sizes less then 1 are not allowed. """ self.cfg.load_caches(self.filename, dict(caches=dict(Changes=-12))) self.assertConfigError(self.errors, "'Changes' cache size must be at least 1, got '-12'") def test_load_schedulers_defaults(self): self.cfg.load_schedulers(self.filename, {}) self.assertResults(schedulers={}) def test_load_schedulers_not_list(self): self.cfg.load_schedulers(self.filename, dict(schedulers=dict())) self.assertConfigError(self.errors, "must be a list of") def test_load_schedulers_not_instance(self): self.cfg.load_schedulers(self.filename, dict(schedulers=[mock.Mock()])) self.assertConfigError(self.errors, "must be a list of") def test_load_schedulers_dupe(self): sch1 = FakeScheduler(name='sch') sch2 = FakeScheduler(name='sch') self.cfg.load_schedulers(self.filename, dict(schedulers=[sch1, sch2])) self.assertConfigError(self.errors, "scheduler name 'sch' used multiple times") def test_load_schedulers(self): sch = schedulers_base.BaseScheduler('sch', [""]) self.cfg.load_schedulers(self.filename, dict(schedulers=[sch])) self.assertResults(schedulers=dict(sch=sch)) def test_load_builders_defaults(self): self.cfg.load_builders(self.filename, {}) self.assertResults(builders=[]) def test_load_builders_not_list(self): self.cfg.load_builders(self.filename, dict(builders=dict())) self.assertConfigError(self.errors, "must be a list") def test_load_builders_not_instance(self): self.cfg.load_builders(self.filename, dict(builders=[mock.Mock()])) self.assertConfigError( self.errors, "is not a builder config (in c['builders']") def test_load_builders(self): bldr = config.BuilderConfig(name='x', factory=factory.BuildFactory(), workername='x') self.cfg.load_builders(self.filename, dict(builders=[bldr])) self.assertResults(builders=[bldr]) def 
test_load_builders_dict(self): bldr = dict(name='x', factory=factory.BuildFactory(), workername='x') self.cfg.load_builders(self.filename, dict(builders=[bldr])) self.assertIsInstance(self.cfg.builders[0], config.BuilderConfig) self.assertEqual(self.cfg.builders[0].name, 'x') def test_load_builders_abs_builddir(self): bldr = dict(name='x', factory=factory.BuildFactory(), workername='x', builddir=os.path.abspath('.')) self.cfg.load_builders(self.filename, dict(builders=[bldr])) self.assertEqual( len(self.flushWarnings([self.cfg.load_builders])), 1) def test_load_workers_defaults(self): self.cfg.load_workers(self.filename, {}) self.assertResults(workers=[]) def test_load_workers_not_list(self): self.cfg.load_workers(self.filename, dict(workers=dict())) self.assertConfigError(self.errors, "must be a list") def test_load_workers_not_instance(self): self.cfg.load_workers(self.filename, dict(workers=[mock.Mock()])) self.assertConfigError(self.errors, "must be a list of") def test_load_workers_reserved_names(self): for name in 'debug', 'change', 'status': self.cfg.load_workers(self.filename, dict(workers=[worker.Worker(name, 'x')])) self.assertConfigError(self.errors, "is reserved") self.errors.errors[:] = [] # clear out the errors def test_load_workers_not_identifiers(self): for name in ("123 no initial digits", "spaces not allowed", 'a/b', "a.b.c.d", "a-b_c.d9",): self.cfg.load_workers(self.filename, dict(workers=[worker.Worker(name, 'x')])) self.assertConfigError(self.errors, "is not an identifier") self.errors.errors[:] = [] # clear out the errors def test_load_workers_too_long(self): name = "a" * 51 self.cfg.load_workers(self.filename, dict(workers=[worker.Worker(name, 'x')])) self.assertConfigError(self.errors, "is longer than") self.errors.errors[:] = [] # clear out the errors def test_load_workers_empty(self): name = "" self.cfg.load_workers(self.filename, dict(workers=[worker.Worker(name, 'x')])) self.errors.errors[:] = self.errors.errors[ 1:2] # only get 
necessary error self.assertConfigError(self.errors, "cannot be an empty string") self.errors.errors[:] = [] # clear out the errors def test_load_workers(self): wrk = worker.Worker('foo', 'x') self.cfg.load_workers(self.filename, dict(workers=[wrk])) self.assertResults(workers=[wrk]) def test_load_change_sources_defaults(self): self.cfg.load_change_sources(self.filename, {}) self.assertResults(change_sources=[]) def test_load_change_sources_not_instance(self): self.cfg.load_change_sources(self.filename, dict(change_source=[mock.Mock()])) self.assertConfigError(self.errors, "must be a list of") def test_load_change_sources_single(self): chsrc = FakeChangeSource() self.cfg.load_change_sources(self.filename, dict(change_source=chsrc)) self.assertResults(change_sources=[chsrc]) def test_load_change_sources_list(self): chsrc = FakeChangeSource() self.cfg.load_change_sources(self.filename, dict(change_source=[chsrc])) self.assertResults(change_sources=[chsrc]) def test_load_machines_defaults(self): self.cfg.load_machines(self.filename, {}) self.assertResults(machines=[]) def test_load_machines_not_instance(self): self.cfg.load_machines(self.filename, dict(machines=[mock.Mock()])) self.assertConfigError(self.errors, "must be a list of") def test_load_machines_single(self): mm = FakeMachine(name='a') self.cfg.load_machines(self.filename, dict(machines=mm)) self.assertConfigError(self.errors, "must be a list of") def test_load_machines_list(self): mm = FakeMachine() self.cfg.load_machines(self.filename, dict(machines=[mm])) self.assertResults(machines=[mm]) def test_load_user_managers_defaults(self): self.cfg.load_user_managers(self.filename, {}) self.assertResults(user_managers=[]) def test_load_user_managers_not_list(self): self.cfg.load_user_managers(self.filename, dict(user_managers='foo')) self.assertConfigError(self.errors, "must be a list") def test_load_user_managers(self): um = mock.Mock() self.cfg.load_user_managers(self.filename, dict(user_managers=[um])) 
self.assertResults(user_managers=[um]) def test_load_www_default(self): self.cfg.load_www(self.filename, {}) self.assertResults(www=dict(port=None, plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http.log')) def test_load_www_port(self): self.cfg.load_www(self.filename, dict(www=dict(port=9888))) self.assertResults(www=dict(port=9888, plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http.log')) def test_load_www_plugin(self): self.cfg.load_www(self.filename, dict(www=dict(plugins={'waterfall': {'foo': 'bar'}}))) self.assertResults(www=dict(port=None, plugins={'waterfall': {'foo': 'bar'}}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http.log')) def test_load_www_allowed_origins(self): self.cfg.load_www(self.filename, dict(www=dict(allowed_origins=['a', 'b']))) self.assertResults(www=dict(port=None, allowed_origins=['a', 'b'], plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http.log')) def test_load_www_logfileName(self): self.cfg.load_www(self.filename, dict(www=dict(logfileName='http-access.log'))) self.assertResults(www=dict(port=None, plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http-access.log')) def test_load_www_versions(self): custom_versions = [ ('Test Custom Component', '0.0.1'), ('Test Custom Component 2', '0.1.0'), ] self.cfg.load_www( self.filename, {'www': dict(versions=custom_versions)}) self.assertResults(www=dict(port=None, plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, versions=custom_versions, logfileName='http.log')) def test_load_www_versions_not_list(self): custom_versions = { 'Test Custom Component': '0.0.1', 'Test Custom Component 2': '0.0.2', } self.cfg.load_www( self.filename, {'www': dict(versions=custom_versions)}) self.assertConfigError( self.errors, 'Invalid www 
configuration value of versions') def test_load_www_versions_value_invalid(self): custom_versions = [('a', '1'), 'abc', ('b',)] self.cfg.load_www( self.filename, {'www': dict(versions=custom_versions)}) self.assertConfigError( self.errors, 'Invalid www configuration value of versions') def test_load_www_cookie_expiration_time_not_timedelta(self): self.cfg.load_www( self.filename, {'www': dict(cookie_expiration_time=1)}) self.assertConfigError( self.errors, 'Invalid www["cookie_expiration_time"]') def test_load_www_unknown(self): self.cfg.load_www(self.filename, dict(www=dict(foo="bar"))) self.assertConfigError(self.errors, "unknown www configuration parameter(s) foo") def test_load_services_nominal(self): class MyService(service.BuildbotService): def reconfigService(foo=None): self.foo = foo myService = MyService(foo="bar", name="foo") self.cfg.load_services(self.filename, dict( services=[myService])) self.assertResults(services={"foo": myService}) def test_load_services_badservice(self): class MyService: pass myService = MyService() self.cfg.load_services(self.filename, dict( services=[myService])) errMsg = (".MyService'> ") errMsg += "object should be an instance of buildbot.util.service.BuildbotService" self.assertConfigError(self.errors, errMsg) def test_load_services_duplicate(self): class MyService(service.BuildbotService): name = 'myservice' def reconfigService(self, x=None): self.x = x self.cfg.load_services(self.filename, dict( services=[MyService(x='a'), MyService(x='b')])) self.assertConfigError( self.errors, 'Duplicate service name %r' % MyService.name) def test_load_configurators_norminal(self): class MyConfigurator(configurators.ConfiguratorBase): def configure(self, config_dict): config_dict['foo'] = 'bar' c = dict(configurators=[MyConfigurator()]) self.cfg.run_configurators(self.filename, c) self.assertEqual(c['foo'], 'bar') class MasterConfig_checkers(ConfigErrorsMixin, unittest.TestCase): def setUp(self): self.cfg = config.MasterConfig() 
self.errors = config.ConfigErrors() self.patch(config, '_errors', self.errors) # utils def setup_basic_attrs(self): # set up a basic config for checking; this will be modified below sch = mock.Mock() sch.name = 'sch' sch.listBuilderNames = lambda: ['b1', 'b2'] b1 = mock.Mock() b1.name = 'b1' b2 = mock.Mock() b2.name = 'b2' self.cfg.schedulers = dict(sch=sch) self.cfg.workers = [mock.Mock()] self.cfg.builders = [b1, b2] def setup_builder_locks(self, builder_lock=None, dup_builder_lock=False, bare_builder_lock=False): """Set-up two mocked builders with specified locks. @type builder_lock: string or None @param builder_lock: Name of the lock to add to first builder. If None, no lock is added. @type dup_builder_lock: boolean @param dup_builder_lock: if True, add a lock with duplicate name to the second builder @type dup_builder_lock: boolean @param bare_builder_lock: if True, add bare lock objects, don't wrap them into locks.LockAccess object """ def bldr(name): b = mock.Mock() b.name = name b.locks = [] b.factory.steps = [('cls', (), dict(locks=[]))] return b def lock(name): lock = locks.MasterLock(name) if bare_builder_lock: return lock return locks.LockAccess(lock, "counting", count=1) b1, b2 = bldr('b1'), bldr('b2') self.cfg.builders = [b1, b2] if builder_lock: b1.locks.append(lock(builder_lock)) if dup_builder_lock: b2.locks.append(lock(builder_lock)) # tests def test_check_single_master_multimaster(self): self.cfg.multiMaster = True self.cfg.check_single_master() self.assertNoConfigErrors(self.errors) def test_check_single_master_no_builders(self): self.setup_basic_attrs() self.cfg.builders = [] self.cfg.check_single_master() self.assertConfigError(self.errors, "no builders are configured") def test_check_single_master_no_workers(self): self.setup_basic_attrs() self.cfg.workers = [] self.cfg.check_single_master() self.assertConfigError(self.errors, "no workers are configured") def test_check_single_master_unsch_builder(self): self.setup_basic_attrs() b3 = 
mock.Mock() b3.name = 'b3' self.cfg.builders.append(b3) self.cfg.check_single_master() self.assertConfigError(self.errors, "have no schedulers to drive them") def test_check_single_master_renderable_builderNames(self): self.setup_basic_attrs() b3 = mock.Mock() b3.name = 'b3' self.cfg.builders.append(b3) sch2 = mock.Mock() sch2.listBuilderNames = lambda: properties.Interpolate('%(prop:foo)s') self.cfg.schedulers['sch2'] = sch2 self.cfg.check_single_master() self.assertNoConfigErrors(self.errors) def test_check_schedulers_unknown_builder(self): self.setup_basic_attrs() del self.cfg.builders[1] # remove b2, leaving b1 self.cfg.check_schedulers() self.assertConfigError(self.errors, "Unknown builder 'b2'") def test_check_schedulers_ignored_in_multiMaster(self): self.setup_basic_attrs() del self.cfg.builders[1] # remove b2, leaving b1 self.cfg.multiMaster = True self.cfg.check_schedulers() self.assertNoConfigErrors(self.errors) def test_check_schedulers_renderable_builderNames(self): self.setup_basic_attrs() sch2 = mock.Mock() sch2.listBuilderNames = lambda: properties.Interpolate('%(prop:foo)s') self.cfg.schedulers['sch2'] = sch2 self.cfg.check_schedulers() self.assertNoConfigErrors(self.errors) def test_check_schedulers(self): self.setup_basic_attrs() self.cfg.check_schedulers() self.assertNoConfigErrors(self.errors) def test_check_locks_dup_builder_lock(self): self.setup_builder_locks(builder_lock='l', dup_builder_lock=True) self.cfg.check_locks() self.assertConfigError(self.errors, "Two locks share") def test_check_locks(self): self.setup_builder_locks(builder_lock='bl') self.cfg.check_locks() self.assertNoConfigErrors(self.errors) def test_check_locks_none(self): # no locks in the whole config, should be fine self.setup_builder_locks() self.cfg.check_locks() self.assertNoConfigErrors(self.errors) def test_check_locks_bare(self): # check_locks() should be able to handle bare lock object, # lock objects that are not wrapped into LockAccess() object 
self.setup_builder_locks(builder_lock='oldlock', bare_builder_lock=True) self.cfg.check_locks() self.assertNoConfigErrors(self.errors) def test_check_builders_unknown_worker(self): wrk = mock.Mock() wrk.workername = 'xyz' self.cfg.workers = [wrk] b1 = FakeBuilder(workernames=['xyz', 'abc'], builddir='x', name='b1') self.cfg.builders = [b1] self.cfg.check_builders() self.assertConfigError(self.errors, "builder 'b1' uses unknown workers 'abc'") def test_check_builders_duplicate_name(self): b1 = FakeBuilder(workernames=[], name='b1', builddir='1') b2 = FakeBuilder(workernames=[], name='b1', builddir='2') self.cfg.builders = [b1, b2] self.cfg.check_builders() self.assertConfigError(self.errors, "duplicate builder name 'b1'") def test_check_builders_duplicate_builddir(self): b1 = FakeBuilder(workernames=[], name='b1', builddir='dir') b2 = FakeBuilder(workernames=[], name='b2', builddir='dir') self.cfg.builders = [b1, b2] self.cfg.check_builders() self.assertConfigError(self.errors, "duplicate builder builddir 'dir'") def test_check_builders(self): wrk = mock.Mock() wrk.workername = 'a' self.cfg.workers = [wrk] b1 = FakeBuilder(workernames=['a'], name='b1', builddir='dir1') b2 = FakeBuilder(workernames=['a'], name='b2', builddir='dir2') self.cfg.builders = [b1, b2] self.cfg.check_builders() self.assertNoConfigErrors(self.errors) def test_check_ports_protocols_set(self): self.cfg.protocols = {"pb": {"port": 10}} self.cfg.check_ports() self.assertNoConfigErrors(self.errors) def test_check_ports_protocols_not_set_workers(self): self.cfg.workers = [mock.Mock()] self.cfg.check_ports() self.assertConfigError(self.errors, "workers are configured, but c['protocols'] not") def test_check_ports_protocols_port_duplication(self): self.cfg.protocols = {"pb": {"port": 123}, "amp": {"port": 123}} self.cfg.check_ports() self.assertConfigError(self.errors, "Some of ports in c['protocols'] duplicated") def test_check_machines_unknown_name(self): self.cfg.workers = [ FakeWorker(name='wa', 
machine_name='unk') ] self.cfg.machines = [ FakeMachine(name='a') ] self.cfg.check_machines() self.assertConfigError(self.errors, 'uses unknown machine') def test_check_machines_duplicate_name(self): self.cfg.machines = [ FakeMachine(name='a'), FakeMachine(name='a') ] self.cfg.check_machines() self.assertConfigError(self.errors, 'duplicate machine name') class MasterConfig_old_worker_api(unittest.TestCase): filename = "test.cfg" def setUp(self): self.cfg = config.MasterConfig() def test_workers_new_api(self): with assertNotProducesWarnings(DeprecatedApiWarning): self.assertEqual(self.cfg.workers, []) class BuilderConfig(ConfigErrorsMixin, unittest.TestCase): factory = factory.BuildFactory() # utils def assertAttributes(self, cfg, **expected): got = { attr: getattr(cfg, attr) for attr, exp in expected.items()} self.assertEqual(got, expected) # tests def test_no_name(self): with self.assertRaisesConfigError("builder's name is required"): config.BuilderConfig(factory=self.factory, workernames=['a']) def test_reserved_name(self): with self.assertRaisesConfigError( "builder names must not start with an underscore: '_a'"): config.BuilderConfig(name='_a', factory=self.factory, workernames=['a']) def test_utf8_name(self): with self.assertRaisesConfigError( "builder names must be unicode or ASCII"): config.BuilderConfig(name="\N{SNOWMAN}".encode('utf-8'), factory=self.factory, workernames=['a']) def test_no_factory(self): with self.assertRaisesConfigError("builder 'a' has no factory"): config.BuilderConfig(name='a', workernames=['a']) def test_wrong_type_factory(self): with self.assertRaisesConfigError("builder 'a's factory is not"): config.BuilderConfig(factory=[], name='a', workernames=['a']) def test_no_workernames(self): with self.assertRaisesConfigError( "builder 'a': at least one workername is required"): config.BuilderConfig(name='a', factory=self.factory) def test_bogus_workernames(self): with self.assertRaisesConfigError( "workernames must be a list or a string"): 
config.BuilderConfig(name='a', workernames={1: 2}, factory=self.factory) def test_bogus_workername(self): with self.assertRaisesConfigError("workername must be a string"): config.BuilderConfig(name='a', workername=1, factory=self.factory) def test_tags_must_be_list(self): with self.assertRaisesConfigError("tags must be a list"): config.BuilderConfig(tags='abc', name='a', workernames=['a'], factory=self.factory) def test_tags_must_be_list_of_str(self): with self.assertRaisesConfigError( "tags list contains something that is not a string"): config.BuilderConfig(tags=['abc', 13], name='a', workernames=['a'], factory=self.factory) def test_tags_no_tag_dupes(self): with self.assertRaisesConfigError( "builder 'a': tags list contains duplicate tags: abc"): config.BuilderConfig(tags=['abc', 'bca', 'abc'], name='a', workernames=['a'], factory=self.factory) def test_inv_nextWorker(self): with self.assertRaisesConfigError("nextWorker must be a callable"): config.BuilderConfig(nextWorker="foo", name="a", workernames=['a'], factory=self.factory) def test_inv_nextBuild(self): with self.assertRaisesConfigError("nextBuild must be a callable"): config.BuilderConfig(nextBuild="foo", name="a", workernames=['a'], factory=self.factory) def test_inv_canStartBuild(self): with self.assertRaisesConfigError("canStartBuild must be a callable"): config.BuilderConfig(canStartBuild="foo", name="a", workernames=['a'], factory=self.factory) def test_inv_env(self): with self.assertRaisesConfigError("builder's env must be a dictionary"): config.BuilderConfig(env="foo", name="a", workernames=['a'], factory=self.factory) def test_defaults(self): cfg = config.BuilderConfig( name='a b c', workername='a', factory=self.factory) self.assertIdentical(cfg.factory, self.factory) self.assertAttributes(cfg, name='a b c', workernames=['a'], builddir='a_b_c', workerbuilddir='a_b_c', tags=[], nextWorker=None, locks=[], env={}, properties={}, collapseRequests=None, description=None) def test_unicode_name(self): 
cfg = config.BuilderConfig( name='a \N{SNOWMAN} c', workername='a', factory=self.factory) self.assertIdentical(cfg.factory, self.factory) self.assertAttributes(cfg, name='a \N{SNOWMAN} c') def test_args(self): cfg = config.BuilderConfig( name='b', workername='s1', workernames='s2', builddir='bd', workerbuilddir='wbd', factory=self.factory, tags=['c'], nextWorker=lambda: 'ns', nextBuild=lambda: 'nb', locks=['l'], env=dict(x=10), properties=dict(y=20), collapseRequests='cr', description='buzz') self.assertIdentical(cfg.factory, self.factory) self.assertAttributes(cfg, name='b', workernames=['s2', 's1'], builddir='bd', workerbuilddir='wbd', tags=['c'], locks=['l'], env={'x': 10}, properties={'y': 20}, collapseRequests='cr', description='buzz') def test_getConfigDict(self): ns = lambda: 'ns' nb = lambda: 'nb' cfg = config.BuilderConfig( name='b', workername='s1', workernames='s2', builddir='bd', workerbuilddir='wbd', factory=self.factory, tags=['c'], nextWorker=ns, nextBuild=nb, locks=['l'], env=dict(x=10), properties=dict(y=20), collapseRequests='cr', description='buzz') self.assertEqual(cfg.getConfigDict(), {'builddir': 'bd', 'tags': ['c'], 'description': 'buzz', 'env': {'x': 10}, 'factory': self.factory, 'locks': ['l'], 'collapseRequests': 'cr', 'name': 'b', 'nextBuild': nb, 'nextWorker': ns, 'properties': {'y': 20}, 'workerbuilddir': 'wbd', 'workernames': ['s2', 's1'], }) def test_getConfigDict_collapseRequests(self): for cr in (False, lambda a, b, c: False): cfg = config.BuilderConfig(name='b', collapseRequests=cr, factory=self.factory, workername='s1') self.assertEqual(cfg.getConfigDict(), {'builddir': 'b', 'collapseRequests': cr, 'name': 'b', 'workerbuilddir': 'b', 'factory': self.factory, 'workernames': ['s1'], }) def test_init_workername_keyword(self): cfg = config.BuilderConfig(name='a b c', workername='a', factory=self.factory) self.assertEqual(cfg.workernames, ['a']) def test_init_workername_positional(self): with 
assertNotProducesWarnings(DeprecatedApiWarning): cfg = config.BuilderConfig( 'a b c', 'a', factory=self.factory) self.assertEqual(cfg.workernames, ['a']) def test_init_workernames_keyword(self): cfg = config.BuilderConfig(name='a b c', workernames=['a'], factory=self.factory) self.assertEqual(cfg.workernames, ['a']) def test_init_workernames_positional(self): with assertNotProducesWarnings(DeprecatedApiWarning): cfg = config.BuilderConfig( 'a b c', None, ['a'], factory=self.factory) self.assertEqual(cfg.workernames, ['a']) def test_init_workerbuilddir_keyword(self): cfg = config.BuilderConfig( name='a b c', workername='a', factory=self.factory, workerbuilddir="dir") self.assertEqual(cfg.workerbuilddir, 'dir') def test_init_workerbuilddir_positional(self): with assertNotProducesWarnings(DeprecatedApiWarning): cfg = config.BuilderConfig( 'a b c', 'a', None, None, 'dir', factory=self.factory) self.assertEqual(cfg.workerbuilddir, 'dir') def test_init_next_worker_keyword(self): f = lambda: None cfg = config.BuilderConfig( name='a b c', workername='a', factory=self.factory, nextWorker=f) self.assertEqual(cfg.nextWorker, f) class FakeService(service.ReconfigurableServiceMixin, service.AsyncService): succeed = True call_index = 1 @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): self.called = FakeService.call_index FakeService.call_index += 1 yield super().reconfigServiceWithBuildbotConfig(new_config) if not self.succeed: raise ValueError("oh noes") class FakeMultiService(service.ReconfigurableServiceMixin, service.AsyncMultiService): def reconfigServiceWithBuildbotConfig(self, new_config): self.called = True d = super().reconfigServiceWithBuildbotConfig(new_config) return d class ReconfigurableServiceMixin(unittest.TestCase): @defer.inlineCallbacks def test_service(self): svc = FakeService() yield svc.reconfigServiceWithBuildbotConfig(mock.Mock()) self.assertTrue(svc.called) @defer.inlineCallbacks def test_service_failure(self): svc = 
FakeService() svc.succeed = False try: yield svc.reconfigServiceWithBuildbotConfig(mock.Mock()) except ValueError: pass else: self.fail("should have raised ValueError") @defer.inlineCallbacks def test_multiservice(self): svc = FakeMultiService() ch1 = FakeService() yield ch1.setServiceParent(svc) ch2 = FakeMultiService() yield ch2.setServiceParent(svc) ch3 = FakeService() yield ch3.setServiceParent(ch2) yield svc.reconfigServiceWithBuildbotConfig(mock.Mock()) self.assertTrue(svc.called) self.assertTrue(ch1.called) self.assertTrue(ch2.called) self.assertTrue(ch3.called) @defer.inlineCallbacks def test_multiservice_priority(self): parent = FakeMultiService() svc128 = FakeService() yield svc128.setServiceParent(parent) services = [svc128] for i in range(20, 1, -1): svc = FakeService() svc.reconfig_priority = i yield svc.setServiceParent(parent) services.append(svc) yield parent.reconfigServiceWithBuildbotConfig(mock.Mock()) prio_order = [s.called for s in services] called_order = sorted(prio_order) self.assertEqual(prio_order, called_order) @defer.inlineCallbacks def test_multiservice_nested_failure(self): svc = FakeMultiService() ch1 = FakeService() yield ch1.setServiceParent(svc) ch1.succeed = False try: yield svc.reconfigServiceWithBuildbotConfig(mock.Mock()) except ValueError: pass else: self.fail("should have raised ValueError") buildbot-3.4.0/master/buildbot/test/unit/test_configurator_base.py000066400000000000000000000023531413250514000254620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.configurators import ConfiguratorBase from buildbot.test.util import configurators class ConfiguratorBaseTests(configurators.ConfiguratorMixin, unittest.SynchronousTestCase): ConfiguratorClass = ConfiguratorBase def test_basic(self): self.setupConfigurator() self.assertEqual(self.config_dict, { 'schedulers': [], 'protocols': {}, 'workers': [], 'builders': [] }) self.assertEqual(self.configurator.workers, []) buildbot-3.4.0/master/buildbot/test/unit/test_contrib_buildbot_cvs_mail.py000066400000000000000000000203561413250514000271720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import re import sys from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from twisted.internet import utils from twisted.python import log from twisted.trial import unittest from buildbot.test.util.misc import encodeExecutableAndArgs test = ''' Update of /cvsroot/test In directory example:/tmp/cvs-serv21085 Modified Files: README hello.c Log Message: two files checkin ''' golden_1_11_regex = [ '^From:', '^To: buildbot@example.com$', '^Reply-To: noreply@example.com$', '^Subject: cvs update for project test$', '^Date:', '^X-Mailer: Python buildbot-cvs-mail', '^$', '^Cvsmode: 1.11$', '^Category: None', '^CVSROOT: \"ext:example:/cvsroot\"', '^Files: test README 1.1,1.2 hello.c 2.2,2.3$', '^Project: test$', '^$', '^Update of /cvsroot/test$', '^In directory example:/tmp/cvs-serv21085$', '^$', '^Modified Files:$', 'README hello.c$', 'Log Message:$', '^two files checkin', '^$', '^$'] golden_1_12_regex = [ '^From: ', '^To: buildbot@example.com$', '^Reply-To: noreply@example.com$', '^Subject: cvs update for project test$', '^Date: ', '^X-Mailer: Python buildbot-cvs-mail', '^$', '^Cvsmode: 1.12$', '^Category: None$', '^CVSROOT: \"ext:example.com:/cvsroot\"$', '^Files: README 1.1 1.2 hello.c 2.2 2.3$', '^Path: test$', '^Project: test$', '^$', '^Update of /cvsroot/test$', '^In directory example:/tmp/cvs-serv21085$', '^$', '^Modified Files:$', 'README hello.c$', '^Log Message:$', 'two files checkin', '^$', '^$'] class _SubprocessProtocol(protocol.ProcessProtocol): def __init__(self, input, deferred): if isinstance(input, str): input = input.encode('utf-8') self.input = input self.deferred = deferred self.output = b'' def outReceived(self, s): self.output += s errReceived = outReceived def connectionMade(self): # push the input and send EOF self.transport.write(self.input) self.transport.closeStdin() def processEnded(self, reason): self.deferred.callback((self.output, 
reason.value.exitCode)) def getProcessOutputAndValueWithInput(executable, args, input): "similar to getProcessOutputAndValue, but also allows injection of input on stdin" d = defer.Deferred() p = _SubprocessProtocol(input, d) (executable, args) = encodeExecutableAndArgs(executable, args) reactor.spawnProcess(p, executable, (executable,) + tuple(args)) return d class TestBuildbotCvsMail(unittest.TestCase): buildbot_cvs_mail_path = os.path.abspath( os.path.join(os.path.dirname(__file__), '../../../contrib/buildbot_cvs_mail.py')) if not os.path.exists(buildbot_cvs_mail_path): skip = ("'{}' does not exist (normal unless run from git)".format(buildbot_cvs_mail_path)) def assertOutputOk(self, result, regexList): "assert that the output from getProcessOutputAndValueWithInput matches expectations" (output, code) = result if isinstance(output, bytes): output = output.decode("utf-8") try: self.assertEqual(code, 0, "subprocess exited uncleanly") lines = output.splitlines() self.assertEqual(len(lines), len(regexList), "got wrong number of lines of output") misses = [] for line, regex in zip(lines, regexList): m = re.search(regex, line) if not m: misses.append((regex, line)) self.assertEqual(misses, [], "got non-matching lines") except Exception: log.msg("got output:\n" + output) raise def test_buildbot_cvs_mail_from_cvs1_11(self): # Simulate CVS 1.11 executable = sys.executable args = [self.buildbot_cvs_mail_path, '--cvsroot=\"ext:example:/cvsroot\"', '--email=buildbot@example.com', '-P', 'test', '-R', 'noreply@example.com', '-t', 'test', 'README', '1.1,1.2', 'hello.c', '2.2,2.3'] (executable, args) = encodeExecutableAndArgs(executable, args) d = getProcessOutputAndValueWithInput(executable, args, input=test) d.addCallback(self.assertOutputOk, golden_1_11_regex) return d def test_buildbot_cvs_mail_from_cvs1_12(self): # Simulate CVS 1.12, with --path option executable = sys.executable args = [self.buildbot_cvs_mail_path, '--cvsroot=\"ext:example.com:/cvsroot\"', 
'--email=buildbot@example.com', '-P', 'test', '--path', 'test', '-R', 'noreply@example.com', '-t', 'README', '1.1', '1.2', 'hello.c', '2.2', '2.3'] (executable, args) = encodeExecutableAndArgs(executable, args) d = getProcessOutputAndValueWithInput(executable, args, input=test) d.addCallback(self.assertOutputOk, golden_1_12_regex) return d def test_buildbot_cvs_mail_no_args_exits_with_error(self): executable = sys.executable args = [self.buildbot_cvs_mail_path] (executable, args) = encodeExecutableAndArgs(executable, args) d = utils.getProcessOutputAndValue(executable, args) def check(result): (stdout, stderr, code) = result self.assertEqual(code, 2) d.addCallback(check) return d def test_buildbot_cvs_mail_without_email_opt_exits_with_error(self): executable = sys.executable args = [self.buildbot_cvs_mail_path, '--cvsroot=\"ext:example.com:/cvsroot\"', '-P', 'test', '--path', 'test', '-R', 'noreply@example.com', '-t', 'README', '1.1', '1.2', 'hello.c', '2.2', '2.3'] (executable, args) = encodeExecutableAndArgs(executable, args) d = utils.getProcessOutputAndValue(executable, args) def check(result): (stdout, stderr, code) = result self.assertEqual(code, 2) d.addCallback(check) return d def test_buildbot_cvs_mail_without_cvsroot_opt_exits_with_error(self): executable = sys.executable args = [self.buildbot_cvs_mail_path, '--complete-garbage-opt=gomi', '--cvsroot=\"ext:example.com:/cvsroot\"', '--email=buildbot@example.com', '-P', 'test', '--path', 'test', '-R', 'noreply@example.com', '-t', 'README', '1.1', '1.2', 'hello.c', '2.2', '2.3'] (executable, args) = encodeExecutableAndArgs(executable, args) d = utils.getProcessOutputAndValue(executable, args) def check(result): (stdout, stderr, code) = result self.assertEqual(code, 2) d.addCallback(check) return d def test_buildbot_cvs_mail_with_unknown_opt_exits_with_error(self): executable = sys.executable args = [self.buildbot_cvs_mail_path, '--email=buildbot@example.com', '-P', 'test', '--path', 'test', '-R', 
'noreply@example.com', '-t', 'README', '1.1', '1.2', 'hello.c', '2.2', '2.3'] (executable, args) = encodeExecutableAndArgs(executable, args) d = utils.getProcessOutputAndValue(executable, args) def check(result): (stdout, stderr, code) = result self.assertEqual(code, 2) d.addCallback(check) return d buildbot-3.4.0/master/buildbot/test/unit/test_download_secret_to_worker.py000066400000000000000000000134151413250514000272360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import stat from twisted.python.filepath import FilePath from twisted.trial import unittest from buildbot.process import remotetransfer from buildbot.process.results import SUCCESS from buildbot.steps.download_secret_to_worker import DownloadSecretsToWorker from buildbot.steps.download_secret_to_worker import RemoveWorkerFileSecret from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.util import config as configmixin from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestDownloadFileSecretToWorkerCommand(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() tempdir = FilePath(self.mktemp()) tempdir.createDirectory() self.temp_path = tempdir.path return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def testBasic(self): self.setupStep( DownloadSecretsToWorker([(os.path.join(self.temp_path, "pathA"), "something"), (os.path.join(self.temp_path, "pathB"), "something more")])) args1 = { 'maxsize': None, 'mode': stat.S_IRUSR | stat.S_IWUSR, 'reader': ExpectRemoteRef(remotetransfer.StringFileReader), 'blocksize': 32 * 1024, 'workerdest': os.path.join(self.temp_path, "pathA"), 'workdir': "wkdir" } args2 = { 'maxsize': None, 'mode': stat.S_IRUSR | stat.S_IWUSR, 'reader': ExpectRemoteRef(remotetransfer.StringFileReader), 'blocksize': 32 * 1024, 'workerdest': os.path.join(self.temp_path, "pathB"), 'workdir': "wkdir" } self.expectCommands( Expect('downloadFile', args1) + 0, Expect('downloadFile', args2) + 0, ) self.expectOutcome( result=SUCCESS, state_string="finished") d = self.runStep() return d class TestRemoveWorkerFileSecretCommand30(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() tempdir = FilePath(self.mktemp()) tempdir.createDirectory() self.temp_path = tempdir.path return 
self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def testBasic(self): self.setupStep(RemoveWorkerFileSecret([(os.path.join(self.temp_path, "pathA"), "something"), (os.path.join(self.temp_path, "pathB"), "somethingmore")]), worker_version={'*': '3.0'}) args1 = { 'path': os.path.join(self.temp_path, "pathA"), 'dir': os.path.abspath(os.path.join(self.temp_path, "pathA")), 'logEnviron': False } args2 = { 'path': os.path.join(self.temp_path, "pathB"), 'dir': os.path.abspath(os.path.join(self.temp_path, "pathB")), 'logEnviron': False } self.expectCommands( Expect('rmdir', args1) + 0, Expect('rmdir', args2) + 0, ) self.expectOutcome( result=SUCCESS, state_string="finished") d = self.runStep() return d class TestRemoveFileSecretToWorkerCommand(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() tempdir = FilePath(self.mktemp()) tempdir.createDirectory() self.temp_path = tempdir.path return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def testBasic(self): self.setupStep( RemoveWorkerFileSecret([(os.path.join(self.temp_path, "pathA"), "something"), (os.path.join(self.temp_path, "pathB"), "somethingmore")])) args1 = { 'path': os.path.join(self.temp_path, "pathA"), 'logEnviron': False } args2 = { 'path': os.path.join(self.temp_path, "pathB"), 'logEnviron': False } self.expectCommands( Expect('rmfile', args1) + 0, Expect('rmfile', args2) + 0, ) self.expectOutcome( result=SUCCESS, state_string="finished") d = self.runStep() return d buildbot-3.4.0/master/buildbot/test/unit/test_fake_httpclientservice.py000066400000000000000000000047571413250514000265250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.util import httpclientservice from buildbot.util import service class myTestedService(service.BuildbotService): name = 'myTestedService' @defer.inlineCallbacks def reconfigService(self, baseurl): self._http = yield httpclientservice.HTTPClientService.getService(self.master, baseurl) @defer.inlineCallbacks def doGetRoot(self): res = yield self._http.get("/") # note that at this point, only the http response headers are received if res.code != 200: raise Exception("%d: server did not succeed" % (res.code)) res_json = yield res.json() # res.json() returns a deferred to represent the time needed to fetch the entire body return res_json class Test(unittest.TestCase): @defer.inlineCallbacks def setUp(self): baseurl = 'http://127.0.0.1:8080' self.parent = service.MasterService() self._http = yield fakehttpclientservice.HTTPClientService.getService( self.parent, self, baseurl) self.tested = myTestedService(baseurl) yield self.tested.setServiceParent(self.parent) yield self.parent.startService() @defer.inlineCallbacks def test_root(self): self._http.expect("get", "/", content_json={'foo': 'bar'}) response = yield self.tested.doGetRoot() self.assertEqual(response, {'foo': 'bar'}) @defer.inlineCallbacks def test_root_error(self): self._http.expect("get", "/", content_json={'foo': 'bar'}, code=404) try: yield self.tested.doGetRoot() except 
Exception as e: self.assertEqual(str(e), '404: server did not succeed') buildbot-3.4.0/master/buildbot/test/unit/test_fake_secrets_manager.py000066400000000000000000000111311413250514000261100ustar00rootroot00000000000000 from twisted.internet import defer from twisted.trial import unittest from buildbot.secrets.manager import SecretManager from buildbot.secrets.secret import SecretDetails from buildbot.test.fake import fakemaster from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util.misc import TestReactorMixin class TestSecretsManager(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.master.config.secretsProviders = [FakeSecretStorage(secretdict={"foo": "bar", "other": "value"})] @defer.inlineCallbacks def testGetManagerService(self): secret_service_manager = SecretManager() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) secret_service_manager.services = [fakeStorageService] expectedClassName = FakeSecretStorage.__name__ expectedSecretDetail = SecretDetails(expectedClassName, "foo", "bar") secret_result = yield secret_service_manager.get("foo") strExpectedSecretDetail = str(secret_result) self.assertEqual(secret_result, expectedSecretDetail) self.assertEqual(secret_result.key, "foo") self.assertEqual(secret_result.value, "bar") self.assertEqual(secret_result.source, expectedClassName) self.assertEqual(strExpectedSecretDetail, "FakeSecretStorage foo: 'bar'") @defer.inlineCallbacks def testGetNoDataManagerService(self): secret_service_manager = SecretManager() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) secret_service_manager.services = [fakeStorageService] secret_result = yield secret_service_manager.get("foo2") self.assertEqual(secret_result, None) @defer.inlineCallbacks def 
testGetDataMultipleManagerService(self): secret_service_manager = SecretManager() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) otherFakeStorageService = FakeSecretStorage() otherFakeStorageService.reconfigService(secretdict={"foo2": "bar", "other2": "value"}) secret_service_manager.services = [fakeStorageService, otherFakeStorageService] expectedSecretDetail = SecretDetails(FakeSecretStorage.__name__, "foo2", "bar") secret_result = yield secret_service_manager.get("foo2") self.assertEqual(secret_result, expectedSecretDetail) @defer.inlineCallbacks def testGetDataMultipleManagerValues(self): secret_service_manager = SecretManager() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": ""}) otherFakeStorageService = FakeSecretStorage() otherFakeStorageService.reconfigService(secretdict={"foo2": "bar2", "other": ""}) secret_service_manager.services = [fakeStorageService, otherFakeStorageService] expectedSecretDetail = SecretDetails(FakeSecretStorage.__name__, "other", "") secret_result = yield secret_service_manager.get("other") self.assertEqual(secret_result, expectedSecretDetail) @defer.inlineCallbacks def testGetDataMultipleManagerServiceNoDatas(self): secret_service_manager = SecretManager() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) otherFakeStorageService = FakeSecretStorage() otherFakeStorageService.reconfigService(secretdict={"foo2": "bar", "other2": "value"}) secret_service_manager.services = [fakeStorageService, otherFakeStorageService] secret_result = yield secret_service_manager.get("foo3") self.assertEqual(secret_result, None) buildbot-3.4.0/master/buildbot/test/unit/test_interpolate_secrets.py000066400000000000000000000074641413250514000260540ustar00rootroot00000000000000import gc from twisted.internet import defer from twisted.trial import 
unittest from buildbot.process.properties import Interpolate from buildbot.secrets.manager import SecretManager from buildbot.test.fake import fakemaster from buildbot.test.fake.fakebuild import FakeBuild from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin class FakeBuildWithMaster(FakeBuild): def __init__(self, master): super().__init__() self.master = master class TestInterpolateSecrets(TestReactorMixin, unittest.TestCase, ConfigErrorsMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) self.secretsrv = SecretManager() self.secretsrv.services = [fakeStorageService] yield self.secretsrv.setServiceParent(self.master) self.build = FakeBuildWithMaster(self.master) @defer.inlineCallbacks def test_secret(self): command = Interpolate("echo %(secret:foo)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo bar") @defer.inlineCallbacks def test_secret_not_found(self): command = Interpolate("echo %(secret:fuo)s") yield self.assertFailure(self.build.render(command), defer.FirstError) gc.collect() self.flushLoggedErrors(defer.FirstError) self.flushLoggedErrors(KeyError) class TestInterpolateSecretsNoService(TestReactorMixin, unittest.TestCase, ConfigErrorsMixin): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.build = FakeBuildWithMaster(self.master) @defer.inlineCallbacks def test_secret(self): command = Interpolate("echo %(secret:fuo)s") yield self.assertFailure(self.build.render(command), defer.FirstError) gc.collect() self.flushLoggedErrors(defer.FirstError) self.flushLoggedErrors(KeyError) class TestInterpolateSecretsHiddenSecrets(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): 
self.setUpTestReactor() self.master = fakemaster.make_master(self) fakeStorageService = FakeSecretStorage() password = "bar" fakeStorageService.reconfigService( secretdict={"foo": password, "other": password + "random", "empty": ""}) self.secretsrv = SecretManager() self.secretsrv.services = [fakeStorageService] yield self.secretsrv.setServiceParent(self.master) self.build = FakeBuildWithMaster(self.master) @defer.inlineCallbacks def test_secret(self): command = Interpolate("echo %(secret:foo)s") rendered = yield self.build.render(command) cleantext = self.build.properties.cleanupTextFromSecrets(rendered) self.assertEqual(cleantext, "echo ") @defer.inlineCallbacks def test_secret_replace(self): command = Interpolate("echo %(secret:foo)s %(secret:other)s") rendered = yield self.build.render(command) cleantext = self.build.properties.cleanupTextFromSecrets(rendered) self.assertEqual(cleantext, "echo ") @defer.inlineCallbacks def test_secret_replace_with_empty_secret(self): command = Interpolate("echo %(secret:empty)s %(secret:other)s") rendered = yield self.build.render(command) cleantext = self.build.properties.cleanupTextFromSecrets(rendered) self.assertEqual(cleantext, "echo ") buildbot-3.4.0/master/buildbot/test/unit/test_janitor_configurator.py000066400000000000000000000100461413250514000262140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime from datetime import timedelta from parameterized import parameterized import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.configurators import janitor from buildbot.configurators.janitor import JANITOR_NAME from buildbot.configurators.janitor import BuildDataJanitor from buildbot.configurators.janitor import JanitorConfigurator from buildbot.configurators.janitor import LogChunksJanitor from buildbot.process.results import SUCCESS from buildbot.schedulers.forcesched import ForceScheduler from buildbot.schedulers.timed import Nightly from buildbot.test.util import config as configmixin from buildbot.test.util import configurators from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin from buildbot.util import datetime2epoch from buildbot.worker.local import LocalWorker class JanitorConfiguratorTests(configurators.ConfiguratorMixin, unittest.SynchronousTestCase): ConfiguratorClass = JanitorConfigurator def test_nothing(self): self.setupConfigurator() self.assertEqual(self.config_dict, { }) @parameterized.expand([ ('logs', {'logHorizon': timedelta(weeks=1)}, [LogChunksJanitor]), ('build_data', {'build_data_horizon': timedelta(weeks=1)}, [BuildDataJanitor]), ('logs_build_data', {'build_data_horizon': timedelta(weeks=1), 'logHorizon': timedelta(weeks=1)}, [LogChunksJanitor, BuildDataJanitor]), ]) def test_steps(self, name, configuration, exp_steps): self.setupConfigurator(**configuration) self.expectWorker(JANITOR_NAME, LocalWorker) self.expectScheduler(JANITOR_NAME, Nightly) self.expectScheduler(JANITOR_NAME + "_force", ForceScheduler) self.expectBuilderHasSteps(JANITOR_NAME, exp_steps) self.expectNoConfigError() class 
LogChunksJanitorTests(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpBuildStep() self.patch(janitor, "now", lambda: datetime.datetime(year=2017, month=1, day=1)) def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def test_basic(self): self.setupStep( LogChunksJanitor(logHorizon=timedelta(weeks=1))) self.master.db.logs.deleteOldLogChunks = mock.Mock(return_value=3) self.expectOutcome(result=SUCCESS, state_string="deleted 3 logchunks") yield self.runStep() expected_timestamp = datetime2epoch(datetime.datetime(year=2016, month=12, day=25)) self.master.db.logs.deleteOldLogChunks.assert_called_with(expected_timestamp) @defer.inlineCallbacks def test_build_data(self): self.setupStep(BuildDataJanitor(build_data_horizon=timedelta(weeks=1))) self.master.db.build_data.deleteOldBuildData = mock.Mock(return_value=4) self.expectOutcome(result=SUCCESS, state_string="deleted 4 build data key-value pairs") yield self.runStep() expected_timestamp = datetime2epoch(datetime.datetime(year=2016, month=12, day=25)) self.master.db.build_data.deleteOldBuildData.assert_called_with(expected_timestamp) buildbot-3.4.0/master/buildbot/test/unit/test_locks.py000066400000000000000000000657041413250514000231120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.locks import BaseLock from buildbot.locks import LockAccess from buildbot.locks import MasterLock from buildbot.locks import RealMasterLock from buildbot.locks import RealWorkerLock from buildbot.locks import WorkerLock from buildbot.util.eventual import flushEventualQueue class Requester: pass class BaseLockTests(unittest.TestCase): @parameterized.expand([ ('counting', 0, 0), ('counting', 0, 1), ('counting', 1, 1), ('counting', 0, 2), ('counting', 1, 2), ('counting', 2, 2), ('counting', 0, 3), ('counting', 1, 3), ('counting', 2, 3), ('counting', 3, 3), ('exclusive', 1, 1), ]) def test_is_available_empty(self, mode, count, maxCount): req = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count self.assertTrue(lock.isAvailable(req, access)) @parameterized.expand([ ('counting', 0, 0), ('counting', 0, 1), ('counting', 1, 1), ('counting', 0, 2), ('counting', 1, 2), ('counting', 2, 2), ('counting', 0, 3), ('counting', 1, 3), ('counting', 2, 3), ('counting', 3, 3), ('exclusive', 1, 1), ]) def test_is_available_without_waiter(self, mode, count, maxCount): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.claim(req, access) lock.release(req, access) self.assertTrue(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter, access)) @parameterized.expand([ ('counting', 1, 1), ('counting', 2, 2), ('counting', 3, 3), ('exclusive', 1, 1), ]) def test_is_available_with_waiter(self, mode, count, 
maxCount): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.claim(req, access) lock.waitUntilMaybeAvailable(req_waiter, access) lock.release(req, access) self.assertFalse(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter, access)) lock.claim(req_waiter, access) lock.release(req_waiter, access) self.assertTrue(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter, access)) @parameterized.expand([ ('counting', 1, 1), ('counting', 2, 2), ('counting', 3, 3), ('exclusive', 1, 1), ]) def test_is_available_with_multiple_waiters(self, mode, count, maxCount): req = Requester() req_waiter1 = Requester() req_waiter2 = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.claim(req, access) lock.waitUntilMaybeAvailable(req_waiter1, access) lock.waitUntilMaybeAvailable(req_waiter2, access) lock.release(req, access) self.assertFalse(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertFalse(lock.isAvailable(req_waiter2, access)) lock.claim(req_waiter1, access) lock.release(req_waiter1, access) self.assertFalse(lock.isAvailable(req, access)) self.assertFalse(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) lock.claim(req_waiter2, access) lock.release(req_waiter2, access) self.assertTrue(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) def test_is_available_with_multiple_waiters_multiple_counting(self): req1 = Requester() req2 = Requester() req_waiter1 = Requester() req_waiter2 = Requester() req_waiter3 = Requester() lock = BaseLock('test', maxCount=2) access = mock.Mock(spec=LockAccess) access.mode = 'counting' access.count = 1 lock.claim(req1, access) 
lock.claim(req2, access) lock.waitUntilMaybeAvailable(req_waiter1, access) lock.waitUntilMaybeAvailable(req_waiter2, access) lock.waitUntilMaybeAvailable(req_waiter3, access) lock.release(req1, access) lock.release(req2, access) self.assertFalse(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertFalse(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter1, access) lock.release(req_waiter1, access) self.assertFalse(lock.isAvailable(req1, access)) self.assertFalse(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter2, access) lock.release(req_waiter2, access) self.assertTrue(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter3, access) lock.release(req_waiter3, access) self.assertTrue(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) def test_is_available_with_mult_waiters_mult_counting_set_maxCount(self): req1 = Requester() req2 = Requester() req_waiter1 = Requester() req_waiter2 = Requester() req_waiter3 = Requester() lock = BaseLock('test', maxCount=2) access = mock.Mock(spec=LockAccess) access.mode = 'counting' access.count = 1 lock.claim(req1, access) lock.claim(req2, access) lock.waitUntilMaybeAvailable(req_waiter1, access) lock.waitUntilMaybeAvailable(req_waiter2, access) lock.waitUntilMaybeAvailable(req_waiter3, access) lock.release(req1, access) lock.release(req2, access) self.assertFalse(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) 
self.assertFalse(lock.isAvailable(req_waiter3, access)) lock.setMaxCount(4) self.assertTrue(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter1, access) lock.release(req_waiter1, access) self.assertTrue(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.setMaxCount(2) lock.waitUntilMaybeAvailable(req_waiter1, access) lock.claim(req_waiter2, access) lock.release(req_waiter2, access) self.assertFalse(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertFalse(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter3, access) lock.release(req_waiter3, access) self.assertTrue(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter1, access) lock.release(req_waiter1, access) @parameterized.expand([ ('counting', 1, 1), ('counting', 2, 2), ('counting', 3, 3), ('exclusive', 1, 1), ]) def test_duplicate_wait_until_maybe_available_throws(self, mode, count, maxCount): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.claim(req, access) lock.waitUntilMaybeAvailable(req_waiter, access) with self.assertRaises(AssertionError): lock.waitUntilMaybeAvailable(req_waiter, access) lock.release(req, access) @parameterized.expand([ ('counting', 1, 1), ('counting', 2, 2), ('counting', 3, 3), ('exclusive', 1, 1), ]) def test_stop_waiting_ensures_deferred_was_previous_result_of_wait(self, mode, count, 
maxCount): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.claim(req, access) lock.waitUntilMaybeAvailable(req_waiter, access) with self.assertRaises(AssertionError): wrong_d = defer.Deferred() lock.stopWaitingUntilAvailable(req_waiter, access, wrong_d) lock.release(req, access) @parameterized.expand([ ('counting', 1, 1), ('counting', 2, 2), ('counting', 3, 3), ('exclusive', 1, 1), ]) def test_stop_waiting_fires_deferred_if_not_woken(self, mode, count, maxCount): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.claim(req, access) d = lock.waitUntilMaybeAvailable(req_waiter, access) lock.stopWaitingUntilAvailable(req_waiter, access, d) self.assertTrue(d.called) lock.release(req, access) @parameterized.expand([ ('counting', 1, 1), ('counting', 2, 2), ('counting', 3, 3), ('exclusive', 1, 1), ]) @defer.inlineCallbacks def test_stop_waiting_does_not_fire_deferred_if_already_woken(self, mode, count, maxCount): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.claim(req, access) d = lock.waitUntilMaybeAvailable(req_waiter, access) lock.release(req, access) yield flushEventualQueue() self.assertTrue(d.called) # note that if the function calls the deferred again, an exception would be thrown from # inside Twisted. 
lock.stopWaitingUntilAvailable(req_waiter, access, d) @parameterized.expand([ ('counting', 1, 1), ('counting', 2, 2), ('counting', 3, 3), ('exclusive', 1, 1), ]) def test_stop_waiting_does_not_raise_after_release(self, mode, count, maxCount): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.claim(req, access) d = lock.waitUntilMaybeAvailable(req_waiter, access) lock.release(req, access) self.assertFalse(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter, access)) lock.stopWaitingUntilAvailable(req_waiter, access, d) lock.claim(req_waiter, access) lock.release(req_waiter, access) @parameterized.expand([ ('counting', 1, 1), ('counting', 2, 2), ('counting', 3, 3), ('exclusive', 1, 1), ]) def test_stop_waiting_removes_non_called_waiter(self, mode, count, maxCount): req = Requester() req_waiter1 = Requester() req_waiter2 = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.claim(req, access) d1 = lock.waitUntilMaybeAvailable(req_waiter1, access) d2 = lock.waitUntilMaybeAvailable(req_waiter2, access) lock.release(req, access) yield flushEventualQueue() self.assertFalse(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertFalse(lock.isAvailable(req_waiter2, access)) self.assertTrue(d1.called) lock.stopWaitingUntilAvailable(req_waiter2, access, d2) self.assertFalse(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertFalse(lock.isAvailable(req_waiter2, access)) lock.claim(req_waiter1, access) lock.release(req_waiter1, access) self.assertTrue(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) @parameterized.expand([ ('counting', 1, 1), ('counting', 2, 2), ('counting', 3, 3), 
('exclusive', 1, 1), ]) @defer.inlineCallbacks def test_stop_waiting_wakes_up_next_deferred_if_already_woken(self, mode, count, maxCount): req = Requester() req_waiter1 = Requester() req_waiter2 = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.claim(req, access) d1 = lock.waitUntilMaybeAvailable(req_waiter1, access) d2 = lock.waitUntilMaybeAvailable(req_waiter2, access) lock.release(req, access) yield flushEventualQueue() self.assertTrue(d1.called) self.assertFalse(d2.called) lock.stopWaitingUntilAvailable(req_waiter1, access, d1) yield flushEventualQueue() self.assertTrue(d2.called) @parameterized.expand([ ('counting', 1, 1), ('counting', 2, 2), ('counting', 3, 3), ('exclusive', 1, 1), ]) def test_can_release_non_waited_lock(self, mode, count, maxCount): req = Requester() req_not_waited = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = mode access.count = count lock.release(req_not_waited, access) lock.claim(req, access) lock.release(req, access) yield flushEventualQueue() lock.release(req_not_waited, access) @parameterized.expand([ ('counting', 'counting', 1, 1, 1), ('counting', 'exclusive', 1, 1, 1), ('exclusive', 'counting', 1, 1, 1), ('exclusive', 'exclusive', 1, 1, 1), ]) @defer.inlineCallbacks def test_release_calls_waiters_in_fifo_order(self, mode1, mode2, count1, count2, maxCount): req = Requester() req_waiters = [Requester() for _ in range(5)] lock = BaseLock('test', maxCount=maxCount) access1 = mock.Mock(spec=LockAccess) access1.mode = mode1 access1.count = count1 access2 = mock.Mock(spec=LockAccess) access2.mode = mode2 access2.count = count2 accesses = [access1, access2, access1, access2, access1] expected_called = [False] * 5 lock.claim(req, access1) deferreds = [lock.waitUntilMaybeAvailable(req_waiter, access) for req_waiter, access in zip(req_waiters, accesses)] self.assertEqual([d.called for d in 
deferreds], expected_called) lock.release(req, access1) yield flushEventualQueue() expected_called[0] = True self.assertEqual([d.called for d in deferreds], expected_called) for i in range(4): self.assertTrue(lock.isAvailable(req_waiters[i], accesses[i])) lock.claim(req_waiters[i], accesses[i]) self.assertEqual([d.called for d in deferreds], expected_called) lock.release(req_waiters[i], accesses[i]) yield flushEventualQueue() expected_called[i + 1] = True self.assertEqual([d.called for d in deferreds], expected_called) lock.claim(req_waiters[4], accesses[4]) lock.release(req_waiters[4], accesses[4]) @parameterized.expand([ (1, ), ]) @defer.inlineCallbacks def test_release_calls_multiple_waiters_on_release(self, count): req = Requester() req_waiters = [Requester() for _ in range(5)] lock = BaseLock('test', maxCount=5) access_counting = mock.Mock(spec=LockAccess) access_counting.mode = 'counting' access_counting.count = count access_excl = mock.Mock(spec=LockAccess) access_excl.mode = 'exclusive' access_excl.count = 1 lock.claim(req, access_excl) deferreds = [lock.waitUntilMaybeAvailable(req_waiter, access_counting) for req_waiter in req_waiters] self.assertEqual([d.called for d in deferreds], [False] * 5) lock.release(req, access_excl) yield flushEventualQueue() self.assertEqual([d.called for d in deferreds], [True] * 5) @parameterized.expand([ (1, 1), ]) @defer.inlineCallbacks def test_release_calls_multiple_waiters_on_setMaxCount(self, count, maxCount): req = Requester() req_waiters = [Requester() for _ in range(5)] lock = BaseLock('test', maxCount=maxCount) access_counting = mock.Mock(spec=LockAccess) access_counting.mode = 'counting' access_counting.count = count lock.claim(req, access_counting) deferreds = [lock.waitUntilMaybeAvailable(req_waiter, access_counting) for req_waiter in req_waiters] self.assertEqual([d.called for d in deferreds], [False] * 5) lock.release(req, access_counting) yield flushEventualQueue() self.assertEqual([d.called for d in 
deferreds], [True] + [False] * 4) lock.setMaxCount(5) yield flushEventualQueue() self.assertEqual([d.called for d in deferreds], [True] * 5) @parameterized.expand([ (2, 2), (3, 3), (4, 4), (5, 5), ]) def test_exclusive_must_have_count_one(self, count, maxCount): req = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = 'exclusive' access.count = count with self.assertRaises(AssertionError): lock.claim(req, access) @parameterized.expand([ (0, 1), (1, 1), (0, 2), (1, 2), (2, 2), (0, 3), (1, 3), (2, 3), (3, 3), ]) def test_counting_count_zero_always_succeeds(self, count, maxCount): reqs = [Requester() for _ in range(10)] req_waiters = [Requester() for _ in range(10)] req_nonzero = Requester() lock = BaseLock('test', maxCount=maxCount) access_zero = mock.Mock(spec=LockAccess) access_zero.mode = 'counting' access_zero.count = 0 access_nonzero = mock.Mock(spec=LockAccess) access_nonzero.mode = 'counting' access_nonzero.count = count lock.claim(req_nonzero, access_nonzero) for req in reqs: self.assertTrue(lock.isAvailable(req, access_zero)) lock.claim(req, access_zero) for req_waiter in req_waiters: self.assertTrue(lock.isAvailable(req_waiter, access_zero)) for req in reqs: self.assertTrue(lock.isAvailable(req, access_zero)) lock.release(req, access_zero) lock.release(req_nonzero, access_nonzero) @parameterized.expand([ (1, 0), (2, 0), (2, 1), (3, 0), (3, 1), (3, 2), ]) def test_count_cannot_be_larger_than_maxcount(self, count, maxCount): req = Requester() lock = BaseLock('test', maxCount=maxCount) access = mock.Mock(spec=LockAccess) access.mode = 'counting' access.count = count self.assertFalse(lock.isAvailable(req, access)) @parameterized.expand([ (0, 1, 1), (0, 1, 2), (1, 2, 3), (1, 2, 4), (1, 3, 4), (1, 3, 5), (2, 3, 5), (2, 3, 6), ]) def test_different_counts_below_limit(self, count1, count2, maxCount): req1 = Requester() req2 = Requester() lock = BaseLock('test', maxCount=maxCount) access1 = 
mock.Mock(spec=LockAccess) access1.mode = 'counting' access1.count = count1 access2 = mock.Mock(spec=LockAccess) access2.mode = 'counting' access2.count = count2 self.assertTrue(lock.isAvailable(req1, access1)) lock.claim(req1, access1) self.assertTrue(lock.isAvailable(req2, access2)) lock.release(req1, access1) @parameterized.expand([ (0, 2, 1), (0, 3, 1), (0, 3, 2), (1, 2, 2), (1, 3, 3), (1, 4, 3), (2, 3, 2), (2, 3, 3), (2, 3, 4), (2, 4, 4), ]) def test_different_counts_over_limit(self, count1, count2, maxCount): req1 = Requester() req2 = Requester() lock = BaseLock('test', maxCount=maxCount) access1 = mock.Mock(spec=LockAccess) access1.mode = 'counting' access1.count = count1 access2 = mock.Mock(spec=LockAccess) access2.mode = 'counting' access2.count = count2 self.assertTrue(lock.isAvailable(req1, access1)) lock.claim(req1, access1) self.assertFalse(lock.isAvailable(req2, access2)) lock.release(req1, access1) class RealLockTests(unittest.TestCase): def test_master_lock_init_from_lockid(self): lock = RealMasterLock('lock1') lock.updateFromLockId(MasterLock('lock1', maxCount=3), 0) self.assertEqual(lock.lockName, 'lock1') self.assertEqual(lock.maxCount, 3) self.assertEqual(lock.description, '') def test_master_lock_update_from_lockid(self): lock = RealMasterLock('lock1') lock.updateFromLockId(MasterLock('lock1', maxCount=3), 0) lock.updateFromLockId(MasterLock('lock1', maxCount=4), 0) self.assertEqual(lock.lockName, 'lock1') self.assertEqual(lock.maxCount, 4) self.assertEqual(lock.description, '') with self.assertRaises(AssertionError): lock.updateFromLockId(MasterLock('lock2', maxCount=4), 0) def test_worker_lock_init_from_lockid(self): lock = RealWorkerLock('lock1') lock.updateFromLockId(WorkerLock('lock1', maxCount=3), 0) self.assertEqual(lock.lockName, 'lock1') self.assertEqual(lock.maxCount, 3) self.assertEqual(lock.description, '') worker_lock = lock.getLockForWorker('worker1') self.assertEqual(worker_lock.lockName, 'lock1') 
self.assertEqual(worker_lock.maxCount, 3) self.assertTrue(worker_lock.description.startswith( '') self.assertEqual(worker_lock.lockName, 'lock1') self.assertEqual(worker_lock.maxCount, 5) self.assertTrue(worker_lock.description.startswith( ' crossbar init > crossbar start & > export WAMP_ROUTER_URL=ws://localhost:8080/ws > trial buildbot.unit.test_mq_wamp""") # if connection is bad, this test can timeout easily # we reduce the timeout to help maintain the sanity of the developer timeout = 2 @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() if "WAMP_ROUTER_URL" not in os.environ: raise unittest.SkipTest(self.HOW_TO_RUN) self.master = fakemaster.make_master(self) self.mq = wamp.WampMQ() yield self.mq.setServiceParent(self.master) self.connector = self.master.wamp = connector.WampConnector() yield self.connector.setServiceParent(self.master) yield self.master.startService() config = FakeConfig() config.mq['router_url'] = os.environ["WAMP_ROUTER_URL"] yield self.connector.reconfigServiceWithBuildbotConfig(config) def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def test_forward_data(self): d = defer.Deferred() callback = mock.Mock(side_effect=lambda *a, **kw: d.callback(None)) yield self.mq.startConsuming(callback, ('a', 'b')) # _produce returns a deferred yield self.mq._produce(('a', 'b'), 'foo') # calling produce should eventually call the callback with decoding of # topic yield d callback.assert_called_with(('a', 'b'), 'foo') @defer.inlineCallbacks def test_forward_data_wildcard(self): d = defer.Deferred() callback = mock.Mock(side_effect=lambda *a, **kw: d.callback(None)) yield self.mq.startConsuming(callback, ('a', None)) # _produce returns a deferred yield self.mq._produce(('a', 'b'), 'foo') # calling produce should eventually call the callback with decoding of # topic yield d callback.assert_called_with(('a', 'b'), 'foo') 
buildbot-3.4.0/master/buildbot/test/unit/test_pbmanager.py000066400000000000000000000143441413250514000237250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Test clean shutdown functionality of the master """ import mock from twisted.cred import credentials from twisted.internet import defer from twisted.spread import pb from twisted.trial import unittest from buildbot import pbmanager class FakeMaster: initLock = defer.DeferredLock() def addService(self, svc): pass @property def master(self): return self class TestPBManager(unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.pbm = pbmanager.PBManager() yield self.pbm.setServiceParent(FakeMaster()) self.pbm.startService() self.connections = [] def tearDown(self): return self.pbm.stopService() def perspectiveFactory(self, mind, username): persp = mock.Mock() persp.is_my_persp = True persp.attached = lambda mind: defer.succeed(None) self.connections.append(username) return defer.succeed(persp) @defer.inlineCallbacks def test_repr(self): reg = yield self.pbm.register( 'tcp:0:interface=127.0.0.1', "x", "y", self.perspectiveFactory) self.assertEqual(repr(self.pbm.dispatchers['tcp:0:interface=127.0.0.1']), '') self.assertEqual( repr(reg), '') @defer.inlineCallbacks def test_register_unregister(self): portstr = 
"tcp:0:interface=127.0.0.1" reg = yield self.pbm.register(portstr, "boris", "pass", self.perspectiveFactory) # make sure things look right self.assertIn(portstr, self.pbm.dispatchers) disp = self.pbm.dispatchers[portstr] self.assertIn('boris', disp.users) # we can't actually connect to it, as that requires finding the # dynamically allocated port number which is buried out of reach; # however, we can try the requestAvatar and requestAvatarId methods. username = yield disp.requestAvatarId(credentials.UsernamePassword(b'boris', b'pass')) self.assertEqual(username, b'boris') avatar = yield disp.requestAvatar(b'boris', mock.Mock(), pb.IPerspective) (iface, persp, detach_fn) = avatar self.assertTrue(persp.is_my_persp) self.assertIn('boris', self.connections) yield reg.unregister() @defer.inlineCallbacks def test_register_no_user(self): portstr = "tcp:0:interface=127.0.0.1" reg = yield self.pbm.register(portstr, "boris", "pass", self.perspectiveFactory) # make sure things look right self.assertIn(portstr, self.pbm.dispatchers) disp = self.pbm.dispatchers[portstr] self.assertIn('boris', disp.users) # we can't actually connect to it, as that requires finding the # dynamically allocated port number which is buried out of reach; # however, we can try the requestAvatar and requestAvatarId methods. 
username = yield disp.requestAvatarId(credentials.UsernamePassword(b'boris', b'pass')) self.assertEqual(username, b'boris') with self.assertRaises(ValueError): yield disp.requestAvatar(b'notboris', mock.Mock(), pb.IPerspective) self.assertNotIn('boris', self.connections) yield reg.unregister() @defer.inlineCallbacks def test_double_register_unregister(self): portstr = "tcp:0:interface=127.0.0.1" reg1 = yield self.pbm.register(portstr, "boris", "pass", None) reg2 = yield self.pbm.register(portstr, "ivona", "pass", None) # make sure things look right self.assertEqual(len(self.pbm.dispatchers), 1) self.assertIn(portstr, self.pbm.dispatchers) disp = self.pbm.dispatchers[portstr] self.assertIn('boris', disp.users) self.assertIn('ivona', disp.users) yield reg1.unregister() self.assertEqual(len(self.pbm.dispatchers), 1) self.assertIn(portstr, self.pbm.dispatchers) disp = self.pbm.dispatchers[portstr] self.assertNotIn('boris', disp.users) self.assertIn('ivona', disp.users) yield reg2.unregister() self.assertEqual(len(self.pbm.dispatchers), 0) @defer.inlineCallbacks def test_requestAvatarId_noinitLock(self): portstr = "tcp:0:interface=127.0.0.1" reg = yield self.pbm.register(portstr, "boris", "pass", self.perspectiveFactory) disp = self.pbm.dispatchers[portstr] d = disp.requestAvatarId(credentials.UsernamePassword(b'boris', b'pass')) self.assertTrue(d.called, "requestAvatarId should have been called since the lock is free") yield reg.unregister() @defer.inlineCallbacks def test_requestAvatarId_initLock(self): portstr = "tcp:0:interface=127.0.0.1" reg = yield self.pbm.register(portstr, "boris", "pass", self.perspectiveFactory) disp = self.pbm.dispatchers[portstr] try: # simulate a reconfig/restart in progress yield self.pbm.master.initLock.acquire() # try to authenticate while the lock is locked d = disp.requestAvatarId(credentials.UsernamePassword(b'boris', b'pass')) self.assertFalse(d.called, "requestAvatarId should block until the lock is released") finally: # release the 
lock, it should allow for auth to proceed yield self.pbm.master.initLock.release() self.assertTrue(d.called, "requestAvatarId should have been called after the lock was released") yield reg.unregister() buildbot-3.4.0/master/buildbot/test/unit/test_plugins.py000066400000000000000000000235511413250514000234520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ Unit tests for the plugin framework """ import warnings import mock from twisted.trial import unittest from zope.interface import implementer import buildbot.plugins.db from buildbot.errors import PluginDBError from buildbot.interfaces import IPlugin from buildbot.test.util.warnings import assertProducesWarning # buildbot.plugins.db needs to be imported for patching, however just 'db' is # much shorter for using in tests db = buildbot.plugins.db class FakeEntry: """ An entry suitable for unit tests """ def __init__(self, name, project_name, version, fail_require, value, warnings=[]): self._name = name self._dist = mock.Mock(spec_set=['project_name', 'version']) self._dist.project_name = project_name self._dist.version = version self._fail_require = fail_require self._value = value self._warnings = warnings @property def name(self): "entry name" return self._name @property def dist(self): "dist thingie" return self._dist def require(self): """ handle external dependencies """ if self._fail_require: raise RuntimeError('Fail require as requested') def load(self): """ handle loading """ for w in self._warnings: warnings.warn(w, DeprecationWarning) return self._value class ITestInterface(IPlugin): """ test interface """ def hello(name): "Greets by :param:`name`" @implementer(ITestInterface) class ClassWithInterface: """ a class to implement a simple interface """ def __init__(self, name=None): self._name = name def hello(self, name=None): 'implement the required method' return name or self._name class ClassWithNoInterface: """ just a class """ # NOTE: buildbot.plugins.db prepends the group with common namespace -- # 'buildbot.' 
_FAKE_ENTRIES = { 'buildbot.interface': [ FakeEntry('good', 'non-existent', 'irrelevant', False, ClassWithInterface), FakeEntry('deep.path', 'non-existent', 'irrelevant', False, ClassWithInterface) ], 'buildbot.interface_warnings': [ FakeEntry('good', 'non-existent', 'irrelevant', False, ClassWithInterface, warnings=['test warning']), FakeEntry('deep.path', 'non-existent', 'irrelevant', False, ClassWithInterface, warnings=['test warning']) ], 'buildbot.interface_failed': [ FakeEntry('good', 'non-existent', 'irrelevant', True, ClassWithInterface) ], 'buildbot.no_interface': [ FakeEntry('good', 'non-existent', 'irrelevant', False, ClassWithNoInterface) ], 'buildbot.no_interface_again': [ FakeEntry('good', 'non-existent', 'irrelevant', False, ClassWithNoInterface) ], 'buildbot.no_interface_failed': [ FakeEntry('good', 'non-existent', 'irrelevant', True, ClassWithNoInterface) ], 'buildbot.duplicates': [ FakeEntry('good', 'non-existent', 'first', False, ClassWithNoInterface), FakeEntry('good', 'non-existent', 'second', False, ClassWithNoInterface) ] } def provide_fake_entries(group): """ give a set of fake entries for known groups """ return _FAKE_ENTRIES.get(group, []) @mock.patch('buildbot.plugins.db.iter_entry_points', provide_fake_entries) class TestBuildbotPlugins(unittest.TestCase): def setUp(self): buildbot.plugins.db._DB = buildbot.plugins.db._PluginDB() def test_check_group_registration(self): with mock.patch.object(buildbot.plugins.db, '_DB', db._PluginDB()): # The groups will be prepended with namespace, so info() will # return a dictionary with right keys, but no data groups = set(_FAKE_ENTRIES.keys()) for group in groups: db.get_plugins(group) registered = set(db.info().keys()) self.assertEqual(registered, groups) self.assertEqual(registered, set(db.namespaces())) def test_interface_provided_simple(self): # Basic check before the actual test self.assertTrue(ITestInterface.implementedBy(ClassWithInterface)) plugins = db.get_plugins('interface', 
interface=ITestInterface) self.assertTrue('good' in plugins.names) result_get = plugins.get('good') result_getattr = plugins.good self.assertFalse(result_get is None) self.assertTrue(result_get is result_getattr) # Make sure we actually got our class greeter = result_get('yes') self.assertEqual('yes', greeter.hello()) self.assertEqual('no', greeter.hello('no')) def test_missing_plugin(self): plugins = db.get_plugins('interface', interface=ITestInterface) with self.assertRaises(AttributeError): getattr(plugins, 'bad') with self.assertRaises(PluginDBError): plugins.get('bad') with self.assertRaises(PluginDBError): plugins.get('good.extra') def test_interface_provided_deep(self): # Basic check before the actual test self.assertTrue(ITestInterface.implementedBy(ClassWithInterface)) plugins = db.get_plugins('interface', interface=ITestInterface) self.assertTrue('deep.path' in plugins.names) self.assertTrue('deep.path' in plugins) self.assertFalse('even.deeper.path' in plugins) result_get = plugins.get('deep.path') result_getattr = plugins.deep.path self.assertFalse(result_get is None) self.assertTrue(result_get is result_getattr) # Make sure we actually got our class greeter = result_get('yes') self.assertEqual('yes', greeter.hello()) self.assertEqual('no', greeter.hello('no')) def test_interface_warnings(self): # we should not get no warnings when not trying to access the plugin plugins = db.get_plugins('interface_warnings', interface=ITestInterface) self.assertTrue('good' in plugins.names) self.assertTrue('deep.path' in plugins.names) # we should get warning when trying to access the plugin with assertProducesWarning(DeprecationWarning, "test warning"): _ = plugins.get('good') with assertProducesWarning(DeprecationWarning, "test warning"): _ = plugins.good with assertProducesWarning(DeprecationWarning, "test warning"): _ = plugins.get('deep.path') with assertProducesWarning(DeprecationWarning, "test warning"): _ = plugins.deep.path def 
test_interface_provided_deps_failed(self): plugins = db.get_plugins('interface_failed', interface=ITestInterface, check_extras=True) with self.assertRaises(PluginDBError): plugins.get('good') def test_required_interface_not_provided(self): plugins = db.get_plugins('no_interface_again', interface=ITestInterface) self.assertTrue(plugins._interface is ITestInterface) with self.assertRaises(PluginDBError): plugins.get('good') def test_no_interface_provided(self): plugins = db.get_plugins('no_interface') self.assertFalse(plugins.get('good') is None) def test_no_interface_provided_deps_failed(self): plugins = db.get_plugins('no_interface_failed', check_extras=True) with self.assertRaises(PluginDBError): plugins.get('good') def test_failure_on_dups(self): with self.assertRaises(PluginDBError): db.get_plugins('duplicates', load_now=True) def test_get_info_on_a_known_plugin(self): plugins = db.get_plugins('interface') self.assertEqual(('non-existent', 'irrelevant'), plugins.info('good')) def test_failure_on_unknown_plugin_info(self): plugins = db.get_plugins('interface') with self.assertRaises(PluginDBError): plugins.info('bad') def test_failure_on_unknown_plugin_get(self): plugins = db.get_plugins('interface') with self.assertRaises(PluginDBError): plugins.get('bad') class SimpleFakeEntry(FakeEntry): def __init__(self, name, value): super().__init__(name, 'non-existent', 'irrelevant', False, value) _WORKER_FAKE_ENTRIES = { 'buildbot.worker': [ SimpleFakeEntry('Worker', ClassWithInterface), SimpleFakeEntry('EC2LatentWorker', ClassWithInterface), SimpleFakeEntry('LibVirtWorker', ClassWithInterface), SimpleFakeEntry('OpenStackLatentWorker', ClassWithInterface), SimpleFakeEntry('newthirdparty', ClassWithInterface), SimpleFakeEntry('deep.newthirdparty', ClassWithInterface), ], 'buildbot.util': [ SimpleFakeEntry('WorkerLock', ClassWithInterface), SimpleFakeEntry('enforceChosenWorker', ClassWithInterface), SimpleFakeEntry('WorkerChoiceParameter', ClassWithInterface), ], } def 
provide_worker_fake_entries(group): """ give a set of fake entries for known groups """ return _WORKER_FAKE_ENTRIES.get(group, []) buildbot-3.4.0/master/buildbot/test/unit/test_revlinks.py000066400000000000000000000143061413250514000236240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.revlinks import BitbucketRevlink from buildbot.revlinks import GithubRevlink from buildbot.revlinks import GitwebMatch from buildbot.revlinks import RevlinkMatch from buildbot.revlinks import SourceforgeGitRevlink from buildbot.revlinks import SourceforgeGitRevlink_AlluraPlatform from buildbot.revlinks import default_revlink_matcher class TestGithubRevlink(unittest.TestCase): revision = 'b6874701b54e0043a78882b020afc86033133f91' url = 'https://github.com/buildbot/buildbot/commit/b6874701b54e0043a78882b020afc86033133f91' def testHTTPS(self): self.assertEqual(GithubRevlink(self.revision, 'https://github.com/buildbot/buildbot.git'), self.url) def testGIT(self): self.assertEqual(GithubRevlink(self.revision, 'git://github.com/buildbot/buildbot.git'), self.url) def testSSH(self): self.assertEqual(GithubRevlink(self.revision, 'git@github.com:buildbot/buildbot.git'), self.url) def testSSHuri(self): self.assertEqual(GithubRevlink(self.revision, 
'ssh://git@github.com/buildbot/buildbot.git'), self.url) class TestSourceforgeGitRevlink(unittest.TestCase): revision = 'b99c89a2842d386accea8072ae5bb6e24aa7cf29' url = 'http://gemrb.git.sourceforge.net/git/gitweb.cgi?p=gemrb/gemrb;a=commit;h=b99c89a2842d386accea8072ae5bb6e24aa7cf29' # noqa pylint: disable=line-too-long def testGIT(self): url = SourceforgeGitRevlink(self.revision, 'git://gemrb.git.sourceforge.net/gitroot/gemrb/gemrb') self.assertEqual(url, self.url) def testSSH(self): url = SourceforgeGitRevlink(self.revision, 'somebody@gemrb.git.sourceforge.net:gitroot/gemrb/gemrb') self.assertEqual(url, self.url) def testSSHuri(self): url = SourceforgeGitRevlink(self.revision, 'ssh://somebody@gemrb.git.sourceforge.net/gitroot/gemrb/gemrb') self.assertEqual(url, self.url) class TestSourceforgeGitRevlink_AlluraPlatform(unittest.TestCase): revision = '6f9b1470bae497c6ce47e4cf8c9195d864d2ba2f' url = 'https://sourceforge.net/p/klusters/klusters/ci/6f9b1470bae497c6ce47e4cf8c9195d864d2ba2f/' def testGIT(self): url = SourceforgeGitRevlink_AlluraPlatform(self.revision, 'git://git.code.sf.net/p/klusters/klusters') self.assertEqual(url, self.url) def testSSHuri(self): url = SourceforgeGitRevlink_AlluraPlatform( self.revision, 'ssh://somebody@git.code.sf.net/p/klusters/klusters') self.assertEqual(url, self.url) class TestRevlinkMatch(unittest.TestCase): def testNotmuch(self): revision = 'f717d2ece1836c863f9cc02abd1ff2539307cd1d' matcher = RevlinkMatch(['git://notmuchmail.org/git/(.*)'], r'http://git.notmuchmail.org/git/\1/commit/%s') self.assertEqual(matcher(revision, 'git://notmuchmail.org/git/notmuch'), 'http://git.notmuchmail.org/git/notmuch/commit/f717d2ece1836c863f9cc02abd1ff2539307cd1d') # noqa pylint: disable=line-too-long def testSingleString(self): revision = 'rev' matcher = RevlinkMatch('test', 'out%s') self.assertEqual(matcher(revision, 'test'), 'outrev') def testSingleUnicode(self): revision = 'rev' matcher = RevlinkMatch('test', 'out%s') 
self.assertEqual(matcher(revision, 'test'), 'outrev') def testTwoCaptureGroups(self): revision = 'rev' matcher = RevlinkMatch('([A-Z]*)Z([0-9]*)', r'\2-\1-%s') self.assertEqual(matcher(revision, 'ABCZ43'), '43-ABC-rev') class TestGitwebMatch(unittest.TestCase): def testOrgmode(self): revision = '490d6ace10e0cfe74bab21c59e4b7bd6aa3c59b8' matcher = GitwebMatch( 'git://orgmode.org/(?P.*)', 'http://orgmode.org/w/') self.assertEqual(matcher(revision, 'git://orgmode.org/org-mode.git'), 'http://orgmode.org/w/?p=org-mode.git;a=commit;h=490d6ace10e0cfe74bab21c59e4b7bd6aa3c59b8') # noqa pylint: disable=line-too-long class TestBitbucketRevlink(unittest.TestCase): revision = '4d4284cf4fb49ce82fefb6cbac8e462073c5f106' url = 'https://bitbucket.org/fakeproj/fakerepo/commits/4d4284cf4fb49ce82fefb6cbac8e462073c5f106' def testHTTPS(self): self.assertEqual(BitbucketRevlink(self.revision, 'https://fakeuser@bitbucket.org/fakeproj/fakerepo.git'), self.url) def testSSH(self): self.assertEqual(BitbucketRevlink(self.revision, 'git@bitbucket.org:fakeproj/fakerepo.git'), self.url) class TestDefaultRevlinkMultiPlexer(unittest.TestCase): revision = "0" def testAllRevlinkMatchers(self): # GithubRevlink self.assertTrue(default_revlink_matcher( self.revision, 'https://github.com/buildbot/buildbot.git')) # BitbucketRevlink self.assertTrue(default_revlink_matcher( self.revision, 'git@bitbucket.org:fakeproj/fakerepo.git')) # SourceforgeGitRevlink self.assertTrue(default_revlink_matcher( self.revision, 'git://gemrb.git.sourceforge.net/gitroot/gemrb/gemrb')) # SourceforgeGitRevlink_AlluraPlatform self.assertTrue(default_revlink_matcher( self.revision, 'git://git.code.sf.net/p/klusters/klusters')) buildbot-3.4.0/master/buildbot/test/unit/test_secret_in_file.py000066400000000000000000000104731413250514000247420ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import stat from twisted.internet import defer from twisted.python.filepath import FilePath from twisted.trial import unittest from buildbot.secrets.providers.file import SecretInAFile from buildbot.test.util.config import ConfigErrorsMixin from buildbot.util.misc import writeLocalFile class TestSecretInFile(ConfigErrorsMixin, unittest.TestCase): def createTempDir(self, dirname): tempdir = FilePath(self.mktemp()) tempdir.createDirectory() return tempdir.path def createFileTemp(self, tempdir, filename, text="", chmodRights=0o700): file_path = os.path.join(tempdir, filename) writeLocalFile(file_path, text, chmodRights) return file_path @defer.inlineCallbacks def setUp(self): self.tmp_dir = self.createTempDir("temp") self.filepath = self.createFileTemp(self.tmp_dir, "tempfile.txt", text="key value\n") self.srvfile = SecretInAFile(self.tmp_dir) yield self.srvfile.startService() @defer.inlineCallbacks def tearDown(self): yield self.srvfile.stopService() def testCheckConfigSecretInAFileService(self): self.assertEqual(self.srvfile.name, "SecretInAFile") self.assertEqual(self.srvfile._dirname, self.tmp_dir) def testCheckConfigErrorSecretInAFileService(self): if os.name != "posix": self.skipTest("Permission checks only works on posix systems") filepath = self.createFileTemp(self.tmp_dir, "tempfile2.txt", 
chmodRights=stat.S_IRGRP) expctd_msg_error = " on file tempfile2.txt are too " \ "open. It is required that your secret files are" \ " NOT accessible by others!" with self.assertRaisesConfigError(expctd_msg_error): self.srvfile.checkConfig(self.tmp_dir) os.remove(filepath) @defer.inlineCallbacks def testCheckConfigfileExtension(self): filepath = self.createFileTemp(self.tmp_dir, "tempfile2.ini", text="test suffix", chmodRights=stat.S_IRWXU) filepath2 = self.createFileTemp(self.tmp_dir, "tempfile2.txt", text="some text", chmodRights=stat.S_IRWXU) yield self.srvfile.reconfigService(self.tmp_dir, suffixes=[".ini"]) self.assertEqual(self.srvfile.get("tempfile2"), "test suffix") self.assertEqual(self.srvfile.get("tempfile3"), None) os.remove(filepath) os.remove(filepath2) @defer.inlineCallbacks def testReconfigSecretInAFileService(self): otherdir = self.createTempDir("temp2") yield self.srvfile.reconfigService(otherdir) self.assertEqual(self.srvfile.name, "SecretInAFile") self.assertEqual(self.srvfile._dirname, otherdir) def testGetSecretInFile(self): value = self.srvfile.get("tempfile.txt") self.assertEqual(value, "key value") @defer.inlineCallbacks def testGetSecretInFileSuffixes(self): yield self.srvfile.reconfigService(self.tmp_dir, suffixes=[".txt"]) value = self.srvfile.get("tempfile") self.assertEqual(value, "key value") def testGetSecretInFileNotFound(self): value = self.srvfile.get("tempfile2.txt") self.assertEqual(value, None) @defer.inlineCallbacks def testGetSecretInFileNoStrip(self): yield self.srvfile.reconfigService(self.tmp_dir, strip=False) value = self.srvfile.get("tempfile.txt") self.assertEqual(value, "key value\n") buildbot-3.4.0/master/buildbot/test/unit/test_secret_in_hvac.py000066400000000000000000000200611413250514000247360ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from unittest.mock import patch from parameterized import parameterized from twisted.internet import defer from twisted.trial import unittest from buildbot.secrets.providers.vault_hvac import HashiCorpVaultKvSecretProvider from buildbot.secrets.providers.vault_hvac import VaultAuthenticatorApprole from buildbot.secrets.providers.vault_hvac import VaultAuthenticatorToken from buildbot.test.util import interfaces from buildbot.test.util.config import ConfigErrorsMixin try: import hvac assert hvac except ImportError: hvac = None class FakeHvacApprole: def login(self, role_id, secret_id): self.role_id = role_id self.secret_id = secret_id class FakeHvacAuth: approle = FakeHvacApprole() class FakeHvacKvV1: token = None def read_secret(self, path, mount_point): if self.token is None: raise hvac.exceptions.Unauthorized if path == "wrong/path": raise hvac.exceptions.InvalidPath(message="Fake InvalidPath exception") return {'data': {'key': "value"}} class FakeHvacKvV2: token = None def read_secret_version(self, path, mount_point): if self.token is None: raise hvac.exceptions.Unauthorized(message="Fake Unauthorized exception") if path == "wrong/path": raise hvac.exceptions.InvalidPath(message="Fake InvalidPath exception") return {'data': {'data': {'key': "value"}}} class FakeHvacKv: default_kv_version = 2 v1 = 
FakeHvacKvV1() v2 = FakeHvacKvV2() class FakeHvacSecrets: kv = FakeHvacKv() class FakeHvacClient: auth = FakeHvacAuth() secrets = FakeHvacSecrets() _token = None @property def token(self): return self._token @token.setter def token(self, new_token): self._token = new_token self.secrets.kv.v1.token = new_token self.secrets.kv.v2.token = new_token def mock_vault(*args, **kwargs): client = FakeHvacClient() client.token = "mockToken" return client class TestSecretInVaultAuthenticator(interfaces.InterfaceTests): def test_authenticate(self): raise NotImplementedError class TestSecretInVaultAuthenticatorToken(unittest.TestCase, TestSecretInVaultAuthenticator): def setUp(self): if hvac is None: raise unittest.SkipTest( "Need to install hvac to test VaultAuthenticatorToken") def test_authenticate(self): token = "mockToken" authenticator = VaultAuthenticatorToken(token) client = hvac.Client() authenticator.authenticate(client) self.assertEqual(client.token, token) class TestSecretInVaultAuthenticatorApprole(unittest.TestCase, TestSecretInVaultAuthenticator): def test_authenticate(self): authenticator = VaultAuthenticatorApprole("testRole", "testSecret") client = FakeHvacClient() authenticator.authenticate(client) self.assertEqual(client.auth.approle.secret_id, "testSecret") class TestSecretInHashiCorpVaultKvSecretProvider(ConfigErrorsMixin, unittest.TestCase): def setUp(self): if hvac is None: raise unittest.SkipTest( "Need to install hvac to test HashiCorpVaultKvSecretProvider") param = dict(vault_server="", authenticator=VaultAuthenticatorToken("mockToken"), path_delimiter='|', path_escape='\\', api_version=2) self.provider = HashiCorpVaultKvSecretProvider(**param) self.provider.reconfigService(**param) self.provider.client = FakeHvacClient() self.provider.client.secrets.kv.default_kv_version = param['api_version'] self.provider.client.token = "mockToken" @parameterized.expand([ ('vault_server_not_string', {'vault_server': {}}, 'vault_server must be a string'), 
('path_delimiter_not_char', {'vault_server': 'abc', 'path_delimiter': {}}, 'path_delimiter must be a single character'), ('path_delimiter_too_long', {'vault_server': 'abc', 'path_delimiter': 'ab'}, 'path_delimiter must be a single character'), ('path_escape_not_char', {'vault_server': 'abc', 'path_escape': {}}, 'path_escape must be a single character'), ('path_escape_too_long', {'vault_server': 'abc', 'path_escape': 'ab'}, 'path_escape must be a single character'), ('api_version_unsupported', {'vault_server': 'abc', 'api_version': 3}, 'api_version 3 is not supported'), ]) def test_check_config(self, name, params, error): with self.assertRaisesConfigError(error): HashiCorpVaultKvSecretProvider(authenticator=VaultAuthenticatorToken("mockToken"), **params) def test_check_config_authenticator(self): with self.assertRaisesConfigError('authenticator must be instance of VaultAuthenticator'): HashiCorpVaultKvSecretProvider(vault_server='abc') def test_escaped_split(self): parts = self.provider.escaped_split("a/b\\|c/d|e/f\\|g/h") self.assertEqual(parts, ["a/b|c/d", "e/f|g/h"]) def test_escaped_split_ends_with_escape(self): parts = self.provider.escaped_split("a|b\\") self.assertEqual(parts, ["a", "b"]) def test_thd_hvac_wrap_read_v1(self): self.provider.api_version = 1 self.provider.client.token = "mockToken" value = self.provider.thd_hvac_wrap_read("some/path") self.assertEqual(value['data']['key'], "value") def test_thd_hvac_wrap_read_v2(self): self.provider.client.token = "mockToken" value = self.provider.thd_hvac_wrap_read("some/path") self.assertEqual(value['data']['data']['key'], "value") # for some reason, errors regarding generator function were thrown @patch("hvac.Client", side_effect=mock_vault) def test_thd_hvac_wrap_read_unauthorized(self, mock_vault): self.provider.client.token = None yield self.assertFailure(self.provider.thd_hvac_wrap_read("some/path"), hvac.exceptions.Unauthorized) def test_thd_hvac_get_reauthorize(self): """ When token is None, provider 
gets unauthorized exception and is forced to re-authenticate """ self.provider.client.token = None value = self.provider.thd_hvac_get("some/path") self.assertEqual(value['data']['data']['key'], "value") @defer.inlineCallbacks def test_get_v1(self): self.provider.api_version = 1 self.provider.client.token = "mockToken" value = yield self.provider.get("some/path|key") self.assertEqual(value, "value") @defer.inlineCallbacks def test_get_v2(self): self.provider.client.token = "mockToken" value = yield self.provider.get("some/path|key") self.assertEqual(value, "value") @defer.inlineCallbacks def test_get_fail_no_key(self): self.provider.client.token = "mockToken" with self.assertRaises(KeyError): yield self.provider.get("some/path") @defer.inlineCallbacks def test_get_fail_wrong_key(self): self.provider.client.token = "mockToken" with self.assertRaises(KeyError): yield self.provider.get("some/path|wrong_key") @defer.inlineCallbacks def test_get_fail_multiple_separators(self): self.provider.client.token = "mockToken" with self.assertRaises(KeyError): yield self.provider.get("some/path|unescaped|key") buildbot-3.4.0/master/buildbot/test/unit/test_secret_in_passwordstore.py000066400000000000000000000120641413250514000267400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from pathlib import Path from unittest import mock from twisted.internet import defer from twisted.python.filepath import FilePath from twisted.trial import unittest from buildbot.secrets.providers.passwordstore import SecretInPass from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.runprocess import ExpectMaster from buildbot.test.util.runprocess import MasterRunProcessMixin class TestSecretInPass(MasterRunProcessMixin, TestReactorMixin, ConfigErrorsMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setup_master_run_process() self.master = fakemaster.make_master(self) with mock.patch.object(Path, "is_file", return_value=True): self.tmp_dir = self.create_temp_dir("temp") self.srvpass = SecretInPass("password", self.tmp_dir) yield self.srvpass.setServiceParent(self.master) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.srvpass.stopService() def create_temp_dir(self, dirname): tempdir = FilePath(self.mktemp()) tempdir.createDirectory() return tempdir.path def test_check_config_secret_in_pass_service(self): self.assertEqual(self.srvpass.name, "SecretInPass") env = self.srvpass._env self.assertEquals(env["PASSWORD_STORE_GPG_OPTS"], "--passphrase password") self.assertEquals(env["PASSWORD_STORE_DIR"], self.tmp_dir) def test_check_config_binary_error_secret_in_pass_service(self): expected_error_msg = "pass does not exist in PATH" with mock.patch.object(Path, "is_file", return_value=False): with self.assertRaisesConfigError(expected_error_msg): self.srvpass.checkConfig("password", "temp") def test_check_config_directory_error_secret_in_pass_service(self): expected_error_msg = "directory temp2 does not exist" with mock.patch.object(Path, "is_file", return_value=True): with self.assertRaisesConfigError(expected_error_msg): 
self.srvpass.checkConfig("password", "temp2") @defer.inlineCallbacks def test_reconfig_secret_in_a_file_service(self): with mock.patch.object(Path, "is_file", return_value=True): otherdir = self.create_temp_dir("temp2") yield self.srvpass.reconfigService("password2", otherdir) self.assertEqual(self.srvpass.name, "SecretInPass") env = self.srvpass._env self.assertEquals(env["PASSWORD_STORE_GPG_OPTS"], "--passphrase password2") self.assertEquals(env["PASSWORD_STORE_DIR"], otherdir) @defer.inlineCallbacks def test_get_secret_in_pass(self): self.expect_commands( ExpectMaster(['pass', 'secret']) .stdout(b'value') ) value = yield self.srvpass.get("secret") self.assertEqual(value, "value") self.assert_all_commands_ran() @defer.inlineCallbacks def test_get_secret_in_pass_multiple_lines_unix(self): self.expect_commands( ExpectMaster(['pass', 'secret']) .stdout(b"value1\nvalue2\nvalue3") ) value = yield self.srvpass.get("secret") self.assertEqual(value, "value1") self.assert_all_commands_ran() @defer.inlineCallbacks def test_get_secret_in_pass_multiple_lines_darwin(self): self.expect_commands( ExpectMaster(['pass', 'secret']) .stdout(b"value1\rvalue2\rvalue3") ) value = yield self.srvpass.get("secret") self.assertEqual(value, "value1") self.assert_all_commands_ran() @defer.inlineCallbacks def test_get_secret_in_pass_multiple_lines_windows(self): self.expect_commands( ExpectMaster(['pass', 'secret']) .stdout(b"value1\r\nvalue2\r\nvalue3") ) value = yield self.srvpass.get("secret") self.assertEqual(value, "value1") self.assert_all_commands_ran() @defer.inlineCallbacks def test_get_secret_in_pass_not_found(self): self.expect_commands( ExpectMaster(['pass', 'secret']) .stderr(b"Not found") ) value = yield self.srvpass.get("secret") self.assertEqual(value, None) buildbot-3.4.0/master/buildbot/test/unit/test_secret_in_vault.py000066400000000000000000000165241413250514000251610ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import warnings from twisted.internet import defer from twisted.trial import unittest from buildbot.secrets.providers.vault import HashiCorpVaultSecretProvider from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin class TestSecretInVaultHttpFakeBase(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self, version): warnings.simplefilter('ignore') self.setUpTestReactor() self.srvcVault = HashiCorpVaultSecretProvider(vaultServer="http://vaultServer", vaultToken="someToken", apiVersion=version) self.master = fakemaster.make_master(self, wantData=True) self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, 'http://vaultServer', headers={'X-Vault-Token': "someToken"}) yield self.srvcVault.setServiceParent(self.master) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.srvcVault.stopService() class TestSecretInVaultV1(TestSecretInVaultHttpFakeBase): def setUp(self): super().setUp(version=1) @defer.inlineCallbacks def testGetValue(self): self._http.expect(method='get', ep='/v1/secret/value', params=None, data=None, json=None, code=200, 
content_json={"data": {"value": "value1"}}) value = yield self.srvcVault.get("value") self.assertEqual(value, "value1") @defer.inlineCallbacks def test_get_any_key_without_value_name(self): self._http.expect(method='get', ep='/v1/secret/any_key', params=None, data=None, json=None, code=200, content_json={"data": {"any_value": "value1"}}) yield self.assertFailure(self.srvcVault.get("any_key"), KeyError) @defer.inlineCallbacks def test_get_any_key_with_value_name(self): self._http.expect(method='get', ep='/v1/secret/any_key', params=None, data=None, json=None, code=200, content_json={"data": {"any_value": "value1"}}) value = yield self.srvcVault.get("any_key/any_value") self.assertEqual(value, "value1") @defer.inlineCallbacks def testGetValueNotFound(self): self._http.expect(method='get', ep='/v1/secret/value', params=None, data=None, json=None, code=200, content_json={"data": {"valueNotFound": "value1"}}) yield self.assertFailure(self.srvcVault.get("value"), KeyError) @defer.inlineCallbacks def testGetError(self): self._http.expect(method='get', ep='/v1/secret/valueNotFound', params=None, data=None, json=None, code=404, content_json={"data": {"valueNotFound": "value1"}}) yield self.assertFailure(self.srvcVault.get("valueNotFound"), KeyError) def testCheckConfigSecretInVaultService(self): self.assertEqual(self.srvcVault.name, "SecretInVault") self.assertEqual(self.srvcVault.vaultServer, "http://vaultServer") self.assertEqual(self.srvcVault.vaultToken, "someToken") def testCheckConfigErrorSecretInVaultService(self): with self.assertRaisesConfigError( "vaultServer must be a string while it is"): self.srvcVault.checkConfig() def testCheckConfigErrorSecretInVaultServiceWrongServerAddress(self): with self.assertRaisesConfigError( "vaultToken must be a string while it is"): self.srvcVault.checkConfig(vaultServer="serveraddr") def test_check_config_error_apiVersion_unsupported(self): with self.assertRaisesConfigError( "apiVersion 0 is not supported"): 
self.srvcVault.checkConfig(vaultServer="serveraddr", vaultToken="vaultToken", apiVersion=0) @defer.inlineCallbacks def testReconfigSecretInVaultService(self): self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, 'serveraddr', headers={'X-Vault-Token': "someToken"}) yield self.srvcVault.reconfigService(vaultServer="serveraddr", vaultToken="someToken") self.assertEqual(self.srvcVault.vaultServer, "serveraddr") self.assertEqual(self.srvcVault.vaultToken, "someToken") class TestSecretInVaultV2(TestSecretInVaultHttpFakeBase): def setUp(self): super().setUp(version=2) @defer.inlineCallbacks def testGetValue(self): self._http.expect(method='get', ep='/v1/secret/data/value', params=None, data=None, json=None, code=200, content_json={"data": {"data": {"value": "value1"}}}) value = yield self.srvcVault.get("value") self.assertEqual(value, "value1") @defer.inlineCallbacks def test_get_any_key_without_value_name(self): self._http.expect(method='get', ep='/v1/secret/data/any_key', params=None, data=None, json=None, code=200, content_json={"data": {"data": {"any_value": "value1"}}}) yield self.assertFailure(self.srvcVault.get("any_key"), KeyError) @defer.inlineCallbacks def test_get_any_key_with_value_name(self): self._http.expect(method='get', ep='/v1/secret/data/any_key', params=None, data=None, json=None, code=200, content_json={"data": {"data": {"any_value": "value1"}}}) value = yield self.srvcVault.get("any_key/any_value") self.assertEqual(value, "value1") @defer.inlineCallbacks def testGetValueNotFound(self): self._http.expect(method='get', ep='/v1/secret/data/value', params=None, data=None, json=None, code=200, content_json={"data": {"data": {"valueNotFound": "value1"}}}) yield self.assertFailure(self.srvcVault.get("value"), KeyError) @defer.inlineCallbacks def testGetError(self): self._http.expect(method='get', ep='/v1/secret/data/valueNotFound', params=None, data=None, json=None, code=404, content_json={"data": {"data": 
{"valueNotFound": "value1"}}}) yield self.assertFailure(self.srvcVault.get("valueNotFound"), KeyError) buildbot-3.4.0/master/buildbot/test/unit/test_secret_rendered_service.py000066400000000000000000000040501413250514000266370ustar00rootroot00000000000000from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Secret from buildbot.secrets.manager import SecretManager from buildbot.test.fake import fakemaster from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util.misc import TestReactorMixin from buildbot.util.service import BuildbotService class FakeServiceUsingSecrets(BuildbotService): name = "FakeServiceUsingSecrets" secrets = ["foo", "bar", "secret"] def reconfigService(self, foo=None, bar=None, secret=None, other=None): self.foo = foo self.bar = bar self.secret = secret def returnRenderedSecrets(self, secretKey): return getattr(self, secretKey) class TestRenderSecrets(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) fakeStorageService = FakeSecretStorage(secretdict={"foo": "bar", "other": "value"}) self.secretsrv = SecretManager() self.secretsrv.services = [fakeStorageService] yield self.secretsrv.setServiceParent(self.master) self.srvtest = FakeServiceUsingSecrets() yield self.srvtest.setServiceParent(self.master) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def test_secret_rendered(self): yield self.srvtest.configureService() new = FakeServiceUsingSecrets(foo=Secret("foo"), other=Secret("other")) yield self.srvtest.reconfigServiceWithSibling(new) self.assertEqual("bar", self.srvtest.returnRenderedSecrets("foo")) @defer.inlineCallbacks def test_secret_rendered_not_found(self): new = FakeServiceUsingSecrets(foo=Secret("foo")) yield self.srvtest.reconfigServiceWithSibling(new) with 
self.assertRaises(Exception): self.srvtest.returnRenderedSecrets("more") buildbot-3.4.0/master/buildbot/test/unit/test_stats_service.py000066400000000000000000000517641413250514000246560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.errors import CaptureCallbackError from buildbot.statistics import capture from buildbot.statistics import stats_service from buildbot.statistics import storage_backends from buildbot.statistics.storage_backends.base import StatsStorageBase from buildbot.statistics.storage_backends.influxdb_client import InfluxStorageService from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.fake import fakestats from buildbot.test.util import logging from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestStatsServicesBase(TestReactorMixin, unittest.TestCase): BUILDER_NAMES = ['builder1', 'builder2'] BUILDER_IDS = [1, 2] @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) for builderid, name in zip(self.BUILDER_IDS, self.BUILDER_NAMES): self.master.db.builders.addTestBuilder( 
builderid=builderid, name=name) self.stats_service = stats_service.StatsService(storage_backends=[ fakestats.FakeStatsStorageService() ], name="FakeStatsService") yield self.stats_service.setServiceParent(self.master) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() class TestStatsServicesConfiguration(TestStatsServicesBase): @defer.inlineCallbacks def test_reconfig_with_no_storage_backends(self): new_storage_backends = [] yield self.stats_service.reconfigService(new_storage_backends) self.checkEqual(new_storage_backends) @defer.inlineCallbacks def test_reconfig_with_fake_storage_backend(self): new_storage_backends = [ fakestats.FakeStatsStorageService(name='One'), fakestats.FakeStatsStorageService(name='Two') ] yield self.stats_service.reconfigService(new_storage_backends) self.checkEqual(new_storage_backends) @defer.inlineCallbacks def test_reconfig_with_consumers(self): backend = fakestats.FakeStatsStorageService(name='One') backend.captures = [capture.CaptureProperty('test_builder', 'test')] new_storage_backends = [backend] yield self.stats_service.reconfigService(new_storage_backends) yield self.stats_service.reconfigService(new_storage_backends) self.assertEqual(len(self.master.mq.qrefs), 1) @defer.inlineCallbacks def test_bad_configuration(self): # Reconfigure with a bad configuration. new_storage_backends = [mock.Mock()] with self.assertRaises(TypeError): yield self.stats_service.reconfigService(new_storage_backends) def checkEqual(self, new_storage_backends): # Check whether the new_storage_backends was set in reconfigService registeredStorageServices = \ [s for s in self.stats_service.registeredStorageServices if isinstance(s, StatsStorageBase)] for s in new_storage_backends: if s not in registeredStorageServices: raise AssertionError("reconfigService failed." 
"Not all storage services registered.") class TestInfluxDB(TestStatsServicesBase, logging.LoggingMixin): # Smooth test of influx db service. We don't want to force people to install influxdb, so we # just disable this unit test if the influxdb module is not installed, # using SkipTest @defer.inlineCallbacks def test_influxdb_not_installed(self): captures = [capture.CaptureProperty('test_builder', 'test')] try: # Try to import import influxdb # pylint: disable=import-outside-toplevel # consume it somehow to please pylint [influxdb] except ImportError: with self.assertRaises(config.ConfigErrors): InfluxStorageService("fake_url", "fake_port", "fake_user", "fake_password", "fake_db", captures) # if instead influxdb is installed, then initialize it - no errors # should be realized else: new_storage_backends = [ InfluxStorageService("fake_url", "fake_port", "fake_user", "fake_password", "fake_db", captures) ] yield self.stats_service.reconfigService(new_storage_backends) @defer.inlineCallbacks def test_influx_storage_service_fake_install(self): # use a fake InfluxDBClient to test InfluxStorageService in systems which # don't have influxdb installed. Primarily useful for test coverage. 
self.patch(storage_backends.influxdb_client, 'InfluxDBClient', fakestats.FakeInfluxDBClient) captures = [capture.CaptureProperty('test_builder', 'test')] new_storage_backends = [InfluxStorageService( "fake_url", "fake_port", "fake_user", "fake_password", "fake_db", captures )] yield self.stats_service.reconfigService(new_storage_backends) def test_influx_storage_service_post_value(self): # test the thd_postStatsValue method of InfluxStorageService self.patch(storage_backends.influxdb_client, 'InfluxDBClient', fakestats.FakeInfluxDBClient) svc = InfluxStorageService( "fake_url", "fake_port", "fake_user", "fake_password", "fake_db", "fake_stats") post_data = { 'name': 'test', 'value': 'test' } context = {'x': 'y'} svc.thd_postStatsValue(post_data, "test_series_name", context) data = { 'measurement': "test_series_name", 'fields': { "name": "test", "value": "test" }, 'tags': {'x': 'y'} } points = [data] self.assertEqual(svc.client.points, points) def test_influx_service_not_inited(self): self.setUpLogging() self.patch(storage_backends.influxdb_client, 'InfluxDBClient', fakestats.FakeInfluxDBClient) svc = InfluxStorageService( "fake_url", "fake_port", "fake_user", "fake_password", "fake_db", "fake_stats") svc._inited = False svc.thd_postStatsValue("test", "test", "test") self.assertLogged("Service.*not initialized") class TestStatsServicesConsumers(steps.BuildStepMixin, TestStatsServicesBase): """ Test the stats service from a fake step """ @defer.inlineCallbacks def setUp(self): yield super().setUp() self.routingKey = ( "builders", self.BUILDER_IDS[0], "builds", 1, "finished") self.master.mq.verifyMessages = False def setupBuild(self): self.master.db.insertTestData([ fakedb.Build(id=1, masterid=1, workerid=1, builderid=self.BUILDER_IDS[0], buildrequestid=1, number=1), ]) @defer.inlineCallbacks def setupFakeStorage(self, captures): self.fake_storage_service = fakestats.FakeStatsStorageService() self.fake_storage_service.captures = captures yield 
self.stats_service.reconfigService([self.fake_storage_service]) def get_dict(self, build): return dict( buildid=1, number=build['number'], builderid=build['builderid'], buildrequestid=build['buildrequestid'], workerid=build['workerid'], masterid=build['masterid'], started_at=build['started_at'], complete=True, complete_at=build['complete_at'], state_string='', results=0, ) @defer.inlineCallbacks def end_build_call_consumers(self): self.master.db.builds.finishBuild(buildid=1, results=0) build = yield self.master.db.builds.getBuild(buildid=1) self.master.mq.callConsumer(self.routingKey, self.get_dict(build)) @defer.inlineCallbacks def test_property_capturing(self): self.setupFakeStorage( [capture.CaptureProperty('builder1', 'test_name')]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') yield self.end_build_call_consumers() self.assertEqual([( {'name': 'test_name', 'value': 'test_value'}, 'builder1-test_name', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_property_capturing_all_builders(self): self.setupFakeStorage( [capture.CapturePropertyAllBuilders('test_name')]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') yield self.end_build_call_consumers() self.assertEqual([( {'name': 'test_name', 'value': 'test_value'}, 'builder1-test_name', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_property_capturing_regex(self): self.setupFakeStorage( [capture.CaptureProperty('builder1', 'test_n.*', regex=True)]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') yield self.end_build_call_consumers() self.assertEqual([( {'name': 'test_name', 'value': 'test_value'}, 'builder1-test_name', {'build_number': '1', 'builder_name': 'builder1'} )], 
self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_property_capturing_error(self): self.setupFakeStorage([capture.CaptureProperty('builder1', 'test')]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') self.master.db.builds.finishBuild(buildid=1, results=0) build = yield self.master.db.builds.getBuild(buildid=1) cap = self.fake_storage_service.captures[0] yield self.assertFailure(cap.consume(self.routingKey, self.get_dict(build)), CaptureCallbackError) @defer.inlineCallbacks def test_property_capturing_alt_callback(self): def cb(*args, **kwargs): return 'test_value' self.setupFakeStorage( [capture.CaptureProperty('builder1', 'test_name', cb)]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') yield self.end_build_call_consumers() self.assertEqual([( {'name': 'test_name', 'value': 'test_value'}, 'builder1-test_name', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_build_start_time_capturing(self): self.setupFakeStorage([capture.CaptureBuildStartTime('builder1')]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'start-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_start_time_capturing_all_builders(self): self.setupFakeStorage([capture.CaptureBuildStartTimeAllBuilders()]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'start-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_start_time_capturing_alt_callback(self): def cb(*args, **kwargs): return '2015-07-08T01:45:17.391018' self.setupFakeStorage([capture.CaptureBuildStartTime('builder1', cb)]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'start-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) 
@defer.inlineCallbacks def test_build_end_time_capturing(self): self.setupFakeStorage([capture.CaptureBuildEndTime('builder1')]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'end-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_end_time_capturing_all_builders(self): self.setupFakeStorage([capture.CaptureBuildEndTimeAllBuilders()]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'end-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_end_time_capturing_alt_callback(self): def cb(*args, **kwargs): return '2015-07-08T01:45:17.391018' self.setupFakeStorage([capture.CaptureBuildEndTime('builder1', cb)]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'end-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def build_time_capture_helper(self, time_type, cb=None): self.setupFakeStorage([capture.CaptureBuildDuration('builder1', report_in=time_type, callback=cb)]) self.setupBuild() yield self.end_build_call_consumers() @defer.inlineCallbacks def test_build_duration_capturing_seconds(self): yield self.build_time_capture_helper('seconds') self.assertEqual( 'duration', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_duration_capturing_minutes(self): yield self.build_time_capture_helper('minutes') self.assertEqual( 'duration', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_duration_capturing_hours(self): yield self.build_time_capture_helper('hours') self.assertEqual( 'duration', list(self.fake_storage_service.stored_data[0][0].keys())[0]) def test_build_duration_report_in_error(self): with self.assertRaises(config.ConfigErrors): capture.CaptureBuildDuration('builder1', report_in='foobar') @defer.inlineCallbacks def 
test_build_duration_capturing_alt_callback(self): def cb(*args, **kwargs): return 10 yield self.build_time_capture_helper('seconds', cb) self.assertEqual( 'duration', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_duration_capturing_all_builders(self): self.setupFakeStorage([capture.CaptureBuildDurationAllBuilders()]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'duration', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_times_capturing_error(self): def cb(*args, **kwargs): raise TypeError self.setupFakeStorage([capture.CaptureBuildStartTime('builder1', cb)]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') self.master.db.builds.finishBuild(buildid=1, results=0) build = yield self.master.db.builds.getBuild(buildid=1) cap = self.fake_storage_service.captures[0] yield self.assertFailure(cap.consume(self.routingKey, self.get_dict(build)), CaptureCallbackError) self.setupFakeStorage([capture.CaptureBuildEndTime('builder1', cb)]) cap = self.fake_storage_service.captures[0] yield self.assertFailure(cap.consume(self.routingKey, self.get_dict(build)), CaptureCallbackError) self.setupFakeStorage( [capture.CaptureBuildDuration('builder1', callback=cb)]) cap = self.fake_storage_service.captures[0] yield self.assertFailure(cap.consume(self.routingKey, self.get_dict(build)), CaptureCallbackError) @defer.inlineCallbacks def test_yield_metrics_value(self): self.setupFakeStorage([capture.CaptureBuildStartTime('builder1')]) self.setupBuild() yield self.end_build_call_consumers() yield self.stats_service.yieldMetricsValue('test', {'test': 'test'}, 1) build_data = yield self.stats_service.master.data.get(('builds', 1)) routingKey = ("stats-yieldMetricsValue", "stats-yield-data") msg = { 'data_name': 'test', 'post_data': {'test': 'test'}, 'build_data': build_data } exp = [(routingKey, msg)] 
self.stats_service.master.mq.assertProductions(exp) @defer.inlineCallbacks def test_capture_data(self): self.setupFakeStorage([capture.CaptureData('test', 'builder1')]) self.setupBuild() self.master.db.builds.finishBuild(buildid=1, results=0) build_data = yield self.stats_service.master.data.get(('builds', 1)) msg = { 'data_name': 'test', 'post_data': {'test': 'test'}, 'build_data': build_data } routingKey = ("stats-yieldMetricsValue", "stats-yield-data") self.master.mq.callConsumer(routingKey, msg) self.assertEqual([( {'test': 'test'}, 'builder1-test', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_capture_data_all_builders(self): self.setupFakeStorage([capture.CaptureDataAllBuilders('test')]) self.setupBuild() self.master.db.builds.finishBuild(buildid=1, results=0) build_data = yield self.stats_service.master.data.get(('builds', 1)) msg = { 'data_name': 'test', 'post_data': {'test': 'test'}, 'build_data': build_data } routingKey = ("stats-yieldMetricsValue", "stats-yield-data") self.master.mq.callConsumer(routingKey, msg) self.assertEqual([( {'test': 'test'}, 'builder1-test', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_capture_data_alt_callback(self): def cb(*args, **kwargs): return {'test': 'test'} self.setupFakeStorage([capture.CaptureData('test', 'builder1', cb)]) self.setupBuild() self.master.db.builds.finishBuild(buildid=1, results=0) build_data = yield self.stats_service.master.data.get(('builds', 1)) msg = { 'data_name': 'test', 'post_data': {'test': 'test'}, 'build_data': build_data } routingKey = ("stats-yieldMetricsValue", "stats-yield-data") self.master.mq.callConsumer(routingKey, msg) self.assertEqual([( {'test': 'test'}, 'builder1-test', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_capture_data_error(self): def 
cb(*args, **kwargs): raise TypeError self.setupFakeStorage([capture.CaptureData('test', 'builder1', cb)]) self.setupBuild() self.master.db.builds.finishBuild(buildid=1, results=0) build_data = yield self.stats_service.master.data.get(('builds', 1)) msg = { 'data_name': 'test', 'post_data': {'test': 'test'}, 'build_data': build_data } routingKey = ("stats-yieldMetricsValue", "stats-yield-data") cap = self.fake_storage_service.captures[0] yield self.assertFailure(cap.consume(routingKey, msg), CaptureCallbackError) buildbot-3.4.0/master/buildbot/test/unit/test_steps_git_diffinfo.py000066400000000000000000000124571413250514000256410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process import results from buildbot.steps import gitdiffinfo from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin try: import unidiff except ImportError: unidiff = None class TestDiffInfo(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): if not unidiff: skip = 'unidiff is required for GitDiffInfo tests' def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_merge_base_failure(self): self.setupStep(gitdiffinfo.GitDiffInfo()) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', 'merge-base', 'HEAD', 'master']) + Expect.log('stdio-merge-base', stderr='fatal: Not a valid object name') + 128) self.expect_log_file_stderr('stdio-merge-base', 'fatal: Not a valid object name') self.expectOutcome(result=results.FAILURE, state_string="GitDiffInfo (failure)") return self.runStep() def test_diff_failure(self): self.setupStep(gitdiffinfo.GitDiffInfo()) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', 'merge-base', 'HEAD', 'master']) + Expect.log('stdio-merge-base', stdout='1234123412341234') + 0, ExpectShell(workdir='wkdir', command=['git', 'diff', '--no-prefix', '-U0', '1234123412341234', 'HEAD']) + Expect.log('stdio-diff', stderr='fatal: ambiguous argument') + 1, ) self.expectLogfile('stdio-merge-base', '1234123412341234') self.expect_log_file_stderr('stdio-diff', 'fatal: ambiguous argument') self.expectOutcome(result=results.FAILURE, state_string="GitDiffInfo (failure)") return self.runStep() def test_empty_diff(self): self.setupStep(gitdiffinfo.GitDiffInfo()) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', 'merge-base', 'HEAD', 'master']) + Expect.log('stdio-merge-base', stdout='1234123412341234') + 0, 
ExpectShell(workdir='wkdir', command=['git', 'diff', '--no-prefix', '-U0', '1234123412341234', 'HEAD']) + Expect.log('stdio-diff', stdout='') + 0, ) self.expectLogfile('stdio-merge-base', '1234123412341234') self.expect_log_file_stderr('stdio-diff', '') self.expectOutcome(result=results.SUCCESS, state_string="GitDiffInfo") self.expect_build_data('diffinfo-master', b'[]', 'GitDiffInfo') return self.runStep() def test_complex_diff(self): self.setupStep(gitdiffinfo.GitDiffInfo()) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', 'merge-base', 'HEAD', 'master']) + Expect.log('stdio-merge-base', stdout='1234123412341234') + 0, ExpectShell(workdir='wkdir', command=['git', 'diff', '--no-prefix', '-U0', '1234123412341234', 'HEAD']) + Expect.log('stdio-diff', stdout='''\ diff --git file1 file1 deleted file mode 100644 index 42f90fd..0000000 --- file1 +++ /dev/null @@ -1,3 +0,0 @@ -line11 -line12 -line13 diff --git file2 file2 index c337bf1..1cb02b9 100644 --- file2 +++ file2 @@ -4,0 +5,3 @@ line24 +line24n +line24n2 +line24n3 @@ -15,0 +19,3 @@ line215 +line215n +line215n2 +line215n3 diff --git file3 file3 new file mode 100644 index 0000000..632e269 --- /dev/null +++ file3 @@ -0,0 +1,3 @@ +line31 +line32 +line33 ''') + 0, ) self.expectLogfile('stdio-merge-base', '1234123412341234') self.expectOutcome(result=results.SUCCESS, state_string="GitDiffInfo") diff_info = ( b'[{"source_file": "file1", "target_file": "/dev/null", ' + b'"is_binary": false, "is_rename": false, ' + b'"hunks": [{"ss": 1, "sl": 3, "ts": 0, "tl": 0}]}, ' + b'{"source_file": "file2", "target_file": "file2", ' + b'"is_binary": false, "is_rename": false, ' + b'"hunks": [{"ss": 4, "sl": 0, "ts": 5, "tl": 3}, ' + b'{"ss": 15, "sl": 0, "ts": 19, "tl": 3}]}, ' + b'{"source_file": "/dev/null", "target_file": "file3", ' + b'"is_binary": false, "is_rename": false, ' + b'"hunks": [{"ss": 0, "sl": 0, "ts": 1, "tl": 3}]}]') self.expect_build_data('diffinfo-master', diff_info, 'GitDiffInfo') return 
self.runStep() buildbot-3.4.0/master/buildbot/test/unit/test_templates_dir/000077500000000000000000000000001413250514000242455ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/test_templates_dir/builds.html000066400000000000000000000000151413250514000264110ustar00rootroot00000000000000
buildbot-3.4.0/master/buildbot/test/unit/test_templates_dir/plugin/000077500000000000000000000000001413250514000255435ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/test_templates_dir/plugin/plugin.jade000066400000000000000000000000531413250514000276640ustar00rootroot00000000000000.myclass pre | this is customized buildbot-3.4.0/master/buildbot/test/unit/test_test_util_validation.py000066400000000000000000000173661413250514000262260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import datetime import locale from twisted.python import log from twisted.trial import unittest from buildbot.test.util import validation from buildbot.util import UTC class VerifyDict(unittest.TestCase): def doValidationTest(self, validator, good, bad): for g in good: log.msg('expect %r to be good' % (g,)) msgs = list(validator.validate('g', g)) self.assertEqual(msgs, [], 'messages for %r' % (g,)) for b in bad: log.msg('expect %r to be bad' % (b,)) msgs = list(validator.validate('b', b)) self.assertNotEqual(msgs, [], 'no messages for %r' % (b,)) log.msg('..got messages:') for msg in msgs: log.msg(" " + msg) def test_IntValidator(self): self.doValidationTest(validation.IntValidator(), good=[ 1, 10 ** 100 ], bad=[ 1.0, "one", "1", None ]) def test_BooleanValidator(self): self.doValidationTest(validation.BooleanValidator(), good=[ True, False ], bad=[ "yes", "no", 1, 0, None ]) def test_StringValidator(self): self.doValidationTest(validation.StringValidator(), good=[ "unicode only" ], bad=[ None, b"bytestring" ]) def test_BinaryValidator(self): self.doValidationTest(validation.BinaryValidator(), good=[ b"bytestring" ], bad=[ None, "no unicode" ]) def test_DateTimeValidator(self): self.doValidationTest(validation.DateTimeValidator(), good=[ datetime.datetime( 1980, 6, 15, 12, 31, 15, tzinfo=UTC), ], bad=[ None, 198847493, # no timezone datetime.datetime(1980, 6, 15, 12, 31, 15), ]) def test_IdentifierValidator(self): os_encoding = locale.getpreferredencoding() try: '\N{SNOWMAN}'.encode(os_encoding) except UnicodeEncodeError as e: # Default encoding of Windows console is 'cp1252' # which cannot encode the snowman. 
raise(unittest.SkipTest("Cannot encode weird unicode " "on this platform with {}".format(os_encoding))) from e self.doValidationTest(validation.IdentifierValidator(50), good=[ "linux", "Linux", "abc123", "a" * 50, '\N{SNOWMAN}' ], bad=[ None, '', b'linux', 'a/b', "a.b.c.d", "a-b_c.d9", 'spaces not allowed', "a" * 51, "123 no initial digits", ]) def test_NoneOk(self): self.doValidationTest( validation.NoneOk(validation.BooleanValidator()), good=[ True, False, None ], bad=[ 1, "yes" ]) def test_DictValidator(self): self.doValidationTest(validation.DictValidator( a=validation.BooleanValidator(), b=validation.StringValidator(), optionalNames=['b']), good=[ {'a': True}, {'a': True, 'b': 'xyz'}, ], bad=[ None, 1, "hi", {}, {'a': 1}, {'a': 1, 'b': 'xyz'}, {'a': True, 'b': 999}, {'a': True, 'b': 'xyz', 'c': 'extra'}, ]) def test_DictValidator_names(self): v = validation.DictValidator( a=validation.BooleanValidator()) self.assertEqual(list(v.validate('v', {'a': 1})), [ "v['a'] (1) is not a boolean" ]) def test_ListValidator(self): self.doValidationTest( validation.ListValidator(validation.BooleanValidator()), good=[ [], [True], [False, True], ], bad=[ None, ['a'], [True, 'a'], 1, "hi" ]) def test_ListValidator_names(self): v = validation.ListValidator(validation.BooleanValidator()) self.assertEqual(list(v.validate('v', ['a'])), [ "v[0] ('a') is not a boolean" ]) def test_SourcedPropertiesValidator(self): self.doValidationTest(validation.SourcedPropertiesValidator(), good=[ {'pname': ('{"a":"b"}', 'test')}, ], bad=[ None, 1, b"hi", {'pname': {b'a': b'b'}}, # no source # name not unicode {'pname': ({b'a': b'b'}, 'test')}, # source not unicode {'pname': ({b'a': b'b'}, 'test')}, # self is not json-able {'pname': (self, 'test')}, ]) def test_MessageValidator(self): self.doValidationTest(validation.MessageValidator( events=[b'started', b'stopped'], messageValidator=validation.DictValidator( a=validation.BooleanValidator(), xid=validation.IntValidator(), 
yid=validation.IntValidator())), good=[ (('thing', '1', '2', 'started'), {'xid': 1, 'yid': 2, 'a': True}), ], bad=[ # routingKey is not a tuple ('thing', {}), # routingKey has wrong event (('thing', '1', '2', 'exploded'), {'xid': 1, 'yid': 2, 'a': True}), # routingKey element has wrong type (('thing', 1, 2, 'started'), {'xid': 1, 'yid': 2, 'a': True}), # routingKey element isn't in message (('thing', '1', '2', 'started'), {'xid': 1, 'a': True}), # message doesn't validate (('thing', '1', '2', 'started'), {'xid': 1, 'yid': 2, 'a': 'x'}), ]) def test_Selector(self): sel = validation.Selector() sel.add(lambda x: x == 'int', validation.IntValidator()) sel.add(lambda x: x == 'str', validation.StringValidator()) self.doValidationTest(sel, good=[ ('int', 1), ('str', 'hi'), ], bad=[ ('int', 'hi'), ('str', 1), ('float', 1.0), ]) buildbot-3.4.0/master/buildbot/test/unit/test_test_util_warnings.py000066400000000000000000000131031413250514000257050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import warnings from twisted.trial import unittest from buildbot.test.util.warnings import assertNotProducesWarnings from buildbot.test.util.warnings import assertProducesWarning from buildbot.test.util.warnings import assertProducesWarnings from buildbot.test.util.warnings import ignoreWarning class SomeWarning(Warning): pass class OtherWarning(Warning): pass class TestWarningsFilter(unittest.TestCase): def test_warnigs_caught(self): # Assertion is correct. with assertProducesWarning(SomeWarning): warnings.warn("test", SomeWarning) def test_warnigs_caught_num_check(self): # Assertion is correct. with assertProducesWarnings(SomeWarning, num_warnings=3): warnings.warn("1", SomeWarning) warnings.warn("2", SomeWarning) warnings.warn("3", SomeWarning) def test_warnigs_caught_num_check_fail(self): def f1(): with assertProducesWarnings(SomeWarning, num_warnings=2): pass with self.assertRaises(AssertionError): f1() def f2(): with assertProducesWarnings(SomeWarning, num_warnings=2): warnings.warn("1", SomeWarning) with self.assertRaises(AssertionError): f2() def f3(): with assertProducesWarnings(SomeWarning, num_warnings=2): warnings.warn("1", SomeWarning) warnings.warn("2", SomeWarning) warnings.warn("3", SomeWarning) with self.assertRaises(AssertionError): f3() def test_warnigs_caught_pattern_check(self): # Assertion is correct. with assertProducesWarning(SomeWarning, message_pattern=r"t.st"): warnings.warn("The test", SomeWarning) def test_warnigs_caught_pattern_check_fail(self): def f(): # Assertion fails. with assertProducesWarning(SomeWarning, message_pattern=r"other"): warnings.warn("The test", SomeWarning) with self.assertRaises(AssertionError): f() def test_warnigs_caught_patterns_check(self): # Assertion is correct. 
with assertProducesWarnings(SomeWarning, messages_patterns=["1", "2", "3"]): warnings.warn("log 1 message", SomeWarning) warnings.warn("log 2 message", SomeWarning) warnings.warn("log 3 message", SomeWarning) def test_warnigs_caught_patterns_check_fails(self): def f1(): # Assertion fails. with assertProducesWarnings(SomeWarning, messages_patterns=["1", "2"]): warnings.warn("msg 1", SomeWarning) with self.assertRaises(AssertionError): f1() def f2(): # Assertion fails. with assertProducesWarnings(SomeWarning, messages_patterns=["1", "2"]): warnings.warn("msg 2", SomeWarning) warnings.warn("msg 1", SomeWarning) with self.assertRaises(AssertionError): f2() def f3(): # Assertion fails. with assertProducesWarnings(SomeWarning, messages_patterns=["1", "2"]): warnings.warn("msg 1", SomeWarning) warnings.warn("msg 2", SomeWarning) warnings.warn("msg 3", SomeWarning) with self.assertRaises(AssertionError): f3() def test_no_warnigs_check(self): with assertNotProducesWarnings(SomeWarning): pass with ignoreWarning(OtherWarning): with assertNotProducesWarnings(SomeWarning): warnings.warn("msg 3", OtherWarning) def test_warnigs_filter(self): with ignoreWarning(OtherWarning): with assertProducesWarnings(SomeWarning, messages_patterns=["1", "2", "3"]): warnings.warn("other", OtherWarning) warnings.warn("log 1 message", SomeWarning) warnings.warn("other", OtherWarning) warnings.warn("log 2 message", SomeWarning) warnings.warn("other", OtherWarning) warnings.warn("log 3 message", SomeWarning) warnings.warn("other", OtherWarning) def test_nested_filters(self): with assertProducesWarnings(SomeWarning, messages_patterns=["some 1"]): with assertProducesWarnings(OtherWarning, messages_patterns=["other 1"]): warnings.warn("other 1", OtherWarning) warnings.warn("some 1", SomeWarning) def test_ignore_warnings(self): with assertNotProducesWarnings(SomeWarning): with ignoreWarning(SomeWarning): warnings.warn("some 1", SomeWarning) 
buildbot-3.4.0/master/buildbot/test/unit/test_util.py000066400000000000000000000417261413250514000227520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import locale import os import mock from twisted.internet import reactor from twisted.internet import task from twisted.trial import unittest from buildbot import util class formatInterval(unittest.TestCase): def test_zero(self): self.assertEqual(util.formatInterval(0), "0 secs") def test_seconds_singular(self): self.assertEqual(util.formatInterval(1), "1 secs") def test_seconds(self): self.assertEqual(util.formatInterval(7), "7 secs") def test_minutes_one(self): self.assertEqual(util.formatInterval(60), "60 secs") def test_minutes_over_one(self): self.assertEqual(util.formatInterval(61), "1 mins, 1 secs") def test_minutes(self): self.assertEqual(util.formatInterval(300), "5 mins, 0 secs") def test_hours_one(self): self.assertEqual(util.formatInterval(3600), "60 mins, 0 secs") def test_hours_over_one_sec(self): self.assertEqual(util.formatInterval(3601), "1 hrs, 1 secs") def test_hours_over_one_min(self): self.assertEqual(util.formatInterval(3660), "1 hrs, 60 secs") def test_hours(self): self.assertEqual(util.formatInterval(7200), "2 hrs, 0 secs") def test_mixed(self): self.assertEqual(util.formatInterval(7392), "2 hrs, 3 mins, 
12 secs") class TestHumanReadableDelta(unittest.TestCase): def test_timeDeltaToHumanReadable(self): """ It will return a human readable time difference. """ try: datetime.datetime.fromtimestamp(1) except OSError as e: raise unittest.SkipTest( "Python 3.6 bug on Windows: " "https://bugs.python.org/issue29097") from e result = util.human_readable_delta(1, 1) self.assertEqual('super fast', result) result = util.human_readable_delta(1, 2) self.assertEqual('1 seconds', result) result = util.human_readable_delta(1, 61) self.assertEqual('1 minutes', result) result = util.human_readable_delta(1, 62) self.assertEqual('1 minutes, 1 seconds', result) result = util.human_readable_delta(1, 60 * 60 + 1) self.assertEqual('1 hours', result) result = util.human_readable_delta(1, 60 * 60 + 61) self.assertEqual('1 hours, 1 minutes', result) result = util.human_readable_delta(1, 60 * 60 + 62) self.assertEqual('1 hours, 1 minutes, 1 seconds', result) result = util.human_readable_delta(1, 24 * 60 * 60 + 1) self.assertEqual('1 days', result) result = util.human_readable_delta(1, 24 * 60 * 60 + 2) self.assertEqual('1 days, 1 seconds', result) class TestFuzzyInterval(unittest.TestCase): def test_moment(self): self.assertEqual(util.fuzzyInterval(1), "a moment") def test_seconds(self): self.assertEqual(util.fuzzyInterval(17), "17 seconds") def test_seconds_rounded(self): self.assertEqual(util.fuzzyInterval(48), "50 seconds") def test_minute(self): self.assertEqual(util.fuzzyInterval(58), "a minute") def test_minutes(self): self.assertEqual(util.fuzzyInterval(3 * 60 + 24), "3 minutes") def test_minutes_rounded(self): self.assertEqual(util.fuzzyInterval(32 * 60 + 24), "30 minutes") def test_hour(self): self.assertEqual(util.fuzzyInterval(3600 + 1200), "an hour") def test_hours(self): self.assertEqual(util.fuzzyInterval(9 * 3600 - 720), "9 hours") def test_day(self): self.assertEqual(util.fuzzyInterval(32 * 3600 + 124), "a day") def test_days(self): self.assertEqual(util.fuzzyInterval((19 + 24) 
* 3600 + 124), "2 days") def test_month(self): self.assertEqual(util.fuzzyInterval(36 * 24 * 3600 + 124), "a month") def test_months(self): self.assertEqual(util.fuzzyInterval(86 * 24 * 3600 + 124), "3 months") def test_year(self): self.assertEqual(util.fuzzyInterval(370 * 24 * 3600), "a year") def test_years(self): self.assertEqual(util.fuzzyInterval((2 * 365 + 96) * 24 * 3600), "2 years") class safeTranslate(unittest.TestCase): def test_str_good(self): self.assertEqual(util.safeTranslate(str("full")), b"full") def test_str_bad(self): self.assertEqual(util.safeTranslate(str("speed=slow;quality=high")), b"speed_slow_quality_high") def test_str_pathological(self): # if you needed proof this wasn't for use with sensitive data self.assertEqual(util.safeTranslate(str("p\ath\x01ogy")), b"p\ath\x01ogy") # bad chars still here! def test_unicode_good(self): self.assertEqual(util.safeTranslate("full"), b"full") def test_unicode_bad(self): self.assertEqual(util.safeTranslate(str("speed=slow;quality=high")), b"speed_slow_quality_high") def test_unicode_pathological(self): self.assertEqual(util.safeTranslate("\u0109"), b"\xc4\x89") # yuck! 
class naturalSort(unittest.TestCase): def test_alpha(self): self.assertEqual( util.naturalSort(['x', 'aa', 'ab']), ['aa', 'ab', 'x']) def test_numeric(self): self.assertEqual( util.naturalSort(['1', '10', '11', '2', '20']), ['1', '2', '10', '11', '20']) def test_alphanum(self): l1 = 'aa10ab aa1ab aa10aa f a aa3 aa30 aa3a aa30a'.split() l2 = 'a aa1ab aa3 aa3a aa10aa aa10ab aa30 aa30a f'.split() self.assertEqual(util.naturalSort(l1), l2) class none_or_str(unittest.TestCase): def test_none(self): self.assertEqual(util.none_or_str(None), None) def test_str(self): self.assertEqual(util.none_or_str("hi"), "hi") def test_int(self): self.assertEqual(util.none_or_str(199), "199") class TimeFunctions(unittest.TestCase): def test_UTC(self): self.assertEqual(util.UTC.utcoffset(datetime.datetime.now()), datetime.timedelta(0)) self.assertEqual(util.UTC.dst(datetime.datetime.now()), datetime.timedelta(0)) self.assertEqual(util.UTC.tzname(datetime.datetime.utcnow()), "UTC") def test_epoch2datetime(self): self.assertEqual(util.epoch2datetime(0), datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=util.UTC)) self.assertEqual(util.epoch2datetime(1300000000), datetime.datetime(2011, 3, 13, 7, 6, 40, tzinfo=util.UTC)) def test_datetime2epoch(self): dt = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=util.UTC) self.assertEqual(util.datetime2epoch(dt), 0) dt = datetime.datetime(2011, 3, 13, 7, 6, 40, tzinfo=util.UTC) self.assertEqual(util.datetime2epoch(dt), 1300000000) class DiffSets(unittest.TestCase): def test_empty(self): removed, added = util.diffSets(set([]), set([])) self.assertEqual((removed, added), (set([]), set([]))) def test_no_lists(self): removed, added = util.diffSets([1, 2], [2, 3]) self.assertEqual((removed, added), (set([1]), set([3]))) def test_no_overlap(self): removed, added = util.diffSets(set([1, 2]), set([3, 4])) self.assertEqual((removed, added), (set([1, 2]), set([3, 4]))) def test_no_change(self): removed, added = util.diffSets(set([1, 2]), set([1, 2])) 
self.assertEqual((removed, added), (set([]), set([]))) def test_added(self): removed, added = util.diffSets(set([1, 2]), set([1, 2, 3])) self.assertEqual((removed, added), (set([]), set([3]))) def test_removed(self): removed, added = util.diffSets(set([1, 2]), set([1])) self.assertEqual((removed, added), (set([2]), set([]))) class MakeList(unittest.TestCase): def test_empty_string(self): self.assertEqual(util.makeList(''), ['']) def test_None(self): self.assertEqual(util.makeList(None), []) def test_string(self): self.assertEqual(util.makeList('hello'), ['hello']) def test_unicode(self): self.assertEqual(util.makeList('\N{SNOWMAN}'), ['\N{SNOWMAN}']) def test_list(self): self.assertEqual(util.makeList(['a', 'b']), ['a', 'b']) def test_tuple(self): self.assertEqual(util.makeList(('a', 'b')), ['a', 'b']) def test_copy(self): input = ['a', 'b'] output = util.makeList(input) input.append('c') self.assertEqual(output, ['a', 'b']) class Flatten(unittest.TestCase): def test_simple(self): self.assertEqual(util.flatten([1, 2, 3]), [1, 2, 3]) def test_deep(self): self.assertEqual(util.flatten([[1, 2], 3, [[4]]]), [1, 2, 3, 4]) # def test_deeply_nested(self): # self.assertEqual(util.flatten([5, [6, (7, 8)]]), # [5, 6, 7, 8]) # def test_tuples(self): # self.assertEqual(util.flatten([(1, 2), 3]), [1, 2, 3]) def test_dict(self): d = {'a': [5, 6, 7], 'b': [7, 8, 9]} self.assertEqual(util.flatten(d), d) def test_string(self): self.assertEqual(util.flatten("abc"), "abc") class Ascii2Unicode(unittest.TestCase): def test_unicode(self): rv = util.bytes2unicode('\N{SNOWMAN}', encoding='ascii') self.assertEqual((rv, type(rv)), ('\N{SNOWMAN}', str)) def test_ascii(self): rv = util.bytes2unicode('abcd', encoding='ascii') self.assertEqual((rv, type(rv)), ('abcd', str)) def test_nonascii(self): with self.assertRaises(UnicodeDecodeError): util.bytes2unicode(b'a\x85', encoding='ascii') def test_None(self): self.assertEqual(util.bytes2unicode(None, encoding='ascii'), None) def 
test_bytes2unicode(self): rv1 = util.bytes2unicode(b'abcd') rv2 = util.bytes2unicode('efgh') self.assertEqual(type(rv1), str) self.assertEqual(type(rv2), str) class StringToBoolean(unittest.TestCase): def test_it(self): stringValues = [ (b'on', True), (b'true', True), (b'yes', True), (b'1', True), (b'off', False), (b'false', False), (b'no', False), (b'0', False), (b'ON', True), (b'TRUE', True), (b'YES', True), (b'OFF', False), (b'FALSE', False), (b'NO', False), ] for s, b in stringValues: self.assertEqual(util.string2boolean(s), b, repr(s)) def test_ascii(self): rv = util.bytes2unicode(b'abcd', encoding='ascii') self.assertEqual((rv, type(rv)), ('abcd', str)) def test_nonascii(self): with self.assertRaises(UnicodeDecodeError): util.bytes2unicode(b'a\x85', encoding='ascii') def test_None(self): self.assertEqual(util.bytes2unicode(None, encoding='ascii'), None) class AsyncSleep(unittest.TestCase): def test_sleep(self): clock = task.Clock() self.patch(reactor, 'callLater', clock.callLater) d = util.asyncSleep(2) self.assertFalse(d.called) clock.advance(1) self.assertFalse(d.called) clock.advance(1) self.assertTrue(d.called) class FunctionalEnvironment(unittest.TestCase): def test_working_locale(self): environ = {'LANG': 'en_GB.UTF-8'} self.patch(os, 'environ', environ) config = mock.Mock() util.check_functional_environment(config) self.assertEqual(config.error.called, False) def test_broken_locale(self): def err(): raise KeyError self.patch(locale, 'getdefaultlocale', err) config = mock.Mock() util.check_functional_environment(config) config.error.assert_called_with(mock.ANY) class StripUrlPassword(unittest.TestCase): def test_simple_url(self): self.assertEqual(util.stripUrlPassword('http://foo.com/bar'), 'http://foo.com/bar') def test_username(self): self.assertEqual(util.stripUrlPassword('http://d@foo.com/bar'), 'http://d@foo.com/bar') def test_username_with_at(self): self.assertEqual(util.stripUrlPassword('http://d@bb.net@foo.com/bar'), 
'http://d@bb.net@foo.com/bar') def test_username_pass(self): self.assertEqual(util.stripUrlPassword('http://d:secret@foo.com/bar'), 'http://d:xxxx@foo.com/bar') def test_username_pass_with_at(self): self.assertEqual( util.stripUrlPassword('http://d@bb.net:scrt@foo.com/bar'), 'http://d@bb.net:xxxx@foo.com/bar') class JoinList(unittest.TestCase): def test_list(self): self.assertEqual(util.join_list(['aa', 'bb']), 'aa bb') def test_tuple(self): self.assertEqual(util.join_list(('aa', 'bb')), 'aa bb') def test_string(self): self.assertEqual(util.join_list('abc'), 'abc') def test_unicode(self): self.assertEqual(util.join_list('abc'), 'abc') def test_nonascii(self): with self.assertRaises(UnicodeDecodeError): util.join_list([b'\xff']) class CommandToString(unittest.TestCase): def test_short_string(self): self.assertEqual(util.command_to_string("ab cd"), "'ab cd'") def test_long_string(self): self.assertEqual(util.command_to_string("ab cd ef"), "'ab cd ...'") def test_list(self): self.assertEqual(util.command_to_string(['ab', 'cd', 'ef']), "'ab cd ...'") def test_nested_list(self): self.assertEqual(util.command_to_string(['ab', ['cd', ['ef']]]), "'ab cd ...'") def test_object(self): # this looks like a renderable self.assertEqual(util.command_to_string(object()), None) def test_list_with_objects(self): # the object looks like a renderable, and is skipped self.assertEqual(util.command_to_string(['ab', object(), 'cd']), "'ab cd'") def test_invalid_ascii(self): self.assertEqual(util.command_to_string(b'a\xffc'), "'a\ufffdc'") class TestRewrap(unittest.TestCase): def test_main(self): tests = [ ("", "", None), ("\n", "\n", None), ("\n ", "\n", None), (" \n", "\n", None), (" \n ", "\n", None), (""" multiline with indent """, "\nmultiline with indent", None), ("""\ multiline with indent """, "multiline with indent\n", None), ("""\ multiline with indent """, "multiline with indent\n", None), ("""\ multiline with indent and formatting """, "multiline with indent\n and\n 
formatting\n", None), ("""\ multiline with indent and wrapping and formatting """, "multiline with\nindent and\nwrapping\n and\n formatting\n", 15), ] for text, expected, width in tests: self.assertEqual(util.rewrap(text, width=width), expected) class TestMerge(unittest.TestCase): def test_merge(self): self.assertEqual( util.dictionary_merge( { 'a': {'b': 1} }, { 'a': {'c': 2} }), { 'a': {'b': 1, 'c': 2} }) def test_overwrite(self): self.assertEqual( util.dictionary_merge( { 'a': {'b': 1} }, { 'a': 1 }), { 'a': 1 }) def test_overwrite2(self): self.assertEqual( util.dictionary_merge( { 'a': {'b': 1, 'c': 2} }, { 'a': {'b': [1, 2, 3]} }), { 'a': {'b': [1, 2, 3], 'c': 2} }) buildbot-3.4.0/master/buildbot/test/unit/test_util_queue.py000066400000000000000000000175441413250514000241570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members import threading from twisted.internet import defer from twisted.trial import unittest from buildbot.util.backoff import BackoffTimeoutExceededError from buildbot.util.queue import ConnectableThreadQueue class FakeConnection: pass class TestableConnectableThreadQueue(ConnectableThreadQueue): def __init__(self, case, *args, **kwargs): super().__init__(*args, **kwargs) self.case = case self.create_connection_called_count = 0 self.close_connection_called_count = 0 self._test_conn = None def create_connection(self): self.case.assertTrue(self.connecting) self.create_connection_called_count += 1 self.case.assertIsNone(self._test_conn) self._test_conn = FakeConnection() return self._test_conn def on_close_connection(self, conn): self.case.assertIs(conn, self._test_conn) self._test_conn = None self.close_connection() def close_connection(self): self.case.assertFalse(self.connecting) self._test_conn = None self.close_connection_called_count += 1 super().close_connection() class TestException(Exception): pass class TestConnectableThreadQueue(unittest.TestCase): def setUp(self): self.queue = TestableConnectableThreadQueue(self, connect_backoff_start_seconds=0, connect_backoff_multiplier=0, connect_backoff_max_wait_seconds=0) def tearDown(self): self.join_queue() def join_queue(self, connection_called_count=None): self.queue.join(timeout=1) if self.queue.is_alive(): raise AssertionError('Thread is still alive') if connection_called_count is not None: self.assertEqual(self.queue.create_connection_called_count, connection_called_count) self.assertEqual(self.queue.close_connection_called_count, connection_called_count) def test_no_work(self): self.join_queue(0) @defer.inlineCallbacks def test_single_item_called(self): def work(conn, *args, **kwargs): self.assertIs(conn, self.queue.conn) self.assertEqual(args, ('arg',)) self.assertEqual(kwargs, {'kwarg': 'kwvalue'}) return 'work_result' result = yield self.queue.execute_in_thread(work, 
'arg', kwarg='kwvalue') self.assertEqual(result, 'work_result') self.join_queue(1) @defer.inlineCallbacks def test_single_item_called_exception(self): def work(conn): raise TestException() with self.assertRaises(TestException): yield self.queue.execute_in_thread(work) self.join_queue(1) @defer.inlineCallbacks def test_exception_does_not_break_further_work(self): def work_exception(conn): raise TestException() def work_success(conn): return 'work_result' with self.assertRaises(TestException): yield self.queue.execute_in_thread(work_exception) result = yield self.queue.execute_in_thread(work_success) self.assertEqual(result, 'work_result') self.join_queue(1) @defer.inlineCallbacks def test_single_item_called_disconnect(self): def work(conn): pass yield self.queue.execute_in_thread(work) self.queue.close_connection() yield self.queue.execute_in_thread(work) self.join_queue(2) @defer.inlineCallbacks def test_many_items_called_in_order(self): self.expected_work_index = 0 def work(conn, work_index): self.assertEqual(self.expected_work_index, work_index) self.expected_work_index = work_index + 1 return work_index work_deferreds = [self.queue.execute_in_thread(work, i) for i in range(0, 100)] for i, d in enumerate(work_deferreds): self.assertEqual((yield d), i) self.join_queue(1) class FailingConnectableThreadQueue(ConnectableThreadQueue): def __init__(self, case, lock, *args, **kwargs): super().__init__(*args, **kwargs) self.case = case self.lock = lock self.create_connection_called_count = 0 def on_close_connection(self, conn): raise AssertionError("on_close_connection should not have been called") def close_connection(self): raise AssertionError("close_connection should not have been called") def _drain_queue_with_exception(self, e): with self.lock: return super()._drain_queue_with_exception(e) class ThrowingConnectableThreadQueue(FailingConnectableThreadQueue): def create_connection(self): with self.lock: self.create_connection_called_count += 1 
self.case.assertTrue(self.connecting) raise TestException() class NoneReturningConnectableThreadQueue(FailingConnectableThreadQueue): def create_connection(self): with self.lock: self.create_connection_called_count += 1 self.case.assertTrue(self.connecting) return None class ConnectionErrorTests: def setUp(self): self.lock = threading.Lock() self.queue = self.QueueClass(self, self.lock, connect_backoff_start_seconds=0.001, connect_backoff_multiplier=1, connect_backoff_max_wait_seconds=0.0039) def tearDown(self): self.queue.join(timeout=1) if self.queue.is_alive(): raise AssertionError('Thread is still alive') @defer.inlineCallbacks def test_resets_after_reject(self): def work(conn): raise AssertionError('work should not be executed') with self.lock: d = self.queue.execute_in_thread(work) with self.assertRaises(BackoffTimeoutExceededError): yield d self.assertEqual(self.queue.create_connection_called_count, 5) with self.lock: d = self.queue.execute_in_thread(work) with self.assertRaises(BackoffTimeoutExceededError): yield d self.assertEqual(self.queue.create_connection_called_count, 10) self.flushLoggedErrors(TestException) @defer.inlineCallbacks def test_multiple_work_rejected(self): def work(conn): raise AssertionError('work should not be executed') with self.lock: d1 = self.queue.execute_in_thread(work) d2 = self.queue.execute_in_thread(work) d3 = self.queue.execute_in_thread(work) with self.assertRaises(BackoffTimeoutExceededError): yield d1 with self.assertRaises(BackoffTimeoutExceededError): yield d2 with self.assertRaises(BackoffTimeoutExceededError): yield d3 self.assertEqual(self.queue.create_connection_called_count, 5) self.flushLoggedErrors(TestException) class TestConnectionErrorThrow(ConnectionErrorTests, unittest.TestCase): QueueClass = ThrowingConnectableThreadQueue class TestConnectionErrorReturnNone(ConnectionErrorTests, unittest.TestCase): QueueClass = NoneReturningConnectableThreadQueue 
buildbot-3.4.0/master/buildbot/test/unit/test_version.py000066400000000000000000000043661413250514000234610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest class VersioningUtilsTests(unittest.SynchronousTestCase): # Version utils are copied in three packages. # this unit test is made to be able to test the three versions # with the same test module_under_test = "buildbot" def setUp(self): try: self.m = __import__(self.module_under_test) except ImportError as e: raise unittest.SkipTest(self.module_under_test + " package is not installed") from e def test_gitDescribeToPep440devVersion(self): self.assertEqual(self.m.gitDescribeToPep440("v0.9.8-20-gf0f45ca"), "0.9.9-dev20") def test_gitDescribeToPep440tag(self): self.assertEqual(self.m.gitDescribeToPep440("v0.9.8"), "0.9.8") def test_gitDescribeToPep440p1tag(self): self.assertEqual(self.m.gitDescribeToPep440("v0.9.9.post1"), "0.9.9.post1") def test_gitDescribeToPep440p1dev(self): self.assertEqual(self.m.gitDescribeToPep440("v0.9.9.post1-20-gf0f45ca"), "0.9.10-dev20") def test_getVersionFromArchiveIdNoTag(self): version = self.m.getVersionFromArchiveId("1514651968 (git-archive-version)") self.assertEqual(version, "2017.12.30") def test_getVersionFromArchiveIdtag(self): version = 
self.m.getVersionFromArchiveId('1514808197 (HEAD -> master, tag: v1.0.0)') self.assertEqual(version, "1.0.0") class VersioningUtilsTests_PKG(VersioningUtilsTests): module_under_test = "buildbot_pkg" class VersioningUtilsTests_WORKER(VersioningUtilsTests): module_under_test = "buildbot_worker" buildbot-3.4.0/master/buildbot/test/unit/test_wamp_connector.py000066400000000000000000000103041413250514000247770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from parameterized import parameterized import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util import service from buildbot.wamp import connector class FakeConfig: def __init__(self, mq_dict): self.mq = mq_dict class FakeService(service.AsyncMultiService): name = "fakeWampService" # Fake wamp service # just call the maker on demand by the test def __init__(self, url, realm, make, extra=None, debug=False, debug_wamp=False, debug_app=False): super().__init__() self.make = make self.extra = extra def gotConnection(self): self.make(None) r = self.make(self) r.publish = mock.Mock(spec=r.publish) r.register = mock.Mock(spec=r.register) r.subscribe = mock.Mock(spec=r.subscribe) r.onJoin(None) class TestedWampConnector(connector.WampConnector): serviceClass = FakeService class WampConnector(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() master = fakemaster.make_master(self) self.connector = TestedWampConnector() config = FakeConfig({'type': 'wamp', 'router_url': "wss://foo", 'realm': "bb"}) yield self.connector.setServiceParent(master) yield master.startService() yield self.connector.reconfigServiceWithBuildbotConfig(config) @defer.inlineCallbacks def test_reconfig_same_config(self): config = FakeConfig({'type': 'wamp', 'router_url': "wss://foo", 'realm': "bb"}) yield self.connector.reconfigServiceWithBuildbotConfig(config) @parameterized.expand([ ('type', 'simple'), ('router_url', 'wss://other-foo'), ('realm', 'bb-other'), ('wamp_debug_level', 'info'), ]) @defer.inlineCallbacks def test_reconfig_does_not_allow_config_change(self, attr_name, attr_value): mq_dict = {'type': 'wamp', 'router_url': "wss://foo", 'realm': "bb"} mq_dict[attr_name] = attr_value with self.assertRaises(ValueError, msg="Cannot use different wamp settings when 
reconfiguring"): yield self.connector.reconfigServiceWithBuildbotConfig(FakeConfig(mq_dict)) @defer.inlineCallbacks def test_startup(self): d = self.connector.getService() self.connector.app.gotConnection() yield d # 824 is the hardcoded masterid of fakemaster self.connector.service.publish.assert_called_with( "org.buildbot.824.connected") @defer.inlineCallbacks def test_subscribe(self): d = self.connector.subscribe('callback', 'topic', 'options') self.connector.app.gotConnection() yield d self.connector.service.subscribe.assert_called_with( 'callback', 'topic', 'options') @defer.inlineCallbacks def test_publish(self): d = self.connector.publish('topic', 'data', 'options') self.connector.app.gotConnection() yield d self.connector.service.publish.assert_called_with( 'topic', 'data', options='options') @defer.inlineCallbacks def test_OnLeave(self): d = self.connector.getService() self.connector.app.gotConnection() yield d self.assertTrue(self.connector.master.running) self.connector.service.onLeave(None) self.assertFalse(self.connector.master.running) buildbot-3.4.0/master/buildbot/test/unit/util/000077500000000000000000000000001413250514000213275ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/util/__init__.py000066400000000000000000000000001413250514000234260ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/util/test_ComparableMixin.py000066400000000000000000000063131413250514000260150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot import util class ComparableMixin(unittest.TestCase): class Foo(util.ComparableMixin): compare_attrs = ("a", "b") def __init__(self, a, b, c): self.a, self.b, self.c = a, b, c class Bar(Foo, util.ComparableMixin): compare_attrs = ("b", "c") def setUp(self): self.f123 = self.Foo(1, 2, 3) self.f124 = self.Foo(1, 2, 4) self.f134 = self.Foo(1, 3, 4) self.b123 = self.Bar(1, 2, 3) self.b223 = self.Bar(2, 2, 3) self.b213 = self.Bar(2, 1, 3) def test_equality_identity(self): self.assertEqual(self.f123, self.f123) def test_equality_same(self): another_f123 = self.Foo(1, 2, 3) self.assertEqual(self.f123, another_f123) def test_equality_unimportantDifferences(self): self.assertEqual(self.f123, self.f124) def test_inequality_unimportantDifferences_subclass(self): # verify that the parent class's compare_attrs does # affect the subclass self.assertNotEqual(self.b123, self.b223) def test_inequality_importantDifferences(self): self.assertNotEqual(self.f123, self.f134) def test_inequality_importantDifferences_subclass(self): self.assertNotEqual(self.b123, self.b213) def test_inequality_differentClasses(self): self.assertNotEqual(self.f123, self.b123) def test_instance_attribute_not_used(self): # setting compare_attrs as an instance method doesn't # affect the outcome of the comparison another_f123 = self.Foo(1, 2, 3) another_f123.compare_attrs = ("b", "a") self.assertEqual(self.f123, another_f123) def test_ne_importantDifferences(self): self.assertNotEqual(self.f123, self.f134) def test_ne_differentClasses(self): self.assertNotEqual(self.f123, self.b123) def test_compare(self): self.assertEqual(self.f123, self.f123) self.assertNotEqual(self.b223, self.b213) self.assertGreater(self.b223, 
self.b213) # Different classes self.assertFalse(self.b223 > self.f123) self.assertGreaterEqual(self.b223, self.b213) self.assertGreaterEqual(self.b223, self.b223) # Different classes self.assertFalse(self.f123 >= self.b123) self.assertLess(self.b213, self.b223) self.assertLessEqual(self.b213, self.b223) self.assertLessEqual(self.b213, self.b213) # Different classes self.assertFalse(self.f123 <= self.b123) buildbot-3.4.0/master/buildbot/test/unit/util/test_backoff.py000066400000000000000000000111121413250514000243270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import time from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util.misc import TestReactorMixin from buildbot.util import backoff class TestException(Exception): pass class ExponentialBackoffEngineAsyncTests(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() def test_construct_asserts(self): with self.assertRaises(ValueError): backoff.ExponentialBackoffEngine(-1, 1, 1) with self.assertRaises(ValueError): backoff.ExponentialBackoffEngine(1, -1, 1) with self.assertRaises(ValueError): backoff.ExponentialBackoffEngine(1, 1, -1) @defer.inlineCallbacks def assert_called_after_time(self, d, time): self.assertFalse(d.called) self.reactor.advance(time * 0.99) self.assertFalse(d.called) self.reactor.advance(time * 0.010001) # avoid rounding errors by overshooting a little self.assertTrue(d.called) yield d # throw exceptions stored in d, if any @defer.inlineCallbacks def assert_called_immediately(self, d): self.assertTrue(d.called) yield d @defer.inlineCallbacks def test_wait_times(self): engine = backoff.ExponentialBackoffEngineAsync(self.reactor, start_seconds=10, multiplier=2, max_wait_seconds=1000) yield self.assert_called_after_time(engine.wait_on_failure(), 10) yield self.assert_called_after_time(engine.wait_on_failure(), 20) engine.on_success() yield self.assert_called_after_time(engine.wait_on_failure(), 10) yield self.assert_called_after_time(engine.wait_on_failure(), 20) yield self.assert_called_after_time(engine.wait_on_failure(), 40) engine.on_success() engine.on_success() yield self.assert_called_after_time(engine.wait_on_failure(), 10) @defer.inlineCallbacks def test_max_wait_seconds(self): engine = backoff.ExponentialBackoffEngineAsync(self.reactor, start_seconds=10, multiplier=2, max_wait_seconds=100) yield self.assert_called_after_time(engine.wait_on_failure(), 10) yield self.assert_called_after_time(engine.wait_on_failure(), 20) yield 
self.assert_called_after_time(engine.wait_on_failure(), 40) yield self.assert_called_after_time(engine.wait_on_failure(), 30) with self.assertRaises(backoff.BackoffTimeoutExceededError): yield self.assert_called_immediately(engine.wait_on_failure()) with self.assertRaises(backoff.BackoffTimeoutExceededError): yield self.assert_called_immediately(engine.wait_on_failure()) engine.on_success() yield self.assert_called_after_time(engine.wait_on_failure(), 10) yield self.assert_called_after_time(engine.wait_on_failure(), 20) yield self.assert_called_after_time(engine.wait_on_failure(), 40) yield self.assert_called_after_time(engine.wait_on_failure(), 30) with self.assertRaises(backoff.BackoffTimeoutExceededError): yield self.assert_called_immediately(engine.wait_on_failure()) class ExponentialBackoffEngineSyncTests(unittest.TestCase): # All the complex cases are tested in ExponentialBackoffEngineAsyncTests where we can fake # the clock. For the synchronous engine we just need to test that waiting works. def test_wait_on_failure(self): engine = backoff.ExponentialBackoffEngineSync(start_seconds=0.05, multiplier=2, max_wait_seconds=1) begin = time.monotonic() engine.wait_on_failure() end = time.monotonic() # Note that if time is adjusted back even a little bit during the test it will fail. # So we add a little bit of wiggle room. self.assertGreater(end - begin, 0.04) buildbot-3.4.0/master/buildbot/test/unit/util/test_bbcollections.py000066400000000000000000000045301413250514000255640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.util import bbcollections class KeyedSets(unittest.TestCase): def setUp(self): self.ks = bbcollections.KeyedSets() def test_getitem_default(self): self.assertEqual(self.ks['x'], set()) # remaining tests effectively cover __getitem__ def test_add(self): self.ks.add('y', 2) self.assertEqual(self.ks['y'], set([2])) def test_add_twice(self): self.ks.add('z', 2) self.ks.add('z', 4) self.assertEqual(self.ks['z'], set([2, 4])) def test_discard_noError(self): self.ks.add('full', 12) self.ks.discard('empty', 13) # should not fail self.ks.discard('full', 13) # nor this self.assertEqual(self.ks['full'], set([12])) def test_discard_existing(self): self.ks.add('yarn', 'red') self.ks.discard('yarn', 'red') self.assertEqual(self.ks['yarn'], set([])) def test_contains_true(self): self.ks.add('yarn', 'red') self.assertTrue('yarn' in self.ks) def test_contains_false(self): self.assertFalse('yarn' in self.ks) def test_contains_setNamesNotContents(self): self.ks.add('yarn', 'red') self.assertFalse('red' in self.ks) def test_pop_exists(self): self.ks.add('names', 'pop') self.ks.add('names', 'coke') self.ks.add('names', 'soda') popped = self.ks.pop('names') remaining = self.ks['names'] self.assertEqual((popped, remaining), (set(['pop', 'coke', 'soda']), set())) def test_pop_missing(self): self.assertEqual(self.ks.pop('flavors'), set()) buildbot-3.4.0/master/buildbot/test/unit/util/test_codebase.py000066400000000000000000000110161413250514000245040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin from buildbot.util import codebase from buildbot.util import state class FakeObject(codebase.AbsoluteSourceStampsMixin, state.StateMixin): name = 'fake-name' def __init__(self, master, codebases): self.master = master self.codebases = codebases class TestAbsoluteSourceStampsMixin(unittest.TestCase, scheduler.SchedulerMixin, TestReactorMixin): codebases = {'a': {'repository': '', 'branch': 'master'}, 'b': {'repository': '', 'branch': 'master'}} def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True, wantData=True) self.db = self.master.db self.object = FakeObject(self.master, self.codebases) def mkch(self, **kwargs): ch = self.makeFakeChange(**kwargs) self.master.db.changes.fakeAddChangeInstance(ch) return ch @defer.inlineCallbacks def test_getCodebaseDict(self): cbd = yield self.object.getCodebaseDict('a') self.assertEqual(cbd, {'repository': '', 'branch': 'master'}) @defer.inlineCallbacks def test_getCodebaseDict_not_found(self): d = self.object.getCodebaseDict('c') yield self.assertFailure(d, KeyError) @defer.inlineCallbacks def test_getCodebaseDict_existing(self): self.db.state.set_fake_state(self.object, lastCodebases={'a': { 'repository': 'A', 'revision': '1234:abc', 'branch': 'master', 'lastChange': 10 }}) cbd = yield 
self.object.getCodebaseDict('a') self.assertEqual(cbd, {'repository': 'A', 'revision': '1234:abc', 'branch': 'master', 'lastChange': 10}) cbd = yield self.object.getCodebaseDict('b') self.assertEqual(cbd, {'repository': '', 'branch': 'master'}) @defer.inlineCallbacks def test_recordChange(self): yield self.object.recordChange(self.mkch(codebase='a', repository='A', revision='1234:abc', branch='master', number=10)) self.db.state.assertStateByClass('fake-name', 'FakeObject', lastCodebases={ 'a': {'repository': 'A', 'revision': '1234:abc', 'branch': 'master', 'lastChange': 10}}) @defer.inlineCallbacks def test_recordChange_older(self): self.db.state.set_fake_state(self.object, lastCodebases={'a': { 'repository': 'A', 'revision': '2345:bcd', 'branch': 'master', 'lastChange': 20 }}) yield self.object.getCodebaseDict('a') yield self.object.recordChange(self.mkch(codebase='a', repository='A', revision='1234:abc', branch='master', number=10)) self.db.state.assertStateByClass('fake-name', 'FakeObject', lastCodebases={ 'a': {'repository': 'A', 'revision': '2345:bcd', 'branch': 'master', 'lastChange': 20}}) @defer.inlineCallbacks def test_recordChange_newer(self): self.db.state.set_fake_state(self.object, lastCodebases={'a': { 'repository': 'A', 'revision': '1234:abc', 'branch': 'master', 'lastChange': 10 }}) yield self.object.getCodebaseDict('a') yield self.object.recordChange(self.mkch(codebase='a', repository='A', revision='2345:bcd', branch='master', number=20)) self.db.state.assertStateByClass('fake-name', 'FakeObject', lastCodebases={ 'a': {'repository': 'A', 'revision': '2345:bcd', 'branch': 'master', 'lastChange': 20}}) buildbot-3.4.0/master/buildbot/test/unit/util/test_debounce.py000066400000000000000000000203061413250514000245250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import task from twisted.python import failure from twisted.python import log from twisted.trial import unittest from buildbot.util import debounce class DebouncedClass: def __init__(self, reactor): self.callDeferred = None self.calls = 0 self.expCalls = 0 self.stopDeferreds = [] self.reactor = reactor @debounce.method(wait=4.0, get_reactor=lambda self: self.reactor) def maybe(self): assert not self.callDeferred self.calls += 1 log.msg('debounced function called') self.callDeferred = defer.Deferred() @self.callDeferred.addBoth def unset(x): log.msg('debounced function complete') self.callDeferred = None return x return self.callDeferred class DebounceTest(unittest.TestCase): def setUp(self): self.clock = task.Clock() def scenario(self, events): dbs = dict((k, DebouncedClass(self.clock)) for k in {n for n, _, _ in events}) while events: n, t, e = events.pop(0) db = dbs[n] log.msg('time=%f, event=%s' % (t, e)) if t > self.clock.seconds(): self.clock.advance(t - self.clock.seconds()) if e == 'maybe': db.maybe() elif e == 'called': db.expCalls += 1 elif e == 'complete': db.callDeferred.callback(None) elif e == 'fail': db.callDeferred.errback(failure.Failure(RuntimeError())) elif e == 'failure_logged': self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) elif e == 'check': pass # just check the expCalls elif e == 'start': db.maybe.start() elif e in ('stop', 'stop-and-called'): db.stopDeferreds.append(db.maybe.stop()) if e 
== 'stop-and-called': db.expCalls += 1 elif e == 'stopNotComplete': self.assertFalse(db.stopDeferreds[-1].called) elif e == 'stopComplete': self.assertTrue(db.stopDeferreds[-1].called) db.stopDeferreds.pop() else: self.fail("unknown scenario event {}".format(e)) for db in dbs.values(): self.assertEqual(db.calls, db.expCalls) def test_called_once(self): """The debounced method is called only after 4 seconds""" self.scenario([ (1, 0.0, 'maybe'), (1, 2.0, 'check'), (1, 4.0, 'called'), (1, 5.0, 'check'), (1, 6.0, 'complete'), (1, 7.0, 'check') ]) def test_coalesce_calls(self): """Multiple calls are coalesced during 4 seconds, but the function runs 4 seconds after the first call.""" self.scenario([ (1, 0.0, 'maybe'), (1, 1.0, 'maybe'), (1, 2.0, 'maybe'), (1, 3.0, 'maybe'), (1, 4.0, 'called'), (1, 5.0, 'check'), (1, 6.0, 'complete'), (1, 7.0, 'check'), ]) def test_second_call_during_first(self): """If the debounced method is called after an execution has begun, then a second execution will take place 4 seconds after the execution finishes, with intervening calls coalesced.""" self.scenario([ (1, 0.0, 'maybe'), (1, 4.0, 'called'), (1, 5.0, 'maybe'), (1, 6.0, 'complete'), (1, 7.0, 'maybe'), (1, 9.0, 'maybe'), (1, 10.0, 'called'), (1, 11.0, 'check'), ]) def test_failure_logged(self): """If the debounced method fails, the error is logged, but otherwise it behaves as if it had succeeded.""" self.scenario([ (1, 0.0, 'maybe'), (1, 4.0, 'called'), (1, 5.0, 'maybe'), (1, 6.0, 'fail'), (1, 6.0, 'failure_logged'), (1, 10.0, 'called'), (1, 11.0, 'check'), ]) def test_instance_independence(self): """The timers for two instances are independent.""" self.scenario([ (1, 0.0, 'maybe'), (2, 2.0, 'maybe'), (1, 4.0, 'called'), (2, 6.0, 'called'), (1, 6.0, 'complete'), (2, 6.0, 'complete'), (1, 7.0, 'check'), ]) def test_start_when_started(self): """Calling meth.start when already started has no effect""" self.scenario([ (1, 0.0, 'start'), (1, 1.0, 'start'), ]) def 
test_stop_while_idle(self): """If the debounced method is stopped while idle, subsequent calls do nothing.""" self.scenario([ (1, 0.0, 'stop'), (1, 0.0, 'stopComplete'), (1, 1.0, 'maybe'), (1, 6.0, 'check'), # not called ]) def test_stop_while_waiting(self): """If the debounced method is stopped while waiting, the waiting call occurs immediately, stop returns immediately, and subsequent calls do nothing.""" self.scenario([ (1, 0.0, 'maybe'), (1, 2.0, 'stop-and-called'), (1, 2.1, 'complete'), (1, 2.1, 'stopComplete'), (1, 3.0, 'maybe'), (1, 8.0, 'check'), # not called ]) def test_stop_while_running(self): """If the debounced method is stopped while running, the running call completes, stop returns only after the call completes, and subsequent calls do nothing.""" self.scenario([ (1, 0.0, 'maybe'), (1, 4.0, 'called'), (1, 5.0, 'stop'), (1, 5.0, 'stopNotComplete'), (1, 6.0, 'complete'), (1, 6.0, 'stopComplete'), (1, 6.0, 'maybe'), (1, 10.0, 'check'), # not called ]) def test_multiple_stops(self): """Multiple stop calls will return individually when the method completes.""" self.scenario([ (1, 0.0, 'maybe'), (1, 4.0, 'called'), (1, 5.0, 'stop'), (1, 5.0, 'stop'), (1, 5.0, 'stopNotComplete'), (1, 6.0, 'complete'), (1, 6.0, 'stopComplete'), (1, 6.0, 'stopComplete'), (1, 6.0, 'maybe'), (1, 10.0, 'check'), # not called ]) def test_stop_while_running_queued(self): """If the debounced method is stopped while running with another call queued, the running call completes, stop returns only after the call completes, the queued call never occurs, and subsequent calls do nothing.""" self.scenario([ (1, 0.0, 'maybe'), (1, 4.0, 'called'), (1, 4.5, 'maybe'), (1, 5.0, 'stop'), (1, 5.0, 'stopNotComplete'), (1, 6.0, 'complete'), (1, 6.0, 'stopComplete'), (1, 6.0, 'maybe'), (1, 10.0, 'check'), # not called ]) def test_start_after_stop(self): """After a stop and subsequent start, a call to the debounced method causes an invocation 4 seconds later.""" self.scenario([ (1, 0.0, 'stop'), (1, 
1.0, 'maybe'), (1, 2.0, 'start'), (1, 2.0, 'maybe'), (1, 5.0, 'check'), # not called (1, 6.0, 'called'), ]) buildbot-3.4.0/master/buildbot/test/unit/util/test_deferwaiter.py000066400000000000000000000171651413250514000252530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util.misc import TestReactorMixin from buildbot.util import asyncSleep from buildbot.util.deferwaiter import DeferWaiter from buildbot.util.deferwaiter import RepeatedActionHandler class TestException(Exception): pass class WaiterTests(unittest.TestCase): def test_add_deferred_called(self): w = DeferWaiter() w.add(defer.succeed(None)) self.assertFalse(w.has_waited()) d = w.wait() self.assertTrue(d.called) def test_add_non_deferred(self): w = DeferWaiter() w.add(2) self.assertFalse(w.has_waited()) d = w.wait() self.assertTrue(d.called) def test_add_deferred_not_called_and_call_later(self): w = DeferWaiter() d1 = defer.Deferred() w.add(d1) self.assertTrue(w.has_waited()) d = w.wait() self.assertFalse(d.called) d1.callback(None) self.assertFalse(w.has_waited()) self.assertTrue(d.called) @defer.inlineCallbacks def test_passes_result(self): w = DeferWaiter() d1 = defer.Deferred() w.add(d1) d1.callback(123) 
res = yield d1 self.assertEqual(res, 123) d = w.wait() self.assertTrue(d.called) @defer.inlineCallbacks def test_cancel_not_called(self): w = DeferWaiter() d1 = defer.Deferred() w.add(d1) self.assertTrue(w.has_waited()) w.cancel() self.assertFalse(w.has_waited()) d = w.wait() self.assertTrue(d.called) with self.assertRaises(defer.CancelledError): yield d1 self.flushLoggedErrors(defer.CancelledError) @defer.inlineCallbacks def test_cancel_called(self): w = DeferWaiter() d1_waited = defer.Deferred() d1 = defer.succeed(None) d1.addCallback(lambda _: d1_waited) w.add(d1) w.cancel() d = w.wait() self.assertTrue(d.called) self.assertTrue(d1.called) self.assertTrue(d1_waited.called) with self.assertRaises(defer.CancelledError): yield d1 self.flushLoggedErrors(defer.CancelledError) class RepeatedActionHandlerTests(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_does_not_add_action_on_start(self): w = DeferWaiter() times = [] def action(): times.append(self.reactor.seconds()) h = RepeatedActionHandler(self.reactor, w, 1, action) self.reactor.advance(2) h.stop() self.assertEqual(len(times), 0) d = w.wait() self.assertTrue(d.called) yield d @parameterized.expand([ ('after_action', True), ('before_action', False), ]) @defer.inlineCallbacks def test_runs_action_with_timer(self, name, timer_after_action): w = DeferWaiter() times = [] def action(): times.append(round(self.reactor.seconds(), 1)) h = RepeatedActionHandler(self.reactor, w, 1, action, start_timer_after_action_completes=timer_after_action) h.start() self.reactor.pump([0.1] * 35) self.assertEqual(times, [1.1, 2.1, 3.1]) h.stop() d = w.wait() self.assertTrue(d.called) yield d @parameterized.expand([ ('after_action', True), ('before_action', False), ]) @defer.inlineCallbacks def test_runs_action_after_exception_with_timer(self, name, timer_after_action): w = DeferWaiter() times = [] def action(): times.append(round(self.reactor.seconds(), 1)) if len(times) == 
2: raise TestException() h = RepeatedActionHandler(self.reactor, w, 1, action, start_timer_after_action_completes=timer_after_action) h.start() self.reactor.pump([0.1] * 35) self.assertEqual(times, [1.1, 2.1, 3.1]) h.stop() d = w.wait() self.assertTrue(d.called) self.flushLoggedErrors(TestException) yield d @defer.inlineCallbacks def test_ignores_duplicate_start_or_stop(self): w = DeferWaiter() times = [] def action(): times.append(round(self.reactor.seconds(), 1)) h = RepeatedActionHandler(self.reactor, w, 1, action) h.start() h.start() self.reactor.pump([0.1] * 35) self.assertEqual(times, [1.1, 2.1, 3.1]) h.stop() h.stop() d = w.wait() self.assertTrue(d.called) yield d @defer.inlineCallbacks def test_can_update_interval(self): w = DeferWaiter() times = [] def action(): times.append(round(self.reactor.seconds(), 1)) h = RepeatedActionHandler(self.reactor, w, 1, action) h.start() self.reactor.pump([0.1] * 15) h.setInterval(2) self.reactor.pump([0.1] * 50) self.assertEqual(times, [1.1, 2.1, 4.1, 6.2]) h.stop() d = w.wait() self.assertTrue(d.called) yield d @parameterized.expand([ ('after_action', True, [1.1, 2.6, 4.1]), ('before_action', False, [1.1, 2.1, 3.1, 4.1]), ]) @defer.inlineCallbacks def test_runs_action_with_timer_delay(self, name, timer_after_action, expected_times): w = DeferWaiter() times = [] @defer.inlineCallbacks def action(): times.append(round(self.reactor.seconds(), 1)) yield asyncSleep(0.5, reactor=self.reactor) h = RepeatedActionHandler(self.reactor, w, 1, action, start_timer_after_action_completes=timer_after_action) h.start() self.reactor.pump([0.1] * 47) self.assertEqual(times, expected_times) h.stop() d = w.wait() self.assertTrue(d.called) yield d @parameterized.expand([ ('after_action', True), ('before_action', False), ]) @defer.inlineCallbacks def test_waiter_waits_for_action_timer_starts(self, name, timer_after_action): w = DeferWaiter() times = [] @defer.inlineCallbacks def action(): times.append(round(self.reactor.seconds(), 1)) yield 
asyncSleep(0.5, reactor=self.reactor) h = RepeatedActionHandler(self.reactor, w, 1, action, start_timer_after_action_completes=timer_after_action) h.start() self.reactor.pump([0.1] * 12) self.assertEqual(times, [1.1]) d = w.wait() self.assertFalse(d.called) h.stop() self.assertFalse(d.called) self.reactor.pump([0.1] * 5) # action started on 1.1, will end at 1.6 self.assertTrue(d.called) yield d buildbot-3.4.0/master/buildbot/test/unit/util/test_eventual.py000066400000000000000000000070671413250514000245750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from twisted.trial import unittest from buildbot.util import eventual class Eventually(unittest.TestCase): def setUp(self): # reset the queue to its base state eventual._theSimpleQueue = eventual._SimpleCallQueue() self.old_log_err = log.err self.results = [] def tearDown(self): log.err = self.old_log_err return eventual.flushEventualQueue() # utility callback def cb(self, *args, **kwargs): r = args if kwargs: r = r + (kwargs,) self.results.append(r) # flush the queue and assert results @defer.inlineCallbacks def assertResults(self, exp): yield eventual.flushEventualQueue() self.assertEqual(self.results, exp) # tests def test_eventually_calls(self): eventual.eventually(self.cb) return self.assertResults([()]) def test_eventually_args(self): eventual.eventually(self.cb, 1, 2, a='a') return self.assertResults([(1, 2, dict(a='a'))]) def test_eventually_err(self): # monkey-patch log.err; this is restored by tearDown log.err = lambda: self.results.append("err") def cb_fails(): raise RuntimeError("should not cause test failure") eventual.eventually(cb_fails) return self.assertResults(['err']) def test_eventually_butNotNow(self): eventual.eventually(self.cb, 1) self.assertFalse(self.results != []) return self.assertResults([(1,)]) def test_eventually_order(self): eventual.eventually(self.cb, 1) eventual.eventually(self.cb, 2) eventual.eventually(self.cb, 3) return self.assertResults([(1,), (2,), (3,)]) def test_flush_waitForChainedEventuallies(self): def chain(n): self.results.append(n) if n <= 0: return eventual.eventually(chain, n - 1) chain(3) # (the flush this tests is implicit in assertResults) return self.assertResults([3, 2, 1, 0]) def test_flush_waitForTreeEventuallies(self): # a more complex set of eventualities def tree(n): self.results.append(n) if n <= 0: return eventual.eventually(tree, n - 1) eventual.eventually(tree, n - 1) tree(2) # (the flush this tests 
is implicit in assertResults) return self.assertResults([2, 1, 1, 0, 0, 0, 0]) def test_flush_duringTurn(self): testd = defer.Deferred() def cb(): d = eventual.flushEventualQueue() d.addCallback(testd.callback) eventual.eventually(cb) return testd def test_fireEventually_call(self): d = eventual.fireEventually(13) d.addCallback(self.cb) return self.assertResults([(13,)]) buildbot-3.4.0/master/buildbot/test/unit/util/test_git.py000066400000000000000000000124261413250514000235300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from parameterized import parameterized from twisted.trial import unittest from buildbot.test.util import config from buildbot.util.git import GitMixin from buildbot.util.git import escapeShellArgIfNeeded from buildbot.util.git import getSshKnownHostsContents class TestEscapeShellArgIfNeeded(unittest.TestCase): def assert_escapes(self, arg): escaped = '"{}"'.format(arg) self.assertEqual(escapeShellArgIfNeeded(arg), escaped) def assert_does_not_escape(self, arg): self.assertEqual(escapeShellArgIfNeeded(arg), arg) def test_empty(self): self.assert_escapes('') def test_spaces(self): self.assert_escapes(' ') self.assert_escapes('a ') self.assert_escapes(' a') self.assert_escapes('a b') def test_special(self): self.assert_escapes('a=b') self.assert_escapes('a%b') self.assert_escapes('a(b') self.assert_escapes('a[b') def test_no_escape(self): self.assert_does_not_escape('abc') self.assert_does_not_escape('a_b') self.assert_does_not_escape('-opt') self.assert_does_not_escape('--opt') class TestSetUpGit(GitMixin, unittest.TestCase, config.ConfigErrorsMixin): @parameterized.expand([ ('no_keys', None, None, None, None), ('only_private_key', 'key', None, None, None), ('private_key_host_key', 'key', 'host', None, None), ('private_key_known_hosts', 'key', None, 'hosts', None), ('no_private_key_host_key', None, 'host', None, 'sshPrivateKey must be provided in order use sshHostKey'), ('no_private_key_known_hosts', None, None, 'hosts', 'sshPrivateKey must be provided in order use sshKnownHosts'), ('both_host_key_known_hosts', 'key', 'host', 'hosts', 'only one of sshKnownHosts and sshHostKey can be provided'), ]) def test_config(self, name, private_key, host_key, known_hosts, config_error): self.sshPrivateKey = private_key self.sshHostKey = host_key self.sshKnownHosts = known_hosts if config_error is None: self.setupGit() else: with self.assertRaisesConfigError(config_error): self.setupGit() class TestParseGitFeatures(GitMixin, 
unittest.TestCase): def setUp(self): self.sshPrivateKey = None self.sshHostKey = None self.sshKnownHosts = None self.setupGit() def test_no_output(self): self.parseGitFeatures('') self.assertFalse(self.gitInstalled) self.assertFalse(self.supportsBranch) self.assertFalse(self.supportsSubmoduleForce) self.assertFalse(self.supportsSubmoduleCheckout) self.assertFalse(self.supportsSshPrivateKeyAsEnvOption) self.assertFalse(self.supportsSshPrivateKeyAsConfigOption) def test_git_noversion(self): self.parseGitFeatures('git') self.assertFalse(self.gitInstalled) self.assertFalse(self.supportsBranch) self.assertFalse(self.supportsSubmoduleForce) self.assertFalse(self.supportsSubmoduleCheckout) self.assertFalse(self.supportsSshPrivateKeyAsEnvOption) self.assertFalse(self.supportsSshPrivateKeyAsConfigOption) def test_git_zero_version(self): self.parseGitFeatures('git version 0.0.0') self.assertTrue(self.gitInstalled) self.assertFalse(self.supportsBranch) self.assertFalse(self.supportsSubmoduleForce) self.assertFalse(self.supportsSubmoduleCheckout) self.assertFalse(self.supportsSshPrivateKeyAsEnvOption) self.assertFalse(self.supportsSshPrivateKeyAsConfigOption) def test_git_2_10_0(self): self.parseGitFeatures('git version 2.10.0') self.assertTrue(self.gitInstalled) self.assertTrue(self.supportsBranch) self.assertTrue(self.supportsSubmoduleForce) self.assertTrue(self.supportsSubmoduleCheckout) self.assertTrue(self.supportsSshPrivateKeyAsEnvOption) self.assertTrue(self.supportsSshPrivateKeyAsConfigOption) class TestAdjustCommandParamsForSshPrivateKey(GitMixin, unittest.TestCase): def test_throws_when_wrapper_not_given(self): self.gitInstalled = True command = [] env = {} with self.assertRaises(Exception): self.adjustCommandParamsForSshPrivateKey(command, env, 'path/to/key') class TestGetSshKnownHostsContents(unittest.TestCase): def test(self): key = 'ssh-rsa AAAA<...>WsHQ==' expected = '* ssh-rsa AAAA<...>WsHQ==' self.assertEqual(expected, getSshKnownHostsContents(key)) 
buildbot-3.4.0/master/buildbot/test/unit/util/test_giturlparse.py000066400000000000000000000113741413250514000253070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.util import giturlparse class Tests(unittest.TestCase): def test_github(self): for u in [ "https://github.com/buildbot/buildbot", "https://github.com/buildbot/buildbot.git", "ssh://git@github.com:buildbot/buildbot.git", "git://github.com/buildbot/buildbot.git"]: u = giturlparse(u) self.assertIn(u.user, (None, "git")) self.assertEqual(u.domain, "github.com") self.assertEqual(u.owner, "buildbot") self.assertEqual(u.repo, "buildbot") self.assertIsNone(u.port) def test_gitlab(self): for u in [ "ssh://git@mygitlab.com/group/subgrouptest/testproject.git", "https://mygitlab.com/group/subgrouptest/testproject.git", "git@mygitlab.com:group/subgrouptest/testproject.git", "git://mygitlab.com/group/subgrouptest/testproject.git"]: u = giturlparse(u) self.assertIsNone(u.port) self.assertIn(u.user, (None, "git")) self.assertEqual(u.domain, "mygitlab.com") self.assertEqual(u.owner, "group/subgrouptest") self.assertEqual(u.repo, "testproject") def test_gitlab_subsubgroup(self): for u in [ "ssh://git@mygitlab.com/group/subgrouptest/subsubgroup/testproject.git", 
"https://mygitlab.com/group/subgrouptest/subsubgroup/testproject.git", "git://mygitlab.com/group/subgrouptest/subsubgroup/testproject.git"]: u = giturlparse(u) self.assertIn(u.user, (None, "git")) self.assertIsNone(u.port) self.assertEqual(u.domain, "mygitlab.com") self.assertEqual(u.owner, "group/subgrouptest/subsubgroup") self.assertEqual(u.repo, "testproject") def test_gitlab_user(self): for u in [ "ssh://buildbot@mygitlab.com:group/subgrouptest/testproject.git", "https://buildbot@mygitlab.com/group/subgrouptest/testproject.git"]: u = giturlparse(u) self.assertEqual(u.domain, "mygitlab.com") self.assertIsNone(u.port) self.assertEqual(u.user, "buildbot") self.assertEqual(u.owner, "group/subgrouptest") self.assertEqual(u.repo, "testproject") def test_gitlab_port(self): for u in [ "ssh://buildbot@mygitlab.com:1234/group/subgrouptest/testproject.git"]: u = giturlparse(u) self.assertEqual(u.domain, "mygitlab.com") self.assertEqual(u.port, 1234) self.assertEqual(u.user, "buildbot") self.assertEqual(u.owner, "group/subgrouptest") self.assertEqual(u.repo, "testproject") def test_bitbucket(self): for u in [ "https://bitbucket.org/org/repo.git", "ssh://git@bitbucket.org:org/repo.git", "git@bitbucket.org:org/repo.git", ]: u = giturlparse(u) self.assertIn(u.user, (None, "git")) self.assertEqual(u.domain, "bitbucket.org") self.assertEqual(u.owner, "org") self.assertEqual(u.repo, "repo") def test_no_owner(self): for u in [ "https://example.org/repo.git", "ssh://example.org:repo.git", "ssh://git@example.org:repo.git", "git@example.org:repo.git", ]: u = giturlparse(u) self.assertIn(u.user, (None, "git")) self.assertEqual(u.domain, "example.org") self.assertIsNone(u.owner) self.assertEqual(u.repo, "repo") def test_protos(self): self.assertEqual(giturlparse("https://bitbucket.org/org/repo.git").proto, "https") self.assertEqual(giturlparse("git://bitbucket.org/org/repo.git").proto, "git") self.assertEqual(giturlparse("ssh://git@bitbucket.org:org/repo.git").proto, "ssh") 
self.assertEqual(giturlparse("git@bitbucket.org:org/repo.git").proto, "ssh") buildbot-3.4.0/master/buildbot/test/unit/util/test_httpclientservice.py000066400000000000000000000460221413250514000265030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import json import os import mock from twisted.internet import defer from twisted.internet import reactor from twisted.python import components from twisted.trial import unittest from twisted.web import resource from twisted.web import server from buildbot import interfaces from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.util import bytes2unicode from buildbot.util import httpclientservice from buildbot.util import service from buildbot.util import unicode2bytes try: from requests.auth import HTTPDigestAuth except ImportError: pass # There is no way to unregister an adapter, so we have no other option # than registering it as a module side effect :-( components.registerAdapter( lambda m: m, mock.Mock, interfaces.IHttpResponse) class HTTPClientServiceTestBase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): if httpclientservice.txrequests is None or httpclientservice.treq is None: raise unittest.SkipTest('this test requires txrequests and treq') self.patch(httpclientservice, 
'txrequests', mock.Mock()) self.patch(httpclientservice, 'treq', mock.Mock()) self.parent = service.MasterService() self.parent.reactor = reactor self.base_headers = {} yield self.parent.startService() class HTTPClientServiceTestTxRequest(HTTPClientServiceTestBase): @defer.inlineCallbacks def setUp(self): yield super().setUp() self._http = yield httpclientservice.HTTPClientService.getService( self.parent, 'http://foo', headers=self.base_headers) def test_get(self): self._http.get('/bar') self._http._session.request.assert_called_once_with('get', 'http://foo/bar', headers={}, background_callback=mock.ANY) def test_put(self): self._http.put('/bar', json={'foo': 'bar'}) jsonStr = json.dumps(dict(foo='bar')) jsonBytes = unicode2bytes(jsonStr) headers = {'Content-Type': 'application/json'} self._http._session.request.assert_called_once_with('put', 'http://foo/bar', background_callback=mock.ANY, data=jsonBytes, headers=headers) def test_post(self): self._http.post('/bar', json={'foo': 'bar'}) jsonStr = json.dumps(dict(foo='bar')) jsonBytes = unicode2bytes(jsonStr) headers = {'Content-Type': 'application/json'} self._http._session.request.assert_called_once_with('post', 'http://foo/bar', background_callback=mock.ANY, data=jsonBytes, headers=headers) def test_delete(self): self._http.delete('/bar') self._http._session.request.assert_called_once_with('delete', 'http://foo/bar', background_callback=mock.ANY, headers={}) def test_post_headers(self): self.base_headers.update({'X-TOKEN': 'XXXYYY'}) self._http.post('/bar', json={'foo': 'bar'}) jsonStr = json.dumps(dict(foo='bar')) jsonBytes = unicode2bytes(jsonStr) self._http._session.request.assert_called_once_with('post', 'http://foo/bar', background_callback=mock.ANY, data=jsonBytes, headers={ 'X-TOKEN': 'XXXYYY', 'Content-Type': 'application/json'}) @defer.inlineCallbacks def test_post_auth(self): self._http = yield httpclientservice.HTTPClientService.getService(self.parent, 'http://foo', auth=('user', 'pa$$')) 
self._http.post('/bar', json={'foo': 'bar'}) jsonStr = json.dumps(dict(foo='bar')) jsonBytes = unicode2bytes(jsonStr) self._http._session.request.assert_called_once_with('post', 'http://foo/bar', background_callback=mock.ANY, data=jsonBytes, auth=( 'user', 'pa$$'), headers={ 'Content-Type': 'application/json' }) class HTTPClientServiceTestTxRequestNoEncoding(HTTPClientServiceTestBase): @defer.inlineCallbacks def setUp(self): yield super().setUp() self._http = self.successResultOf( httpclientservice.HTTPClientService.getService(self.parent, 'http://foo', headers=self.base_headers, skipEncoding=True)) def test_post_raw(self): self._http.post('/bar', json={'foo': 'bar'}) jsonStr = json.dumps(dict(foo='bar')) headers = {'Content-Type': 'application/json'} self._http._session.request.assert_called_once_with('post', 'http://foo/bar', background_callback=mock.ANY, data=jsonStr, headers=headers) def test_post_rawlist(self): self._http.post('/bar', json=[{'foo': 'bar'}]) jsonStr = json.dumps([dict(foo='bar')]) headers = {'Content-Type': 'application/json'} self._http._session.request.assert_called_once_with('post', 'http://foo/bar', background_callback=mock.ANY, data=jsonStr, headers=headers) class HTTPClientServiceTestTReq(HTTPClientServiceTestBase): @defer.inlineCallbacks def setUp(self): yield super().setUp() self.patch(httpclientservice.HTTPClientService, 'PREFER_TREQ', True) self._http = yield httpclientservice.HTTPClientService.getService( self.parent, 'http://foo', headers=self.base_headers) def test_get(self): self._http.get('/bar') httpclientservice.treq.get.assert_called_once_with('http://foo/bar', agent=mock.ANY, headers={}) def test_put(self): self._http.put('/bar', json={'foo': 'bar'}) headers = {'Content-Type': ['application/json']} httpclientservice.treq.put.assert_called_once_with('http://foo/bar', agent=mock.ANY, data=b'{"foo": "bar"}', headers=headers) def test_post(self): self._http.post('/bar', json={'foo': 'bar'}) headers = {'Content-Type': 
['application/json']} httpclientservice.treq.post.assert_called_once_with('http://foo/bar', agent=mock.ANY, data=b'{"foo": "bar"}', headers=headers) def test_delete(self): self._http.delete('/bar') httpclientservice.treq.delete.assert_called_once_with('http://foo/bar', agent=mock.ANY, headers={}) def test_post_headers(self): self.base_headers.update({'X-TOKEN': 'XXXYYY'}) self._http.post('/bar', json={'foo': 'bar'}) headers = { 'Content-Type': ['application/json'], 'X-TOKEN': ['XXXYYY'] } httpclientservice.treq.post.assert_called_once_with('http://foo/bar', agent=mock.ANY, data=b'{"foo": "bar"}', headers=headers) @defer.inlineCallbacks def test_post_auth(self): self._http = yield httpclientservice.HTTPClientService.getService(self.parent, 'http://foo', auth=('user', 'pa$$')) self._http.post('/bar', json={'foo': 'bar'}) headers = { 'Content-Type': ['application/json'], } httpclientservice.treq.post.assert_called_once_with('http://foo/bar', agent=mock.ANY, data=b'{"foo": "bar"}', auth=( 'user', 'pa$$'), headers=headers) @defer.inlineCallbacks def test_post_auth_digest(self): auth = HTTPDigestAuth('user', 'pa$$') self._http = yield httpclientservice.HTTPClientService.getService(self.parent, 'http://foo', auth=auth) self._http.post('/bar', data={'foo': 'bar'}) # if digest auth, we don't use treq! 
we use txrequests self._http._session.request.assert_called_once_with('post', 'http://foo/bar', background_callback=mock.ANY, data=dict( foo='bar'), auth=auth, headers={ }) class HTTPClientServiceTestTReqNoEncoding(HTTPClientServiceTestBase): @defer.inlineCallbacks def setUp(self): yield super().setUp() self.patch(httpclientservice.HTTPClientService, 'PREFER_TREQ', True) self._http = self.successResultOf( httpclientservice.HTTPClientService.getService(self.parent, 'http://foo', headers=self.base_headers, skipEncoding=True)) def test_post_raw(self): self._http.post('/bar', json={'foo': 'bar'}) json_str = json.dumps(dict(foo='bar')) headers = {'Content-Type': ['application/json']} httpclientservice.treq.post.assert_called_once_with('http://foo/bar', agent=mock.ANY, data=json_str, headers=headers) def test_post_rawlist(self): self._http.post('/bar', json=[{'foo': 'bar'}]) json_str = json.dumps([dict(foo='bar')]) headers = {'Content-Type': ['application/json']} httpclientservice.treq.post.assert_called_once_with('http://foo/bar', agent=mock.ANY, data=json_str, headers=headers) class MyResource(resource.Resource): isLeaf = True def render_GET(self, request): def decode(x): if isinstance(x, bytes): return bytes2unicode(x) elif isinstance(x, (list, tuple)): return [bytes2unicode(y) for y in x] elif isinstance(x, dict): newArgs = {} for a, b in x.items(): newArgs[decode(a)] = decode(b) return newArgs return x args = decode(request.args) content_type = request.getHeader(b'content-type') if content_type == b"application/json": jsonBytes = request.content.read() jsonStr = bytes2unicode(jsonBytes) args['json_received'] = json.loads(jsonStr) data = json.dumps(args) data = unicode2bytes(data) request.setHeader(b'content-type', b'application/json') request.setHeader(b'content-length', b"%d" % len(data)) if request.method == b'HEAD': return b'' return data render_HEAD = render_GET render_POST = render_GET class HTTPClientServiceTestTxRequestE2E(unittest.TestCase): """The e2e tests 
must be the same for txrequests and treq We just force treq in the other TestCase """ def httpFactory(self, parent): return httpclientservice.HTTPClientService.getService( parent, 'http://127.0.0.1:{}'.format(self.port)) def expect(self, *arg, **kwargs): pass @defer.inlineCallbacks def setUp(self): if httpclientservice.txrequests is None or httpclientservice.treq is None: raise unittest.SkipTest('this test requires txrequests and treq') site = server.Site(MyResource()) self.listenport = reactor.listenTCP(0, site) self.port = self.listenport.getHost().port self.parent = parent = service.MasterService() self.parent.reactor = reactor yield parent.startService() self._http = yield self.httpFactory(parent) @defer.inlineCallbacks def tearDown(self): self.listenport.stopListening() yield self.parent.stopService() @defer.inlineCallbacks def test_content(self): self.expect('get', '/', content_json={}) res = yield self._http.get('/') content = yield res.content() self.assertEqual(content, b'{}') @defer.inlineCallbacks def test_content_with_params(self): self.expect('get', '/', params=dict(a='b'), content_json=dict(a=['b'])) res = yield self._http.get('/', params=dict(a='b')) content = yield res.content() self.assertEqual(content, b'{"a": ["b"]}') @defer.inlineCallbacks def test_post_content_with_params(self): self.expect('post', '/', params=dict(a='b'), content_json=dict(a=['b'])) res = yield self._http.post('/', params=dict(a='b')) content = yield res.content() self.assertEqual(content, b'{"a": ["b"]}') @defer.inlineCallbacks def test_put_content_with_data(self): self.expect('post', '/', data=dict(a='b'), content_json=dict(a=['b'])) res = yield self._http.post('/', data=dict(a='b')) content = yield res.content() self.assertEqual(content, b'{"a": ["b"]}') @defer.inlineCallbacks def test_put_content_with_json(self): exp_content_json = dict(json_received=dict(a='b')) self.expect('post', '/', json=dict(a='b'), content_json=exp_content_json) res = yield self._http.post('/', 
json=dict(a='b')) content = yield res.content() content = bytes2unicode(content) content = json.loads(content) self.assertEqual(content, exp_content_json) @defer.inlineCallbacks def test_put_content_with_json_datetime(self): exp_content_json = dict(json_received=dict(a='b', ts=12)) dt = datetime.datetime.utcfromtimestamp(12) self.expect('post', '/', json=dict(a='b', ts=dt), content_json=exp_content_json) res = yield self._http.post('/', json=dict(a='b', ts=dt)) content = yield res.content() content = bytes2unicode(content) content = json.loads(content) self.assertEqual(content, exp_content_json) @defer.inlineCallbacks def test_json(self): self.expect('get', '/', content_json={}) res = yield self._http.get('/') content = yield res.json() self.assertEqual(content, {}) self.assertEqual(res.code, 200) # note that freebsd workers will not like when there are too many parallel connections # we can change this test via environment variable NUM_PARALLEL = os.environ.get("BBTEST_NUM_PARALLEL", 5) @defer.inlineCallbacks def test_lots(self): for i in range(self.NUM_PARALLEL): self.expect('get', '/', params=dict(a='b'), content_json=dict(a=['b'])) # use for benchmarking (txrequests: 3ms per request treq: 1ms per # request) for i in range(self.NUM_PARALLEL): res = yield self._http.get('/', params=dict(a='b')) content = yield res.content() self.assertEqual(content, b'{"a": ["b"]}') @defer.inlineCallbacks def test_lots_parallel(self): for i in range(self.NUM_PARALLEL): self.expect('get', '/', params=dict(a='b'), content_json=dict(a=['b'])) # use for benchmarking (txrequests: 3ms per request treq: 11ms per # request (!?)) def oneReq(): d = self._http.get('/', params=dict(a='b')) @d.addCallback def content(res): return res.content() return d dl = [oneReq() for i in range(self.NUM_PARALLEL)] yield defer.gatherResults(dl) class HTTPClientServiceTestTReqE2E(HTTPClientServiceTestTxRequestE2E): @defer.inlineCallbacks def setUp(self): self.patch(httpclientservice.HTTPClientService, 
'PREFER_TREQ', True) yield super().setUp() class HTTPClientServiceTestFakeE2E(HTTPClientServiceTestTxRequestE2E): @defer.inlineCallbacks def httpFactory(self, parent): service = yield fakehttpclientservice.HTTPClientService.getService( parent, self, 'http://127.0.0.1:{}'.format(self.port)) return service def expect(self, *arg, **kwargs): self._http.expect(*arg, **kwargs) buildbot-3.4.0/master/buildbot/test/unit/util/test_identifiers.py000066400000000000000000000100631413250514000252450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import locale from twisted.python import log from twisted.trial import unittest from buildbot.util import identifiers class Tests(unittest.TestCase): def test_isIdentifier(self): os_encoding = locale.getpreferredencoding() try: '\N{SNOWMAN}'.encode(os_encoding) except UnicodeEncodeError as e: # Default encoding of Windows console is 'cp1252' # which cannot encode the snowman. 
raise(unittest.SkipTest("Cannot encode weird unicode " "on this platform with {}".format(os_encoding))) from e good = [ "linux", "Linux", "abc123", "a" * 50, '\N{SNOWMAN}' ] for g in good: log.msg('expect %r to be good' % (g,)) self.assertTrue(identifiers.isIdentifier(50, g)) bad = [ None, '', b'linux', 'a/b', "a.b.c.d", "a-b_c.d9", 'spaces not allowed', "a" * 51, "123 no initial digits", '\N{SNOWMAN}.\N{SNOWMAN}', ] for b in bad: log.msg('expect %r to be bad' % (b,)) self.assertFalse(identifiers.isIdentifier(50, b)) def assertEqualUnicode(self, got, exp): self.assertTrue(isinstance(exp, str)) self.assertEqual(got, exp) def test_forceIdentifier_already_is(self): self.assertEqualUnicode( identifiers.forceIdentifier(10, 'abc'), 'abc') def test_forceIdentifier_ascii(self): self.assertEqualUnicode( identifiers.forceIdentifier(10, 'abc'), 'abc') def test_forceIdentifier_too_long(self): self.assertEqualUnicode( identifiers.forceIdentifier(10, 'abcdefghijKL'), 'abcdefghij') def test_forceIdentifier_invalid_chars(self): self.assertEqualUnicode( identifiers.forceIdentifier(100, 'my log.html'), 'my_log_html') def test_forceIdentifier_leading_digit(self): self.assertEqualUnicode( identifiers.forceIdentifier(100, '9 pictures of cats.html'), '__pictures_of_cats_html') def test_forceIdentifier_digits(self): self.assertEqualUnicode( identifiers.forceIdentifier(100, 'warnings(2000)'), 'warnings_2000_') def test_incrementIdentifier_simple(self): self.assertEqualUnicode( identifiers.incrementIdentifier(100, 'aaa'), 'aaa_2') def test_incrementIdentifier_simple_way_too_long(self): self.assertEqualUnicode( identifiers.incrementIdentifier(3, 'aaa'), 'a_2') def test_incrementIdentifier_simple_too_long(self): self.assertEqualUnicode( identifiers.incrementIdentifier(4, 'aaa'), 'aa_2') def test_incrementIdentifier_single_digit(self): self.assertEqualUnicode( identifiers.incrementIdentifier(100, 'aaa_2'), 'aaa_3') def test_incrementIdentifier_add_digits(self): self.assertEqualUnicode( 
identifiers.incrementIdentifier(100, 'aaa_99'), 'aaa_100') def test_incrementIdentifier_add_digits_too_long(self): self.assertEqualUnicode( identifiers.incrementIdentifier(6, 'aaa_99'), 'aa_100') def test_incrementIdentifier_add_digits_out_of_space(self): with self.assertRaises(ValueError): identifiers.incrementIdentifier(6, '_99999') buildbot-3.4.0/master/buildbot/test/unit/util/test_interfaces.py000066400000000000000000000056411413250514000250710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.test.util import interfaces class TestAssertArgSpecMatches(interfaces.InterfaceTests, unittest.TestCase): def test_simple_decorator(self): def myfunc(x, y=2, *args): pass @self.assertArgSpecMatches(myfunc) def myfunc2(x, y=2, *args): pass try: @self.assertArgSpecMatches(myfunc) def myfunc3(x, y=3, *args): pass except Exception as e: error = e else: error = None self.assertIdentical(type(error), unittest.FailTest) self.assertEqual( error.args, ('Expected: (x, y=3, *args); got: (x, y=2, *args)',)) def test_double_decorator(self): def myfunc(x, y): pass def myfunc2(x, y): pass def myfunc3(x, yy): pass @self.assertArgSpecMatches(myfunc, myfunc2) def myfunc4(x, y): pass try: @self.assertArgSpecMatches(myfunc, myfunc3) def myfunc5(x, y): pass except Exception as e: error = e else: error = None self.assertIdentical(type(error), unittest.FailTest) self.assertEqual(error.args, ('Expected: (x, y); got: (x, yy)',)) try: @self.assertArgSpecMatches(myfunc, myfunc3) def myfunc6(xx, yy): pass except Exception as e: error = e else: error = None self.assertIdentical(type(error), unittest.FailTest) self.assertEqual(error.args, ('Expected: (x, y); got: (x, yy)',)) def test_function_style(self): def myfunc(x, y=2, *args): pass def myfunc2(x, y=2, *args): pass def myfunc3(x, y=3, *args): pass self.assertArgSpecMatches(myfunc, myfunc2) try: self.assertArgSpecMatches(myfunc, myfunc3) except Exception as e: error = e else: error = None self.assertIdentical(type(error), unittest.FailTest) self.assertEqual( error.args, ('Expected: (x, y=2, *args); got: (x, y=3, *args)',)) buildbot-3.4.0/master/buildbot/test/unit/util/test_kubeclientservice.py000066400000000000000000000352761413250514000264630ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import base64 import copy import os import sys import textwrap from io import StringIO from unittest.case import SkipTest import yaml import mock from twisted.internet import defer from twisted.python import runtime from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttp from buildbot.test.fake import kube as fakekube from buildbot.test.util import config from buildbot.test.util.misc import TestReactorMixin from buildbot.util import kubeclientservice class MockFileBase: file_mock_config = {} def setUp(self): self.patcher = mock.patch('buildbot.util.kubeclientservice.open', self.mock_open) self.patcher.start() def tearDown(self): self.patcher.stop() def mock_open(self, filename, mode=None, encoding='UTF-8'): filename_type = os.path.basename(filename) file_value = self.file_mock_config[filename_type] mock_open = mock.Mock( __enter__=mock.Mock(return_value=StringIO(file_value)), __exit__=mock.Mock()) return mock_open class KubeClientServiceTestClusterConfig( MockFileBase, config.ConfigErrorsMixin, unittest.TestCase): file_mock_config = { 'token': 'BASE64_TOKEN', 'namespace': 'buildbot_namespace' } def setUp(self): super().setUp() self.patch(kubeclientservice.os, 'environ', 
{'KUBERNETES_PORT': 'tcp://foo'}) def patchExist(self, val): self.patch(kubeclientservice.os.path, 'exists', lambda x: val) def test_not_exists(self): self.patchExist(False) with self.assertRaisesConfigError('kube_dir not found:'): kubeclientservice.KubeInClusterConfigLoader() @defer.inlineCallbacks def test_basic(self): self.patchExist(True) config = kubeclientservice.KubeInClusterConfigLoader() yield config.startService() self.assertEqual( config.getConfig(), { 'headers': { 'Authorization': 'Bearer BASE64_TOKEN' }, 'master_url': 'https://foo', 'namespace': 'buildbot_namespace', 'verify': '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt' }) KUBE_CTL_PROXY_FAKE = """ import time import sys print("Starting to serve on 127.0.0.1:" + sys.argv[2]) sys.stdout.flush() time.sleep(1000) """ KUBE_CTL_PROXY_FAKE_ERROR = """ import time import sys print("Issue with the config!", file=sys.stderr) sys.stderr.flush() sys.exit(1) """ class KubeClientServiceTestKubeHardcodedConfig(config.ConfigErrorsMixin, unittest.TestCase): def test_basic(self): self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default" ) self.assertEqual(config.getConfig(), { 'master_url': 'http://localhost:8001', 'namespace': 'default', 'headers': {} }) @defer.inlineCallbacks def test_verify_is_forwarded_to_keywords(self): self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default", verify="/path/to/pem" ) service = kubeclientservice.KubeClientService(config) url, kwargs = yield service._prepareRequest("/test", {}) self.assertEqual('/path/to/pem', kwargs['verify']) @defer.inlineCallbacks def test_verify_headers_are_passed_to_the_query(self): self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default", verify="/path/to/pem", headers={'Test': '10'} ) service = kubeclientservice.KubeClientService(config) url, kwargs = yield 
service._prepareRequest("/test", {}) self.assertEqual({'Test': '10'}, kwargs['headers']) def test_the_configuration_parent_is_set_to_the_service(self): # This is needed to allow secret expansion self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001") service = kubeclientservice.KubeClientService(config) self.assertEqual(service, self.config.parent) def test_cannot_pass_both_bearer_and_basic_auth(self): with self.assertRaises(Exception): kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default", verify="/path/to/pem", basicAuth="Bla", bearerToken="Bla") @defer.inlineCallbacks def test_verify_bearerToken_is_expanded(self): self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default", verify="/path/to/pem", bearerToken=Interpolate("%(kw:test)s", test=10)) service = kubeclientservice.KubeClientService(config) url, kwargs = yield service._prepareRequest("/test", {}) self.assertEqual("Bearer 10", kwargs['headers']['Authorization']) @defer.inlineCallbacks def test_verify_basicAuth_is_expanded(self): self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default", verify="/path/to/pem", basicAuth={'user': 'name', 'password': Interpolate("%(kw:test)s", test=10)}) service = kubeclientservice.KubeClientService(config) url, kwargs = yield service._prepareRequest("/test", {}) expected = "Basic {0}".format(base64.b64encode("name:10".encode('utf-8'))) self.assertEqual(expected, kwargs['headers']['Authorization']) class KubeClientServiceTestKubeCtlProxyConfig(config.ConfigErrorsMixin, unittest.TestCase): def patchProxyCmd(self, cmd): if runtime.platformType != 'posix': self.config = None raise SkipTest('only posix platform is supported by this test') self.patch(kubeclientservice.KubeCtlProxyConfigLoader, 'kube_ctl_proxy_cmd', [sys.executable, "-c", cmd]) def tearDown(self): if self.config is 
not None: return self.config.stopService() return None @defer.inlineCallbacks def test_basic(self): self.patchProxyCmd(KUBE_CTL_PROXY_FAKE) self.config = config = kubeclientservice.KubeCtlProxyConfigLoader() yield config.startService() self.assertEqual(config.getConfig(), { 'master_url': 'http://localhost:8001', 'namespace': 'default' }) @defer.inlineCallbacks def test_config_args(self): self.patchProxyCmd(KUBE_CTL_PROXY_FAKE) self.config = config = kubeclientservice.KubeCtlProxyConfigLoader( proxy_port=8002, namespace="system") yield config.startService() self.assertEqual(config.kube_proxy_output, b'Starting to serve on 127.0.0.1:8002') self.assertEqual(config.getConfig(), { 'master_url': 'http://localhost:8002', 'namespace': 'system' }) yield config.stopService() @defer.inlineCallbacks def test_config_with_error(self): self.patchProxyCmd(KUBE_CTL_PROXY_FAKE_ERROR) self.config = config = kubeclientservice.KubeCtlProxyConfigLoader() with self.assertRaises(RuntimeError): yield config.startService() # integration tests for KubeClientService class RealKubeClientServiceTest(TestReactorMixin, unittest.TestCase): timeout = 200 POD_SPEC = yaml.safe_load( textwrap.dedent(""" apiVersion: v1 kind: Pod metadata: name: pod-example spec: containers: - name: alpine image: alpine command: ["sleep"] args: ["100"] """)) def createKube(self): if "TEST_KUBERNETES" not in os.environ: raise SkipTest( "kubernetes integration tests only run when environment " "variable TEST_KUBERNETES is set") self.kube = kubeclientservice.KubeClientService( kubeclientservice.KubeCtlProxyConfigLoader()) def expect(self, *args, **kwargs): pass @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.createKube() yield self.kube.setServiceParent(self.master) yield self.master.startService() def tearDown(self): return self.master.stopService() kube = None @defer.inlineCallbacks def test_create_and_delete_pod(self): content = {'kind': 'Pod', 'metadata': 
{'name': 'pod-example'}} self.expect( method='post', ep='/api/v1/namespaces/default/pods', params=None, data=None, json={ 'apiVersion': 'v1', 'kind': 'Pod', 'metadata': { 'name': 'pod-example' }, 'spec': { 'containers': [{ 'name': 'alpine', 'image': 'alpine', 'command': ['sleep'], 'args': ['100'] }] } }, content_json=content) res = yield self.kube.createPod(self.kube.namespace, self.POD_SPEC) self.assertEqual(res['kind'], 'Pod') self.assertEqual(res['metadata']['name'], 'pod-example') self.assertNotIn('deletionTimestamp', res['metadata']) content['metadata']['deletionTimestamp'] = 'now' self.expect( method='delete', ep='/api/v1/namespaces/default/pods/pod-example', params={'graceperiod': 0}, data=None, json=None, code=200, content_json=content) res = yield self.kube.deletePod(self.kube.namespace, 'pod-example') self.assertEqual(res['kind'], 'Pod') self.assertIn('deletionTimestamp', res['metadata']) # first time present self.expect( method='get', ep='/api/v1/namespaces/default/pods/pod-example/status', params=None, data=None, json=None, code=200, content_json=content) # second time deleted content = {'kind': 'Status', 'reason': 'NotFound'} self.expect( method='get', ep='/api/v1/namespaces/default/pods/pod-example/status', params=None, data=None, json=None, code=404, content_json=content) res = yield self.kube.waitForPodDeletion( self.kube.namespace, 'pod-example', timeout=200) self.assertEqual(res['kind'], 'Status') self.assertEqual(res['reason'], 'NotFound') @defer.inlineCallbacks def test_create_bad_spec(self): spec = copy.deepcopy(self.POD_SPEC) del spec['metadata'] content = { 'kind': 'Status', 'reason': 'MissingName', 'message': 'need name' } self.expect( method='post', ep='/api/v1/namespaces/default/pods', params=None, data=None, json={ 'apiVersion': 'v1', 'kind': 'Pod', 'spec': { 'containers': [{ 'name': 'alpine', 'image': 'alpine', 'command': ['sleep'], 'args': ['100'] }] } }, code=400, content_json=content) with 
self.assertRaises(kubeclientservice.KubeError): yield self.kube.createPod(self.kube.namespace, spec) @defer.inlineCallbacks def test_delete_not_existing(self): content = { 'kind': 'Status', 'reason': 'NotFound', 'message': 'no container by that name' } self.expect( method='delete', ep='/api/v1/namespaces/default/pods/pod-example', params={'graceperiod': 0}, data=None, json=None, code=404, content_json=content) with self.assertRaises(kubeclientservice.KubeError): yield self.kube.deletePod(self.kube.namespace, 'pod-example') @defer.inlineCallbacks def test_wait_for_delete_not_deleting(self): yield self.kube.createPod(self.kube.namespace, self.POD_SPEC) with self.assertRaises(TimeoutError): yield self.kube.waitForPodDeletion( self.kube.namespace, 'pod-example', timeout=2) res = yield self.kube.deletePod(self.kube.namespace, 'pod-example') self.assertEqual(res['kind'], 'Pod') self.assertIn('deletionTimestamp', res['metadata']) yield self.kube.waitForPodDeletion( self.kube.namespace, 'pod-example', timeout=100) class FakeKubeClientServiceTest(RealKubeClientServiceTest): def createKube(self): self.kube = fakekube.KubeClientService( kubeclientservice.KubeHardcodedConfig(master_url='http://m')) class PatchedKubeClientServiceTest(RealKubeClientServiceTest): def createKube(self): self.kube = kubeclientservice.KubeClientService( kubeclientservice.KubeHardcodedConfig(master_url='http://m')) self.http = fakehttp.HTTPClientService('http://m') self.kube.get = self.http.get self.kube.post = self.http.post self.kube.put = self.http.put self.kube.delete = self.http.delete def expect(self, *args, **kwargs): return self.http.expect(*args, **kwargs) def test_wait_for_delete_not_deleting(self): # no need to describe the expect flow for that case pass buildbot-3.4.0/master/buildbot/test/unit/util/test_lineboundaries.py000066400000000000000000000127421413250514000257510ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from twisted.trial import unittest from buildbot.util import lineboundaries class LBF(unittest.TestCase): def setUp(self): self.callbacks = [] self.lbf = lineboundaries.LineBoundaryFinder(self._callback) def _callback(self, wholeLines): self.assertEqual(wholeLines[-1], '\n', 'got %r' % (wholeLines)) self.callbacks.append(wholeLines) d = defer.Deferred() reactor.callLater(0, d.callback, None) return d def assertCallbacks(self, callbacks): self.assertEqual(self.callbacks, callbacks) self.callbacks = [] # tests @defer.inlineCallbacks def test_already_terminated(self): yield self.lbf.append('abcd\ndefg\n') self.assertCallbacks(['abcd\ndefg\n']) yield self.lbf.append('xyz\n') self.assertCallbacks(['xyz\n']) yield self.lbf.flush() self.assertCallbacks([]) @defer.inlineCallbacks def test_partial_line(self): for c in "hello\nworld": yield self.lbf.append(c) self.assertCallbacks(['hello\n']) yield self.lbf.flush() self.assertCallbacks(['world\n']) @defer.inlineCallbacks def test_empty_appends(self): yield self.lbf.append('hello ') yield self.lbf.append('') yield self.lbf.append('world\n') yield self.lbf.append('') self.assertCallbacks(['hello world\n']) @defer.inlineCallbacks def test_embedded_newlines(self): yield 
self.lbf.append('hello, ') self.assertCallbacks([]) yield self.lbf.append('cruel\nworld') self.assertCallbacks(['hello, cruel\n']) yield self.lbf.flush() self.assertCallbacks(['world\n']) @defer.inlineCallbacks def test_windows_newlines_folded(self): r"Windows' \r\n is treated as and converted to a newline" yield self.lbf.append('hello, ') self.assertCallbacks([]) yield self.lbf.append('cruel\r\n\r\nworld') self.assertCallbacks(['hello, cruel\n\n']) yield self.lbf.flush() self.assertCallbacks(['world\n']) @defer.inlineCallbacks def test_bare_cr_folded(self): r"a bare \r is treated as and converted to a newline" yield self.lbf.append('1%\r5%\r15%\r100%\nfinished') yield self.lbf.flush() self.assertCallbacks(['1%\n5%\n15%\n100%\n', 'finished\n']) @defer.inlineCallbacks def test_backspace_folded(self): r"a lot of \b is treated as and converted to a newline" yield self.lbf.append('1%\b\b5%\b\b15%\b\b\b100%\nfinished') yield self.lbf.flush() self.assertCallbacks(['1%\n5%\n15%\n100%\n', 'finished\n']) @defer.inlineCallbacks def test_mixed_consecutive_newlines(self): r"mixing newline styles back-to-back doesn't collapse them" yield self.lbf.append('1\r\n\n\r') self.assertCallbacks(['1\n\n']) # last \r is delayed until flush yield self.lbf.append('2\n\r\n') self.assertCallbacks(['\n2\n\n']) @defer.inlineCallbacks def test_split_newlines(self): r"multi-character newlines, split across chunks, are converted" input = 'a\nb\r\nc\rd\n\re' for splitpoint in range(1, len(input) - 1): a, b = input[:splitpoint], input[splitpoint:] yield self.lbf.append(a) yield self.lbf.append(b) yield self.lbf.flush() res = ''.join(self.callbacks) log.msg('feeding %r, %r gives %r' % (a, b, res)) self.assertEqual(res, 'a\nb\nc\nd\n\ne\n') self.callbacks = [] @defer.inlineCallbacks def test_split_terminal_control(self): """terminal control characters are converted""" yield self.lbf.append('1234\033[u4321') yield self.lbf.flush() self.assertCallbacks(['1234\n', '4321\n']) yield 
self.lbf.append('1234\033[1;2H4321') yield self.lbf.flush() self.assertCallbacks(['1234\n', '4321\n']) yield self.lbf.append('1234\033[1;2f4321') yield self.lbf.flush() self.assertCallbacks(['1234\n', '4321\n']) @defer.inlineCallbacks def test_long_lines(self): """long lines are split""" for i in range(4): yield self.lbf.append('12' * 1000) # a split at 4096 + the remaining chars self.assertCallbacks(['12' * 2048 + '\n' + '12' * 952 + '\n']) @defer.inlineCallbacks def test_huge_lines(self): """huge lines are split""" yield self.lbf.append('12' * 32768) yield self.lbf.flush() self.assertCallbacks([('12' * 2048 + '\n') * 16]) @defer.inlineCallbacks def test_empty_flush(self): yield self.lbf.flush() self.assertEqual(self.callbacks, []) buildbot-3.4.0/master/buildbot/test/unit/util/test_lru.py000066400000000000000000000431351413250514000235500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import gc import random import string from twisted.internet import defer from twisted.internet import reactor from twisted.python import failure from twisted.trial import unittest from buildbot.util import lru # construct weakref-able objects for particular keys def short(k): return set([k.upper() * 3]) def long(k): return set([k.upper() * 6]) class LRUCacheTest(unittest.TestCase): def setUp(self): lru.inv_failed = False self.lru = lru.LRUCache(short, 3) def tearDown(self): self.assertFalse(lru.inv_failed, "invariant failed; see logs") def check_result(self, r, exp, exp_hits=None, exp_misses=None, exp_refhits=None): self.assertEqual(r, exp) if exp_hits is not None: self.assertEqual(self.lru.hits, exp_hits) if exp_misses is not None: self.assertEqual(self.lru.misses, exp_misses) if exp_refhits is not None: self.assertEqual(self.lru.refhits, exp_refhits) def test_single_key(self): # just get an item val = self.lru.get('a') self.check_result(val, short('a'), 0, 1) # second time, it should be cached.. 
self.lru.miss_fn = long val = self.lru.get('a') self.check_result(val, short('a'), 1, 1) def test_simple_lru_expulsion(self): val = self.lru.get('a') self.check_result(val, short('a'), 0, 1) val = self.lru.get('b') self.check_result(val, short('b'), 0, 2) val = self.lru.get('c') self.check_result(val, short('c'), 0, 3) val = self.lru.get('d') self.check_result(val, short('d'), 0, 4) del(val) gc.collect() # now try 'a' again - it should be a miss self.lru.miss_fn = long val = self.lru.get('a') self.check_result(val, long('a'), 0, 5) # ..and that expelled B, but C is still in the cache val = self.lru.get('c') self.check_result(val, short('c'), 1, 5) @defer.inlineCallbacks def test_simple_lru_expulsion_maxsize_1(self): self.lru = lru.LRUCache(short, 1) val = yield self.lru.get('a') self.check_result(val, short('a'), 0, 1) val = yield self.lru.get('a') self.check_result(val, short('a'), 1, 1) val = yield self.lru.get('b') self.check_result(val, short('b'), 1, 2) del(val) gc.collect() # now try 'a' again - it should be a miss self.lru.miss_fn = long val = yield self.lru.get('a') self.check_result(val, long('a'), 1, 3) del(val) gc.collect() # ..and that expelled B val = yield self.lru.get('b') self.check_result(val, long('b'), 1, 4) def test_simple_lru_expulsion_maxsize_1_null_result(self): # a regression test for #2011 def miss_fn(k): if k == 'b': return None return short(k) self.lru = lru.LRUCache(miss_fn, 1) val = self.lru.get('a') self.check_result(val, short('a'), 0, 1) val = self.lru.get('b') self.check_result(val, None, 0, 2) del(val) # 'a' was not expelled since 'b' was None self.lru.miss_fn = long val = self.lru.get('a') self.check_result(val, short('a'), 1, 2) def test_queue_collapsing(self): # just to check that we're practicing with the right queue size (so # QUEUE_SIZE_FACTOR is 10) self.assertEqual(self.lru.max_queue, 30) for c in 'a' + 'x' * 27 + 'ab': res = self.lru.get(c) self.check_result(res, short('b'), 27, 3) # at this point, we should have 'x', 'a', 
and 'b' in the cache, and # 'axx..xxab' in the queue. self.assertEqual(len(self.lru.queue), 30) # This 'get' operation for an existing key should cause compaction res = self.lru.get('b') self.check_result(res, short('b'), 28, 3) self.assertEqual(len(self.lru.queue), 3) # expect a cached short('a') self.lru.miss_fn = long res = self.lru.get('a') self.check_result(res, short('a'), 29, 3) def test_all_misses(self): for i, c in enumerate(string.ascii_lowercase + string.ascii_uppercase): res = self.lru.get(c) self.check_result(res, short(c), 0, i + 1) def test_get_exception(self): def fail_miss_fn(k): raise RuntimeError("oh noes") self.lru.miss_fn = fail_miss_fn got_exc = False try: self.lru.get('abc') except RuntimeError: got_exc = True self.assertEqual(got_exc, True) def test_all_hits(self): res = self.lru.get('a') self.check_result(res, short('a'), 0, 1) self.lru.miss_fn = long for i in range(100): res = self.lru.get('a') self.check_result(res, short('a'), i + 1, 1) def test_weakrefs(self): res_a = self.lru.get('a') self.check_result(res_a, short('a')) # note that res_a keeps a reference to this value res_b = self.lru.get('b') self.check_result(res_b, short('b')) del res_b # discard reference to b # blow out the cache and the queue self.lru.miss_fn = long for c in (string.ascii_lowercase[2:] * 5): self.lru.get(c) # and fetch a again, expecting the cached value res = self.lru.get('a') self.check_result(res, res_a, exp_refhits=1) # but 'b' should give us a new value res = self.lru.get('b') self.check_result(res, long('b'), exp_refhits=1) def test_fuzz(self): chars = list(string.ascii_lowercase * 40) random.shuffle(chars) for i, c in enumerate(chars): res = self.lru.get(c) self.check_result(res, short(c)) def test_set_max_size(self): # load up the cache with three items for c in 'abc': res = self.lru.get(c) self.check_result(res, short(c)) del(res) # reset the size to 1 self.lru.set_max_size(1) gc.collect() # and then expect that 'b' is no longer in the cache 
self.lru.miss_fn = long res = self.lru.get('b') self.check_result(res, long('b')) def test_miss_fn_kwargs(self): def keep_kwargs_miss_fn(k, **kwargs): return set(kwargs.keys()) self.lru.miss_fn = keep_kwargs_miss_fn val = self.lru.get('a', a=1, b=2) self.check_result(val, set(['a', 'b']), 0, 1) def test_miss_fn_returns_none(self): calls = [] def none_miss_fn(k): calls.append(k) return None self.lru.miss_fn = none_miss_fn for i in range(2): self.assertEqual(self.lru.get('a'), None) # check that the miss_fn was called twice self.assertEqual(calls, ['a', 'a']) def test_put(self): self.assertEqual(self.lru.get('p'), short('p')) self.lru.put('p', set(['P2P2'])) self.assertEqual(self.lru.get('p'), set(['P2P2'])) def test_put_nonexistent_key(self): self.assertEqual(self.lru.get('p'), short('p')) self.lru.put('q', set(['new-q'])) self.assertEqual(self.lru.get('p'), set(['PPP'])) self.assertEqual(self.lru.get('q'), set(['new-q'])) # updated class AsyncLRUCacheTest(unittest.TestCase): def setUp(self): lru.inv_failed = False self.lru = lru.AsyncLRUCache(self.short_miss_fn, 3) def tearDown(self): self.assertFalse(lru.inv_failed, "invariant failed; see logs") def short_miss_fn(self, key): return defer.succeed(short(key)) def long_miss_fn(self, key): return defer.succeed(long(key)) def failure_miss_fn(self, key): return defer.succeed(None) def check_result(self, r, exp, exp_hits=None, exp_misses=None, exp_refhits=None): self.assertEqual(r, exp) if exp_hits is not None: self.assertEqual(self.lru.hits, exp_hits) if exp_misses is not None: self.assertEqual(self.lru.misses, exp_misses) if exp_refhits is not None: self.assertEqual(self.lru.refhits, exp_refhits) # tests @defer.inlineCallbacks def test_single_key(self): # just get an item res = yield self.lru.get('a') self.check_result(res, short('a'), 0, 1) # second time, it should be cached.. 
self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('a') self.check_result(res, short('a'), 1, 1) @defer.inlineCallbacks def test_simple_lru_expulsion(self): res = yield self.lru.get('a') self.check_result(res, short('a'), 0, 1) res = yield self.lru.get('b') self.check_result(res, short('b'), 0, 2) res = yield self.lru.get('c') self.check_result(res, short('c'), 0, 3) res = yield self.lru.get('d') self.check_result(res, short('d'), 0, 4) gc.collect() # now try 'a' again - it should be a miss self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('a') self.check_result(res, long('a'), 0, 5) # ..and that expelled B, but C is still in the cache res = yield self.lru.get('c') self.check_result(res, short('c'), 1, 5) @defer.inlineCallbacks def test_simple_lru_expulsion_maxsize_1(self): self.lru = lru.AsyncLRUCache(self.short_miss_fn, 1) res = yield self.lru.get('a') self.check_result(res, short('a'), 0, 1) res = yield self.lru.get('a') self.check_result(res, short('a'), 1, 1) res = yield self.lru.get('b') self.check_result(res, short('b'), 1, 2) gc.collect() # now try 'a' again - it should be a miss self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('a') self.check_result(res, long('a'), 1, 3) gc.collect() # ..and that expelled B res = yield self.lru.get('b') self.check_result(res, long('b'), 1, 4) @defer.inlineCallbacks def test_simple_lru_expulsion_maxsize_1_null_result(self): # a regression test for #2011 def miss_fn(k): if k == 'b': return defer.succeed(None) return defer.succeed(short(k)) self.lru = lru.AsyncLRUCache(miss_fn, 1) res = yield self.lru.get('a') self.check_result(res, short('a'), 0, 1) res = yield self.lru.get('b') self.check_result(res, None, 0, 2) # 'a' was not expelled since 'b' was None self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('a') self.check_result(res, short('a'), 1, 2) @defer.inlineCallbacks def test_queue_collapsing(self): # just to check that we're practicing with the right queue size (so # 
QUEUE_SIZE_FACTOR is 10) self.assertEqual(self.lru.max_queue, 30) for c in 'a' + 'x' * 27 + 'ab': res = yield self.lru.get(c) self.check_result(res, short('b'), 27, 3) # at this point, we should have 'x', 'a', and 'b' in the cache, and # 'axx..xxab' in the queue. self.assertEqual(len(self.lru.queue), 30) # This 'get' operation for an existing key should cause compaction res = yield self.lru.get('b') self.check_result(res, short('b'), 28, 3) self.assertEqual(len(self.lru.queue), 3) # expect a cached short('a') self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('a') self.check_result(res, short('a'), 29, 3) @defer.inlineCallbacks def test_all_misses(self): for i, c in enumerate(string.ascii_lowercase + string.ascii_uppercase): res = yield self.lru.get(c) self.check_result(res, short(c), 0, i + 1) @defer.inlineCallbacks def test_get_exception(self): def fail_miss_fn(k): return defer.fail(RuntimeError("oh noes")) self.lru.miss_fn = fail_miss_fn got_exc = False try: yield self.lru.get('abc') except RuntimeError: got_exc = True self.assertEqual(got_exc, True) @defer.inlineCallbacks def test_all_hits(self): res = yield self.lru.get('a') self.check_result(res, short('a'), 0, 1) self.lru.miss_fn = self.long_miss_fn for i in range(100): res = yield self.lru.get('a') self.check_result(res, short('a'), i + 1, 1) @defer.inlineCallbacks def test_weakrefs(self): res_a = yield self.lru.get('a') self.check_result(res_a, short('a')) # note that res_a keeps a reference to this value res_b = yield self.lru.get('b') self.check_result(res_b, short('b')) del res_b # discard reference to b # blow out the cache and the queue self.lru.miss_fn = self.long_miss_fn for c in (string.ascii_lowercase[2:] * 5): yield self.lru.get(c) # and fetch a again, expecting the cached value res = yield self.lru.get('a') self.check_result(res, res_a, exp_refhits=1) # but 'b' should give us a new value res = yield self.lru.get('b') self.check_result(res, long('b'), exp_refhits=1) 
@defer.inlineCallbacks def test_fuzz(self): chars = list(string.ascii_lowercase * 40) random.shuffle(chars) for i, c in enumerate(chars): res = yield self.lru.get(c) self.check_result(res, short(c)) @defer.inlineCallbacks def test_massively_parallel(self): chars = list(string.ascii_lowercase * 5) misses = [0] def slow_short_miss_fn(key): d = defer.Deferred() misses[0] += 1 reactor.callLater(0, lambda: d.callback(short(key))) return d self.lru.miss_fn = slow_short_miss_fn def check(c, d): d.addCallback(self.check_result, short(c)) return d yield defer.gatherResults([ check(c, self.lru.get(c)) for c in chars]) self.assertEqual(misses[0], 26) self.assertEqual(self.lru.misses, 26) self.assertEqual(self.lru.hits, 4 * 26) @defer.inlineCallbacks def test_slow_fetch(self): def slower_miss_fn(k): d = defer.Deferred() reactor.callLater(0.05, lambda: d.callback(short(k))) return d self.lru.miss_fn = slower_miss_fn def do_get(test_d, k): d = self.lru.get(k) d.addCallback(self.check_result, short(k)) d.addCallbacks(test_d.callback, test_d.errback) ds = [] for i in range(8): d = defer.Deferred() reactor.callLater(0.02 * i, do_get, d, 'x') ds.append(d) yield defer.gatherResults(ds) self.assertEqual((self.lru.hits, self.lru.misses), (7, 1)) def test_slow_failure(self): def slow_fail_miss_fn(k): d = defer.Deferred() reactor.callLater(0.05, lambda: d.errback(failure.Failure(RuntimeError("oh noes")))) return d self.lru.miss_fn = slow_fail_miss_fn @defer.inlineCallbacks def do_get(test_d, k): d = self.lru.get(k) yield self.assertFailure(d, RuntimeError) d.addCallbacks(test_d.callback, test_d.errback) ds = [] for i in range(8): d = defer.Deferred() reactor.callLater(0.02 * i, do_get, d, 'x') ds.append(d) d = defer.gatherResults(ds) return d @defer.inlineCallbacks def test_set_max_size(self): # load up the cache with three items for c in 'abc': res = yield self.lru.get(c) self.check_result(res, short(c)) # reset the size to 1 self.lru.set_max_size(1) gc.collect() # and then expect that 
'b' is no longer in the cache self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('b') self.check_result(res, long('b')) @defer.inlineCallbacks def test_miss_fn_kwargs(self): def keep_kwargs_miss_fn(k, **kwargs): return defer.succeed(set(kwargs.keys())) self.lru.miss_fn = keep_kwargs_miss_fn res = yield self.lru.get('a', a=1, b=2) self.check_result(res, set(['a', 'b']), 0, 1) @defer.inlineCallbacks def test_miss_fn_returns_none(self): calls = [] def none_miss_fn(k): calls.append(k) return defer.succeed(None) self.lru.miss_fn = none_miss_fn for i in range(2): self.assertEqual((yield self.lru.get('a')), None) # check that the miss_fn was called twice self.assertEqual(calls, ['a', 'a']) @defer.inlineCallbacks def test_put(self): self.assertEqual((yield self.lru.get('p')), short('p')) self.lru.put('p', set(['P2P2'])) self.assertEqual((yield self.lru.get('p')), set(['P2P2'])) buildbot-3.4.0/master/buildbot/test/unit/util/test_maildir.py000066400000000000000000000057251413250514000243720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util import dirs from buildbot.util import maildir class TestMaildirService(dirs.DirsMixin, unittest.TestCase): def setUp(self): self.maildir = os.path.abspath("maildir") self.newdir = os.path.join(self.maildir, "new") self.curdir = os.path.join(self.maildir, "cur") self.tmpdir = os.path.join(self.maildir, "tmp") self.setUpDirs(self.maildir, self.newdir, self.curdir, self.tmpdir) self.svc = None def tearDown(self): if self.svc and self.svc.running: self.svc.stopService() self.tearDownDirs() # tests @defer.inlineCallbacks def test_start_stop_repeatedly(self): self.svc = maildir.MaildirService(self.maildir) self.svc.startService() yield self.svc.stopService() self.svc.startService() yield self.svc.stopService() self.assertEqual(len(list(self.svc)), 0) @defer.inlineCallbacks def test_messageReceived(self): self.svc = maildir.MaildirService(self.maildir) # add a fake messageReceived method messagesReceived = [] def messageReceived(filename): messagesReceived.append(filename) return defer.succeed(None) self.svc.messageReceived = messageReceived yield self.svc.startService() self.assertEqual(messagesReceived, []) tmpfile = os.path.join(self.tmpdir, "newmsg") newfile = os.path.join(self.newdir, "newmsg") open(tmpfile, "w").close() os.rename(tmpfile, newfile) # TODO: can we wait for a dnotify somehow, if enabled? 
yield self.svc.poll() self.assertEqual(messagesReceived, ['newmsg']) def test_moveToCurDir(self): self.svc = maildir.MaildirService(self.maildir) tmpfile = os.path.join(self.tmpdir, "newmsg") newfile = os.path.join(self.newdir, "newmsg") open(tmpfile, "w").close() os.rename(tmpfile, newfile) f = self.svc.moveToCurDir("newmsg") f.close() self.assertEqual([os.path.exists(os.path.join(d, "newmsg")) for d in (self.newdir, self.curdir, self.tmpdir)], [False, True, False]) buildbot-3.4.0/master/buildbot/test/unit/util/test_misc.py000066400000000000000000000077461413250514000237110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import util from buildbot.test.util.misc import TestReactorMixin from buildbot.util import misc class deferredLocked(unittest.TestCase): def test_name(self): self.assertEqual(util.deferredLocked, misc.deferredLocked) @defer.inlineCallbacks def test_fn(self): lock = defer.DeferredLock() @util.deferredLocked(lock) def check_locked(arg1, arg2): self.assertEqual([lock.locked, arg1, arg2], [True, 1, 2]) return defer.succeed(None) yield check_locked(1, 2) self.assertFalse(lock.locked) @defer.inlineCallbacks def test_fn_fails(self): lock = defer.DeferredLock() @util.deferredLocked(lock) def do_fail(): return defer.fail(RuntimeError("oh noes")) try: yield do_fail() self.fail("didn't errback") except Exception: self.assertFalse(lock.locked) @defer.inlineCallbacks def test_fn_exception(self): lock = defer.DeferredLock() @util.deferredLocked(lock) def do_fail(): raise RuntimeError("oh noes") # using decorators confuses pylint and gives a false positive below try: yield do_fail() # pylint: disable=assignment-from-no-return self.fail("didn't errback") except Exception: self.assertFalse(lock.locked) @defer.inlineCallbacks def test_method(self): testcase = self class C: @util.deferredLocked('aLock') def check_locked(self, arg1, arg2): testcase.assertEqual( [self.aLock.locked, arg1, arg2], [True, 1, 2]) return defer.succeed(None) obj = C() obj.aLock = defer.DeferredLock() yield obj.check_locked(1, 2) self.assertFalse(obj.aLock.locked) class TestCancelAfter(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.d = defer.Deferred() def test_succeeds(self): d = misc.cancelAfter(10, self.d, self.reactor) self.assertIdentical(d, self.d) @d.addCallback def check(r): self.assertEqual(r, "result") self.assertFalse(d.called) self.d.callback("result") self.assertTrue(d.called) @defer.inlineCallbacks def test_fails(self): d = 
misc.cancelAfter(10, self.d, self.reactor) self.assertFalse(d.called) self.d.errback(RuntimeError("oh noes")) self.assertTrue(d.called) yield self.assertFailure(d, RuntimeError) @defer.inlineCallbacks def test_timeout_succeeds(self): d = misc.cancelAfter(10, self.d, self.reactor) self.assertFalse(d.called) self.reactor.advance(11) d.callback("result") # ignored self.assertTrue(d.called) yield self.assertFailure(d, defer.CancelledError) @defer.inlineCallbacks def test_timeout_fails(self): d = misc.cancelAfter(10, self.d, self.reactor) self.assertFalse(d.called) self.reactor.advance(11) self.d.errback(RuntimeError("oh noes")) # ignored self.assertTrue(d.called) yield self.assertFailure(d, defer.CancelledError) buildbot-3.4.0/master/buildbot/test/unit/util/test_netstrings.py000066400000000000000000000032541413250514000251440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.protocols import basic from twisted.trial import unittest from buildbot.util import netstrings class NetstringParser(unittest.TestCase): def test_valid_netstrings(self): p = netstrings.NetstringParser() p.feed("5:hello,5:world,") self.assertEqual(p.strings, [b'hello', b'world']) def test_valid_netstrings_byte_by_byte(self): # (this is really testing twisted's support, but oh well) p = netstrings.NetstringParser() [p.feed(c) for c in "5:hello,5:world,"] self.assertEqual(p.strings, [b'hello', b'world']) def test_invalid_netstring(self): p = netstrings.NetstringParser() with self.assertRaises(basic.NetstringParseError): p.feed("5-hello!") def test_incomplete_netstring(self): p = netstrings.NetstringParser() p.feed("11:hello world,6:foob") # note that the incomplete 'foobar' does not appear here self.assertEqual(p.strings, [b'hello world']) buildbot-3.4.0/master/buildbot/test/unit/util/test_notifier.py000066400000000000000000000071131413250514000245610ustar00rootroot00000000000000# Copyright Buildbot Team Members # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. from twisted.internet import defer from twisted.python.failure import Failure from twisted.trial import unittest from buildbot.util import Notifier class TestException(Exception): """ An exception thrown in tests. """ class Tests(unittest.TestCase): def test_wait(self): """ Calling `Notifier.wait` returns a deferred that hasn't fired. """ n = Notifier() self.assertNoResult(n.wait()) def test_notify_no_waiters(self): """ Calling `Notifier.notify` when there are no waiters does not raise. """ n = Notifier() n.notify(object()) # Does not raise. @defer.inlineCallbacks def test_notify_multiple_waiters(self): """ If there all multiple waiters, `Notifier.notify` fires all the deferreds with the same value. """ value = object() n = Notifier() ds = [n.wait(), n.wait()] n.notify(value) self.assertEqual((yield ds[0]), value) self.assertEqual((yield ds[1]), value) @defer.inlineCallbacks def test_new_waiters_not_notified(self): """ If a new waiter is added while notifying, it won't be notified until the next notification. """ value = object() n = Notifier() box = [] def add_new_waiter(_): box.append(n.wait()) n.wait().addCallback(add_new_waiter) n.notify(object()) self.assertNoResult(box[0]) n.notify(value) self.assertEqual( (yield box[0]), value, ) @defer.inlineCallbacks def test_notify_failure(self): """ If a failure is passed to `Notifier.notify` then the waiters are errback'd. """ n = Notifier() d = n.wait() n.notify(Failure(TestException())) with self.assertRaises(TestException): yield d def test_nonzero_waiters(self): """ If there are waiters, ``Notifier`` evaluates as `True`. 
""" n = Notifier() n.wait() self.assertTrue(n) def test_nonzero_no_waiters(self): """ If there no waiters, ``Notifier`` evaluates as `False`. """ n = Notifier() self.assertFalse(n) def test_nonzero_cleared_waiters(self): """ After notifying waiters, ``Notifier`` evaluates as `False`. """ n = Notifier() n.wait() n.notify(object()) self.assertFalse(n) buildbot-3.4.0/master/buildbot/test/unit/util/test_patch_delay.py000066400000000000000000000060711413250514000252210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial.unittest import SynchronousTestCase from buildbot.test.util.patch_delay import patchForDelay class TestException(Exception): pass def fun_to_patch(*args, **kwargs): return defer.succeed((args, kwargs)) def fun_to_patch_exception(): raise TestException() non_callable = 1 class Tests(SynchronousTestCase): def test_raises_not_found(self): with self.assertRaises(Exception): with patchForDelay(__name__ + '.notfound'): pass def test_raises_not_callable(self): with self.assertRaises(Exception): with patchForDelay(__name__ + '.non_callable'): pass def test_patches_within_context(self): d = fun_to_patch() self.assertTrue(d.called) with patchForDelay(__name__ + '.fun_to_patch') as delay: d = fun_to_patch() self.assertEqual(len(delay), 1) self.assertFalse(d.called) delay.fire() self.assertEqual(len(delay), 0) self.assertTrue(d.called) d = fun_to_patch() self.assertTrue(d.called) def test_auto_fires_unfired_delay(self): with patchForDelay(__name__ + '.fun_to_patch') as delay: d = fun_to_patch() self.assertEqual(len(delay), 1) self.assertFalse(d.called) self.assertTrue(d.called) def test_auto_fires_unfired_delay_exception(self): try: with patchForDelay(__name__ + '.fun_to_patch') as delay: d = fun_to_patch() self.assertEqual(len(delay), 1) self.assertFalse(d.called) raise TestException() except TestException: pass self.assertTrue(d.called) def test_passes_arguments(self): with patchForDelay(__name__ + '.fun_to_patch') as delay: d = fun_to_patch('arg', kw='kwarg') self.assertEqual(len(delay), 1) delay.fire() args = self.successResultOf(d) self.assertEqual(args, (('arg',), {'kw': 'kwarg'})) def test_passes_exception(self): with patchForDelay(__name__ + '.fun_to_patch_exception') as delay: d = fun_to_patch_exception() self.assertEqual(len(delay), 1) delay.fire() f = self.failureResultOf(d) f.check(TestException) 
buildbot-3.4.0/master/buildbot/test/unit/util/test_pathmatch.py000066400000000000000000000065141413250514000247170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.util import pathmatch class Matcher(unittest.TestCase): def setUp(self): self.m = pathmatch.Matcher() def test_dupe_path(self): def set(): self.m[('abc,')] = 1 set() with self.assertRaises(AssertionError): set() def test_empty(self): with self.assertRaises(KeyError): self.m[('abc',)] def test_diff_length(self): self.m[('abc', 'def')] = 2 self.m[('ab', 'cd', 'ef')] = 3 self.assertEqual(self.m[('abc', 'def')], (2, {})) def test_same_length(self): self.m[('abc', 'def')] = 2 self.m[('abc', 'efg')] = 3 self.assertEqual(self.m[('abc', 'efg')], (3, {})) def test_pattern_variables(self): self.m[('A', ':a', 'B', ':b')] = 'AB' self.assertEqual(self.m[('A', 'a', 'B', 'b')], ('AB', dict(a='a', b='b'))) def test_pattern_variables_underscore(self): self.m[('A', ':a_a_a')] = 'AB' self.assertEqual(self.m[('A', 'a')], ('AB', dict(a_a_a='a'))) def test_pattern_variables_num(self): self.m[('A', 'n:a', 'B', 'n:b')] = 'AB' self.assertEqual(self.m[('A', '10', 'B', '-20')], ('AB', dict(a=10, b=-20))) def test_pattern_variables_ident(self): self.m[('A', 'i:a', 'B', 'i:b')] = 'AB' self.assertEqual(self.m[('A', 
'abc', 'B', 'x-z-B')], ('AB', dict(a='abc', b='x-z-B'))) def test_pattern_variables_num_invalid(self): self.m[('A', 'n:a')] = 'AB' with self.assertRaises(KeyError): self.m[('A', '1x0')] def test_pattern_variables_ident_invalid(self): self.m[('A', 'i:a')] = 'AB' with self.assertRaises(KeyError): self.m[('A', '10')] def test_pattern_variables_ident_num_distinguised(self): self.m[('A', 'n:a')] = 'num' self.m[('A', 'i:a')] = 'ident' self.assertEqual(self.m[('A', '123')], ('num', dict(a=123))) self.assertEqual(self.m[('A', 'abc')], ('ident', dict(a='abc'))) def test_prefix_matching(self): self.m[('A', ':a')] = 'A' self.m[('A', ':a', 'B', ':b')] = 'AB' self.assertEqual( (self.m[('A', 'a1', 'B', 'b')], self.m['A', 'a2']), (('AB', dict(a='a1', b='b')), ('A', dict(a='a2')))) def test_dirty_again(self): self.m[('abc', 'def')] = 2 self.assertEqual(self.m[('abc', 'def')], (2, {})) self.m[('abc', 'efg')] = 3 self.assertEqual(self.m[('abc', 'def')], (2, {})) self.assertEqual(self.m[('abc', 'efg')], (3, {})) buildbot-3.4.0/master/buildbot/test/unit/util/test_poll.py000066400000000000000000000374211413250514000237150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from parameterized import parameterized import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util.misc import TestReactorMixin from buildbot.util import poll class TestPollerSync(TestReactorMixin, unittest.TestCase): @poll.method def poll(self): self.calls += 1 if self.fail_after_running: raise RuntimeError('oh noes') def setUp(self): self.setUpTestReactor() self.master = mock.Mock() self.master.reactor = self.reactor poll.track_poll_methods() self.calls = 0 self.fail_after_running = False def tearDown(self): poll.reset_poll_methods() self.assertEqual(self.reactor.getDelayedCalls(), []) def test_call_not_started_does_nothing(self): self.reactor.advance(100) self.assertEqual(self.calls, 0) def test_call_when_stopped_does_nothing(self): self.poll() self.assertEqual(self.calls, 0) @defer.inlineCallbacks def test_call_when_started_forces_run(self): self.poll.start(interval=100, now=False) self.poll() self.reactor.advance(0) self.assertEqual(self.calls, 1) yield self.poll.stop() @defer.inlineCallbacks def test_start_with_now_forces_run_immediately(self): self.poll.start(interval=10, now=True) self.reactor.advance(0) self.assertEqual(self.calls, 1) yield self.poll.stop() @defer.inlineCallbacks def test_start_with_now_false_does_not_run(self): self.poll.start(interval=10, now=False) self.assertEqual(self.calls, 0) yield self.poll.stop() def test_stop_on_stopped_does_nothing(self): self.poll.start(interval=1) d = self.poll.stop() self.assertTrue(d.called) d = self.poll.stop() self.assertTrue(d.called) @defer.inlineCallbacks def test_start_twice_error(self): self.poll.start(interval=1) with self.assertRaises(Exception): self.poll.start(interval=2) yield self.poll.stop() def test_repeats_and_stops(self): """Polling repeats until stopped, and stop returns a Deferred""" self.poll.start(interval=10, now=True) self.reactor.advance(0) while self.reactor.seconds() <= 200: 
self.assertEqual(self.calls, (self.reactor.seconds() // 10) + 1) self.reactor.advance(1) d = self.poll.stop() self.assertTrue(d.called) self.assertEqual(self.calls, 21) self.reactor.advance(10) self.assertEqual(self.calls, 21) @defer.inlineCallbacks def test_fail_reschedules_and_logs_exceptions(self): self.fail_after_running = True self.poll.start(interval=1, now=True) self.reactor.advance(0) self.assertEqual(self.calls, 1) self.reactor.advance(1) self.assertEqual(self.calls, 2) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 2) yield self.poll.stop() @parameterized.expand([ ('shorter_than_interval_now_True', 5, True), ('longer_than_interval_now_True', 15, True), ('shorter_than_interval_now_False', 5, False), ('longer_than_interval_now_False', 15, False), ]) @defer.inlineCallbacks def test_run_with_random_delay(self, name, random_delay_max, now): interval = 10 with mock.patch("buildbot.util.poll.randint", return_value=random_delay_max): self.poll.start(interval=interval, now=now, random_delay_max=random_delay_max) self.reactor.advance(0) if not now: i = 0 while i < interval: self.assertEqual(self.calls, 0) self.reactor.advance(1) i += 1 i = 0 while i < random_delay_max: self.assertEqual(self.calls, 0) self.reactor.advance(1) i += 1 self.assertEqual(self.calls, 1) yield self.poll.stop() @parameterized.expand([ ('now_True', True), ('now_False', False), ]) @defer.inlineCallbacks def test_run_with_random_delay_zero_interval_still_delays(self, name, now): random_delay_max = 5 with mock.patch("buildbot.util.poll.randint", return_value=random_delay_max): self.poll.start(interval=0, now=now, random_delay_max=random_delay_max) self.reactor.advance(0) self.assertEqual(self.calls, 0) i = 0 while i < random_delay_max: self.assertEqual(self.calls, 0) self.reactor.advance(1) i += 1 self.assertEqual(self.calls, 1) yield self.poll.stop() @defer.inlineCallbacks def test_run_with_random_delay_stops_immediately_during_delay_phase(self): random_delay_max = 5 with 
mock.patch("buildbot.util.poll.randint", return_value=random_delay_max): self.poll.start(interval=10, now=True, random_delay_max=random_delay_max) self.reactor.advance(1) self.assertEqual(self.calls, 0) yield self.poll.stop() class TestPollerAsync(TestReactorMixin, unittest.TestCase): @poll.method @defer.inlineCallbacks def poll(self): assert not self.running, "overlapping call" self.running = True d = defer.Deferred() self.reactor.callLater(self.duration, d.callback, None) yield d self.calls += 1 self.running = False if self.fail_after_running: raise RuntimeError('oh noes') def setUp(self): self.setUpTestReactor() self.master = mock.Mock() self.master.reactor = self.reactor poll.track_poll_methods() self.calls = 0 self.running = False self.duration = 1 self.fail_after_running = False def tearDown(self): poll.reset_poll_methods() @defer.inlineCallbacks def test_call_when_started_forces_run(self): self.poll.start(interval=10, now=True) self.reactor.advance(0) self.assertEqual(self.calls, 0) self.assertTrue(self.running) self.reactor.advance(self.duration) self.assertEqual(self.calls, 1) self.assertFalse(self.running) yield self.poll.stop() def test_repeats_and_stops(self): """ Polling repeats until stopped, and stop returns a Deferred. The duration of the function's execution does not affect the execution interval: executions occur every 10 seconds. 
""" self.poll.start(interval=10, now=True) self.reactor.advance(0) while self.reactor.seconds() <= 200: self.assertEqual(self.calls, (self.reactor.seconds() + 9) // 10) self.assertEqual(self.running, self.reactor.seconds() % 10 == 0) self.reactor.advance(1) d = self.poll.stop() self.assertTrue(d.called) self.assertEqual(self.calls, 21) self.reactor.advance(10) self.assertEqual(self.calls, 21) @parameterized.expand([ ('now_True', True), ('now_False', False), ]) @defer.inlineCallbacks def test_zero_interval_starts_immediately(self, name, now): self.poll.start(interval=0, now=now) self.reactor.advance(0) self.assertEqual(self.calls, 0) self.assertTrue(self.running) self.reactor.advance(1) self.assertEqual(self.calls, 1) self.assertTrue(self.running) self.reactor.pump([1] * 10) self.assertEqual(self.calls, 11) self.assertTrue(self.running) d = self.poll.stop() self.assertTrue(self.running) self.reactor.advance(1) self.assertFalse(self.running) yield d @defer.inlineCallbacks def test_fail_reschedules_and_logs_exceptions(self): self.fail_after_running = True self.poll.start(interval=10, now=True) self.reactor.advance(0) self.assertTrue(self.running) self.reactor.advance(1) self.assertEqual(self.calls, 1) self.reactor.advance(10) self.assertTrue(self.running) self.reactor.advance(1) self.assertEqual(self.calls, 2) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 2) yield self.poll.stop() def test_stop_while_running_waits_for_completion(self): self.duration = 2 self.poll.start(interval=10) self.reactor.advance(0) self.assertFalse(self.running) self.reactor.advance(10) self.assertTrue(self.running) d = self.poll.stop() self.assertFalse(d.called) # not stopped yet self.reactor.advance(1) self.assertFalse(d.called) self.reactor.advance(1) self.assertTrue(d.called) def test_call_while_waiting_schedules_immediately(self): self.poll.start(interval=10) self.reactor.advance(0) self.reactor.advance(5) self.poll() self.reactor.advance(0) self.assertTrue(self.running) 
self.reactor.advance(1) self.assertEqual(self.calls, 1) self.assertFalse(self.running) self.reactor.advance(4) self.assertTrue(self.running) self.reactor.advance(1) self.assertEqual(self.calls, 2) def test_call_while_running_reschedules_immediately_after(self): self.duration = 5 self.poll.start(interval=10, now=True) self.reactor.advance(0) self.assertTrue(self.running) self.reactor.advance(3) self.poll() self.reactor.advance(2) self.assertEqual(self.calls, 1) self.reactor.advance(5) self.assertEqual(self.calls, 2) def test_call_while_running_then_stop(self): """Calling the poll method while the decorated method is running, then calling stop will not wait for both invocations to complete.""" self.duration = 5 self.poll.start(interval=10, now=True) self.reactor.advance(0) self.assertTrue(self.running) self.reactor.advance(3) self.assertTrue(self.running) self.poll() d = self.poll.stop() self.reactor.advance(2) self.assertEqual(self.calls, 1) self.assertTrue(d.called) self.reactor.advance(5) self.assertEqual(self.calls, 1) def test_stop_twice_while_running(self): """If stop is called *twice* while the poll function is running, then neither Deferred fires until the run is complete.""" self.duration = 2 self.poll.start(interval=10) self.reactor.advance(0) self.assertFalse(self.running) self.reactor.advance(10) self.assertTrue(self.running) d1 = self.poll.stop() self.assertFalse(d1.called) # not stopped yet self.reactor.advance(1) d2 = self.poll.stop() self.assertFalse(d2.called) self.reactor.advance(1) self.assertTrue(d1.called) self.assertTrue(d2.called) @defer.inlineCallbacks def test_stop_and_restart(self): """If the method is immediately restarted from a callback on a stop Deferred, the polling continues with the new start time.""" self.duration = 6 self.poll.start(interval=10) self.reactor.advance(0) self.assertFalse(self.running) self.reactor.advance(10) self.assertTrue(self.running) d = self.poll.stop() self.assertFalse(d.called) # not stopped yet 
self.reactor.advance(6) self.assertFalse(self.running) self.assertTrue(d.called) yield d self.poll.start(interval=10) self.reactor.advance(10) self.assertEqual(self.reactor.seconds(), 26) self.assertTrue(self.running) self.reactor.advance(6) yield self.poll.stop() def test_method_longer_than_interval_invoked_at_interval_multiples(self): self.duration = 4 self.poll.start(interval=3, now=True) self.reactor.advance(0) exp = [ (0, True, 0), (1, True, 0), (2, True, 0), (3, True, 0), (4, False, 1), (5, False, 1), (6, True, 1), # next multiple of 3 (10, False, 2), (12, True, 2), (16, False, 3), ] for secs, running, calls in exp: while self.reactor.seconds() < secs: self.reactor.advance(1) self.assertEqual(self.running, running) self.assertEqual(self.calls, calls) @parameterized.expand([ ('shorter_than_interval_now_True', 5, True), ('longer_than_interval_now_True', 15, True), ('shorter_than_interval_now_False', 5, False), ('longer_than_interval_now_False', 15, False), ]) @defer.inlineCallbacks def test_run_with_random_delay(self, name, random_delay_max, now): interval = 10 with mock.patch("buildbot.util.poll.randint", return_value=random_delay_max): self.poll.start(interval=interval, now=now, random_delay_max=random_delay_max) self.reactor.advance(0) if not now: i = 0 while i < interval: self.assertFalse(self.running) self.assertEqual(self.calls, 0) self.reactor.advance(1) i += 1 i = 0 while i < random_delay_max: self.assertFalse(self.running) self.assertEqual(self.calls, 0) self.reactor.advance(1) i += 1 self.assertEqual(self.calls, 0) self.assertTrue(self.running) self.reactor.advance(self.duration) self.assertEqual(self.calls, 1) self.assertFalse(self.running) yield self.poll.stop() @parameterized.expand([ ('now_True', True), ('now_False', False), ]) @defer.inlineCallbacks def test_run_with_random_delay_zero_interval_still_delays(self, name, now): random_delay_max = 5 with mock.patch("buildbot.util.poll.randint", return_value=random_delay_max): 
self.poll.start(interval=0, now=now, random_delay_max=random_delay_max) self.reactor.advance(0) self.assertFalse(self.running) self.assertEqual(self.calls, 0) i = 0 while i < random_delay_max: self.assertFalse(self.running) self.assertEqual(self.calls, 0) self.reactor.advance(1) i += 1 self.assertTrue(self.running) self.reactor.advance(1) self.assertEqual(self.calls, 1) self.assertFalse(self.running) yield self.poll.stop() @defer.inlineCallbacks def test_run_with_random_delay_stops_immediately_during_delay_phase(self): random_delay_max = 5 with mock.patch("buildbot.util.poll.randint", return_value=random_delay_max): self.poll.start(interval=10, now=True, random_delay_max=random_delay_max) self.reactor.advance(1) self.assertFalse(self.running) self.assertEqual(self.calls, 0) yield self.poll.stop() buildbot-3.4.0/master/buildbot/test/unit/util/test_private_tempdir.py000066400000000000000000000035451413250514000261450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import shutil import tempfile from twisted.trial import unittest from buildbot.test.util.decorators import skipUnlessPlatformIs from buildbot.util.private_tempdir import PrivateTemporaryDirectory class TestTemporaryDirectory(unittest.TestCase): # In this test we want to also check potential platform differences, so # we don't mock the filesystem access def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) def test_simple(self): with PrivateTemporaryDirectory(dir=self.tempdir) as dir: self.assertTrue(os.path.isdir(dir)) self.assertFalse(os.path.isdir(dir)) @skipUnlessPlatformIs('posix') def test_mode(self): with PrivateTemporaryDirectory(dir=self.tempdir, mode=0o700) as dir: self.assertEqual(0o40700, os.stat(dir).st_mode) def test_cleanup(self): ctx = PrivateTemporaryDirectory(dir=self.tempdir) self.assertTrue(os.path.isdir(ctx.name)) ctx.cleanup() self.assertFalse(os.path.isdir(ctx.name)) ctx.cleanup() # also check whether multiple calls don't throw ctx.cleanup() buildbot-3.4.0/master/buildbot/test/unit/util/test_protocol.py000066400000000000000000000034271413250514000246070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.util.protocol import LineProcessProtocol class FakeLineProcessProtocol(LineProcessProtocol): def __init__(self): super().__init__() self.out_lines = [] self.err_lines = [] def outLineReceived(self, line): self.out_lines.append(line) def errLineReceived(self, line): self.err_lines.append(line) class TestLineProcessProtocol(unittest.TestCase): def test_stdout(self): p = FakeLineProcessProtocol() p.outReceived(b'\nline2\nline3\nli') p.outReceived(b'ne4\nli') self.assertEqual(p.out_lines, [b'', b'line2', b'line3', b'line4']) p.processEnded(0) self.assertEqual(p.out_lines, [b'', b'line2', b'line3', b'line4', b'li']) def test_stderr(self): p = FakeLineProcessProtocol() p.errReceived(b'\nline2\nline3\nli') p.errReceived(b'ne4\nli') self.assertEqual(p.err_lines, [b'', b'line2', b'line3', b'line4']) p.processEnded(0) self.assertEqual(p.err_lines, [b'', b'line2', b'line3', b'line4', b'li']) buildbot-3.4.0/master/buildbot/test/unit/util/test_raml.py000066400000000000000000000063171413250514000237020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import textwrap from twisted.trial import unittest from buildbot.util import raml class TestRaml(unittest.TestCase): def setUp(self): self.api = raml.RamlSpec() def test_api(self): self.assertTrue(self.api.api is not None) def test_endpoints(self): self.assertIn( "/masters/{masterid}/builders/{builderid}/workers/{workerid}", self.api.endpoints.keys()) def test_endpoints_uri_parameters(self): # comparison of OrderedDict do not take in account order :( # this is why we compare str repr, to make sure the endpoints are in # the right order self.assertEqual(str(self.api.endpoints[ "/masters/{masterid}/builders/{builderid}/workers/{workerid}"]['uriParameters']), str(raml.OrderedDict([ ('masterid', raml.OrderedDict([ ('type', 'number'), ('description', 'the id of the master')])), ('builderid', raml.OrderedDict([ ('type', 'number'), ('description', 'the id of the builder')])), ('workerid', raml.OrderedDict([ ('type', 'number'), ('description', 'the id of the worker')]))])) ) def test_types(self): self.assertIn( "log", self.api.types.keys()) def test_json_example(self): self.assertEqual( textwrap.dedent( self.api.format_json(self.api.types["build"]['example'], 0)), textwrap.dedent(""" { "builderid": 10, "buildid": 100, "buildrequestid": 13, "workerid": 20, "complete": false, "complete_at": null, "masterid": 824, "number": 1, "results": null, "started_at": 1451001600, "state_string": "created", "properties": {} }""").strip()) def test_endpoints_by_type(self): self.assertIn( "/masters/{masterid}/builders/{builderid}/workers/{workerid}", self.api.endpoints_by_type['worker'].keys()) def test_iter_actions(self): build = self.api.endpoints_by_type['build'] actions = dict(self.api.iter_actions(build['/builds/{buildid}'])) self.assertEqual(sorted(actions.keys()), sorted(['rebuild', 'stop'])) def test_rawendpoints(self): self.assertIn( "/steps/{stepid}/logs/{log_slug}/raw", self.api.rawendpoints.keys()) 
buildbot-3.4.0/master/buildbot/test/unit/util/test_runprocess.py000066400000000000000000000241641413250514000251520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized import mock from twisted.internet import defer from twisted.python import runtime from twisted.trial import unittest from buildbot.test.util.logging import LoggingMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.util.runprocess import RunProcess # windows returns rc 1, because exit status cannot indicate "signalled"; # posix returns rc -1 for "signalled" FATAL_RC = -1 EXPECTED_PWD = '/workdir' if runtime.platformType == 'win32': FATAL_RC = 1 EXPECTED_PWD = 'C:\\workdir' class TestRunProcess(TestReactorMixin, LoggingMixin, unittest.TestCase): FAKE_PID = 1234 def setUp(self): self.setUpTestReactor() self.setUpLogging() self.process = None self.reactor.spawnProcess = self.fake_spawn_process def fake_spawn_process(self, pp, command, args, env, workdir): self.assertIsNone(self.process) self.pp = pp self.pp.transport = mock.Mock() self.process = mock.Mock() self.process.pid = self.FAKE_PID self.process_spawned_args = (command, args, env, workdir) return self.process def run_process(self, command, override_kill_success=True, override_is_dead=True, **kwargs): self.run_process_obj 
= RunProcess(self.reactor, command, '/workdir', **kwargs) self.run_process_obj.get_os_env = lambda: {'OS_ENV': 'value'} self.run_process_obj.send_signal = mock.Mock(side_effect=lambda sig: override_kill_success) self.run_process_obj.is_dead = mock.Mock(side_effect=lambda: override_is_dead) return self.run_process_obj.start() def end_process(self, signal=None, rc=0): reason = mock.Mock() reason.value.signal = signal reason.value.exitCode = rc self.pp.processEnded(reason) @defer.inlineCallbacks def test_no_output(self): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=False) self.assertEqual(self.process_spawned_args, ('cmd', ['cmd'], {'OS_ENV': 'value', 'PWD': EXPECTED_PWD}, '/workdir')) self.pp.connectionMade() self.assertFalse(d.called) self.end_process() self.assertTrue(d.called) res = yield d self.assertEqual(res, (0, b'')) @defer.inlineCallbacks def test_env_new_kv(self): d = self.run_process(['cmd'], collect_stdout=False, collect_stderr=False, env={'custom': 'custom-value'}) self.assertEqual(self.process_spawned_args, ('cmd', ['cmd'], {'OS_ENV': 'value', 'PWD': EXPECTED_PWD, 'custom': 'custom-value'}, '/workdir')) self.pp.connectionMade() self.end_process() res = yield d self.assertEqual(res, 0) @defer.inlineCallbacks def test_env_overwrite_os_kv(self): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=False, env={'OS_ENV': 'custom-value'}) self.assertEqual(self.process_spawned_args, ('cmd', ['cmd'], {'OS_ENV': 'custom-value', 'PWD': EXPECTED_PWD}, '/workdir')) self.pp.connectionMade() self.end_process() res = yield d self.assertEqual(res, (0, b'')) @defer.inlineCallbacks def test_env_remove_os_kv(self): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=False, env={'OS_ENV': None}) self.assertEqual(self.process_spawned_args, ('cmd', ['cmd'], {'PWD': EXPECTED_PWD}, '/workdir')) self.pp.connectionMade() self.end_process() res = yield d self.assertEqual(res, (0, b'')) @defer.inlineCallbacks def 
test_collect_nothing(self): d = self.run_process(['cmd'], collect_stdout=False, collect_stderr=False) self.pp.connectionMade() self.pp.transport.write.assert_not_called() self.pp.transport.closeStdin.assert_called() self.pp.outReceived(b'stdout_data') self.pp.errReceived(b'stderr_data') self.assertFalse(d.called) self.end_process() self.assertTrue(d.called) res = yield d self.assertEqual(res, 0) @defer.inlineCallbacks def test_collect_stdout_no_stderr(self): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=False) self.pp.connectionMade() self.pp.transport.write.assert_not_called() self.pp.transport.closeStdin.assert_called() self.pp.outReceived(b'stdout_data') self.pp.errReceived(b'stderr_data') self.assertFalse(d.called) self.end_process() self.assertTrue(d.called) res = yield d self.assertEqual(res, (0, b'stdout_data')) @defer.inlineCallbacks def test_collect_stdout_with_stdin(self): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=False, initial_stdin=b'stdin') self.pp.connectionMade() self.pp.transport.write.assert_called_with(b'stdin') self.pp.transport.closeStdin.assert_called() self.pp.outReceived(b'stdout_data') self.pp.errReceived(b'stderr_data') self.end_process() res = yield d self.assertEqual(res, (0, b'stdout_data')) @defer.inlineCallbacks def test_collect_stdout_and_stderr(self): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=True) self.pp.connectionMade() self.pp.transport.write.assert_not_called() self.pp.transport.closeStdin.assert_called() self.pp.outReceived(b'stdout_data') self.pp.errReceived(b'stderr_data') self.end_process() res = yield d self.assertEqual(res, (0, b'stdout_data', b'stderr_data')) @defer.inlineCallbacks def test_process_failed_with_rc(self): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=True) self.pp.connectionMade() self.pp.outReceived(b'stdout_data') self.pp.errReceived(b'stderr_data') self.end_process(rc=1) res = yield d self.assertEqual(res, (1, 
b'stdout_data', b'stderr_data')) @defer.inlineCallbacks def test_process_failed_with_signal(self): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=True) self.pp.connectionMade() self.pp.outReceived(b'stdout_data') self.pp.errReceived(b'stderr_data') self.end_process(signal='SIGILL') res = yield d self.assertEqual(res, (-1, b'stdout_data', b'stderr_data')) @parameterized.expand([ ('too_short_time_no_output', 0, 4.9, False, False, False), ('too_short_time_with_output', 0, 4.9, False, True, True), ('timed_out_no_output', 0, 5.1, True, False, False), ('timed_out_with_output', 0, 5.1, True, True, True), ('stdout_prevented_timeout', 1.0, 4.9, False, True, False), ('stderr_prevented_timeout', 1.0, 4.9, False, False, True), ('timed_out_after_extra_output', 1.0, 5.1, True, True, True), ]) @defer.inlineCallbacks def test_io_timeout(self, name, wait1, wait2, timed_out, had_stdout, had_stderr): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=True, io_timeout=5) self.pp.connectionMade() self.reactor.advance(wait1) if had_stdout: self.pp.outReceived(b'stdout_data') if had_stderr: self.pp.errReceived(b'stderr_data') self.reactor.advance(wait2) self.assertFalse(d.called) self.end_process() self.assertTrue(d.called) if timed_out: self.run_process_obj.send_signal.assert_called_with('TERM') else: self.run_process_obj.send_signal.assert_not_called() res = yield d self.assertEqual(res, (FATAL_RC if timed_out else 0, b'stdout_data' if had_stdout else b'', b'stderr_data' if had_stderr else b'')) @parameterized.expand([ ('too_short_time', 4.9, False), ('timed_out', 5.1, True), ]) @defer.inlineCallbacks def test_runtime_timeout(self, name, wait, timed_out): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=True, runtime_timeout=5) self.pp.connectionMade() self.reactor.advance(wait) self.assertFalse(d.called) self.end_process() self.assertTrue(d.called) if timed_out: self.run_process_obj.send_signal.assert_called_with('TERM') else: 
self.run_process_obj.send_signal.assert_not_called() res = yield d self.assertEqual(res, (FATAL_RC if timed_out else 0, b'', b'')) @defer.inlineCallbacks def test_runtime_timeout_failing_to_kill(self): d = self.run_process(['cmd'], collect_stdout=True, collect_stderr=True, runtime_timeout=5, sigterm_timeout=5, override_is_dead=False) self.pp.connectionMade() self.reactor.advance(5.1) self.run_process_obj.send_signal.assert_called_with('TERM') self.reactor.advance(5.1) self.run_process_obj.send_signal.assert_called_with('KILL') self.reactor.advance(5.1) self.assertTrue(d.called) self.end_process() with self.assertRaises(RuntimeError): yield d self.assertLogged("attempted to kill process, but it wouldn't die") buildbot-3.4.0/master/buildbot/test/unit/util/test_sautils.py000066400000000000000000000016111413250514000244230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.util import sautils class SAVersion(unittest.TestCase): def test_sa_version(self): self.assertTrue(sautils.sa_version() > (0, 5, 0)) buildbot-3.4.0/master/buildbot/test/unit/util/test_service.py000066400000000000000000000665271413250514000244200ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import task from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.util import service class DeferredStartStop(service.AsyncService): def startService(self): self.d = defer.Deferred() return self.d def stopService(self): self.d = defer.Deferred() return self.d class AsyncMultiService(unittest.TestCase): def setUp(self): self.svc = service.AsyncMultiService() @defer.inlineCallbacks def test_empty(self): yield self.svc.startService() yield self.svc.stopService() @defer.inlineCallbacks def test_waits_for_child_services(self): child = DeferredStartStop() yield child.setServiceParent(self.svc) d = self.svc.startService() self.assertFalse(d.called) child.d.callback(None) self.assertTrue(d.called) d = self.svc.stopService() self.assertFalse(d.called) child.d.callback(None) self.assertTrue(d.called) @defer.inlineCallbacks def test_child_fails(self): child = DeferredStartStop() yield child.setServiceParent(self.svc) d = self.svc.startService() self.assertFalse(d.called) child.d.errback(RuntimeError('oh noes')) self.assertTrue(d.called) @d.addErrback def check(f): f.check(RuntimeError) d = self.svc.stopService() self.assertFalse(d.called) child.d.errback(RuntimeError('oh 
noes')) self.assertTrue(d.called) @d.addErrback def check_again(f): f.check(RuntimeError) def test_child_starts_on_sSP(self): d = self.svc.startService() self.assertTrue(d.called) child = DeferredStartStop() d = child.setServiceParent(self.svc) self.assertFalse(d.called) child.d.callback(None) self.assertTrue(d.called) class ClusteredBuildbotService(unittest.TestCase): SVC_NAME = 'myName' SVC_ID = 20 class DummyService(service.ClusteredBuildbotService): pass def setUp(self): self.svc = self.makeService() def tearDown(self): pass def makeService(self, name=SVC_NAME, serviceid=SVC_ID): svc = self.DummyService(name=name) svc.clock = task.Clock() self.setServiceClaimable(svc, defer.succeed(False)) self.setActivateToReturn(svc, defer.succeed(None)) self.setDeactivateToReturn(svc, defer.succeed(None)) self.setGetServiceIdToReturn(svc, defer.succeed(serviceid)) self.setUnclaimToReturn(svc, defer.succeed(None)) return svc def makeMock(self, value): mockObj = mock.Mock() if isinstance(value, Exception): mockObj.side_effect = value else: mockObj.return_value = value return mockObj def setServiceClaimable(self, svc, claimable): svc._claimService = self.makeMock(claimable) def setGetServiceIdToReturn(self, svc, serviceid): svc._getServiceId = self.makeMock(serviceid) def setUnclaimToReturn(self, svc, unclaim): svc._unclaimService = self.makeMock(unclaim) def setActivateToReturn(self, svc, activate): svc.activate = self.makeMock(activate) def setDeactivateToReturn(self, svc, deactivate): svc.deactivate = self.makeMock(deactivate) def test_name_PreservesUnicodePromotion(self): svc = self.makeService(name='n') self.assertIsInstance(svc.name, str) self.assertEqual(svc.name, 'n') def test_name_GetsUnicodePromotion(self): svc = self.makeService(name='n') self.assertIsInstance(svc.name, str) self.assertEqual(svc.name, 'n') def test_compare(self): a = self.makeService(name='a', serviceid=20) b1 = self.makeService(name='b', serviceid=21) b2 = self.makeService(name='b', serviceid=21) # 
same args as 'b1' b3 = self.makeService(name='b', serviceid=20) # same id as 'a' self.assertTrue(a == a) self.assertTrue(a != b1) self.assertTrue(a != b2) self.assertTrue(a != b3) self.assertTrue(b1 != a) self.assertTrue(b1 == b1) self.assertTrue(b1 == b2) self.assertTrue(b1 == b3) def test_create_NothingCalled(self): # None of the member functions get called until startService happens self.assertFalse(self.svc.activate.called) self.assertFalse(self.svc.deactivate.called) self.assertFalse(self.svc._getServiceId.called) self.assertFalse(self.svc._claimService.called) self.assertFalse(self.svc._unclaimService.called) def test_create_IsInactive(self): # starts in inactive state self.assertFalse(self.svc.isActive()) def test_create_HasNoServiceIdYet(self): # has no service id at first self.assertIdentical(self.svc.serviceid, None) def test_start_UnclaimableSoNotActiveYet(self): self.svc.startService() self.assertFalse(self.svc.isActive()) def test_start_GetsServiceIdAssigned(self): self.svc.startService() self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(self.SVC_ID, self.svc.serviceid) def test_start_WontPollYet(self): self.svc.startService() # right before the poll interval, nothing has tried again yet self.svc.clock.advance(self.svc.POLL_INTERVAL_SEC * 0.95) self.assertEqual(0, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertFalse(self.svc.isActive()) @defer.inlineCallbacks def test_start_PollButClaimFails(self): yield self.svc.startService() # at the POLL time, it gets called again, but we're still inactive... 
self.svc.clock.advance(self.svc.POLL_INTERVAL_SEC * 1.05) self.assertEqual(0, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(2, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_start_PollsPeriodically(self): NUMBER_OF_POLLS = 15 self.svc.startService() for i in range(NUMBER_OF_POLLS): self.svc.clock.advance(self.svc.POLL_INTERVAL_SEC) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual( 1 + NUMBER_OF_POLLS, self.svc._claimService.call_count) def test_start_ClaimSucceeds(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(True, self.svc.isActive()) def test_start_PollingAfterClaimSucceedsDoesNothing(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # another epoch shouldn't do anything further... self.svc.clock.advance(self.svc.POLL_INTERVAL_SEC * 2) self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(True, self.svc.isActive()) def test_stopWhileStarting_NeverActive(self): self.svc.startService() # .. 
claim fails stopDeferred = self.svc.stopService() # a stop at this point unwinds things immediately self.successResultOf(stopDeferred) # advance the clock, and nothing should happen self.svc.clock.advance(self.svc.POLL_INTERVAL_SEC * 2) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertFalse(self.svc.isActive()) def test_stop_AfterActivated(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # now deactivate: stopDeferred = self.svc.stopService() # immediately stops self.successResultOf(stopDeferred) self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(False, self.svc.isActive()) def test_stop_AfterActivated_NoDeferred(self): # set all the child-class functions to return non-deferreds, # just to check we can handle both: self.setServiceClaimable(self.svc, True) self.setActivateToReturn(self.svc, None) self.setDeactivateToReturn(self.svc, None) self.setGetServiceIdToReturn(self.svc, self.SVC_ID) self.setUnclaimToReturn(self.svc, None) self.svc.startService() # now deactivate: stopDeferred = self.svc.stopService() # immediately stops self.successResultOf(stopDeferred) self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(False, self.svc.isActive()) def test_stopWhileStarting_getServiceIdTakesForever(self): # create a deferred that will take a while... 
svcIdDeferred = defer.Deferred() self.setGetServiceIdToReturn(self.svc, svcIdDeferred) self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # stop before it has the service id (the svcIdDeferred is stuck) stopDeferred = self.svc.stopService() self.assertNoResult(stopDeferred) # .. no deactivates yet.... self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc.activate.call_count) self.assertEqual(0, self.svc._claimService.call_count) self.assertEqual(False, self.svc.isActive()) # then let service id part finish svcIdDeferred.callback(None) # ... which will cause the stop to also finish self.successResultOf(stopDeferred) # and everything else should unwind too: self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_stopWhileStarting_claimServiceTakesForever(self): # create a deferred that will take a while... claimDeferred = defer.Deferred() self.setServiceClaimable(self.svc, claimDeferred) self.svc.startService() # .. claim is still pending here # stop before it's done activating stopDeferred = self.svc.stopService() self.assertNoResult(stopDeferred) # .. no deactivates yet.... self.assertEqual(0, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) # then let claim succeed, but we should see things unwind claimDeferred.callback(True) # ... 
which will cause the stop to also finish self.successResultOf(stopDeferred) # and everything else should unwind too: self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_stopWhileStarting_activateTakesForever(self): """If activate takes forever, things acquiesce nicely""" # create a deferreds that will take a while... activateDeferred = defer.Deferred() self.setActivateToReturn(self.svc, activateDeferred) self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # stop before it's done activating stopDeferred = self.svc.stopService() self.assertNoResult(stopDeferred) # .. no deactivates yet.... self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(True, self.svc.isActive()) # then let activate finish activateDeferred.callback(None) # ... which will cause the stop to also finish self.successResultOf(stopDeferred) # and everything else should unwind too: self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_stop_unclaimTakesForever(self): # create a deferred that will take a while... 
unclaimDeferred = defer.Deferred() self.setUnclaimToReturn(self.svc, unclaimDeferred) self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # stop before it's done activating stopDeferred = self.svc.stopService() self.assertNoResult(stopDeferred) # .. no deactivates yet.... self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) # then let unclaim part finish unclaimDeferred.callback(None) # ... which will cause the stop to finish self.successResultOf(stopDeferred) # and everything should unwind: self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_stop_deactivateTakesForever(self): # create a deferred that will take a while... deactivateDeferred = defer.Deferred() self.setDeactivateToReturn(self.svc, deactivateDeferred) self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # stop before it's done activating stopDeferred = self.svc.stopService() self.assertNoResult(stopDeferred) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) # then let deactivate finish deactivateDeferred.callback(None) # ... 
which will cause the stop to finish self.successResultOf(stopDeferred) # and everything else should unwind too: self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_claim_raises(self): self.setServiceClaimable(self.svc, RuntimeError()) self.svc.startService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) self.assertEqual(False, self.svc.isActive()) @defer.inlineCallbacks def test_activate_raises(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.setActivateToReturn(self.svc, RuntimeError()) yield self.svc.startService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) # half-active: we actually return True in this case: self.assertEqual(True, self.svc.isActive()) def test_deactivate_raises(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.setDeactivateToReturn(self.svc, RuntimeError()) self.svc.startService() self.svc.stopService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) self.assertEqual(False, self.svc.isActive()) def test_unclaim_raises(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.setUnclaimToReturn(self.svc, RuntimeError()) self.svc.startService() self.svc.stopService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) self.assertEqual(False, self.svc.isActive()) class MyService(service.BuildbotService): def checkConfig(self, foo, a=None): if a is None: config.error("a must be specified") return defer.succeed(True) def reconfigService(self, *argv, **kwargs): self.config = argv, kwargs return defer.succeed(None) class fakeConfig: pass class fakeMaster(service.MasterService, service.ReconfigurableServiceMixin): pass def makeFakeMaster(): m = fakeMaster() m.db = mock.Mock() return m class BuildbotService(unittest.TestCase): def setUp(self): self.master = makeFakeMaster() @defer.inlineCallbacks def prepareService(self): 
self.master.config = fakeConfig() serv = MyService(1, a=2, name="basic") yield serv.setServiceParent(self.master) yield self.master.startService() yield serv.reconfigServiceWithSibling(serv) return serv @defer.inlineCallbacks def testNominal(self): yield self.prepareService() self.assertEqual( self.master.namedServices["basic"].config, ((1,), dict(a=2))) @defer.inlineCallbacks def testConfigDict(self): serv = yield self.prepareService() self.assertEqual(serv.getConfigDict(), { 'args': (1,), 'class': 'buildbot.test.unit.util.test_service.MyService', 'kwargs': {'a': 2}, 'name': 'basic'}) def testNoName(self): with self.assertRaises(ValueError): MyService(1, a=2) def testChecksDone(self): with self.assertRaises(config.ConfigErrors): MyService(1, name="foo") class BuildbotServiceManager(unittest.TestCase): def setUp(self): self.master = makeFakeMaster() @defer.inlineCallbacks def prepareService(self): self.master.config = fakeConfig() serv = MyService(1, a=2, name="basic") self.master.config.services = {"basic": serv} self.manager = service.BuildbotServiceManager() yield self.manager.setServiceParent(self.master) yield self.master.startService() yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) return serv @defer.inlineCallbacks def testNominal(self): yield self.prepareService() self.assertEqual( self.manager.namedServices["basic"].config, ((1,), dict(a=2))) @defer.inlineCallbacks def testReconfigNoChange(self): serv = yield self.prepareService() serv.config = None # 'de-configure' the service # reconfigure with the same config serv2 = MyService(1, a=2, name="basic") self.master.config.services = {"basic": serv2} # reconfigure the master yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) # the first service is still used self.assertIdentical(self.manager.namedServices["basic"], serv) # the second service is not used self.assertNotIdentical(self.manager.namedServices["basic"], serv2) # reconfigServiceWithConstructorArgs was not 
called self.assertEqual(serv.config, None) @defer.inlineCallbacks def testReconfigWithChanges(self): serv = yield self.prepareService() serv.config = None # 'de-configure' the service # reconfigure with the different config serv2 = MyService(1, a=4, name="basic") self.master.config.services = {"basic": serv2} # reconfigure the master yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) # the first service is still used self.assertIdentical(self.manager.namedServices["basic"], serv) # the second service is not used self.assertNotIdentical(self.manager.namedServices["basic"], serv2) # reconfigServiceWithConstructorArgs was called with new config self.assertEqual(serv.config, ((1,), dict(a=4))) def testNoName(self): with self.assertRaises(ValueError): MyService(1, a=2) def testChecksDone(self): with self.assertRaises(config.ConfigErrors): MyService(1, name="foo") @defer.inlineCallbacks def testReconfigWithNew(self): serv = yield self.prepareService() # reconfigure with the new service serv2 = MyService(1, a=4, name="basic2") self.master.config.services['basic2'] = serv2 # the second service is not there yet self.assertIdentical(self.manager.namedServices.get("basic2"), None) # reconfigure the master yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) # the first service is still used self.assertIdentical(self.manager.namedServices["basic"], serv) # the second service is created self.assertIdentical(self.manager.namedServices["basic2"], serv2) # reconfigServiceWithConstructorArgs was called with new config self.assertEqual(serv2.config, ((1,), dict(a=4))) @defer.inlineCallbacks def testReconfigWithDeleted(self): serv = yield self.prepareService() self.assertEqual(serv.running, True) # remove all self.master.config.services = {} # reconfigure the master yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) # the first service is still used self.assertIdentical(self.manager.namedServices.get("basic"), None) 
self.assertEqual(serv.running, False) @defer.inlineCallbacks def testConfigDict(self): yield self.prepareService() self.assertEqual(self.manager.getConfigDict(), { 'childs': [{ 'args': (1,), 'class': 'buildbot.test.unit.util.test_service.MyService', 'kwargs': {'a': 2}, 'name': 'basic'}], 'name': 'services'}) @defer.inlineCallbacks def testRenderSecrets(self): yield self.prepareService() service = self.manager.namedServices['basic'] test = yield service.renderSecrets(Interpolate('test_string')) self.assertEqual(test, 'test_string') @defer.inlineCallbacks def testRenderSecrets2Args(self): yield self.prepareService() service = self.manager.namedServices['basic'] test, test2 = yield service.renderSecrets(Interpolate('test_string'), 'ok_for_non_renderable') self.assertEqual(test, 'test_string') self.assertEqual(test2, 'ok_for_non_renderable') @defer.inlineCallbacks def testRenderSecretsWithTuple(self): yield self.prepareService() service = self.manager.namedServices['basic'] test = yield service.renderSecrets(('user', Interpolate('test_string'))) self.assertEqual(test, ('user', 'test_string')) class UnderTestSharedService(service.SharedService): def __init__(self, arg1=None): super().__init__() class UnderTestDependentService(service.AsyncService): @defer.inlineCallbacks def startService(self): self.dependent = yield UnderTestSharedService.getService(self.parent) def stopService(self): assert self.dependent.running class SharedService(unittest.TestCase): @defer.inlineCallbacks def test_bad_constructor(self): parent = service.AsyncMultiService() with self.assertRaises(Exception): yield UnderTestSharedService.getService(parent, arg2="foo") @defer.inlineCallbacks def test_creation(self): parent = service.AsyncMultiService() r = yield UnderTestSharedService.getService(parent) r2 = yield UnderTestSharedService.getService(parent) r3 = yield UnderTestSharedService.getService(parent, "arg1") r4 = yield UnderTestSharedService.getService(parent, "arg1") self.assertIdentical(r, 
r2) self.assertNotIdentical(r, r3) self.assertIdentical(r3, r4) self.assertEqual(len(list(iter(parent))), 2) @defer.inlineCallbacks def test_startup(self): """the service starts when parent starts and stop""" parent = service.AsyncMultiService() r = yield UnderTestSharedService.getService(parent) self.assertEqual(r.running, 0) yield parent.startService() self.assertEqual(r.running, 1) yield parent.stopService() self.assertEqual(r.running, 0) @defer.inlineCallbacks def test_already_started(self): """the service starts during the getService if parent already started""" parent = service.AsyncMultiService() yield parent.startService() r = yield UnderTestSharedService.getService(parent) self.assertEqual(r.running, 1) # then we stop the parent, and the shared service stops yield parent.stopService() self.assertEqual(r.running, 0) @defer.inlineCallbacks def test_already_stopped_last(self): parent = service.AsyncMultiService() o = UnderTestDependentService() yield o.setServiceParent(parent) yield parent.startService() yield parent.stopService() buildbot-3.4.0/master/buildbot/test/unit/util/test_ssfilter.py000066400000000000000000000176111413250514000246010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re from parameterized import parameterized from twisted.trial import unittest from buildbot.util.ssfilter import SourceStampFilter from buildbot.util.ssfilter import extract_filter_values from buildbot.util.ssfilter import extract_filter_values_branch from buildbot.util.ssfilter import extract_filter_values_regex class TestSourceStampFilter(unittest.TestCase): def test_extract_filter_values(self): self.assertEqual(extract_filter_values([], 'name'), []) self.assertEqual(extract_filter_values(['value'], 'name'), ['value']) self.assertEqual(extract_filter_values('value', 'name'), ['value']) with self.assertRaises(ValueError): extract_filter_values({'value'}, 'name') with self.assertRaises(ValueError): extract_filter_values(None, 'name') with self.assertRaises(ValueError): extract_filter_values([{'value'}], 'name') with self.assertRaises(ValueError): extract_filter_values([None], 'name') def test_extract_filter_values_branch(self): self.assertEqual(extract_filter_values_branch([], 'name'), []) self.assertEqual(extract_filter_values_branch(['value'], 'name'), ['value']) self.assertEqual(extract_filter_values_branch('value', 'name'), ['value']) self.assertEqual(extract_filter_values_branch([None], 'name'), [None]) self.assertEqual(extract_filter_values_branch(None, 'name'), [None]) with self.assertRaises(ValueError): extract_filter_values({'value'}, 'name') with self.assertRaises(ValueError): extract_filter_values([{'value'}], 'name') def test_extract_filter_values_regex(self): self.assertEqual(extract_filter_values_regex([], 'name'), []) self.assertEqual(extract_filter_values_regex(['value'], 'name'), ['value']) self.assertEqual(extract_filter_values_regex('value', 'name'), ['value']) self.assertEqual(extract_filter_values_regex([re.compile('test')], 'name'), [re.compile('test')]) self.assertEqual(extract_filter_values_regex(re.compile('test'), 'name'), [re.compile('test')]) with self.assertRaises(ValueError): 
extract_filter_values({'value'}, 'name') with self.assertRaises(ValueError): extract_filter_values([{'value'}], 'name') @parameterized.expand([ ('match', {'project': 'p', 'codebase': 'c', 'repository': 'r', 'branch': 'b'}, True), ('not_project', {'project': '0', 'codebase': 'c', 'repository': 'r', 'branch': 'b'}, False), ('not_codebase', {'project': 'p', 'codebase': '0', 'repository': 'r', 'branch': 'b'}, False), ('not_repository', {'project': 'p', 'codebase': 'c', 'repository': '0', 'branch': 'b'}, False), ('not_branch', {'project': 'p', 'codebase': 'c', 'repository': 'r', 'branch': '0'}, False), ('none_branch', {'project': 'p', 'codebase': 'c', 'repository': 'r', 'branch': None}, False), ]) def test_filter_is_matched_eq_or_re(self, name, ss, expected): filter = SourceStampFilter(project_eq='p', codebase_eq='c', repository_eq='r', branch_eq='b') self.assertEqual(filter.is_matched(ss), expected) filter = SourceStampFilter(project_re='^p$', codebase_re='^c$', repository_re='^r$', branch_re='^b$') self.assertEqual(filter.is_matched(ss), expected) filter = SourceStampFilter(project_re=re.compile('^p$'), codebase_re=re.compile('^c$'), repository_re=re.compile('^r$'), branch_re=re.compile('^b$')) self.assertEqual(filter.is_matched(ss), expected) @parameterized.expand([ ('match', {'project': 'p', 'codebase': 'c', 'repository': 'r', 'branch': 'b'}, True), ('not_project', {'project': 'p0', 'codebase': 'c', 'repository': 'r', 'branch': 'b'}, False), ('not_codebase', {'project': 'p', 'codebase': 'c0', 'repository': 'r', 'branch': 'b'}, False), ('not_repository', {'project': 'p', 'codebase': 'c', 'repository': 'r0', 'branch': 'b'}, False), ('not_branch', {'project': 'p', 'codebase': 'c', 'repository': 'r', 'branch': 'b0'}, False), ('none_branch', {'project': 'p', 'codebase': 'c', 'repository': 'r', 'branch': None}, True) ]) def test_filter_is_matched_not_eq_or_re(self, name, ss, expected): filter = SourceStampFilter(project_not_eq='p0', codebase_not_eq='c0', 
repository_not_eq='r0', branch_not_eq='b0') self.assertEqual(filter.is_matched(ss), expected) filter = SourceStampFilter(project_not_re='^p0$', codebase_not_re='^c0$', repository_not_re='^r0$', branch_not_re='^b0$') self.assertEqual(filter.is_matched(ss), expected) filter = SourceStampFilter(project_not_re=re.compile('^p0$'), codebase_not_re=re.compile('^c0$'), repository_not_re=re.compile('^r0$'), branch_not_re=re.compile('^b0$')) self.assertEqual(filter.is_matched(ss), expected) def test_filter_is_matched_codebase(self): filter = SourceStampFilter(project_eq='p', codebase_eq='c', repository_eq='r', branch_eq='b') self.assertEqual(filter.is_matched_codebase('c'), True) self.assertEqual(filter.is_matched_codebase('0'), False) def test_filter_repr(self): filter = SourceStampFilter(project_eq='p', codebase_eq='c', repository_eq='r', branch_eq='b', project_re='^p$', codebase_re='^c$', repository_re='^r$', branch_re='^b$', project_not_eq='p0', codebase_not_eq='c0', repository_not_eq='r0', branch_not_eq='b0', project_not_re='^p0$', codebase_not_re='^c0$', repository_not_re='^r0$', branch_not_re='^b0$') self.assertEqual(repr(filter), "") buildbot-3.4.0/master/buildbot/test/unit/util/test_ssl.py000066400000000000000000000026641413250514000235510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.trial import unittest from buildbot import config from buildbot.util import ssl class Tests(unittest.TestCase): @ssl.skipUnless def test_ClientContextFactory(self): from twisted.internet.ssl import ClientContextFactory self.assertEqual(ssl.ClientContextFactory, ClientContextFactory) @ssl.skipUnless def test_ConfigError(self): ssl.ssl_import_error = "lib xxx do not exist" ssl.has_ssl = False self.patch(config, "_errors", mock.Mock()) ssl.ensureHasSSL("myplugin") config._errors.addError.assert_called_with( "TLS dependencies required for myplugin are not installed : " "lib xxx do not exist\n pip install 'buildbot[tls]'") buildbot-3.4.0/master/buildbot/test/unit/util/test_state.py000066400000000000000000000050061413250514000240610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util import state class FakeObject(state.StateMixin): name = "fake-name" def __init__(self, master): self.master = master class TestStateMixin(TestReactorMixin, unittest.TestCase): OBJECTID = 19 def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.object = FakeObject(self.master) @defer.inlineCallbacks def test_getState(self): self.master.db.state.set_fake_state(self.object, fav_color=['red', 'purple']) res = yield self.object.getState('fav_color') self.assertEqual(res, ['red', 'purple']) @defer.inlineCallbacks def test_getState_default(self): res = yield self.object.getState('fav_color', 'black') self.assertEqual(res, 'black') def test_getState_KeyError(self): self.master.db.state.set_fake_state(self.object, fav_color=['red', 'purple']) d = self.object.getState('fav_book') def cb(_): self.fail("should not succeed") def check_exc(f): f.trap(KeyError) d.addCallbacks(cb, check_exc) return d @defer.inlineCallbacks def test_setState(self): yield self.object.setState('y', 14) self.master.db.state.assertStateByClass('fake-name', 'FakeObject', y=14) @defer.inlineCallbacks def test_setState_existing(self): self.master.db.state.set_fake_state(self.object, x=13) yield self.object.setState('x', 14) self.master.db.state.assertStateByClass('fake-name', 'FakeObject', x=14) buildbot-3.4.0/master/buildbot/test/unit/util/test_subscriptions.py000066400000000000000000000110171413250514000256470ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from twisted.trial import unittest from buildbot.util import subscription class subscriptions(unittest.TestCase): def setUp(self): self.subpt = subscription.SubscriptionPoint('test_sub') def test_str(self): self.assertIn('test_sub', str(self.subpt)) def test_subscribe_unsubscribe(self): state = [] def cb(*args, **kwargs): state.append((args, kwargs)) # subscribe sub = self.subpt.subscribe(cb) self.assertTrue(isinstance(sub, subscription.Subscription)) self.assertEqual(state, []) # deliver self.subpt.deliver(1, 2, a=3, b=4) self.assertEqual(state, [((1, 2), dict(a=3, b=4))]) state.pop() # unsubscribe sub.unsubscribe() # don't receive events anymore self.subpt.deliver(3, 4) self.assertEqual(state, []) def test_exception(self): def cb(*args, **kwargs): raise RuntimeError('mah bucket!') self.subpt.subscribe(cb) self.subpt.deliver() # should not raise exceptions = self.subpt.pop_exceptions() self.assertEqual(len(exceptions), 1) self.assertIsInstance(exceptions[0], RuntimeError) # log.err will cause Trial to complain about this error anyway, unless # we clean it up self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) def test_deferred_exception(self): d = defer.Deferred() @defer.inlineCallbacks def cb_deferred(*args, **kwargs): yield d raise RuntimeError('msg') self.subpt.subscribe(cb_deferred) self.subpt.deliver() d.callback(None) exceptions = self.subpt.pop_exceptions() self.assertEqual(len(exceptions), 1) 
self.assertIsInstance(exceptions[0], failure.Failure) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) def test_multiple_exceptions(self): d = defer.Deferred() @defer.inlineCallbacks def cb_deferred(*args, **kwargs): yield d raise RuntimeError('msg') def cb(*args, **kwargs): raise RuntimeError('msg') self.subpt.subscribe(cb_deferred) self.subpt.subscribe(cb) self.subpt.deliver() d.callback(None) exceptions = self.subpt.pop_exceptions() self.assertEqual(len(exceptions), 2) self.assertIsInstance(exceptions[0], RuntimeError) self.assertIsInstance(exceptions[1], failure.Failure) self.assertEqual(2, len(self.flushLoggedErrors(RuntimeError))) def test_deliveries_finished(self): state = [] def create_cb(d): def cb(*args): state.append(args) return d return cb d1 = defer.Deferred() d2 = defer.Deferred() self.subpt.subscribe(create_cb(d1)) self.subpt.subscribe(create_cb(d2)) self.assertEqual(state, []) self.subpt.deliver(1, 2) self.assertEqual(state, [(1, 2), (1, 2)]) d = self.subpt.waitForDeliveriesToFinish() self.assertFalse(d.called) d1.callback(None) self.assertFalse(d.called) d2.callback(None) self.assertTrue(d.called) # when there are no waiting deliveries, should call the callback immediately d = self.subpt.waitForDeliveriesToFinish() self.assertTrue(d.called) def test_deliveries_not_finished_within_callback(self): state = [] def cb(*args): state.append(args) d = self.subpt.waitForDeliveriesToFinish() self.assertFalse(d.called) self.subpt.subscribe(cb) self.assertEqual(state, []) self.subpt.deliver(1, 2) self.assertEqual(state, [(1, 2)]) buildbot-3.4.0/master/buildbot/test/unit/util/test_test_result_submitter.py000066400000000000000000000300511413250514000274120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util.test_result_submitter import TestResultSubmitter class TestTestResultSubmitter(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True) yield self.master.startService() self.master.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Step(id=131, number=132, name='step132', buildid=30), ]) @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def test_complete_empty(self): sub = TestResultSubmitter() yield sub.setup_by_ids(self.master, 88, 30, 131, 'desc', 'cat', 'unit') setid = sub.get_test_result_set_id() sets = yield self.master.data.get(('builds', 30, 'test_result_sets')) self.assertEqual(list(sets), [{ 'test_result_setid': setid, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc', 'category': 'cat', 'value_unit': 'unit', 'tests_passed': None, 'tests_failed': None, 'complete': False }]) yield sub.finish() sets = yield self.master.data.get(('builds', 30, 
'test_result_sets')) self.assertEqual(list(sets), [{ 'test_result_setid': setid, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc', 'category': 'cat', 'value_unit': 'unit', 'tests_passed': None, 'tests_failed': None, 'complete': True }]) @defer.inlineCallbacks def test_submit_result(self): sub = TestResultSubmitter(batch_n=3) yield sub.setup_by_ids(self.master, 88, 30, 131, 'desc', 'cat', 'unit') sub.add_test_result('1', 'name1') yield sub.finish() setid = sub.get_test_result_set_id() sets = yield self.master.data.get(('builds', 30, 'test_result_sets')) self.assertEqual(list(sets), [{ 'test_result_setid': setid, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc', 'category': 'cat', 'value_unit': 'unit', 'tests_passed': None, 'tests_failed': None, 'complete': True }]) results = yield self.master.data.get(('test_result_sets', setid, 'results')) self.assertEqual(list(results), [{ 'test_resultid': 1002, 'builderid': 88, 'test_result_setid': setid, 'test_name': 'name1', 'test_code_path': None, 'duration_ns': None, 'line': None, 'value': '1' }]) def filter_results_value_name(self, results): return [{'test_name': r['test_name'], 'value': r['value']} for r in results] @defer.inlineCallbacks def test_submit_result_wrong_argument_types(self): sub = TestResultSubmitter() yield sub.setup_by_ids(self.master, 88, 30, 131, 'desc', 'cat', 'unit') with self.assertRaises(TypeError): sub.add_test_result(1, 'name1') with self.assertRaises(TypeError): sub.add_test_result('1', test_name=123) with self.assertRaises(TypeError): sub.add_test_result('1', 'name1', test_code_path=123) with self.assertRaises(TypeError): sub.add_test_result('1', 'name1', line='123') with self.assertRaises(TypeError): sub.add_test_result('1', 'name1', duration_ns='123') @defer.inlineCallbacks def test_batchs_last_batch_full(self): sub = TestResultSubmitter(batch_n=3) yield sub.setup_by_ids(self.master, 88, 30, 131, 'desc', 'cat', 'unit') sub.add_test_result('1', 'name1') 
sub.add_test_result('2', 'name2') sub.add_test_result('3', 'name3') sub.add_test_result('4', 'name4') sub.add_test_result('5', 'name5') sub.add_test_result('6', 'name6') yield sub.finish() setid = sub.get_test_result_set_id() results = yield self.master.data.get(('test_result_sets', setid, 'results')) results = self.filter_results_value_name(results) self.assertEqual(results, [ {'test_name': 'name1', 'value': '1'}, {'test_name': 'name2', 'value': '2'}, {'test_name': 'name3', 'value': '3'}, {'test_name': 'name4', 'value': '4'}, {'test_name': 'name5', 'value': '5'}, {'test_name': 'name6', 'value': '6'}, ]) @defer.inlineCallbacks def test_batchs_last_batch_not_full(self): sub = TestResultSubmitter(batch_n=3) yield sub.setup_by_ids(self.master, 88, 30, 131, 'desc', 'cat', 'unit') sub.add_test_result('1', 'name1') sub.add_test_result('2', 'name2') sub.add_test_result('3', 'name3') sub.add_test_result('4', 'name4') sub.add_test_result('5', 'name5') yield sub.finish() setid = sub.get_test_result_set_id() results = yield self.master.data.get(('test_result_sets', setid, 'results')) results = self.filter_results_value_name(results) self.assertEqual(results, [ {'test_name': 'name1', 'value': '1'}, {'test_name': 'name2', 'value': '2'}, {'test_name': 'name3', 'value': '3'}, {'test_name': 'name4', 'value': '4'}, {'test_name': 'name5', 'value': '5'}, ]) @defer.inlineCallbacks def test_counts_pass_fail(self): sub = TestResultSubmitter(batch_n=3) yield sub.setup_by_ids(self.master, 88, 30, 131, 'desc', 'pass_fail', 'boolean') sub.add_test_result('0', 'name1') sub.add_test_result('0', 'name2') sub.add_test_result('1', 'name3') sub.add_test_result('1', 'name4') sub.add_test_result('0', 'name5') yield sub.finish() setid = sub.get_test_result_set_id() sets = yield self.master.data.get(('builds', 30, 'test_result_sets')) self.assertEqual(list(sets), [{ 'test_result_setid': setid, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc', 'category': 'pass_fail', 'value_unit': 
'boolean', 'tests_passed': 2, 'tests_failed': 3, 'complete': True }]) @defer.inlineCallbacks def test_counts_pass_fail_invalid_values(self): sub = TestResultSubmitter(batch_n=3) yield sub.setup_by_ids(self.master, 88, 30, 131, 'desc', 'pass_fail', 'boolean') sub.add_test_result('0', 'name1') sub.add_test_result('0', 'name2') sub.add_test_result('1', 'name3') sub.add_test_result('1', 'name4') sub.add_test_result('invalid', 'name5') yield sub.finish() setid = sub.get_test_result_set_id() sets = yield self.master.data.get(('builds', 30, 'test_result_sets')) self.assertEqual(list(sets), [{ 'test_result_setid': setid, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc', 'category': 'pass_fail', 'value_unit': 'boolean', 'tests_passed': 2, 'tests_failed': 2, 'complete': True }]) # also check whether we preserve the "invalid" values in the database. results = yield self.master.data.get(('test_result_sets', setid, 'results')) results = self.filter_results_value_name(results) self.assertEqual(results, [ {'test_name': 'name1', 'value': '0'}, {'test_name': 'name2', 'value': '0'}, {'test_name': 'name3', 'value': '1'}, {'test_name': 'name4', 'value': '1'}, {'test_name': 'name5', 'value': 'invalid'}, ]) self.flushLoggedErrors(ValueError) @defer.inlineCallbacks def test_counts_pass_only(self): sub = TestResultSubmitter(batch_n=3) yield sub.setup_by_ids(self.master, 88, 30, 131, 'desc', 'pass_only', 'some_unit') sub.add_test_result('string1', 'name1') sub.add_test_result('string2', 'name2') sub.add_test_result('string3', 'name3') sub.add_test_result('string4', 'name4') sub.add_test_result('string5', 'name5') yield sub.finish() setid = sub.get_test_result_set_id() sets = yield self.master.data.get(('builds', 30, 'test_result_sets')) self.assertEqual(list(sets), [{ 'test_result_setid': setid, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc', 'category': 'pass_only', 'value_unit': 'some_unit', 'tests_passed': 5, 'tests_failed': 0, 'complete': True 
}]) results = yield self.master.data.get(('test_result_sets', setid, 'results')) results = self.filter_results_value_name(results) self.assertEqual(results, [ {'test_name': 'name1', 'value': 'string1'}, {'test_name': 'name2', 'value': 'string2'}, {'test_name': 'name3', 'value': 'string3'}, {'test_name': 'name4', 'value': 'string4'}, {'test_name': 'name5', 'value': 'string5'}, ]) self.flushLoggedErrors(ValueError) @defer.inlineCallbacks def test_counts_fail_only(self): sub = TestResultSubmitter(batch_n=3) yield sub.setup_by_ids(self.master, 88, 30, 131, 'desc', 'fail_only', 'some_unit') sub.add_test_result('string1', 'name1') sub.add_test_result('string2', 'name2') sub.add_test_result('string3', 'name3') sub.add_test_result('string4', 'name4') sub.add_test_result('string5', 'name5') yield sub.finish() setid = sub.get_test_result_set_id() sets = yield self.master.data.get(('builds', 30, 'test_result_sets')) self.assertEqual(list(sets), [{ 'test_result_setid': setid, 'builderid': 88, 'buildid': 30, 'stepid': 131, 'description': 'desc', 'category': 'fail_only', 'value_unit': 'some_unit', 'tests_passed': 0, 'tests_failed': 5, 'complete': True }]) results = yield self.master.data.get(('test_result_sets', setid, 'results')) results = self.filter_results_value_name(results) self.assertEqual(results, [ {'test_name': 'name1', 'value': 'string1'}, {'test_name': 'name2', 'value': 'string2'}, {'test_name': 'name3', 'value': 'string3'}, {'test_name': 'name4', 'value': 'string4'}, {'test_name': 'name5', 'value': 'string5'}, ]) self.flushLoggedErrors(ValueError) buildbot-3.4.0/master/buildbot/test/unit/util/test_test_util_runprocess.py000066400000000000000000000235741413250514000272520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import reporter from twisted.trial import unittest from buildbot.test.util.runprocess import ExpectMaster from buildbot.test.util.runprocess import MasterRunProcessMixin from buildbot.util import runprocess class TestRunprocessMixin(unittest.TestCase): def run_test_method(self, method): class TestCase(MasterRunProcessMixin, unittest.TestCase): def setUp(self): self.setup_master_run_process() def runTest(self): return method(self) self.testcase = TestCase() result = reporter.TestResult() self.testcase.run(result) # This blocks return result def assert_test_failure(self, result, expected_failure): self.assertEqual(result.errors, []) self.assertEqual(len(result.failures), 1) self.assertTrue(result.failures[0][1].check(unittest.FailTest)) if expected_failure: self.assertSubstring(expected_failure, result.failures[0][1].getErrorMessage()) def assert_successful(self, result): if not result.wasSuccessful(): output = 'expected success' if result.failures: output += ('\ntest failed: {}'.format(result.failures[0][1].getErrorMessage())) if result.errors: output += ('\nerrors: {}'.format([error[1].value for error in result.errors])) raise self.failureException(output) self.assertTrue(result.wasSuccessful()) def test_patch(self): original_run_process = runprocess.run_process def method(testcase): testcase.expect_commands() self.assertEqual(runprocess.run_process, testcase.patched_run_process) result = self.run_test_method(method) 
self.assert_successful(result) self.assertEqual(runprocess.run_process, original_run_process) def test_method_chaining(self): expect = ExpectMaster('command') self.assertEqual(expect, expect.exit(0)) self.assertEqual(expect, expect.stdout(b"output")) self.assertEqual(expect, expect.stderr(b"error")) def test_run_process_one_command_only_rc(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"]).stdout(b'stdout').stderr(b'stderr')) res = yield runprocess.run_process(None, ["command"], collect_stdout=False, collect_stderr=False) self.assertEqual(res, 0) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_successful(result) def test_run_process_one_command_only_rc_stdout(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"]).stdout(b'stdout').stderr(b'stderr')) res = yield runprocess.run_process(None, ["command"], collect_stdout=True, collect_stderr=False) self.assertEqual(res, (0, b'stdout')) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_successful(result) def test_run_process_one_command_with_rc_stderr(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"]).stdout(b'stdout').stderr(b'stderr')) res = yield runprocess.run_process(None, ["command"], collect_stdout=False, collect_stderr=True) self.assertEqual(res, (0, b'stderr')) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_successful(result) def test_run_process_one_command_with_rc_stdout_stderr(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"]).stdout(b'stdout').stderr(b'stderr')) res = yield runprocess.run_process(None, ["command"]) self.assertEqual(res, (0, b'stdout', b'stderr')) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_successful(result) def 
test_run_process_expect_two_run_one(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"])) testcase.expect_commands(ExpectMaster(["command2"])) res = yield runprocess.run_process(None, ["command"]) self.assertEqual(res, (0, b'', b'')) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_test_failure(result, "assert all expected commands were run") def test_run_process_wrong_command(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command2"])) yield runprocess.run_process(None, ["command"]) result = self.run_test_method(method) self.assert_test_failure(result, "unexpected command run") # assert we have a meaningful message self.assert_test_failure(result, "command2") def test_run_process_wrong_args(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command", "arg"])) yield runprocess.run_process(None, ["command", "otherarg"]) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_test_failure(result, "unexpected command run") def test_run_process_missing_path(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"]).workdir("/home")) yield runprocess.run_process(None, ["command"]) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_test_failure(result, "unexpected command run") def test_run_process_wrong_path(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command", "arg"]).workdir("/home")) yield runprocess.run_process(None, ["command"], workdir="/path") testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_test_failure(result, "unexpected command run") def test_run_process_not_current_path(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command", "arg"])) yield 
runprocess.run_process(None, ["command"], workdir="/path") testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_test_failure(result, "unexpected command run") def test_run_process_error_output(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"]).stderr(b"some test")) res = yield runprocess.run_process(None, ["command"], collect_stderr=False, stderr_is_error=True) self.assertEqual(res, (-1, b'')) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_successful(result) def test_run_process_nonzero_exit(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"]).exit(1)) res = yield runprocess.run_process(None, ["command"]) self.assertEqual(res, (1, b'', b'')) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_successful(result) def test_run_process_environ_success(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"])) testcase.add_run_process_expect_env({'key': 'value'}) res = yield runprocess.run_process(None, ["command"], env={'key': 'value'}) self.assertEqual(res, (0, b'', b'')) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_successful(result) def test_run_process_environ_wrong_value(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"])) testcase.add_run_process_expect_env({'key': 'value'}) yield runprocess.run_process(None, ["command"], env={'key': 'wrongvalue'}) testcase.assert_all_commands_ran() result = self.run_test_method(method) self.assert_test_failure(result, "Expected environment to have key = 'value'") def test_run_process_environ_missing(self): @defer.inlineCallbacks def method(testcase): testcase.expect_commands(ExpectMaster(["command"])) testcase.add_run_process_expect_env({'key': 'value'}) d = runprocess.run_process(None, 
["command"]) return d result = self.run_test_method(method) self.assert_test_failure(result, "Expected environment to have key = 'value'") buildbot-3.4.0/master/buildbot/test/unit/util/test_tuplematch.py000066400000000000000000000024241413250514000251100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.test.util import tuplematching from buildbot.util import tuplematch class MatchTuple(tuplematching.TupleMatchingMixin, unittest.TestCase): # called by the TupleMatchingMixin methods def do_test_match(self, routingKey, shouldMatch, filter): result = tuplematch.matchTuple(routingKey, filter) msg = '{} {} {}'.format(repr(routingKey), 'should match' if shouldMatch else "shouldn't match", repr(filter)) self.assertEqual(shouldMatch, result, msg) buildbot-3.4.0/master/buildbot/test/unit/worker/000077500000000000000000000000001413250514000216635ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/worker/__init__.py000066400000000000000000000000001413250514000237620ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/worker/test_base.py000066400000000000000000001042251413250514000242120ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot import locks from buildbot.machine.base import Machine from buildbot.plugins import util from buildbot.process import properties from buildbot.secrets.manager import SecretManager from buildbot.test import fakedb from buildbot.test.fake import bworkermanager from buildbot.test.fake import fakemaster from buildbot.test.fake import fakeprotocol from buildbot.test.fake import worker from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util import interfaces from buildbot.test.util import logging from buildbot.test.util.misc import TestReactorMixin from buildbot.worker import AbstractLatentWorker from buildbot.worker import base class ConcreteWorker(base.AbstractWorker): pass class FakeBuilder: def getBuilderId(self): return defer.succeed(1) class WorkerInterfaceTests(interfaces.InterfaceTests): def test_attr_workername(self): self.assertTrue(hasattr(self.wrk, 'workername')) def test_attr_properties(self): self.assertTrue(hasattr(self.wrk, 'properties')) def test_attr_defaultProperties(self): self.assertTrue(hasattr(self.wrk, 'defaultProperties')) @defer.inlineCallbacks def test_attr_worker_basedir(self): yield 
self.callAttached() self.assertIsInstance(self.wrk.worker_basedir, str) @defer.inlineCallbacks def test_attr_path_module(self): yield self.callAttached() self.assertTrue(hasattr(self.wrk, 'path_module')) @defer.inlineCallbacks def test_attr_worker_system(self): yield self.callAttached() self.assertTrue(hasattr(self.wrk, 'worker_system')) def test_signature_acquireLocks(self): @self.assertArgSpecMatches(self.wrk.acquireLocks) def acquireLocks(self): pass def test_signature_releaseLocks(self): @self.assertArgSpecMatches(self.wrk.releaseLocks) def releaseLocks(self): pass def test_signature_attached(self): @self.assertArgSpecMatches(self.wrk.attached) def attached(self, conn): pass def test_signature_detached(self): @self.assertArgSpecMatches(self.wrk.detached) def detached(self): pass def test_signature_addWorkerForBuilder(self): @self.assertArgSpecMatches(self.wrk.addWorkerForBuilder) def addWorkerForBuilder(self, wfb): pass def test_signature_removeWorkerForBuilder(self): @self.assertArgSpecMatches(self.wrk.removeWorkerForBuilder) def removeWorkerForBuilder(self, wfb): pass def test_signature_buildFinished(self): @self.assertArgSpecMatches(self.wrk.buildFinished) def buildFinished(self, wfb): pass def test_signature_canStartBuild(self): @self.assertArgSpecMatches(self.wrk.canStartBuild) def canStartBuild(self): pass class RealWorkerItfc(TestReactorMixin, unittest.TestCase, WorkerInterfaceTests): def setUp(self): self.setUpTestReactor() self.wrk = ConcreteWorker('wrk', 'pa') @defer.inlineCallbacks def callAttached(self): self.master = fakemaster.make_master(self, wantData=True) yield self.master.workers.disownServiceParent() self.workers = bworkermanager.FakeWorkerManager() yield self.workers.setServiceParent(self.master) self.master.workers = self.workers yield self.wrk.setServiceParent(self.master.workers) self.conn = fakeprotocol.FakeConnection(self.wrk) yield self.wrk.attached(self.conn) class FakeWorkerItfc(TestReactorMixin, unittest.TestCase, 
WorkerInterfaceTests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.wrk = worker.FakeWorker(self.master) def callAttached(self): self.conn = fakeprotocol.FakeConnection(self.wrk) return self.wrk.attached(self.conn) class TestAbstractWorker(logging.LoggingMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpLogging() self.master = fakemaster.make_master(self, wantDb=True, wantData=True) self.botmaster = self.master.botmaster yield self.master.workers.disownServiceParent() self.workers = self.master.workers = bworkermanager.FakeWorkerManager() yield self.workers.setServiceParent(self.master) @defer.inlineCallbacks def createWorker(self, name='bot', password='pass', attached=False, configured=True, **kwargs): worker = ConcreteWorker(name, password, **kwargs) if configured: yield worker.setServiceParent(self.workers) if attached: worker.conn = fakeprotocol.FakeConnection(worker) return worker @defer.inlineCallbacks def createMachine(self, name, configured=True, **kwargs): machine = Machine(name) if configured: yield machine.setServiceParent(self.master.machine_manager) return machine @defer.inlineCallbacks def test_constructor_minimal(self): bs = yield self.createWorker('bot', 'pass') yield bs.startService() self.assertEqual(bs.workername, 'bot') self.assertEqual(bs.password, 'pass') self.assertEqual(bs.max_builds, None) self.assertEqual(bs.notify_on_missing, []) self.assertEqual(bs.missing_timeout, ConcreteWorker.DEFAULT_MISSING_TIMEOUT) self.assertEqual(bs.properties.getProperty('workername'), 'bot') self.assertEqual(bs.access, []) @defer.inlineCallbacks def test_constructor_secrets(self): fake_storage_service = FakeSecretStorage() secret_service = SecretManager() secret_service.services = [fake_storage_service] yield secret_service.setServiceParent(self.master) fake_storage_service.reconfigService(secretdict={"passkey": "1234"}) bs = yield 
self.createWorker('bot', util.Secret('passkey')) yield bs.startService() self.assertEqual(bs.password, '1234') @defer.inlineCallbacks def test_constructor_full(self): lock1, lock2 = locks.MasterLock('lock1'), locks.MasterLock('lock2') access1, access2 = lock1.access('counting'), lock2.access('counting') bs = yield self.createWorker('bot', 'pass', max_builds=2, notify_on_missing=['me@me.com'], missing_timeout=120, properties={'a': 'b'}, locks=[access1, access2]) yield bs.startService() self.assertEqual(bs.max_builds, 2) self.assertEqual(bs.notify_on_missing, ['me@me.com']) self.assertEqual(bs.missing_timeout, 120) self.assertEqual(bs.properties.getProperty('a'), 'b') self.assertEqual(bs.access, [access1, access2]) @defer.inlineCallbacks def test_constructor_notify_on_missing_not_list(self): bs = yield self.createWorker('bot', 'pass', notify_on_missing='foo@foo.com') yield bs.startService() # turned into a list: self.assertEqual(bs.notify_on_missing, ['foo@foo.com']) def test_constructor_notify_on_missing_not_string(self): with self.assertRaises(config.ConfigErrors): ConcreteWorker('bot', 'pass', notify_on_missing=['a@b.com', 13]) @defer.inlineCallbacks def do_test_reconfigService(self, old, new, existingRegistration=True): old.parent = self.master if existingRegistration: old.registration = bworkermanager.FakeWorkerRegistration(old) old.missing_timer = mock.Mock(name='missing_timer') if not old.running: yield old.startService() yield old.reconfigServiceWithSibling(new) @defer.inlineCallbacks def test_reconfigService_attrs(self): old = yield self.createWorker('bot', 'pass', max_builds=2, notify_on_missing=['me@me.com'], missing_timeout=120, properties={'a': 'b'}) new = yield self.createWorker('bot', 'pass', configured=False, max_builds=3, notify_on_missing=['her@me.com'], missing_timeout=121, properties={'a': 'c'}) old.updateWorker = mock.Mock(side_effect=lambda: defer.succeed(None)) yield self.do_test_reconfigService(old, new) self.assertEqual(old.max_builds, 3) 
self.assertEqual(old.notify_on_missing, ['her@me.com']) self.assertEqual(old.missing_timeout, 121) self.assertEqual(old.properties.getProperty('a'), 'c') self.assertEqual(old.registration.updates, ['bot']) self.assertTrue(old.updateWorker.called) @defer.inlineCallbacks def test_reconfigService_has_properties(self): old = yield self.createWorker(name="bot", password="pass") yield self.do_test_reconfigService(old, old) self.assertTrue(old.properties.getProperty('workername'), 'bot') @defer.inlineCallbacks def test_setupProperties(self): props = properties.Properties() props.setProperty('foo', 1, 'Scheduler') props.setProperty('bar', 'bleh', 'Change') props.setProperty('omg', 'wtf', 'Builder') wrkr = yield self.createWorker( 'bot', 'passwd', defaultProperties={'bar': 'onoes', 'cuckoo': 42}) wrkr.setupProperties(props) self.assertEquals(props.getProperty('bar'), 'bleh') self.assertEquals(props.getProperty('cuckoo'), 42) @defer.inlineCallbacks def test_reconfigService_initial_registration(self): old = yield self.createWorker('bot', 'pass') yield self.do_test_reconfigService(old, old, existingRegistration=False) self.assertIn('bot', self.master.workers.registrations) self.assertEqual(old.registration.updates, ['bot']) @defer.inlineCallbacks def test_reconfigService_builder(self): old = yield self.createWorker('bot', 'pass') yield self.do_test_reconfigService(old, old) # initial configuration, there is no builder configured self.assertEqual(old._configured_builderid_list, []) workers = yield self.master.data.get(('workers',)) self.assertEqual(len(workers[0]['configured_on']), 0) new = yield self.createWorker('bot', 'pass', configured=False) # we create a fake builder, and associate to the master self.botmaster.builders['bot'] = [FakeBuilder()] self.master.db.insertTestData([ fakedb.Builder(id=1, name='builder'), fakedb.BuilderMaster(builderid=1, masterid=824) ]) # on reconfig, the db should see the builder configured for this worker yield 
old.reconfigServiceWithSibling(new) self.assertEqual(old._configured_builderid_list, [1]) workers = yield self.master.data.get(('workers',)) self.assertEqual(len(workers[0]['configured_on']), 1) self.assertEqual(workers[0]['configured_on'][0]['builderid'], 1) @defer.inlineCallbacks def test_reconfig_service_no_machine(self): old = yield self.createWorker('bot', 'pass') self.assertIsNone(old.machine) yield self.do_test_reconfigService(old, old) self.assertIsNone(old.machine) @defer.inlineCallbacks def test_reconfig_service_with_machine_initial(self): machine = yield self.createMachine('machine1') old = yield self.createWorker('bot', 'pass', machine_name='machine1') self.assertIsNone(old.machine) yield self.do_test_reconfigService(old, old) self.assertIs(old.machine, machine) @defer.inlineCallbacks def test_reconfig_service_with_unknown_machine(self): old = yield self.createWorker('bot', 'pass', machine_name='machine1') self.assertIsNone(old.machine) yield self.do_test_reconfigService(old, old) self.assertLogged('Unknown machine') @parameterized.expand([ ('None_to_machine_initial', False, None, None, 'machine1', 'machine1'), ('None_to_machine', True, None, None, 'machine1', 'machine1'), ('machine_to_None_initial', False, 'machine1', None, None, None), ('machine_to_None', True, 'machine1', 'machine1', None, None), ('machine_to_same_machine_initial', False, 'machine1', None, 'machine1', 'machine1'), ('machine_to_same_machine', True, 'machine1', 'machine1', 'machine1', 'machine1'), ('machine_to_another_machine_initial', False, 'machine1', None, 'machine2', 'machine2'), ('machine_to_another_machine', True, 'machine1', 'machine1', 'machine2', 'machine2'), ]) @defer.inlineCallbacks def test_reconfig_service_machine(self, test_name, do_initial_self_reconfig, old_machine_name, expected_old_machine_name, new_machine_name, expected_new_machine_name): machine1 = yield self.createMachine('machine1') machine2 = yield self.createMachine('machine2') name_to_machine = { None: None, 
machine1.name: machine1, machine2.name: machine2, } expected_old_machine = name_to_machine[expected_old_machine_name] expected_new_machine = name_to_machine[expected_new_machine_name] old = yield self.createWorker('bot', 'pass', machine_name=old_machine_name) new = yield self.createWorker('bot', 'pass', configured=False, machine_name=new_machine_name) if do_initial_self_reconfig: yield self.do_test_reconfigService(old, old) self.assertIs(old.machine, expected_old_machine) yield self.do_test_reconfigService(old, new) self.assertIs(old.machine, expected_new_machine) @defer.inlineCallbacks def test_stopService(self): worker = yield self.createWorker() yield worker.startService() reg = worker.registration yield worker.stopService() self.assertTrue(reg.unregistered) self.assertEqual(worker.registration, None) # FIXME: Test that reconfig properly deals with # 1) locks # 2) telling worker about builder # 3) missing timer # in both the initial config and a reconfiguration. def test_startMissingTimer_no_parent(self): bs = ConcreteWorker('bot', 'pass', notify_on_missing=['abc'], missing_timeout=10) bs.startMissingTimer() self.assertEqual(bs.missing_timer, None) def test_startMissingTimer_no_timeout(self): bs = ConcreteWorker('bot', 'pass', notify_on_missing=['abc'], missing_timeout=0) bs.parent = mock.Mock() bs.startMissingTimer() self.assertEqual(bs.missing_timer, None) def test_startMissingTimer_no_notify(self): bs = ConcreteWorker('bot', 'pass', missing_timeout=3600) bs.parent = mock.Mock() bs.running = True bs.startMissingTimer() self.assertNotEqual(bs.missing_timer, None) def test_missing_timer(self): bs = ConcreteWorker('bot', 'pass', notify_on_missing=['abc'], missing_timeout=100) bs.parent = mock.Mock() bs.running = True bs.startMissingTimer() self.assertNotEqual(bs.missing_timer, None) bs.stopMissingTimer() self.assertEqual(bs.missing_timer, None) @defer.inlineCallbacks def test_setServiceParent_started(self): master = self.master bsmanager = master.workers yield 
master.startService() bs = ConcreteWorker('bot', 'pass') yield bs.setServiceParent(bsmanager) self.assertEqual(bs.manager, bsmanager) self.assertEqual(bs.parent, bsmanager) self.assertEqual(bsmanager.master, master) self.assertEqual(bs.master, master) @defer.inlineCallbacks def test_setServiceParent_masterLocks(self): """ http://trac.buildbot.net/ticket/2278 """ master = self.master bsmanager = master.workers yield master.startService() lock = locks.MasterLock('masterlock') bs = ConcreteWorker('bot', 'pass', locks=[lock.access("counting")]) yield bs.setServiceParent(bsmanager) @defer.inlineCallbacks def test_setServiceParent_workerLocks(self): """ http://trac.buildbot.net/ticket/2278 """ master = self.master bsmanager = master.workers yield master.startService() lock = locks.WorkerLock('lock') bs = ConcreteWorker('bot', 'pass', locks=[lock.access("counting")]) yield bs.setServiceParent(bsmanager) @defer.inlineCallbacks def test_startService_paused_true(self): """Test that paused state is restored on a buildbot restart""" self.master.db.insertTestData([ fakedb.Worker(id=9292, name='bot', paused=1) ]) worker = yield self.createWorker() yield worker.startService() self.assertTrue(worker.isPaused()) self.assertFalse(worker._graceful) @defer.inlineCallbacks def test_startService_graceful_true(self): """Test that graceful state is NOT restored on a buildbot restart""" self.master.db.insertTestData([ fakedb.Worker(id=9292, name='bot', graceful=1) ]) worker = yield self.createWorker() yield worker.startService() self.assertFalse(worker.isPaused()) self.assertFalse(worker._graceful) @defer.inlineCallbacks def test_startService_getWorkerInfo_empty(self): worker = yield self.createWorker() yield worker.startService() self.assertEqual(len(worker.info.asDict()), 0) # check that a new worker row was added for this worker bs = yield self.master.db.workers.getWorker(name='bot') self.assertEqual(bs['name'], 'bot') @defer.inlineCallbacks def 
test_startService_getWorkerInfo_fromDb(self): self.master.db.insertTestData([ fakedb.Worker(id=9292, name='bot', info={ 'admin': 'TheAdmin', 'host': 'TheHost', 'access_uri': 'TheURI', 'version': 'TheVersion' }) ]) worker = yield self.createWorker() yield worker.startService() self.assertEqual(worker.workerid, 9292) self.assertEqual(worker.info.asDict(), { 'version': ('TheVersion', 'Worker'), 'admin': ('TheAdmin', 'Worker'), 'host': ('TheHost', 'Worker'), 'access_uri': ('TheURI', 'Worker'), }) @defer.inlineCallbacks def test_attached_remoteGetWorkerInfo(self): worker = yield self.createWorker() yield worker.startService() ENVIRON = {} COMMANDS = {'cmd1': '1', 'cmd2': '1'} conn = fakeprotocol.FakeConnection(worker) conn.info = { 'admin': 'TheAdmin', 'host': 'TheHost', 'access_uri': 'TheURI', 'environ': ENVIRON, 'basedir': 'TheBaseDir', 'system': 'TheWorkerSystem', 'version': 'TheVersion', 'worker_commands': COMMANDS, } yield worker.attached(conn) self.assertEqual(worker.info.asDict(), { 'version': ('TheVersion', 'Worker'), 'admin': ('TheAdmin', 'Worker'), 'host': ('TheHost', 'Worker'), 'access_uri': ('TheURI', 'Worker'), 'basedir': ('TheBaseDir', 'Worker'), 'system': ('TheWorkerSystem', 'Worker'), }) self.assertEqual(worker.worker_environ, ENVIRON) self.assertEqual(worker.worker_basedir, 'TheBaseDir') self.assertEqual(worker.worker_system, 'TheWorkerSystem') self.assertEqual(worker.worker_commands, COMMANDS) @defer.inlineCallbacks def test_attached_callsMaybeStartBuildsForWorker(self): worker = yield self.createWorker() yield worker.startService() yield worker.reconfigServiceWithSibling(worker) conn = fakeprotocol.FakeConnection(worker) conn.info = {} yield worker.attached(conn) self.assertEqual(self.botmaster.buildsStartedForWorkers, ["bot"]) @defer.inlineCallbacks def test_attached_workerInfoUpdates(self): # put in stale info: self.master.db.insertTestData([ fakedb.Worker(name='bot', info={ 'admin': 'WrongAdmin', 'host': 'WrongHost', 'access_uri': 'WrongURI', 
'version': 'WrongVersion' }) ]) worker = yield self.createWorker() yield worker.startService() conn = fakeprotocol.FakeConnection(worker) conn.info = { 'admin': 'TheAdmin', 'host': 'TheHost', 'access_uri': 'TheURI', 'version': 'TheVersion', } yield worker.attached(conn) self.assertEqual(worker.info.asDict(), { 'version': ('TheVersion', 'Worker'), 'admin': ('TheAdmin', 'Worker'), 'host': ('TheHost', 'Worker'), 'access_uri': ('TheURI', 'Worker'), }) # and the db is updated too: db_worker = yield self.master.db.workers.getWorker(name="bot") self.assertEqual(db_worker['workerinfo']['admin'], 'TheAdmin') self.assertEqual(db_worker['workerinfo']['host'], 'TheHost') self.assertEqual(db_worker['workerinfo']['access_uri'], 'TheURI') self.assertEqual(db_worker['workerinfo']['version'], 'TheVersion') @defer.inlineCallbacks def test_worker_shutdown(self): worker = yield self.createWorker(attached=True) yield worker.startService() yield worker.shutdown() self.assertEqual( worker.conn.remoteCalls, [('remoteSetBuilderList', []), ('remoteShutdown',)]) @defer.inlineCallbacks def test_worker_shutdown_not_connected(self): worker = yield self.createWorker(attached=False) yield worker.startService() # No exceptions should be raised here yield worker.shutdown() @defer.inlineCallbacks def test_shutdownRequested(self): worker = yield self.createWorker(attached=False) yield worker.startService() yield worker.shutdownRequested() self.assertEqual(worker._graceful, True) @defer.inlineCallbacks def test_missing_timer_missing(self): worker = yield self.createWorker(attached=False, missing_timeout=1) yield worker.startService() self.assertNotEqual(worker.missing_timer, None) yield self.reactor.advance(1) self.assertEqual(worker.missing_timer, None) self.assertEqual(len(self.master.data.updates.missingWorkers), 1) @defer.inlineCallbacks def test_missing_timer_stopped(self): worker = yield self.createWorker(attached=False, missing_timeout=1) yield worker.startService() 
self.assertNotEqual(worker.missing_timer, None) yield worker.stopService() self.assertEqual(worker.missing_timer, None) self.assertEqual(len(self.master.data.updates.missingWorkers), 0) @defer.inlineCallbacks def test_worker_actions_stop(self): worker = yield self.createWorker(attached=False) yield worker.startService() worker.controlWorker(("worker", 1, "stop"), {'reason': "none"}) self.assertEqual(worker._graceful, True) @defer.inlineCallbacks def test_worker_actions_kill(self): worker = yield self.createWorker(attached=False) yield worker.startService() worker.controlWorker(("worker", 1, "kill"), {'reason': "none"}) self.assertEqual(worker.conn, None) @defer.inlineCallbacks def test_worker_actions_pause(self): worker = yield self.createWorker(attached=False) yield worker.startService() self.assertTrue(worker.canStartBuild()) worker.controlWorker(("worker", 1, "pause"), {"reason": "none"}) self.assertEqual(worker._paused, True) self.assertFalse(worker.canStartBuild()) worker.controlWorker(("worker", 1, "unpause"), {"reason": "none"}) self.assertEqual(worker._paused, False) self.assertTrue(worker.canStartBuild()) @defer.inlineCallbacks def test_worker_quarantine_doesnt_affect_pause(self): worker = yield self.createWorker(attached=False) yield worker.startService() self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) self.assertFalse(worker._paused) # put worker into quarantine. 
# Check canStartBuild() is False, and paused state is not changed worker.putInQuarantine() self.assertFalse(worker._paused) self.assertFalse(worker.canStartBuild()) self.assertIsNotNone(worker.quarantine_timer) # human manually pauses the worker worker.controlWorker(("worker", 1, "pause"), {"reason": "none"}) self.assertTrue(worker._paused) self.assertFalse(worker.canStartBuild()) # simulate wait for quarantine to end # Check canStartBuild() is still False, and paused state is not changed self.master.reactor.advance(10) self.assertTrue(worker._paused) self.assertFalse(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) @defer.inlineCallbacks def test_worker_quarantine_unpausing_exits_quarantine(self): worker = yield self.createWorker(attached=False) yield worker.startService() self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) # put worker into quarantine whilst unpaused. worker.putInQuarantine() self.assertFalse(worker._paused) self.assertFalse(worker.canStartBuild()) # pause and unpause the worker worker.controlWorker(("worker", 1, "pause"), {"reason": "none"}) self.assertFalse(worker.canStartBuild()) worker.controlWorker(("worker", 1, "unpause"), {"reason": "none"}) self.assertTrue(worker.canStartBuild()) # put worker into quarantine whilst paused. 
worker.controlWorker(("worker", 1, "pause"), {"reason": "none"}) worker.putInQuarantine() self.assertTrue(worker._paused) self.assertFalse(worker.canStartBuild()) # unpause worker should start the build worker.controlWorker(("worker", 1, "unpause"), {"reason": "none"}) self.assertFalse(worker._paused) self.assertTrue(worker.canStartBuild()) @defer.inlineCallbacks def test_worker_quarantine_unpausing_doesnt_reset_timeout(self): worker = yield self.createWorker(attached=False) yield worker.startService() self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) # pump up the quarantine wait time for quarantine_wait in (10, 20, 40, 80): worker.putInQuarantine() self.assertFalse(worker.canStartBuild()) self.assertIsNotNone(worker.quarantine_timer) self.master.reactor.advance(quarantine_wait) self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) # put worker into quarantine (160s) worker.putInQuarantine() self.assertFalse(worker._paused) self.assertFalse(worker.canStartBuild()) # pause and unpause the worker to exit quarantine worker.controlWorker(("worker", 1, "pause"), {"reason": "none"}) self.assertFalse(worker.canStartBuild()) worker.controlWorker(("worker", 1, "unpause"), {"reason": "none"}) self.assertFalse(worker._paused) self.assertTrue(worker.canStartBuild()) # next build fails. 
check timeout is 320s worker.putInQuarantine() self.master.reactor.advance(319) self.assertFalse(worker.canStartBuild()) self.assertIsNotNone(worker.quarantine_timer) self.master.reactor.advance(1) self.assertIsNone(worker.quarantine_timer) self.assertTrue(worker.canStartBuild()) @defer.inlineCallbacks def test_worker_quarantine_wait_times(self): worker = yield self.createWorker(attached=False) yield worker.startService() self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) for quarantine_wait in (10, 20, 40, 80, 160, 320, 640, 1280, 2560, 3600, 3600): # put worker into quarantine worker.putInQuarantine() self.assertFalse(worker.canStartBuild()) self.assertIsNotNone(worker.quarantine_timer) # simulate wait just before quarantine ends self.master.reactor.advance(quarantine_wait - 1) self.assertFalse(worker.canStartBuild()) self.assertIsNotNone(worker.quarantine_timer) # simulate wait to just after quarantine ends self.master.reactor.advance(1) self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) @defer.inlineCallbacks def test_worker_quarantine_reset(self): worker = yield self.createWorker(attached=False) yield worker.startService() self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) # pump up the quarantine wait time for quarantine_wait in (10, 20, 40, 80): worker.putInQuarantine() self.assertFalse(worker.canStartBuild()) self.assertIsNotNone(worker.quarantine_timer) self.master.reactor.advance(quarantine_wait) self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) # Now get a successful build worker.resetQuarantine() # the workers quarantine period should reset back to 10 worker.putInQuarantine() self.master.reactor.advance(10) self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) @defer.inlineCallbacks def test_worker_quarantine_whilst_quarantined(self): worker = yield self.createWorker(attached=False) yield 
worker.startService() self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) # put worker in quarantine worker.putInQuarantine() self.assertFalse(worker.canStartBuild()) self.assertIsNotNone(worker.quarantine_timer) # simulate wait for half the time, and put in quarantine again self.master.reactor.advance(5) worker.putInQuarantine() self.assertFalse(worker.canStartBuild()) self.assertIsNotNone(worker.quarantine_timer) # simulate wait for another 5 seconds, and we should leave quarantine self.master.reactor.advance(5) self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) # simulate wait for yet another 5 seconds, and ensure nothing changes self.master.reactor.advance(5) self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) @defer.inlineCallbacks def test_worker_quarantine_stop_timer(self): worker = yield self.createWorker(attached=False) yield worker.startService() self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) # Call stopQuarantineTimer whilst not quarantined worker.stopQuarantineTimer() self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) # Call stopQuarantineTimer whilst quarantined worker.putInQuarantine() self.assertFalse(worker.canStartBuild()) self.assertIsNotNone(worker.quarantine_timer) worker.stopQuarantineTimer() self.assertTrue(worker.canStartBuild()) self.assertIsNone(worker.quarantine_timer) class TestAbstractLatentWorker(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True, wantData=True) self.botmaster = self.master.botmaster yield self.master.workers.disownServiceParent() self.workers = self.master.workers = bworkermanager.FakeWorkerManager() yield self.workers.setServiceParent(self.master) @defer.inlineCallbacks def do_test_reconfigService(self, old, new, existingRegistration=True): old.parent = 
self.master if existingRegistration: old.registration = bworkermanager.FakeWorkerRegistration(old) old.missing_timer = mock.Mock(name='missing_timer') yield old.startService() yield old.reconfigServiceWithSibling(new) @defer.inlineCallbacks def test_reconfigService(self): old = AbstractLatentWorker( "name", "password", build_wait_timeout=10) new = AbstractLatentWorker( "name", "password", build_wait_timeout=30) yield self.do_test_reconfigService(old, new) self.assertEqual(old.build_wait_timeout, 30) buildbot-3.4.0/master/buildbot/test/unit/worker/test_docker.py000066400000000000000000000461151413250514000245520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot import interfaces from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.properties import Property from buildbot.test.fake import docker from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.worker import docker as dockerworker class TestDockerLatentWorker(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setupWorker(self, *args, **kwargs): self.patch(dockerworker, 'docker', docker) worker = dockerworker.DockerLatentWorker(*args, **kwargs) master = fakemaster.make_master(self, wantData=True) fakemaster.master = master worker.setServiceParent(master) yield master.startService() self.addCleanup(master.stopService) return worker def setUp(self): self.setUpTestReactor() self.build = Properties( image='busybox:latest', builder='docker_worker', distro='wheezy') self.build2 = Properties( image='busybox:latest', builder='docker_worker2', distro='wheezy') self.patch(dockerworker, 'client', docker) docker.Client.containerCreated = False @defer.inlineCallbacks def test_constructor_nodocker(self): self.patch(dockerworker, 'client', None) with self.assertRaises(config.ConfigErrors): yield self.setupWorker('bot', 'pass', 'unix://tmp.sock', 'debian:wheezy', []) @defer.inlineCallbacks def test_constructor_noimage_nodockerfile(self): with self.assertRaises(config.ConfigErrors): yield self.setupWorker('bot', 'pass', 'http://localhost:2375') @defer.inlineCallbacks def test_constructor_noimage_dockerfile(self): bs = yield self.setupWorker( 'bot', 'pass', 'http://localhost:2375', dockerfile="FROM ubuntu") self.assertEqual(bs.dockerfile, "FROM ubuntu") self.assertEqual(bs.image, None) @defer.inlineCallbacks def test_constructor_image_nodockerfile(self): bs = yield self.setupWorker( 'bot', 'pass', 
'http://localhost:2375', image="myworker") self.assertEqual(bs.dockerfile, None) self.assertEqual(bs.image, 'myworker') @defer.inlineCallbacks def test_constructor_minimal(self): # Minimal set of parameters bs = yield self.setupWorker('bot', 'pass', 'tcp://1234:2375', 'worker') self.assertEqual(bs.workername, 'bot') self.assertEqual(bs.password, 'pass') self.assertEqual(bs.client_args, {'base_url': 'tcp://1234:2375'}) self.assertEqual(bs.image, 'worker') self.assertEqual(bs.command, []) @defer.inlineCallbacks def test_builds_may_be_incompatible(self): # Minimal set of parameters bs = yield self.setupWorker('bot', 'pass', 'tcp://1234:2375', 'worker') self.assertEqual(bs.builds_may_be_incompatible, True) @defer.inlineCallbacks def test_contruction_minimal_docker_py(self): docker.version = "1.10.6" bs = yield self.setupWorker('bot', 'pass', 'tcp://1234:2375', 'worker') id, name = yield bs.start_instance(self.build) client = docker.APIClient.latest self.assertEqual(client.called_class_name, "Client") client = docker.Client.latest self.assertNotEqual(client.called_class_name, "APIClient") @defer.inlineCallbacks def test_contruction_minimal_docker(self): docker.version = "2.0.0" bs = yield self.setupWorker('bot', 'pass', 'tcp://1234:2375', 'worker') id, name = yield bs.start_instance(self.build) client = docker.Client.latest self.assertEqual(client.called_class_name, "APIClient") client = docker.APIClient.latest self.assertNotEqual(client.called_class_name, "Client") @defer.inlineCallbacks def test_constructor_nopassword(self): # when no password, it is created automatically bs = yield self.setupWorker('bot', None, 'tcp://1234:2375', 'worker') self.assertEqual(bs.workername, 'bot') self.assertEqual(len(bs.password), 20) @defer.inlineCallbacks def test_constructor_all_docker_parameters(self): # Volumes have their own tests bs = yield self.setupWorker('bot', 'pass', 'unix:///var/run/docker.sock', 'worker_img', ['/bin/sh'], dockerfile="FROM ubuntu", version='1.9', tls=True, 
hostconfig={'network_mode': 'fake', 'dns': ['1.1.1.1', '1.2.3.4']}, custom_context=False, buildargs=None, encoding='gzip') self.assertEqual(bs.workername, 'bot') self.assertEqual(bs.password, 'pass') self.assertEqual(bs.image, 'worker_img') self.assertEqual(bs.command, ['/bin/sh']) self.assertEqual(bs.dockerfile, "FROM ubuntu") self.assertEqual(bs.volumes, []) self.assertEqual(bs.client_args, { 'base_url': 'unix:///var/run/docker.sock', 'version': '1.9', 'tls': True}) self.assertEqual( bs.hostconfig, {'network_mode': 'fake', 'dns': ['1.1.1.1', '1.2.3.4']}) self.assertFalse(bs.custom_context) self.assertEqual(bs.buildargs, None) self.assertEqual(bs.encoding, 'gzip') @defer.inlineCallbacks def test_constructor_host_config_build(self): # Volumes have their own tests bs = yield self.setupWorker('bot', 'pass', 'unix:///var/run/docker.sock', 'worker_img', ['/bin/sh'], dockerfile="FROM ubuntu", volumes=["/tmp:/tmp:ro"], hostconfig={'network_mode': 'fake', 'dns': ['1.1.1.1', '1.2.3.4']}, custom_context=False, buildargs=None, encoding='gzip') id, name = yield bs.start_instance(self.build) client = docker.APIClient.latest expected = { 'network_mode': 'fake', 'dns': ['1.1.1.1', '1.2.3.4'], 'binds': ['/tmp:/tmp:ro'], } if dockerworker.docker_py_version >= 2.2: expected['init'] = True self.assertEqual(client.call_args_create_host_config, [expected]) @defer.inlineCallbacks def test_constructor_host_config_build_set_init(self): # Volumes have their own tests bs = yield self.setupWorker('bot', 'pass', 'unix:///var/run/docker.sock', 'worker_img', ['/bin/sh'], dockerfile="FROM ubuntu", volumes=["/tmp:/tmp:ro"], hostconfig={'network_mode': 'fake', 'dns': ['1.1.1.1', '1.2.3.4'], 'init': False}, custom_context=False, buildargs=None, encoding='gzip') id, name = yield bs.start_instance(self.build) client = docker.APIClient.latest self.assertEqual(client.call_args_create_host_config, [ {'network_mode': 'fake', 'dns': ['1.1.1.1', '1.2.3.4'], 'init': False, 'binds': ['/tmp:/tmp:ro'], } ]) 
@defer.inlineCallbacks def test_start_instance_volume_renderable(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'worker', ['bin/bash'], volumes=[Interpolate('/data:/worker/%(kw:builder)s/build', builder=Property('builder'))]) id, name = yield bs.start_instance(self.build) client = docker.Client.latest self.assertEqual(len(client.call_args_create_container), 1) self.assertEqual(client.call_args_create_container[0]['volumes'], ['/worker/docker_worker/build']) @defer.inlineCallbacks def test_interpolate_renderables_for_new_build(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'worker', ['bin/bash'], volumes=[Interpolate('/data:/worker/%(kw:builder)s/build', builder=Property('builder'))]) yield bs.start_instance(self.build) docker.Client.containerCreated = True # the worker recreates the (mock) client on every action, clearing the containers # but stop_instance only works if the there is a docker container running yield bs.stop_instance() self.assertTrue((yield bs.isCompatibleWithBuild(self.build2))) @defer.inlineCallbacks def test_reject_incompatible_build_while_running(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'worker', ['bin/bash'], volumes=[Interpolate('/data:/worker/%(kw:builder)s/build', builder=Property('builder'))]) yield bs.start_instance(self.build) self.assertFalse((yield bs.isCompatibleWithBuild(self.build2))) @defer.inlineCallbacks def test_volume_no_suffix(self): bs = yield self.setupWorker('bot', 'pass', 'tcp://1234:2375', 'worker', ['bin/bash'], volumes=['/src/webapp:/opt/webapp']) yield bs.start_instance(self.build) client = docker.Client.latest self.assertEqual(len(client.call_args_create_container), 1) self.assertEqual(len(client.call_args_create_host_config), 1) self.assertEqual(client.call_args_create_container[0]['volumes'], ['/opt/webapp']) self.assertEqual(client.call_args_create_host_config[0]['binds'], ["/src/webapp:/opt/webapp"]) @defer.inlineCallbacks def 
test_volume_ro_rw(self): bs = yield self.setupWorker('bot', 'pass', 'tcp://1234:2375', 'worker', ['bin/bash'], volumes=['/src/webapp:/opt/webapp:ro', '~:/backup:rw']) yield bs.start_instance(self.build) client = docker.Client.latest self.assertEqual(len(client.call_args_create_container), 1) self.assertEqual(len(client.call_args_create_host_config), 1) self.assertEqual(client.call_args_create_container[0]['volumes'], ['/opt/webapp', '/backup']) self.assertEqual(client.call_args_create_host_config[0]['binds'], ['/src/webapp:/opt/webapp:ro', '~:/backup:rw']) @defer.inlineCallbacks def test_volume_bad_format(self): with self.assertRaises(config.ConfigErrors): yield self.setupWorker('bot', 'pass', 'http://localhost:2375', image="worker", volumes=['abcd=efgh']) @defer.inlineCallbacks def test_volume_bad_format_renderable(self): bs = yield self.setupWorker( 'bot', 'pass', 'http://localhost:2375', image="worker", volumes=[Interpolate('/data==/worker/%(kw:builder)s/build', builder=Property('builder'))]) with self.assertRaises(config.ConfigErrors): yield bs.start_instance(self.build) @defer.inlineCallbacks def test_start_instance_image_no_version(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'busybox', ['bin/bash']) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'busybox') @defer.inlineCallbacks def test_start_instance_image_right_version(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'busybox:latest', ['bin/bash']) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'busybox:latest') @defer.inlineCallbacks def test_start_instance_image_wrong_version(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'busybox:123', ['bin/bash']) with self.assertRaises(interfaces.LatentWorkerCannotSubstantiate): yield bs.start_instance(self.build) @defer.inlineCallbacks def test_start_instance_image_renderable(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 
Property('image'), ['bin/bash']) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'busybox:latest') @defer.inlineCallbacks def test_start_instance_noimage_nodockerfile(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'customworker', ['bin/bash']) with self.assertRaises(interfaces.LatentWorkerCannotSubstantiate): yield bs.start_instance(self.build) @defer.inlineCallbacks def test_start_instance_image_and_dockefile(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'customworker', dockerfile='BUG') with self.assertRaises(interfaces.LatentWorkerCannotSubstantiate): yield bs.start_instance(self.build) @defer.inlineCallbacks def test_start_instance_noimage_gooddockerfile(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'customworker', dockerfile='FROM debian:wheezy') id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'customworker') @defer.inlineCallbacks def test_start_instance_noimage_pull(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'alpine:latest', autopull=True) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'alpine:latest') @defer.inlineCallbacks def test_start_instance_image_pull(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', autopull=True) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'tester:latest') client = docker.Client.latest self.assertEqual(client._pullCount, 0) @defer.inlineCallbacks def test_start_instance_image_alwayspull(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', autopull=True, alwaysPull=True) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'tester:latest') client = docker.Client.latest self.assertEqual(client._pullCount, 1) @defer.inlineCallbacks def test_start_instance_image_noauto_alwayspull(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 
'tester:latest', autopull=False, alwaysPull=True) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'tester:latest') client = docker.Client.latest self.assertEqual(client._pullCount, 0) @defer.inlineCallbacks def test_start_instance_noimage_renderabledockerfile(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'customworker', dockerfile=Interpolate('FROM debian:%(kw:distro)s', distro=Property('distro'))) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'customworker') @defer.inlineCallbacks def test_start_instance_custom_context_and_buildargs(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', dockerfile=Interpolate('FROM debian:latest'), custom_context=True, buildargs={'sample_arg1': 'test_val1'}) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'tester:latest') @defer.inlineCallbacks def test_start_instance_custom_context_no_buildargs(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', dockerfile=Interpolate('FROM debian:latest'), custom_context=True) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'tester:latest') @defer.inlineCallbacks def test_start_instance_buildargs_no_custom_context(self): bs = yield self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', dockerfile=Interpolate('FROM debian:latest'), buildargs={'sample_arg1': 'test_val1'}) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'tester:latest') @defer.inlineCallbacks def test_start_worker_but_already_created_with_same_name(self): bs = yield self.setupWorker( 'existing', 'pass', 'tcp://1234:2375', 'busybox:latest', ['bin/bash']) id, name = yield bs.start_instance(self.build) self.assertEqual(name, 'busybox:latest') @defer.inlineCallbacks def test_constructor_hostname(self): bs = yield self.setupWorker( 'bot', 'pass', 'http://localhost:2375', image="myworker_image", hostname="myworker_hostname") 
self.assertEqual(bs.hostname, 'myworker_hostname') class testDockerPyStreamLogs(unittest.TestCase): def compare(self, result, log): self.assertEqual(result, list(dockerworker._handle_stream_line(log))) def testEmpty(self): self.compare([], '{"stream":"\\n"}\r\n') def testOneLine(self): self.compare( [" ---> Using cache"], '{"stream":" ---\\u003e Using cache\\n"}\r\n') def testMultipleLines(self): self.compare(["Fetched 8298 kB in 3s (2096 kB/s)", "Reading package lists..."], '{"stream": "Fetched 8298 kB in 3s (2096 kB/s)\\nReading ' 'package lists..."}\r\n') def testError(self): self.compare(["ERROR: The command [/bin/sh -c apt-get update && apt-get install -y" " python-dev python-pip] returned a non-zero code: 127"], '{"errorDetail": {"message": "The command [/bin/sh -c apt-get update && ' 'apt-get install -y python-dev python-pip] returned a ' 'non-zero code: 127"},' ' "error": "The command [/bin/sh -c apt-get update && apt-get install -y' ' python-dev python-pip] returned a non-zero code: 127"}\r\n') buildbot-3.4.0/master/buildbot/test/unit/worker/test_ec2.py000066400000000000000000000575731413250514000237660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members # Portions Copyright 2014 Longaccess private company import os from twisted.trial import unittest from buildbot.test.util.warnings import assertNotProducesWarnings from buildbot.warnings import DeprecatedApiWarning try: from moto import mock_ec2 assert mock_ec2 import boto3 assert boto3 from botocore.client import ClientError except ImportError: boto3 = None ec2 = None ClientError = None if boto3 is not None: from buildbot.worker import ec2 # pylint: disable=ungrouped-imports # Current moto (1.3.7) requires dummy credentials to work # https://github.com/spulec/moto/issues/1924 os.environ['AWS_SECRET_ACCESS_KEY'] = 'foobar_secret' os.environ['AWS_ACCESS_KEY_ID'] = 'foobar_key' os.environ['AWS_DEFAULT_REGION'] = 'us-east-1' # redefine the mock_ec2 decorator to skip the test if boto3 or moto # isn't installed def skip_ec2(f): f.skip = "boto3 or moto is not installed" return f if boto3 is None: mock_ec2 = skip_ec2 def anyImageId(c): for image in c.describe_images()['Images']: return image['ImageId'] return 'foo' class TestEC2LatentWorker(unittest.TestCase): ec2_connection = None def setUp(self): super().setUp() if boto3 is None: raise unittest.SkipTest("moto not found") def botoSetup(self, name='latent_buildbot_worker'): # the proxy system is also not properly mocked, so we need to delete environment variables for env in ['http_proxy', 'https_proxy', 'HTTP_PROXY', 'HTTPS_PROXY']: if env in os.environ: del os.environ[env] # create key pair is not correctly mocked and need to have fake aws creds configured kw = dict(region_name='us-east-1', aws_access_key_id='ACCESS_KEY', aws_secret_access_key='SECRET_KEY', aws_session_token='SESSION_TOKEN') c = boto3.client('ec2', **kw) r = boto3.resource('ec2', **kw) try: r.create_key_pair(KeyName=name) except NotImplementedError as e: raise unittest.SkipTest("KeyPairs.create_key_pair not implemented" " in this version of moto, please update.") from e r.create_security_group(GroupName=name, 
Description='the security group') instance = r.create_instances(ImageId=anyImageId(c), MinCount=1, MaxCount=1)[0] c.create_image(InstanceId=instance.id, Name="foo", Description="bar") c.terminate_instances(InstanceIds=[instance.id]) return c, r def _patch_moto_describe_spot_price_history(self, bs, instance_type, price): def fake_describe_price(*args, **kwargs): return { 'SpotPriceHistory': [{'InstanceType': instance_type, 'SpotPrice': price}] } self.patch(bs.ec2.meta.client, "describe_spot_price_history", fake_describe_price) def _patch_moto_describe_spot_instance_requests(self, c, r, bs): this_call = [0] orig_describe_instance = bs.ec2.meta.client.describe_spot_instance_requests def fake_describe_spot_instance_requests(*args, **kwargs): curr_call = this_call[0] this_call[0] += 1 if curr_call == 0: raise ClientError({'Error': {'Code': 'InvalidSpotInstanceRequestID.NotFound'}}, 'DescribeSpotInstanceRequests') if curr_call == 1: return orig_describe_instance(*args, **kwargs) response = orig_describe_instance(*args, **kwargs) instances = r.instances.filter(Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) response['SpotInstanceRequests'][0]['Status']['Code'] = 'fulfilled' response['SpotInstanceRequests'][0]['InstanceId'] = list(instances)[0].id return response self.patch(bs.ec2.meta.client, 'describe_spot_instance_requests', fake_describe_spot_instance_requests) @mock_ec2 def test_constructor_minimal(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name='keypair_name', security_name='security_name', ami=amis[0].id, ) self.assertEqual(bs.workername, 'bot1') self.assertEqual(bs.password, 'sekrit') self.assertEqual(bs.instance_type, 'm1.large') self.assertEqual(bs.ami, amis[0].id) @mock_ec2 def test_constructor_tags(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) tags = 
{'foo': 'bar'} bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name='keypair_name', security_name='security_name', tags=tags, ami=amis[0].id, ) self.assertEqual(bs.tags, tags) @mock_ec2 def test_constructor_region(self): c, r = self.botoSetup() amis = list(r.images.all()) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=amis[0].id, region='us-west-1' ) self.assertEqual(bs.session.region_name, 'us-west-1') @mock_ec2 def test_fail_mixing_classic_and_vpc_ec2_settings(self): c, r = self.botoSetup() amis = list(r.images.all()) def create_worker(): ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', keypair_name="test_key", identifier='publickey', secret_identifier='privatekey', ami=amis[0].id, security_name="classic", subnet_id="sn-1234" ) with self.assertRaises(ValueError): create_worker() @mock_ec2 def test_start_vpc_instance(self): c, r = self.botoSetup() vpc = r.create_vpc(CidrBlock="192.168.0.0/24") subnet = r.create_subnet(VpcId=vpc.id, CidrBlock="192.168.0.0/24") amis = list(r.images.all()) sg = r.create_security_group(GroupName="test_sg", Description="test_sg", VpcId=vpc.id) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_group_ids=[sg.id], subnet_id=subnet.id, ami=amis[0].id ) bs._poll_resolution = 0 instance_id, _, _ = bs._start_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) self.assertEqual(len(instances), 1) self.assertEqual(instances[0].id, instance_id) self.assertEqual(instances[0].subnet_id, subnet.id) self.assertEqual(len(instances[0].security_groups), 1) self.assertEqual(instances[0].security_groups[0]['GroupId'], sg.id) 
self.assertEqual(instances[0].key_name, 'latent_buildbot_worker') @mock_ec2 def test_start_instance(self): c, r = self.botoSetup() amis = list(r.images.all()) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name='keypair_name', security_name='security_name', ami=amis[0].id ) bs._poll_resolution = 1 instance_id, image_id, start_time = bs._start_instance() self.assertTrue(instance_id.startswith('i-')) self.assertTrue(image_id.startswith('ami-')) self.assertTrue(start_time > "00:00:00") instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) self.assertEqual(len(instances), 1) self.assertEqual(instances[0].id, instance_id) self.assertIsNone(instances[0].tags) self.assertEqual(instances[0].id, bs.properties.getProperty('instance')) @mock_ec2 def test_start_instance_volumes(self): c, r = self.botoSetup() block_device_map_arg = [ { 'DeviceName': "/dev/xvdb", 'Ebs': { "VolumeType": "io1", "Iops": 10, "VolumeSize": 20, } }, { 'DeviceName': "/dev/xvdc", 'Ebs': { "VolumeType": "gp2", "VolumeSize": 30, "DeleteOnTermination": False, } }, ] block_device_map_res = [ { 'DeviceName': "/dev/xvdb", 'Ebs': { "VolumeType": "io1", "Iops": 10, "VolumeSize": 20, "DeleteOnTermination": True, } }, { 'DeviceName': "/dev/xvdc", 'Ebs': { "VolumeType": "gp2", "VolumeSize": 30, "DeleteOnTermination": False, } }, ] amis = list(r.images.all()) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=amis[0].id, block_device_map=block_device_map_arg ) # moto does not currently map volumes properly. below ensures # that my conversion code properly composes it, including # delete_on_termination default. 
self.assertEqual(block_device_map_res, bs.block_device_map) @mock_ec2 def test_start_instance_attach_volume(self): c, r = self.botoSetup() vol = r.create_volume(Size=10, AvailabilityZone='us-east-1a') amis = list(r.images.all()) ami = amis[0] bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=ami.id, volumes=[(vol.id, "/dev/sdz")] ) bs._poll_resolution = 0 id, _, _ = bs._start_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) instance = instances[0] sdz = [bm for bm in instance.block_device_mappings if bm['DeviceName'] == '/dev/sdz'][0] self.assertEqual(vol.id, sdz['Ebs']['VolumeId']) @mock_ec2 def test_start_instance_tags(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) tags = {'foo': 'bar'} bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', tags=tags, ami=amis[0].id ) bs._poll_resolution = 0 id, _, _ = bs._start_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) self.assertEqual(len(instances), 1) self.assertEqual(instances[0].id, id) self.assertEqual(instances[0].tags, [{'Value': 'bar', 'Key': 'foo'}]) @mock_ec2 def test_start_instance_ip(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) eip = c.allocate_address(Domain='vpc') elastic_ip = eip['PublicIp'] bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', elastic_ip=elastic_ip, ami=amis[0].id ) bs._poll_resolution = 0 id, _, _ = bs._start_instance() instances = 
r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) addresses = c.describe_addresses()['Addresses'] self.assertEqual(instances[0].id, addresses[0]['InstanceId']) @mock_ec2 def test_start_vpc_spot_instance(self): c, r = self.botoSetup() vpc = r.create_vpc(CidrBlock="192.168.0.0/24") subnet = r.create_subnet(VpcId=vpc.id, CidrBlock="192.168.0.0/24") amis = list(r.images.all()) sg = r.create_security_group(GroupName="test_sg", Description="test_sg", VpcId=vpc.id) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", ami=amis[0].id, spot_instance=True, max_spot_price=1.5, security_group_ids=[sg.id], subnet_id=subnet.id, ) bs._poll_resolution = 0 self._patch_moto_describe_spot_price_history(bs, 'm1.large', price=1.0) self._patch_moto_describe_spot_instance_requests(c, r, bs) instance_id, _, _ = bs._request_spot_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) self.assertTrue(bs.spot_instance) self.assertEqual(len(instances), 1) self.assertEqual(instances[0].id, instance_id) self.assertEqual(instances[0].subnet_id, subnet.id) self.assertEqual(len(instances[0].security_groups), 1) # TODO: As of moto 2.0.2 GroupId is not handled in spot requests # self.assertEqual(instances[0].security_groups[0]['GroupId'], sg.id) @mock_ec2 def test_start_spot_instance(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) product_description = 'Linux/Unix' bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name='keypair_name', security_name='security_name', ami=amis[0].id, spot_instance=True, max_spot_price=1.5, product_description=product_description ) bs._poll_resolution = 0 self._patch_moto_describe_spot_price_history(bs, 'm1.large', price=1.0) 
self._patch_moto_describe_spot_instance_requests(c, r, bs) instance_id, _, _ = bs._request_spot_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) self.assertTrue(bs.spot_instance) self.assertEqual(bs.product_description, product_description) self.assertEqual(len(instances), 1) self.assertEqual(instances[0].id, instance_id) self.assertIsNone(instances[0].tags) @mock_ec2 def test_get_image_ami(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) ami = amis[0] bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=ami.id ) image = bs.get_image() self.assertEqual(image.id, ami.id) @mock_ec2 def test_get_image_owners(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) ami = amis[0] bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', valid_ami_owners=[int(ami.owner_id)] ) image = bs.get_image() self.assertEqual(image.owner_id, ami.owner_id) @mock_ec2 def test_get_image_location(self): c, r = self.botoSetup('latent_buildbot_slave') bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', valid_ami_location_regex='amazon/.*' ) image = bs.get_image() self.assertTrue(image.image_location.startswith("amazon/")) @mock_ec2 def test_get_image_location_not_found(self): def create_worker(): ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', valid_ami_location_regex='foobar.*' ) with 
self.assertRaises(ValueError): create_worker() @mock_ec2 def test_fail_multiplier_and_max_are_none(self): ''' price_multiplier and max_spot_price may not be None at the same time. ''' c, r = self.botoSetup() amis = list(r.images.all()) def create_worker(): ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=amis[0].id, region='us-west-1', spot_instance=True, price_multiplier=None, max_spot_price=None ) with self.assertRaises(ValueError): create_worker() class TestEC2LatentWorkerDefaultKeyairSecurityGroup(unittest.TestCase): ec2_connection = None def setUp(self): super().setUp() if boto3 is None: raise unittest.SkipTest("moto not found") def botoSetup(self): c = boto3.client('ec2', region_name='us-east-1') r = boto3.resource('ec2', region_name='us-east-1') try: r.create_key_pair(KeyName='latent_buildbot_slave') r.create_key_pair(KeyName='test_keypair') except NotImplementedError as e: raise unittest.SkipTest("KeyPairs.create_key_pair not implemented" " in this version of moto, please update.") from e r.create_security_group(GroupName='latent_buildbot_slave', Description='the security group') r.create_security_group(GroupName='test_security_group', Description='other security group') instance = r.create_instances(ImageId=anyImageId(c), MinCount=1, MaxCount=1)[0] c.create_image(InstanceId=instance.id, Name="foo", Description="bar") c.terminate_instances(InstanceIds=[instance.id]) return c, r @mock_ec2 def test_no_default_security_warning_when_security_group_ids(self): c, r = self.botoSetup() amis = list(r.images.all()) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', ami=amis[0].id, keypair_name='test_keypair', subnet_id=["sn-1"] ) self.assertEqual(bs.security_name, None) @mock_ec2 def test_use_non_default_keypair_security(self): c, r = self.botoSetup() amis = 
list(r.images.all()) with assertNotProducesWarnings(DeprecatedApiWarning): bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', ami=amis[0].id, security_name='test_security_group', keypair_name='test_keypair', ) self.assertEqual(bs.keypair_name, 'test_keypair') self.assertEqual(bs.security_name, 'test_security_group') buildbot-3.4.0/master/buildbot/test/unit/worker/test_kubernetes.py000066400000000000000000000123521413250514000254460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.test.fake import fakemaster from buildbot.test.fake.fakebuild import FakeBuildForRendering as FakeBuild from buildbot.test.fake.fakeprotocol import FakeTrivialConnection as FakeBot from buildbot.test.fake.kube import KubeClientService from buildbot.test.util.misc import TestReactorMixin from buildbot.util.kubeclientservice import KubeError from buildbot.util.kubeclientservice import KubeHardcodedConfig from buildbot.worker import kubernetes class FakeResult: code = 204 def mock_delete(*args): return defer.succeed(FakeResult()) class TestKubernetesWorker(TestReactorMixin, unittest.TestCase): worker = None def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def setupWorker(self, *args, **kwargs): config = KubeHardcodedConfig(master_url="https://kube.example.com") self.worker = worker = kubernetes.KubeLatentWorker( *args, kube_config=config, **kwargs) master = fakemaster.make_master(self, wantData=True) self._kube = yield KubeClientService.getService(master, self, kube_config=config) worker.setServiceParent(master) yield master.startService() self.assertTrue(config.running) def cleanup(): self._kube.delete = mock_delete self.addCleanup(master.stopService) self.addCleanup(cleanup) return worker def test_instantiate(self): worker = kubernetes.KubeLatentWorker('worker') # class instantiation configures nothing self.assertEqual(getattr(worker, '_kube', None), None) @defer.inlineCallbacks def test_wrong_arg(self): with self.assertRaises(TypeError): yield self.setupWorker('worker', wrong_param='wrong_param') def test_service_arg(self): return self.setupWorker('worker') @defer.inlineCallbacks def test_builds_may_be_incompatible(self): yield self.setupWorker('worker') # http is 
lazily created on worker substantiation self.assertEqual(self.worker.builds_may_be_incompatible, True) @defer.inlineCallbacks def test_start_service(self): yield self.setupWorker('worker') # http is lazily created on worker substantiation self.assertNotEqual(self.worker._kube, None) @defer.inlineCallbacks def test_start_worker(self): worker = yield self.setupWorker('worker') d = worker.substantiate(None, FakeBuild()) worker.attached(FakeBot()) yield d self.assertEqual(len(worker._kube.pods), 1) pod_name = list(worker._kube.pods.keys())[0] self.assertRegex(pod_name, r'default/buildbot-worker-[0-9a-f]+') pod = worker._kube.pods[pod_name] self.assertEqual( sorted(pod['spec'].keys()), ['containers', 'restartPolicy']) self.assertEqual( sorted(pod['spec']['containers'][0].keys()), ['env', 'image', 'name', 'resources']) self.assertEqual(pod['spec']['containers'][0]['image'], 'rendered:buildbot/buildbot-worker') self.assertEqual(pod['spec']['restartPolicy'], 'Never') @defer.inlineCallbacks def test_start_worker_but_error(self): worker = yield self.setupWorker('worker') def createPod(namespace, spec): raise KubeError({'message': "yeah, but no"}) self.patch(self._kube, 'createPod', createPod) with self.assertRaises(LatentWorkerFailedToSubstantiate): yield worker.substantiate(None, FakeBuild()) self.assertEqual(worker.instance, None) @defer.inlineCallbacks def test_interpolate_renderables_for_new_build(self): build1 = Properties(img_prop="image1") build2 = Properties(img_prop="image2") worker = yield self.setupWorker('worker', image=Interpolate("%(prop:img_prop)s")) yield worker.start_instance(build1) yield worker.stop_instance() self.assertTrue((yield worker.isCompatibleWithBuild(build2))) @defer.inlineCallbacks def test_reject_incompatible_build_while_running(self): build1 = Properties(img_prop="image1") build2 = Properties(img_prop="image2") worker = yield self.setupWorker('worker', image=Interpolate("%(prop:img_prop)s")) yield worker.start_instance(build1) 
self.assertFalse((yield worker.isCompatibleWithBuild(build2))) buildbot-3.4.0/master/buildbot/test/unit/worker/test_libvirt.py000066400000000000000000000301431413250514000247500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import socket from parameterized import parameterized import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.test.fake import libvirt as libvirtfake from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.runprocess import ExpectMaster from buildbot.test.util.runprocess import MasterRunProcessMixin from buildbot.test.util.warnings import assertProducesWarnings from buildbot.warnings import DeprecatedApiWarning from buildbot.worker import libvirt as libvirtworker # The libvirt module has a singleton threadpool within the module which we can't use in tests as # this makes it impossible to run them concurrently. 
To work around this we introduce a per-test # threadpool and access it through a class instance class TestThreadWithQueue(libvirtworker.ThreadWithQueue): def __init__(self, pool, uri): super().__init__(pool, uri, connect_backoff_start_seconds=0, connect_backoff_multiplier=0, connect_backoff_max_wait_seconds=0) def libvirt_open(self): return self.pool.case.libvirt_open(self.uri) class TestServerThreadPool(libvirtworker.ServerThreadPool): ThreadClass = TestThreadWithQueue def __init__(self, case): super().__init__() self.case = case class TestLibvirtWorker(libvirtworker.LibVirtWorker): def __init__(self, case, *args, **kwargs): super().__init__(*args, **kwargs) self.case = case self.pool = case.threadpool class TestException(Exception): pass class TestLibVirtWorker(TestReactorMixin, MasterRunProcessMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setup_master_run_process() self.connections = {} self.patch(libvirtworker, "libvirt", libvirtfake) self.threadpool = TestServerThreadPool(self) def libvirt_open(self, uri): if uri not in self.connections: raise Exception('Could not find test connection') return self.connections[uri] def add_fake_conn(self, uri): conn = libvirtfake.Connection(uri) self.connections[uri] = conn return conn def create_worker(self, *args, **kwargs): worker = TestLibvirtWorker(self, *args, **kwargs) worker.parent = mock.Mock() worker.parent.master = mock.Mock() worker.parent.master.reactor = self.reactor return worker def raise_libvirt_error(self): # Helper method to be used from lambdas as they don't accept statements raise libvirtfake.libvirtError() def test_constructor_nolibvirt(self): self.patch(libvirtworker, "libvirt", None) with self.assertRaises(config.ConfigErrors): self.create_worker('bot', 'pass', None, 'path', 'path') def test_deprecated_connection(self): with assertProducesWarnings(DeprecatedApiWarning, message_pattern='connection argument has been deprecated'): self.create_worker('bot', 'pass', 
libvirtworker.Connection('test'), 'path', 'path') def test_deprecated_connection_and_uri(self): with self.assertRaises(config.ConfigErrors): with assertProducesWarnings(DeprecatedApiWarning, message_pattern='connection argument has been deprecated'): self.create_worker('bot', 'pass', libvirtworker.Connection('test'), 'path', 'path', uri='custom') @defer.inlineCallbacks def test_get_domain_id(self): conn = self.add_fake_conn('fake:///conn') conn.fake_add('bot', 14) bs = self.create_worker('bot', 'pass', hd_image='p', base_image='o', uri='fake:///conn') id = yield bs._get_domain_id() self.assertEqual(id, 14) @defer.inlineCallbacks def test_prepare_base_image_none(self): bs = self.create_worker('bot', 'pass', hd_image='p', base_image=None) yield bs._prepare_base_image() self.assert_all_commands_ran() @defer.inlineCallbacks def test_prepare_base_image_cheap(self): self.expect_commands( ExpectMaster(["qemu-img", "create", "-b", "o", "-f", "qcow2", "p"]) ) bs = self.create_worker('bot', 'pass', hd_image='p', base_image='o') yield bs._prepare_base_image() self.assert_all_commands_ran() @defer.inlineCallbacks def test_prepare_base_image_full(self): self.expect_commands( ExpectMaster(["cp", "o", "p"]) ) bs = self.create_worker('bot', 'pass', hd_image='p', base_image='o') bs.cheap_copy = False yield bs._prepare_base_image() self.assert_all_commands_ran() @defer.inlineCallbacks def test_prepare_base_image_fail(self): self.expect_commands( ExpectMaster(["cp", "o", "p"]) .exit(1) ) bs = self.create_worker('bot', 'pass', hd_image='p', base_image='o') bs.cheap_copy = False with self.assertRaises(LatentWorkerFailedToSubstantiate): yield bs._prepare_base_image() self.assert_all_commands_ran() @defer.inlineCallbacks def _test_stop_instance(self, graceful, fast, expected_destroy, expected_shutdown, shutdown_side_effect=None): domain = mock.Mock() domain.ID.side_effect = lambda: 14 domain.shutdown.side_effect = shutdown_side_effect conn = self.add_fake_conn('fake:///conn') 
conn.fake_add_domain('name', domain) bs = self.create_worker('name', 'p', hd_image='p', base_image='o', uri='fake:///conn', xml='') bs.graceful_shutdown = graceful with mock.patch('os.remove') as remove_mock: yield bs.stop_instance(fast=fast) self.assertEqual(int(expected_destroy), domain.destroy.call_count) self.assertEqual(int(expected_shutdown), domain.shutdown.call_count) remove_mock.assert_called_once_with('p') self.assert_all_commands_ran() @defer.inlineCallbacks def test_stop_instance_destroy(self): yield self._test_stop_instance(graceful=False, fast=False, expected_destroy=True, expected_shutdown=False) @defer.inlineCallbacks def test_stop_instance_shutdown(self): yield self._test_stop_instance(graceful=True, fast=False, expected_destroy=False, expected_shutdown=True) @defer.inlineCallbacks def test_stop_instance_shutdown_fails(self): yield self._test_stop_instance(graceful=True, fast=False, expected_destroy=True, expected_shutdown=True, shutdown_side_effect=TestException) @defer.inlineCallbacks def test_start_instance_connection_fails(self): bs = self.create_worker('b', 'p', hd_image='p', base_image='o', uri='unknown') prep = mock.Mock() prep.side_effect = lambda: defer.succeed(0) self.patch(bs, "_prepare_base_image", prep) with self.assertRaisesRegex(LatentWorkerFailedToSubstantiate, 'Did not receive connection'): yield bs.start_instance(mock.Mock()) self.assertFalse(prep.called) @defer.inlineCallbacks def test_start_instance_already_active(self): conn = self.add_fake_conn('fake:///conn') conn.fake_add('bot', 14) bs = self.create_worker('bot', 'p', hd_image='p', base_image='o', uri='fake:///conn', xml='') prep = mock.Mock() self.patch(bs, "_prepare_base_image", prep) with self.assertRaisesRegex(LatentWorkerFailedToSubstantiate, 'it\'s already active'): yield bs.start_instance(mock.Mock()) self.assertFalse(prep.called) @defer.inlineCallbacks def test_start_instance_domain_id_error(self): conn = self.add_fake_conn('fake:///conn') domain = 
conn.fake_add('bot', 14) domain.ID = self.raise_libvirt_error bs = self.create_worker('bot', 'p', hd_image='p', base_image='o', uri='fake:///conn', xml='') prep = mock.Mock() self.patch(bs, "_prepare_base_image", prep) with self.assertRaisesRegex(LatentWorkerFailedToSubstantiate, 'while retrieving domain ID'): yield bs.start_instance(mock.Mock()) self.assertFalse(prep.called) @defer.inlineCallbacks def test_start_instance_connection_create_fails(self): bs = self.create_worker('bot', 'p', hd_image='p', base_image='o', xml='', uri='fake:///conn') conn = self.add_fake_conn('fake:///conn') conn.createXML = lambda _, __: self.raise_libvirt_error() prep = mock.Mock() prep.side_effect = lambda: defer.succeed(0) self.patch(bs, "_prepare_base_image", prep) with self.assertRaisesRegex(LatentWorkerFailedToSubstantiate, 'error while starting VM'): yield bs.start_instance(mock.Mock()) self.assertTrue(prep.called) @defer.inlineCallbacks def test_start_instance_domain_create_fails(self): bs = self.create_worker('bot', 'p', hd_image='p', base_image='o', uri='fake:///conn') conn = self.add_fake_conn('fake:///conn') domain = conn.fake_add('bot', -1) domain.create = self.raise_libvirt_error prep = mock.Mock() prep.side_effect = lambda: defer.succeed(0) self.patch(bs, "_prepare_base_image", prep) with self.assertRaisesRegex(LatentWorkerFailedToSubstantiate, 'error while starting VM'): yield bs.start_instance(mock.Mock()) self.assertTrue(prep.called) @defer.inlineCallbacks def test_start_instance_xml(self): self.add_fake_conn('fake:///conn') bs = self.create_worker('bot', 'p', hd_image='p', base_image='o', uri='fake:///conn', xml='') prep = mock.Mock() prep.side_effect = lambda: defer.succeed(0) self.patch(bs, "_prepare_base_image", prep) started = yield bs.start_instance(mock.Mock()) self.assertEqual(started, True) @parameterized.expand([ ('set_fqdn', {'masterFQDN': 'somefqdn'}, 'somefqdn'), ('auto_fqdn', {}, socket.getfqdn()), ]) @defer.inlineCallbacks def 
test_start_instance_existing_domain(self, name, kwargs, expect_fqdn): conn = self.add_fake_conn('fake:///conn') domain = conn.fake_add('bot', -1) bs = self.create_worker('bot', 'p', hd_image='p', base_image='o', uri='fake:///conn', **kwargs) prep = mock.Mock() prep.side_effect = lambda: defer.succeed(0) self.patch(bs, "_prepare_base_image", prep) started = yield bs.start_instance(mock.Mock()) self.assertEqual(started, True) self.assertEqual(domain.metadata, { 'buildbot': (libvirtfake.VIR_DOMAIN_METADATA_ELEMENT, 'http://buildbot.net/', ''.format(expect_fqdn), libvirtfake.VIR_DOMAIN_AFFECT_CONFIG) }) buildbot-3.4.0/master/buildbot/test/unit/worker/test_local.py000066400000000000000000000067301413250514000243740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.worker import local class TestLocalWorker(TestReactorMixin, unittest.TestCase): try: from buildbot_worker.bot import LocalWorker as _ # noqa except ImportError: skip = "buildbot-worker package is not installed" def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True, wantData=True) self.botmaster = self.master.botmaster self.workers = self.master.workers def createWorker(self, name='bot', attached=False, configured=True, **kwargs): worker = local.LocalWorker(name, **kwargs) if configured: worker.setServiceParent(self.workers) return worker @defer.inlineCallbacks def test_reconfigService_attrs(self): old = self.createWorker('bot', max_builds=2, notify_on_missing=['me@me.com'], missing_timeout=120, properties={'a': 'b'}) new = self.createWorker('bot', configured=False, max_builds=3, notify_on_missing=['her@me.com'], missing_timeout=121, workdir=os.path.abspath('custom'), properties={'a': 'c'}) old.updateWorker = mock.Mock(side_effect=lambda: defer.succeed(None)) yield old.startService() self.assertEqual( old.remote_worker.bot.basedir, os.path.abspath('basedir/workers/bot')) yield old.reconfigServiceWithSibling(new) self.assertEqual(old.max_builds, 3) self.assertEqual(old.notify_on_missing, ['her@me.com']) self.assertEqual(old.missing_timeout, 121) self.assertEqual(old.properties.getProperty('a'), 'c') self.assertEqual(old.registration.updates, ['bot']) self.assertTrue(old.updateWorker.called) # make sure that we can provide an absolute path self.assertEqual( old.remote_worker.bot.basedir, os.path.abspath('custom')) yield old.stopService() @defer.inlineCallbacks def test_workerinfo(self): wrk = self.createWorker('bot', max_builds=2, notify_on_missing=['me@me.com'], missing_timeout=120, 
properties={'a': 'b'}) yield wrk.startService() info = yield wrk.conn.remoteGetWorkerInfo() self.assertIn("worker_commands", info) yield wrk.stopService() buildbot-3.4.0/master/buildbot/test/unit/worker/test_manager.py000066400000000000000000000101731413250514000247100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from zope.interface import implementer from buildbot import interfaces from buildbot.process import botmaster from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util import service from buildbot.worker import manager as workermanager @implementer(interfaces.IWorker) class FakeWorker(service.BuildbotService): reconfig_count = 0 def __init__(self, workername): super().__init__(name=workername) def reconfigService(self): self.reconfig_count += 1 self.configured = True return defer.succeed(None) class FakeWorker2(FakeWorker): pass class TestWorkerManager(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True) self.master.mq = self.master.mq self.workers = workermanager.WorkerManager(self.master) yield 
self.workers.setServiceParent(self.master) # workers expect a botmaster as well as a manager. self.master.botmaster.disownServiceParent() self.botmaster = botmaster.BotMaster() self.master.botmaster = self.botmaster yield self.master.botmaster.setServiceParent(self.master) self.new_config = mock.Mock() self.workers.startService() def tearDown(self): return self.workers.stopService() @defer.inlineCallbacks def test_reconfigServiceWorkers_add_remove(self): worker = FakeWorker('worker1') self.new_config.workers = [worker] yield self.workers.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(worker.parent, self.workers) self.assertEqual(self.workers.workers, {'worker1': worker}) self.new_config.workers = [] self.assertEqual(worker.running, True) yield self.workers.reconfigServiceWithBuildbotConfig(self.new_config) self.assertEqual(worker.running, False) @defer.inlineCallbacks def test_reconfigServiceWorkers_reconfig(self): worker = FakeWorker('worker1') yield worker.setServiceParent(self.workers) worker.parent = self.master worker.manager = self.workers worker.botmaster = self.master.botmaster worker_new = FakeWorker('worker1') self.new_config.workers = [worker_new] yield self.workers.reconfigServiceWithBuildbotConfig(self.new_config) # worker was not replaced.. 
self.assertIdentical(self.workers.workers['worker1'], worker) @defer.inlineCallbacks def test_reconfigServiceWorkers_class_changes(self): worker = FakeWorker('worker1') yield worker.setServiceParent(self.workers) worker_new = FakeWorker2('worker1') self.new_config.workers = [worker_new] yield self.workers.reconfigServiceWithBuildbotConfig(self.new_config) # worker *was* replaced (different class) self.assertIdentical(self.workers.workers['worker1'], worker_new) @defer.inlineCallbacks def test_newConnection_remoteGetWorkerInfo_failure(self): class Error(RuntimeError): pass conn = mock.Mock() conn.remoteGetWorkerInfo = mock.Mock( return_value=defer.fail(Error())) yield self.assertFailure( self.workers.newConnection(conn, "worker"), Error) buildbot-3.4.0/master/buildbot/test/unit/worker/test_marathon.py000066400000000000000000000212251413250514000251070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.interfaces import LatentWorkerSubstantiatiationCancelled from buildbot.process.properties import Properties from buildbot.test.fake import fakebuild from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.fake.fakeprotocol import FakeTrivialConnection as FakeBot from buildbot.test.util.misc import TestReactorMixin from buildbot.worker.marathon import MarathonLatentWorker class TestMarathonLatentWorker(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.build = Properties( image="busybox:latest", builder="docker_worker") self.worker = None def tearDown(self): if self.worker is not None: class FakeResult: code = 200 self._http.delete = lambda _: defer.succeed(FakeResult()) self.worker.master.stopService() self.flushLoggedErrors(LatentWorkerSubstantiatiationCancelled) def test_constructor_normal(self): worker = MarathonLatentWorker('bot', 'tcp://marathon.local', 'foo', 'bar', 'debian:wheezy') # class instantiation configures nothing self.assertEqual(worker._http, None) @defer.inlineCallbacks def makeWorker(self, **kwargs): kwargs.setdefault('image', 'debian:wheezy') worker = MarathonLatentWorker('bot', 'tcp://marathon.local', **kwargs) self.worker = worker master = fakemaster.make_master(self, wantData=True) self._http = yield fakehttpclientservice.HTTPClientService.getService( master, self, 'tcp://marathon.local', auth=kwargs.get('auth')) yield worker.setServiceParent(master) worker.reactor = self.reactor yield master.startService() worker.masterhash = "masterhash" return worker @defer.inlineCallbacks def test_builds_may_be_incompatible(self): worker = self.worker = yield self.makeWorker() # http is lazily created on worker substantiation self.assertEqual(worker.builds_may_be_incompatible, True) @defer.inlineCallbacks def 
test_start_service(self): worker = self.worker = yield self.makeWorker() # http is lazily created on worker substantiation self.assertNotEqual(worker._http, None) @defer.inlineCallbacks def test_start_worker(self): # http://mesosphere.github.io/marathon/docs/rest-api.html#post-v2-apps worker = yield self.makeWorker() worker.password = "pass" worker.masterFQDN = "master" self._http.expect( method='delete', ep='/v2/apps/buildbot-worker/buildbot-bot-masterhash') self._http.expect( method='post', ep='/v2/apps', json={ 'instances': 1, 'container': { 'docker': { 'image': 'rendered:debian:wheezy', 'network': 'BRIDGE' }, 'type': 'DOCKER' }, 'id': 'buildbot-worker/buildbot-bot-masterhash', 'env': { 'BUILDMASTER': "master", 'BUILDMASTER_PORT': '1234', 'WORKERNAME': 'bot', 'WORKERPASS': "pass" } }, code=201, content_json={'Id': 'id'}) d = worker.substantiate(None, fakebuild.FakeBuildForRendering()) # we simulate a connection worker.attached(FakeBot()) yield d self.assertEqual(worker.instance, {'Id': 'id'}) # teardown makes sure all containers are cleaned up @defer.inlineCallbacks def test_start_worker_but_no_connection_and_shutdown(self): worker = yield self.makeWorker() worker.password = "pass" worker.masterFQDN = "master" self._http.expect( method='delete', ep='/v2/apps/buildbot-worker/buildbot-bot-masterhash') self._http.expect( method='post', ep='/v2/apps', json={ 'instances': 1, 'container': { 'docker': { 'image': 'rendered:debian:wheezy', 'network': 'BRIDGE' }, 'type': 'DOCKER' }, 'id': 'buildbot-worker/buildbot-bot-masterhash', 'env': { 'BUILDMASTER': "master", 'BUILDMASTER_PORT': '1234', 'WORKERNAME': 'bot', 'WORKERPASS': "pass" } }, code=201, content_json={'Id': 'id'}) worker.substantiate(None, fakebuild.FakeBuildForRendering()) self.assertEqual(worker.instance, {'Id': 'id'}) # teardown makes sure all containers are cleaned up @defer.inlineCallbacks def test_start_worker_but_error(self): worker = yield self.makeWorker() self._http.expect( method='delete', 
ep='/v2/apps/buildbot-worker/buildbot-bot-masterhash') self._http.expect( method='post', ep='/v2/apps', json={ 'instances': 1, 'container': { 'docker': { 'image': 'rendered:debian:wheezy', 'network': 'BRIDGE' }, 'type': 'DOCKER' }, 'id': 'buildbot-worker/buildbot-bot-masterhash', 'env': { 'BUILDMASTER': "master", 'BUILDMASTER_PORT': '1234', 'WORKERNAME': 'bot', 'WORKERPASS': "pass" } }, code=404, content_json={'message': 'image not found'}) self._http.expect( method='delete', ep='/v2/apps/buildbot-worker/buildbot-bot-masterhash') d = worker.substantiate(None, fakebuild.FakeBuildForRendering()) self.reactor.advance(.1) with self.assertRaises(Exception): yield d self.assertEqual(worker.instance, None) # teardown makes sure all containers are cleaned up @defer.inlineCallbacks def test_start_worker_with_params(self): # http://mesosphere.github.io/marathon/docs/rest-api.html#post-v2-apps worker = yield self.makeWorker(marathon_extra_config={ 'container': { 'docker': { 'network': None } }, 'env': { 'PARAMETER': 'foo' } }) worker.password = "pass" worker.masterFQDN = "master" self._http.expect( method='delete', ep='/v2/apps/buildbot-worker/buildbot-bot-masterhash') self._http.expect( method='post', ep='/v2/apps', json={ 'instances': 1, 'container': { 'docker': { 'image': 'rendered:debian:wheezy', 'network': None }, 'type': 'DOCKER' }, 'id': 'buildbot-worker/buildbot-bot-masterhash', 'env': { 'BUILDMASTER': "master", 'BUILDMASTER_PORT': '1234', 'WORKERNAME': 'bot', 'WORKERPASS': "pass", 'PARAMETER': 'foo' } }, code=201, content_json={'Id': 'id'}) d = worker.substantiate(None, fakebuild.FakeBuildForRendering()) # we simulate a connection worker.attached(FakeBot()) yield d self.assertEqual(worker.instance, {'Id': 'id'}) # teardown makes sure all containers are cleaned up buildbot-3.4.0/master/buildbot/test/unit/worker/test_openstack.py000066400000000000000000000472241413250514000252740ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright 2013 Cray Inc. import hashlib import mock from twisted.internet import defer from twisted.trial import unittest import buildbot.test.fake.openstack as novaclient from buildbot import config from buildbot import interfaces from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.worker import openstack class TestOpenStackWorker(TestReactorMixin, unittest.TestCase): os_auth = dict( os_username='user', os_password='pass', os_tenant_name='tenant', os_auth_url='auth') os_auth_custom = dict( token='openstack-token', auth_type='token', auth_url='auth') bs_image_args = dict( flavor=1, image='image-uuid', **os_auth) def setUp(self): self.setUpTestReactor() self.patch(openstack, "client", novaclient) self.patch(openstack, "loading", novaclient) self.patch(openstack, "session", novaclient) self.patch(openstack, "NotFound", novaclient.NotFound) self.build = Properties(image=novaclient.TEST_UUIDS['image'], flavor=novaclient.TEST_UUIDS['flavor'], meta_value='value') self.masterhash = hashlib.sha1(b'fake:/master').hexdigest()[:6] @defer.inlineCallbacks def setupWorker(self, *args, **kwargs): worker = openstack.OpenStackLatentWorker(*args, 
**kwargs) master = fakemaster.make_master(self, wantData=True) fakemaster.master = master worker.setServiceParent(master) yield master.startService() self.addCleanup(master.stopService) return worker @defer.inlineCallbacks def test_constructor_nonova(self): self.patch(openstack, "client", None) with self.assertRaises(config.ConfigErrors): yield self.setupWorker('bot', 'pass', **self.bs_image_args) @defer.inlineCallbacks def test_constructor_nokeystoneauth(self): self.patch(openstack, "loading", None) with self.assertRaises(config.ConfigErrors): yield self.setupWorker('bot', 'pass', **self.bs_image_args) @defer.inlineCallbacks def test_constructor_minimal(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) self.assertEqual(bs.workername, 'bot') self.assertEqual(bs.password, 'pass') self.assertEqual(bs.flavor, 1) self.assertEqual(bs.image, 'image-uuid') self.assertEqual(bs.block_devices, None) self.assertIsInstance(bs.novaclient, novaclient.Client) @defer.inlineCallbacks def test_builds_may_be_incompatible(self): # Minimal set of parameters bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) self.assertEqual(bs.builds_may_be_incompatible, True) @defer.inlineCallbacks def test_constructor_minimal_keystone_v3(self): bs = yield self.setupWorker( 'bot', 'pass', os_user_domain='test_oud', os_project_domain='test_opd', **self.bs_image_args) self.assertEqual(bs.workername, 'bot') self.assertEqual(bs.password, 'pass') self.assertEqual(bs.flavor, 1) self.assertEqual(bs.image, 'image-uuid') self.assertEqual(bs.block_devices, None) self.assertIsInstance(bs.novaclient, novaclient.Client) self.assertEqual(bs.novaclient.session.auth.user_domain_name, 'test_oud') self.assertEqual(bs.novaclient.session.auth.project_domain_name, 'test_opd') @defer.inlineCallbacks def test_constructor_token_keystone_v3(self): bs = yield self.setupWorker( 'bot', 'pass', os_auth_args=self.os_auth_custom, **self.bs_image_args) self.assertEqual(bs.workername, 'bot') 
self.assertEqual(bs.password, 'pass') self.assertEqual(bs.flavor, 1) self.assertEqual(bs.image, 'image-uuid') self.assertEqual(bs.block_devices, None) self.assertIsInstance(bs.novaclient, novaclient.Client) self.assertEqual(bs.novaclient.session.auth.user_domain_name, 'token') self.assertEqual(bs.novaclient.session.auth.project_domain_name, 'token') @defer.inlineCallbacks def test_constructor_region(self): bs = yield self.setupWorker( 'bot', 'pass', region="test-region", **self.bs_image_args) self.assertEqual(bs.novaclient.client.region_name, "test-region") @defer.inlineCallbacks def test_constructor_block_devices_default(self): block_devices = [{'uuid': 'uuid', 'volume_size': 10}] bs = yield self.setupWorker('bot', 'pass', flavor=1, block_devices=block_devices, **self.os_auth) self.assertEqual(bs.image, None) self.assertEqual(len(bs.block_devices), 1) self.assertEqual(bs.block_devices, [{'boot_index': 0, 'delete_on_termination': True, 'destination_type': 'volume', 'device_name': 'vda', 'source_type': 'image', 'volume_size': 10, 'uuid': 'uuid'}]) @defer.inlineCallbacks def test_constructor_block_devices_get_sizes(self): block_devices = [ {'source_type': 'image', 'uuid': novaclient.TEST_UUIDS['image']}, {'source_type': 'image', 'uuid': novaclient.TEST_UUIDS['image'], 'volume_size': 4}, {'source_type': 'volume', 'uuid': novaclient.TEST_UUIDS['volume']}, {'source_type': 'snapshot', 'uuid': novaclient.TEST_UUIDS['snapshot']}, ] def check_volume_sizes(_images, _flavors, block_devices, nova_args, metas): self.assertEqual(len(block_devices), 4) self.assertEqual(block_devices[0]['volume_size'], 1) self.assertIsInstance(block_devices[0]['volume_size'], int, "Volume size is an integer.") self.assertEqual(block_devices[1]['volume_size'], 4) self.assertEqual(block_devices[2]['volume_size'], 4) self.assertEqual(block_devices[3]['volume_size'], 2) lw = yield self.setupWorker('bot', 'pass', flavor=1, block_devices=block_devices, **self.os_auth) self.assertEqual(lw.image, None) 
self.assertEqual(lw.block_devices, [{'boot_index': 0, 'delete_on_termination': True, 'destination_type': 'volume', 'device_name': 'vda', 'source_type': 'image', 'volume_size': None, 'uuid': novaclient.TEST_UUIDS['image']}, {'boot_index': 0, 'delete_on_termination': True, 'destination_type': 'volume', 'device_name': 'vda', 'source_type': 'image', 'volume_size': 4, 'uuid': novaclient.TEST_UUIDS['image']}, {'boot_index': 0, 'delete_on_termination': True, 'destination_type': 'volume', 'device_name': 'vda', 'source_type': 'volume', 'volume_size': None, 'uuid': novaclient.TEST_UUIDS['volume']}, {'boot_index': 0, 'delete_on_termination': True, 'destination_type': 'volume', 'device_name': 'vda', 'source_type': 'snapshot', 'volume_size': None, 'uuid': novaclient.TEST_UUIDS['snapshot']}]) self.patch(lw, "_start_instance", check_volume_sizes) yield lw.start_instance(self.build) @defer.inlineCallbacks def test_constructor_block_devices_missing(self): block_devices = [ {'source_type': 'image', 'uuid': '9fb2e6e8-110d-4388-8c23-0fcbd1e2fcc1'}, ] lw = yield self.setupWorker('bot', 'pass', flavor=1, block_devices=block_devices, **self.os_auth) yield self.assertFailure(lw.start_instance(self.build), novaclient.NotFound) @defer.inlineCallbacks def test_constructor_no_image(self): """ Must have one of image or block_devices specified. 
""" with self.assertRaises(ValueError): yield self.setupWorker('bot', 'pass', flavor=1, **self.os_auth) @defer.inlineCallbacks def test_getImage_string(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) image_uuid = yield bs._getImage(self.build) self.assertEqual('image-uuid', image_uuid) @defer.inlineCallbacks def test_getImage_renderable(self): bs = yield self.setupWorker('bot', 'pass', flavor=1, image=Interpolate('%(prop:image)s'), **self.os_auth) image_uuid = yield bs._getImage(self.build) self.assertEqual(novaclient.TEST_UUIDS['image'], image_uuid) @defer.inlineCallbacks def test_getImage_name(self): bs = yield self.setupWorker('bot', 'pass', flavor=1, image='CirrOS 0.3.4', **self.os_auth) image_uuid = yield bs._getImage(self.build) self.assertEqual(novaclient.TEST_UUIDS['image'], image_uuid) @defer.inlineCallbacks def test_getFlavor_string(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) flavor_uuid = yield bs._getFlavor(self.build) self.assertEqual(1, flavor_uuid) @defer.inlineCallbacks def test_getFlavor_renderable(self): bs = yield self.setupWorker('bot', 'pass', image="1", flavor=Interpolate('%(prop:flavor)s'), **self.os_auth) flavor_uuid = yield bs._getFlavor(self.build) self.assertEqual(novaclient.TEST_UUIDS['flavor'], flavor_uuid) @defer.inlineCallbacks def test_getFlavor_name(self): bs = yield self.setupWorker('bot', 'pass', image="1", flavor='m1.small', **self.os_auth) flavor_uuid = yield bs._getFlavor(self.build) self.assertEqual(novaclient.TEST_UUIDS['flavor'], flavor_uuid) @defer.inlineCallbacks def test_start_instance_already_exists(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) bs.instance = mock.Mock() yield self.assertFailure(bs.start_instance(self.build), ValueError) @defer.inlineCallbacks def test_start_instance_first_fetch_fail(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) bs._poll_resolution = 0 self.patch(novaclient.Servers, 'fail_to_get', 
True) self.patch(novaclient.Servers, 'gets_until_disappears', 0) yield self.assertFailure(bs.start_instance(self.build), interfaces.LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_start_instance_fail_to_find(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) bs._poll_resolution = 0 self.patch(novaclient.Servers, 'fail_to_get', True) yield self.assertFailure(bs.start_instance(self.build), interfaces.LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_start_instance_fail_to_start(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) bs._poll_resolution = 0 self.patch(novaclient.Servers, 'fail_to_start', True) yield self.assertFailure(bs.start_instance(self.build), interfaces.LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_start_instance_success(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) bs._poll_resolution = 0 uuid, image_uuid, time_waiting = yield bs.start_instance(self.build) self.assertTrue(uuid) self.assertEqual(image_uuid, 'image-uuid') self.assertTrue(time_waiting) @defer.inlineCallbacks def test_start_instance_check_meta(self): meta_arg = {'some_key': 'some-value', 'BUILDBOT:instance': self.masterhash} bs = yield self.setupWorker('bot', 'pass', meta=meta_arg, **self.bs_image_args) bs._poll_resolution = 0 uuid, image_uuid, time_waiting = yield bs.start_instance(self.build) self.assertIn('meta', bs.instance.boot_kwargs) self.assertEquals(bs.instance.metadata, meta_arg) @defer.inlineCallbacks def test_start_instance_check_meta_renderable(self): meta_arg = {'some_key': Interpolate('%(prop:meta_value)s')} bs = yield self.setupWorker('bot', 'pass', meta=meta_arg, **self.bs_image_args) bs._poll_resolution = 0 uuid, image_uuid, time_waiting = yield bs.start_instance(self.build) self.assertIn('meta', bs.instance.boot_kwargs) self.assertEquals(bs.instance.metadata, {'some_key': 'value', 'BUILDBOT:instance': self.masterhash}) @defer.inlineCallbacks def 
test_start_instance_check_nova_args(self): nova_args = {'some-key': 'some-value'} bs = yield self.setupWorker('bot', 'pass', nova_args=nova_args, **self.bs_image_args) bs._poll_resolution = 0 uuid, image_uuid, time_waiting = yield bs.start_instance(self.build) self.assertIn('meta', bs.instance.boot_kwargs) self.assertEquals(bs.instance.boot_kwargs['some-key'], 'some-value') @defer.inlineCallbacks def test_start_instance_check_nova_args_renderable(self): nova_args = {'some-key': Interpolate('%(prop:meta_value)s')} bs = yield self.setupWorker('bot', 'pass', nova_args=nova_args, **self.bs_image_args) bs._poll_resolution = 0 uuid, image_uuid, time_waiting = yield bs.start_instance(self.build) self.assertIn('meta', bs.instance.boot_kwargs) self.assertEquals(bs.instance.boot_kwargs['some-key'], 'value') @defer.inlineCallbacks def test_interpolate_renderables_for_new_build(self): build1 = Properties(image=novaclient.TEST_UUIDS['image'], block_device="some-device") build2 = Properties(image="build2-image") block_devices = [{'uuid': Interpolate('%(prop:block_device)s'), 'volume_size': 10}] bs = yield self.setupWorker( 'bot', 'pass', block_devices=block_devices, **self.bs_image_args) bs._poll_resolution = 0 yield bs.start_instance(build1) yield bs.stop_instance(build1) self.assertTrue((yield bs.isCompatibleWithBuild(build2))) @defer.inlineCallbacks def test_reject_incompatible_build_while_running(self): build1 = Properties(image=novaclient.TEST_UUIDS['image'], block_device="some-device") build2 = Properties(image="build2-image") block_devices = [{'uuid': Interpolate('%(prop:block_device)s'), 'volume_size': 10}] bs = yield self.setupWorker( 'bot', 'pass', block_devices=block_devices, **self.bs_image_args) bs._poll_resolution = 0 yield bs.start_instance(build1) self.assertFalse((yield bs.isCompatibleWithBuild(build2))) @defer.inlineCallbacks def test_stop_instance_cleanup(self): """ Test cleaning up leftover instances before starting new. 
""" self.patch(novaclient.Servers, 'fail_to_get', False) self.patch(novaclient.Servers, 'gets_until_disappears', 9) novaclient.Servers().create(['bot', novaclient.TEST_UUIDS['image'], novaclient.TEST_UUIDS['flavor']], meta={'BUILDBOT:instance': self.masterhash}) bs = yield self.setupWorker('bot', 'pass', **self.bs_image_args) bs._poll_resolution = 0 uuid, image_uuid, time_waiting = yield bs.start_instance(self.build) self.assertTrue(uuid) self.assertEqual(image_uuid, 'image-uuid') self.assertTrue(time_waiting) @defer.inlineCallbacks def test_stop_instance_not_set(self): """ Test stopping the instance but with no instance to stop. """ bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) bs.instance = None stopped = yield bs.stop_instance() self.assertEqual(stopped, None) @defer.inlineCallbacks def test_stop_instance_missing(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) instance = mock.Mock() instance.id = 'uuid' bs.instance = instance # TODO: Check log for instance not found. bs.stop_instance() @defer.inlineCallbacks def test_stop_instance_fast(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) # Make instance immediately active. self.patch(novaclient.Servers, 'gets_until_active', 0) s = novaclient.Servers() bs.instance = inst = s.create() self.assertIn(inst.id, s.instances) bs.stop_instance(fast=True) self.assertNotIn(inst.id, s.instances) @defer.inlineCallbacks def test_stop_instance_notfast(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) # Make instance immediately active. self.patch(novaclient.Servers, 'gets_until_active', 0) s = novaclient.Servers() bs.instance = inst = s.create() self.assertIn(inst.id, s.instances) bs.stop_instance(fast=False) self.assertNotIn(inst.id, s.instances) @defer.inlineCallbacks def test_stop_instance_unknown(self): bs = yield self.setupWorker( 'bot', 'pass', **self.bs_image_args) # Make instance immediately active. 
self.patch(novaclient.Servers, 'gets_until_active', 0) s = novaclient.Servers() bs.instance = inst = s.create() # Set status to DELETED. Instance should not be deleted when shutting # down as it already is. inst.status = novaclient.DELETED self.assertIn(inst.id, s.instances) bs.stop_instance() self.assertIn(inst.id, s.instances) buildbot-3.4.0/master/buildbot/test/unit/worker/test_protocols_base.py000066400000000000000000000032541413250514000263160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.trial import unittest from buildbot.test.fake import fakeprotocol from buildbot.test.util import protocols from buildbot.test.util.misc import TestReactorMixin from buildbot.worker.protocols import base class TestFakeConnection(protocols.ConnectionInterfaceTest, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.worker = mock.Mock() self.conn = fakeprotocol.FakeConnection(self.worker) class TestConnection(protocols.ConnectionInterfaceTest, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.worker = mock.Mock() self.conn = base.Connection(self.worker.workername) def test_notify(self): cb = mock.Mock() self.conn.notifyOnDisconnect(cb) self.assertEqual(cb.call_args_list, []) self.conn.notifyDisconnected() self.assertNotEqual(cb.call_args_list, []) buildbot-3.4.0/master/buildbot/test/unit/worker/test_protocols_pb.py000066400000000000000000000401551413250514000260060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.spread import pb as twisted_pb from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.util import protocols as util_protocols from buildbot.test.util.misc import TestReactorMixin from buildbot.worker.protocols import base from buildbot.worker.protocols import pb class TestListener(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) def makeListener(self): listener = pb.Listener(self.master) return listener def test_constructor(self): listener = pb.Listener(self.master) self.assertEqual(listener.master, self.master) self.assertEqual(listener._registrations, {}) @defer.inlineCallbacks def test_updateRegistration_simple(self): listener = pb.Listener(self.master) reg = yield listener.updateRegistration('example', 'pass', 'tcp:1234') self.assertEqual(self.master.pbmanager._registrations, [('tcp:1234', 'example', 'pass')]) self.assertEqual( listener._registrations['example'], ('pass', 'tcp:1234', reg)) @defer.inlineCallbacks def test_updateRegistration_pass_changed(self): listener = pb.Listener(self.master) listener.updateRegistration('example', 'pass', 'tcp:1234') reg1 = yield listener.updateRegistration('example', 'pass1', 'tcp:1234') self.assertEqual( listener._registrations['example'], ('pass1', 'tcp:1234', reg1)) self.assertEqual( self.master.pbmanager._unregistrations, [('tcp:1234', 'example')]) @defer.inlineCallbacks def test_updateRegistration_port_changed(self): listener = pb.Listener(self.master) listener.updateRegistration('example', 'pass', 'tcp:1234') reg1 = yield listener.updateRegistration('example', 'pass', 'tcp:4321') self.assertEqual( listener._registrations['example'], ('pass', 'tcp:4321', reg1)) self.assertEqual( self.master.pbmanager._unregistrations, [('tcp:1234', 'example')]) @defer.inlineCallbacks def test_create_connection(self): listener = 
pb.Listener(self.master) worker = mock.Mock() worker.workername = 'test' mind = mock.Mock() listener.updateRegistration('example', 'pass', 'tcp:1234') self.master.workers.register(worker) conn = yield listener._create_connection(mind, worker.workername) mind.broker.transport.setTcpKeepAlive.assert_called_with(1) self.assertIsInstance(conn, pb.Connection) class TestConnectionApi(util_protocols.ConnectionInterfaceTest, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.conn = pb.Connection(self.master, mock.Mock(), mock.Mock()) class TestConnection(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.mind = mock.Mock() self.worker = mock.Mock() def test_constructor(self): conn = pb.Connection(self.master, self.worker, self.mind) self.assertEqual(conn.mind, self.mind) self.assertEqual(conn.master, self.master) self.assertEqual(conn.worker, self.worker) @defer.inlineCallbacks def test_attached(self): conn = pb.Connection(self.master, self.worker, self.mind) att = yield conn.attached(self.mind) self.worker.attached.assert_called_with(conn) self.assertEqual(att, conn) self.reactor.pump([10] * 361) expected_call = [ mock.call('print', message="keepalive"), ] self.assertEqual(self.mind.callRemote.call_args_list, expected_call) conn.detached(self.mind) yield conn.waitShutdown() @defer.inlineCallbacks def test_detached(self): conn = pb.Connection(self.master, self.worker, self.mind) conn.attached(self.mind) conn.detached(self.mind) self.assertEqual(conn.keepalive_timer, None) self.assertEqual(conn.mind, None) yield conn.waitShutdown() def test_loseConnection(self): conn = pb.Connection(self.master, self.worker, self.mind) conn.loseConnection() self.assertEqual(conn.keepalive_timer, None) conn.mind.broker.transport.loseConnection.assert_called_with() def test_remotePrint(self): conn = pb.Connection(self.master, self.worker, 
self.mind) conn.remotePrint(message='test') conn.mind.callRemote.assert_called_with('print', message='test') @defer.inlineCallbacks def test_remoteGetWorkerInfo_slave(self): def side_effect(*args, **kwargs): if args[0] == 'getWorkerInfo': return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) if args[0] == 'getSlaveInfo': return defer.succeed({'info': 'test'}) if args[0] == 'getCommands': return defer.succeed({'x': 1, 'y': 2}) if args[0] == 'getVersion': return defer.succeed('TheVersion') return None self.mind.callRemote.side_effect = side_effect conn = pb.Connection(self.master, self.worker, self.mind) info = yield conn.remoteGetWorkerInfo() r = {'info': 'test', 'worker_commands': { 'y': 2, 'x': 1}, 'version': 'TheVersion'} self.assertEqual(info, r) expected_calls = [ mock.call('getWorkerInfo'), mock.call('print', message='buildbot-slave detected, failing back to deprecated buildslave API. ' '(Ignoring missing getWorkerInfo method.)'), mock.call('getSlaveInfo'), mock.call('getCommands'), mock.call('getVersion'), ] self.assertEqual(self.mind.callRemote.call_args_list, expected_calls) @defer.inlineCallbacks def test_remoteGetWorkerInfo_slave_2_16(self): """In buildslave 2.16 all information about worker is retrieved in a single getSlaveInfo() call.""" def side_effect(*args, **kwargs): if args[0] == 'getWorkerInfo': return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) if args[0] == 'getSlaveInfo': return defer.succeed({ 'info': 'test', 'slave_commands': {'x': 1, 'y': 2}, 'version': 'TheVersion', }) if args[0] == 'print': return None raise ValueError(f"Command unknown: {args}") self.mind.callRemote.side_effect = side_effect conn = pb.Connection(self.master, self.worker, self.mind) info = yield conn.remoteGetWorkerInfo() r = {'info': 'test', 'worker_commands': { 'y': 2, 'x': 1}, 'version': 'TheVersion'} self.assertEqual(info, r) expected_calls = [ mock.call('getWorkerInfo'), 
mock.call('print', message='buildbot-slave detected, failing back to deprecated buildslave API. ' '(Ignoring missing getWorkerInfo method.)'), mock.call('getSlaveInfo'), ] self.assertEqual(self.mind.callRemote.call_args_list, expected_calls) @defer.inlineCallbacks def test_remoteGetWorkerInfo_worker(self): def side_effect(*args, **kwargs): if args[0] == 'getWorkerInfo': return defer.succeed({ 'info': 'test', 'worker_commands': { 'y': 2, 'x': 1 }, 'version': 'TheVersion', }) raise ValueError(f"Command unknown: {args}") self.mind.callRemote.side_effect = side_effect conn = pb.Connection(self.master, self.worker, self.mind) info = yield conn.remoteGetWorkerInfo() r = {'info': 'test', 'worker_commands': { 'y': 2, 'x': 1}, 'version': 'TheVersion'} self.assertEqual(info, r) expected_calls = [mock.call('getWorkerInfo')] self.assertEqual(self.mind.callRemote.call_args_list, expected_calls) @defer.inlineCallbacks def test_remoteGetWorkerInfo_getWorkerInfo_fails(self): def side_effect(*args, **kwargs): if args[0] == 'getWorkerInfo': return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) if args[0] == 'getSlaveInfo': return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) if args[0] == 'getCommands': return defer.succeed({'x': 1, 'y': 2}) if args[0] == 'getVersion': return defer.succeed('TheVersion') if args[0] == 'print': return None raise ValueError(f"Command unknown: {args}") self.mind.callRemote.side_effect = side_effect conn = pb.Connection(self.master, self.worker, self.mind) info = yield conn.remoteGetWorkerInfo() r = {'worker_commands': {'y': 2, 'x': 1}, 'version': 'TheVersion'} self.assertEqual(info, r) expected_calls = [ mock.call('getWorkerInfo'), mock.call('print', message='buildbot-slave detected, failing back to deprecated buildslave API. 
' '(Ignoring missing getWorkerInfo method.)'), mock.call('getSlaveInfo'), mock.call('getCommands'), mock.call('getVersion'), ] self.assertEqual(self.mind.callRemote.call_args_list, expected_calls) @defer.inlineCallbacks def test_remoteGetWorkerInfo_no_info(self): # All remote commands tried in remoteGetWorkerInfo are unavailable. # This should be real old worker... def side_effect(*args, **kwargs): if args[0] == 'print': return None return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) self.mind.callRemote.side_effect = side_effect conn = pb.Connection(self.master, self.worker, self.mind) info = yield conn.remoteGetWorkerInfo() r = {} self.assertEqual(info, r) expected_calls = [ mock.call('getWorkerInfo'), mock.call('print', message='buildbot-slave detected, failing back to deprecated buildslave API. ' '(Ignoring missing getWorkerInfo method.)'), mock.call('getSlaveInfo'), mock.call('getCommands'), mock.call('getVersion'), ] self.assertEqual(self.mind.callRemote.call_args_list, expected_calls) @defer.inlineCallbacks def test_remoteSetBuilderList(self): builders = ['builder1', 'builder2'] self.mind.callRemote.return_value = defer.succeed(builders) conn = pb.Connection(self.master, self.worker, self.mind) r = yield conn.remoteSetBuilderList(builders) self.assertEqual(r, builders) self.assertEqual(conn.builders, builders) self.mind.callRemote.assert_called_with('setBuilderList', builders) def test_remoteStartCommand(self): builders = ['builder'] ret_val = {'builder': mock.Mock()} self.mind.callRemote.return_value = defer.succeed(ret_val) conn = pb.Connection(self.master, self.worker, self.mind) conn.remoteSetBuilderList(builders) RCInstance, builder_name, commandID = base.RemoteCommandImpl( ), "builder", None remote_command, args = "command", {"args": 'args'} conn.remoteStartCommand( RCInstance, builder_name, commandID, remote_command, args) callargs = ret_val['builder'].callRemote.call_args_list[0][0] callargs_without_rc = ( 
callargs[0], callargs[2], callargs[3], callargs[4]) self.assertEqual(callargs_without_rc, ('startCommand', commandID, remote_command, args)) self.assertIsInstance(callargs[1], pb.RemoteCommand) self.assertEqual(callargs[1].impl, RCInstance) @defer.inlineCallbacks def test_do_keepalive(self): conn = pb.Connection(self.master, self.worker, self.mind) yield conn._do_keepalive() self.mind.callRemote.assert_called_with('print', message="keepalive") def test_remoteShutdown(self): self.mind.callRemote.return_value = defer.succeed(None) conn = pb.Connection(self.master, self.worker, self.mind) # note that we do not test the "old way", as it is now *very* old. conn.remoteShutdown() self.mind.callRemote.assert_called_with('shutdown') def test_remoteStartBuild(self): conn = pb.Connection(self.master, self.worker, self.mind) builders = {'builder': mock.Mock()} self.mind.callRemote.return_value = defer.succeed(builders) conn = pb.Connection(self.master, self.worker, self.mind) conn.remoteSetBuilderList(builders) conn.remoteStartBuild('builder') builders['builder'].callRemote.assert_called_with('startBuild') @defer.inlineCallbacks def test_startStopKeepaliveTimer(self): conn = pb.Connection(self.master, self.worker, self.mind) conn.startKeepaliveTimer() self.mind.callRemote.assert_not_called() self.reactor.pump([10] * 361) expected_call = [ mock.call('print', message="keepalive"), ] self.assertEqual(self.mind.callRemote.call_args_list, expected_call) self.reactor.pump([10] * 361) expected_calls = [ mock.call('print', message="keepalive"), mock.call('print', message="keepalive"), ] self.assertEqual(self.mind.callRemote.call_args_list, expected_calls) conn.stopKeepaliveTimer() yield conn.waitShutdown() def test_perspective_shutdown(self): conn = pb.Connection(self.master, self.worker, self.mind) conn.perspective_shutdown() conn.worker.shutdownRequested.assert_called_with() conn.worker.messageReceivedFromWorker.assert_called_with() def test_perspective_keepalive(self): conn = 
pb.Connection(self.master, self.worker, self.mind) conn.perspective_keepalive() conn.worker.messageReceivedFromWorker.assert_called_with() class Test_wrapRemoteException(unittest.TestCase): def test_raises_NoSuchMethod(self): def f(): with pb._wrapRemoteException(): raise twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None) with self.assertRaises(pb._NoSuchMethod): f() def test_raises_unknown(self): class Error(Exception): pass def f(): with pb._wrapRemoteException(): raise Error() with self.assertRaises(Error): f() def test_raises_RemoteError(self): def f(): with pb._wrapRemoteException(): raise twisted_pb.RemoteError( 'twisted.spread.flavors.ProtocolError', None, None) with self.assertRaises(twisted_pb.RemoteError): f() buildbot-3.4.0/master/buildbot/test/unit/worker/test_upcloud.py000066400000000000000000000152761413250514000247620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import hashlib from twisted.internet import defer from twisted.trial import unittest from buildbot import util from buildbot.config import ConfigErrors from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.fake.fakebuild import FakeBuildForRendering as FakeBuild from buildbot.test.fake.fakeprotocol import FakeTrivialConnection as FakeBot from buildbot.test.util.misc import TestReactorMixin from buildbot.worker import upcloud # Please see https://developers.upcloud.com/ for details upcloudStorageTemplatePayload = { 'storages': { 'storage': [ { 'access': 'public', 'title': 'rendered:test-image', 'uuid': '8b47d21b-b4c3-445d-b75c-5a723ff39681' } ] } } upcloudServerCreatePayload = { 'server': { 'hostname': 'worker', 'password': 'supersecret', 'state': 'maintenance', 'uuid': '438b5b08-4147-4193-bf64-a5318f51d3bd', 'title': 'buildbot-worker-87de7e', 'plan': '1xCPU-1GB' } } upcloudServerStartedPayload = { 'server': { 'hostname': 'worker', 'password': 'supersecret', 'state': 'started', 'uuid': '438b5b08-4147-4193-bf64-a5318f51d3bd', 'title': 'buildbot-worker-87de7e', 'plan': '1xCPU-1GB' } } upcloudServerStoppedPayload = { 'server': { 'hostname': 'worker', 'password': 'supersecret', 'state': 'stopped', 'uuid': '438b5b08-4147-4193-bf64-a5318f51d3bd', 'title': 'buildbot-worker-87de7e', 'plan': '1xCPU-1GB' } } class TestUpcloudWorker(TestReactorMixin, unittest.TestCase): worker = None def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def setupWorker(self, *args, **kwargs): worker = upcloud.UpcloudLatentWorker( *args, api_username='test-api-user', api_password='test-api-password', **kwargs) master = fakemaster.make_master(self, wantData=True) self._http = worker.client = yield fakehttpclientservice.HTTPClientService.getService( master, self, upcloud.DEFAULT_BASE_URL, 
auth=('test-api-user', 'test-api-password'), debug=False) worker.setServiceParent(master) yield master.startService() self.masterhash = hashlib.sha1(util.unicode2bytes(master.name)).hexdigest()[:6] self.addCleanup(master.stopService) self.worker = worker return worker def test_instantiate(self): worker = upcloud.UpcloudLatentWorker('test-worker', image='test-image', api_username='test-api-user', api_password='test-api-password') self.failUnlessIsInstance(worker, upcloud.UpcloudLatentWorker) def test_missing_config(self): worker = None with self.assertRaises(ConfigErrors): worker = upcloud.UpcloudLatentWorker('test-worker') with self.assertRaises(ConfigErrors): worker = upcloud.UpcloudLatentWorker('test-worker', image='test-image') with self.assertRaises(ConfigErrors): worker = upcloud.UpcloudLatentWorker('test-worker', image='test-image', api_username='test-api-user') self.assertTrue(worker is None) @defer.inlineCallbacks def test_missing_image(self): worker = yield self.setupWorker('worker', image='no-such-image') self._http.expect(method='get', ep='/storage/template', content_json=upcloudStorageTemplatePayload) with self.assertRaises(LatentWorkerFailedToSubstantiate): yield worker.substantiate(None, FakeBuild()) @defer.inlineCallbacks def test_start_worker(self): worker = yield self.setupWorker('worker', image='test-image') # resolve image to storage uuid self._http.expect(method='get', ep='/storage/template', content_json=upcloudStorageTemplatePayload) # actually start server self._http.expect(method='post', ep='/server', params=None, data=None, json={'server': {'zone': 'de-fra1', 'title': 'buildbot-worker-87de7e', 'hostname': 'worker', 'user_data': '', 'login_user': {'username': 'root', 'ssh_keys': {'ssh_key': []}}, 'password_delivery': 'none', 'storage_devices': {'storage_device': [ {'action': 'clone', 'storage': '8b47d21b-b4c3-445d-b75c-5a723ff39681', 'title': 'buildbot-worker-{}'.format(self.masterhash), 'size': 10, 'tier': 'maxiops'}]}, 'plan': 
'1xCPU-1GB'}}, content_json=upcloudServerCreatePayload, code=202) # determine it's up & running self._http.expect(method='get', ep='/server/438b5b08-4147-4193-bf64-a5318f51d3bd', content_json=upcloudServerStartedPayload) # get root password self._http.expect(method='get', ep='/server/438b5b08-4147-4193-bf64-a5318f51d3bd', content_json=upcloudServerStartedPayload) # stop server self._http.expect(method='post', ep='/server/438b5b08-4147-4193-bf64-a5318f51d3bd/stop', json={'stop_server': {'stop_type': 'hard', 'timeout': '1'}}, content_json=upcloudServerStartedPayload) # now it's stopped self._http.expect(method='get', ep='/server/438b5b08-4147-4193-bf64-a5318f51d3bd', content_json=upcloudServerStoppedPayload) # then delete it self._http.expect(method='delete', ep='/server/438b5b08-4147-4193-bf64-a5318f51d3bd?storages=1', code=204) d = worker.substantiate(None, FakeBuild()) yield worker.attached(FakeBot()) yield d buildbot-3.4.0/master/buildbot/test/unit/www/000077500000000000000000000000001413250514000211765ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/www/__init__.py000066400000000000000000000000001413250514000232750ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/unit/www/test_auth.py000066400000000000000000000237221413250514000235560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse from twisted.cred.credentials import UsernamePassword from twisted.cred.error import UnauthorizedLogin from twisted.internet import defer from twisted.trial import unittest from twisted.web.error import Error from twisted.web.guard import BasicCredentialFactory from twisted.web.guard import HTTPAuthSessionWrapper from twisted.web.resource import IResource from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www import auth class AuthResourceMixin: def setUpAuthResource(self): self.master = self.make_master(url='h:/a/b/') self.auth = self.master.config.www['auth'] self.master.www.auth = self.auth self.auth.master = self.master class AuthRootResource(TestReactorMixin, www.WwwTestMixin, AuthResourceMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpAuthResource() self.rsrc = auth.AuthRootResource(self.master) def test_getChild_login(self): glr = mock.Mock(name='glr') self.master.www.auth.getLoginResource = glr child = self.rsrc.getChild(b'login', mock.Mock(name='req')) self.assertIdentical(child, glr()) def test_getChild_logout(self): glr = mock.Mock(name='glr') self.master.www.auth.getLogoutResource = glr child = self.rsrc.getChild(b'logout', mock.Mock(name='req')) self.assertIdentical(child, glr()) class AuthBase(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.auth = auth.AuthBase() self.master = self.make_master(url='h:/a/b/') self.auth.master = self.master self.req = self.make_request(b'/') @defer.inlineCallbacks def test_maybeAutoLogin(self): self.assertEqual((yield self.auth.maybeAutoLogin(self.req)), None) def test_getLoginResource(self): with self.assertRaises(Error): self.auth.getLoginResource() @defer.inlineCallbacks def test_updateUserInfo(self): self.auth.userInfoProvider = auth.UserInfoProviderBase() 
self.auth.userInfoProvider.getUserInfo = lambda un: {'info': un} self.req.session.user_info = {'username': 'elvira'} yield self.auth.updateUserInfo(self.req) self.assertEqual(self.req.session.user_info, {'info': 'elvira', 'username': 'elvira'}) def getConfigDict(self): self.assertEqual(auth.getConfigDict(), {'name': 'AuthBase'}) class UseAuthInfoProviderBase(unittest.TestCase): @defer.inlineCallbacks def test_getUserInfo(self): uip = auth.UserInfoProviderBase() self.assertEqual((yield uip.getUserInfo('jess')), {'email': 'jess'}) class NoAuth(unittest.TestCase): def test_exists(self): assert auth.NoAuth class RemoteUserAuth(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.auth = auth.RemoteUserAuth(header=b'HDR') self.make_master() self.request = self.make_request(b'/') @defer.inlineCallbacks def test_maybeAutoLogin(self): self.request.input_headers[b'HDR'] = b'rachel@foo.com' yield self.auth.maybeAutoLogin(self.request) self.assertEqual(self.request.session.user_info, { 'username': 'rachel', 'realm': 'foo.com', 'email': 'rachel'}) @defer.inlineCallbacks def test_maybeAutoLogin_no_header(self): try: yield self.auth.maybeAutoLogin(self.request) except Error as e: self.assertEqual(int(e.status), 403) else: self.fail("403 expected") @defer.inlineCallbacks def test_maybeAutoLogin_mismatched_value(self): self.request.input_headers[b'HDR'] = b'rachel' try: yield self.auth.maybeAutoLogin(self.request) except Error as e: self.assertEqual(int(e.status), 403) else: self.fail("403 expected") class AuthRealm(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.auth = auth.RemoteUserAuth(header=b'HDR') self.auth = auth.NoAuth() self.make_master() def test_requestAvatar(self): realm = auth.AuthRealm(self.master, self.auth) itfc, rsrc, logout = realm.requestAvatar("me", None, IResource) self.assertIdentical(itfc, IResource) self.assertIsInstance(rsrc, 
auth.PreAuthenticatedLoginResource) class TwistedICredAuthBase(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() # twisted.web makes it difficult to simulate the authentication process, so # this only tests the mechanics of the getLoginResource method. def test_getLoginResource(self): self.auth = auth.TwistedICredAuthBase( credentialFactories=[BasicCredentialFactory("buildbot")], checkers=[InMemoryUsernamePasswordDatabaseDontUse(good=b'guy')]) self.auth.master = self.make_master(url='h:/a/b/') rsrc = self.auth.getLoginResource() self.assertIsInstance(rsrc, HTTPAuthSessionWrapper) class UserPasswordAuth(www.WwwTestMixin, unittest.TestCase): def test_passwordStringToBytes(self): login = {"user_string": "password", "user_bytes": b"password"} correct_login = {b"user_string": b"password", b"user_bytes": b"password"} self.auth = auth.UserPasswordAuth(login) self.assertEqual(self.auth.checkers[0].users, correct_login) login = [("user_string", "password"), ("user_bytes", b"password")] correct_login = {b"user_string": b"password", b"user_bytes": b"password"} self.auth = auth.UserPasswordAuth(login) self.assertEqual(self.auth.checkers[0].users, correct_login) class CustomAuth(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): class MockCustomAuth(auth.CustomAuth): def check_credentials(self, us, ps): return us == 'fellow' and ps == 'correct' def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_callable(self): self.auth = self.MockCustomAuth() cred_good = UsernamePassword('fellow', 'correct') result_good = yield self.auth.checkers[0].requestAvatarId(cred_good) self.assertEqual(result_good, 'fellow') cred_bad = UsernamePassword('bandid', 'incorrect') defer_bad = self.auth.checkers[0].requestAvatarId(cred_bad) yield self.assertFailure(defer_bad, UnauthorizedLogin) class LoginResource(TestReactorMixin, www.WwwTestMixin, AuthResourceMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() 
self.setUpAuthResource() @defer.inlineCallbacks def test_render(self): self.rsrc = auth.LoginResource(self.master) self.rsrc.renderLogin = mock.Mock( spec=self.rsrc.renderLogin, return_value=defer.succeed(b'hi')) yield self.render_resource(self.rsrc, b'/auth/login') self.rsrc.renderLogin.assert_called_with(mock.ANY) class PreAuthenticatedLoginResource(TestReactorMixin, www.WwwTestMixin, AuthResourceMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpAuthResource() self.rsrc = auth.PreAuthenticatedLoginResource(self.master, 'him') @defer.inlineCallbacks def test_render(self): self.auth.maybeAutoLogin = mock.Mock() def updateUserInfo(request): session = request.getSession() session.user_info['email'] = session.user_info['username'] + "@org" session.updateSession(request) self.auth.updateUserInfo = mock.Mock(side_effect=updateUserInfo) res = yield self.render_resource(self.rsrc, b'/auth/login') self.assertEqual(res, {'redirected': b'h:/a/b/#/'}) self.assertFalse(self.auth.maybeAutoLogin.called) self.auth.updateUserInfo.assert_called_with(mock.ANY) self.assertEqual(self.master.session.user_info, {'email': 'him@org', 'username': 'him'}) class LogoutResource(TestReactorMixin, www.WwwTestMixin, AuthResourceMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpAuthResource() self.rsrc = auth.LogoutResource(self.master) @defer.inlineCallbacks def test_render(self): self.master.session.expire = mock.Mock() res = yield self.render_resource(self.rsrc, b'/auth/logout') self.assertEqual(res, {'redirected': b'h:/a/b/#/'}) self.master.session.expire.assert_called_with() @defer.inlineCallbacks def test_render_with_crlf(self): self.master.session.expire = mock.Mock() res = yield self.render_resource(self.rsrc, b'/auth/logout?redirect=%0d%0abla') # everything after a %0d shall be stripped self.assertEqual(res, {'redirected': b'h:/a/b/#'}) self.master.session.expire.assert_called_with() 
buildbot-3.4.0/master/buildbot/test/unit/www/test_authz.py000066400000000000000000000242701413250514000237470ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test import fakedb from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www import authz from buildbot.www.authz.endpointmatchers import AnyEndpointMatcher from buildbot.www.authz.endpointmatchers import BranchEndpointMatcher from buildbot.www.authz.endpointmatchers import ForceBuildEndpointMatcher from buildbot.www.authz.endpointmatchers import RebuildBuildEndpointMatcher from buildbot.www.authz.endpointmatchers import StopBuildEndpointMatcher from buildbot.www.authz.endpointmatchers import ViewBuildsEndpointMatcher from buildbot.www.authz.roles import RolesFromDomain from buildbot.www.authz.roles import RolesFromEmails from buildbot.www.authz.roles import RolesFromGroups from buildbot.www.authz.roles import RolesFromOwner class Authz(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() authzcfg = authz.Authz( # simple matcher with '*' glob character stringsMatcher=authz.fnmatchStrMatcher, # stringsMatcher = authz.Authz.reStrMatcher, # if you prefer # regular 
expressions allowRules=[ # admins can do anything, # defaultDeny=False: if user does not have the admin role, we # continue parsing rules AnyEndpointMatcher(role="admins", defaultDeny=False), # rules for viewing builds, builders, step logs # depending on the sourcestamp or buildername ViewBuildsEndpointMatcher( branch="secretbranch", role="agents"), ViewBuildsEndpointMatcher( project="secretproject", role="agents"), ViewBuildsEndpointMatcher(branch="*", role="*"), ViewBuildsEndpointMatcher(project="*", role="*"), StopBuildEndpointMatcher(role="owner"), RebuildBuildEndpointMatcher(role="owner"), # nine-* groups can do stuff on the nine branch BranchEndpointMatcher(branch="nine", role="nine-*"), # eight-* groups can do stuff on the eight branch BranchEndpointMatcher(branch="eight", role="eight-*"), # *-try groups can start "try" builds ForceBuildEndpointMatcher(builder="try", role="*-developers"), # *-mergers groups can start "merge" builds ForceBuildEndpointMatcher(builder="merge", role="*-mergers"), # *-releasers groups can start "release" builds ForceBuildEndpointMatcher( builder="release", role="*-releasers"), ], roleMatchers=[ RolesFromGroups(groupPrefix="buildbot-"), RolesFromEmails(admins=["homer@springfieldplant.com"], agents=["007@mi6.uk"]), RolesFromOwner(role="owner"), RolesFromDomain(admins=["mi7.uk"]) ] ) self.users = dict(homer=dict(email="homer@springfieldplant.com"), bond=dict(email="007@mi6.uk"), moneypenny=dict(email="moneypenny@mi7.uk"), nineuser=dict(email="user@nine.com", groups=["buildbot-nine-mergers", "buildbot-nine-developers"]), eightuser=dict( email="user@eight.com", groups=["buildbot-eight-deverlopers"]) ) self.master = self.make_master(url='h:/a/b/', authz=authzcfg) self.authz = self.master.authz self.master.db.insertTestData([ fakedb.Builder(id=77, name="mybuilder"), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildsetProperty(buildsetid=8822, property_name='owner', 
property_value='["user@nine.com", "force"]'), fakedb.BuildRequest(id=82, buildsetid=8822, builderid=77), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Build(id=14, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=4), fakedb.Build(id=15, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=5), ]) def setAllowRules(self, allow_rules): # we should add links to authz and master instances in each new rule for r in allow_rules: r.setAuthz(self.authz) self.authz.allowRules = allow_rules def assertUserAllowed(self, ep, action, options, user): return self.authz.assertUserAllowed(tuple(ep.split("/")), action, options, self.users[user]) @defer.inlineCallbacks def assertUserForbidden(self, ep, action, options, user): try: yield self.authz.assertUserAllowed(tuple(ep.split("/")), action, options, self.users[user]) except authz.Forbidden as err: self.assertIn('need to have role', repr(err)) @defer.inlineCallbacks def test_anyEndpoint(self): yield self.assertUserAllowed("foo/bar", "get", {}, "homer") yield self.assertUserAllowed("foo/bar", "get", {}, "moneypenny") yield self.assertUserForbidden("foo/bar", "get", {}, "bond") @defer.inlineCallbacks def test_stopBuild(self): # admin can always stop yield self.assertUserAllowed("builds/13", "stop", {}, "homer") # owner can always stop yield self.assertUserAllowed("builds/13", "stop", {}, "nineuser") yield self.assertUserAllowed("buildrequests/82", "stop", {}, "nineuser") # not owner cannot stop yield self.assertUserForbidden("builds/13", "stop", {}, "eightuser") yield self.assertUserForbidden("buildrequests/82", "stop", {}, "eightuser") @defer.inlineCallbacks def test_rebuildBuild(self): # admin can rebuild yield self.assertUserAllowed("builds/13", "rebuild", {}, "homer") # owner can always rebuild yield self.assertUserAllowed("builds/13", "rebuild", {}, "nineuser") yield self.assertUserAllowed("buildrequests/82", "rebuild", {}, "nineuser") # not owner 
cannot rebuild yield self.assertUserForbidden("builds/13", "rebuild", {}, "eightuser") yield self.assertUserForbidden("buildrequests/82", "rebuild", {}, "eightuser") @defer.inlineCallbacks def test_fnmatchPatternRoleCheck(self): # defaultDeny is True by default so action is denied if no match allow_rules = [ AnyEndpointMatcher(role="[a,b]dmin?") ] self.setAllowRules(allow_rules) yield self.assertUserAllowed("builds/13", "rebuild", {}, "homer") # check if action is denied with self.assertRaisesRegex(authz.Forbidden, '403 you need to have role .+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "nineuser") with self.assertRaisesRegex(authz.Forbidden, '403 you need to have role .+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "eightuser") @defer.inlineCallbacks def test_regexPatternRoleCheck(self): # change matcher self.authz.match = authz.reStrMatcher # defaultDeny is True by default so action is denied if no match allow_rules = [ AnyEndpointMatcher(role="(admin|agent)s"), ] self.setAllowRules(allow_rules) yield self.assertUserAllowed("builds/13", "rebuild", {}, "homer") yield self.assertUserAllowed("builds/13", "rebuild", {}, "bond") # check if action is denied with self.assertRaisesRegex(authz.Forbidden, '403 you need to have role .+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "nineuser") with self.assertRaisesRegex(authz.Forbidden, '403 you need to have role .+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "eightuser") @defer.inlineCallbacks def test_DefaultDenyFalseContinuesCheck(self): # defaultDeny is True in the last rule so action is denied in the last check allow_rules = [ AnyEndpointMatcher(role="not-exists1", defaultDeny=False), AnyEndpointMatcher(role="not-exists2", defaultDeny=False), AnyEndpointMatcher(role="not-exists3", defaultDeny=True) ] self.setAllowRules(allow_rules) # check if action is denied and last check was exact against not-exist3 with self.assertRaisesRegex(authz.Forbidden, 
'.+not-exists3.+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "nineuser") @defer.inlineCallbacks def test_DefaultDenyTrueStopsCheckIfFailed(self): # defaultDeny is True in the first rule so action is denied in the first check allow_rules = [ AnyEndpointMatcher(role="not-exists1", defaultDeny=True), AnyEndpointMatcher(role="not-exists2", defaultDeny=False), AnyEndpointMatcher(role="not-exists3", defaultDeny=False) ] self.setAllowRules(allow_rules) # check if action is denied and last check was exact against not-exist1 with self.assertRaisesRegex(authz.Forbidden, '.+not-exists1.+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "nineuser") buildbot-3.4.0/master/buildbot/test/unit/www/test_avatar.py000066400000000000000000001104631413250514000240720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www import auth from buildbot.www import avatar class TestAvatar(avatar.AvatarBase): def getUserAvatar(self, email, username, size, defaultAvatarUrl): return defer.succeed((b"image/png", '{!r} {!r} {!r}'.format( email, size, defaultAvatarUrl).encode('utf-8'))) class AvatarResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_default(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/') self.assertEqual( res, dict(redirected=avatar.AvatarResource.defaultAvatarUrl)) @defer.inlineCallbacks def test_gravatar(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[avatar.AvatarGravatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo') self.assertEqual(res, dict(redirected=b'//www.gravatar.com/avatar/acbd18db4cc2f85ce' b'def654fccc4a4d8?d=retro&s=32')) @defer.inlineCallbacks def test_avatar_call(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[TestAvatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo') self.assertEqual(res, b"b'foo' 32 b'http://a/b/img/nobody.png'") @defer.inlineCallbacks def test_custom_size(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[TestAvatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield 
self.render_resource(rsrc, b'/?email=foo&size=64') self.assertEqual(res, b"b'foo' 64 b'http://a/b/img/nobody.png'") @defer.inlineCallbacks def test_invalid_size(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[TestAvatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo&size=abcd') self.assertEqual(res, b"b'foo' 32 b'http://a/b/img/nobody.png'") @defer.inlineCallbacks def test_custom_not_found(self): # use gravatar if the custom avatar fail to return a response class CustomAvatar(avatar.AvatarBase): def getUserAvatar(self, email, username, size, defaultAvatarUrl): return defer.succeed(None) master = self.make_master(url=b'http://a/b/', auth=auth.NoAuth(), avatar_methods=[CustomAvatar(), avatar.AvatarGravatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo') self.assertEqual(res, dict(redirected=b'//www.gravatar.com/avatar/acbd18db4cc2f85ce' b'def654fccc4a4d8?d=retro&s=32')) github_username_search_reply = { "login": "defunkt", "id": 42424242, "node_id": "MDQ6VXNlcjQyNDI0MjQy", "avatar_url": "https://avatars3.githubusercontent.com/u/42424242?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt", "html_url": "https://github.com/defunkt", "followers_url": "https://api.github.com/users/defunkt/followers", "following_url": "https://api.github.com/users/defunkt/following{/other_user}", "gists_url": "https://api.github.com/users/defunkt/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt/subscriptions", "organizations_url": "https://api.github.com/users/defunkt/orgs", "repos_url": "https://api.github.com/users/defunkt/repos", "events_url": "https://api.github.com/users/defunkt/events{/privacy}", "received_events_url": 
"https://api.github.com/users/defunkt/received_events", "type": "User", "site_admin": False, "name": "Defunkt User", "company": None, "blog": "", "location": None, "email": None, "hireable": None, "bio": None, "twitter_username": None, "public_repos": 1, "public_gists": 1, "followers": 1, "following": 1, "created_at": "2000-01-01T00:00:00Z", "updated_at": "2021-01-01T00:00:00Z" } github_username_not_found_reply = { "message": "Not Found", "documentation_url": "https://docs.github.com/rest/reference/users#get-a-user" } github_email_search_reply = { "total_count": 1, "incomplete_results": False, "items": [ { "login": "defunkt", "id": 42424242, "node_id": "MDQ6VXNlcjQyNDI0MjQy", "avatar_url": "https://avatars3.githubusercontent.com/u/42424242?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt", "html_url": "https://github.com/defunkt", "followers_url": "https://api.github.com/users/defunkt/followers", "following_url": "https://api.github.com/users/defunkt/following{/other_user}", "gists_url": "https://api.github.com/users/defunkt/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt/subscriptions", "organizations_url": "https://api.github.com/users/defunkt/orgs", "repos_url": "https://api.github.com/users/defunkt/repos", "events_url": "https://api.github.com/users/defunkt/events{/privacy}", "received_events_url": "https://api.github.com/users/defunkt/received_events", "type": "User", "site_admin": False, "score": 1.0 } ] } github_email_search_not_found_reply = { "total_count": 0, "incomplete_results": False, "items": [ ] } github_commit_search_reply = { "total_count": 1, "incomplete_results": False, "items": [ { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits/1111111111111111111111111111111111111111", "sha": "1111111111111111111111111111111111111111", "node_id": 
"MDY6Q29tbWl0NDM0MzQzNDM6MTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTEx", "html_url": "https://github.com/defunkt-org/defunkt-repo/" "commit/1111111111111111111111111111111111111111", "comments_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits/1111111111111111111111111111111111111111/comments", "commit": { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/commits/1111111111111111111111111111111111111111", "author": { "date": "2021-01-01T01:01:01.000-01:00", "name": "Defunkt User", "email": "defunkt@defunkt.com" }, "committer": { "date": "2021-01-01T01:01:01.000-01:00", "name": "Defunkt User", "email": "defunkt@defunkt.com" }, "message": "defunkt message", "tree": { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/trees/2222222222222222222222222222222222222222", "sha": "2222222222222222222222222222222222222222" }, "comment_count": 0 }, "author": { "login": "defunkt", "id": 42424242, "node_id": "MDQ6VXNlcjQyNDI0MjQy", "avatar_url": "https://avatars3.githubusercontent.com/u/42424242?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt", "html_url": "https://github.com/defunkt", "followers_url": "https://api.github.com/users/defunkt/followers", "following_url": "https://api.github.com/users/defunkt/following{/other_user}", "gists_url": "https://api.github.com/users/defunkt/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt/subscriptions", "organizations_url": "https://api.github.com/users/defunkt/orgs", "repos_url": "https://api.github.com/users/defunkt/repos", "events_url": "https://api.github.com/users/defunkt/events{/privacy}", "received_events_url": "https://api.github.com/users/defunkt/received_events", "type": "User", "site_admin": False }, "committer": { "login": "defunkt", "id": 42424242, "node_id": "MDQ6VXNlcjQyNDI0MjQy", "avatar_url": 
"https://avatars3.githubusercontent.com/u/42424242?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt", "html_url": "https://github.com/defunkt", "followers_url": "https://api.github.com/users/defunkt/followers", "following_url": "https://api.github.com/users/defunkt/following{/other_user}", "gists_url": "https://api.github.com/users/defunkt/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt/subscriptions", "organizations_url": "https://api.github.com/users/defunkt/orgs", "repos_url": "https://api.github.com/users/defunkt/repos", "events_url": "https://api.github.com/users/defunkt/events{/privacy}", "received_events_url": "https://api.github.com/users/defunkt/received_events", "type": "User", "site_admin": False }, "parents": [ { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits/3333333333333333333333333333333333333333", "html_url": "https://github.com/defunkt-org/defunkt-repo/" "commit/3333333333333333333333333333333333333333", "sha": "3333333333333333333333333333333333333333" } ], "repository": { "id": 43434343, "node_id": "MDEwOlJlcG9zaXRvcnk0MzQzNDM0Mw==", "name": "defunkt-repo", "full_name": "defunkt-org/defunkt-repo", "private": False, "owner": { "login": "defunkt-org", "id": 44444444, "node_id": "MDEyOk9yZ2FuaXphdGlvbjQ0NDQ0NDQ0", "avatar_url": "https://avatars2.githubusercontent.com/u/44444444?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt-org", "html_url": "https://github.com/defunkt-org", "followers_url": "https://api.github.com/users/defunkt-org/followers", "following_url": "https://api.github.com/users/defunkt-org/" "following{/other_user}", "gists_url": "https://api.github.com/users/defunkt-org/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt-org/" "starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt-org/subscriptions", 
"organizations_url": "https://api.github.com/users/defunkt-org/orgs", "repos_url": "https://api.github.com/users/defunkt-org/repos", "events_url": "https://api.github.com/users/defunkt-org/events{/privacy}", "received_events_url": "https://api.github.com/users/defunkt-org/" "received_events", "type": "Organization", "site_admin": False }, "html_url": "https://github.com/defunkt-org/defunkt-repo", "description": "defunkt project", "fork": False, "url": "https://api.github.com/repos/defunkt-org/defunkt-repo", "forks_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/forks", "keys_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/keys{/key_id}", "collaborators_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "collaborators{/collaborator}", "teams_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/teams", "hooks_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/hooks", "issue_events_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "issues/events{/number}", "events_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/events", "assignees_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "assignees{/user}", "branches_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "branches{/branch}", "tags_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/tags", "blobs_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/blobs{/sha}", "git_tags_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/tags{/sha}", "git_refs_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/refs{/sha}", "trees_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/trees{/sha}", "statuses_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "statuses/{sha}", "languages_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "languages", "stargazers_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" 
"stargazers", "contributors_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "contributors", "subscribers_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "subscribers", "subscription_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "subscription", "commits_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits{/sha}", "git_commits_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/commits{/sha}", "comments_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "comments{/number}", "issue_comment_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "issues/comments{/number}", "contents_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "contents/{+path}", "compare_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "compare/{base}...{head}", "merges_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/merges", "archive_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "{archive_format}{/ref}", "downloads_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "downloads", "issues_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "issues{/number}", "pulls_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "pulls{/number}", "milestones_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "milestones{/number}", "notifications_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "notifications{?since,all,participating}", "labels_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "labels{/name}", "releases_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "releases{/id}", "deployments_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "deployments" }, "score": 1.0 } ] } github_commit_search_no_user_reply = { "total_count": 1, "incomplete_results": False, "items": [ { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" 
"commits/1111111111111111111111111111111111111111", "sha": "1111111111111111111111111111111111111111", "node_id": "MDY6Q29tbWl0NDM0MzQzNDM6MTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTEx", "html_url": "https://github.com/defunkt-org/defunkt-repo/" "commit/1111111111111111111111111111111111111111", "comments_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits/1111111111111111111111111111111111111111/comments", "commit": { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/commits/1111111111111111111111111111111111111111", "author": { "date": "2021-01-01T01:01:01.000-01:00", "name": "Defunkt User", "email": "defunkt@defunkt.com" }, "committer": { "date": "2021-01-01T01:01:01.000-01:00", "name": "Defunkt User", "email": "defunkt@defunkt.com" }, "message": "defunkt message", "tree": { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/trees/2222222222222222222222222222222222222222", "sha": "2222222222222222222222222222222222222222" }, "comment_count": 0 }, "author": None, "committer": None, "parents": [ { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits/3333333333333333333333333333333333333333", "html_url": "https://github.com/defunkt-org/defunkt-repo/" "commit/3333333333333333333333333333333333333333", "sha": "3333333333333333333333333333333333333333" } ], "repository": { "id": 43434343, "node_id": "MDEwOlJlcG9zaXRvcnk0MzQzNDM0Mw==", "name": "defunkt-repo", "full_name": "defunkt-org/defunkt-repo", "private": False, "owner": { "login": "defunkt-org", "id": 44444444, "node_id": "MDEyOk9yZ2FuaXphdGlvbjQ0NDQ0NDQ0", "avatar_url": "https://avatars2.githubusercontent.com/u/44444444?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt-org", "html_url": "https://github.com/defunkt-org", "followers_url": "https://api.github.com/users/defunkt-org/followers", "following_url": "https://api.github.com/users/defunkt-org/" "following{/other_user}", "gists_url": 
"https://api.github.com/users/defunkt-org/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt-org/" "starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt-org/subscriptions", "organizations_url": "https://api.github.com/users/defunkt-org/orgs", "repos_url": "https://api.github.com/users/defunkt-org/repos", "events_url": "https://api.github.com/users/defunkt-org/events{/privacy}", "received_events_url": "https://api.github.com/users/defunkt-org/" "received_events", "type": "Organization", "site_admin": False }, "html_url": "https://github.com/defunkt-org/defunkt-repo", "description": "defunkt project", "fork": False, "url": "https://api.github.com/repos/defunkt-org/defunkt-repo", "forks_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/forks", "keys_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/keys{/key_id}", "collaborators_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "collaborators{/collaborator}", "teams_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/teams", "hooks_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/hooks", "issue_events_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "issues/events{/number}", "events_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/events", "assignees_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "assignees{/user}", "branches_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "branches{/branch}", "tags_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/tags", "blobs_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/blobs{/sha}", "git_tags_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/tags{/sha}", "git_refs_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/refs{/sha}", "trees_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/trees{/sha}", "statuses_url": 
"https://api.github.com/repos/defunkt-org/defunkt-repo/" "statuses/{sha}", "languages_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "languages", "stargazers_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "stargazers", "contributors_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "contributors", "subscribers_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "subscribers", "subscription_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "subscription", "commits_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits{/sha}", "git_commits_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/commits{/sha}", "comments_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "comments{/number}", "issue_comment_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "issues/comments{/number}", "contents_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "contents/{+path}", "compare_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "compare/{base}...{head}", "merges_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/merges", "archive_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "{archive_format}{/ref}", "downloads_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "downloads", "issues_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "issues{/number}", "pulls_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "pulls{/number}", "milestones_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "milestones{/number}", "notifications_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "notifications{?since,all,participating}", "labels_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "labels{/name}", "releases_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "releases{/id}", "deployments_url": 
"https://api.github.com/repos/defunkt-org/defunkt-repo/" "deployments" }, "score": 1.0 } ] } github_commit_search_not_found_reply = { "total_count": 0, "incomplete_results": False, "items": [ ] } class GitHubAvatar(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[avatar.AvatarGitHub(token="abcd")]) self.rsrc = avatar.AvatarResource(master) self.rsrc.reconfigResource(master.config) headers = { 'User-Agent': 'Buildbot', 'Authorization': 'token abcd', } self._http = yield fakehttpclientservice.HTTPClientService.getService( master, self, avatar.AvatarGitHub.DEFAULT_GITHUB_API_URL, headers=headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def test_username(self): username_search_endpoint = '/users/defunkt' self._http.expect('get', username_search_endpoint, content_json=github_username_search_reply, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/?username=defunkt') self.assertEqual(res, dict(redirected=b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32')) @defer.inlineCallbacks def test_username_not_found(self): username_search_endpoint = '/users/inexistent' self._http.expect('get', username_search_endpoint, code=404, content_json=github_username_not_found_reply, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/?username=inexistent') self.assertEqual(res, dict(redirected=b'img/nobody.png')) @defer.inlineCallbacks def test_username_error(self): username_search_endpoint = '/users/error' self._http.expect('get', username_search_endpoint, code=500, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/?username=error') self.assertEqual(res, 
dict(redirected=b'img/nobody.png')) @defer.inlineCallbacks def test_username_cached(self): username_search_endpoint = '/users/defunkt' self._http.expect('get', username_search_endpoint, content_json=github_username_search_reply, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/?username=defunkt') self.assertEqual(res, dict(redirected=b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32')) # Second request will give same result but without an HTTP request res = yield self.render_resource(self.rsrc, b'/?username=defunkt') self.assertEqual(res, dict(redirected=b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32')) @defer.inlineCallbacks def test_email(self): email_search_endpoint = '/search/users?q=defunkt%40defunkt.com+in%3Aemail' self._http.expect('get', email_search_endpoint, content_json=github_email_search_reply, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/?email=defunkt@defunkt.com') self.assertEqual(res, dict(redirected=b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32')) @defer.inlineCallbacks def test_email_commit(self): email_search_endpoint = '/search/users?q=defunkt%40defunkt.com+in%3Aemail' self._http.expect('get', email_search_endpoint, content_json=github_email_search_not_found_reply, headers={'Accept': 'application/vnd.github.v3+json'}) commit_search_endpoint = ('/search/commits?' 
'per_page=1&q=author-email%3Adefunkt%40defunkt.com&sort=committer-date') self._http.expect('get', commit_search_endpoint, content_json=github_commit_search_reply, headers={'Accept': 'application/vnd.github.v3+json,' 'application/vnd.github.cloak-preview'}) res = yield self.render_resource(self.rsrc, b'/?email=defunkt@defunkt.com') self.assertEqual(res, dict(redirected=b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32')) @defer.inlineCallbacks def test_email_commit_no_user(self): email_search_endpoint = '/search/users?q=defunkt%40defunkt.com+in%3Aemail' self._http.expect('get', email_search_endpoint, content_json=github_email_search_not_found_reply, headers={'Accept': 'application/vnd.github.v3+json'}) commit_search_endpoint = ('/search/commits?' 'per_page=1&q=author-email%3Adefunkt%40defunkt.com&sort=committer-date') self._http.expect('get', commit_search_endpoint, content_json=github_commit_search_no_user_reply, headers={'Accept': 'application/vnd.github.v3+json,' 'application/vnd.github.cloak-preview'}) res = yield self.render_resource(self.rsrc, b'/?email=defunkt@defunkt.com') self.assertEqual(res, dict(redirected=b'img/nobody.png')) @defer.inlineCallbacks def test_email_not_found(self): email_search_endpoint = '/search/users?q=notfound%40defunkt.com+in%3Aemail' self._http.expect('get', email_search_endpoint, content_json=github_email_search_not_found_reply, headers={'Accept': 'application/vnd.github.v3+json'}) commit_search_endpoint = ('/search/commits?' 
'per_page=1&q=author-email%3Anotfound%40defunkt.com&sort=committer-date') self._http.expect('get', commit_search_endpoint, content_json=github_commit_search_not_found_reply, headers={'Accept': 'application/vnd.github.v3+json,' 'application/vnd.github.cloak-preview'}) res = yield self.render_resource(self.rsrc, b'/?email=notfound@defunkt.com') self.assertEqual(res, dict(redirected=b'img/nobody.png')) @defer.inlineCallbacks def test_email_error(self): email_search_endpoint = '/search/users?q=error%40defunkt.com+in%3Aemail' self._http.expect('get', email_search_endpoint, code=500, headers={'Accept': 'application/vnd.github.v3+json'}) commit_search_endpoint = ('/search/commits?' 'per_page=1&q=author-email%3Aerror%40defunkt.com&sort=committer-date') self._http.expect('get', commit_search_endpoint, code=500, headers={'Accept': 'application/vnd.github.v3+json,' 'application/vnd.github.cloak-preview'}) res = yield self.render_resource(self.rsrc, b'/?email=error@defunkt.com') self.assertEqual(res, dict(redirected=b'img/nobody.png')) class GitHubAvatarBasicAuth(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() avatar_method = avatar.AvatarGitHub(client_id="oauth_id", client_secret="oauth_secret") master = self.make_master(url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[avatar_method]) self.rsrc = avatar.AvatarResource(master) self.rsrc.reconfigResource(master.config) headers = { 'User-Agent': 'Buildbot', # oauth_id:oauth_secret in Base64 'Authorization': 'basic b2F1dGhfaWQ6b2F1dGhfc2VjcmV0', } self._http = yield fakehttpclientservice.HTTPClientService.getService( master, self, avatar.AvatarGitHub.DEFAULT_GITHUB_API_URL, headers=headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() def test_incomplete_credentials(self): with self.assertRaises(config.ConfigErrors): avatar.AvatarGitHub(client_id="oauth_id") with 
self.assertRaises(config.ConfigErrors): avatar.AvatarGitHub(client_secret="oauth_secret") def test_token_and_client_credentials(self): with self.assertRaises(config.ConfigErrors): avatar.AvatarGitHub(client_id="oauth_id", client_secret="oauth_secret", token="token") @defer.inlineCallbacks def test_username(self): username_search_endpoint = '/users/defunkt' self._http.expect('get', username_search_endpoint, content_json=github_username_search_reply, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/?username=defunkt') self.assertEqual(res, {'redirected': b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32'}) buildbot-3.4.0/master/buildbot/test/unit/www/test_config.py000066400000000000000000000115421413250514000240570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import os import mock from twisted.internet import defer from twisted.python import log from twisted.trial import unittest from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.www import auth from buildbot.www import config class IndexResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_render(self): _auth = auth.NoAuth() _auth.maybeAutoLogin = mock.Mock() custom_versions = [ ['test compoent', '0.1.2'], ['test component 2', '0.2.1']] master = self.make_master( url='h:/a/b/', auth=_auth, versions=custom_versions) rsrc = config.IndexResource(master, "foo") rsrc.reconfigResource(master.config) rsrc.jinja = mock.Mock() template = mock.Mock() rsrc.jinja.get_template = lambda x: template template.render = lambda configjson, config, custom_templates: configjson vjson = [list(v) for v in rsrc.getEnvironmentVersions()] + custom_versions res = yield self.render_resource(rsrc, b'/') res = json.loads(bytes2unicode(res)) _auth.maybeAutoLogin.assert_called_with(mock.ANY) exp = { "authz": {}, "titleURL": "http://buildbot.net", "versions": vjson, "title": "Buildbot", "auth": {"name": "NoAuth"}, "user": {"anonymous": True}, "buildbotURL": "h:/a/b/", "multiMaster": False, "port": None } self.assertEqual(res, exp) master.session.user_info = dict(name="me", email="me@me.org") res = yield self.render_resource(rsrc, b'/') res = json.loads(bytes2unicode(res)) exp = { "authz": {}, "titleURL": "http://buildbot.net", "versions": vjson, "title": "Buildbot", "auth": {"name": "NoAuth"}, "user": {"email": "me@me.org", "name": "me"}, "buildbotURL": "h:/a/b/", "multiMaster": False, "port": None } self.assertEqual(res, exp) master = self.make_master( url='h:/a/c/', auth=_auth, versions=custom_versions) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, 
b'/') res = json.loads(bytes2unicode(res)) exp = { "authz": {}, "titleURL": "http://buildbot.net", "versions": vjson, "title": "Buildbot", "auth": {"name": "NoAuth"}, "user": {"anonymous": True}, "buildbotURL": "h:/a/b/", "multiMaster": False, "port": None } self.assertEqual(res, exp) def test_parseCustomTemplateDir(self): exp = {'views/builds.html': '
\n
'} try: # we make the test work if pypugjs is present or note # It is better than just skip if pypugjs is not there import pypugjs # pylint: disable=import-outside-toplevel [pypugjs] exp.update({'plugin/views/plugin.html': '
this is customized
'}) except ImportError: log.msg("Only testing html based template override") template_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'test_templates_dir') master = self.make_master(url='h:/a/b/') rsrc = config.IndexResource(master, "foo") res = rsrc.parseCustomTemplateDir(template_dir) self.assertEqual(res, exp) def test_CustomTemplateDir(self): master = self.make_master(url='h:/a/b/') rsrc = config.IndexResource(master, "foo") master.config.www['custom_templates_dir'] = 'foo' rsrc.parseCustomTemplateDir = mock.Mock(return_value="returnvalue") rsrc.reconfigResource(master.config) self.assertNotIn('custom_templates_dir', master.config.www) self.assertEqual('returnvalue', rsrc.custom_templates) buildbot-3.4.0/master/buildbot/test/unit/www/test_endpointmatchers.py000066400000000000000000000135711413250514000261650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.schedulers.forcesched import ForceScheduler from buildbot.test import fakedb from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www.authz import endpointmatchers class EndpointBase(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='h:/a/b/') self.db = self.master.db self.matcher = self.makeMatcher() self.matcher.setAuthz(self.master.authz) self.insertData() def makeMatcher(self): raise NotImplementedError() def assertMatch(self, match): self.assertTrue(match is not None) def assertNotMatch(self, match): self.assertTrue(match is None) def insertData(self): self.db.insertTestData([ fakedb.SourceStamp(id=13, branch='secret'), fakedb.Build( id=15, buildrequestid=16, masterid=1, workerid=2, builderid=21), fakedb.BuildRequest(id=16, buildsetid=17), fakedb.Buildset(id=17), fakedb.BuildsetSourceStamp(id=20, buildsetid=17, sourcestampid=13), fakedb.Builder(id=21, name="builder"), ]) class ValidEndpointMixin: @defer.inlineCallbacks def test_invalidPath(self): ret = yield self.matcher.match(("foo", "bar")) self.assertNotMatch(ret) class AnyEndpointMatcher(EndpointBase): def makeMatcher(self): return endpointmatchers.AnyEndpointMatcher(role="foo") @defer.inlineCallbacks def test_nominal(self): ret = yield self.matcher.match(("foo", "bar")) self.assertMatch(ret) class AnyControlEndpointMatcher(EndpointBase): def makeMatcher(self): return endpointmatchers.AnyControlEndpointMatcher(role="foo") @defer.inlineCallbacks def test_default_action(self): ret = yield self.matcher.match(("foo", "bar")) self.assertMatch(ret) @defer.inlineCallbacks def test_get(self): ret = yield self.matcher.match(("foo", "bar"), action="GET") self.assertNotMatch(ret) @defer.inlineCallbacks def test_other_action(self): ret = yield self.matcher.match(("foo", 
"bar"), action="foo") self.assertMatch(ret) class ViewBuildsEndpointMatcherBranch(EndpointBase, ValidEndpointMixin): def makeMatcher(self): return endpointmatchers.ViewBuildsEndpointMatcher(branch="secret", role="agent") @defer.inlineCallbacks def test_build(self): ret = yield self.matcher.match(("builds", "15")) self.assertMatch(ret) test_build.skip = "ViewBuildsEndpointMatcher is not implemented yet" class StopBuildEndpointMatcherBranch(EndpointBase, ValidEndpointMixin): def makeMatcher(self): return endpointmatchers.StopBuildEndpointMatcher(builder="builder", role="owner") @defer.inlineCallbacks def test_build(self): ret = yield self.matcher.match(("builds", "15"), "stop") self.assertMatch(ret) @defer.inlineCallbacks def test_build_no_match(self): self.matcher.builder = "foo" ret = yield self.matcher.match(("builds", "15"), "stop") self.assertNotMatch(ret) @defer.inlineCallbacks def test_build_no_builder(self): self.matcher.builder = None ret = yield self.matcher.match(("builds", "15"), "stop") self.assertMatch(ret) class ForceBuildEndpointMatcherBranch(EndpointBase, ValidEndpointMixin): def makeMatcher(self): return endpointmatchers.ForceBuildEndpointMatcher(builder="builder", role="owner") def insertData(self): super().insertData() self.master.allSchedulers = lambda: [ ForceScheduler(name="sched1", builderNames=["builder"])] @defer.inlineCallbacks def test_build(self): ret = yield self.matcher.match(("builds", "15"), "stop") self.assertNotMatch(ret) @defer.inlineCallbacks def test_forcesched(self): ret = yield self.matcher.match(("forceschedulers", "sched1"), "force") self.assertMatch(ret) @defer.inlineCallbacks def test_noforcesched(self): ret = yield self.matcher.match(("forceschedulers", "sched2"), "force") self.assertNotMatch(ret) @defer.inlineCallbacks def test_forcesched_builder_no_match(self): self.matcher.builder = "foo" ret = yield self.matcher.match(("forceschedulers", "sched1"), "force") self.assertNotMatch(ret) @defer.inlineCallbacks def 
test_forcesched_nobuilder(self): self.matcher.builder = None ret = yield self.matcher.match(("forceschedulers", "sched1"), "force") self.assertMatch(ret) class EnableSchedulerEndpointMatcher(EndpointBase, ValidEndpointMixin): def makeMatcher(self): return endpointmatchers.EnableSchedulerEndpointMatcher(role="agent") @defer.inlineCallbacks def test_build(self): ret = yield self.matcher.match(("builds", "15"), "stop") self.assertNotMatch(ret) @defer.inlineCallbacks def test_scheduler_enable(self): ret = yield self.matcher.match(("schedulers", "15"), "enable") self.assertMatch(ret) buildbot-3.4.0/master/buildbot/test/unit/www/test_graphql.py000066400000000000000000000202541413250514000242500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import connector from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes from buildbot.www import graphql try: import graphql as graphql_core except ImportError: graphql_core = None class V3RootResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): if not graphql_core: skip = "graphql is required for V3RootResource tests" def setUp(self): self.patch(connector.DataConnector, 'submodules', []) self.setUpTestReactor(use_asyncio=True) self.master = self.make_master(url="http://server/path/", wantGraphql=True) self.master.config.www["graphql"] = {"debug": True} self.rsrc = graphql.V3RootResource(self.master) self.rsrc.reconfigResource(self.master.config) def assertSimpleError(self, message_or_error, responseCode): if isinstance(message_or_error, list): errors = message_or_error else: errors = [{"message": message_or_error}] content = json.dumps({"data": None, "errors": errors}) self.assertRequest(content=unicode2bytes(content), responseCode=responseCode) def assertResult(self, result): content = json.dumps({"data": result, "errors": None}) self.assertRequest(content=unicode2bytes(content), responseCode=200) @defer.inlineCallbacks def test_failure(self): self.master.graphql.query = mock.Mock( return_value=defer.fail(RuntimeError("oh noes")) ) yield self.render_resource( self.rsrc, b"/?query={builders{name}}", ) self.assertSimpleError("internal error - see logs", 500) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_invalid_http_method(self): yield self.render_resource(self.rsrc, b"/", method=b"PATCH") self.assertSimpleError("invalid HTTP method", 400) # https://graphql.org/learn/serving-over-http/#get-request @defer.inlineCallbacks def test_get_query(self): yield self.render_resource( self.rsrc, 
b"/?query={tests{testid}}", ) self.assertResult( { "tests": [ {"testid": 13}, {"testid": 14}, {"testid": 15}, {"testid": 16}, {"testid": 17}, {"testid": 18}, {"testid": 19}, {"testid": 20}, ] } ) @defer.inlineCallbacks def test_get_query_item(self): yield self.render_resource( self.rsrc, b"/?query={test(testid:13){testid, info}}", ) self.assertResult({"test": {"testid": 13, "info": "ok"}}) @defer.inlineCallbacks def test_get_query_subresource(self): yield self.render_resource( self.rsrc, b"/?query={test(testid:13){testid, info, steps { info }}}", ) self.assertResult( { "test": { "testid": 13, "info": "ok", "steps": [{"info": "ok"}, {"info": "failed"}], } } ) @defer.inlineCallbacks def test_get_query_items_result_spec(self): yield self.render_resource( self.rsrc, b"/?query={tests(testid__gt:18){testid, info}}", ) self.assertResult( {"tests": [{"testid": 19, "info": "todo"}, {"testid": 20, "info": "error"}]} ) @defer.inlineCallbacks def test_get_noquery(self): yield self.render_resource( self.rsrc, b"/", ) self.assertSimpleError("GET request must contain a 'query' parameter", 400) # https://graphql.org/learn/serving-over-http/#post-request @defer.inlineCallbacks def test_post_query_graphql_content(self): yield self.render_resource( self.rsrc, method=b"POST", content=b"{tests{testid}}", content_type=b"application/graphql", ) self.assertResult( { "tests": [ {"testid": 13}, {"testid": 14}, {"testid": 15}, {"testid": 16}, {"testid": 17}, {"testid": 18}, {"testid": 19}, {"testid": 20}, ] } ) @defer.inlineCallbacks def test_post_query_json_content(self): query = {"query": "{tests{testid}}"} yield self.render_resource( self.rsrc, method=b"POST", content=json.dumps(query).encode(), content_type=b"application/json", ) self.assertResult( { "tests": [ {"testid": 13}, {"testid": 14}, {"testid": 15}, {"testid": 16}, {"testid": 17}, {"testid": 18}, {"testid": 19}, {"testid": 20}, ] } ) @defer.inlineCallbacks def test_post_query_json_content_operationName(self): query = { "query": 
"query foo {tests{testid}} query bar {tests{name}}", "operationName": "fsoo", } yield self.render_resource( self.rsrc, method=b"POST", content=json.dumps(query).encode(), content_type=b"application/json", ) self.assertSimpleError("json request unsupported fields: operationName", 400) @defer.inlineCallbacks def test_post_query_json_badcontent_type(self): yield self.render_resource( self.rsrc, method=b"POST", content=b"foo", content_type=b"application/foo" ) self.assertSimpleError("unsupported content-type: application/foo", 400) @defer.inlineCallbacks def test_post_query_json_nocontent_type(self): yield self.render_resource(self.rsrc, method=b"POST") self.assertSimpleError("no content-type", 400) @defer.inlineCallbacks def test_get_bad_query(self): yield self.render_resource( self.rsrc, b"/?query={notexistant{id}}", ) self.assertSimpleError( [ { "message": "Cannot query field 'notexistant' on type 'Query'.", "locations": [{"line": 1, "column": 2}], "path": None, } ], 200, ) class DisabledV3RootResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): if not graphql_core: skip = "graphql is required for V3RootResource tests" def setUp(self): self.setUpTestReactor() self.master = self.make_master(url="http://server/path/") self.rsrc = graphql.V3RootResource(self.master) self.rsrc.reconfigResource(self.master.config) @defer.inlineCallbacks def test_basic_disabled(self): yield self.render_resource(self.rsrc, b"/") self.assertRequest( content=unicode2bytes( json.dumps( {"data": None, "errors": [{"message": "graphql not enabled"}]} ) ), responseCode=501, ) buildbot-3.4.0/master/buildbot/test/unit/www/test_hooks_base.py000066400000000000000000000110671413250514000247310ustar00rootroot00000000000000import json from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.util import 
bytes2unicode from buildbot.www.change_hook import ChangeHookResource from buildbot.www.hooks.base import BaseHookHandler def _prepare_base_change_hook(testcase, **options): return ChangeHookResource(dialects={ 'base': options }, master=fakeMasterForHooks(testcase)) def _prepare_request(payload, headers=None): if headers is None: headers = { b"Content-type": b"application/x-www-form-urlencoded", b"Accept": b"text/plain"} else: headers = {} if b'comments' not in payload: payload[b'comments'] = b'test_www_hook_base submission' # Required field request = FakeRequest() request.uri = b"/change_hook/base" request.method = b"POST" request.args = payload request.received_headers.update(headers) return request class TestChangeHookConfiguredWithBase(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_base_change_hook(self) @defer.inlineCallbacks def _check_base_with_change(self, payload): self.request = _prepare_request(payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] def _first_or_nothing(val): if isinstance(val, type([])): val = val[0] return bytes2unicode(val) if payload.get(b'files'): files = json.loads(_first_or_nothing(payload.get(b'files'))) else: files = [] self.assertEqual(change['files'], files) if payload.get(b'properties'): props = json.loads(_first_or_nothing(payload.get(b'properties'))) else: props = {} self.assertEqual(change['properties'], props) self.assertEqual( change['author'], _first_or_nothing(payload.get(b'author', payload.get(b'who')))) for field in ('revision', 'committer', 'comments', 'branch', 'category', 'revlink'): self.assertEqual( change[field], _first_or_nothing(payload.get(field.encode()))) for field in ('repository', 'project'): self.assertEqual( change[field], _first_or_nothing(payload.get(field.encode())) or '') def 
test_base_with_no_change(self): return self._check_base_with_change({}) def test_base_with_changes(self): self._check_base_with_change({ b'revision': [b'1234badcaca5678'], b'branch': [b'master'], b'comments': [b'Fix foo bar'], b'category': [b'bug'], b'revlink': [b'https://git.myproject.org/commit/1234badcaca5678'], b'repository': [b'myproject'], b'project': [b'myproject'], b'author': [b'me '], b'committer': [b'me '], b'files': [b'["src/main.c", "src/foo.c"]'], b'properties': [b'{"color": "blue", "important": true, "size": 2}'], }) class TestChangeHookConfiguredWithCustomBase(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() class CustomBase(BaseHookHandler): def getChanges(self, request): args = request.args chdict = dict( revision=args.get(b'revision'), repository=args.get(b'_repository') or '', project=args.get(b'project') or '', codebase=args.get(b'codebase')) return ([chdict], None) self.changeHook = _prepare_base_change_hook(self, custom_class=CustomBase) @defer.inlineCallbacks def _check_base_with_change(self, payload): self.request = _prepare_request(payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change['repository'], payload.get(b'_repository') or '') def test_base_with_no_change(self): return self._check_base_with_change({b'repository': b'foo'}) buildbot-3.4.0/master/buildbot/test/unit/www/test_hooks_bitbucket.py000066400000000000000000000220611413250514000257670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Copyright Manba Team from twisted.internet.defer import inlineCallbacks from twisted.trial import unittest from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.www import change_hook from buildbot.www.hooks.bitbucket import _HEADER_EVENT gitJsonPayload = b"""{ "canon_url": "https://bitbucket.org", "commits": [ { "author": "marcus", "branch": "master", "files": [ { "file": "somefile.py", "type": "modified" } ], "message": "Added some more things to somefile.py", "node": "620ade18607a", "parents": [ "702c70160afc" ], "raw_author": "Marcus Bertrand ", "raw_node": "620ade18607ac42d872b568bb92acaa9a28620e9", "revision": null, "size": -1, "timestamp": "2012-05-30 05:58:56", "utctimestamp": "2012-05-30 03:58:56+00:00" } ], "repository": { "absolute_url": "/marcus/project-x/", "fork": false, "is_private": true, "name": "Project X", "owner": "marcus", "scm": "git", "slug": "project-x", "website": "https://atlassian.com/" }, "user": "marcus" }""" mercurialJsonPayload = b"""{ "canon_url": "https://bitbucket.org", "commits": [ { "author": "marcus", "branch": "master", "files": [ { "file": "somefile.py", "type": "modified" } ], "message": "Added some more things to somefile.py", "node": "620ade18607a", "parents": [ "702c70160afc" ], "raw_author": "Marcus Bertrand ", "raw_node": "620ade18607ac42d872b568bb92acaa9a28620e9", "revision": null, "size": -1, "timestamp": "2012-05-30 
05:58:56", "utctimestamp": "2012-05-30 03:58:56+00:00" } ], "repository": { "absolute_url": "/marcus/project-x/", "fork": false, "is_private": true, "name": "Project X", "owner": "marcus", "scm": "hg", "slug": "project-x", "website": "https://atlassian.com/" }, "user": "marcus" }""" gitJsonNoCommitsPayload = b"""{ "canon_url": "https://bitbucket.org", "commits": [ ], "repository": { "absolute_url": "/marcus/project-x/", "fork": false, "is_private": true, "name": "Project X", "owner": "marcus", "scm": "git", "slug": "project-x", "website": "https://atlassian.com/" }, "user": "marcus" }""" mercurialJsonNoCommitsPayload = b"""{ "canon_url": "https://bitbucket.org", "commits": [ ], "repository": { "absolute_url": "/marcus/project-x/", "fork": false, "is_private": true, "name": "Project X", "owner": "marcus", "scm": "hg", "slug": "project-x", "website": "https://atlassian.com/" }, "user": "marcus" }""" class TestChangeHookConfiguredWithBitbucketChange(unittest.TestCase, TestReactorMixin): """Unit tests for BitBucket Change Hook """ def setUp(self): self.setUpTestReactor() self.change_hook = change_hook.ChangeHookResource( dialects={'bitbucket': True}, master=fakeMasterForHooks(self)) @inlineCallbacks def testGitWithChange(self): change_dict = {b'payload': [gitJsonPayload]} request = FakeRequest(change_dict) request.received_headers[_HEADER_EVENT] = b"repo:push" request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) commit = self.change_hook.master.data.updates.changesAdded[0] self.assertEqual(commit['files'], ['somefile.py']) self.assertEqual( commit['repository'], 'https://bitbucket.org/marcus/project-x/') self.assertEqual( commit['when_timestamp'], 1338350336 ) self.assertEqual( commit['author'], 'Marcus Bertrand ') self.assertEqual( commit['revision'], '620ade18607ac42d872b568bb92acaa9a28620e9') self.assertEqual( commit['comments'], 'Added 
some more things to somefile.py') self.assertEqual(commit['branch'], 'master') self.assertEqual( commit['revlink'], 'https://bitbucket.org/marcus/project-x/commits/' '620ade18607ac42d872b568bb92acaa9a28620e9' ) self.assertEqual( commit['properties']['event'], 'repo:push') @inlineCallbacks def testGitWithNoCommitsPayload(self): change_dict = {b'payload': [gitJsonNoCommitsPayload]} request = FakeRequest(change_dict) request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b'no change found') @inlineCallbacks def testMercurialWithChange(self): change_dict = {b'payload': [mercurialJsonPayload]} request = FakeRequest(change_dict) request.received_headers[_HEADER_EVENT] = b"repo:push" request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) commit = self.change_hook.master.data.updates.changesAdded[0] self.assertEqual(commit['files'], ['somefile.py']) self.assertEqual( commit['repository'], 'https://bitbucket.org/marcus/project-x/') self.assertEqual( commit['when_timestamp'], 1338350336 ) self.assertEqual( commit['author'], 'Marcus Bertrand ') self.assertEqual( commit['revision'], '620ade18607ac42d872b568bb92acaa9a28620e9') self.assertEqual( commit['comments'], 'Added some more things to somefile.py') self.assertEqual(commit['branch'], 'master') self.assertEqual( commit['revlink'], 'https://bitbucket.org/marcus/project-x/commits/' '620ade18607ac42d872b568bb92acaa9a28620e9' ) self.assertEqual( commit['properties']['event'], 'repo:push') @inlineCallbacks def testMercurialWithNoCommitsPayload(self): change_dict = {b'payload': [mercurialJsonNoCommitsPayload]} request = FakeRequest(change_dict) request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield 
request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b'no change found') @inlineCallbacks def testWithNoJson(self): request = FakeRequest() request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b'Error processing changes.') request.setResponseCode.assert_called_with( 500, b'Error processing changes.') self.assertEqual(len(self.flushLoggedErrors()), 1) @inlineCallbacks def testGitWithChangeAndProject(self): change_dict = { b'payload': [gitJsonPayload], b'project': [b'project-name']} request = FakeRequest(change_dict) request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) commit = self.change_hook.master.data.updates.changesAdded[0] self.assertEqual(commit['project'], 'project-name') buildbot-3.4.0/master/buildbot/test/unit/www/test_hooks_bitbucketcloud.py000066400000000000000000000726701413250514000270310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # Copyright Mamba Team from io import BytesIO from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes from buildbot.www import change_hook from buildbot.www.hooks.bitbucketcloud import _HEADER_EVENT _CT_JSON = b'application/json' bitbucketPRproperties = { 'pullrequesturl': 'http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21', 'bitbucket.id': '21', 'bitbucket.link': 'http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21', 'bitbucket.title': 'dot 1496311906', 'bitbucket.authorLogin': 'Buildbot', 'bitbucket.fromRef.branch.name': 'branch_1496411680', 'bitbucket.fromRef.branch.rawNode': 'a87e21f7433d8c16ac7be7413483fbb76c72a8ba', 'bitbucket.fromRef.commit.authorTimestamp': 0, 'bitbucket.fromRef.commit.date': None, 'bitbucket.fromRef.commit.hash': 'a87e21f7433d8c16ac7be7413483fbb76c72a8ba', 'bitbucket.fromRef.commit.message': None, 'bitbucket.fromRef.repository.fullName': 'CI/py-repo', 'bitbucket.fromRef.repository.links.self.href': 'http://localhost:7990/projects/CI/repos/py-repo', 'bitbucket.fromRef.repository.owner.display_name': 'CI', 'bitbucket.fromRef.repository.owner.nickname': 'CI', 'bitbucket.fromRef.repository.ownerName': 'CI', 'bitbucket.fromRef.repository.project.key': 'CI', 'bitbucket.fromRef.repository.project.name': 'Continuous Integration', 'bitbucket.fromRef.repository.public': False, 'bitbucket.fromRef.repository.scm': 'git', 'bitbucket.fromRef.repository.slug': 'py-repo', 'bitbucket.toRef.branch.name': 'master', 'bitbucket.toRef.branch.rawNode': '7aebbb0089c40fce138a6d0b36d2281ea34f37f5', 'bitbucket.toRef.commit.authorTimestamp': 0, 'bitbucket.toRef.commit.date': None, 'bitbucket.toRef.commit.hash': '7aebbb0089c40fce138a6d0b36d2281ea34f37f5', 'bitbucket.toRef.commit.message': None, 
'bitbucket.toRef.repository.fullName': 'CI/py-repo', 'bitbucket.toRef.repository.links.self.href': 'http://localhost:7990/projects/CI/repos/py-repo', 'bitbucket.toRef.repository.owner.display_name': 'CI', 'bitbucket.toRef.repository.owner.nickname': 'CI', 'bitbucket.toRef.repository.ownerName': 'CI', 'bitbucket.toRef.repository.project.key': 'CI', 'bitbucket.toRef.repository.project.name': 'Continuous Integration', 'bitbucket.toRef.repository.public': False, 'bitbucket.toRef.repository.scm': 'git', 'bitbucket.toRef.repository.slug': 'py-repo' } pushJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" }, "html": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": false, "new": { "type": "branch", "name": "branch_1496411680", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "old": { "type": "branch", "name": "branch_1496411680", "target": { "type": "commit", "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba" } } } ] } } """ pullRequestCreatedJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": 
null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestUpdatedJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": 
"http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestRejectedJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": 
"7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestFulfilledJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": 
"CI" }, "fullName": "CI/py-repo" } } """ deleteTagJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" }, "html": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "ownerName": "BUIL", "public": false, "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": true, "old": { "type": "tag", "name": "1.0.0", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "new": null } ] } } """ deleteBranchJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" }, "html": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "ownerName": "CI", "public": false, "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": true, "old": { "type": "branch", "name": "branch_1496758965", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "new": null } ] } } """ newTagJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" }, "html": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": true, "closed": false, "old": null, "new": { "type": "tag", 
"name": "1.0.0", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } } } ] } } """ def _prepare_request(payload, headers=None, change_dict=None): headers = headers or {} request = FakeRequest(change_dict) request.uri = b"/change_hook/bitbucketcloud" request.method = b"POST" if isinstance(payload, str): payload = unicode2bytes(payload) request.content = BytesIO(payload) request.received_headers[b'Content-Type'] = _CT_JSON request.received_headers.update(headers) return request class TestChangeHookConfiguredWithGitChange(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.change_hook = change_hook.ChangeHookResource( dialects={'bitbucketcloud': { 'bitbucket_property_whitelist': ["bitbucket.*"], }}, master=fakeMasterForHooks(self) ) def assertDictSubset(self, expected_dict, response_dict): expected = {} for key in expected_dict.keys(): self.assertIn(key, set(response_dict.keys())) expected[key] = response_dict[key] self.assertDictEqual(expected_dict, expected) def _checkPush(self, change): self.assertEqual( change['repository'], 'http://localhost:7990/projects/CI/repos/py-repo') self.assertEqual(change['author'], 'John Smith ') self.assertEqual(change['project'], 'Continuous Integration') self.assertEqual(change['revision'], '793d4754230023d85532f9a38dba3290f959beb4') self.assertEqual( change['comments'], 'Bitbucket Cloud commit ' '793d4754230023d85532f9a38dba3290f959beb4') self.assertEqual( change['revlink'], 'http://localhost:7990/projects/CI/repos/py-repo/commits/' '793d4754230023d85532f9a38dba3290f959beb4') @defer.inlineCallbacks def testHookWithChangeOnPushEvent(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: 'repo:push'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 
'refs/heads/branch_1496411680') self.assertEqual(change['category'], 'push') @defer.inlineCallbacks def testHookWithNonDictOption(self): self.change_hook.dialects = {'bitbucketcloud': True} yield self.testHookWithChangeOnPushEvent() def _checkPullRequest(self, change): self.assertEqual( change['repository'], 'http://localhost:7990/projects/CI/repos/py-repo') self.assertEqual(change['author'], 'John Smith ') self.assertEqual(change['project'], 'Continuous Integration') self.assertEqual(change['comments'], 'Bitbucket Cloud Pull Request #21') self.assertEqual(change['revlink'], 'http://localhost:7990/projects/' 'CI/repos/py-repo/pull-requests/21') self.assertEqual(change['revision'], 'a87e21f7433d8c16ac7be7413483fbb76c72a8ba') self.assertDictSubset(bitbucketPRproperties, change["properties"]) @defer.inlineCallbacks def testHookWithChangeOnPullRequestCreated(self): request = _prepare_request( pullRequestCreatedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:created'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/pull-requests/21/merge') self.assertEqual(change['category'], 'pull-created') @defer.inlineCallbacks def testHookWithChangeOnPullRequestUpdated(self): request = _prepare_request( pullRequestUpdatedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:updated'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/pull-requests/21/merge') self.assertEqual(change['category'], 'pull-updated') @defer.inlineCallbacks def testHookWithChangeOnPullRequestRejected(self): request = _prepare_request( pullRequestRejectedJsonPayload, headers={_HEADER_EVENT: 
'pullrequest:rejected'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496411680') self.assertEqual(change['category'], 'pull-rejected') @defer.inlineCallbacks def testHookWithChangeOnPullRequestFulfilled(self): request = _prepare_request( pullRequestFulfilledJsonPayload, headers={_HEADER_EVENT: 'pullrequest:fulfilled'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/heads/master') self.assertEqual(change['category'], 'pull-fulfilled') @defer.inlineCallbacks def _checkCodebase(self, event_type, expected_codebase): payloads = { 'repo:push': pushJsonPayload, 'pullrequest:updated': pullRequestUpdatedJsonPayload} request = _prepare_request( payloads[event_type], headers={_HEADER_EVENT: event_type}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self.assertEqual(change['codebase'], expected_codebase) @defer.inlineCallbacks def testHookWithCodebaseValueOnPushEvent(self): self.change_hook.dialects = { 'bitbucketcloud': {'codebase': 'super-codebase'}} yield self._checkCodebase('repo:push', 'super-codebase') @defer.inlineCallbacks def testHookWithCodebaseFunctionOnPushEvent(self): self.change_hook.dialects = { 'bitbucketcloud': { 'codebase': lambda payload: payload['repository']['project']['key']}} yield self._checkCodebase('repo:push', 'CI') @defer.inlineCallbacks def testHookWithCodebaseValueOnPullEvent(self): self.change_hook.dialects = { 'bitbucketcloud': {'codebase': 'super-codebase'}} yield 
self._checkCodebase('pullrequest:updated', 'super-codebase') @defer.inlineCallbacks def testHookWithCodebaseFunctionOnPullEvent(self): self.change_hook.dialects = { 'bitbucketcloud': { 'codebase': lambda payload: payload['repository']['project']['key']}} yield self._checkCodebase('pullrequest:updated', 'CI') @defer.inlineCallbacks def testHookWithUnhandledEvent(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: 'invented:event'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b"Unknown event: invented_event") @defer.inlineCallbacks def testHookWithChangeOnCreateTag(self): request = _prepare_request( newTagJsonPayload, headers={_HEADER_EVENT: 'repo:push'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/tags/1.0.0') self.assertEqual(change['category'], 'push') @defer.inlineCallbacks def testHookWithChangeOnDeleteTag(self): request = _prepare_request( deleteTagJsonPayload, headers={_HEADER_EVENT: 'repo:push'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/tags/1.0.0') self.assertEqual(change['category'], 'ref-deleted') @defer.inlineCallbacks def testHookWithChangeOnDeleteBranch(self): request = _prepare_request( deleteBranchJsonPayload, headers={_HEADER_EVENT: 'repo:push'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496758965') 
self.assertEqual(change['category'], 'ref-deleted') @defer.inlineCallbacks def testHookWithInvalidContentType(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: b'repo:push'}) request.received_headers[b'Content-Type'] = b'invalid/content' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b"Unknown content type: invalid/content") buildbot-3.4.0/master/buildbot/test/unit/www/test_hooks_bitbucketserver.py000066400000000000000000000750371413250514000272310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # Copyright Mamba Team from io import BytesIO from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes from buildbot.www import change_hook from buildbot.www.hooks.bitbucketserver import _HEADER_EVENT _CT_JSON = b'application/json' bitbucketPRproperties = { 'pullrequesturl': 'http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21', 'bitbucket.id': '21', 'bitbucket.link': 'http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21', 'bitbucket.title': 'dot 1496311906', 'bitbucket.authorLogin': 'Buildbot', 'bitbucket.fromRef.branch.name': 'branch_1496411680', 'bitbucket.fromRef.branch.rawNode': 'a87e21f7433d8c16ac7be7413483fbb76c72a8ba', 'bitbucket.fromRef.commit.authorTimestamp': 0, 'bitbucket.fromRef.commit.date': None, 'bitbucket.fromRef.commit.hash': 'a87e21f7433d8c16ac7be7413483fbb76c72a8ba', 'bitbucket.fromRef.commit.message': None, 'bitbucket.fromRef.repository.fullName': 'CI/py-repo', 'bitbucket.fromRef.repository.links.self': [{ 'href': 'http://localhost:7990/projects/CI/repos/py-repo/browse' }], 'bitbucket.fromRef.repository.owner.displayName': 'CI', 'bitbucket.fromRef.repository.owner.username': 'CI', 'bitbucket.fromRef.repository.ownerName': 'CI', 'bitbucket.fromRef.repository.project.key': 'CI', 'bitbucket.fromRef.repository.project.name': 'Continuous Integration', 'bitbucket.fromRef.repository.public': False, 'bitbucket.fromRef.repository.scmId': 'git', 'bitbucket.fromRef.repository.slug': 'py-repo', 'bitbucket.toRef.branch.name': 'master', 'bitbucket.toRef.branch.rawNode': '7aebbb0089c40fce138a6d0b36d2281ea34f37f5', 'bitbucket.toRef.commit.authorTimestamp': 0, 'bitbucket.toRef.commit.date': None, 'bitbucket.toRef.commit.hash': '7aebbb0089c40fce138a6d0b36d2281ea34f37f5', 
'bitbucket.toRef.commit.message': None, 'bitbucket.toRef.repository.fullName': 'CI/py-repo', 'bitbucket.toRef.repository.links.self': [{ 'href': 'http://localhost:7990/projects/CI/repos/py-repo/browse' }], 'bitbucket.toRef.repository.owner.displayName': 'CI', 'bitbucket.toRef.repository.owner.username': 'CI', 'bitbucket.toRef.repository.ownerName': 'CI', 'bitbucket.toRef.repository.project.key': 'CI', 'bitbucket.toRef.repository.project.name': 'Continuous Integration', 'bitbucket.toRef.repository.public': False, 'bitbucket.toRef.repository.scmId': 'git', 'bitbucket.toRef.repository.slug': 'py-repo' } pushJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": false, "new": { "type": "branch", "name": "branch_1496411680", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "old": { "type": "branch", "name": "branch_1496411680", "target": { "type": "commit", "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba" } } } ] } } """ pullRequestCreatedJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, 
"date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestUpdatedJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": 
"py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestRejectedJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": 
"7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestFulfilledJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": 
"http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" } } """ deleteTagJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "ownerName": "BUIL", "public": false, "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": true, "old": { "type": "tag", "name": "1.0.0", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "new": null } ] } } """ deleteBranchJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "ownerName": "CI", "public": false, "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": true, "old": { "type": "branch", "name": "branch_1496758965", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "new": null } ] } } """ newTagJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": true, "closed": false, "old": null, "new": { "type": "tag", "name": "1.0.0", "target": { "type": 
"commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } } } ] } } """ def _prepare_request(payload, headers=None, change_dict=None): headers = headers or {} request = FakeRequest(change_dict) request.uri = b"/change_hook/bitbucketserver" request.method = b"POST" if isinstance(payload, str): payload = unicode2bytes(payload) request.content = BytesIO(payload) request.received_headers[b'Content-Type'] = _CT_JSON request.received_headers.update(headers) return request class TestChangeHookConfiguredWithGitChange(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.change_hook = change_hook.ChangeHookResource( dialects={'bitbucketserver': { 'bitbucket_property_whitelist': ["bitbucket.*"], }}, master=fakeMasterForHooks(self) ) def assertDictSubset(self, expected_dict, response_dict): expected = {} for key in expected_dict.keys(): self.assertIn(key, set(response_dict.keys())) expected[key] = response_dict[key] self.assertDictEqual(expected_dict, expected) def _checkPush(self, change): self.assertEqual( change['repository'], 'http://localhost:7990/projects/CI/repos/py-repo/') self.assertEqual(change['author'], 'John Smith ') self.assertEqual(change['project'], 'Continuous Integration') self.assertEqual(change['revision'], '793d4754230023d85532f9a38dba3290f959beb4') self.assertEqual( change['comments'], 'Bitbucket Server commit ' '793d4754230023d85532f9a38dba3290f959beb4') self.assertEqual( change['revlink'], 'http://localhost:7990/projects/CI/repos/py-repo/commits/' '793d4754230023d85532f9a38dba3290f959beb4') @defer.inlineCallbacks def testHookWithChangeOnRefsChangedEvent(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: 'repo:refs_changed'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496411680') 
self.assertEqual(change['category'], 'push') @defer.inlineCallbacks def testHookWithChangeOnPushEvent(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: 'repo:push'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496411680') self.assertEqual(change['category'], 'push') @defer.inlineCallbacks def testHookWithNonDictOption(self): self.change_hook.dialects = {'bitbucketserver': True} yield self.testHookWithChangeOnPushEvent() def _checkPullRequest(self, change): self.assertEqual( change['repository'], 'http://localhost:7990/projects/CI/repos/py-repo/') self.assertEqual(change['author'], 'John Smith ') self.assertEqual(change['project'], 'Continuous Integration') self.assertEqual(change['comments'], 'Bitbucket Server Pull Request #21') self.assertEqual(change['revlink'], 'http://localhost:7990/projects/' 'CI/repos/py-repo/pull-requests/21') self.assertEqual(change['revision'], 'a87e21f7433d8c16ac7be7413483fbb76c72a8ba') self.assertDictSubset(bitbucketPRproperties, change["properties"]) @defer.inlineCallbacks def testHookWithChangeOnPullRequestCreated(self): request = _prepare_request( pullRequestCreatedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:created'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/pull-requests/21/merge') self.assertEqual(change['category'], 'pull-created') @defer.inlineCallbacks def testHookWithChangeOnPullRequestUpdated(self): request = _prepare_request( pullRequestUpdatedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:updated'}) yield request.test_render(self.change_hook) 
self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/pull-requests/21/merge') self.assertEqual(change['category'], 'pull-updated') @defer.inlineCallbacks def testHookWithChangeOnPullRequestRejected(self): request = _prepare_request( pullRequestRejectedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:rejected'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496411680') self.assertEqual(change['category'], 'pull-rejected') @defer.inlineCallbacks def testHookWithChangeOnPullRequestFulfilled(self): request = _prepare_request( pullRequestFulfilledJsonPayload, headers={_HEADER_EVENT: 'pullrequest:fulfilled'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/heads/master') self.assertEqual(change['category'], 'pull-fulfilled') @defer.inlineCallbacks def _checkCodebase(self, event_type, expected_codebase): payloads = { 'repo:refs_changed': pushJsonPayload, 'pullrequest:updated': pullRequestUpdatedJsonPayload} request = _prepare_request( payloads[event_type], headers={_HEADER_EVENT: event_type}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self.assertEqual(change['codebase'], expected_codebase) @defer.inlineCallbacks def testHookWithCodebaseValueOnPushEvent(self): self.change_hook.dialects = { 'bitbucketserver': {'codebase': 'super-codebase'}} yield 
self._checkCodebase('repo:refs_changed', 'super-codebase') @defer.inlineCallbacks def testHookWithCodebaseFunctionOnPushEvent(self): self.change_hook.dialects = { 'bitbucketserver': { 'codebase': lambda payload: payload['repository']['project']['key']}} yield self._checkCodebase('repo:refs_changed', 'CI') @defer.inlineCallbacks def testHookWithCodebaseValueOnPullEvent(self): self.change_hook.dialects = { 'bitbucketserver': {'codebase': 'super-codebase'}} yield self._checkCodebase('pullrequest:updated', 'super-codebase') @defer.inlineCallbacks def testHookWithCodebaseFunctionOnPullEvent(self): self.change_hook.dialects = { 'bitbucketserver': { 'codebase': lambda payload: payload['repository']['project']['key']}} yield self._checkCodebase('pullrequest:updated', 'CI') @defer.inlineCallbacks def testHookWithUnhandledEvent(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: 'invented:event'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b"Unknown event: invented_event") @defer.inlineCallbacks def testHookWithChangeOnCreateTag(self): request = _prepare_request( newTagJsonPayload, headers={_HEADER_EVENT: 'repo:refs_changed'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/tags/1.0.0') self.assertEqual(change['category'], 'push') @defer.inlineCallbacks def testHookWithChangeOnDeleteTag(self): request = _prepare_request( deleteTagJsonPayload, headers={_HEADER_EVENT: 'repo:refs_changed'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/tags/1.0.0') 
self.assertEqual(change['category'], 'ref-deleted') @defer.inlineCallbacks def testHookWithChangeOnDeleteBranch(self): request = _prepare_request( deleteBranchJsonPayload, headers={_HEADER_EVENT: 'repo:refs_changed'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496758965') self.assertEqual(change['category'], 'ref-deleted') @defer.inlineCallbacks def testHookWithInvalidContentType(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: b'repo:refs_changed'}) request.received_headers[b'Content-Type'] = b'invalid/content' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b"Unknown content type: invalid/content") buildbot-3.4.0/master/buildbot/test/unit/www/test_hooks_github.py000066400000000000000000001616561413250514000253130ustar00rootroot00000000000000# coding: utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import hmac from copy import deepcopy from hashlib import sha1 from io import BytesIO from twisted.internet import defer from twisted.trial import unittest from buildbot.plugins import util from buildbot.secrets.manager import SecretManager from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes from buildbot.www.change_hook import ChangeHookResource from buildbot.www.hooks.github import _HEADER_EVENT from buildbot.www.hooks.github import _HEADER_SIGNATURE from buildbot.www.hooks.github import GitHubEventHandler # Sample GITHUB commit payload from http://help.github.com/post-receive-hooks/ # Added "modified" and "removed", and change email # Added "head_commit" # https://developer.github.com/v3/activity/events/types/#webhook-payload-example-26 gitJsonPayload = b""" { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "html_url": "http://github.com/defunkt/github", "name": "github", "full_name": "defunkt/github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "fred@flinstone.org", "name": "defunkt" } }, "commits": [ { "id": "41a212ee83ca127e3c8cf465891ab7216a705f59", "distinct": true, "url": "http://github.com/defunkt/github/commit/41a212ee83ca127e3c8cf465891ab7216a705f59", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "okay i give in", "timestamp": "2008-02-15T14:57:17-08:00", "added": ["filepath.rb"] }, { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": 
"http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] } ], "head_commit": { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": "http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] }, "after": "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/heads/master" } """ gitJsonPayloadCiSkipTemplate = """ { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "html_url": "http://github.com/defunkt/github", "name": "github", "full_name": "defunkt/github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "fred@flinstone.org", "name": "defunkt" } }, "commits": [ { "id": "41a212ee83ca127e3c8cf465891ab7216a705f59", "distinct": true, "url": "http://github.com/defunkt/github/commit/41a212ee83ca127e3c8cf465891ab7216a705f59", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "okay i give in", "timestamp": "2008-02-15T14:57:17-08:00", "added": ["filepath.rb"] }, { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": "http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, 
"message": "update pricing a tad %(skip)s", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] } ], "head_commit": { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": "http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad %(skip)s", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] }, "after": "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/heads/master" } """ gitJsonPayloadTag = b""" { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "html_url": "http://github.com/defunkt/github", "name": "github", "full_name": "defunkt/github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "fred@flinstone.org", "name": "defunkt" } }, "commits": [ { "id": "41a212ee83ca127e3c8cf465891ab7216a705f59", "distinct": true, "url": "http://github.com/defunkt/github/commit/41a212ee83ca127e3c8cf465891ab7216a705f59", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "okay i give in", "timestamp": "2008-02-15T14:57:17-08:00", "added": ["filepath.rb"] }, { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": "http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] } ], "head_commit": { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": 
"http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] }, "after": "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/tags/v1.0.0" } """ gitJsonPayloadNonBranch = b""" { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "html_url": "http://github.com/defunkt/github", "name": "github", "full_name": "defunkt/github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "fred@flinstone.org", "name": "defunkt" } }, "commits": [ { "id": "41a212ee83ca127e3c8cf465891ab7216a705f59", "distinct": true, "url": "http://github.com/defunkt/github/commit/41a212ee83ca127e3c8cf465891ab7216a705f59", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "okay i give in", "timestamp": "2008-02-15T14:57:17-08:00", "added": ["filepath.rb"] } ], "after": "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/garbage/master" } """ gitJsonPayloadPullRequest = b""" { "action": "opened", "number": 50, "pull_request": { "url": "https://api.github.com/repos/defunkt/github/pulls/50", "html_url": "https://github.com/defunkt/github/pull/50", "number": 50, "state": "open", "title": "Update the README with new information", "user": { "login": "defunkt", "id": 42, "type": "User" }, "body": "This is a pretty simple change that we need to pull into master.", "created_at": "2014-10-10T00:09:50Z", "updated_at": "2014-10-10T00:09:50Z", "closed_at": null, "merged_at": null, "merge_commit_sha": "cd3ff078a350901f91f4c4036be74f91d0b0d5d6", "head": { "label": "defunkt:changes", "ref": 
"changes", "sha": "05c588ba8cd510ecbe112d020f215facb17817a7", "user": { "login": "defunkt", "id": 42, "type": "User" }, "repo": { "id": 43, "name": "github", "full_name": "defunkt/github", "owner": { "login": "defunkt", "id": 42, "type": "User" }, "html_url": "https://github.com/defunkt/github", "description": "", "url": "https://api.github.com/repos/defunkt/github", "created_at": "2014-05-20T22:39:43Z", "updated_at": "2014-07-25T16:37:51Z", "pushed_at": "2014-10-10T00:09:49Z", "git_url": "git://github.com/defunkt/github.git", "ssh_url": "git@github.com:defunkt/github.git", "clone_url": "https://github.com/defunkt/github.git", "default_branch": "master" } }, "base": { "label": "defunkt:master", "ref": "master", "sha": "69a8b72e2d3d955075d47f03d902929dcaf74034", "user": { "login": "defunkt", "id": 42, "type": "User" }, "repo": { "id": 43, "name": "github", "full_name": "defunkt/github", "owner": { "login": "defunkt", "id": 42, "type": "User" }, "html_url": "https://github.com/defunkt/github", "description": "", "url": "https://api.github.com/repos/defunkt/github", "created_at": "2014-05-20T22:39:43Z", "updated_at": "2014-07-25T16:37:51Z", "pushed_at": "2014-10-10T00:09:49Z", "git_url": "git://github.com/defunkt/github.git", "ssh_url": "git@github.com:defunkt/github.git", "clone_url": "https://github.com/defunkt/github.git", "default_branch": "master" } }, "_links": { "self": { "href": "https://api.github.com/repos/defunkt/github/pulls/50" }, "html": { "href": "https://github.com/defunkt/github/pull/50" }, "commits": { "href": "https://api.github.com/repos/defunkt/github/pulls/50/commits" } }, "commits": 1, "additions": 2, "deletions": 0, "changed_files": 1 }, "repository": { "id": 43, "name": "github", "full_name": "defunkt/github", "owner": { "login": "defunkt", "id": 42, "type": "User" }, "html_url": "https://github.com/defunkt/github", "description": "", "url": "https://api.github.com/repos/defunkt/github", "created_at": "2014-05-20T22:39:43Z", "updated_at": 
"2014-07-25T16:37:51Z", "pushed_at": "2014-10-10T00:09:49Z", "git_url": "git://github.com/defunkt/github.git", "ssh_url": "git@github.com:defunkt/github.git", "clone_url": "https://github.com/defunkt/github.git", "default_branch": "master" }, "sender": { "login": "defunkt", "id": 42, "type": "User" } } """ gitJsonPayloadCommit = { "sha": "de8251ff97ee194a289832576287d6f8ad74e3d0", "commit": { "author": { "name": "defunkt", "email": "fred@flinstone.org", "date": "2017-02-12T14:39:33Z" }, "committer": { "name": "defunkt", "email": "fred@flinstone.org", "date": "2017-02-12T14:51:05Z" }, "message": "black magic", "tree": { }, "url": "...", "comment_count": 0 }, "url": "...", "html_url": "...", "comments_url": "...", "author": {}, "committer": {}, "parents": [], "stats": {}, "files": [] } gitJsonPayloadFiles = [ { "filename": "README.md" } ] gitPRproperties = { 'pullrequesturl': 'https://github.com/defunkt/github/pull/50', 'github.head.sha': '05c588ba8cd510ecbe112d020f215facb17817a7', 'github.state': 'open', 'github.base.repo.full_name': 'defunkt/github', 'github.number': 50, 'github.base.ref': 'master', 'github.base.sha': '69a8b72e2d3d955075d47f03d902929dcaf74034', 'github.head.repo.full_name': 'defunkt/github', 'github.merged_at': None, 'github.head.ref': 'changes', 'github.closed_at': None, 'github.title': 'Update the README with new information', 'event': 'pull_request' } gitJsonPayloadEmpty = b""" { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "html_url": "http://github.com/defunkt/github", "name": "github", "full_name": "defunkt/github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "fred@flinstone.org", "name": "defunkt" } }, "commits": [ ], "head_commit": { }, "after": "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/heads/master" } """ gitJsonPayloadCreateTag = b""" { "ref": "refs/tags/v0.9.15.post1", "before": 
"0000000000000000000000000000000000000000", "after": "ffe1e9affb2b5399369443194c02068032f9295e", "created": true, "deleted": false, "forced": false, "base_ref": null, "compare": "https://github.com/buildbot/buildbot/compare/v0.9.15.post1", "commits": [ ], "head_commit": { "id": "57df618a4a450410c1dee440c7827ee105f5a226", "tree_id": "f9768673dc968b5c8fcbb15f119ce237b50b3252", "distinct": true, "message": "...", "timestamp": "2018-01-07T16:30:52+01:00", "url": "https://github.com/buildbot/buildbot/commit/...", "author": { "name": "User", "email": "userid@example.com", "username": "userid" }, "committer": { "name": "GitHub", "email": "noreply@github.com", "username": "web-flow" }, "added": [ ], "removed": [ "master/buildbot/newsfragments/bit_length.bugfix", "master/buildbot/newsfragments/localworker_umask.bugfix", "master/buildbot/newsfragments/svn-utf8.bugfix" ], "modified": [ ".bbtravis.yml", "circle.yml", "master/docs/relnotes/index.rst" ] }, "repository": { "html_url": "https://github.com/buildbot/buildbot", "name": "buildbot", "full_name": "buildbot" }, "pusher": { "name": "userid", "email": "userid@example.com" }, "organization": { "login": "buildbot", "url": "https://api.github.com/orgs/buildbot", "description": "Continous integration and delivery framework" }, "sender": { "login": "userid", "gravatar_id": "", "type": "User", "site_admin": false }, "ref_name": "v0.9.15.post1", "distinct_commits": [ ] }""" gitJsonPayloadNotFound = b"""{"message":"Not Found"}""" _HEADER_CT = b'Content-Type' _CT_ENCODED = b'application/x-www-form-urlencoded' _CT_JSON = b'application/json' def _prepare_github_change_hook(testcase, **params): return ChangeHookResource(dialects={ 'github': params }, master=fakeMasterForHooks(testcase)) def _prepare_request(event, payload, _secret=None, headers=None): if headers is None: headers = dict() request = FakeRequest() request.uri = b"/change_hook/github" request.method = b"GET" request.received_headers = { _HEADER_EVENT: event } assert 
def _prepare_request(event, payload, _secret=None, headers=None):
    """Build a FakeRequest simulating a GitHub webhook delivery.

    :param event: value for the X-GitHub-Event header (bytes)
    :param payload: bytes (delivered as application/json body) or list
        (delivered as an urlencoded ``payload`` form field)
    :param _secret: when given, an ``X-Hub-Signature`` header is computed
        over the payload with HMAC-SHA1, GitHub-style (``sha1=<hex>``)
    :param headers: extra headers merged in last (may override defaults)
    """
    if headers is None:
        headers = {}
    request = FakeRequest()
    request.uri = b"/change_hook/github"
    request.method = b"GET"
    request.received_headers = {_HEADER_EVENT: event}
    assert isinstance(payload, (bytes, list)), \
        "payload can only be bytes or list, not {}".format(type(payload))
    if isinstance(payload, bytes):
        request.content = BytesIO(payload)
        request.received_headers[_HEADER_CT] = _CT_JSON
        if _secret is not None:
            signature = hmac.new(unicode2bytes(_secret),
                                 msg=unicode2bytes(payload),
                                 digestmod=sha1)
            request.received_headers[_HEADER_SIGNATURE] = \
                'sha1={}'.format(signature.hexdigest())
    else:
        request.args[b'payload'] = payload
        request.received_headers[_HEADER_CT] = _CT_ENCODED
    request.received_headers.update(headers)
    return request


class TestChangeHookConfiguredWithGitChange(unittest.TestCase, TestReactorMixin):

    """GitHub change hook without authentication: push / tag / PR payloads.

    NOTE(review): the synchronous ``test_*`` wrappers previously discarded
    the Deferred returned by their ``@defer.inlineCallbacks`` helpers, so
    trial could not wait for (or report) asynchronous failures.  They now
    return or yield the Deferred.
    """

    @defer.inlineCallbacks
    def setUp(self):
        self.setUpTestReactor()
        self.changeHook = _prepare_github_change_hook(
            self, strict=False, github_property_whitelist=["github.*"])
        self.master = self.changeHook.master
        fake_headers = {'User-Agent': 'Buildbot'}
        self._http = yield fakehttpclientservice.HTTPClientService.getService(
            self.master, self, 'https://api.github.com',
            headers=fake_headers, debug=False, verify=False)
        yield self.master.startService()

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.master.stopService()

    def assertDictSubset(self, expected_dict, response_dict):
        # every key of expected_dict must appear in response_dict with the
        # same value (response_dict may contain extra keys)
        expected = {}
        for key in expected_dict.keys():
            self.assertIn(key, set(response_dict.keys()))
            expected[key] = response_dict[key]
        self.assertDictEqual(expected_dict, expected)

    @defer.inlineCallbacks
    def test_unknown_event(self):
        bad_event = b'whatever'
        self.request = _prepare_request(bad_event, gitJsonPayload)
        yield self.request.test_render(self.changeHook)
        expected = b'Unknown event: ' + bad_event
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0)
        self.assertEqual(self.request.written, expected)

    @defer.inlineCallbacks
    def test_unknown_content_type(self):
        bad_content_type = b'application/x-useful'
        self.request = _prepare_request(b'push', gitJsonPayload,
                                        headers={_HEADER_CT: bad_content_type})
        yield self.request.test_render(self.changeHook)
        expected = b'Unknown content type: '
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0)
        self.assertIn(expected, self.request.written)

    @defer.inlineCallbacks
    def _check_ping(self, payload):
        # a ping event must be accepted but produce no changes
        self.request = _prepare_request(b'ping', payload)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0)

    def test_ping_encoded(self):
        return self._check_ping([b'{}'])

    def test_ping_json(self):
        return self._check_ping(b'{}')

    @defer.inlineCallbacks
    def test_git_with_push_tag(self):
        self.request = _prepare_request(b'push', gitJsonPayloadTag)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2)
        change = self.changeHook.master.data.updates.changesAdded[0]
        self.assertEqual(change["author"], "Fred Flinstone ")
        self.assertEqual(change["committer"], "Freddy Flinstone ")
        self.assertEqual(change["branch"], "v1.0.0")
        self.assertEqual(change["category"], "tag")

    @defer.inlineCallbacks
    def test_git_with_push_newtag(self):
        # a tag-creation push ("created": true, empty commits list) yields a
        # single change built from head_commit
        self.request = _prepare_request(b'push', gitJsonPayloadCreateTag)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1)
        change = self.changeHook.master.data.updates.changesAdded[0]
        self.assertEqual(change["author"], "User ")
        self.assertEqual(change["branch"], "v0.9.15.post1")
        self.assertEqual(change["category"], "tag")

    # Test 'base' hook with attributes.  We should get a json string
    # representing a Change object as a dictionary.  All values should be set.
    @defer.inlineCallbacks
    def _check_git_with_change(self, payload):
        self.request = _prepare_request(b'push', payload)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2)
        change = self.changeHook.master.data.updates.changesAdded[0]
        self.assertEqual(change['files'], ['filepath.rb'])
        self.assertEqual(change["repository"], "http://github.com/defunkt/github")
        self.assertEqual(change["when_timestamp"], 1203116237)
        self.assertEqual(change["author"], "Fred Flinstone ")
        self.assertEqual(change["committer"], "Freddy Flinstone ")
        self.assertEqual(change["revision"], '41a212ee83ca127e3c8cf465891ab7216a705f59')
        self.assertEqual(change["comments"], "okay i give in")
        self.assertEqual(change["branch"], "master")
        self.assertEqual(change["revlink"],
                         "http://github.com/defunkt/github/commit/"
                         "41a212ee83ca127e3c8cf465891ab7216a705f59")
        change = self.changeHook.master.data.updates.changesAdded[1]
        self.assertEqual(change['files'], ['modfile', 'removedFile'])
        self.assertEqual(change["repository"], "http://github.com/defunkt/github")
        self.assertEqual(change["when_timestamp"], 1203114994)
        self.assertEqual(change["author"], "Fred Flinstone ")
        self.assertEqual(change["committer"], "Freddy Flinstone ")
        self.assertEqual(change["src"], "git")
        self.assertEqual(change["revision"], 'de8251ff97ee194a289832576287d6f8ad74e3d0')
        self.assertEqual(change["comments"], "update pricing a tad")
        self.assertEqual(change["branch"], "master")
        self.assertEqual(change["revlink"],
                         "http://github.com/defunkt/github/commit/"
                         "de8251ff97ee194a289832576287d6f8ad74e3d0")
        self.assertEqual(change["properties"]["event"], "push")

    def test_git_with_change_encoded(self):
        return self._check_git_with_change([gitJsonPayload])

    def test_git_with_change_json(self):
        return self._check_git_with_change(gitJsonPayload)

    # Test that, even with commits not marked as distinct, the changes get
    # recorded each time we receive the payload.  This is important because
    # without it, commits can get pushed to a non-scheduled branch, get
    # recorded and associated with that branch, and then later get pushed to a
    # scheduled branch and not trigger a build.
    #
    # For example, if a commit is pushed to a dev branch, it then gets recorded
    # as a change associated with that dev branch.  If that change is later
    # pushed to master, we still need to trigger a build even though we've seen
    # the commit before.
    @defer.inlineCallbacks
    def testGitWithDistinctFalse(self):
        self.request = _prepare_request(
            b'push',
            [gitJsonPayload.replace(b'"distinct": true,', b'"distinct": false,')])
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2)
        change = self.changeHook.master.data.updates.changesAdded[0]
        self.assertEqual(change['files'], ['filepath.rb'])
        self.assertEqual(change["repository"], "http://github.com/defunkt/github")
        self.assertEqual(change["when_timestamp"], 1203116237)
        self.assertEqual(change["author"], "Fred Flinstone ")
        self.assertEqual(change["committer"], "Freddy Flinstone ")
        self.assertEqual(change["revision"], '41a212ee83ca127e3c8cf465891ab7216a705f59')
        self.assertEqual(change["comments"], "okay i give in")
        self.assertEqual(change["branch"], "master")
        self.assertEqual(change["revlink"],
                         "http://github.com/defunkt/github/commit/"
                         "41a212ee83ca127e3c8cf465891ab7216a705f59")
        self.assertEqual(change["properties"]["github_distinct"], False)
        change = self.changeHook.master.data.updates.changesAdded[1]
        self.assertEqual(change['files'], ['modfile', 'removedFile'])
        self.assertEqual(change["repository"], "http://github.com/defunkt/github")
        self.assertEqual(change["when_timestamp"], 1203114994)
        self.assertEqual(change["author"], "Fred Flinstone ")
        self.assertEqual(change["committer"], "Freddy Flinstone ")
        self.assertEqual(change["src"], "git")
        self.assertEqual(change["revision"], 'de8251ff97ee194a289832576287d6f8ad74e3d0')
        self.assertEqual(change["comments"], "update pricing a tad")
        self.assertEqual(change["branch"], "master")
        self.assertEqual(change["revlink"],
                         "http://github.com/defunkt/github/commit/"
                         "de8251ff97ee194a289832576287d6f8ad74e3d0")

    @defer.inlineCallbacks
    def testGitWithNoJson(self):
        # empty body must be rejected with HTTP 400 and a JSON parse error
        self.request = _prepare_request(b'push', b'')
        yield self.request.test_render(self.changeHook)
        expected = b"Expecting value: line 1 column 1 (char 0)"
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0)
        self.assertEqual(self.request.written, expected)
        self.request.setResponseCode.assert_called_with(400, expected)

    @defer.inlineCallbacks
    def _check_git_with_no_changes(self, payload):
        self.request = _prepare_request(b'push', payload)
        yield self.request.test_render(self.changeHook)
        expected = b"no change found"
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0)
        self.assertEqual(self.request.written, expected)

    def test_git_with_no_changes_encoded(self):
        return self._check_git_with_no_changes([gitJsonPayloadEmpty])

    def test_git_with_no_changes_json(self):
        return self._check_git_with_no_changes(gitJsonPayloadEmpty)

    @defer.inlineCallbacks
    def _check_git_with_non_branch_changes(self, payload):
        # refs outside refs/heads and refs/tags (e.g. refs/garbage/*) are
        # ignored
        self.request = _prepare_request(b'push', payload)
        yield self.request.test_render(self.changeHook)
        expected = b"no change found"
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0)
        self.assertEqual(self.request.written, expected)

    def test_git_with_non_branch_changes_encoded(self):
        return self._check_git_with_non_branch_changes([gitJsonPayloadNonBranch])

    def test_git_with_non_branch_changes_json(self):
        return self._check_git_with_non_branch_changes(gitJsonPayloadNonBranch)

    @defer.inlineCallbacks
    def _check_git_with_pull(self, payload):
        self.request = _prepare_request('pull_request', payload)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1)
        change = self.changeHook.master.data.updates.changesAdded[0]
        self.assertEqual(change["repository"], "https://github.com/defunkt/github")
        self.assertEqual(change["when_timestamp"], 1412899790)
        self.assertEqual(change["author"], "defunkt")
        self.assertEqual(change["revision"], '05c588ba8cd510ecbe112d020f215facb17817a7')
        self.assertEqual(change["comments"],
                         "GitHub Pull Request #50 (1 commit)\n"
                         "Update the README with new information\n"
                         "This is a pretty simple change that we need to pull into master.")
        self.assertEqual(change["branch"], "refs/pull/50/merge")
        self.assertEqual(change['files'], [])
        self.assertEqual(change["revlink"], "https://github.com/defunkt/github/pull/50")
        self.assertEqual(change['properties']['basename'], "master")
        self.assertDictSubset(gitPRproperties, change["properties"])

    def test_git_with_pull_encoded(self):
        # without a token the commit/files API lookups 404 and files stay []
        commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7'
        files_endpoint = '/repos/defunkt/github/pulls/50/files'
        self._http.expect('get', commit_endpoint,
                          content_json=gitJsonPayloadNotFound, code=404)
        self._http.expect('get', files_endpoint,
                          content_json=gitJsonPayloadNotFound, code=404)
        return self._check_git_with_pull([gitJsonPayloadPullRequest])

    def test_git_with_pull_json(self):
        commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7'
        files_endpoint = '/repos/defunkt/github/pulls/50/files'
        self._http.expect('get', commit_endpoint,
                          content_json=gitJsonPayloadNotFound, code=404)
        self._http.expect('get', files_endpoint,
                          content_json=gitJsonPayloadNotFound, code=404)
        return self._check_git_with_pull(gitJsonPayloadPullRequest)

    @defer.inlineCallbacks
    def _check_git_push_with_skip_message(self, payload):
        self.request = _prepare_request(b'push', payload)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0)

    @defer.inlineCallbacks
    def test_git_push_with_skip_message(self):
        # default skip patterns: [ci skip] / [skip ci], with optional spaces
        payloads = [
            unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': '[ci skip]'}),
            unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': '[skip ci]'}),
            unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': '[ ci skip ]'}),
        ]
        for payload in payloads:
            yield self._check_git_push_with_skip_message(payload)


class TestChangeHookConfiguredWithGitChangeCustomPullrequestRef(
        unittest.TestCase, TestReactorMixin):

    """Hook configured with pullrequest_ref="head": PR changes use
    refs/pull/N/head instead of refs/pull/N/merge."""

    @defer.inlineCallbacks
    def setUp(self):
        self.setUpTestReactor()
        self.changeHook = _prepare_github_change_hook(
            self, strict=False, github_property_whitelist=["github.*"],
            pullrequest_ref="head")
        self.master = self.changeHook.master
        fake_headers = {'User-Agent': 'Buildbot'}
        self._http = yield fakehttpclientservice.HTTPClientService.getService(
            self.master, self, 'https://api.github.com',
            headers=fake_headers, debug=False, verify=False)
        yield self.master.startService()

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.master.stopService()

    @defer.inlineCallbacks
    def test_git_pull_request_with_custom_ref(self):
        commit = deepcopy([gitJsonPayloadPullRequest])
        commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7'
        files_endpoint = '/repos/defunkt/github/pulls/50/files'
        self._http.expect('get', commit_endpoint,
                          content_json=gitJsonPayloadNotFound, code=404)
        self._http.expect('get', files_endpoint,
                          content_json=gitJsonPayloadNotFound, code=404)
        self.request = _prepare_request('pull_request', commit)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1)
        change = self.changeHook.master.data.updates.changesAdded[0]
        self.assertEqual(change["branch"], "refs/pull/50/head")
class TestChangeHookConfiguredWithGitChangeCustomPullrequestRefWithAuth(
        unittest.TestCase, TestReactorMixin):

    """pullrequest_ref="head" combined with an API token: the commit and
    files endpoints succeed, and the change still uses refs/pull/N/head."""

    @defer.inlineCallbacks
    def setUp(self):
        self.setUpTestReactor()
        _token = '7e076f41-b73a-4045-a817'
        self.changeHook = _prepare_github_change_hook(
            self, strict=False, github_property_whitelist=["github.*"],
            pullrequest_ref="head", token=_token)
        self.master = self.changeHook.master
        fake_headers = {
            'User-Agent': 'Buildbot',
            'Authorization': 'token ' + _token,
        }
        self._http = yield fakehttpclientservice.HTTPClientService.getService(
            self.master, self, 'https://api.github.com',
            headers=fake_headers, debug=False, verify=False)
        yield self.master.startService()

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.master.stopService()

    @defer.inlineCallbacks
    def test_git_pull_request_with_custom_ref(self):
        commit = deepcopy([gitJsonPayloadPullRequest])
        commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7'
        files_endpoint = '/repos/defunkt/github/pulls/50/files'
        self._http.expect('get', commit_endpoint, content_json=gitJsonPayloadCommit)
        self._http.expect('get', files_endpoint, content_json=gitJsonPayloadFiles)
        self.request = _prepare_request('pull_request', commit)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1)
        change = self.changeHook.master.data.updates.changesAdded[0]
        self.assertEqual(change["branch"], "refs/pull/50/head")


class TestChangeHookRefWithAuth(unittest.TestCase, TestReactorMixin):

    """Hook whose token is a Secret reference resolved through the secret
    manager rather than a literal string."""

    # name under which the token is stored in the fake secret storage
    secret_name = 'secretkey'
    # the resolved token value expected in the Authorization header
    secret_value = 'githubtoken'

    @defer.inlineCallbacks
    def setUp(self):
        self.setUpTestReactor()
        self.changeHook = _prepare_github_change_hook(
            self, strict=False, github_property_whitelist=["github.*"],
            token=util.Secret(self.secret_name))
        self.master = self.changeHook.master
        fake_headers = {
            'User-Agent': 'Buildbot',
            'Authorization': 'token ' + self.secret_value,
        }
        self._http = yield fakehttpclientservice.HTTPClientService.getService(
            self.master, self, 'https://api.github.com',
            headers=fake_headers, debug=False, verify=False)
        fake_storage = FakeSecretStorage()
        secret_service = SecretManager()
        secret_service.services = [fake_storage]
        yield secret_service.setServiceParent(self.master)
        yield self.master.startService()
        # configure the secret after startService, as in the original test
        fake_storage.reconfigService(
            secretdict={self.secret_name: self.secret_value})

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.master.stopService()

    @defer.inlineCallbacks
    def test_git_pull_request(self):
        commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7'
        files_endpoint = '/repos/defunkt/github/pulls/50/files'
        self._http.expect('get', commit_endpoint, content_json=gitJsonPayloadCommit)
        self._http.expect('get', files_endpoint, content_json=gitJsonPayloadFiles)
        self.request = _prepare_request('pull_request', gitJsonPayloadPullRequest)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1)
        change = self.changeHook.master.data.updates.changesAdded[0]
        self.assertEqual(change["branch"], "refs/pull/50/merge")


class TestChangeHookConfiguredWithAuthAndCustomSkips(
        unittest.TestCase, TestReactorMixin):

    """Hook configured with a custom skips pattern ([bb skip]); the default
    [ci skip]/[skip ci] patterns must then be inactive.

    NOTE(review): loop-based tests previously fired several
    ``@defer.inlineCallbacks`` helpers without yielding, discarding their
    Deferreds; they are now yielded so each check completes (and can fail)
    in order.
    """

    @defer.inlineCallbacks
    def setUp(self):
        self.setUpTestReactor()
        _token = '7e076f41-b73a-4045-a817'
        self.changeHook = _prepare_github_change_hook(
            self, strict=False, skips=[r'\[ *bb *skip *\]'], token=_token)
        self.master = self.changeHook.master
        fake_headers = {
            'User-Agent': 'Buildbot',
            'Authorization': 'token ' + _token,
        }
        self._http = yield fakehttpclientservice.HTTPClientService.getService(
            self.master, self, 'https://api.github.com',
            headers=fake_headers, debug=False, verify=False)
        yield self.master.startService()

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.master.stopService()

    @defer.inlineCallbacks
    def _check_push_with_skip_message(self, payload):
        self.request = _prepare_request(b'push', payload)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0)

    @defer.inlineCallbacks
    def test_push_with_skip_message(self):
        payloads = [
            unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': '[bb skip]'}),
            unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': '[ bb skip ]'}),
        ]
        for payload in payloads:
            yield self._check_push_with_skip_message(payload)

    @defer.inlineCallbacks
    def _check_push_no_ci_skip(self, payload):
        self.request = _prepare_request(b'push', payload)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2)

    def test_push_no_ci_skip(self):
        # user overrode the skip pattern already,
        # so the default patterns should not work.
        payload = gitJsonPayloadCiSkipTemplate % {'skip': '[ci skip]'}
        payload = unicode2bytes(payload)
        return self._check_push_no_ci_skip(payload)

    @defer.inlineCallbacks
    def _check_pull_request_with_skip_message(self, payload):
        self.request = _prepare_request(b'pull_request', payload)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0)

    @defer.inlineCallbacks
    def test_pull_request_with_skip_message(self):
        api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7'
        commit = deepcopy(gitJsonPayloadCommit)
        msgs = (
            'black magic [bb skip]',
            'black magic [ bb skip ]',
        )
        for msg in msgs:
            commit['commit']['message'] = msg
            self._http.expect('get', api_endpoint, content_json=commit)
            yield self._check_pull_request_with_skip_message(
                gitJsonPayloadPullRequest)

    @defer.inlineCallbacks
    def _check_pull_request_no_skip(self, payload):
        self.request = _prepare_request(b'pull_request', payload)
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1)

    def test_pull_request_no_skip(self):
        commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7'
        files_endpoint = '/repos/defunkt/github/pulls/50/files'
        self._http.expect('get', commit_endpoint, content_json=gitJsonPayloadCommit)
        self._http.expect('get', files_endpoint, content_json=gitJsonPayloadFiles)
        commit = deepcopy(gitJsonPayloadCommit)
        commit['commit']['message'] = 'black magic [skip bb]'  # pattern not matched
        return self._check_pull_request_no_skip(gitJsonPayloadPullRequest)
@defer.inlineCallbacks def setUp(self): self.setUpTestReactor() _token = '7e076f41-b73a-4045-a817' self.changeHook = _prepare_github_change_hook( self, strict=False, token=_token, github_property_whitelist=["github.*"]) self.master = self.changeHook.master fake_headers = { 'User-Agent': 'Buildbot', 'Authorization': 'token ' + _token, } self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, 'https://api.github.com', headers=fake_headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() def assertDictSubset(self, expected_dict, response_dict): expected = {} for key in expected_dict.keys(): self.assertIn(key, set(response_dict.keys())) expected[key] = response_dict[key] self.assertDictEqual(expected_dict, expected) @defer.inlineCallbacks def _check_pull_request(self, payload): self.request = _prepare_request(b'pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) def test_pull_request(self): commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' files_endpoint = '/repos/defunkt/github/pulls/50/files' self._http.expect('get', commit_endpoint, content_json=gitJsonPayloadCommit) self._http.expect('get', files_endpoint, content_json=gitJsonPayloadFiles) self._check_pull_request(gitJsonPayloadPullRequest) @defer.inlineCallbacks def _check_git_with_pull(self, payload, valid_token=True): self.request = _prepare_request('pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["repository"], "https://github.com/defunkt/github") self.assertEqual(change["when_timestamp"], 1412899790) self.assertEqual(change["author"], "defunkt") 
self.assertEqual(change["revision"], '05c588ba8cd510ecbe112d020f215facb17817a7') self.assertEqual(change["comments"], "GitHub Pull Request #50 (1 commit)\n" "Update the README with new information\n" "This is a pretty simple change that we need to pull into master.") self.assertEqual(change["branch"], "refs/pull/50/merge") if valid_token: self.assertEqual(change['files'], ['README.md']) else: self.assertEqual(change['files'], []) self.assertEqual(change["revlink"], "https://github.com/defunkt/github/pull/50") self.assertEqual(change['properties']['basename'], "master") self.assertDictSubset(gitPRproperties, change["properties"]) def test_git_with_pull_encoded(self): commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' files_endpoint = '/repos/defunkt/github/pulls/50/files' self._http.expect('get', commit_endpoint, content_json=gitJsonPayloadCommit) self._http.expect('get', files_endpoint, content_json=gitJsonPayloadFiles) self._check_git_with_pull([gitJsonPayloadPullRequest]) def test_git_with_pull_json(self): commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' files_endpoint = '/repos/defunkt/github/pulls/50/files' self._http.expect('get', commit_endpoint, content_json=gitJsonPayloadCommit) self._http.expect('get', files_endpoint, content_json=gitJsonPayloadFiles) self._check_git_with_pull(gitJsonPayloadPullRequest) def test_git_with_pull_encoded_and_bad_token(self): commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' files_endpoint = '/repos/defunkt/github/pulls/50/files' self._http.expect('get', commit_endpoint, content_json=gitJsonPayloadNotFound, code=404) self._http.expect('get', files_endpoint, content_json=gitJsonPayloadNotFound, code=404) self._check_git_with_pull([gitJsonPayloadPullRequest], valid_token=False) def test_git_with_pull_json_and_bad_token(self): commit_endpoint = 
'/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' files_endpoint = '/repos/defunkt/github/pulls/50/files' self._http.expect('get', commit_endpoint, content_json=gitJsonPayloadNotFound, code=404) self._http.expect('get', files_endpoint, content_json=gitJsonPayloadNotFound, code=404) self._check_git_with_pull(gitJsonPayloadPullRequest, valid_token=False) @defer.inlineCallbacks def _check_git_pull_request_with_skip_message(self, payload): self.request = _prepare_request(b'pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) def test_git_pull_request_with_skip_message(self): api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' commit = deepcopy(gitJsonPayloadCommit) msgs = ( 'black magic [ci skip]', 'black magic [skip ci]', 'black magic [ ci skip ]', ) for msg in msgs: commit['commit']['message'] = msg self._http.expect('get', api_endpoint, content_json=commit) self._check_git_pull_request_with_skip_message( gitJsonPayloadPullRequest) class TestChangeHookConfiguredWithCustomApiRoot(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_github_change_hook( self, strict=False, github_api_endpoint='https://black.magic.io') self.master = self.changeHook.master fake_headers = {'User-Agent': 'Buildbot'} self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, 'https://black.magic.io', headers=fake_headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def _check_pull_request(self, payload): self.request = _prepare_request(b'pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) def test_pull_request(self): 
commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' files_endpoint = '/repos/defunkt/github/pulls/50/files' self._http.expect('get', commit_endpoint, content_json=gitJsonPayloadNotFound, code=404) self._http.expect('get', files_endpoint, content_json=gitJsonPayloadNotFound, code=404) self._check_pull_request(gitJsonPayloadPullRequest) class TestChangeHookConfiguredWithCustomApiRootWithAuth(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() _token = '7e076f41-b73a-4045-a817' self.changeHook = _prepare_github_change_hook( self, strict=False, github_api_endpoint='https://black.magic.io', token=_token) self.master = self.changeHook.master fake_headers = { 'User-Agent': 'Buildbot', 'Authorization': 'token ' + _token, } self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, self, 'https://black.magic.io', headers=fake_headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def _check_pull_request(self, payload): self.request = _prepare_request(b'pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) def test_pull_request(self): commit_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' files_endpoint = '/repos/defunkt/github/pulls/50/files' self._http.expect('get', commit_endpoint, content_json=gitJsonPayloadCommit) self._http.expect('get', files_endpoint, content_json=gitJsonPayloadFiles) self._check_pull_request(gitJsonPayloadPullRequest) class TestChangeHookConfiguredWithStrict(unittest.TestCase, TestReactorMixin): _SECRET = 'somethingreallysecret' def setUp(self): self.setUpTestReactor() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"secret_key": self._SECRET}) secretService = 
SecretManager() secretService.services = [fakeStorageService] self.changeHook = _prepare_github_change_hook(self, strict=True, secret=util.Secret("secret_key")) self.changeHook.master.addService(secretService) @defer.inlineCallbacks def test_signature_ok(self): self.request = _prepare_request(b'push', gitJsonPayload, _secret=self._SECRET) yield self.request.test_render(self.changeHook) # Can it somehow be merged w/ the same code above in a different class? self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change['files'], ['filepath.rb']) self.assertEqual(change["repository"], "http://github.com/defunkt/github") self.assertEqual(change["when_timestamp"], 1203116237) self.assertEqual(change["author"], "Fred Flinstone ") self.assertEqual(change["committer"], "Freddy Flinstone ") self.assertEqual(change["revision"], '41a212ee83ca127e3c8cf465891ab7216a705f59') self.assertEqual(change["comments"], "okay i give in") self.assertEqual(change["branch"], "master") self.assertEqual(change["revlink"], "http://github.com/defunkt/github/commit/" "41a212ee83ca127e3c8cf465891ab7216a705f59") change = self.changeHook.master.data.updates.changesAdded[1] self.assertEqual(change['files'], ['modfile', 'removedFile']) self.assertEqual(change["repository"], "http://github.com/defunkt/github") self.assertEqual(change["when_timestamp"], 1203114994) self.assertEqual(change["author"], "Fred Flinstone ") self.assertEqual(change["committer"], "Freddy Flinstone ") self.assertEqual(change["src"], "git") self.assertEqual(change["revision"], 'de8251ff97ee194a289832576287d6f8ad74e3d0') self.assertEqual(change["comments"], "update pricing a tad") self.assertEqual(change["branch"], "master") self.assertEqual(change["revlink"], "http://github.com/defunkt/github/commit/" "de8251ff97ee194a289832576287d6f8ad74e3d0") @defer.inlineCallbacks def test_unknown_hash(self): bad_hash_type = b'blah' self.request 
= _prepare_request(b'push', gitJsonPayload, headers={ _HEADER_SIGNATURE: bad_hash_type + b'=doesnotmatter' }) yield self.request.test_render(self.changeHook) expected = b'Unknown hash type: ' + bad_hash_type self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) @defer.inlineCallbacks def test_signature_nok(self): bad_signature = b'sha1=wrongstuff' self.request = _prepare_request(b'push', gitJsonPayload, headers={ _HEADER_SIGNATURE: bad_signature }) yield self.request.test_render(self.changeHook) expected = b'Hash mismatch' self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) @defer.inlineCallbacks def test_missing_secret(self): # override the value assigned in setUp self.changeHook = _prepare_github_change_hook(self, strict=True) self.request = _prepare_request(b'push', gitJsonPayload) yield self.request.test_render(self.changeHook) expected = b'Strict mode is requested while no secret is provided' self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) @defer.inlineCallbacks def test_wrong_signature_format(self): bad_signature = b'hash=value=something' self.request = _prepare_request(b'push', gitJsonPayload, headers={ _HEADER_SIGNATURE: bad_signature }) yield self.request.test_render(self.changeHook) expected = b'Wrong signature format: ' + bad_signature self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) @defer.inlineCallbacks def test_signature_missing(self): self.request = _prepare_request(b'push', gitJsonPayload) yield self.request.test_render(self.changeHook) expected = b'Request has no required signature' self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) class 
TestChangeHookConfiguredWithCodebaseValue(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_github_change_hook(self, codebase='foobar') @defer.inlineCallbacks def _check_git_with_change(self, payload): self.request = _prepare_request(b'push', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change['codebase'], 'foobar') def test_git_with_change_encoded(self): return self._check_git_with_change([gitJsonPayload]) def test_git_with_change_json(self): return self._check_git_with_change(gitJsonPayload) def _codebase_function(payload): return 'foobar-' + payload['repository']['name'] class TestChangeHookConfiguredWithCodebaseFunction(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_github_change_hook( self, codebase=_codebase_function) @defer.inlineCallbacks def _check_git_with_change(self, payload): self.request = _prepare_request(b'push', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change['codebase'], 'foobar-github') def test_git_with_change_encoded(self): return self._check_git_with_change([gitJsonPayload]) def test_git_with_change_json(self): return self._check_git_with_change(gitJsonPayload) class TestChangeHookConfiguredWithCustomEventHandler(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() class CustomGitHubEventHandler(GitHubEventHandler): def handle_ping(self, _, __): self.master.hook_called = True return [], None self.changeHook = _prepare_github_change_hook( self, **{'class': CustomGitHubEventHandler}) @defer.inlineCallbacks def test_ping(self): self.request = _prepare_request(b'ping', b'{}') yield 
self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertTrue(self.changeHook.master.hook_called) buildbot-3.4.0/master/buildbot/test/unit/www/test_hooks_gitlab.py000066400000000000000000001276661413250514000252760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.plugins import util from buildbot.secrets.manager import SecretManager from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.www import change_hook from buildbot.www.hooks.gitlab import _HEADER_EVENT from buildbot.www.hooks.gitlab import _HEADER_GITLAB_TOKEN # Sample GITLAB commit payload from https://docs.gitlab.com/ce/user/project/integrations/webhooks.html # noqa pylint: disable=line-too-long # Added "modified" and "removed", and change email gitJsonPayload = b""" { "before": "95790bf891e76fee5e1747ab589903a6a1f80f22", "after": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "ref": "refs/heads/master", "user_id": 4, "user_name": "John Smith", "repository": { "name": "Diaspora", "url": 
"git@localhost:diaspora.git", "description": "", "homepage": "http://localhost/diaspora" }, "commits": [ { "id": "b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327", "message": "Update Catalan translation to e38cb41.", "timestamp": "2011-12-12T14:27:31+02:00", "url": "http://localhost/diaspora/commits/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327", "author": { "name": "Jordi Mallach", "email": "jordi@softcatala.org" } }, { "id": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "message": "fixed readme", "timestamp": "2012-01-03T23:36:29+02:00", "url": "http://localhost/diaspora/commits/da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "author": { "name": "GitLab dev user", "email": "gitlabdev@dv6700.(none)" } } ], "total_commits_count": 2 } """ gitJsonPayloadTag = b""" { "object_kind": "tag_push", "before": "0000000000000000000000000000000000000000", "after": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", "ref": "refs/tags/v1.0.0", "checkout_sha": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", "user_id": 1, "user_name": "John Smith", "repository":{ "name": "Example", "url": "git@localhost:diaspora.git", "description": "", "homepage": "http://example.com/jsmith/example", "git_http_url":"http://example.com/jsmith/example.git", "git_ssh_url":"git@example.com:jsmith/example.git", "visibility_level":0 }, "commits": [ { "id": "b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327", "message": "Update Catalan translation to e38cb41.", "timestamp": "2011-12-12T14:27:31+02:00", "url": "http://localhost/diaspora/commits/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327", "author": { "name": "Jordi Mallach", "email": "jordi@softcatala.org" } }, { "id": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "message": "fixed readme", "timestamp": "2012-01-03T23:36:29+02:00", "url": "http://localhost/diaspora/commits/da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "author": { "name": "GitLab dev user", "email": "gitlabdev@dv6700.(none)" } } ], "total_commits_count": 2 } """ # == Merge requests from a different branch of the same project # GITLAB 
commit payload from an actual version 10.7.1-ee gitlab instance # chronicling the lives and times of a trivial MR through the operations # open, edit description, add commit, close, and reopen, in that order. # (Tidied with json_pp --json_opt=canonical,pretty and an editor.) # FIXME: only show diffs here to keep file smaller and increase clarity gitJsonPayloadMR_open = b""" { "event_type" : "merge_request", "object_attributes" : { "action" : "open", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-15 07:45:37 -0700", "description" : "This to both gitlab gateways!", "head_pipeline_id" : 29931, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10850, "iid" : 6, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "92268bc781b24f0a61b907da062950e9e5252a69", "message" : "Remove the dummy line again", "timestamp" : "2018-05-14T07:54:04-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/92268bc781b24f0a61b907da062950e9e5252a69" }, "last_edited_at" : null, "last_edited_by_id" : null, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : 0 }, "merge_status" : "unchecked", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : 
"git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 239, "state" : "opened", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Remove the dummy line again", "total_time_spent" : 0, "updated_at" : "2018-05-15 07:45:37 -0700", "updated_by_id" : null, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/6", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : 
"git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ # noqa pylint: disable=line-too-long gitJsonPayloadMR_editdesc = b""" { "event_type" : "merge_request", "object_attributes" : { "action" : "update", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-15 07:45:37 -0700", "description" : "Edited description.", "head_pipeline_id" : 29931, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10850, "iid" : 6, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "92268bc781b24f0a61b907da062950e9e5252a69", "message" : "Remove the dummy line again", "timestamp" : "2018-05-14T07:54:04-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/92268bc781b24f0a61b907da062950e9e5252a69" }, "last_edited_at" : "2018-05-15 07:49:55 -0700", "last_edited_by_id" : 15, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : 0 }, "merge_status" : "can_be_merged", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", 
"ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 239, "state" : "opened", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Remove the dummy line again", "total_time_spent" : 0, "updated_at" : "2018-05-15 07:49:55 -0700", "updated_by_id" : 15, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/6", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", 
"path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ # noqa pylint: disable=line-too-long gitJsonPayloadMR_addcommit = b""" { "event_type" : "merge_request", "object_attributes" : { "action" : "update", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-15 07:45:37 -0700", "description" : "Edited description.", "head_pipeline_id" : 29931, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10850, "iid" : 6, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "cee8b01dcbaeed89563c2822f7c59a93c813eb6b", "message" : "debian/compat: update to 9", "timestamp" : "2018-05-15T07:51:11-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/cee8b01dcbaeed89563c2822f7c59a93c813eb6b" }, "last_edited_at" : "2018-05-15 14:49:55 UTC", "last_edited_by_id" : 15, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : 0 }, "merge_status" : "unchecked", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "oldrev" : "92268bc781b24f0a61b907da062950e9e5252a69", "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 
239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 239, "state" : "opened", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Remove the dummy line again", "total_time_spent" : 0, "updated_at" : "2018-05-15 14:51:27 UTC", "updated_by_id" : 15, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/6", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : 
"https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ # noqa pylint: disable=line-too-long gitJsonPayloadMR_close = b""" { "event_type" : "merge_request", "object_attributes" : { "action" : "close", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-15 07:45:37 -0700", "description" : "Edited description.", "head_pipeline_id" : 29958, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10850, "iid" : 6, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "cee8b01dcbaeed89563c2822f7c59a93c813eb6b", "message" : "debian/compat: update to 9", "timestamp" : "2018-05-15T07:51:11-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/cee8b01dcbaeed89563c2822f7c59a93c813eb6b" }, "last_edited_at" : "2018-05-15 07:49:55 -0700", "last_edited_by_id" : 15, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : 0 }, "merge_status" : "can_be_merged", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", 
"http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 239, "state" : "closed", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Remove the dummy line again", "total_time_spent" : 0, "updated_at" : "2018-05-15 07:52:01 -0700", "updated_by_id" : 15, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/6", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : 
"https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ # noqa pylint: disable=line-too-long gitJsonPayloadMR_reopen = b""" { "event_type" : "merge_request", "object_attributes" : { "action" : "reopen", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-15 07:45:37 -0700", "description" : "Edited description.", "head_pipeline_id" : 29958, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10850, "iid" : 6, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "cee8b01dcbaeed89563c2822f7c59a93c813eb6b", "message" : "debian/compat: update to 9", "timestamp" : "2018-05-15T07:51:11-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/cee8b01dcbaeed89563c2822f7c59a93c813eb6b" }, "last_edited_at" : "2018-05-15 07:49:55 -0700", "last_edited_by_id" : 15, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : 0 }, "merge_status" : "can_be_merged", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", 
"homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 239, "state" : "opened", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Remove the dummy line again", "total_time_spent" : 0, "updated_at" : "2018-05-15 07:53:27 -0700", "updated_by_id" : 15, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/6", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : 
"git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ # noqa pylint: disable=line-too-long # == Merge requests from a fork of the project # (Captured more accurately than above test data) gitJsonPayloadMR_open_forked = b""" { "changes" : { "total_time_spent" : { "current" : 0, "previous" : null } }, "event_type" : "merge_request", "labels" : [], "object_attributes" : { "action" : "open", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-19 06:57:12 -0700", "description" : "This is a merge request from a fork of the project.", "head_pipeline_id" : null, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10914, "iid" : 7, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "e46ee239f3d6d41ade4d1e610669dd71ed86ec80", "message" : "Add note to README", "timestamp" : "2018-05-19T06:35:26-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/e46ee239f3d6d41ade4d1e610669dd71ed86ec80" }, "last_edited_at" : null, "last_edited_by_id" : null, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : "0" }, "merge_status" : "unchecked", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "source" : { "avatar_url" : null, "ci_config_path" : 
null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/build/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:build/awesome_project.git", "homepage" : "https://gitlab.example.com/build/awesome_project", "http_url" : "https://gitlab.example.com/build/awesome_project.git", "id" : 2337, "name" : "awesome_project", "namespace" : "build", "path_with_namespace" : "build/awesome_project", "ssh_url" : "git@gitlab.example.com:build/awesome_project.git", "url" : "git@gitlab.example.com:build/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/build/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 2337, "state" : "opened", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Add note to README", "total_time_spent" : 0, "updated_at" : "2018-05-19 06:57:12 -0700", "updated_by_id" : null, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/7", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, 
"default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "repository" : { "description" : "Trivial project for testing build machinery quickly", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "name" : "awesome_project", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ # noqa pylint: disable=line-too-long def FakeRequestMR(content): request = FakeRequest(content=content) request.uri = b"/change_hook/gitlab" request.args = {b'codebase': [b'MyCodebase']} request.received_headers[_HEADER_EVENT] = b"Merge Request Hook" request.method = b"POST" return request class TestChangeHookConfiguredWithGitChange(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.changeHook = change_hook.ChangeHookResource( dialects={'gitlab': True}, master=fakeMasterForHooks(self)) def check_changes_tag_event(self, r, project='', codebase=None): self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["repository"], "git@localhost:diaspora.git") self.assertEqual( 
change["when_timestamp"], 1323692851 ) self.assertEqual(change["branch"], "v1.0.0") def check_changes_mr_event(self, r, project='awesome_project', codebase=None, timestamp=1526309644, source_repo=None): self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["repository"], "https://gitlab.example.com/mmusterman/awesome_project.git") if source_repo is None: source_repo = "https://gitlab.example.com/mmusterman/awesome_project.git" self.assertEqual(change['properties']["source_repository"], source_repo) self.assertEqual(change['properties']["target_repository"], "https://gitlab.example.com/mmusterman/awesome_project.git") self.assertEqual( change["when_timestamp"], timestamp ) self.assertEqual(change["branch"], "master") self.assertEqual(change['properties']["source_branch"], 'ms-viewport') self.assertEqual(change['properties']["target_branch"], 'master') self.assertEqual(change["category"], "merge_request") self.assertEqual(change.get("project"), project) def check_changes_push_event(self, r, project='diaspora', codebase=None): self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["repository"], "git@localhost:diaspora.git") self.assertEqual( change["when_timestamp"], 1323692851 ) self.assertEqual( change["author"], "Jordi Mallach ") self.assertEqual( change["revision"], 'b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327') self.assertEqual( change["comments"], "Update Catalan translation to e38cb41.") self.assertEqual(change["branch"], "master") self.assertEqual(change["revlink"], "http://localhost/diaspora/commits/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327") change = self.changeHook.master.data.updates.changesAdded[1] self.assertEqual(change["repository"], "git@localhost:diaspora.git") self.assertEqual( change["when_timestamp"], 1325626589 ) self.assertEqual( 
change["author"], "GitLab dev user ") self.assertEqual(change["src"], "git") self.assertEqual( change["revision"], 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7') self.assertEqual(change["comments"], "fixed readme") self.assertEqual(change["branch"], "master") self.assertEqual(change["revlink"], "http://localhost/diaspora/commits/da1560886d4f094c3e6c9ef40349f7d38b5d27d7") # FIXME: should we convert project name to canonical case? # Or should change filter be case insensitive? self.assertEqual(change.get("project").lower(), project.lower()) self.assertEqual(change.get("codebase"), codebase) # Test 'base' hook with attributes. We should get a json string representing # a Change object as a dictionary. All values show be set. @defer.inlineCallbacks def testGitWithChange(self): self.request = FakeRequest(content=gitJsonPayload) self.request.uri = b"/change_hook/gitlab" self.request.method = b"POST" self.request.received_headers[_HEADER_EVENT] = b"Push Hook" res = yield self.request.test_render(self.changeHook) self.check_changes_push_event(res) @defer.inlineCallbacks def testGitWithChange_WithProjectToo(self): self.request = FakeRequest(content=gitJsonPayload) self.request.uri = b"/change_hook/gitlab" self.request.args = {b'project': [b'Diaspora']} self.request.received_headers[_HEADER_EVENT] = b"Push Hook" self.request.method = b"POST" res = yield self.request.test_render(self.changeHook) self.check_changes_push_event(res, project="Diaspora") @defer.inlineCallbacks def testGitWithChange_WithCodebaseToo(self): self.request = FakeRequest(content=gitJsonPayload) self.request.uri = b"/change_hook/gitlab" self.request.args = {b'codebase': [b'MyCodebase']} self.request.received_headers[_HEADER_EVENT] = b"Push Hook" self.request.method = b"POST" res = yield self.request.test_render(self.changeHook) self.check_changes_push_event(res, codebase="MyCodebase") @defer.inlineCallbacks def testGitWithChange_WithPushTag(self): self.request = FakeRequest(content=gitJsonPayloadTag) 
self.request.uri = b"/change_hook/gitlab" self.request.args = {b'codebase': [b'MyCodebase']} self.request.received_headers[_HEADER_EVENT] = b"Push Hook" self.request.method = b"POST" res = yield self.request.test_render(self.changeHook) self.check_changes_tag_event(res, codebase="MyCodebase") @defer.inlineCallbacks def testGitWithNoJson(self): self.request = FakeRequest() self.request.uri = b"/change_hook/gitlab" self.request.method = b"POST" self.request.received_headers[_HEADER_EVENT] = b"Push Hook" yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertIn(b"Error loading JSON:", self.request.written) self.request.setResponseCode.assert_called_with(400, mock.ANY) @defer.inlineCallbacks def test_event_property(self): self.request = FakeRequest(content=gitJsonPayload) self.request.received_headers[_HEADER_EVENT] = b"Push Hook" self.request.uri = b"/change_hook/gitlab" self.request.method = b"POST" yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["properties"]["event"], "Push Hook") self.assertEqual(change["category"], "Push Hook") @defer.inlineCallbacks def testGitWithChange_WithMR_open(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_open) res = yield self.request.test_render(self.changeHook) self.check_changes_mr_event(res, codebase="MyCodebase") change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["category"], "merge_request") @defer.inlineCallbacks def testGitWithChange_WithMR_editdesc(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_editdesc) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def testGitWithChange_WithMR_addcommit(self): self.request = 
FakeRequestMR(content=gitJsonPayloadMR_addcommit) res = yield self.request.test_render(self.changeHook) self.check_changes_mr_event(res, codebase="MyCodebase", timestamp=1526395871) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["category"], "merge_request") @defer.inlineCallbacks def testGitWithChange_WithMR_close(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_close) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def testGitWithChange_WithMR_reopen(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_reopen) res = yield self.request.test_render(self.changeHook) self.check_changes_mr_event(res, codebase="MyCodebase", timestamp=1526395871) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["category"], "merge_request") @defer.inlineCallbacks def testGitWithChange_WithMR_open_forked(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_open_forked) res = yield self.request.test_render(self.changeHook) self.check_changes_mr_event( res, codebase="MyCodebase", timestamp=1526736926, source_repo="https://gitlab.example.com/build/awesome_project.git") change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["category"], "merge_request") class TestChangeHookConfiguredWithSecret(unittest.TestCase, TestReactorMixin): _SECRET = 'thesecret' def setUp(self): self.setUpTestReactor() self.master = fakeMasterForHooks(self) fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"secret_key": self._SECRET}) self.secretService = SecretManager() self.secretService.services = [fakeStorageService] self.master.addService(self.secretService) self.changeHook = change_hook.ChangeHookResource( dialects={'gitlab': {'secret': util.Secret("secret_key")}}, master=self.master) @defer.inlineCallbacks def test_missing_secret(self): 
self.request = FakeRequest(content=gitJsonPayloadTag) self.request.uri = b"/change_hook/gitlab" self.request.args = {b'codebase': [b'MyCodebase']} self.request.method = b"POST" self.request.received_headers[_HEADER_EVENT] = b"Push Hook" yield self.request.test_render(self.changeHook) expected = b'Invalid secret' self.assertEqual(self.request.written, expected) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_valid_secret(self): self.request = FakeRequest(content=gitJsonPayload) self.request.received_headers[_HEADER_GITLAB_TOKEN] = self._SECRET self.request.received_headers[_HEADER_EVENT] = b"Push Hook" self.request.uri = b"/change_hook/gitlab" self.request.method = b"POST" yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) buildbot-3.4.0/master/buildbot/test/unit/www/test_hooks_gitorious.py000066400000000000000000000103701413250514000260370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.www import change_hook # Sample Gitorious commit payload # source: http://gitorious.org/gitorious/pages/WebHooks gitJsonPayload = b""" { "after": "df5744f7bc8663b39717f87742dc94f52ccbf4dd", "before": "b4ca2d38e756695133cbd0e03d078804e1dc6610", "commits": [ { "author": { "email": "jason@nospam.org", "name": "jason" }, "committed_at": "2012-01-10T11:02:27-07:00", "id": "df5744f7bc8663b39717f87742dc94f52ccbf4dd", "message": "added a place to put the docstring for Book", "timestamp": "2012-01-10T11:02:27-07:00", "url": "http://gitorious.org/q/mainline/commit/df5744f7bc8663b39717f87742dc94f52ccbf4dd" } ], "project": { "description": "a webapp to organize your ebook collectsion.", "name": "q" }, "pushed_at": "2012-01-10T11:09:25-07:00", "pushed_by": "jason", "ref": "new_look", "repository": { "clones": 4, "description": "", "name": "mainline", "owner": { "name": "jason" }, "url": "http://gitorious.org/q/mainline" } } """ class TestChangeHookConfiguredWithGitChange(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() dialects = {'gitorious': True} self.changeHook = change_hook.ChangeHookResource( dialects=dialects, master=fakeMasterForHooks(self)) # Test 'base' hook with attributes. We should get a json string # representing a Change object as a dictionary. All values show be set. 
@defer.inlineCallbacks def testGitWithChange(self): changeDict = {b"payload": [gitJsonPayload]} self.request = FakeRequest(changeDict) self.request.uri = b"/change_hook/gitorious" self.request.method = b"POST" yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] # Gitorious doesn't send changed files self.assertEqual(change['files'], []) self.assertEqual(change["repository"], "http://gitorious.org/q/mainline") self.assertEqual( change["when_timestamp"], 1326218547 ) self.assertEqual(change["author"], "jason ") self.assertEqual(change["revision"], 'df5744f7bc8663b39717f87742dc94f52ccbf4dd') self.assertEqual(change["comments"], "added a place to put the docstring for Book") self.assertEqual(change["branch"], "new_look") revlink = ("http://gitorious.org/q/mainline/commit/" "df5744f7bc8663b39717f87742dc94f52ccbf4dd") self.assertEqual(change["revlink"], revlink) @defer.inlineCallbacks def testGitWithNoJson(self): self.request = FakeRequest() self.request.uri = b"/change_hook/gitorious" self.request.method = b"GET" yield self.request.test_render(self.changeHook) expected = b"Error processing changes." self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) self.request.setResponseCode.assert_called_with(500, expected) self.assertEqual(len(self.flushLoggedErrors()), 1) buildbot-3.4.0/master/buildbot/test/unit/www/test_hooks_poller.py000066400000000000000000000140061413250514000253100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import util from buildbot.changes import base from buildbot.changes.manager import ChangeManager from buildbot.test.fake import fakemaster from buildbot.test.fake.web import FakeRequest from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.warnings import assertProducesWarnings from buildbot.warnings import DeprecatedApiWarning from buildbot.www import change_hook class TestPollingChangeHook(TestReactorMixin, unittest.TestCase): # New sources should derive from ReconfigurablePollingChangeSource, # but older sources will be using PollingChangeSource. # Both must work. 
class Subclass(base.ReconfigurablePollingChangeSource): pollInterval = None called = False def poll(self): self.called = True class OldstyleSubclass(base.PollingChangeSource): pollInterval = None called = False def poll(self): self.called = True def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def setUpRequest(self, args, options=True, activate=True, poller_cls=Subclass): self.request = FakeRequest(args=args) self.request.uri = b"/change_hook/poller" self.request.method = b"GET" www = self.request.site.master.www self.master = master = self.request.site.master = \ fakemaster.make_master(self, wantData=True) master.www = www yield self.master.startService() self.changeHook = change_hook.ChangeHookResource( dialects={'poller': options}, master=master) master.change_svc = ChangeManager() yield master.change_svc.setServiceParent(master) self.changesrc = poller_cls(21, name=b'example') yield self.changesrc.setServiceParent(master.change_svc) self.otherpoller = poller_cls(22, name=b"otherpoller") yield self.otherpoller.setServiceParent(master.change_svc) anotherchangesrc = base.ChangeSource(name=b'notapoller') anotherchangesrc.setName("notapoller") yield anotherchangesrc.setServiceParent(master.change_svc) yield self.request.test_render(self.changeHook) yield util.asyncSleep(0.1) def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def test_no_args(self): yield self.setUpRequest({}) self.assertEqual(self.request.written, b"no change found") self.assertEqual(self.changesrc.called, True) self.assertEqual(self.otherpoller.called, True) @defer.inlineCallbacks def test_no_poller(self): yield self.setUpRequest({b"poller": [b"nosuchpoller"]}) expected = b"Could not find pollers: nosuchpoller" self.assertEqual(self.request.written, expected) self.request.setResponseCode.assert_called_with(400, expected) self.assertEqual(self.changesrc.called, False) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def 
test_invalid_poller(self): yield self.setUpRequest({b"poller": [b"notapoller"]}) expected = b"Could not find pollers: notapoller" self.assertEqual(self.request.written, expected) self.request.setResponseCode.assert_called_with(400, expected) self.assertEqual(self.changesrc.called, False) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def test_trigger_poll(self): yield self.setUpRequest({b"poller": [b"example"]}) self.assertEqual(self.request.written, b"no change found") self.assertEqual(self.changesrc.called, True) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def test_allowlist_deny(self): yield self.setUpRequest({b"poller": [b"otherpoller"]}, options={b"allowed": [b"example"]}) expected = b"Could not find pollers: otherpoller" self.assertEqual(self.request.written, expected) self.request.setResponseCode.assert_called_with(400, expected) self.assertEqual(self.changesrc.called, False) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def test_allowlist_allow(self): yield self.setUpRequest({b"poller": [b"example"]}, options={b"allowed": [b"example"]}) self.assertEqual(self.request.written, b"no change found") self.assertEqual(self.changesrc.called, True) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def test_allowlist_all(self): yield self.setUpRequest({}, options={b"allowed": [b"example"]}) self.assertEqual(self.request.written, b"no change found") self.assertEqual(self.changesrc.called, True) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def test_trigger_old_poller(self): with assertProducesWarnings(DeprecatedApiWarning, num_warnings=2, message_pattern="use ReconfigurablePollingChangeSource"): yield self.setUpRequest({b"poller": [b"example"]}, poller_cls=self.OldstyleSubclass) self.assertEqual(self.request.written, b"no change found") self.assertEqual(self.changesrc.called, True) self.assertEqual(self.otherpoller.called, False) 
buildbot-3.4.0/master/buildbot/test/unit/www/test_ldapuserinfo.py000066400000000000000000000332321413250514000253050ustar00rootroot00000000000000# coding: utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import types import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.www import WwwTestMixin from buildbot.www import avatar from buildbot.www import ldapuserinfo try: import ldap3 except ImportError: ldap3 = None def get_config_parameter(p): params = {'DEFAULT_SERVER_ENCODING': 'utf-8'} return params[p] fake_ldap = types.ModuleType('ldap3') fake_ldap.SEARCH_SCOPE_WHOLE_SUBTREE = 2 fake_ldap.get_config_parameter = get_config_parameter class FakeLdap: def __init__(self): def search(base, filterstr='f', scope=None, attributes=None): pass self.search = mock.Mock(spec=search) class CommonTestCase(unittest.TestCase): if not ldap3: skip = 'ldap3 is required for LdapUserInfo tests' """Common fixture for all ldapuserinfo tests we completely fake the ldap3 module, so no need to require it to run the unit tests """ def setUp(self): self.ldap = FakeLdap() self.makeUserInfoProvider() self.userInfoProvider.connectLdap = lambda: self.ldap def search(base, filterstr='f', attributes=None): pass self.userInfoProvider.search = 
mock.Mock(spec=search) def makeUserInfoProvider(self): """To be implemented by subclasses""" raise NotImplementedError def _makeSearchSideEffect(self, attribute_type, ret): ret = [[{'dn': i[0], attribute_type: i[1]} for i in r] for r in ret] self.userInfoProvider.search.side_effect = ret def makeSearchSideEffect(self, ret): return self._makeSearchSideEffect('attributes', ret) def makeRawSearchSideEffect(self, ret): return self._makeSearchSideEffect('raw_attributes', ret) def assertSearchCalledWith(self, exp): got = self.userInfoProvider.search.call_args_list self.assertEqual(len(exp), len(got)) for i, val in enumerate(exp): self.assertEqual(exp[i][0][0], got[i][0][1]) self.assertEqual(exp[i][0][1], got[i][0][2]) self.assertEqual(exp[i][0][2], got[i][1]['attributes']) class LdapUserInfo(CommonTestCase): def makeUserInfoProvider(self): self.userInfoProvider = ldapuserinfo.LdapUserInfo( uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", groupBase="groupbase", accountPattern="accpattern", groupMemberPattern="groupMemberPattern", accountFullName="accountFullName", accountEmail="accountEmail", groupName="groupName", avatarPattern="avatar", avatarData="picture", accountExtraFields=["myfield"]) @defer.inlineCallbacks def test_updateUserInfoNoResults(self): self.makeSearchSideEffect([[], [], []]) try: yield self.userInfoProvider.getUserInfo("me") except KeyError as e: self.assertRegex( repr(e), r"KeyError\('ldap search \"accpattern\" returned 0 results',?\)") else: self.fail("should have raised a key error") @defer.inlineCallbacks def test_updateUserInfoNoGroups(self): self.makeSearchSideEffect([[( "cn", {"accountFullName": "me too", "accountEmail": "mee@too"})], [], []]) res = yield self.userInfoProvider.getUserInfo("me") self.assertSearchCalledWith([ (('accbase', 'accpattern', ['accountEmail', 'accountFullName', 'myfield']), {}), (('groupbase', 'groupMemberPattern', ['groupName']), {}), ]) self.assertEqual(res, {'email': 'mee@too', 'full_name': 'me 
too', 'groups': [], 'username': 'me'}) @defer.inlineCallbacks def test_updateUserInfoGroups(self): self.makeSearchSideEffect([[("cn", {"accountFullName": "me too", "accountEmail": "mee@too"})], [("cn", {"groupName": ["group"]}), ("cn", {"groupName": ["group2"]}) ], []]) res = yield self.userInfoProvider.getUserInfo("me") self.assertEqual(res, {'email': 'mee@too', 'full_name': 'me too', 'groups': ["group", "group2"], 'username': 'me'}) @defer.inlineCallbacks def test_updateUserInfoGroupsUnicodeDn(self): # In case of non Ascii DN, ldap3 lib returns an UTF-8 str dn = "cn=Sébastien,dc=example,dc=org" # If groupMemberPattern is an str, and dn is not decoded, # the resulting filter will be an str, leading to UnicodeDecodeError # in ldap3.protocol.convert.validate_assertion_value() # So we use an unicode pattern: self.userInfoProvider.groupMemberPattern = '(member=%(dn)s)' self.makeSearchSideEffect([[(dn, {"accountFullName": "me too", "accountEmail": "mee@too"})], [("cn", {"groupName": ["group"]}), ("cn", {"groupName": ["group2"]}) ], []]) res = yield self.userInfoProvider.getUserInfo("me") self.assertEqual(res, {'email': 'mee@too', 'full_name': 'me too', 'groups': ["group", "group2"], 'username': 'me'}) class LdapAvatar(CommonTestCase, TestReactorMixin, WwwTestMixin): @defer.inlineCallbacks def setUp(self): CommonTestCase.setUp(self) self.setUpTestReactor() master = self.make_master( url='http://a/b/', avatar_methods=[self.userInfoProvider]) self.rsrc = avatar.AvatarResource(master) self.rsrc.reconfigResource(master.config) yield self.master.startService() def makeUserInfoProvider(self): self.userInfoProvider = ldapuserinfo.LdapUserInfo( uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", groupBase="groupbase", accountPattern="accpattern=%(username)s", groupMemberPattern="groupMemberPattern", accountFullName="accountFullName", accountEmail="accountEmail", groupName="groupName", avatarPattern="avatar=%(email)s", avatarData="picture", 
accountExtraFields=["myfield"]) @defer.inlineCallbacks def _getUserAvatar(self, mimeTypeAndData): (mimeType, data) = mimeTypeAndData self.makeRawSearchSideEffect([ [("cn", {"picture": [data]})]]) res = yield self.render_resource(self.rsrc, b'/?email=me') self.assertSearchCalledWith([ (('accbase', 'avatar=me', ['picture']), {}), ]) return res @defer.inlineCallbacks def test_getUserAvatarPNG(self): mimeTypeAndData = (b'image/png', b'\x89PNG lljklj') yield self._getUserAvatar(mimeTypeAndData) self.assertRequest(contentType=mimeTypeAndData[0], content=mimeTypeAndData[1]) @defer.inlineCallbacks def test_getUserAvatarJPEG(self): mimeTypeAndData = (b'image/jpeg', b'\xff\xd8\xff lljklj') yield self._getUserAvatar(mimeTypeAndData) self.assertRequest(contentType=mimeTypeAndData[0], content=mimeTypeAndData[1]) @defer.inlineCallbacks def test_getUserAvatarGIF(self): mimeTypeAndData = (b'image/gif', b'GIF8 lljklj') yield self._getUserAvatar(mimeTypeAndData) self.assertRequest(contentType=mimeTypeAndData[0], content=mimeTypeAndData[1]) @defer.inlineCallbacks def test_getUserAvatarUnknownType(self): mimeTypeAndData = (b'', b'unknown image format') res = yield self._getUserAvatar(mimeTypeAndData) # Unknown format means data won't be sent self.assertEqual(res, dict(redirected=b'img/nobody.png')) @defer.inlineCallbacks def test_getUsernameAvatar(self): mimeType = b'image/gif' data = b'GIF8 lljklj' self.makeRawSearchSideEffect([ [("cn", {"picture": [data]})]]) yield self.render_resource(self.rsrc, b'/?username=me') self.assertSearchCalledWith([ (('accbase', 'accpattern=me', ['picture']), {}), ]) self.assertRequest(contentType=mimeType, content=data) @defer.inlineCallbacks def test_getUnknownUsernameAvatar(self): self.makeSearchSideEffect([[], [], []]) res = yield self.render_resource(self.rsrc, b'/?username=other') self.assertSearchCalledWith([ (('accbase', 'accpattern=other', ['picture']), {}), ]) self.assertEqual(res, dict(redirected=b'img/nobody.png')) class 
LdapUserInfoNotEscCharsDn(CommonTestCase): def makeUserInfoProvider(self): self.userInfoProvider = ldapuserinfo.LdapUserInfo( uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", groupBase="groupbase", accountPattern="accpattern", groupMemberPattern="(member=%(dn)s)", accountFullName="accountFullName", accountEmail="accountEmail", groupName="groupName", avatarPattern="avatar", avatarData="picture") @defer.inlineCallbacks def test_getUserInfoGroupsNotEscCharsDn(self): dn = "cn=Lastname, Firstname \28UIDxxx\29,dc=example,dc=org" pattern = self.userInfoProvider.groupMemberPattern % dict(dn=dn) self.makeSearchSideEffect([[(dn, {"accountFullName": "Lastname, Firstname (UIDxxx)", "accountEmail": "mee@too"})], [("cn", {"groupName": ["group"]}), ("cn", {"groupName": ["group2"]}) ], []]) res = yield self.userInfoProvider.getUserInfo("me") self.assertSearchCalledWith([ (('accbase', 'accpattern', ['accountEmail', 'accountFullName']), {}), (('groupbase', pattern, ['groupName']), {}), ]) self.assertEqual(res, {'email': 'mee@too', 'full_name': 'Lastname, Firstname (UIDxxx)', 'groups': ["group", "group2"], 'username': 'me'}) class LdapUserInfoNoGroups(CommonTestCase): def makeUserInfoProvider(self): self.userInfoProvider = ldapuserinfo.LdapUserInfo( uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", accountPattern="accpattern", accountFullName="accountFullName", accountEmail="accountEmail", avatarPattern="avatar", avatarData="picture", accountExtraFields=["myfield"]) @defer.inlineCallbacks def test_updateUserInfo(self): self.makeSearchSideEffect([[( "cn", {"accountFullName": "me too", "accountEmail": "mee@too"})], [], []]) res = yield self.userInfoProvider.getUserInfo("me") self.assertSearchCalledWith([ (('accbase', 'accpattern', ['accountEmail', 'accountFullName', 'myfield']), {}), ]) self.assertEqual(res, {'email': 'mee@too', 'full_name': 'me too', 'groups': [], 'username': 'me'}) class Config(unittest.TestCase): if not ldap3: skip = 
'ldap3 is required for LdapUserInfo tests' def test_missing_group_name(self): with self.assertRaises(ValueError): ldapuserinfo.LdapUserInfo(groupMemberPattern="member=%(dn)s", groupBase="grpbase", uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", accountPattern="accpattern", accountFullName="accountFullName", accountEmail="accountEmail") def test_missing_group_base(self): with self.assertRaises(ValueError): ldapuserinfo.LdapUserInfo(groupMemberPattern="member=%(dn)s", groupName="group", uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", accountPattern="accpattern", accountFullName="accountFullName", accountEmail="accountEmail") def test_missing_two_params(self): with self.assertRaises(ValueError): ldapuserinfo.LdapUserInfo(groupName="group", uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", accountPattern="accpattern", accountFullName="accountFullName", accountEmail="accountEmail") buildbot-3.4.0/master/buildbot/test/unit/www/test_oauth.py000066400000000000000000000616501413250514000237370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import os import webbrowser import mock import twisted from twisted.internet import defer from twisted.internet import reactor from twisted.internet import threads from twisted.python import failure from twisted.trial import unittest from twisted.web.resource import Resource from twisted.web.server import Site import buildbot from buildbot.process.properties import Secret from buildbot.secrets.manager import SecretManager from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util import www from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode try: import requests except ImportError: requests = None if requests: from buildbot.www import oauth2 # pylint: disable=ungrouped-imports class FakeResponse: def __init__(self, _json): self.json = lambda: _json self.content = json.dumps(_json) def raise_for_status(self): pass class OAuth2Auth(TestReactorMixin, www.WwwTestMixin, ConfigErrorsMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() if requests is None: raise unittest.SkipTest("Need to install requests to test oauth2") self.patch(requests, 'request', mock.Mock(spec=requests.request)) self.patch(requests, 'post', mock.Mock(spec=requests.post)) self.patch(requests, 'get', mock.Mock(spec=requests.get)) self.googleAuth = oauth2.GoogleAuth("ggclientID", "clientSECRET") self.githubAuth = oauth2.GitHubAuth("ghclientID", "clientSECRET") self.githubAuth_v4 = oauth2.GitHubAuth( "ghclientID", "clientSECRET", apiVersion=4) self.githubAuth_v4_teams = oauth2.GitHubAuth( "ghclientID", "clientSECRET", apiVersion=4, getTeamsMembership=True) self.githubAuthEnt = oauth2.GitHubAuth( "ghclientID", "clientSECRET", serverURL="https://git.corp.fakecorp.com") self.githubAuthEnt_v4 = oauth2.GitHubAuth( "ghclientID", "clientSECRET", apiVersion=4, getTeamsMembership=True, 
serverURL="https://git.corp.fakecorp.com") self.gitlabAuth = oauth2.GitLabAuth( "https://gitlab.test/", "glclientID", "clientSECRET") self.bitbucketAuth = oauth2.BitbucketAuth("bbclientID", "clientSECRET") for auth in [self.googleAuth, self.githubAuth, self.githubAuth_v4, self.githubAuth_v4_teams, self.githubAuthEnt, self.gitlabAuth, self.bitbucketAuth, self.githubAuthEnt_v4]: self._master = master = self.make_master(url='h:/a/b/', auth=auth) auth.reconfigAuth(master, master.config) self.githubAuth_secret = oauth2.GitHubAuth( Secret("client-id"), Secret("client-secret"), apiVersion=4) self._master = master = self.make_master(url='h:/a/b/', auth=auth) fake_storage_service = FakeSecretStorage() fake_storage_service.reconfigService(secretdict={"client-id": "secretClientId", "client-secret": "secretClientSecret"}) secret_service = SecretManager() secret_service.services = [fake_storage_service] yield secret_service.setServiceParent(self._master) self.githubAuth_secret.reconfigAuth(master, master.config) @defer.inlineCallbacks def test_getGoogleLoginURL(self): res = yield self.googleAuth.getLoginURL('http://redir') exp = ("https://accounts.google.com/o/oauth2/auth?client_id=ggclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fuserinfo.email+" "https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fuserinfo.profile&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.googleAuth.getLoginURL(None) exp = ("https://accounts.google.com/o/oauth2/auth?client_id=ggclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fuserinfo.email+" "https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fuserinfo.profile") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getGithubLoginURL(self): res = yield self.githubAuth.getLoginURL('http://redir') exp = ("https://github.com/login/oauth/authorize?client_id=ghclientID&" 
"redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.githubAuth.getLoginURL(None) exp = ("https://github.com/login/oauth/authorize?client_id=ghclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getGithubLoginURL_with_secret(self): res = yield self.githubAuth_secret.getLoginURL('http://redir') exp = ("https://github.com/login/oauth/authorize?client_id=secretClientId&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.githubAuth_secret.getLoginURL(None) exp = ("https://github.com/login/oauth/authorize?client_id=secretClientId&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getGithubELoginURL(self): res = yield self.githubAuthEnt.getLoginURL('http://redir') exp = ("https://git.corp.fakecorp.com/login/oauth/authorize?client_id=ghclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.githubAuthEnt.getLoginURL(None) exp = ("https://git.corp.fakecorp.com/login/oauth/authorize?client_id=ghclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getGithubLoginURL_v4(self): res = yield self.githubAuthEnt_v4.getLoginURL('http://redir') exp = ("https://git.corp.fakecorp.com/login/oauth/authorize?client_id=ghclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg&" 
"state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.githubAuthEnt_v4.getLoginURL(None) exp = ("https://git.corp.fakecorp.com/login/oauth/authorize?client_id=ghclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getGitLabLoginURL(self): res = yield self.gitlabAuth.getLoginURL('http://redir') exp = ("https://gitlab.test/oauth/authorize" "?client_id=glclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&" "response_type=code&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.gitlabAuth.getLoginURL(None) exp = ("https://gitlab.test/oauth/authorize" "?client_id=glclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&" "response_type=code") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getBitbucketLoginURL(self): res = yield self.bitbucketAuth.getLoginURL('http://redir') exp = ("https://bitbucket.org/site/oauth2/authorize?" "client_id=bbclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&" "response_type=code&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.bitbucketAuth.getLoginURL(None) exp = ("https://bitbucket.org/site/oauth2/authorize?" 
"client_id=bbclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&" "response_type=code") self.assertEqual(res, exp) @defer.inlineCallbacks def test_GoogleVerifyCode(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.googleAuth.get = mock.Mock(side_effect=[dict( name="foo bar", email="bar@foo", picture="http://pic")]) res = yield self.googleAuth.verifyCode("code!") self.assertEqual({'avatar_url': 'http://pic', 'email': 'bar@foo', 'full_name': 'foo bar', 'username': 'bar'}, res) @defer.inlineCallbacks def test_GithubVerifyCode(self): test = self requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] def fake_get(self, ep, **kwargs): test.assertEqual( self.headers, { 'Authorization': 'token TOK3N', 'User-Agent': 'buildbot/{}'.format(buildbot.version), }) if ep == '/user': return dict( login="bar", name="foo bar", email="buzz@bar") if ep == '/user/emails': return [ {'email': 'buzz@bar', 'verified': True, 'primary': False}, {'email': 'bar@foo', 'verified': True, 'primary': True}] if ep == '/user/orgs': return [ dict(login="hello"), dict(login="grp"), ] return None self.githubAuth.get = fake_get res = yield self.githubAuth.verifyCode("code!") self.assertEqual({'email': 'bar@foo', 'username': 'bar', 'groups': ["hello", "grp"], 'full_name': 'foo bar'}, res) @defer.inlineCallbacks def test_GithubVerifyCode_v4(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.githubAuth_v4.post = mock.Mock(side_effect=[ { 'data': { 'viewer': { 'organizations': { 'edges': [ { 'node': { 'login': 'hello' } }, { 'node': { 'login': 'grp' } } ] }, 'login': 'bar', 'email': 'bar@foo', 'name': 'foo bar' } } } ]) res = yield self.githubAuth_v4.verifyCode("code!") self.assertEqual({'email': 'bar@foo', 'username': 'bar', 'groups': ["hello", "grp"], 'full_name': 'foo bar'}, res) @defer.inlineCallbacks def 
test_GithubVerifyCode_v4_teams(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.githubAuth_v4_teams.post = mock.Mock(side_effect=[ { 'data': { 'viewer': { 'organizations': { 'edges': [ { 'node': { 'login': 'hello' } }, { 'node': { 'login': 'grp' } } ] }, 'login': 'bar', 'email': 'bar@foo', 'name': 'foo bar' } } }, { 'data': { 'hello': { 'teams': { 'edges': [ { 'node': { 'name': 'developers', 'slug': 'develpers' } }, { 'node': { 'name': 'contributors', 'slug': 'contributors' } } ] } }, 'grp': { 'teams': { 'edges': [ { 'node': { 'name': 'developers', 'slug': 'develpers' } }, { 'node': { 'name': 'contributors', 'slug': 'contributors' } }, { 'node': { 'name': 'committers', 'slug': 'committers' } }, { 'node': { 'name': 'Team with spaces and caps', 'slug': 'team-with-spaces-and-caps' } }, ] } }, } } ]) res = yield self.githubAuth_v4_teams.verifyCode("code!") self.assertEqual({'email': 'bar@foo', 'username': 'bar', 'groups': [ 'hello', 'grp', 'grp/Team with spaces and caps', 'grp/committers', 'grp/contributors', 'grp/developers', 'grp/develpers', 'grp/team-with-spaces-and-caps', 'hello/contributors', 'hello/developers', 'hello/develpers', ], 'full_name': 'foo bar'}, res) def test_GitHubAuthBadApiVersion(self): for bad_api_version in (2, 5, 'a'): with self.assertRaisesConfigError( 'GitHubAuth apiVersion must be 3 or 4 not '): oauth2.GitHubAuth("ghclientID", "clientSECRET", apiVersion=bad_api_version) def test_GitHubAuthRaiseErrorWithApiV3AndGetTeamMembership(self): with self.assertRaisesConfigError('Retrieving team membership information using ' 'GitHubAuth is only possible using GitHub api v4.'): oauth2.GitHubAuth("ghclientID", "clientSECRET", apiVersion=3, getTeamsMembership=True) @defer.inlineCallbacks def test_GitlabVerifyCode(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.gitlabAuth.get = mock.Mock(side_effect=[ { # /user "name": "Foo 
Bar", "username": "fbar", "id": 5, "avatar_url": "https://avatar/fbar.png", "email": "foo@bar", "twitter": "fb", }, [ # /groups {"id": 10, "name": "Hello", "path": "hello"}, {"id": 20, "name": "Group", "path": "grp"}, ]]) res = yield self.gitlabAuth.verifyCode("code!") self.assertEqual({"full_name": "Foo Bar", "username": "fbar", "email": "foo@bar", "avatar_url": "https://avatar/fbar.png", "groups": ["hello", "grp"]}, res) @defer.inlineCallbacks def test_BitbucketVerifyCode(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.bitbucketAuth.get = mock.Mock(side_effect=[ dict( # /user username="bar", display_name="foo bar"), dict( # /user/emails values=[ {'email': 'buzz@bar', 'is_primary': False}, {'email': 'bar@foo', 'is_primary': True}]), dict( # /teams?role=member values=[ {'username': 'hello'}, {'username': 'grp'}]) ]) res = yield self.bitbucketAuth.verifyCode("code!") self.assertEqual({'email': 'bar@foo', 'username': 'bar', "groups": ["hello", "grp"], 'full_name': 'foo bar'}, res) @defer.inlineCallbacks def test_loginResource(self): class fakeAuth: homeUri = "://me" getLoginURL = mock.Mock(side_effect=lambda x: defer.succeed("://")) verifyCode = mock.Mock( side_effect=lambda code: defer.succeed({"username": "bar"})) acceptToken = mock.Mock( side_effect=lambda token: defer.succeed({"username": "bar"})) userInfoProvider = None rsrc = self.githubAuth.getLoginResource() rsrc.auth = fakeAuth() res = yield self.render_resource(rsrc, b'/') rsrc.auth.getLoginURL.assert_called_once_with(None) rsrc.auth.verifyCode.assert_not_called() self.assertEqual(res, {'redirected': b'://'}) rsrc.auth.getLoginURL.reset_mock() rsrc.auth.verifyCode.reset_mock() res = yield self.render_resource(rsrc, b'/?code=code!') rsrc.auth.getLoginURL.assert_not_called() rsrc.auth.verifyCode.assert_called_once_with(b"code!") self.assertEqual(self.master.session.user_info, {'username': 'bar'}) self.assertEqual(res, {'redirected': b'://me'}) 
# token not supported anymore res = yield self.render_resource(rsrc, b'/?token=token!') rsrc.auth.getLoginURL.assert_called_once() def test_getConfig(self): self.assertEqual(self.githubAuth.getConfigDict(), {'fa_icon': 'fa-github', 'autologin': False, 'name': 'GitHub', 'oauth2': True}) self.assertEqual(self.googleAuth.getConfigDict(), {'fa_icon': 'fa-google-plus', 'autologin': False, 'name': 'Google', 'oauth2': True}) self.assertEqual(self.gitlabAuth.getConfigDict(), {'fa_icon': 'fa-git', 'autologin': False, 'name': 'GitLab', 'oauth2': True}) self.assertEqual(self.bitbucketAuth.getConfigDict(), {'fa_icon': 'fa-bitbucket', 'autologin': False, 'name': 'Bitbucket', 'oauth2': True}) # unit tests are not very useful to write new oauth support # so following is an e2e test, which opens a browser, and do the oauth # negotiation. The browser window close in the end of the test # in order to use this tests, you need to create Github/Google ClientID (see doc on how to do it) # point OAUTHCONF environment variable to a file with following params: # { # "GitHubAuth": { # "CLIENTID": "XX # "CLIENTSECRET": "XX" # }, # "GoogleAuth": { # "CLIENTID": "XX", # "CLIENTSECRET": "XX" # } # "GitLabAuth": { # "INSTANCEURI": "XX", # "CLIENTID": "XX", # "CLIENTSECRET": "XX" # } # } class OAuth2AuthGitHubE2E(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): authClass = "GitHubAuth" def _instantiateAuth(self, cls, config): return cls(config["CLIENTID"], config["CLIENTSECRET"]) def setUp(self): self.setUpTestReactor() if requests is None: raise unittest.SkipTest("Need to install requests to test oauth2") if "OAUTHCONF" not in os.environ: raise unittest.SkipTest( "Need to pass OAUTHCONF path to json file via environ to run this e2e test") with open(os.environ['OAUTHCONF']) as f: jsonData = f.read() config = json.loads(jsonData)[self.authClass] from buildbot.www import oauth2 self.auth = self._instantiateAuth( getattr(oauth2, self.authClass), config) # 5000 has to be hardcoded, has oauth 
clientids are bound to a fully # classified web site master = self.make_master(url='http://localhost:5000/', auth=self.auth) self.auth.reconfigAuth(master, master.config) def tearDown(self): from twisted.internet.tcp import Server # browsers has the bad habit on not closing the persistent # connections, so we need to hack them away to make trial happy f = failure.Failure(Exception("test end")) for reader in reactor.getReaders(): if isinstance(reader, Server): reader.connectionLost(f) @defer.inlineCallbacks def test_E2E(self): d = defer.Deferred() twisted.web.http._logDateTimeUsers = 1 class HomePage(Resource): isLeaf = True def render_GET(self, request): info = request.getSession().user_info reactor.callLater(0, d.callback, info) return (b"WORKED: " + info + b"") class MySite(Site): def makeSession(self): uid = self._mkuid() session = self.sessions[uid] = self.sessionFactory(self, uid) return session root = Resource() root.putChild(b"", HomePage()) auth = Resource() root.putChild(b'auth', auth) auth.putChild(b'login', self.auth.getLoginResource()) site = MySite(root) listener = reactor.listenTCP(5000, site) def thd(): res = requests.get('http://localhost:5000/auth/login') content = bytes2unicode(res.content) webbrowser.open(content) threads.deferToThread(thd) res = yield d yield listener.stopListening() yield site.stopFactory() self.assertIn("full_name", res) self.assertIn("email", res) self.assertIn("username", res) class OAuth2AuthGoogleE2E(OAuth2AuthGitHubE2E): authClass = "GoogleAuth" class OAuth2AuthGitLabE2E(OAuth2AuthGitHubE2E): authClass = "GitLabAuth" def _instantiateAuth(self, cls, config): return cls(config["INSTANCEURI"], config["CLIENTID"], config["CLIENTSECRET"]) buildbot-3.4.0/master/buildbot/test/unit/www/test_resource.py000066400000000000000000000040441413250514000244400ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www import resource class ResourceSubclass(resource.Resource): needsReconfig = True class Resource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() def test_base_url(self): master = self.make_master(url=b'h:/a/b/') rsrc = resource.Resource(master) self.assertEqual(rsrc.base_url, b'h:/a/b/') def test_reconfigResource_registration(self): master = self.make_master(url=b'h:/a/b/') rsrc = ResourceSubclass(master) master.www.resourceNeedsReconfigs.assert_called_with(rsrc) class RedirectResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() def test_redirect(self): master = self.make_master(url=b'h:/a/b/') rsrc = resource.RedirectResource(master, b'foo') self.render_resource(rsrc, b'/') self.assertEqual(self.request.redirected_to, b'h:/a/b/foo') def test_redirect_cr_lf(self): master = self.make_master(url=b'h:/a/b/') rsrc = resource.RedirectResource(master, b'foo\r\nbar') self.render_resource(rsrc, b'/') self.assertEqual(self.request.redirected_to, b'h:/a/b/foo') 
buildbot-3.4.0/master/buildbot/test/unit/www/test_rest.py000066400000000000000000001202701413250514000235660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import re import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data.exceptions import InvalidQueryParameter from buildbot.test.fake import endpoint from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.www import authz from buildbot.www import graphql from buildbot.www import rest from buildbot.www.rest import JSONRPC_CODES class RestRootResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): maxVersion = 3 def setUp(self): self.setUpTestReactor() [graphql] # used for import side effect @defer.inlineCallbacks def test_render(self): master = self.make_master(url='h:/a/b/') rsrc = rest.RestRootResource(master) rv = yield self.render_resource(rsrc, b'/') self.assertIn(b'api_versions', rv) def test_versions(self): master = self.make_master(url='h:/a/b/') rsrc = rest.RestRootResource(master) versions = [unicode2bytes('v{}'.format(v)) for v in range(2, self.maxVersion + 1)] versions = [unicode2bytes(v) for v in versions] versions.append(b'latest') 
self.assertEqual(sorted(rsrc.listNames()), sorted(versions)) def test_versions_limited(self): master = self.make_master(url='h:/a/b/') master.config.www['rest_minimum_version'] = 2 rsrc = rest.RestRootResource(master) versions = [unicode2bytes('v{}'.format(v)) for v in range(2, self.maxVersion + 1)] versions.append(b'latest') self.assertEqual(sorted(rsrc.listNames()), sorted(versions)) class V2RootResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='http://server/path/') self.master.data._scanModule(endpoint) self.rsrc = rest.V2RootResource(self.master) self.rsrc.reconfigResource(self.master.config) def assertSimpleError(self, message, responseCode): content = json.dumps({'error': message}) self.assertRequest(content=unicode2bytes(content), responseCode=responseCode) @defer.inlineCallbacks def test_failure(self): self.rsrc.renderRest = mock.Mock( return_value=defer.fail(RuntimeError('oh noes'))) yield self.render_resource(self.rsrc, b'/') self.assertSimpleError('internal error - see logs', 500) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_invalid_http_method(self): yield self.render_resource(self.rsrc, b'/', method=b'PATCH') self.assertSimpleError('invalid HTTP method', 400) def do_check_origin_regexp(self, goods, bads): self.assertEqual(len(self.rsrc.origins), 1) regexp = self.rsrc.origins[0] for good in goods: self.assertTrue( regexp.match(good), "{} should match default origin({}), but its not".format( good, regexp.pattern )) for bad in bads: self.assertFalse( regexp.match(bad), "{} should not match default origin({}), but it is".format( bad, regexp.pattern )) def test_default_origin(self): self.master.config.buildbotURL = 'http://server/path/' self.rsrc.reconfigResource(self.master.config) self.do_check_origin_regexp( ["http://server"], ["http://otherserver", "http://otherserver:909"], ) self.master.config.buildbotURL = 
'http://server/' self.rsrc.reconfigResource(self.master.config) self.do_check_origin_regexp( ["http://server"], ["http://otherserver", "http://otherserver:909"], ) self.master.config.buildbotURL = 'http://server:8080/' self.rsrc.reconfigResource(self.master.config) self.do_check_origin_regexp( ["http://server:8080"], ["http://otherserver", "http://server:909"], ) self.master.config.buildbotURL = 'https://server:8080/' self.rsrc.reconfigResource(self.master.config) self.do_check_origin_regexp( ["https://server:8080"], ["http://server:8080", "https://otherserver:8080"], ) class V2RootResource_CORS(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='h:/') self.master.data._scanModule(endpoint) self.rsrc = rest.V2RootResource(self.master) self.master.config.www['allowed_origins'] = [b'h://good'] self.rsrc.reconfigResource(self.master.config) def renderRest(request): request.write(b'ok') return defer.succeed(None) self.rsrc.renderRest = renderRest def assertOk(self, expectHeaders=True, content=b'ok', origin=b'h://good'): hdrs = { b'access-control-allow-origin': [origin], b'access-control-allow-headers': [b'Content-Type'], b'access-control-max-age': [b'3600'], } if expectHeaders else {} self.assertRequest(content=content, responseCode=200, headers=hdrs) def assertNotOk(self, message): content = json.dumps({'error': message}) content = unicode2bytes(content) self.assertRequest(content=content, responseCode=400) @defer.inlineCallbacks def test_cors_no_origin(self): # if the browser doesn't send Origin, there's nothing we can do to # protect the user yield self.render_resource(self.rsrc, b'/') self.assertOk(expectHeaders=False) @defer.inlineCallbacks def test_cors_origin_match(self): yield self.render_resource(self.rsrc, b'/', origin=b'h://good') self.assertOk() @defer.inlineCallbacks def test_cors_origin_match_star(self): self.master.config.www['allowed_origins'] = ['*'] 
self.rsrc.reconfigResource(self.master.config) yield self.render_resource(self.rsrc, b'/', origin=b'h://good') self.assertOk() @defer.inlineCallbacks def test_cors_origin_patterns(self): self.master.config.www['allowed_origins'] = ['h://*.good', 'hs://*.secure'] self.rsrc.reconfigResource(self.master.config) yield self.render_resource(self.rsrc, b'/', origin=b'h://foo.good') self.assertOk(origin=b'h://foo.good') yield self.render_resource(self.rsrc, b'/', origin=b'hs://x.secure') self.assertOk(origin=b'hs://x.secure') yield self.render_resource(self.rsrc, b'/', origin=b'h://x.secure') self.assertNotOk('invalid origin') @defer.inlineCallbacks def test_cors_origin_mismatch(self): yield self.render_resource(self.rsrc, b'/', origin=b'h://bad') self.assertNotOk('invalid origin') @defer.inlineCallbacks def test_cors_origin_mismatch_post(self): yield self.render_resource(self.rsrc, b'/', method=b'POST', origin=b'h://bad') content = json.dumps({'error': {'message': 'invalid origin'}}) content = unicode2bytes(content) self.assertRequest(content=content, responseCode=400) @defer.inlineCallbacks def test_cors_origin_preflight_match_GET(self): yield self.render_resource(self.rsrc, b'/', method=b'OPTIONS', origin=b'h://good', access_control_request_method=b'GET') self.assertOk(content=b'') @defer.inlineCallbacks def test_cors_origin_preflight_match_POST(self): yield self.render_resource(self.rsrc, b'/', method=b'OPTIONS', origin=b'h://good', access_control_request_method=b'POST') self.assertOk(content=b'') @defer.inlineCallbacks def test_cors_origin_preflight_bad_method(self): yield self.render_resource(self.rsrc, b'/', method=b'OPTIONS', origin=b'h://good', access_control_request_method=b'PATCH') self.assertNotOk(message='invalid method') @defer.inlineCallbacks def test_cors_origin_preflight_bad_origin(self): yield self.render_resource(self.rsrc, b'/', method=b'OPTIONS', origin=b'h://bad', access_control_request_method=b'GET') self.assertNotOk(message='invalid origin') class 
V2RootResource_REST(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='h:/') self.master.config.www['debug'] = True self.master.data._scanModule(endpoint) self.rsrc = rest.V2RootResource(self.master) self.rsrc.reconfigResource(self.master.config) def allow(*args, **kw): return self.master.www.assertUserAllowed = allow endpoint.TestEndpoint.rtype = mock.MagicMock() endpoint.TestsEndpoint.rtype = mock.MagicMock() endpoint.Test.isCollection = True endpoint.Test.rtype = endpoint.Test def assertRestCollection(self, typeName, items, total=None, contentType=None, orderSignificant=False): self.assertFalse(isinstance(self.request.written, str)) got = {} got['content'] = json.loads(bytes2unicode(self.request.written)) got['contentType'] = self.request.headers[b'content-type'] got['responseCode'] = self.request.responseCode meta = {} if total is not None: meta['total'] = total exp = {} exp['content'] = {typeName: items, 'meta': meta} exp['contentType'] = [contentType or b'text/plain; charset=utf-8'] exp['responseCode'] = 200 # if order is not significant, sort so the comparison works if not orderSignificant: if 'content' in got and typeName in got['content']: got['content'][typeName].sort(key=lambda x: sorted(x.items())) exp['content'][typeName].sort(key=lambda x: sorted(x.items())) if 'meta' in got['content'] and 'links' in got['content']['meta']: got['content']['meta']['links'].sort( key=lambda l: (l['rel'], l['href'])) self.assertEqual(got, exp) def assertRestDetails(self, typeName, item, contentType=None): got = {} got['content'] = json.loads(bytes2unicode(self.request.written)) got['contentType'] = self.request.headers[b'content-type'] got['responseCode'] = self.request.responseCode exp = {} exp['content'] = { typeName: [item], 'meta': {}, } exp['contentType'] = [contentType or b'text/plain; charset=utf-8'] exp['responseCode'] = 200 self.assertEqual(got, exp) def assertRestError(self, 
responseCode, message): content = json.loads(bytes2unicode(self.request.written)) gotResponseCode = self.request.responseCode self.assertEqual(list(content.keys()), ['error']) self.assertRegex(content['error'], message) self.assertEqual(responseCode, gotResponseCode) @defer.inlineCallbacks def test_not_found(self): yield self.render_resource(self.rsrc, b'/not/found') self.assertRequest( contentJson=dict(error='Invalid path: not/found'), contentType=b'text/plain; charset=utf-8', responseCode=404) @defer.inlineCallbacks def test_invalid_query(self): yield self.render_resource(self.rsrc, b'/test?huh=1') self.assertRequest( contentJson=dict(error="unrecognized query parameter 'huh'"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_raw(self): yield self.render_resource(self.rsrc, b'/rawtest') self.assertRequest( content=b"value", contentType=b'text/test; charset=utf-8', responseCode=200, headers={b"content-disposition": [b'attachment; filename=test.txt']}) @defer.inlineCallbacks def test_api_head(self): get = yield self.render_resource(self.rsrc, b'/test', method=b'GET') head = yield self.render_resource(self.rsrc, b'/test', method=b'HEAD') self.assertEqual(head, b'') self.assertEqual(int(self.request.headers[b'content-length'][0]), len(get)) @defer.inlineCallbacks def test_api_collection(self): yield self.render_resource(self.rsrc, b'/test') self.assertRestCollection(typeName='tests', items=list(endpoint.testData.values()), total=8) @defer.inlineCallbacks def do_test_api_collection_pagination(self, query, ids, links): yield self.render_resource(self.rsrc, b'/test' + query) self.assertRestCollection(typeName='tests', items=[v for k, v in endpoint.testData.items() if k in ids], total=8) def test_api_collection_limit(self): return self.do_test_api_collection_pagination(b'?limit=2', [13, 14], { 'self': '%(self)s?limit=2', 'next': '%(self)s?offset=2&limit=2', }) def test_api_collection_offset(self): return 
self.do_test_api_collection_pagination(b'?offset=2', [15, 16, 17, 18, 19, 20], { 'self': '%(self)s?offset=2', 'first': '%(self)s', }) def test_api_collection_offset_limit(self): return self.do_test_api_collection_pagination(b'?offset=5&limit=2', [18, 19], { 'first': '%(self)s?limit=2', 'prev': '%(self)s?offset=3&limit=2', 'next': '%(self)s?offset=7&limit=2', 'self': '%(self)s?offset=5&limit=2', }) def test_api_collection_limit_at_end(self): return self.do_test_api_collection_pagination(b'?offset=5&limit=3', [18, 19, 20], { 'first': '%(self)s?limit=3', 'prev': '%(self)s?offset=2&limit=3', 'self': '%(self)s?offset=5&limit=3', }) def test_api_collection_limit_past_end(self): return self.do_test_api_collection_pagination(b'?offset=5&limit=20', [18, 19, 20], { 'first': '%(self)s?limit=20', 'prev': '%(self)s?limit=5', 'self': '%(self)s?offset=5&limit=20', }) def test_api_collection_offset_past_end(self): return self.do_test_api_collection_pagination(b'?offset=50&limit=10', [], { 'first': '%(self)s?limit=10', 'prev': '%(self)s?offset=40&limit=10', 'self': '%(self)s?offset=50&limit=10', }) @defer.inlineCallbacks def test_api_collection_invalid_limit(self): yield self.render_resource(self.rsrc, b'/test?limit=foo!') self.assertRequest( contentJson=dict(error="invalid limit"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_collection_invalid_offset(self): yield self.render_resource(self.rsrc, b'/test?offset=foo!') self.assertRequest( contentJson=dict(error="invalid offset"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_collection_invalid_simple_filter_value(self): yield self.render_resource(self.rsrc, b'/test?success=sorta') self.assertRequest( contentJson=dict(error="invalid filter value for success"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_collection_invalid_filter_value(self): yield self.render_resource(self.rsrc, 
b'/test?testid__lt=fifteen') self.assertRequest( contentJson=dict(error="invalid filter value for testid__lt"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_collection_fields(self): yield self.render_resource(self.rsrc, b'/test?field=success&field=info') self.assertRestCollection(typeName='tests', items=[{'success': v['success'], 'info': v['info']} for v in endpoint.testData.values()], total=8) @defer.inlineCallbacks def test_api_collection_invalid_field(self): yield self.render_resource(self.rsrc, b'/test?field=success&field=WTF') self.assertRequest( contentJson=dict(error="no such field 'WTF'"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_collection_simple_filter(self): yield self.render_resource(self.rsrc, b'/test?success=yes') self.assertRestCollection(typeName='tests', items=[v for v in endpoint.testData.values() if v['success']], total=5) @defer.inlineCallbacks def test_api_collection_list_filter(self): yield self.render_resource(self.rsrc, b'/test?tags__contains=a') self.assertRestCollection(typeName='tests', items=[v for v in endpoint.testData.values() if 'a' in v['tags']], total=2) @defer.inlineCallbacks def test_api_collection_operator_filter(self): yield self.render_resource(self.rsrc, b'/test?info__lt=skipped') self.assertRestCollection(typeName='tests', items=[v for v in endpoint.testData.values() if v['info'] < 'skipped'], total=4) @defer.inlineCallbacks def test_api_collection_order(self): yield self.render_resource(self.rsrc, b'/test?order=info') self.assertRestCollection(typeName='tests', items=sorted(list(endpoint.testData.values()), key=lambda v: v['info']), total=8, orderSignificant=True) @defer.inlineCallbacks def test_api_collection_filter_and_order(self): yield self.render_resource(self.rsrc, b'/test?field=info&order=info') self.assertRestCollection(typeName='tests', items=sorted(list([{'info': v['info']} for v in 
endpoint.testData.values()]), key=lambda v: v['info']), total=8, orderSignificant=True) @defer.inlineCallbacks def test_api_collection_order_desc(self): yield self.render_resource(self.rsrc, b'/test?order=-info') self.assertRestCollection(typeName='tests', items=sorted(list(endpoint.testData.values()), key=lambda v: v['info'], reverse=True), total=8, orderSignificant=True) @defer.inlineCallbacks def test_api_collection_filter_and_order_desc(self): yield self.render_resource(self.rsrc, b'/test?field=info&order=-info') self.assertRestCollection(typeName='tests', items=sorted(list([{'info': v['info']} for v in endpoint.testData.values()]), key=lambda v: v['info'], reverse=True), total=8, orderSignificant=True) @defer.inlineCallbacks def test_api_collection_order_on_unselected(self): yield self.render_resource(self.rsrc, b'/test?field=testid&order=info') self.assertRestError(message="cannot order on un-selected fields", responseCode=400) @defer.inlineCallbacks def test_api_collection_filter_on_unselected(self): yield self.render_resource(self.rsrc, b'/test?field=testid&info__gt=xx') self.assertRestError(message="cannot filter on un-selected fields", responseCode=400) @defer.inlineCallbacks def test_api_collection_filter_pagination(self): yield self.render_resource(self.rsrc, b'/test?success=false&limit=2') # note that the limit/offset and total are *after* the filter self.assertRestCollection(typeName='tests', items=sorted( [v for v in endpoint.testData.values() if not v['success']], key=lambda v: v['testid'])[:2], total=3) @defer.inlineCallbacks def test_api_details(self): yield self.render_resource(self.rsrc, b'/test/13') self.assertRestDetails(typeName='tests', item=endpoint.testData[13]) @defer.inlineCallbacks def test_api_details_none(self): self.maxDiff = None yield self.render_resource(self.rsrc, b'/test/0') self.assertRequest( contentJson={'error': "not found while getting from endpoint for " "/tests/n:testid,/test/n:testid with arguments" " 
ResultSpec(**{'filters': [], 'fields': None, " "'properties': [], " "'order': None, 'limit': None, 'offset': None}) " "and {'testid': 0}"}, contentType=b'text/plain; charset=utf-8', responseCode=404) @defer.inlineCallbacks def test_api_details_filter_fails(self): yield self.render_resource(self.rsrc, b'/test/13?success=false') self.assertRequest( contentJson=dict(error="this is not a collection"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_details_fields(self): yield self.render_resource(self.rsrc, b'/test/13?field=info') self.assertRestDetails(typeName='tests', item={'info': endpoint.testData[13]['info']}) @defer.inlineCallbacks def test_api_with_accept(self): # when 'application/json' is accepted, the result has that type yield self.render_resource(self.rsrc, b'/test/13', accept=b'application/json') self.assertRestDetails(typeName='tests', item=endpoint.testData[13], contentType=b'application/json; charset=utf-8') @defer.inlineCallbacks def test_api_fails(self): yield self.render_resource(self.rsrc, b'/test/fail') self.assertRestError(message=r"RuntimeError\('oh noes',?\)", responseCode=500) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) def test_decode_result_spec_raise_bad_request_on_bad_property_value(self): expected_props = [None, 'test2'] self.make_request(b'/test') self.request.args = {b'property': expected_props} with self.assertRaises(InvalidQueryParameter): self.rsrc.decodeResultSpec(self.request, endpoint.TestsEndpoint) def test_decode_result_spec_limit(self): expected_limit = 5 self.make_request(b'/test') self.request.args = {b'limit': str(expected_limit)} spec = self.rsrc.decodeResultSpec(self.request, endpoint.TestsEndpoint) self.assertEqual(spec.limit, expected_limit) def test_decode_result_spec_order(self): expected_order = ('info',) self.make_request(b'/test') self.request.args = {b'order': expected_order} spec = self.rsrc.decodeResultSpec(self.request, endpoint.Test) 
self.assertEqual(spec.order, expected_order) def test_decode_result_spec_offset(self): expected_offset = 5 self.make_request(b'/test') self.request.args = {b'offset': str(expected_offset)} spec = self.rsrc.decodeResultSpec(self.request, endpoint.TestsEndpoint) self.assertEqual(spec.offset, expected_offset) def test_decode_result_spec_properties(self): expected_props = ['test1', 'test2'] self.make_request(b'/test') self.request.args = {b'property': expected_props} spec = self.rsrc.decodeResultSpec(self.request, endpoint.TestsEndpoint) self.assertEqual(spec.properties[0].values, expected_props) def test_decode_result_spec_not_a_collection_limit(self): def expectRaiseInvalidQueryParameter(): limit = 5 self.make_request(b'/test') self.request.args = {b'limit': limit} self.rsrc.decodeResultSpec(self.request, endpoint.TestEndpoint) with self.assertRaises(InvalidQueryParameter): expectRaiseInvalidQueryParameter() def test_decode_result_spec_not_a_collection_order(self): def expectRaiseInvalidQueryParameter(): order = ('info',) self.make_request(b'/test') self.request.args = {b'order': order} self.rsrc.decodeResultSpec(self.request, endpoint.TestEndpoint) with self.assertRaises(InvalidQueryParameter): expectRaiseInvalidQueryParameter() def test_decode_result_spec_not_a_collection_offset(self): def expectRaiseInvalidQueryParameter(): offset = 0 self.make_request(b'/test') self.request.args = {b'offset': offset} self.rsrc.decodeResultSpec(self.request, endpoint.TestEndpoint) with self.assertRaises(InvalidQueryParameter): expectRaiseInvalidQueryParameter() def test_decode_result_spec_not_a_collection_properties(self): expected_props = ['test1', 'test2'] self.make_request(b'/test') self.request.args = {b'property': expected_props} spec = self.rsrc.decodeResultSpec(self.request, endpoint.TestEndpoint) self.assertEqual(spec.properties[0].values, expected_props) @defer.inlineCallbacks def test_authz_forbidden(self): def deny(request, ep, action, options): if "test" in ep: raise 
authz.Forbidden("no no") return None self.master.www.assertUserAllowed = deny yield self.render_resource(self.rsrc, b'/test') self.assertRestAuthError(message=re.compile('no no'), responseCode=403) def assertRestAuthError(self, message, responseCode=400): got = {} got['contentType'] = self.request.headers[b'content-type'] got['responseCode'] = self.request.responseCode content = json.loads(bytes2unicode(self.request.written)) if 'error' not in content: self.fail("response does not have proper error form: %r" % (content,)) got['error'] = content['error'] exp = {} exp['contentType'] = [b'text/plain; charset=utf-8'] exp['responseCode'] = responseCode exp['error'] = message # process a regular expression for message, if given if not isinstance(message, str): if message.match(got['error']): exp['error'] = got['error'] else: exp['error'] = "MATCHING: {}".format(message.pattern) self.assertEqual(got, exp) class V2RootResource_JSONRPC2(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='h:/') def allow(*args, **kw): return self.master.www.assertUserAllowed = allow self.master.data._scanModule(endpoint) self.rsrc = rest.V2RootResource(self.master) self.rsrc.reconfigResource(self.master.config) def assertJsonRpcError(self, message, responseCode=400, jsonrpccode=None): got = {} got['contentType'] = self.request.headers[b'content-type'] got['responseCode'] = self.request.responseCode content = json.loads(bytes2unicode(self.request.written)) if ('error' not in content or sorted(content['error'].keys()) != ['code', 'message']): self.fail("response does not have proper error form: %r" % (content,)) got['error'] = content['error'] exp = {} exp['contentType'] = [b'application/json'] exp['responseCode'] = responseCode exp['error'] = {'code': jsonrpccode, 'message': message} # process a regular expression for message, if given if not isinstance(message, str): if message.match(got['error']['message']): 
exp['error']['message'] = got['error']['message'] else: exp['error']['message'] = "MATCHING: {}".format(message.pattern) self.assertEqual(got, exp) @defer.inlineCallbacks def test_invalid_path(self): yield self.render_control_resource(self.rsrc, b'/not/found') self.assertJsonRpcError( message='Invalid path: not/found', jsonrpccode=JSONRPC_CODES['invalid_request'], responseCode=404) @defer.inlineCallbacks def test_invalid_action(self): yield self.render_control_resource(self.rsrc, b'/test', action='nosuch') self.assertJsonRpcError( message='action: nosuch is not supported', jsonrpccode=JSONRPC_CODES['method_not_found'], responseCode=501) @defer.inlineCallbacks def test_invalid_json(self): yield self.render_control_resource(self.rsrc, b'/test', requestJson="{abc") self.assertJsonRpcError( message=re.compile('^JSON parse error'), jsonrpccode=JSONRPC_CODES['parse_error']) @defer.inlineCallbacks def test_invalid_content_type(self): yield self.render_control_resource(self.rsrc, b'/test', requestJson='{"jsonrpc": "2.0", "method": "foo",' '"id":"abcdef", "params": {}}', content_type='application/x-www-form-urlencoded') self.assertJsonRpcError( message=re.compile('Invalid content-type'), jsonrpccode=JSONRPC_CODES['invalid_request']) @defer.inlineCallbacks def test_list_request(self): yield self.render_control_resource(self.rsrc, b'/test', requestJson="[1,2]") self.assertJsonRpcError( message="JSONRPC batch requests are not supported", jsonrpccode=JSONRPC_CODES['invalid_request']) @defer.inlineCallbacks def test_bad_req_type(self): yield self.render_control_resource(self.rsrc, b'/test', requestJson='"a string?!"') self.assertJsonRpcError( message="JSONRPC root object must be an object", jsonrpccode=JSONRPC_CODES['invalid_request']) @defer.inlineCallbacks def do_test_invalid_req(self, requestJson, message): yield self.render_control_resource(self.rsrc, b'/test', requestJson=requestJson) self.assertJsonRpcError( message=message, jsonrpccode=JSONRPC_CODES['invalid_request']) 
def test_bad_req_jsonrpc_missing(self): return self.do_test_invalid_req( '{"method": "foo", "id":"abcdef", "params": {}}', "missing key 'jsonrpc'") def test_bad_req_jsonrpc_type(self): return self.do_test_invalid_req( '{"jsonrpc": 13, "method": "foo", "id":"abcdef", "params": {}}', "'jsonrpc' must be a string") def test_bad_req_jsonrpc_value(self): return self.do_test_invalid_req( '{"jsonrpc": "3.0", "method": "foo", "id":"abcdef", "params": {}}', "only JSONRPC 2.0 is supported") def test_bad_req_method_missing(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "id":"abcdef", "params": {}}', "missing key 'method'") def test_bad_req_method_type(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "method": 999, "id":"abcdef", "params": {}}', "'method' must be a string") def test_bad_req_id_missing(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "method": "foo", "params": {}}', "missing key 'id'") def test_bad_req_id_type(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "method": "foo", "id": {}, "params": {}}', "'id' must be a string, number, or null") def test_bad_req_params_missing(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "method": "foo", "id": "abc"}', "missing key 'params'") def test_bad_req_params_type(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "method": "foo", "id": "abc", "params": 999}', "'params' must be an object") @defer.inlineCallbacks def test_valid(self): yield self.render_control_resource(self.rsrc, b'/test/13', action="testy", params={'foo': 3, 'bar': 5}) self.assertRequest( contentJson={ 'id': self.UUID, 'jsonrpc': '2.0', 'result': { 'action': 'testy', 'args': {'foo': 3, 'bar': 5, 'owner': 'anonymous'}, 'kwargs': {'testid': 13}, }, }, contentType=b'application/json', responseCode=200) @defer.inlineCallbacks def test_valid_int_id(self): yield self.render_control_resource(self.rsrc, b'/test/13', action="testy", params={'foo': 3, 'bar': 5}, id=1823) self.assertRequest( 
contentJson={ 'id': 1823, 'jsonrpc': '2.0', 'result': { 'action': 'testy', 'args': {'foo': 3, 'bar': 5, 'owner': 'anonymous', }, 'kwargs': {'testid': 13}, }, }, contentType=b'application/json', responseCode=200) @defer.inlineCallbacks def test_valid_fails(self): yield self.render_control_resource(self.rsrc, b'/test/13', action="fail") self.assertJsonRpcError( message=re.compile('^RuntimeError'), jsonrpccode=JSONRPC_CODES['internal_error'], responseCode=500) # the error gets logged, too: self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_authz_forbidden(self): def deny(request, ep, action, options): if "13" in ep: raise authz.Forbidden("no no") return None self.master.www.assertUserAllowed = deny yield self.render_control_resource(self.rsrc, b'/test/13', action="fail") self.assertJsonRpcError( message=re.compile('no no'), jsonrpccode=JSONRPC_CODES['invalid_request'], responseCode=403) @defer.inlineCallbacks def test_owner_without_email(self): self.master.session.user_info = { "username": "defunkt", "full_name": "Defunkt user", } yield self.render_control_resource(self.rsrc, b'/test/13', action="testy") self.assertRequest( contentJson={ 'id': self.UUID, 'jsonrpc': '2.0', 'result': { 'action': 'testy', 'args': {'owner': 'defunkt'}, 'kwargs': {'testid': 13}, }, }, contentType=b'application/json', responseCode=200) @defer.inlineCallbacks def test_owner_with_only_full_name(self): self.master.session.user_info = { "full_name": "Defunkt user", } yield self.render_control_resource(self.rsrc, b'/test/13', action="testy") self.assertRequest( contentJson={ 'id': self.UUID, 'jsonrpc': '2.0', 'result': { 'action': 'testy', 'args': {'owner': 'Defunkt user'}, 'kwargs': {'testid': 13}, }, }, contentType=b'application/json', responseCode=200) @defer.inlineCallbacks def test_owner_with_email(self): self.master.session.user_info = { "email": "defunkt@example.org", "username": "defunkt", "full_name": "Defunkt user", } yield 
self.render_control_resource(self.rsrc, b'/test/13', action="testy") self.assertRequest( contentJson={ 'id': self.UUID, 'jsonrpc': '2.0', 'result': { 'action': 'testy', 'args': {'owner': 'defunkt@example.org'}, 'kwargs': {'testid': 13}, }, }, contentType=b'application/json', responseCode=200) class ContentTypeParser(unittest.TestCase): def test_simple(self): self.assertEqual( rest.ContentTypeParser(b"application/json").gettype(), "application/json") def test_complex(self): self.assertEqual(rest.ContentTypeParser(b"application/json; Charset=UTF-8").gettype(), "application/json") def test_text(self): self.assertEqual( rest.ContentTypeParser(b"text/plain; Charset=UTF-8").gettype(), "text/plain") buildbot-3.4.0/master/buildbot/test/unit/www/test_roles.py000066400000000000000000000074641413250514000237460ustar00rootroot00000000000000# redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.test.util.config import ConfigErrorsMixin from buildbot.www.authz import roles class RolesFromGroups(unittest.TestCase): def setUp(self): self.roles = roles.RolesFromGroups("buildbot-") def test_noGroups(self): ret = self.roles.getRolesFromUser(dict( username="homer")) self.assertEqual(ret, []) def test_noBuildbotGroups(self): ret = self.roles.getRolesFromUser(dict( username="homer", groups=["employee"])) self.assertEqual(ret, []) def test_someBuildbotGroups(self): ret = self.roles.getRolesFromUser(dict( username="homer", groups=["employee", "buildbot-maintainer", "buildbot-admin"])) self.assertEqual(ret, ["maintainer", "admin"]) class RolesFromEmails(unittest.TestCase): def setUp(self): self.roles = roles.RolesFromEmails( employee=["homer@plant.com", "burns@plant.com"], boss=["burns@plant.com"]) def test_noUser(self): ret = self.roles.getRolesFromUser(dict( username="lisa", email="lisa@school.com")) self.assertEqual(ret, []) def test_User1(self): ret = self.roles.getRolesFromUser(dict( username="homer", email="homer@plant.com")) self.assertEqual(ret, ["employee"]) def test_User2(self): ret = self.roles.getRolesFromUser(dict( username="burns", email="burns@plant.com")) self.assertEqual(sorted(ret), ["boss", "employee"]) class RolesFromOwner(unittest.TestCase): def setUp(self): self.roles = roles.RolesFromOwner("ownerofbuild") def test_noOwner(self): ret = self.roles.getRolesFromUser(dict( username="lisa", email="lisa@school.com"), None) self.assertEqual(ret, []) def test_notOwner(self): ret = self.roles.getRolesFromUser(dict( username="lisa", email="lisa@school.com"), "homer@plant.com") self.assertEqual(ret, []) def test_owner(self): ret = self.roles.getRolesFromUser(dict( username="homer", email="homer@plant.com"), "homer@plant.com") self.assertEqual(ret, ["ownerofbuild"]) class RolesFromUsername(unittest.TestCase, ConfigErrorsMixin): def setUp(self): self.roles = 
roles.RolesFromUsername(roles=["admins"], usernames=["Admin"]) self.roles2 = roles.RolesFromUsername( roles=["developers", "integrators"], usernames=["Alice", "Bob"]) def test_anonymous(self): ret = self.roles.getRolesFromUser(dict(anonymous=True)) self.assertEqual(ret, []) def test_normalUser(self): ret = self.roles.getRolesFromUser(dict(username="Alice")) self.assertEqual(ret, []) def test_admin(self): ret = self.roles.getRolesFromUser(dict(username="Admin")) self.assertEqual(ret, ["admins"]) def test_multipleGroups(self): ret = self.roles2.getRolesFromUser(dict(username="Bob")) self.assertEqual(ret, ["developers", "integrators"]) def test_badUsernames(self): with self.assertRaisesConfigError('Usernames cannot be None'): roles.RolesFromUsername(roles=[], usernames=[None]) buildbot-3.4.0/master/buildbot/test/unit/www/test_service.py000066400000000000000000000242131413250514000242510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import calendar import datetime import jwt import mock from twisted.cred import strcred from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse from twisted.internet import defer from twisted.trial import unittest from twisted.web._auth.wrapper import HTTPAuthSessionWrapper from twisted.web.server import Request from buildbot.test.unit.www import test_hooks_base from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www import auth from buildbot.www import change_hook from buildbot.www import resource from buildbot.www import rest from buildbot.www import service class FakeChannel: transport = None def isSecure(self): return False def getPeer(self): return None def getHost(self): return None class NeedsReconfigResource(resource.Resource): needsReconfig = True reconfigs = 0 def reconfigResource(self, config): NeedsReconfigResource.reconfigs += 1 class Test(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='h:/a/b/') self.svc = self.master.www = service.WWWService() yield self.svc.setServiceParent(self.master) def makeConfig(self, **kwargs): w = dict(port=None, auth=auth.NoAuth(), logfileName='l') w.update(kwargs) new_config = mock.Mock() new_config.www = w new_config.buildbotURL = 'h:/' self.master.config = new_config return new_config @defer.inlineCallbacks def test_reconfigService_no_port(self): new_config = self.makeConfig() yield self.svc.reconfigServiceWithBuildbotConfig(new_config) self.assertEqual(self.svc.site, None) @defer.inlineCallbacks def test_reconfigService_reconfigResources(self): new_config = self.makeConfig(port=8080) self.patch(rest, 'RestRootResource', NeedsReconfigResource) NeedsReconfigResource.reconfigs = 0 # first time, reconfigResource gets called along with setupSite yield self.svc.reconfigServiceWithBuildbotConfig(new_config) 
self.assertEqual(NeedsReconfigResource.reconfigs, 1) # and the next time, setupSite isn't called, but reconfigResource is yield self.svc.reconfigServiceWithBuildbotConfig(new_config) self.assertEqual(NeedsReconfigResource.reconfigs, 2) @defer.inlineCallbacks def test_reconfigService_port(self): new_config = self.makeConfig(port=20) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) self.assertNotEqual(self.svc.site, None) self.assertNotEqual(self.svc.port_service, None) self.assertEqual(self.svc.port, 20) @defer.inlineCallbacks def test_reconfigService_expiration_time(self): new_config = self.makeConfig(port=80, cookie_expiration_time=datetime.timedelta(minutes=1)) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) self.assertNotEqual(self.svc.site, None) self.assertNotEqual(self.svc.port_service, None) self.assertEqual(service.BuildbotSession.expDelay, datetime.timedelta(minutes=1)) @defer.inlineCallbacks def test_reconfigService_port_changes(self): new_config = self.makeConfig(port=20) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) newer_config = self.makeConfig(port=999) yield self.svc.reconfigServiceWithBuildbotConfig(newer_config) self.assertNotEqual(self.svc.site, None) self.assertNotEqual(self.svc.port_service, None) self.assertEqual(self.svc.port, 999) @defer.inlineCallbacks def test_reconfigService_port_changes_to_none(self): new_config = self.makeConfig(port=20) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) newer_config = self.makeConfig() yield self.svc.reconfigServiceWithBuildbotConfig(newer_config) # (note the site sticks around) self.assertEqual(self.svc.port_service, None) self.assertEqual(self.svc.port, None) def test_setupSite(self): self.svc.setupSite(self.makeConfig()) site = self.svc.site # check that it has the right kind of resources attached to its # root root = site.resource req = mock.Mock() self.assertIsInstance(root.getChildWithDefault(b'api', req), rest.RestRootResource) def 
test_setupSiteWithProtectedHook(self): checker = InMemoryUsernamePasswordDatabaseDontUse() checker.addUser("guest", "password") self.svc.setupSite(self.makeConfig( change_hook_dialects={'base': True}, change_hook_auth=[checker])) site = self.svc.site # check that it has the right kind of resources attached to its # root root = site.resource req = mock.Mock() self.assertIsInstance(root.getChildWithDefault(b'change_hook', req), HTTPAuthSessionWrapper) @defer.inlineCallbacks def test_setupSiteWithHook(self): new_config = self.makeConfig( change_hook_dialects={'base': True}) self.svc.setupSite(new_config) site = self.svc.site # check that it has the right kind of resources attached to its # root root = site.resource req = mock.Mock() ep = root.getChildWithDefault(b'change_hook', req) self.assertIsInstance(ep, change_hook.ChangeHookResource) # not yet configured self.assertEqual(ep.dialects, {}) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) # now configured self.assertEqual(ep.dialects, {'base': True}) rsrc = self.svc.site.resource.getChildWithDefault(b'change_hook', mock.Mock()) path = b'/change_hook/base' request = test_hooks_base._prepare_request({}) self.master.data.updates.addChange = mock.Mock() yield self.render_resource(rsrc, path, request=request) self.master.data.updates.addChange.assert_called() @defer.inlineCallbacks def test_setupSiteWithHookAndAuth(self): fn = self.mktemp() with open(fn, 'w') as f: f.write("user:pass") new_config = self.makeConfig( port=8080, plugins={}, change_hook_dialects={'base': True}, change_hook_auth=[strcred.makeChecker("file:" + fn)]) self.svc.setupSite(new_config) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) rsrc = self.svc.site.resource.getChildWithDefault(b'', mock.Mock()) res = yield self.render_resource(rsrc, b'') self.assertIn(b'{"type": "file"}', res) rsrc = self.svc.site.resource.getChildWithDefault( b'change_hook', mock.Mock()) res = yield self.render_resource(rsrc, b'/change_hook/base') # 
as UnauthorizedResource is in private namespace, we cannot use # assertIsInstance :-( self.assertIn('UnauthorizedResource', repr(res)) class TestBuildbotSite(unittest.SynchronousTestCase): SECRET = 'secret' def setUp(self): self.site = service.BuildbotSite(None, "logs", 0, 0) self.site.setSessionSecret(self.SECRET) def test_getSession_from_bad_jwt(self): """ if the cookie is bad (maybe from previous version of buildbot), then we should raise KeyError for consumption by caller, and log the JWT error """ with self.assertRaises(KeyError): self.site.getSession("xxx") self.flushLoggedErrors(jwt.exceptions.DecodeError) def test_getSession_from_correct_jwt(self): payload = {'user_info': {'some': 'payload'}} uid = jwt.encode(payload, self.SECRET, algorithm=service.SESSION_SECRET_ALGORITHM) session = self.site.getSession(uid) self.assertEqual(session.user_info, {'some': 'payload'}) def test_getSession_from_expired_jwt(self): # expired one week ago exp = datetime.datetime.utcnow() - datetime.timedelta(weeks=1) exp = calendar.timegm(datetime.datetime.timetuple(exp)) payload = {'user_info': {'some': 'payload'}, 'exp': exp} uid = jwt.encode(payload, self.SECRET, algorithm=service.SESSION_SECRET_ALGORITHM) with self.assertRaises(KeyError): self.site.getSession(uid) def test_getSession_with_no_user_info(self): payload = {'foo': 'bar'} uid = jwt.encode(payload, self.SECRET, algorithm=service.SESSION_SECRET_ALGORITHM) with self.assertRaises(KeyError): self.site.getSession(uid) def test_makeSession(self): session = self.site.makeSession() self.assertEqual(session.user_info, {'anonymous': True}) def test_updateSession(self): session = self.site.makeSession() request = Request(FakeChannel(), False) request.sitepath = [b"bb"] session.updateSession(request) self.assertEqual(len(request.cookies), 1) name, value = request.cookies[0].split(b";")[0].split(b"=") decoded = jwt.decode(value, self.SECRET, algorithms=[service.SESSION_SECRET_ALGORITHM]) self.assertEqual(decoded['user_info'], 
{'anonymous': True}) self.assertIn('exp', decoded) def test_absentServerHeader(self): request = Request(FakeChannel(), False) self.assertEqual(request.responseHeaders.hasHeader('Server'), False) buildbot-3.4.0/master/buildbot/test/unit/www/test_sse.py000066400000000000000000000117441413250514000234100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import json from twisted.trial import unittest from buildbot.test.unit.data import test_changes from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.util import datetime2epoch from buildbot.util import unicode2bytes from buildbot.www import sse class EventResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = master = self.make_master(url=b'h:/a/b/') self.sse = sse.EventResource(master) def test_simpleapi(self): self.render_resource(self.sse, b'/changes/*/*') self.readUUID(self.request) self.assertReceivesChangeNewMessage(self.request) self.assertEqual(self.request.finished, False) def test_listen(self): self.render_resource(self.sse, b'/listen/changes/*/*') self.readUUID(self.request) self.assertReceivesChangeNewMessage(self.request) self.assertEqual(self.request.finished, False) def 
test_listen_add_then_close(self): self.render_resource(self.sse, b'/listen') request = self.request self.request = None uuid = self.readUUID(request) self.render_resource(self.sse, b'/add/' + unicode2bytes(uuid) + b"/changes/*/*") self.assertReceivesChangeNewMessage(request) self.assertEqual(self.request.finished, True) self.assertEqual(request.finished, False) request.finish() # fake close connection on client side with self.assertRaises(AssertionError): self.assertReceivesChangeNewMessage(request) def test_listen_add_then_remove(self): self.render_resource(self.sse, b'/listen') request = self.request uuid = self.readUUID(request) self.render_resource(self.sse, b'/add/' + unicode2bytes(uuid) + b"/changes/*/*") self.assertReceivesChangeNewMessage(request) self.assertEqual(request.finished, False) self.render_resource(self.sse, b'/remove/' + unicode2bytes(uuid) + b"/changes/*/*") with self.assertRaises(AssertionError): self.assertReceivesChangeNewMessage(request) def test_listen_add_nouuid(self): self.render_resource(self.sse, b'/listen') request = self.request self.readUUID(request) self.render_resource(self.sse, b'/add/') self.assertEqual(self.request.finished, True) self.assertEqual(self.request.responseCode, 400) self.assertIn(b"need uuid", self.request.written) def test_listen_add_baduuid(self): self.render_resource(self.sse, b'/listen') request = self.request self.readUUID(request) self.render_resource(self.sse, b'/add/foo') self.assertEqual(self.request.finished, True) self.assertEqual(self.request.responseCode, 400) self.assertIn(b"unknown uuid", self.request.written) def readEvent(self, request): kw = {} hasEmptyLine = False for line in request.written.splitlines(): if line.find(b":") > 0: k, v = line.split(b": ", 1) self.assertTrue(k not in kw, k + b" in " + unicode2bytes(str(kw))) kw[k] = v else: self.assertEqual(line, b"") hasEmptyLine = True request.written = b"" self.assertTrue(hasEmptyLine) return kw def readUUID(self, request): kw = 
self.readEvent(request) self.assertEqual(kw[b"event"], b"handshake") return kw[b"data"] def assertReceivesChangeNewMessage(self, request): self.master.mq.callConsumer( ("changes", "500", "new"), test_changes.Change.changeEvent) kw = self.readEvent(request) self.assertEqual(kw[b"event"], b"event") msg = json.loads(bytes2unicode(kw[b"data"])) self.assertEqual(msg["key"], ['changes', '500', 'new']) self.assertEqual(msg["message"], json.loads( json.dumps(test_changes.Change.changeEvent, default=self._toJson))) def _toJson(self, obj): if isinstance(obj, datetime.datetime): return datetime2epoch(obj) return None buildbot-3.4.0/master/buildbot/test/unit/www/test_ws.py000066400000000000000000000235231413250514000232450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json from unittest.case import SkipTest from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.www import ws class WsResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor(use_asyncio=True) self.master = master = self.make_master( url="h:/a/b/", wantMq=True, wantGraphql=True ) self.skip_graphql = False if not self.master.graphql.enabled: self.skip_graphql = True self.ws = ws.WsResource(master) self.proto = self.ws._factory.buildProtocol("me") self.proto.sendMessage = Mock(spec=self.proto.sendMessage) def assert_called_with_json(self, obj, expected_json): jsonArg = obj.call_args[0][0] jsonArg = bytes2unicode(jsonArg) actual_json = json.loads(jsonArg) self.assertEqual(actual_json, expected_json) def do_onConnect(self, protocols): self.proto.is_graphql = None class FakeRequest: pass r = FakeRequest() r.protocols = protocols return self.proto.onConnect(r) def test_onConnect(self): self.assertEqual(self.do_onConnect([]), None) self.assertEqual(self.do_onConnect(["foo", "graphql-websocket"]), None) self.assertEqual(self.proto.is_graphql, None) # undecided yet self.assertEqual(self.do_onConnect(["graphql-ws"]), "graphql-ws") self.assertEqual(self.proto.is_graphql, True) self.assertEqual(self.do_onConnect(["foo", "graphql-ws"]), "graphql-ws") self.assertEqual(self.proto.is_graphql, True) def test_ping(self): self.proto.onMessage(json.dumps(dict(cmd="ping", _id=1)), False) self.assert_called_with_json( self.proto.sendMessage, {"msg": "pong", "code": 200, "_id": 1} ) def test_bad_cmd(self): self.proto.onMessage(json.dumps(dict(cmd="poing", _id=1)), False) self.assert_called_with_json( self.proto.sendMessage, {"_id": 1, "code": 404, "error": "no such command type 'poing'"}, ) def test_no_cmd(self): 
self.proto.onMessage(json.dumps(dict(_id=1)), False) self.assert_called_with_json( self.proto.sendMessage, {"_id": None, "code": 400, "error": "no 'cmd' in websocket frame"}, ) def test_too_many_arguments(self): self.proto.onMessage(json.dumps(dict(_id=1, cmd="ping", foo="bar")), False) self.assert_called_with_json( self.proto.sendMessage, { "_id": 1, "code": 400, "error": "Invalid method argument 'cmd_ping() got an unexpected keyword " "argument 'foo''", }, ) def test_too_many_arguments_graphql(self): self.proto.is_graphql = True self.proto.onMessage( json.dumps(dict(id=1, type="connection_init", foo="bar")), False ) self.assert_called_with_json( self.proto.sendMessage, { "id": None, "message": "Invalid method argument 'graphql_cmd_connection_init() got an " "unexpected keyword argument 'foo''", "type": "error", }, ) def test_no_type_while_graphql(self): self.proto.is_graphql = True self.proto.onMessage(json.dumps(dict(_id=1, cmd="ping")), False) self.assert_called_with_json( self.proto.sendMessage, { "id": None, "message": "missing 'type' in websocket frame when already started using " "graphql", "type": "error", }, ) def test_type_while_not_graphql(self): self.proto.is_graphql = False self.proto.onMessage(json.dumps(dict(_id=1, type="ping")), False) self.assert_called_with_json( self.proto.sendMessage, { "_id": None, "error": "using 'type' in websocket frame when " "already started using buildbot protocol", "code": 400, }, ) def test_no_id(self): self.proto.onMessage(json.dumps(dict(cmd="ping")), False) self.assert_called_with_json( self.proto.sendMessage, { "_id": None, "code": 400, "error": "no '_id' or 'type' in websocket frame", }, ) def test_startConsuming(self): self.proto.onMessage( json.dumps(dict(cmd="startConsuming", path="builds/*/*", _id=1)), False ) self.assert_called_with_json( self.proto.sendMessage, {"msg": "OK", "code": 200, "_id": 1} ) self.master.mq.verifyMessages = False self.master.mq.callConsumer(("builds", "1", "new"), {"buildid": 1}) 
self.assert_called_with_json( self.proto.sendMessage, {"k": "builds/1/new", "m": {"buildid": 1}} ) def test_startConsumingBadPath(self): self.proto.onMessage( json.dumps(dict(cmd="startConsuming", path={}, _id=1)), False ) self.assert_called_with_json( self.proto.sendMessage, {"_id": 1, "code": 400, "error": "invalid path format '{}'"}, ) def test_stopConsumingNotRegistered(self): self.proto.onMessage( json.dumps(dict(cmd="stopConsuming", path="builds/*/*", _id=1)), False ) self.assert_called_with_json( self.proto.sendMessage, {"_id": 1, "code": 400, "error": "path was not consumed 'builds/*/*'"}, ) def test_stopConsuming(self): self.proto.onMessage( json.dumps(dict(cmd="startConsuming", path="builds/*/*", _id=1)), False ) self.assert_called_with_json( self.proto.sendMessage, {"msg": "OK", "code": 200, "_id": 1} ) self.proto.onMessage( json.dumps(dict(cmd="stopConsuming", path="builds/*/*", _id=2)), False ) self.assert_called_with_json( self.proto.sendMessage, {"msg": "OK", "code": 200, "_id": 2} ) # graphql def test_connection_init(self): self.proto.onMessage(json.dumps(dict(type="connection_init")), False) self.assert_called_with_json(self.proto.sendMessage, {"type": "connection_ack"}) @defer.inlineCallbacks def test_start_stop_graphql(self): if self.skip_graphql: raise SkipTest("graphql-core not installed") yield self.proto.onMessage( json.dumps( dict(type="start", payload=dict(query="{builders{name}}"), id=1) ), False, ) self.assertEqual(len(self.proto.graphql_subs), 1) self.assert_called_with_json( self.proto.sendMessage, { "payload": { "data": {"builders": []}, "errors": None, }, "type": "data", "id": 1, }, ) self.proto.sendMessage.reset_mock() yield self.proto.graphql_dispatch_events.function() self.proto.sendMessage.assert_not_called() # auto create a builder in the db yield self.master.db.builders.findBuilderId("builder1") self.master.mq.callConsumer( ("builders", "1", "started"), {"name": "builder1", "masterid": 1, "builderid": 1}, ) 
self.assertNotEqual(self.proto.graphql_dispatch_events.phase, 0) # then force the call anyway to speed up the test yield self.proto.graphql_dispatch_events.function() self.assert_called_with_json( self.proto.sendMessage, { "payload": { "data": {"builders": [{"name": "builder1"}]}, "errors": None, }, "type": "data", "id": 1, }, ) yield self.proto.onMessage(json.dumps(dict(type="stop", id=1)), False) self.assertEqual(len(self.proto.graphql_subs), 0) @defer.inlineCallbacks def test_start_graphql_bad_query(self): if self.skip_graphql: raise SkipTest("graphql-core not installed") yield self.proto.onMessage( json.dumps( dict(type="start", payload=dict(query="{builders{not_existing}}"), id=1) ), False, ) self.assert_called_with_json( self.proto.sendMessage, { "payload": { "data": None, "errors": [ { "locations": [{"column": 11, "line": 1}], "message": "Cannot query field 'not_existing' on type 'Builder'.", "path": None, } ], }, "id": 1, "type": "data", }, ) self.assertEqual(len(self.proto.graphql_subs), 0) buildbot-3.4.0/master/buildbot/test/util/000077500000000000000000000000001413250514000203505ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/util/__init__.py000066400000000000000000000000001413250514000224470ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/test/util/changesource.py000066400000000000000000000072431413250514000233760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import task from buildbot.test.fake import fakemaster class ChangeSourceMixin: """ This class is used for testing change sources, and handles a few things: - starting and stopping a ChangeSource service - a fake master with a data API implementation """ changesource = None started = False DUMMY_CHANGESOURCE_ID = 20 OTHER_MASTER_ID = 93 DEFAULT_NAME = "ChangeSource" def setUpChangeSource(self): "Set up the mixin - returns a deferred." self.master = fakemaster.make_master(self, wantDb=True, wantData=True) assert not hasattr(self.master, 'addChange') # just checking.. return defer.succeed(None) @defer.inlineCallbacks def tearDownChangeSource(self): "Tear down the mixin - returns a deferred." 
if not self.started: return if self.changesource.running: yield self.changesource.stopService() yield self.changesource.disownServiceParent() return @defer.inlineCallbacks def attachChangeSource(self, cs): "Set up a change source for testing; sets its .master attribute" self.changesource = cs # FIXME some changesource does not have master property yet but # mailchangesource has :-/ try: self.changesource.master = self.master except AttributeError: yield self.changesource.setServiceParent(self.master) # configure the service to let secret manager render the secrets try: yield self.changesource.configureService() except NotImplementedError: # non-reconfigurable change sources can't reconfig pass # also, now that changesources are ClusteredServices, setting up # the clock here helps in the unit tests that check that behavior self.changesource.clock = task.Clock() return cs def startChangeSource(self): "start the change source as a service" self.started = True return self.changesource.startService() @defer.inlineCallbacks def stopChangeSource(self): "stop the change source again; returns a deferred" yield self.changesource.stopService() self.started = False def setChangeSourceToMaster(self, otherMaster): # some tests build the CS late, so for those tests we will require that # they use the default name in order to run tests that require master # assignments if self.changesource is not None: name = self.changesource.name else: name = self.DEFAULT_NAME self.master.data.updates.changesourceIds[ name] = self.DUMMY_CHANGESOURCE_ID if otherMaster: self.master.data.updates.changesourceMasters[ self.DUMMY_CHANGESOURCE_ID] = otherMaster else: del self.master.data.updates.changesourceMasters[ self.DUMMY_CHANGESOURCE_ID] buildbot-3.4.0/master/buildbot/test/util/config.py000066400000000000000000000045501413250514000221730ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot import config class _AssertRaisesConfigErrorContext: def __init__(self, substr_or_re, case): self.substr_or_re = substr_or_re self.case = case def __enter__(self): return self def __exit__(self, exc_type, exc_value, tb): if exc_type is None: self.case.fail("ConfigErrors not raised") if not issubclass(exc_type, config.ConfigErrors): self.case.fail("ConfigErrors not raised, instead got {0}".format( exc_type.__name__)) self.case.assertConfigError(exc_value, self.substr_or_re) return True class ConfigErrorsMixin: def assertConfigError(self, errors, substr_or_re): if len(errors.errors) > 1: self.fail("too many errors: {}".format(errors.errors)) elif not errors.errors: self.fail("expected error did not occur") else: curr_error = errors.errors[0] if isinstance(substr_or_re, str): if substr_or_re not in curr_error: self.fail("non-matching error: {}, expected: {}".format(curr_error, substr_or_re)) else: if not substr_or_re.search(curr_error): self.fail("non-matching error: {}".format(curr_error)) def assertRaisesConfigError(self, substr_or_re, fn=None): context = _AssertRaisesConfigErrorContext(substr_or_re, self) if fn is None: return context with context: fn() return None def assertNoConfigErrors(self, errors): self.assertEqual(errors.errors, []) 
buildbot-3.4.0/master/buildbot/test/util/configurators.py000066400000000000000000000052631413250514000236150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.config import MasterConfig class ConfiguratorMixin: """ Support for testing configurators. @ivar configurator: the configurator under test @ivar config_dict: the config dict that the configurator is modifying """ def setUp(self): self.config_dict = {} def setupConfigurator(self, *args, **kwargs): self.configurator = self.ConfiguratorClass(*args, **kwargs) return self.configurator.configure(self.config_dict) def expectWorker(self, name, klass): if 'workers' in self.config_dict and 'slaves' in self.config_dict: self.fail("both 'workers' and 'slaves' are in the config dict!") for worker in self.config_dict.get('workers', []) + self.config_dict.get('slaves', []): if isinstance(worker, klass) and worker.name == name: return worker self.fail("expected a worker named {} of class {}".format(name, klass)) return None def expectScheduler(self, name, klass): for scheduler in self.config_dict['schedulers']: if scheduler.name == name and isinstance(scheduler, klass): return scheduler self.fail("expected a scheduler named {} of class {}".format(name, klass)) return None def expectBuilder(self, name): for builder in 
self.config_dict['builders']: if builder.name == name: return builder self.fail("expected a builder named {}".format(name)) return None def expectBuilderHasSteps(self, name, step_classes): builder = self.expectBuilder(name) for step_class in step_classes: found = [ step for step in builder.factory.steps if step.factory == step_class ] if not found: self.fail("expected a buildstep of {!r} in {}".format(step_class, name)) def expectNoConfigError(self): config = MasterConfig() config.loadFromDict(self.config_dict, "test") buildbot-3.4.0/master/buildbot/test/util/connector_component.py000066400000000000000000000052561413250514000250060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import types from twisted.internet import defer from buildbot.db import model from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util.misc import TestReactorMixin class FakeDBConnector: pass class ConnectorComponentMixin(TestReactorMixin, db.RealDatabaseMixin): """ Implements a mock DBConnector object, replete with a thread pool and a DB model. This includes a RealDatabaseMixin, so subclasses should not instantiate that class directly. The connector appears at C{self.db}, and the component should be attached to it as an attribute. 
@ivar db: fake database connector @ivar db.pool: DB thread pool @ivar db.model: DB model """ @defer.inlineCallbacks def setUpConnectorComponent(self, table_names=None, basedir='basedir', dialect_name='sqlite'): self.setUpTestReactor() """Set up C{self.db}, using the given db_url and basedir.""" if table_names is None: table_names = [] yield self.setUpRealDatabase(table_names=table_names, basedir=basedir) self.db = FakeDBConnector() self.db.pool = self.db_pool self.db.master = fakemaster.make_master(self) self.db.model = model.Model(self.db) self.db._engine = types.SimpleNamespace(dialect=types.SimpleNamespace(name=dialect_name)) @defer.inlineCallbacks def tearDownConnectorComponent(self): yield self.tearDownRealDatabase() # break some reference loops, just for fun del self.db.pool del self.db.model del self.db class FakeConnectorComponentMixin(TestReactorMixin): # Just like ConnectorComponentMixin, but for working with fake database def setUpConnectorComponent(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData return defer.succeed(None) buildbot-3.4.0/master/buildbot/test/util/db.py000066400000000000000000000263551413250514000213220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from sqlalchemy.schema import MetaData from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from twisted.trial import unittest from buildbot.db import enginestrategy from buildbot.db import model from buildbot.db import pool from buildbot.db.connector import DBConnector from buildbot.util.sautils import sa_version from buildbot.util.sautils import withoutSqliteForeignKeys def skip_for_dialect(dialect): """Decorator to skip a test for a particular SQLAlchemy dialect.""" def dec(fn): def wrap(self, *args, **kwargs): if self.db_engine.dialect.name == dialect: raise unittest.SkipTest("Not supported on dialect '{}'".format(dialect)) return fn(self, *args, **kwargs) return wrap return dec class RealDatabaseMixin: """ A class that sets up a real database for testing. This sets self.db_url to the URL for the database. By default, it specifies an in-memory SQLite database, but if the BUILDBOT_TEST_DB_URL environment variable is set, it will use the specified database, being careful to clean out *all* tables in the database before and after the tests are run - so each test starts with a clean database. @ivar db_pool: a (real) DBThreadPool instance that can be used as desired @ivar db_url: the DB URL used to run these tests @ivar db_engine: the engine created for the test database Note that this class uses the production database model. A re-implementation would be virtually identical and just require extra work to keep synchronized. Similarly, this class uses the production DB thread pool. This achieves a few things: - affords more thorough tests for the pool - avoids repetitive implementation - cooperates better at runtime with thread-sensitive DBAPI's Finally, it duplicates initialization performed in db.connector.DBConnector.setup(). Never call that method in tests that use RealDatabaseMixin, use RealDatabaseWithConnectorMixin. 
""" def __thd_clean_database(self, conn): # In general it's nearly impossible to do "bullet proof" database # cleanup with SQLAlchemy that will work on a range of databases # and they configurations. # # Following approaches were considered. # # 1. Drop Buildbot Model schema: # # model.Model.metadata.drop_all(bind=conn, checkfirst=True) # # Dropping schema from model is correct and working operation only # if database schema is exactly corresponds to the model schema. # # If it is not (e.g. migration script failed or migration results in # old version of model), then some tables outside model schema may be # present, which may reference tables in the model schema. # In this case either dropping model schema will fail (if database # enforces referential integrity, e.g. PostgreSQL), or # dropping left tables in the code below will fail (if database allows # removing of tables on which other tables have references, # e.g. SQLite). # # 2. Introspect database contents and drop found tables. # # meta = MetaData(bind=conn) # meta.reflect() # meta.drop_all() # # May fail if schema contains reference cycles (and Buildbot schema # has them). Reflection looses metadata about how reference cycles # can be teared up (e.g. use_alter=True). # Introspection may fail if schema has invalid references # (e.g. possible in SQLite). # # 3. What is actually needed here is accurate code for each engine # and each engine configuration that will drop all tables, # indexes, constraints, etc in proper order or in a proper way # (using tables alternation, or DROP TABLE ... CASCADE, etc). # # Conclusion: use approach 2 with manually teared apart known # reference cycles. # pylint: disable=too-many-nested-blocks try: meta = MetaData(bind=conn) # Reflect database contents. May fail, e.g. if table references # non-existent table in SQLite. meta.reflect() # Table.foreign_key_constraints introduced in SQLAlchemy 1.0. 
if sa_version()[:2] >= (1, 0): # Restore `use_alter` settings to break known reference cycles. # Main goal of this part is to remove SQLAlchemy warning # about reference cycle. # Looks like it's OK to do it only with SQLAlchemy >= 1.0.0, # since it's not issued in SQLAlchemy == 0.8.0 # List of reference links (table_name, ref_table_name) that # should be broken by adding use_alter=True. table_referenced_table_links = [ ('buildsets', 'builds'), ('builds', 'buildrequests')] for table_name, ref_table_name in table_referenced_table_links: if table_name in meta.tables: table = meta.tables[table_name] for fkc in table.foreign_key_constraints: if fkc.referred_table.name == ref_table_name: fkc.use_alter = True # Drop all reflected tables and indices. May fail, e.g. if # SQLAlchemy wouldn't be able to break circular references. # Sqlalchemy fk support with sqlite is not yet perfect, so we must deactivate fk during # that operation, even though we made our possible to use use_alter with withoutSqliteForeignKeys(conn.engine, conn): meta.drop_all() except Exception: # sometimes this goes badly wrong; being able to see the schema # can be a big help if conn.engine.dialect.name == 'sqlite': r = conn.execute("select sql from sqlite_master " "where type='table'") log.msg("Current schema:") for row in r.fetchall(): log.msg(row.sql) raise def __thd_create_tables(self, conn, table_names): table_names_set = set(table_names) tables = [t for t in model.Model.metadata.tables.values() if t.name in table_names_set] # Create tables using create_all() method. This way not only tables # and direct indices are created, but also deferred references # (that use use_alter=True in definition). model.Model.metadata.create_all( bind=conn, tables=tables, checkfirst=True) @defer.inlineCallbacks def setUpRealDatabase(self, table_names=None, basedir='basedir', want_pool=True, sqlite_memory=True): """ Set up a database. 
Ordinarily sets up an engine and a pool and takes care of cleaning out any existing tables in the database. If C{want_pool} is false, then no pool will be created, and the database will not be cleaned. @param table_names: list of names of tables to instantiate @param basedir: (optional) basedir for the engine @param want_pool: (optional) false to not create C{self.db_pool} @param sqlite_memory: (optional) False to avoid using an in-memory db @returns: Deferred """ if table_names is None: table_names = [] self.__want_pool = want_pool default_sqlite = 'sqlite://' self.db_url = os.environ.get('BUILDBOT_TEST_DB_URL', default_sqlite) if not sqlite_memory and self.db_url == default_sqlite: self.db_url = "sqlite:///tmp.sqlite" if not os.path.exists(basedir): os.makedirs(basedir) self.basedir = basedir self.db_engine = enginestrategy.create_engine(self.db_url, basedir=basedir) # if the caller does not want a pool, we're done. if not want_pool: return None self.db_pool = pool.DBThreadPool(self.db_engine, reactor=reactor) log.msg("cleaning database {}".format(self.db_url)) yield self.db_pool.do(self.__thd_clean_database) yield self.db_pool.do(self.__thd_create_tables, table_names) return None @defer.inlineCallbacks def tearDownRealDatabase(self): if self.__want_pool: yield self.db_pool.do(self.__thd_clean_database) yield self.db_pool.shutdown() @defer.inlineCallbacks def insertTestData(self, rows): """Insert test data into the database for use during the test. @param rows: be a sequence of L{fakedb.Row} instances. These will be sorted by table dependencies, so order does not matter. 
@returns: Deferred """ # sort the tables by dependency all_table_names = {row.table for row in rows} ordered_tables = [t for t in model.Model.metadata.sorted_tables if t.name in all_table_names] def thd(conn): # insert into tables -- in order for tbl in ordered_tables: for row in [r for r in rows if r.table == tbl.name]: tbl = model.Model.metadata.tables[row.table] try: tbl.insert(bind=conn).execute(row.values) except Exception: log.msg("while inserting {} - {}".format(row, row.values)) raise yield self.db_pool.do(thd) class RealDatabaseWithConnectorMixin(RealDatabaseMixin): # Same as RealDatabaseMixin, except that a real DBConnector is also setup in a correct way. @defer.inlineCallbacks def setUpRealDatabaseWithConnector(self, master, table_names=None, basedir='basedir', want_pool=True, sqlite_memory=True): yield self.setUpRealDatabase(table_names, basedir, want_pool, sqlite_memory) master.config.db['db_url'] = self.db_url master.db = DBConnector(self.basedir) yield master.db.setServiceParent(master) master.db.pool = self.db_pool def tearDownRealDatabaseWithConnector(self): return self.tearDownRealDatabase() class TestCase(unittest.TestCase): @defer.inlineCallbacks def assertFailure(self, d, excp): exception = None try: yield d except Exception as e: exception = e self.assertIsInstance(exception, excp) self.flushLoggedErrors(excp) buildbot-3.4.0/master/buildbot/test/util/decorators.py000066400000000000000000000042461413250514000230750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Various decorators for test cases """ import os import sys from twisted.python import runtime _FLAKY_ENV_VAR = 'RUN_FLAKY_TESTS' def todo(message): """ decorator to mark a todo test """ def wrap(func): """ just mark the test """ func.todo = message return func return wrap def flaky(bugNumber=None, issueNumber=None, onPlatform=None): def wrap(fn): if onPlatform is not None and sys.platform != onPlatform: return fn if os.environ.get(_FLAKY_ENV_VAR): return fn if bugNumber is not None: fn.skip = (("Flaky test (http://trac.buildbot.net/ticket/{}) " "- set ${} to run anyway").format(bugNumber, _FLAKY_ENV_VAR)) if issueNumber is not None: fn.skip = (("Flaky test (https://github.com/buildbot/buildbot/issues/{}) " "- set ${} to run anyway").format(issueNumber, _FLAKY_ENV_VAR)) return fn return wrap def skipUnlessPlatformIs(platform): def closure(test): if runtime.platformType != platform: test.skip = "not a {} platform".format(platform) return test return closure def skipIfPythonVersionIsLess(min_version_info): assert isinstance(min_version_info, tuple) def closure(test): if sys.version_info < min_version_info: test.skip = "requires Python >= {0}".format(min_version_info) return test return closure buildbot-3.4.0/master/buildbot/test/util/dirs.py000066400000000000000000000026211413250514000216640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import shutil from twisted.internet import defer class DirsMixin: _dirs = None def setUpDirs(self, *dirs): """Make sure C{dirs} exist and are empty, and set them up to be deleted in tearDown.""" self._dirs = map(os.path.abspath, dirs) for dir in self._dirs: if os.path.exists(dir): shutil.rmtree(dir) os.makedirs(dir) # return a deferred to make chaining easier return defer.succeed(None) def tearDownDirs(self): for dir in self._dirs: if os.path.exists(dir): shutil.rmtree(dir) # return a deferred to make chaining easier return defer.succeed(None) buildbot-3.4.0/master/buildbot/test/util/endpoint.py000066400000000000000000000101241413250514000225400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import resultspec from buildbot.test.fake import fakemaster from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin from buildbot.util import pathmatch class EndpointMixin(TestReactorMixin, interfaces.InterfaceTests): # test mixin for testing Endpoint subclasses # class being tested endpointClass = None # the corresponding resource type - this will be instantiated at # self.data.rtypes[rtype.type] and self.rtype resourceTypeClass = None def setUpEndpoint(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.db = self.master.db self.mq = self.master.mq self.data = self.master.data self.matcher = pathmatch.Matcher() rtype = self.rtype = self.resourceTypeClass(self.master) setattr(self.data.rtypes, rtype.name, rtype) self.ep = self.endpointClass(rtype, self.master) # this usually fails when a single-element pathPattern does not have a # trailing comma pathPatterns = self.ep.pathPatterns.split() for pp in pathPatterns: if pp == '/': continue if not pp.startswith('/') or pp.endswith('/'): raise AssertionError("invalid pattern %r" % (pp,)) pathPatterns = [tuple(pp.split('/')[1:]) for pp in pathPatterns] for pp in pathPatterns: self.matcher[pp] = self.ep self.pathArgs = [ {arg.split(':', 1)[1] for arg in pp if ':' in arg} for pp in pathPatterns if pp is not None] def tearDownEndpoint(self): pass def validateData(self, object): validation.verifyData(self, self.rtype.entityType, {}, object) # call methods, with extra checks @defer.inlineCallbacks def callGet(self, path, resultSpec=None): self.assertIsInstance(path, tuple) if resultSpec is None: resultSpec = resultspec.ResultSpec() endpoint, kwargs = self.matcher[path] self.assertIdentical(endpoint, self.ep) rv = yield endpoint.get(resultSpec, kwargs) if 
self.ep.isCollection: self.assertIsInstance(rv, (list, base.ListResult)) else: self.assertIsInstance(rv, (dict, type(None))) return rv def callControl(self, action, args, path): self.assertIsInstance(path, tuple) endpoint, kwargs = self.matcher[path] self.assertIdentical(endpoint, self.ep) d = self.ep.control(action, args, kwargs) self.assertIsInstance(d, defer.Deferred) return d # interface tests def test_get_spec(self): @self.assertArgSpecMatches(self.ep.get) def get(self, resultSpec, kwargs): pass def test_control_spec(self): @self.assertArgSpecMatches(self.ep.control) def control(self, action, args, kwargs): pass def test_rootLinkName(self): rootLinkName = self.ep.rootLinkName if not rootLinkName: return try: self.assertEqual(self.matcher[(rootLinkName,)][0], self.ep) except KeyError: self.fail('No match for rootlink: ' + rootLinkName) buildbot-3.4.0/master/buildbot/test/util/fuzz.py000066400000000000000000000024331413250514000217220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest class FuzzTestCase(unittest.TestCase): # run each test case for 10s FUZZ_TIME = 10 @defer.inlineCallbacks def test_fuzz(self): # note that this will loop if do_fuzz doesn't take long enough endTime = reactor.seconds() + self.FUZZ_TIME while reactor.seconds() < endTime: yield self.do_fuzz(endTime) # delete this test case entirely if fuzzing is not enabled if 'BUILDBOT_FUZZ' not in os.environ: del test_fuzz buildbot-3.4.0/master/buildbot/test/util/integration.py000066400000000000000000000353201413250514000232500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import re import sys from io import StringIO import mock from twisted.internet import defer from twisted.internet import reactor from twisted.python.filepath import FilePath from twisted.trial import unittest from zope.interface import implementer from buildbot.config import MasterConfig from buildbot.data import resultspec from buildbot.interfaces import IConfigLoader from buildbot.master import BuildMaster from buildbot.plugins import worker from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.process.results import statusToString from buildbot.test.util.misc import DebugIntegrationLogsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.sandboxed_worker import SandboxedWorker from buildbot.worker.local import LocalWorker try: from buildbot_worker.bot import Worker except ImportError: Worker = None @implementer(IConfigLoader) class DictLoader: def __init__(self, config_dict): self.config_dict = config_dict def loadConfig(self): return MasterConfig.loadFromDict(self.config_dict, '') @defer.inlineCallbacks def getMaster(case, reactor, config_dict): """ Create a started ``BuildMaster`` with the given configuration. """ basedir = FilePath(case.mktemp()) basedir.createDirectory() config_dict['buildbotNetUsageData'] = None master = BuildMaster( basedir.path, reactor=reactor, config_loader=DictLoader(config_dict)) if 'db_url' not in config_dict: config_dict['db_url'] = 'sqlite://' # TODO: Allow BuildMaster to transparently upgrade the database, at least # for tests. 
master.config.db['db_url'] = config_dict['db_url'] yield master.db.setup(check_version=False) yield master.db.model.upgrade() master.db.setup = lambda: None yield master.startService() case.addCleanup(master.db.pool.shutdown) case.addCleanup(master.stopService) return master class RunFakeMasterTestCase(unittest.TestCase, TestReactorMixin, DebugIntegrationLogsMixin): def setUp(self): self.setUpTestReactor() self.setupDebugIntegrationLogs() def tearDown(self): self.assertFalse(self.master.running, "master is still running!") @defer.inlineCallbacks def setup_master(self, config_dict): self.master = yield getMaster(self, self.reactor, config_dict) @defer.inlineCallbacks def reconfig_master(self, config_dict=None): if config_dict is not None: self.master.config_loader.config_dict = config_dict yield self.master.doReconfig() @defer.inlineCallbacks def clean_master_shutdown(self, quick=False): yield self.master.botmaster.cleanShutdown(quickMode=quick, stopReactor=False) def createLocalWorker(self, name, **kwargs): workdir = FilePath(self.mktemp()) workdir.createDirectory() return LocalWorker(name, workdir.path, **kwargs) @defer.inlineCallbacks def assertBuildResults(self, build_id, result): dbdict = yield self.master.db.builds.getBuild(build_id) self.assertEqual(result, dbdict['results']) @defer.inlineCallbacks def assertStepStateString(self, step_id, state_string): datadict = yield self.master.data.get(('steps', step_id)) self.assertEqual(datadict['state_string'], state_string) @defer.inlineCallbacks def assertLogs(self, build_id, exp_logs): got_logs = {} data_logs = yield self.master.data.get(('builds', build_id, 'steps', 1, 'logs')) for log in data_logs: self.assertTrue(log['complete']) log_contents = yield self.master.data.get(('builds', build_id, 'steps', 1, 'logs', log['slug'], 'contents')) got_logs[log['name']] = log_contents['content'] self.assertEqual(got_logs, exp_logs) @defer.inlineCallbacks def create_build_request(self, builder_ids, properties=None): 
properties = properties.asDict() if properties is not None else None ret = yield self.master.data.updates.addBuildset( waited_for=False, builderids=builder_ids, sourcestamps=[ {'codebase': '', 'repository': '', 'branch': None, 'revision': None, 'project': ''}, ], properties=properties, ) return ret @defer.inlineCallbacks def do_test_build_by_name(self, builder_name): builder_id = yield self.master.data.updates.findBuilderId(builder_name) yield self.do_test_build(builder_id) @defer.inlineCallbacks def do_test_build(self, builder_id): # setup waiting for build to finish d_finished = defer.Deferred() def on_finished(_, __): if not d_finished.called: d_finished.callback(None) consumer = yield self.master.mq.startConsuming(on_finished, ('builds', None, 'finished')) # start the builder yield self.create_build_request([builder_id]) # and wait for build completion yield d_finished yield consumer.stopConsuming() class RunMasterBase(unittest.TestCase): proto = "null" if Worker is None: skip = "buildbot-worker package is not installed" @defer.inlineCallbacks def setupConfig(self, config_dict, startWorker=True, **worker_kwargs): """ Setup and start a master configured by the function configFunc defined in the test module. @type config_dict: dict @param configFunc: The BuildmasterConfig dictionary. """ # mock reactor.stop (which trial *really* doesn't # like test code to call!) 
stop = mock.create_autospec(reactor.stop) self.patch(reactor, 'stop', stop) if startWorker: if self.proto == 'pb': proto = {"pb": {"port": "tcp:0:interface=127.0.0.1"}} workerclass = worker.Worker elif self.proto == 'null': proto = {"null": {}} workerclass = worker.LocalWorker config_dict['workers'] = [workerclass("local1", password=Interpolate("localpw"), missing_timeout=0)] config_dict['protocols'] = proto m = yield getMaster(self, reactor, config_dict) self.master = m self.assertFalse(stop.called, "startService tried to stop the reactor; check logs") if not startWorker: return if self.proto == 'pb': # We find out the worker port automatically workerPort = list(m.pbmanager.dispatchers.values())[ 0].port.getHost().port # create a worker, and attach it to the master, it will be started, and stopped # along with the master worker_dir = FilePath(self.mktemp()) worker_dir.createDirectory() sandboxed_worker_path = os.environ.get( "SANDBOXED_WORKER_PATH", None) if sandboxed_worker_path is None: self.w = Worker( "127.0.0.1", workerPort, "local1", "localpw", worker_dir.path, False, **worker_kwargs) else: self.w = SandboxedWorker( "127.0.0.1", workerPort, "local1", "localpw", worker_dir.path, sandboxed_worker_path, **worker_kwargs) self.addCleanup(self.w.shutdownWorker) elif self.proto == 'null': self.w = None if self.w is not None: yield self.w.setServiceParent(m) @defer.inlineCallbacks def dump(): if not self._passed: dump = StringIO() print(u"FAILED! 
dumping build db for debug", file=dump) builds = yield self.master.data.get(("builds",)) for build in builds: yield self.printBuild(build, dump, withLogs=True) raise self.failureException(dump.getvalue()) self.addCleanup(dump) @defer.inlineCallbacks def doForceBuild(self, wantSteps=False, wantProperties=False, wantLogs=False, useChange=False, forceParams=None, triggerCallback=None): if forceParams is None: forceParams = {} # force a build, and wait until it is finished d = defer.Deferred() # in order to allow trigger based integration tests # we wait until the first started build is finished self.firstbsid = None def newCallback(_, data): if self.firstbsid is None: self.firstbsid = data['bsid'] newConsumer.stopConsuming() def finishedCallback(_, data): if self.firstbsid == data['bsid']: d.callback(data) newConsumer = yield self.master.mq.startConsuming( newCallback, ('buildsets', None, 'new')) finishedConsumer = yield self.master.mq.startConsuming( finishedCallback, ('buildsets', None, 'complete')) if triggerCallback is not None: yield triggerCallback() elif useChange is False: # use data api to force a build yield self.master.data.control("force", forceParams, ("forceschedulers", "force")) else: # use data api to force a build, via a new change yield self.master.data.updates.addChange(**useChange) # wait until we receive the build finished event buildset = yield d buildrequests = yield self.master.data.get( ('buildrequests',), filters=[resultspec.Filter('buildsetid', 'eq', [buildset['bsid']])]) buildrequest = buildrequests[-1] builds = yield self.master.data.get( ('builds',), filters=[resultspec.Filter('buildrequestid', 'eq', [buildrequest['buildrequestid']])]) # if the build has been retried, there will be several matching builds. 
# We return the last build build = builds[-1] finishedConsumer.stopConsuming() yield self.enrichBuild(build, wantSteps, wantProperties, wantLogs) return build @defer.inlineCallbacks def enrichBuild(self, build, wantSteps=False, wantProperties=False, wantLogs=False): # enrich the build result, with the step results if wantSteps: build["steps"] = yield self.master.data.get(("builds", build['buildid'], "steps")) # enrich the step result, with the logs results if wantLogs: build["steps"] = list(build["steps"]) for step in build["steps"]: step['logs'] = yield self.master.data.get(("steps", step['stepid'], "logs")) step["logs"] = list(step['logs']) for log in step["logs"]: log['contents'] = yield self.master.data.get(("logs", log['logid'], "contents")) if wantProperties: build["properties"] = yield self.master.data.get(("builds", build['buildid'], "properties")) @defer.inlineCallbacks def printBuild(self, build, out=sys.stdout, withLogs=False): # helper for debugging: print a build yield self.enrichBuild(build, wantSteps=True, wantProperties=True, wantLogs=True) print(u"*** BUILD {} *** ==> {} ({})".format(build['buildid'], build['state_string'], statusToString(build['results'])), file=out) for step in build['steps']: print(u" *** STEP {} *** ==> {} ({})".format(step['name'], step['state_string'], statusToString(step['results'])), file=out) for url in step['urls']: print(u" url:{} ({})".format(url['name'], url['url']), file=out) for log in step['logs']: print(u" log:{} ({})".format(log['name'], log['num_lines']), file=out) if step['results'] != SUCCESS or withLogs: self.printLog(log, out) def _match_patterns_consume(self, text, patterns, is_regex): for pattern in patterns[:]: if is_regex: if re.search(pattern, text): patterns.remove(pattern) else: if pattern in text: patterns.remove(pattern) return patterns @defer.inlineCallbacks def checkBuildStepLogExist(self, build, expectedLog, onlyStdout=False, regex=False): if isinstance(expectedLog, str): expectedLog = 
[expectedLog] if not isinstance(expectedLog, list): raise Exception('The expectedLog argument must be either string or a list of strings') yield self.enrichBuild(build, wantSteps=True, wantProperties=True, wantLogs=True) for step in build['steps']: for log in step['logs']: for line in log['contents']['content'].splitlines(): if onlyStdout and line[0] != 'o': continue expectedLog = self._match_patterns_consume(line, expectedLog, is_regex=regex) if expectedLog: print(f"{expectedLog} not found in logs") return len(expectedLog) == 0 def printLog(self, log, out): print(u" " * 8 + "*********** LOG: {} *********".format(log['name']), file=out) if log['type'] == 's': for line in log['contents']['content'].splitlines(): linetype = line[0] line = line[1:] if linetype == 'h': # cyan line = "\x1b[36m" + line + "\x1b[0m" if linetype == 'e': # red line = "\x1b[31m" + line + "\x1b[0m" print(u" " * 8 + line) else: print(u"" + log['contents']['content'], file=out) print(u" " * 8 + "********************************", file=out) buildbot-3.4.0/master/buildbot/test/util/interfaces.py000066400000000000000000000113261413250514000230500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import inspect import pkg_resources import zope.interface.interface from twisted.trial import unittest from zope.interface.interface import Attribute class InterfaceTests: # assertions def assertArgSpecMatches(self, actualMethod, *fakeMethods): """Usage:: @self.assertArgSpecMatches(obj.methodUnderTest) def methodTemplate(self, arg1, arg2): pass or, more useful when you will be faking out C{methodUnderTest}: self.assertArgSpecMatches(obj.methodUnderTest, self.fakeMethod) """ def filter(spec): # the tricky thing here is to align args and defaults, since the # defaults correspond to the *last* n elements of args. To make # things easier, we go in reverse, and keep a separate counter for # the defaults args = spec[0] defaults = list(spec[3] if spec[3] is not None else []) di = -1 for ai in range(len(args) - 1, -1, -1): arg = args[ai] if arg.startswith('_') or (arg == 'self' and ai == 0): del args[ai] if -di <= len(defaults): del defaults[di] di += 1 di -= 1 return (args, spec[1], spec[2], defaults or None) def remove_decorators(func): try: return func.__wrapped__ except AttributeError: return func def filter_argspec(func): return filter( inspect.getfullargspec(remove_decorators(func))) def assert_same_argspec(expected, actual): if expected != actual: msg = "Expected: {}; got: {}".format(inspect.formatargspec(*expected), inspect.formatargspec(*actual)) self.fail(msg) actual_argspec = filter_argspec(actualMethod) for fakeMethod in fakeMethods: fake_argspec = filter_argspec(fakeMethod) assert_same_argspec(actual_argspec, fake_argspec) def assert_same_argspec_decorator(decorated): expected_argspec = filter_argspec(decorated) assert_same_argspec(expected_argspec, actual_argspec) # The decorated function works as usual. return decorated return assert_same_argspec_decorator def assertInterfacesImplemented(self, cls): "Given a class, assert that the zope.interface.Interfaces are implemented to specification." 
# see if this version of zope.interface is too old to run these tests zi_vers = pkg_resources.working_set.find( pkg_resources.Requirement.parse('zope.interface')).version if pkg_resources.parse_version(zi_vers) < pkg_resources.parse_version('4.1.1'): raise unittest.SkipTest( "zope.interfaces is too old to run this test") for interface in zope.interface.implementedBy(cls): for attr, template_argspec in interface.namesAndDescriptions(): if not hasattr(cls, attr): msg = ("Expected: {}; to implement: {} as specified in {}" ).format(repr(cls), attr, repr(interface)) self.fail(msg) actual_argspec = getattr(cls, attr) if isinstance(template_argspec, Attribute): continue # else check method signatures while hasattr(actual_argspec, '__wrapped__'): actual_argspec = actual_argspec.__wrapped__ actual_argspec = zope.interface.interface.fromMethod( actual_argspec) if actual_argspec.getSignatureInfo() != template_argspec.getSignatureInfo(): msg = ("{}: expected: {}; got: {}" ).format(attr, template_argspec.getSignatureString(), actual_argspec.getSignatureString()) self.fail(msg) buildbot-3.4.0/master/buildbot/test/util/logging.py000066400000000000000000000036071413250514000223560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re from twisted.python import log class LoggingMixin: def setUpLogging(self): self._logEvents = [] log.addObserver(self._logEvents.append) self.addCleanup(log.removeObserver, self._logEvents.append) def logContainsMessage(self, regexp): r = re.compile(regexp) for event in self._logEvents: msg = log.textFromEventDict(event) if msg is not None: assert not msg.startswith("Unable to format event"), msg if msg is not None and r.search(msg): return True return False def assertLogged(self, regexp): if not self.logContainsMessage(regexp): lines = [log.textFromEventDict(e) for e in self._logEvents] self.fail("{} not matched in log output.\n{} ".format(repr(regexp), lines)) def assertNotLogged(self, regexp): if self.logContainsMessage(regexp): lines = [log.textFromEventDict(e) for e in self._logEvents] self.fail("{} matched in log output.\n{} ".format(repr(regexp), lines)) def assertWasQuiet(self): self.assertEqual([ log.textFromEventDict(event) for event in self._logEvents], []) buildbot-3.4.0/master/buildbot/test/util/migration.py000066400000000000000000000076231413250514000227230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sqlalchemy as sa from alembic.runtime.migration import MigrationContext from twisted.internet import defer from twisted.python import log from buildbot.db import connector from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util import dirs from buildbot.test.util import querylog from buildbot.test.util.misc import TestReactorMixin from buildbot.util import sautils # test_upgrade vs. migration tests # # test_upgrade is an integration test -- it tests the whole upgrade process, # including the code in model.py. Migrate tests are unit tests, and test a # single db upgrade script. class MigrateTestMixin(TestReactorMixin, db.RealDatabaseMixin, dirs.DirsMixin): @defer.inlineCallbacks def setUpMigrateTest(self): self.setUpTestReactor() self.basedir = os.path.abspath("basedir") self.setUpDirs('basedir') yield self.setUpRealDatabase() master = fakemaster.make_master(self) self.db = connector.DBConnector(self.basedir) yield self.db.setServiceParent(master) self.db.pool = self.db_pool def tearDownMigrateTest(self): self.tearDownDirs() return self.tearDownRealDatabase() @defer.inlineCallbacks def do_test_migration(self, base_revision, target_revision, setup_thd_cb, verify_thd_cb): def setup_thd(conn): metadata = sa.MetaData() table = sautils.Table( 'alembic_version', metadata, sa.Column("version_num", sa.String(32), nullable=False), ) table.create(bind=conn) conn.execute(table.insert(), version_num=base_revision) setup_thd_cb(conn) yield self.db.pool.do(setup_thd) alembic_scripts = self.alembic_get_scripts() def upgrade_thd(engine): with querylog.log_queries(): with sautils.withoutSqliteForeignKeys(engine): with engine.connect() as conn: def upgrade(rev, context): log.msg(f'Upgrading from {rev} to {target_revision}') return alembic_scripts._upgrade_revs(target_revision, rev) context = MigrationContext.configure(conn, opts={'fn': upgrade}) with context.begin_transaction(): 
context.run_migrations() yield self.db.pool.do_with_engine(upgrade_thd) def check_table_charsets_thd(engine): # charsets are only a problem for MySQL if engine.dialect.name != 'mysql': return dbs = [r[0] for r in engine.execute("show tables")] for tbl in dbs: r = engine.execute("show create table {}".format(tbl)) create_table = r.fetchone()[1] self.assertIn('DEFAULT CHARSET=utf8', create_table, "table {} does not have the utf8 charset".format(tbl)) yield self.db.pool.do(check_table_charsets_thd) def verify_thd(engine): with sautils.withoutSqliteForeignKeys(engine): verify_thd_cb(engine) yield self.db.pool.do(verify_thd) buildbot-3.4.0/master/buildbot/test/util/misc.py000066400000000000000000000163711413250514000216650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import asyncio import os import sys from io import StringIO from twisted.internet import threads from twisted.python import log from twisted.python import threadpool from twisted.trial.unittest import TestCase import buildbot from buildbot.asyncio import AsyncIOLoopWithTwisted from buildbot.process.buildstep import BuildStep from buildbot.test.fake.reactor import NonThreadPool from buildbot.test.fake.reactor import TestReactor from buildbot.util.eventual import _setReactor class PatcherMixin: """ Mix this in to get a few special-cased patching methods """ def patch_os_uname(self, replacement): # twisted's 'patch' doesn't handle the case where an attribute # doesn't exist.. if hasattr(os, 'uname'): self.patch(os, 'uname', replacement) else: def cleanup(): del os.uname self.addCleanup(cleanup) os.uname = replacement class StdoutAssertionsMixin: """ Mix this in to be able to assert on stdout during the test """ def setUpStdoutAssertions(self): self.stdout = StringIO() self.patch(sys, 'stdout', self.stdout) def assertWasQuiet(self): self.assertEqual(self.stdout.getvalue(), '') def assertInStdout(self, exp): self.assertIn(exp, self.stdout.getvalue()) def getStdout(self): return self.stdout.getvalue().strip() class TestReactorMixin: """ Mix this in to get TestReactor as self.reactor which is correctly cleaned up at the end """ def setUpTestReactor(self, use_asyncio=False): self.patch(threadpool, 'ThreadPool', NonThreadPool) self.reactor = TestReactor() _setReactor(self.reactor) def deferToThread(f, *args, **kwargs): return threads.deferToThreadPool(self.reactor, self.reactor.getThreadPool(), f, *args, **kwargs) self.patch(threads, 'deferToThread', deferToThread) # During shutdown sequence we must first stop the reactor and only then # set unset the reactor used for eventually() because any callbacks # that are run during reactor.stop() may use eventually() themselves. 
self.addCleanup(_setReactor, None) self.addCleanup(self.reactor.stop) if use_asyncio: self.asyncio_loop = AsyncIOLoopWithTwisted(self.reactor) asyncio.set_event_loop(self.asyncio_loop) self.asyncio_loop.start() def stop(): self.asyncio_loop.stop() self.asyncio_loop.close() asyncio.set_event_loop(None) self.addCleanup(stop) class TimeoutableTestCase(TestCase): # The addCleanup in current Twisted does not time out any functions # registered via addCleanups. Until we can depend on fixed Twisted, use # TimeoutableTestCase whenever test failure may cause it to block and not # report anything. def deferRunCleanups(self, ignored, result): self._deferRunCleanupResult = result d = self._run('deferRunCleanupsTimeoutable', result) d.addErrback(self._ebGotMaybeTimeout, result) return d def _ebGotMaybeTimeout(self, failure, result): result.addError(self, failure) def deferRunCleanupsTimeoutable(self): return super().deferRunCleanups(None, self._deferRunCleanupResult) def encodeExecutableAndArgs(executable, args, encoding="utf-8"): """ Encode executable and arguments from unicode to bytes. 
This avoids a deprecation warning when calling reactor.spawnProcess() """ if isinstance(executable, str): executable = executable.encode(encoding) argsBytes = [] for arg in args: if isinstance(arg, str): arg = arg.encode(encoding) argsBytes.append(arg) return (executable, argsBytes) def enable_trace(case, trace_exclusions=None, f=sys.stdout): """This function can be called to enable tracing of the execution """ if trace_exclusions is None: trace_exclusions = [ "twisted", "worker_transition.py", "util/tu", "util/path", "log.py", "/mq/", "/db/", "buildbot/data/", "fake/reactor.py" ] bbbase = os.path.dirname(buildbot.__file__) state = {'indent': 0} def tracefunc(frame, event, arg): if frame.f_code.co_filename.startswith(bbbase): if not any(te in frame.f_code.co_filename for te in trace_exclusions): if event == "call": state['indent'] += 2 print("-" * state['indent'], frame.f_code.co_filename.replace(bbbase, ""), frame.f_code.co_name, frame.f_code.co_varnames, file=f) if event == "return": state['indent'] -= 2 return tracefunc sys.settrace(tracefunc) case.addCleanup(sys.settrace, lambda _a, _b, _c: None) class DebugIntegrationLogsMixin: def setupDebugIntegrationLogs(self): # to ease debugging we display the error logs in the test log origAddCompleteLog = BuildStep.addCompleteLog def addCompleteLog(self, name, _log): if name.endswith("err.text"): log.msg("got error log!", name, _log) return origAddCompleteLog(self, name, _log) self.patch(BuildStep, "addCompleteLog", addCompleteLog) if 'BBTRACE' in os.environ: enable_trace(self) class BuildDictLookAlike: """ a class whose instances compares to any build dict that this reporter is supposed to send out""" def __init__(self, extra_keys=None, expected_missing_keys=None, **assertions): self.keys = [ 'builder', 'builderid', 'buildid', 'buildrequest', 'buildrequestid', 'buildset', 'complete', 'complete_at', 'masterid', 'number', 'parentbuild', 'parentbuilder', 'properties', 'results', 'started_at', 'state_string', 'url', 
'workerid' ] if extra_keys: self.keys.extend(extra_keys) if expected_missing_keys is not None: for key in expected_missing_keys: self.keys.remove(key) self.keys.sort() self.assertions = assertions def __eq__(self, b): if sorted(b.keys()) != self.keys: print(set(b.keys()) - set(self.keys)) print(set(self.keys) - set(b.keys())) return False for k, v in self.assertions.items(): if b[k] != v: return False return True def __ne__(self, b): return not (self == b) def __repr__(self): return "{ any build }" buildbot-3.4.0/master/buildbot/test/util/patch_delay.py000066400000000000000000000055301413250514000232020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # Portions of this file include source code of Python 3.7 from # cpython/Lib/unittest/mock.py file. # # It is licensed under PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2. # Copyright (c) 2001-2019 Python Software Foundation. All rights reserved. 
import contextlib import functools import mock from twisted.internet import defer def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) def _importer(target): components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".{}".format(comp) thing = _dot_lookup(thing, comp, import_path) return thing def _get_target(target): try: target, attribute = target.rsplit('.', 1) except (TypeError, ValueError) as e: raise TypeError("Need a valid target to patch. You supplied: %r" % (target,)) from e return _importer(target), attribute class DelayWrapper: def __init__(self): self._deferreds = [] def add_new(self): d = defer.Deferred() self._deferreds.append(d) return d def __len__(self): return len(self._deferreds) def fire(self): deferreds = self._deferreds self._deferreds = [] for d in deferreds: d.callback(None) @contextlib.contextmanager def patchForDelay(target_name): class Default: pass default = Default() target, attribute = _get_target(target_name) original = getattr(target, attribute, default) if original is default: raise Exception('Could not find name {}'.format(target_name)) if not callable(original): raise Exception('{} is not callable'.format(target_name)) delay = DelayWrapper() @functools.wraps(original) @defer.inlineCallbacks def wrapper(*args, **kwargs): yield delay.add_new() return (yield original(*args, **kwargs)) with mock.patch(target_name, new=wrapper): try: yield delay finally: delay.fire() buildbot-3.4.0/master/buildbot/test/util/pbmanager.py000066400000000000000000000041351413250514000226610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer class PBManagerMixin: def setUpPBChangeSource(self): "Set up a fake self.pbmanager." self.registrations = [] self.unregistrations = [] pbm = self.pbmanager = mock.Mock() pbm.register = self._fake_register def _fake_register(self, portstr, username, password, factory): reg = mock.Mock() def unregister(): self.unregistrations.append((portstr, username, password)) return defer.succeed(None) reg.unregister = unregister self.registrations.append((portstr, username, password)) return reg def assertNotRegistered(self): self.assertEqual(self.registrations, []) def assertNotUnregistered(self): self.assertEqual(self.unregistrations, []) def assertRegistered(self, portstr, username, password): for ps, un, pw in self.registrations: if ps == portstr and username == un and pw == password: return self.fail("not registered: {} not in {}".format(repr(portstr, username, password), self.registrations)) def assertUnregistered(self, portstr, username, password): for ps, un, pw in self.unregistrations: if ps == portstr and username == un and pw == password: return self.fail("still registered") buildbot-3.4.0/master/buildbot/test/util/properties.py000066400000000000000000000017111413250514000231160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from zope.interface import implementer from buildbot.interfaces import IRenderable @implementer(IRenderable) class ConstantRenderable: def __init__(self, value): self.value = value def getRenderingFor(self, props): return self.value buildbot-3.4.0/master/buildbot/test/util/protocols.py000066400000000000000000000047221413250514000227530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.test.util import interfaces class ConnectionInterfaceTest(interfaces.InterfaceTests): def setUp(self): # subclasses must set self.conn in this method raise NotImplementedError def test_sig_notifyOnDisconnect(self): @self.assertArgSpecMatches(self.conn.notifyOnDisconnect) def notifyOnDisconnect(self, cb): pass def test_sig_loseConnection(self): @self.assertArgSpecMatches(self.conn.loseConnection) def loseConnection(self): pass def test_sig_remotePrint(self): @self.assertArgSpecMatches(self.conn.remotePrint) def remotePrint(self, message): pass def test_sig_remoteGetWorkerInfo(self): @self.assertArgSpecMatches(self.conn.remoteGetWorkerInfo) def remoteGetWorkerInfo(self): pass def test_sig_remoteSetBuilderList(self): @self.assertArgSpecMatches(self.conn.remoteSetBuilderList) def remoteSetBuilderList(self, builders): pass def test_sig_remoteStartCommand(self): @self.assertArgSpecMatches(self.conn.remoteStartCommand) def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args): pass def test_sig_remoteShutdown(self): @self.assertArgSpecMatches(self.conn.remoteShutdown) def remoteShutdown(self): pass def test_sig_remoteStartBuild(self): @self.assertArgSpecMatches(self.conn.remoteStartBuild) def remoteStartBuild(self, builderName): pass def test_sig_remoteInterruptCommand(self): @self.assertArgSpecMatches(self.conn.remoteInterruptCommand) def remoteInterruptCommand(builderName, commandId, why): pass buildbot-3.4.0/master/buildbot/test/util/querylog.py000066400000000000000000000065711413250514000226020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import contextlib import logging from twisted.python import log # These routines provides a way to dump SQLAlchemy SQL commands and their # results into Twisted's log. # Logging wrappers are not re-entrant. class _QueryToTwistedHandler(logging.Handler): def __init__(self, log_query_result=False, record_mode=False): super().__init__() self._log_query_result = log_query_result self.recordMode = record_mode self.records = [] def emit(self, record): if self.recordMode: self.records.append(record.getMessage()) return if record.levelno == logging.DEBUG: if self._log_query_result: log.msg("{name}:{thread}:result: {msg}".format( name=record.name, thread=record.threadName, msg=record.getMessage())) else: log.msg("{name}:{thread}:query: {msg}".format( name=record.name, thread=record.threadName, msg=record.getMessage())) def start_log_queries(log_query_result=False, record_mode=False): handler = _QueryToTwistedHandler( log_query_result=log_query_result, record_mode=record_mode) # In 'sqlalchemy.engine' logging namespace SQLAlchemy outputs SQL queries # on INFO level, and SQL queries results on DEBUG level. logger = logging.getLogger('sqlalchemy.engine') # TODO: this is not documented field of logger, so it's probably private. 
handler.prev_level = logger.level logger.setLevel(logging.DEBUG) logger.addHandler(handler) # Do not propagate SQL echoing into ancestor handlers handler.prev_propagate = logger.propagate logger.propagate = False # Return previous values of settings, so they can be carefully restored # later. return handler def stop_log_queries(handler): assert isinstance(handler, _QueryToTwistedHandler) logger = logging.getLogger('sqlalchemy.engine') logger.removeHandler(handler) # Restore logger settings or set them to reasonable defaults. logger.propagate = handler.prev_propagate logger.setLevel(handler.prev_level) @contextlib.contextmanager def log_queries(): handler = start_log_queries() try: yield finally: stop_log_queries(handler) class SqliteMaxVariableMixin: @contextlib.contextmanager def assertNoMaxVariables(self): handler = start_log_queries(record_mode=True) try: yield finally: stop_log_queries(handler) for line in handler.records: self.assertFalse(line.count("?") > 999, "too much variables in " + line) buildbot-3.4.0/master/buildbot/test/util/reporter.py000066400000000000000000000175771413250514000226050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process.results import SUCCESS from buildbot.test import fakedb class ReporterTestMixin: def setup_reporter_test(self): self.reporter_test_project = 'testProject' self.reporter_test_repo = 'https://example.org/repo' self.reporter_test_revision = 'd34db33fd43db33f' self.reporter_test_branch = "master" self.reporter_test_codebase = 'cbgerrit' self.reporter_test_change_id = 'I5bdc2e500d00607af53f0fa4df661aada17f81fc' self.reporter_test_builder_name = 'Builder0' self.reporter_test_props = { 'Stash_branch': 'refs/changes/34/1234/1', 'project': self.reporter_test_project, 'got_revision': self.reporter_test_revision, 'revision': self.reporter_test_revision, 'event.change.id': self.reporter_test_change_id, 'event.change.project': self.reporter_test_project, 'branch': 'refs/pull/34/merge', } self.reporter_test_thing_url = 'http://thing.example.com' @defer.inlineCallbacks def insert_build(self, results, insert_ss=True, parent_plan=False, insert_patch=False): self.insertTestData([results], results, insertSS=insert_ss, parentPlan=parent_plan, insert_patch=insert_patch) build = yield self.master.data.get(("builds", 20)) return build @defer.inlineCallbacks def insert_build_finished(self, results=SUCCESS, **kwargs): return (yield self.insert_build(results=results, **kwargs)) @defer.inlineCallbacks def insert_build_new(self, **kwargs): return (yield self.insert_build(results=None, **kwargs)) @defer.inlineCallbacks def insert_buildrequest_new(self, insert_patch=False, **kwargs): self.db = self.master.db self.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Builder(id=79, name='Builder0'), fakedb.Builder(id=80, name='Builder1'), fakedb.Buildset(id=98, results=None, reason="testReason1", parent_buildid=None), fakedb.BuildRequest(id=11, buildsetid=98, builderid=79) ]) patchid = 99 if insert_patch else None self.db.insertTestData([ 
fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=234), fakedb.SourceStamp( id=234, branch=self.reporter_test_branch, project=self.reporter_test_project, revision=self.reporter_test_revision, repository=self.reporter_test_repo, codebase=self.reporter_test_codebase, patchid=patchid), fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='him@foo', patch_comment='foo', subdir='/foo', patchlevel=3) ]) request = yield self.master.data.get(("buildrequests", 11)) return request def insertTestData(self, buildResults, finalResult, insertSS=True, parentPlan=False, insert_patch=False): self.db = self.master.db self.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Builder(id=79, name='Builder0'), fakedb.Builder(id=80, name='Builder1'), fakedb.Buildset(id=98, results=finalResult, reason="testReason1", parent_buildid=19 if parentPlan else None), fakedb.Change(changeid=13, branch=self.reporter_test_branch, revision='9283', author='me@foo', repository=self.reporter_test_repo, codebase=self.reporter_test_codebase, project='world-domination', sourcestampid=234), ]) if parentPlan: self.db.insertTestData([ fakedb.Worker(id=12, name='wrk_parent'), fakedb.Builder(id=78, name='Builder_parent'), fakedb.Buildset(id=97, results=finalResult, reason="testReason0"), fakedb.BuildRequest(id=10, buildsetid=98, builderid=78), fakedb.Build(id=19, number=1, builderid=78, buildrequestid=10, workerid=12, masterid=92, results=finalResult, state_string="buildText"), ]) if insertSS: patchid = 99 if insert_patch else None self.db.insertTestData([ fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=234), fakedb.SourceStamp( id=234, branch=self.reporter_test_branch, project=self.reporter_test_project, revision=self.reporter_test_revision, repository=self.reporter_test_repo, codebase=self.reporter_test_codebase, patchid=patchid), fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='him@foo', patch_comment='foo', subdir='/foo', 
patchlevel=3), ]) for i, results in enumerate(buildResults): started_at = 10000001 complete_at = None if results is None else 10000005 self.db.insertTestData([ fakedb.BuildRequest( id=11 + i, buildsetid=98, builderid=79 + i), fakedb.Build(id=20 + i, number=i, builderid=79 + i, buildrequestid=11 + i, workerid=13, masterid=92, results=results, state_string="buildText", started_at=started_at, complete_at=complete_at), fakedb.Step(id=50 + i, buildid=20 + i, number=5, name='make'), fakedb.Log(id=60 + i, stepid=50 + i, name='stdio', slug='stdio', type='s', num_lines=7), fakedb.LogChunk(logid=60 + i, first_line=0, last_line=1, compressed=0, content='Unicode log with non-ascii (\u00E5\u00E4\u00F6).'), fakedb.BuildProperty( buildid=20 + i, name="workername", value="wrk"), fakedb.BuildProperty( buildid=20 + i, name="reason", value="because"), fakedb.BuildProperty( buildid=20 + i, name="buildername", value="Builder0"), fakedb.BuildProperty( buildid=20 + i, name="buildnumber", value="{}".format(i)), fakedb.BuildProperty(buildid=20 + i, name="scheduler", value="checkin"), ]) for k, v in self.reporter_test_props.items(): self.db.insertTestData([ fakedb.BuildProperty(buildid=20 + i, name=k, value=v) ]) self.setup_fake_get_changes_for_build() def setup_fake_get_changes_for_build(self, has_change=True): @defer.inlineCallbacks def getChangesForBuild(buildid): if not has_change: return [] assert buildid == 20 ch = yield self.master.db.changes.getChange(13) return [ch] self.master.db.changes.getChangesForBuild = getChangesForBuild buildbot-3.4.0/master/buildbot/test/util/runprocess.py000066400000000000000000000100141413250514000231210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.util import runprocess def _check_env_is_expected(test, expected_env, env): if expected_env is None: return env = env or {} for var, value in expected_env.items(): test.assertEqual(env.get(var), value, 'Expected environment to have {} = {}'.format(var, repr(value))) class ExpectMaster: _stdout = b"" _stderr = b"" _exit = 0 _workdir = None _env = None def __init__(self, command): self._command = command def stdout(self, stdout): assert(isinstance(stdout, bytes)) self._stdout = stdout return self def stderr(self, stderr): assert(isinstance(stderr, bytes)) self._stderr = stderr return self def exit(self, exit): self._exit = exit return self def workdir(self, workdir): self._workdir = workdir return self def env(self, env): self._env = env return self def check(self, test, command, workdir, env): test.assertDictEqual({'command': command, 'workdir': workdir}, {'command': self._command, 'workdir': self._workdir}, "unexpected command run") _check_env_is_expected(test, self._env, env) return (self._exit, self._stdout, self._stderr) def __repr__(self): return "".format(self._command) class MasterRunProcessMixin: long_message = True def setup_master_run_process(self): self._master_run_process_patched = False self._expected_master_commands = [] self._master_run_process_expect_env = {} def assert_all_commands_ran(self): self.assertEqual(self._expected_master_commands, [], "assert all expected commands were run") def patched_run_process(self, reactor, command, workdir=None, 
env=None, collect_stdout=True, collect_stderr=True, stderr_is_error=False, io_timeout=300, runtime_timeout=3600, sigterm_timeout=5, initial_stdin=None): _check_env_is_expected(self, self._master_run_process_expect_env, env) if not self._expected_master_commands: self.fail("got command {} when no further commands were expected".format(command)) expect = self._expected_master_commands.pop(0) rc, stdout, stderr = expect.check(self, command, workdir, env) if not collect_stderr and stderr_is_error and stderr: rc = -1 if collect_stdout and collect_stderr: return (rc, stdout, stderr) if collect_stdout: return (rc, stdout) if collect_stderr: return (rc, stderr) return rc def _patch_runprocess(self): if not self._master_run_process_patched: self.patch(runprocess, "run_process", self.patched_run_process) self._master_run_process_patched = True def add_run_process_expect_env(self, d): self._master_run_process_expect_env.update(d) def expect_commands(self, *exp): for e in exp: if not isinstance(e, ExpectMaster): raise Exception('All expectation must be an instance of ExpectMaster') self._patch_runprocess() self._expected_master_commands.extend(exp) buildbot-3.4.0/master/buildbot/test/util/sandboxed_worker.py000066400000000000000000000063331413250514000242670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import subprocess from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from buildbot.util.service import AsyncService class WorkerProcessProtocol(protocol.ProcessProtocol): def __init__(self): self.finished_deferred = defer.Deferred() def outReceived(self, data): print(data) def errReceived(self, data): print(data) def processEnded(self, _): self.finished_deferred.callback(None) def waitForFinish(self): return self.finished_deferred class SandboxedWorker(AsyncService): def __init__(self, masterhost, port, name, passwd, workerdir, sandboxed_worker_path): self.masterhost = masterhost self.port = port self.workername = name self.workerpasswd = passwd self.workerdir = workerdir self.sandboxed_worker_path = sandboxed_worker_path self.worker = None def startService(self): # Note that we create the worker with sync API # We don't really care as we are in tests res = subprocess.run([self.sandboxed_worker_path, "create-worker", '-q', self.workerdir, self.masterhost + ":" + str(self.port), self.workername, self.workerpasswd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False) if res.returncode != 0: # we do care about finding out why it failed though raise RuntimeError("\n".join([ "Unable to create worker!", res.stdout.decode(), res.stderr.decode() ])) self.processprotocol = processProtocol = WorkerProcessProtocol() # we need to spawn the worker asynchronously though args = [self.sandboxed_worker_path, 'start', '--nodaemon', self.workerdir] self.process = reactor.spawnProcess(processProtocol, self.sandboxed_worker_path, args=args) self.worker = self.master.workers.getWorkerByName(self.workername) return super().startService() @defer.inlineCallbacks def shutdownWorker(self): if self.worker is None: return # on windows, we killing a process does not work well. 
# we use the graceful shutdown feature of buildbot-worker instead to kill the worker # but we must do that before the master is stopping. yield self.worker.shutdown() # wait for process to disappear yield self.processprotocol.waitForFinish() buildbot-3.4.0/master/buildbot/test/util/scheduler.py000066400000000000000000000233161413250514000227050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.schedulers import base from buildbot.test import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import interfaces class SchedulerMixin(interfaces.InterfaceTests): """ This class fakes out enough of a master and the various relevant database connectors to test schedulers. All of the database methods have identical signatures to the real database connectors, but for ease of testing always return an already-fired Deferred, meaning that there is no need to wait for events to complete. This class is tightly coupled with the various L{buildbot.test.fake.fakedb} module. All instance variables are only available after C{attachScheduler} has been called. 
@ivar sched: scheduler instance @ivar master: the fake master @ivar db: the fake db (same as C{self.master.db}, but shorter) """ OTHER_MASTER_ID = 93 def setUpScheduler(self): self.master = fakemaster.make_master(self, wantDb=True, wantMq=True, wantData=True) def tearDownScheduler(self): pass def attachScheduler(self, scheduler, objectid, schedulerid, overrideBuildsetMethods=False, createBuilderDB=False): """Set up a scheduler with a fake master and db; sets self.sched, and sets the master's basedir to the absolute path of 'basedir' in the test directory. If C{overrideBuildsetMethods} is true, then all of the addBuildsetForXxx methods are overridden to simply append the method name and arguments to self.addBuildsetCalls. These overridden methods return buildsets starting with 500 and buildrequest IDs starting with 100. For C{addBuildsetForSourceStamp}, this also overrides DB API methods C{addSourceStamp} and C{addSourceStampSet}, and uses that information to generate C{addBuildsetForSourceStamp} results. 
@returns: scheduler """ scheduler.objectid = objectid # set up a fake master db = self.db = self.master.db self.mq = self.master.mq scheduler.setServiceParent(self.master) rows = [fakedb.Object(id=objectid, name=scheduler.name, class_name='SomeScheduler'), fakedb.Scheduler(id=schedulerid, name=scheduler.name), ] if createBuilderDB is True: rows.extend([fakedb.Builder(name=bname) for bname in scheduler.builderNames]) db.insertTestData(rows) if overrideBuildsetMethods: for method in ( 'addBuildsetForSourceStampsWithDefaults', 'addBuildsetForChanges', 'addBuildsetForSourceStamps'): actual = getattr(scheduler, method) fake = getattr(self, 'fake_{}'.format(method)) self.assertArgSpecMatches(actual, fake) setattr(scheduler, method, fake) self.addBuildsetCalls = [] self._bsidGenerator = iter(range(500, 999)) self._bridGenerator = iter(range(100, 999)) # temporarily override the sourcestamp and sourcestampset methods self.addedSourceStamps = [] self.addedSourceStampSets = [] def fake_addSourceStamp(**kwargs): self.assertEqual(kwargs['sourcestampsetid'], 400 + len(self.addedSourceStampSets) - 1) self.addedSourceStamps.append(kwargs) return defer.succeed(300 + len(self.addedSourceStamps) - 1) self.db.sourcestamps.addSourceStamp = fake_addSourceStamp def fake_addSourceStampSet(): self.addedSourceStampSets.append([]) return defer.succeed(400 + len(self.addedSourceStampSets) - 1) self.db.sourcestamps.addSourceStampSet = fake_addSourceStampSet # patch methods to detect a failure to upcall the activate and # deactivate methods .. 
unless we're testing BaseScheduler def patch(meth): oldMethod = getattr(scheduler, meth) @defer.inlineCallbacks def newMethod(): self._parentMethodCalled = False rv = yield oldMethod() self.assertTrue(self._parentMethodCalled, "'{}' did not call its parent".format(meth)) return rv setattr(scheduler, meth, newMethod) oldParent = getattr(base.BaseScheduler, meth) def newParent(self_): self._parentMethodCalled = True return oldParent(self_) self.patch(base.BaseScheduler, meth, newParent) if scheduler.__class__.activate != base.BaseScheduler.activate: patch('activate') if scheduler.__class__.deactivate != base.BaseScheduler.deactivate: patch('deactivate') self.sched = scheduler return scheduler @defer.inlineCallbacks def setSchedulerToMaster(self, otherMaster): sched_id = yield self.master.data.updates.findSchedulerId(self.sched.name) if otherMaster: self.master.data.updates.schedulerMasters[sched_id] = otherMaster else: del self.master.data.updates.schedulerMasters[sched_id] class FakeChange: who = '' files = [] comments = '' isdir = 0 links = None revision = None when = None branch = None category = None revlink = '' properties = {} repository = '' project = '' codebase = '' def makeFakeChange(self, **kwargs): """Utility method to make a fake Change object with the given attributes""" ch = self.FakeChange() ch.__dict__.update(kwargs) return ch @defer.inlineCallbacks def _addBuildsetReturnValue(self, builderNames): if builderNames is None: builderNames = self.sched.builderNames builderids = [] builders = yield self.db.builders.getBuilders() for builderName in builderNames: for bldrDict in builders: if builderName == bldrDict["name"]: builderids.append(bldrDict["id"]) break assert len(builderids) == len(builderNames) bsid = next(self._bsidGenerator) brids = dict(zip(builderids, self._bridGenerator)) return (bsid, brids) def fake_addBuildsetForSourceStampsWithDefaults(self, reason, sourcestamps=None, waited_for=False, properties=None, builderNames=None, **kw): 
properties = properties.asDict() if properties is not None else None self.assertIsInstance(sourcestamps, list) def sourceStampKey(sourceStamp): return sourceStamp.get("codebase") sourcestamps = sorted(sourcestamps, key=sourceStampKey) self.addBuildsetCalls.append(('addBuildsetForSourceStampsWithDefaults', dict(reason=reason, sourcestamps=sourcestamps, waited_for=waited_for, properties=properties, builderNames=builderNames))) return self._addBuildsetReturnValue(builderNames) def fake_addBuildsetForChanges(self, waited_for=False, reason='', external_idstring=None, changeids=None, builderNames=None, properties=None, **kw): if changeids is None: changeids = [] properties = properties.asDict() if properties is not None else None self.addBuildsetCalls.append(('addBuildsetForChanges', dict(waited_for=waited_for, reason=reason, external_idstring=external_idstring, changeids=changeids, properties=properties, builderNames=builderNames, ))) return self._addBuildsetReturnValue(builderNames) def fake_addBuildsetForSourceStamps(self, waited_for=False, sourcestamps=None, reason='', external_idstring=None, properties=None, builderNames=None, **kw): if sourcestamps is None: sourcestamps = [] properties = properties.asDict() if properties is not None else None self.assertIsInstance(sourcestamps, list) sourcestamps.sort() self.addBuildsetCalls.append(('addBuildsetForSourceStamps', dict(reason=reason, external_idstring=external_idstring, properties=properties, builderNames=builderNames, sourcestamps=sourcestamps))) return self._addBuildsetReturnValue(builderNames) buildbot-3.4.0/master/buildbot/test/util/sourcesteps.py000066400000000000000000000037421413250514000233070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from buildbot.test.util import steps class SourceStepMixin(steps.BuildStepMixin): """ Support for testing source steps. Aside from the capabilities of L{BuildStepMixin}, this adds: - fake sourcestamps The following instance variables are available after C{setupSourceStep}, in addition to those made available by L{BuildStepMixin}: @ivar sourcestamp: fake SourceStamp for the build """ def setUpSourceStep(self): return super().setUpBuildStep() def tearDownSourceStep(self): return super().tearDownBuildStep() # utilities def setupStep(self, step, args=None, patch=None, **kwargs): """ Set up C{step} for testing. This calls L{BuildStepMixin}'s C{setupStep} and then does setup specific to a Source step. """ step = super().setupStep(step, **kwargs) if args is None: args = {} ss = self.sourcestamp = mock.Mock(name="sourcestamp") ss.ssid = 9123 ss.branch = args.get('branch', None) ss.revision = args.get('revision', None) ss.project = '' ss.repository = '' ss.patch = patch ss.patch_info = None ss.changes = [] self.build.getSourceStamp = lambda x=None: ss return step buildbot-3.4.0/master/buildbot/test/util/steps.py000066400000000000000000000456521413250514000220740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.python import log from twisted.python.reflect import namedModule from buildbot.process import buildstep from buildbot.process import remotecommand as real_remotecommand from buildbot.process.results import EXCEPTION from buildbot.test.fake import fakebuild from buildbot.test.fake import fakemaster from buildbot.test.fake import logfile from buildbot.test.fake import remotecommand from buildbot.test.fake import worker from buildbot.util import bytes2unicode def _dict_diff(d1, d2): """ Given two dictionaries describe their difference For nested dictionaries, key-paths are concatenated with the '.' 
operator @return The list of keys missing in d1, the list of keys missing in d2, and the differences in any nested keys """ d1_keys = set(d1.keys()) d2_keys = set(d2.keys()) both = d1_keys & d2_keys missing_in_d1 = [] missing_in_d2 = [] different = [] for k in both: if isinstance(d1[k], dict) and isinstance(d2[k], dict): missing_in_v1, missing_in_v2, different_in_v = _dict_diff( d1[k], d2[k]) missing_in_d1.extend(['{0}.{1}'.format(k, m) for m in missing_in_v1]) missing_in_d2.extend(['{0}.{1}'.format(k, m) for m in missing_in_v2]) for child_k, left, right in different_in_v: different.append(('{0}.{1}'.format(k, child_k), left, right)) continue if d1[k] != d2[k]: different.append((k, d1[k], d2[k])) missing_in_d1.extend(d2_keys - both) missing_in_d2.extend(d1_keys - both) return missing_in_d1, missing_in_d2, different def _describe_cmd_difference(exp_command, exp_args, got_command, got_args): if exp_command != got_command: return 'Expected command type {} got {}. Expected args {}'.format(exp_command, got_command, repr(exp_args)) if exp_args == got_args: return "" text = "" missing_in_exp, missing_in_cmd, diff = _dict_diff(exp_args, got_args) if missing_in_exp: missing_dict = {key: got_args[key] for key in missing_in_exp} text += 'Keys in cmd missing from expectation: {0!r}\n'.format(missing_dict) if missing_in_cmd: missing_dict = {key: exp_args[key] for key in missing_in_cmd} text += 'Keys in expectation missing from command: {0!r}\n'.format(missing_dict) if diff: formatted_diff = [ '"{0}": expected {1!r}, got {2!r}'.format(*d) for d in diff] text += ('Key differences between expectation and command: {0}\n'.format( '\n'.join(formatted_diff))) return text class BuildStepMixin: """ Support for testing build steps. 
This class adds two capabilities: - patch out RemoteCommand with fake versions that check expected commands and produce the appropriate results - surround a step with the mock objects that it needs to execute The following instance variables are available after C{setupStep}: @ivar step: the step under test @ivar build: the fake build containing the step @ivar progress: mock progress object @ivar worker: mock worker object @ivar properties: build properties (L{Properties} instance) """ def setUpBuildStep(self, wantData=True, wantDb=False, wantMq=False): """ @param wantData(bool): Set to True to add data API connector to master. Default value: True. @param wantDb(bool): Set to True to add database connector to master. Default value: False. @param wantMq(bool): Set to True to add mq connector to master. Default value: False. """ if not hasattr(self, 'reactor'): raise Exception('Reactor has not yet been setup for step') self._next_remote_command_number = 0 self._interrupt_remote_command_numbers = [] def create_fake_remote_command(*args, **kwargs): cmd = remotecommand.FakeRemoteCommand(*args, **kwargs) cmd.testcase = self if self._next_remote_command_number in self._interrupt_remote_command_numbers: cmd.set_run_interrupt() self._next_remote_command_number += 1 return cmd def create_fake_remote_shell_command(*args, **kwargs): cmd = remotecommand.FakeRemoteShellCommand(*args, **kwargs) cmd.testcase = self if self._next_remote_command_number in self._interrupt_remote_command_numbers: cmd.set_run_interrupt() self._next_remote_command_number += 1 return cmd self.patch(real_remotecommand, 'RemoteCommand', create_fake_remote_command) self.patch(real_remotecommand, 'RemoteShellCommand', create_fake_remote_shell_command) self.expected_remote_commands = [] self._expected_remote_commands_popped = 0 self.master = fakemaster.make_master(self, wantData=wantData, wantDb=wantDb, wantMq=wantMq) def tearDownBuildStep(self): pass def setupStep(self, step, worker_version=None, 
worker_env=None, buildFiles=None, wantDefaultWorkdir=True): """ Set up C{step} for testing. This begins by using C{step} as a factory to create a I{new} step instance, thereby testing that the factory arguments are handled correctly. It then creates a comfortable environment for the worker to run in, replete with a fake build and a fake worker. As a convenience, it can set the step's workdir with C{'wkdir'}. @param worker_version: worker version to present, as a dictionary mapping command name to version. A command name of '*' will apply for all commands. @param worker_env: environment from the worker at worker startup """ if worker_version is None: worker_version = { '*': '99.99' } if worker_env is None: worker_env = dict() if buildFiles is None: buildFiles = list() step = self.step = buildstep.create_step_from_step_or_factory(step) # set defaults if wantDefaultWorkdir: step.workdir = step._workdir or 'wkdir' # step.build b = self.build = fakebuild.FakeBuild(master=self.master) b.allFiles = lambda: buildFiles b.master = self.master def getWorkerVersion(cmd, oldversion): if cmd in worker_version: return worker_version[cmd] if '*' in worker_version: return worker_version['*'] return oldversion b.getWorkerCommandVersion = getWorkerVersion b.workerEnvironment = worker_env.copy() step.setBuild(b) self.build.builder.config.env = worker_env.copy() # watch for properties being set self.properties = b.getProperties() # step.progress step.progress = mock.Mock(name="progress") # step.worker self.worker = step.worker = worker.FakeWorker(self.master) self.worker.attached(None) # step overrides def addLog(name, type='s', logEncoding=None): _log = logfile.FakeLogFile(name) self.step.logs[name] = _log self.step._connectPendingLogObservers() return defer.succeed(_log) step.addLog = addLog def addHTMLLog(name, html): _log = logfile.FakeLogFile(name) html = bytes2unicode(html) _log.addStdout(html) return defer.succeed(None) step.addHTMLLog = addHTMLLog def addCompleteLog(name, 
text): _log = logfile.FakeLogFile(name) if name in self.step.logs: raise Exception('Attempt to add log {} twice to the logs'.format(name)) self.step.logs[name] = _log _log.addStdout(text) return defer.succeed(None) step.addCompleteLog = addCompleteLog self._got_test_result_sets = [] self._next_test_result_set_id = 1000 def add_test_result_set(description, category, value_unit): self._got_test_result_sets.append((description, category, value_unit)) setid = self._next_test_result_set_id self._next_test_result_set_id += 1 return defer.succeed(setid) step.addTestResultSet = add_test_result_set self._got_test_results = [] def add_test_result(setid, value, test_name=None, test_code_path=None, line=None, duration_ns=None): self._got_test_results.append((setid, value, test_name, test_code_path, line, duration_ns)) step.addTestResult = add_test_result self._got_build_data = {} def set_build_data(name, value, source): self._got_build_data[name] = (value, source) return defer.succeed(None) step.setBuildData = set_build_data # expectations self.exp_result = None self.exp_state_string = None self.exp_properties = {} self.exp_missing_properties = [] self.exp_logfiles = {} self._exp_logfiles_stderr = {} self.exp_hidden = False self.exp_exception = None self._exp_test_result_sets = [] self._exp_test_results = [] self._exp_build_data = {} # check that the step's name is not None self.assertNotEqual(step.name, None) return step def expectCommands(self, *exp): """ Add to the expected remote commands, along with their results. Each argument should be an instance of L{Expect}. """ self.expected_remote_commands.extend(exp) def expectOutcome(self, result, state_string=None): """ Expect the given result (from L{buildbot.process.results}) and status text (a list). """ self.exp_result = result if state_string: self.exp_state_string = state_string def expectProperty(self, property, value, source=None): """ Expect the given property to be set when the step is complete. 
""" self.exp_properties[property] = (value, source) def expectNoProperty(self, property): """ Expect the given property is *not* set when the step is complete """ self.exp_missing_properties.append(property) def expectLogfile(self, logfile, contents): """ Expect a logfile with the given contents """ self.exp_logfiles[logfile] = contents def expect_log_file_stderr(self, logfile, contents): self._exp_logfiles_stderr[logfile] = contents def expect_build_data(self, name, value, source): self._exp_build_data[name] = (value, source) def expectHidden(self, hidden): """ Set whether the step is expected to be hidden. """ self.exp_hidden = hidden def expectException(self, exception_class): """ Set whether the step is expected to raise an exception. """ self.exp_exception = exception_class self.expectOutcome(EXCEPTION) def expectTestResultSets(self, sets): self._exp_test_result_sets = sets def expectTestResults(self, results): self._exp_test_results = results def _dump_logs(self): for l in self.step.logs.values(): if l.stdout: log.msg("{0} stdout:\n{1}".format(l.name, l.stdout)) if l.stderr: log.msg("{0} stderr:\n{1}".format(l.name, l.stderr)) @defer.inlineCallbacks def runStep(self): """ Run the step set up with L{setupStep}, and check the results. 
@returns: Deferred """ self.conn = mock.Mock(name="WorkerForBuilder(connection)") self.step.setupProgress() result = yield self.step.startStep(self.conn) # finish up the debounced updateSummary before checking self.reactor.advance(1) if self.expected_remote_commands: log.msg("un-executed remote commands:") for rc in self.expected_remote_commands: log.msg(repr(rc)) raise AssertionError("un-executed remote commands; see logs") # in case of unexpected result, display logs in stdout for # debugging failing tests if result != self.exp_result: msg = "unexpected result from step; expected {}, got {}".format(self.exp_result, result) log.msg("{}; dumping logs".format(msg)) self._dump_logs() raise AssertionError("{}; see logs".format(msg)) if self.exp_state_string: stepStateString = self.master.data.updates.stepStateString stepids = list(stepStateString) assert stepids, "no step state strings were set" self.assertEqual( self.exp_state_string, stepStateString[stepids[0]], "expected state_string {0!r}, got {1!r}".format( self.exp_state_string, stepStateString[stepids[0]])) for pn, (pv, ps) in self.exp_properties.items(): self.assertTrue(self.properties.hasProperty(pn), "missing property '{}'".format(pn)) self.assertEqual(self.properties.getProperty(pn), pv, "property '{}'".format(pn)) if ps is not None: self.assertEqual( self.properties.getPropertySource(pn), ps, "property {0!r} source has source {1!r}".format( pn, self.properties.getPropertySource(pn))) for pn in self.exp_missing_properties: self.assertFalse(self.properties.hasProperty(pn), "unexpected property '{}'".format(pn)) for l, exp in self.exp_logfiles.items(): got = self.step.logs[l].stdout self._match_log(exp, got, 'stdout') for l, exp in self._exp_logfiles_stderr.items(): got = self.step.logs[l].stderr self._match_log(exp, got, 'stderr') if self.exp_exception: self.assertEqual( len(self.flushLoggedErrors(self.exp_exception)), 1) self.assertEqual(self._exp_test_result_sets, self._got_test_result_sets) 
self.assertEqual(self._exp_test_results, self._got_test_results) self.assertEqual(self._exp_build_data, self._got_build_data) # XXX TODO: hidden # self.step_status.setHidden.assert_called_once_with(self.exp_hidden) def _match_log(self, exp, got, log_type): if hasattr(exp, 'match'): if exp.match(got) is None: log.msg("Unexpected {} log output:\n{}".format(log_type, exp)) log.msg("Expected {} to match:\n{}".format(log_type, got)) raise AssertionError("Unexpected {} log output; see logs".format(log_type)) else: if got != exp: log.msg("Unexpected {} log output:\n{}".format(log_type, exp)) log.msg("Expected {} log output:\n{}".format(log_type, got)) raise AssertionError("Unexpected {} log output; see logs".format(log_type)) # callbacks from the running step @defer.inlineCallbacks def _validate_expectation(self, exp, command): got = (command.remote_command, command.args) for child_exp in exp.nestedExpectations(): try: yield self._validate_expectation(child_exp, command) exp.expectationPassed(exp) except AssertionError as e: # log this error, as the step may swallow the AssertionError or # otherwise obscure the failure. Trial will see the exception in # the log and print an [ERROR]. This may result in # double-reporting, but that's better than non-reporting! 
log.err() exp.raiseExpectationFailure(child_exp, e) if exp.shouldAssertCommandEqualExpectation(): self.assertEqual(exp.interrupted, command.interrupted) # first check any ExpectedRemoteReference instances exp_tup = (exp.remote_command, exp.args) if exp_tup != got: msg = "Command contents different from expected (command index: {}); {}".format( self._expected_remote_commands_popped, _describe_cmd_difference(exp.remote_command, exp.args, command.remote_command, command.args)) raise AssertionError(msg) if exp.shouldRunBehaviors(): # let the Expect object show any behaviors that are required yield exp.runBehaviors(command) @defer.inlineCallbacks def _remotecommand_run(self, command, step, conn, builder_name): self.assertEqual(step, self.step) self.assertEqual(conn, self.conn) got = (command.remote_command, command.args) if not self.expected_remote_commands: self.fail("got command %r when no further commands were expected" % (got,)) exp = self.expected_remote_commands[0] try: yield self._validate_expectation(exp, command) exp.expectationPassed(exp) except AssertionError as e: # log this error, as the step may swallow the AssertionError or # otherwise obscure the failure. Trial will see the exception in # the log and print an [ERROR]. This may result in # double-reporting, but that's better than non-reporting! 
log.err() exp.raiseExpectationFailure(exp, e) finally: if not exp.shouldKeepMatchingAfter(command): self.expected_remote_commands.pop(0) self._expected_remote_commands_popped += 1 return command def changeWorkerSystem(self, system): self.worker.worker_system = system if system in ['nt', 'win32']: self.build.path_module = namedModule('ntpath') self.worker.worker_basedir = '\\wrk' else: self.build.path_module = namedModule('posixpath') self.worker.worker_basedir = '/wrk' def interrupt_nth_remote_command(self, number): self._interrupt_remote_command_numbers.append(number) buildbot-3.4.0/master/buildbot/test/util/tuplematching.py000066400000000000000000000040631413250514000235710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members class TupleMatchingMixin: # a bunch of tuple-matching tests that all call do_test_match # this is used to test this behavior in a few places def do_test_match(self, routingKey, shouldMatch, *tuples): raise NotImplementedError def test_simple_tuple_match(self): return self.do_test_match(('abc',), True, ('abc',)) def test_simple_tuple_no_match(self): return self.do_test_match(('abc',), False, ('def',)) def test_multiple_tuple_match(self): return self.do_test_match(('a', 'b', 'c'), True, ('a', 'b', 'c')) def test_multiple_tuple_match_tuple_prefix(self): return self.do_test_match(('a', 'b', 'c'), False, ('a', 'b')) def test_multiple_tuple_match_tuple_suffix(self): return self.do_test_match(('a', 'b', 'c'), False, ('b', 'c')) def test_multiple_tuple_match_rk_prefix(self): return self.do_test_match(('a', 'b'), False, ('a', 'b', 'c')) def test_multiple_tuple_match_rk_suffix(self): return self.do_test_match(('b', 'c'), False, ('a', 'b', 'c')) def test_None_match(self): return self.do_test_match(('a', 'b', 'c'), True, ('a', None, 'c')) def test_None_match_empty(self): return self.do_test_match(('a', '', 'c'), True, ('a', None, 'c')) def test_None_no_match(self): return self.do_test_match(('a', 'b', 'c'), False, ('a', None, 'x')) buildbot-3.4.0/master/buildbot/test/util/validation.py000066400000000000000000000554741413250514000230730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # See "Type Validation" in master/docs/developer/tests.rst import datetime import json import re from buildbot.util import UTC from buildbot.util import bytes2unicode # Base class validatorsByName = {} class Validator: name = None hasArgs = False def validate(self, name, object): raise NotImplementedError class __metaclass__(type): def __new__(mcs, name, bases, attrs): cls = type.__new__(mcs, name, bases, attrs) if 'name' in attrs and attrs['name']: assert attrs['name'] not in validatorsByName validatorsByName[attrs['name']] = cls return cls # Basic types class InstanceValidator(Validator): types = () def validate(self, name, object): if not isinstance(object, self.types): yield "{} ({!r}) is not a {}".format( name, object, self.name or repr(self.types)) class IntValidator(InstanceValidator): types = (int,) name = 'integer' class BooleanValidator(InstanceValidator): types = (bool,) name = 'boolean' class StringValidator(InstanceValidator): # strings must be unicode types = (str,) name = 'string' class BinaryValidator(InstanceValidator): types = (bytes,) name = 'bytestring' class StrValidator(InstanceValidator): types = (str,) name = 'str' class DateTimeValidator(Validator): types = (datetime.datetime,) name = 'datetime' def validate(self, name, object): if not isinstance(object, datetime.datetime): yield "{} - {!r} - is not a datetime".format(name, object) elif object.tzinfo != UTC: yield "{} is not a UTC datetime".format(name) class IdentifierValidator(Validator): types = (str,) name = 'identifier' hasArgs = True ident_re = re.compile('^[a-zA-Z\u00a0-\U0010ffff_-][a-zA-Z0-9\u00a0-\U0010ffff_-]*$', flags=re.UNICODE) def __init__(self, len): self.len = len def validate(self, name, object): if not isinstance(object, str): 
yield "{} - {!r} - is not a unicode string".format(name, object) elif not self.ident_re.match(object): yield "{} - {!r} - is not an identifier".format(name, object) elif not object: yield "{} - identifiers cannot be an empty string".format(name) elif len(object) > self.len: yield "{} - {!r} - is longer than {} characters".format( name, object, self.len) # Miscellaneous class NoneOk: def __init__(self, original): self.original = original def validate(self, name, object): if object is None: return else: for msg in self.original.validate(name, object): yield msg class Any: def validate(self, name, object): return # Compound Types class DictValidator(Validator): name = 'dict' def __init__(self, optionalNames=None, **keys): if optionalNames is None: optionalNames = [] self.optionalNames = set(optionalNames) self.keys = keys self.expectedNames = set(keys.keys()) def validate(self, name, object): # this uses isinstance, allowing dict subclasses as used by the DB API if not isinstance(object, dict): yield "{} ({!r}) is not a dictionary (got type {})".format( name, object, type(object)) return gotNames = set(object.keys()) unexpected = gotNames - self.expectedNames if unexpected: yield "{} has unexpected keys {}".format(name, ", ".join([repr(n) for n in unexpected])) missing = self.expectedNames - self.optionalNames - gotNames if missing: yield "{} is missing keys {}".format(name, ", ".join([repr(n) for n in missing])) for k in gotNames & self.expectedNames: for msg in self.keys[k].validate("{}[{!r}]".format(name, k), object[k]): yield msg class SequenceValidator(Validator): type = None def __init__(self, elementValidator): self.elementValidator = elementValidator def validate(self, name, object): if not isinstance(object, self.type): # noqa pylint: disable=isinstance-second-argument-not-valid-type yield "{} ({!r}) is not a {}".format(name, object, self.name) return for idx, elt in enumerate(object): for msg in self.elementValidator.validate("{}[{}]".format(name, idx), 
elt): yield msg class ListValidator(SequenceValidator): type = list name = 'list' class TupleValidator(SequenceValidator): type = tuple name = 'tuple' class StringListValidator(ListValidator): name = 'string-list' def __init__(self): super().__init__(StringValidator()) class SourcedPropertiesValidator(Validator): name = 'sourced-properties' def validate(self, name, object): if not isinstance(object, dict): yield "{} is not sourced properties (not a dict)".format(name) return for k, v in object.items(): if not isinstance(k, str): yield "{} property name {!r} is not unicode".format(name, k) if not isinstance(v, tuple) or len(v) != 2: yield "{} property value for '{}' is not a 2-tuple".format(name, k) return propval, propsrc = v if not isinstance(propsrc, str): yield "{}[{}] source {!r} is not unicode".format(name, k, propsrc) try: json.dumps(propval) except (TypeError, ValueError): yield "{}[{!r}] value is not JSON-able".format(name, k) class JsonValidator(Validator): name = 'json' def validate(self, name, object): try: json.dumps(object) except (TypeError, ValueError): yield "{}[{!r}] value is not JSON-able".format(name, object) class PatchValidator(Validator): name = 'patch' validator = DictValidator( body=NoneOk(BinaryValidator()), level=NoneOk(IntValidator()), subdir=NoneOk(StringValidator()), author=NoneOk(StringValidator()), comment=NoneOk(StringValidator()), ) def validate(self, name, object): for msg in self.validator.validate(name, object): yield msg class MessageValidator(Validator): routingKeyValidator = TupleValidator(StrValidator()) def __init__(self, events, messageValidator): self.events = [bytes2unicode(e) for e in set(events)] self.messageValidator = messageValidator def validate(self, name, routingKey_message): try: routingKey, message = routingKey_message except (TypeError, ValueError) as e: yield "{!r}: not a routing key and message: {}".format(routingKey_message, e) routingKeyBad = False for msg in self.routingKeyValidator.validate("routingKey", 
routingKey): yield msg routingKeyBad = True if not routingKeyBad: event = routingKey[-1] if event not in self.events: yield "routing key event {!r} is not valid".format(event) for msg in self.messageValidator.validate("{} message".format(routingKey[0]), message): yield msg class Selector(Validator): def __init__(self): self.selectors = [] def add(self, selector, validator): self.selectors.append((selector, validator)) def validate(self, name, arg_object): try: arg, object = arg_object except (TypeError, ValueError) as e: yield "{!r}: not a not data options and data dict: {}".format(arg_object, e) for selector, validator in self.selectors: if selector is None or selector(arg): for msg in validator.validate(name, object): yield msg return yield "no match for selector argument {!r}".format(arg) # Type definitions message = {} dbdict = {} # parse and use a ResourceType class's dataFields into a validator # masters message['masters'] = Selector() message['masters'].add(None, MessageValidator( events=[b'started', b'stopped'], messageValidator=DictValidator( masterid=IntValidator(), name=StringValidator(), active=BooleanValidator(), # last_active is not included ))) dbdict['masterdict'] = DictValidator( id=IntValidator(), name=StringValidator(), active=BooleanValidator(), last_active=DateTimeValidator(), ) # sourcestamp _sourcestamp = dict( ssid=IntValidator(), branch=NoneOk(StringValidator()), revision=NoneOk(StringValidator()), repository=StringValidator(), project=StringValidator(), codebase=StringValidator(), created_at=DateTimeValidator(), patch=NoneOk(DictValidator( body=NoneOk(BinaryValidator()), level=NoneOk(IntValidator()), subdir=NoneOk(StringValidator()), author=NoneOk(StringValidator()), comment=NoneOk(StringValidator()))), ) message['sourcestamps'] = Selector() message['sourcestamps'].add(None, DictValidator( **_sourcestamp )) dbdict['ssdict'] = DictValidator( ssid=IntValidator(), branch=NoneOk(StringValidator()), revision=NoneOk(StringValidator()), 
patchid=NoneOk(IntValidator()), patch_body=NoneOk(BinaryValidator()), patch_level=NoneOk(IntValidator()), patch_subdir=NoneOk(StringValidator()), patch_author=NoneOk(StringValidator()), patch_comment=NoneOk(StringValidator()), codebase=StringValidator(), repository=StringValidator(), project=StringValidator(), created_at=DateTimeValidator(), ) # builder message['builders'] = Selector() message['builders'].add(None, MessageValidator( events=[b'started', b'stopped'], messageValidator=DictValidator( builderid=IntValidator(), masterid=IntValidator(), name=StringValidator(), ))) dbdict['builderdict'] = DictValidator( id=IntValidator(), masterids=ListValidator(IntValidator()), name=StringValidator(), description=NoneOk(StringValidator()), tags=ListValidator(StringValidator()), ) # worker dbdict['workerdict'] = DictValidator( id=IntValidator(), name=StringValidator(), configured_on=ListValidator( DictValidator( masterid=IntValidator(), builderid=IntValidator(), ) ), paused=BooleanValidator(), graceful=BooleanValidator(), connected_to=ListValidator(IntValidator()), workerinfo=JsonValidator(), ) # buildset _buildset = dict( bsid=IntValidator(), external_idstring=NoneOk(StringValidator()), reason=StringValidator(), submitted_at=IntValidator(), complete=BooleanValidator(), complete_at=NoneOk(IntValidator()), results=NoneOk(IntValidator()), parent_buildid=NoneOk(IntValidator()), parent_relationship=NoneOk(StringValidator()), ) _buildsetEvents = [b'new', b'complete'] message['buildsets'] = Selector() message['buildsets'].add(lambda k: k[-1] == 'new', MessageValidator( events=_buildsetEvents, messageValidator=DictValidator( scheduler=StringValidator(), # only for 'new' sourcestamps=ListValidator( DictValidator( **_sourcestamp )), **_buildset ))) message['buildsets'].add(None, MessageValidator( events=_buildsetEvents, messageValidator=DictValidator( sourcestamps=ListValidator( DictValidator( **_sourcestamp )), **_buildset ))) dbdict['bsdict'] = DictValidator( bsid=IntValidator(), 
external_idstring=NoneOk(StringValidator()), reason=StringValidator(), sourcestamps=ListValidator(IntValidator()), submitted_at=DateTimeValidator(), complete=BooleanValidator(), complete_at=NoneOk(DateTimeValidator()), results=NoneOk(IntValidator()), parent_buildid=NoneOk(IntValidator()), parent_relationship=NoneOk(StringValidator()), ) # buildrequest message['buildrequests'] = Selector() message['buildrequests'].add(None, MessageValidator( events=[b'new', b'claimed', b'unclaimed'], messageValidator=DictValidator( # TODO: probably wrong! brid=IntValidator(), builderid=IntValidator(), bsid=IntValidator(), buildername=StringValidator(), ))) # change message['changes'] = Selector() message['changes'].add(None, MessageValidator( events=[b'new'], messageValidator=DictValidator( changeid=IntValidator(), parent_changeids=ListValidator(IntValidator()), author=StringValidator(), committer=StringValidator(), files=ListValidator(StringValidator()), comments=StringValidator(), revision=NoneOk(StringValidator()), when_timestamp=IntValidator(), branch=NoneOk(StringValidator()), category=NoneOk(StringValidator()), revlink=NoneOk(StringValidator()), properties=SourcedPropertiesValidator(), repository=StringValidator(), project=StringValidator(), codebase=StringValidator(), sourcestamp=DictValidator( **_sourcestamp ), ))) dbdict['chdict'] = DictValidator( changeid=IntValidator(), author=StringValidator(), committer=StringValidator(), files=ListValidator(StringValidator()), comments=StringValidator(), revision=NoneOk(StringValidator()), when_timestamp=DateTimeValidator(), branch=NoneOk(StringValidator()), category=NoneOk(StringValidator()), revlink=NoneOk(StringValidator()), properties=SourcedPropertiesValidator(), repository=StringValidator(), project=StringValidator(), codebase=StringValidator(), sourcestampid=IntValidator(), parent_changeids=ListValidator(IntValidator()), ) # changesources dbdict['changesourcedict'] = DictValidator( id=IntValidator(), name=StringValidator(), 
masterid=NoneOk(IntValidator()), ) # schedulers dbdict['schedulerdict'] = DictValidator( id=IntValidator(), name=StringValidator(), masterid=NoneOk(IntValidator()), enabled=BooleanValidator(), ) # builds _build = dict( buildid=IntValidator(), number=IntValidator(), builderid=IntValidator(), buildrequestid=IntValidator(), workerid=IntValidator(), masterid=IntValidator(), started_at=IntValidator(), complete=BooleanValidator(), complete_at=NoneOk(IntValidator()), state_string=StringValidator(), results=NoneOk(IntValidator()), ) _buildEvents = [b'new', b'complete'] message['builds'] = Selector() message['builds'].add(None, MessageValidator( events=_buildEvents, messageValidator=DictValidator( **_build ))) # As build's properties are fetched at DATA API level, # a distinction shall be made as both are not equal. # Validates DB layer dbdict['dbbuilddict'] = buildbase = DictValidator( id=IntValidator(), number=IntValidator(), builderid=IntValidator(), buildrequestid=IntValidator(), workerid=IntValidator(), masterid=IntValidator(), started_at=DateTimeValidator(), complete_at=NoneOk(DateTimeValidator()), state_string=StringValidator(), results=NoneOk(IntValidator()), ) # Validates DATA API layer dbdict['builddict'] = DictValidator( properties=NoneOk(SourcedPropertiesValidator()), **buildbase.keys) # build data _build_data_msgdict = DictValidator( buildid=IntValidator(), name=StringValidator(), value=NoneOk(BinaryValidator()), length=IntValidator(), source=StringValidator(), ) message['build_data'] = Selector() message['build_data'].add(None, MessageValidator(events=[], messageValidator=_build_data_msgdict)) dbdict['build_datadict'] = DictValidator( buildid=IntValidator(), name=StringValidator(), value=NoneOk(BinaryValidator()), length=IntValidator(), source=StringValidator(), ) # steps _step = dict( stepid=IntValidator(), number=IntValidator(), name=IdentifierValidator(50), buildid=IntValidator(), started_at=IntValidator(), complete=BooleanValidator(), 
complete_at=NoneOk(IntValidator()), state_string=StringValidator(), results=NoneOk(IntValidator()), urls=ListValidator(StringValidator()), hidden=BooleanValidator(), ) _stepEvents = [b'new', b'complete'] message['steps'] = Selector() message['steps'].add(None, MessageValidator( events=_stepEvents, messageValidator=DictValidator( **_step ))) dbdict['stepdict'] = DictValidator( id=IntValidator(), number=IntValidator(), name=IdentifierValidator(50), buildid=IntValidator(), started_at=DateTimeValidator(), complete_at=NoneOk(DateTimeValidator()), state_string=StringValidator(), results=NoneOk(IntValidator()), urls=ListValidator(StringValidator()), hidden=BooleanValidator(), ) # logs _log = dict( logid=IntValidator(), name=IdentifierValidator(50), stepid=IntValidator(), complete=BooleanValidator(), num_lines=IntValidator(), type=IdentifierValidator(1)) _logEvents = ['new', 'complete', 'appended'] # message['log'] dbdict['logdict'] = DictValidator( id=IntValidator(), stepid=IntValidator(), name=StringValidator(), slug=IdentifierValidator(50), complete=BooleanValidator(), num_lines=IntValidator(), type=IdentifierValidator(1)) # test results sets _test_result_set_msgdict = DictValidator( builderid=IntValidator(), buildid=IntValidator(), stepid=IntValidator(), description=NoneOk(StringValidator()), category=StringValidator(), value_unit=StringValidator(), tests_passed=NoneOk(IntValidator()), tests_failed=NoneOk(IntValidator()), complete=BooleanValidator() ) message['test_result_sets'] = Selector() message['test_result_sets'].add(None, MessageValidator(events=[b'new', b'completed'], messageValidator=_test_result_set_msgdict)) dbdict['test_result_setdict'] = DictValidator( id=IntValidator(), builderid=IntValidator(), buildid=IntValidator(), stepid=IntValidator(), description=NoneOk(StringValidator()), category=StringValidator(), value_unit=StringValidator(), tests_passed=NoneOk(IntValidator()), tests_failed=NoneOk(IntValidator()), complete=BooleanValidator() ) # test results 
_test_results_msgdict = DictValidator( builderid=IntValidator(), test_result_setid=IntValidator(), test_name=NoneOk(StringValidator()), test_code_path=NoneOk(StringValidator()), line=NoneOk(IntValidator()), duration_ns=NoneOk(IntValidator()), value=StringValidator(), ) message['test_results'] = Selector() message['test_results'].add(None, MessageValidator(events=[b'new'], messageValidator=_test_results_msgdict)) dbdict['test_resultdict'] = DictValidator( id=IntValidator(), builderid=IntValidator(), test_result_setid=IntValidator(), test_name=NoneOk(StringValidator()), test_code_path=NoneOk(StringValidator()), line=NoneOk(IntValidator()), duration_ns=NoneOk(IntValidator()), value=StringValidator(), ) # external functions def _verify(testcase, validator, name, object): msgs = list(validator.validate(name, object)) if msgs: msg = "; ".join(msgs) if testcase: testcase.fail(msg) else: raise AssertionError(msg) def verifyMessage(testcase, routingKey, message_): # the validator is a Selector wrapping a MessageValidator, so we need to # pass (arg, (routingKey, message)), where the routing key is the arg # the "type" of the message is identified by last path name # -1 being the event, and -2 the id. validator = message[bytes2unicode(routingKey[-3])] _verify(testcase, validator, '', (routingKey, (routingKey, message_))) def verifyDbDict(testcase, type, value): _verify(testcase, dbdict[type], type, value) def verifyData(testcase, entityType, options, value): _verify(testcase, entityType, entityType.name, value) def verifyType(testcase, name, value, validator): _verify(testcase, validator, name, value) buildbot-3.4.0/master/buildbot/test/util/warnings.py000066400000000000000000000076101413250514000225560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

# Utility functions for catching Python warnings.
# Twisted's TestCase already gathers produced warnings
# (see t.t.u.T.flushWarnings()), however Twisted's implementation doesn't
# allow fine-grained control over caught warnings.
# This implementation uses context-manager style to mark the interesting
# block of code in which to catch warnings, which makes it easy to specify
# exactly which statements should generate warnings and which shouldn't.
# This implementation also allows nested checks.

import contextlib
import re
import warnings


@contextlib.contextmanager
def _recordWarnings(category, output):
    # Record warnings of `category` raised inside the `with` block into the
    # `output` list.  Warnings of other categories are re-raised after the
    # block exits so that enclosing recorders (nested checks) still see them.
    assert isinstance(output, list)
    unrelated_warns = []
    with warnings.catch_warnings(record=True) as all_warns:
        # Cause all warnings of the provided category to always be
        # triggered.
        warnings.simplefilter("always", category)
        yield
    # Filter warnings: keep those of the requested category, set the
    # rest aside.
    for w in all_warns:
        if isinstance(w.message, category):
            output.append(w)
        else:
            unrelated_warns.append(w)
    # Re-raise unrelated warnings (at their original source location) so
    # outer handlers can observe them.
    for w in unrelated_warns:
        warnings.warn_explicit(w.message, w.category, w.filename, w.lineno)


@contextlib.contextmanager
def assertProducesWarnings(filter_category, num_warnings=None,
                           messages_patterns=None, message_pattern=None):
    # Assert that the code inside the `with` block produces the expected
    # warnings of `filter_category`:
    #   - messages_patterns: list of regexes matched in order against the
    #     produced warning messages (implies the expected count);
    #   - message_pattern: single regex applied to every produced warning;
    #   - num_warnings: exact expected count (defaults to 1 when only
    #     message_pattern is given).
    if messages_patterns is not None:
        assert message_pattern is None
        assert num_warnings is None
        num_warnings = len(messages_patterns)
    else:
        assert num_warnings is not None or message_pattern is not None

    warns = []
    with _recordWarnings(filter_category, warns):
        yield

    if num_warnings is None:
        num_warnings = 1

    assert len(warns) == num_warnings, \
        "Number of occurred warnings is not correct. " \
        "Expected {num} warnings, received {num_received}:\n" \
        "{warns}".format(
            num=num_warnings, num_received=len(warns),
            warns="\n".join(map(str, warns)))

    if messages_patterns is None and message_pattern is not None:
        messages_patterns = [message_pattern] * num_warnings

    if messages_patterns is not None:
        for w, pattern in zip(warns, messages_patterns):
            # TODO: Maybe don't use regexp, but use simple substring check?
            assert re.search(pattern, str(w.message)), \
                "Warning pattern doesn't match. Expected pattern:\n" \
                "{pattern}\n" \
                "Received message:\n" \
                "{message}\n" \
                "All gathered warnings:\n" \
                "{warns}".format(pattern=pattern, message=w.message,
                                 warns="\n".join(map(str, warns)))


@contextlib.contextmanager
def assertProducesWarning(filter_category, message_pattern=None):
    # Convenience wrapper: assert exactly one warning of `filter_category`.
    with assertProducesWarnings(filter_category, num_warnings=1,
                                message_pattern=message_pattern):
        yield


@contextlib.contextmanager
def assertNotProducesWarnings(filter_category):
    # Assert that no warnings of `filter_category` are produced in the block.
    with assertProducesWarnings(filter_category, 0):
        yield


@contextlib.contextmanager
def ignoreWarning(category):
    # Silently swallow warnings of `category` produced inside the block.
    with _recordWarnings(category, []):
        yield
buildbot-3.4.0/master/buildbot/test/util/www.py000066400000000000000000000175361413250514000215540ustar00rootroot00000000000000# This file is part of Buildbot.
# Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import json
import os

import pkg_resources

from io import BytesIO
from io import StringIO
from urllib.parse import parse_qs
from urllib.parse import unquote as urlunquote
from uuid import uuid1

import mock

from twisted.internet import defer
from twisted.web import server

from buildbot.test.fake import fakemaster
from buildbot.util import bytes2unicode
from buildbot.util import unicode2bytes
from buildbot.www import auth
from buildbot.www import authz


class FakeSession:
    # Minimal stand-in for a twisted.web session: starts anonymous and
    # ignores session updates.

    def __init__(self):
        self.user_info = {"anonymous": True}

    def updateSession(self, request):
        pass


class FakeRequest:
    # Fake twisted.web Request for rendering resources in tests.  Collects
    # everything written/redirected and fires `self.deferred` when the
    # request is finished.
    written = b''
    finished = False
    redirected_to = None
    rendered_resource = None
    failure = None
    method = b'GET'
    path = b'/req.path'
    responseCode = 200

    def __init__(self, path=None):
        self.headers = {}
        self.input_headers = {}
        self.prepath = []

        # Split optional query string off the path and parse it into args.
        x = path.split(b'?', 1)
        if len(x) == 1:
            self.path = path
            self.args = {}
        else:
            path, argstring = x
            self.path = path
            self.args = parse_qs(argstring, 1)
        self.uri = self.path
        self.postpath = []
        # postpath mimics twisted's URL-decoded path segments (as bytes).
        for p in path[1:].split(b'/'):
            path = urlunquote(bytes2unicode(p))
            self.postpath.append(unicode2bytes(path))

        self.deferred = defer.Deferred()

    def write(self, data):
        self.written = self.written + data

    def redirect(self, url):
        self.redirected_to = url

    def render(self, rsrc):
        # NOTE(review): assigns to a local, not self.rendered_resource — the
        # class attribute stays None.  Looks intentional for firing the
        # deferred only, but confirm against callers.
        rendered_resource = rsrc
        self.deferred.callback(rendered_resource)

    def finish(self):
        # Fire the deferred with either the redirect target or everything
        # written so far.
        self.finished = True
        if self.redirected_to is not None:
            self.deferred.callback(dict(redirected=self.redirected_to))
        else:
            self.deferred.callback(self.written)

    def setResponseCode(self, code, text=None):
        # twisted > 16 started to assert this
        assert isinstance(code, int)
        self.responseCode = code
        self.responseText = text

    def setHeader(self, hdr, value):
        assert isinstance(hdr, bytes)
        assert isinstance(value, bytes)
        self.headers.setdefault(hdr, []).append(value)

    def getHeader(self, key):
        assert isinstance(key, bytes)
        return self.input_headers.get(key)

    def processingFailed(self, f):
        self.deferred.errback(f)

    def notifyFinish(self):
        # Return a new deferred that mirrors the request's completion.
        d = defer.Deferred()

        @self.deferred.addBoth
        def finished(res):
            d.callback(res)
            return res
        return d

    def getSession(self):
        return self.session


class RequiresWwwMixin:
    # mix this into a TestCase to skip if buildbot-www is not installed

    if not list(pkg_resources.iter_entry_points('buildbot.www', 'base')):
        if 'BUILDBOT_TEST_REQUIRE_WWW' in os.environ:
            raise RuntimeError('$BUILDBOT_TEST_REQUIRE_WWW is set but '
                               'buildbot-www is not installed')
        skip = 'buildbot-www not installed'


class WwwTestMixin(RequiresWwwMixin):
    # Helpers for rendering www resources against a fake master/request.
    UUID = str(uuid1())

    def make_master(self, wantGraphql=False, url=None, **kwargs):
        # Build a fake master with a mocked `www` service and a fresh fake
        # session; extra kwargs are merged into the www config dict.
        master = fakemaster.make_master(self, wantData=True, wantGraphql=wantGraphql)
        self.master = master
        master.www = mock.Mock()  # to handle the resourceNeedsReconfigs call
        master.www.getUserInfos = lambda _: getattr(
            self.master.session, "user_info", {"anonymous": True})
        cfg = dict(port=None, auth=auth.NoAuth(), authz=authz.Authz())
        cfg.update(kwargs)
        master.config.www = cfg
        if url is not None:
            master.config.buildbotURL = url
        self.master.session = FakeSession()
        self.master.authz = cfg["authz"]
        self.master.authz.setMaster(self.master)
        return master

    def make_request(self, path=None, method=b'GET'):
        self.request = FakeRequest(path)
        self.request.session = self.master.session
        self.request.method = method
        return self.request

    def render_resource(self, rsrc, path=b'/', accept=None, method=b'GET',
                        origin=None, access_control_request_method=None,
                        extraHeaders=None, request=None, content=None,
                        content_type=None):
        # Render `rsrc` against a (possibly newly created) fake request and
        # return the request's deferred result.
        if not request:
            request = self.make_request(path, method=method)
            if accept:
                request.input_headers[b'accept'] = accept
            if origin:
                request.input_headers[b'origin'] = origin
            if access_control_request_method:
                request.input_headers[b'access-control-request-method'] = \
                    access_control_request_method
            if extraHeaders is not None:
                request.input_headers.update(extraHeaders)
            if content_type is not None:
                request.input_headers.update({b'content-type': content_type})
                request.content = BytesIO(content)

        rv = rsrc.render(request)
        if rv != server.NOT_DONE_YET:
            if rv is not None:
                request.write(rv)
            request.finish()
        return request.deferred

    @defer.inlineCallbacks
    def render_control_resource(self, rsrc, path=b'/', params=None,
                                requestJson=None, action="notfound", id=None,
                                content_type=b'application/json'):
        # pass *either* a request or postpath
        if params is None:
            params = {}
        id = id or self.UUID
        request = self.make_request(path)
        request.method = b"POST"
        request.content = StringIO(requestJson or json.dumps(
            {"jsonrpc": "2.0", "method": action, "params": params, "id": id}))
        request.input_headers = {b'content-type': content_type}
        rv = rsrc.render(request)
        if rv == server.NOT_DONE_YET:
            rv = yield request.deferred
        res = json.loads(bytes2unicode(rv))
        self.assertIn("jsonrpc", res)
        self.assertEqual(res["jsonrpc"], "2.0")
        if not requestJson:
            # requestJson is used for invalid requests, so don't expect ID
            self.assertIn("id", res)
            self.assertEqual(res["id"], id)

    def assertRequest(self, content=None, contentJson=None, contentType=None,
                      responseCode=None, contentDisposition=None,
                      headers=None):
        # Compare selected aspects of the last rendered request against the
        # expected values; only the aspects passed in are checked.
        if headers is None:
            headers = {}
        got, exp = {}, {}
        if content is not None:
            got['content'] = self.request.written
            exp['content'] = content
        if contentJson is not None:
            got['contentJson'] = json.loads(
                bytes2unicode(self.request.written))
            exp['contentJson'] = contentJson
        if contentType is not None:
            got['contentType'] = self.request.headers[b'content-type']
            exp['contentType'] = [contentType]
        if responseCode is not None:
            got['responseCode'] = str(self.request.responseCode)
            exp['responseCode'] = str(responseCode)
        for header, value in headers.items():
            got[header] = self.request.headers.get(header)
            exp[header] = value
        self.assertEqual(got, exp)
buildbot-3.4.0/master/buildbot/util/000077500000000000000000000000001413250514000173715ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/util/__init__.py000066400000000000000000000350571413250514000215140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# Copyright Buildbot Team Members

import calendar
import datetime
import itertools
import json
import locale
import re
import sys
import textwrap
import time
from builtins import bytes
from urllib.parse import urlsplit
from urllib.parse import urlunsplit

import dateutil.tz

from twisted.python import reflect
from twisted.python.deprecate import deprecatedModuleAttribute
from twisted.python.versions import Version
from zope.interface import implementer

from buildbot.interfaces import IConfigured
from buildbot.util.giturlparse import giturlparse
from buildbot.util.misc import deferredLocked

from ._notifier import Notifier


def naturalSort(array):
    # Sort strings so embedded numbers compare numerically
    # ("foo10" sorts after "foo9").  Returns a new list.
    array = array[:]

    def try_int(s):
        try:
            return int(s)
        except ValueError:
            return s

    def key_func(item):
        return [try_int(s) for s in re.split(r'(\d+)', item)]
    # prepend integer keys to each element, sort them, then strip the keys
    keyed_array = sorted([(key_func(i), i) for i in array])
    array = [i[1] for i in keyed_array]
    return array


def flattened_iterator(l, types=(list, tuple)):
    """
    Generator for a list/tuple that potentially contains nested/lists/tuples
    of arbitrary nesting that returns every individual non-list/tuple element.
    In other words, [(5, 6, [8, 3]), 2, [2, 1, (3, 4)]] will yield
    5, 6, 8, 3, 2, 2, 1, 3, 4

    This is safe to call on something not a list/tuple - the original input
    is yielded.
    """
    if not isinstance(l, types):
        yield l
        return

    for element in l:
        for sub_element in flattened_iterator(element, types):
            yield sub_element


def flatten(l, types=(list, )):
    """
    Given a list/tuple that potentially contains nested lists/tuples of
    arbitrary nesting, flatten into a single dimension.  In other words, turn
    [(5, 6, [8, 3]), 2, [2, 1, (3, 4)]] into [5, 6, 8, 3, 2, 2, 1, 3, 4]

    This is safe to call on something not a list/tuple - the original input
    is returned as a list
    """
    # For backwards compatibility, this returned a list, not an iterable.
    # Changing to return an iterable could break things.
    if not isinstance(l, types):
        return l
    return list(flattened_iterator(l, types))


def now(_reactor=None):
    # Current time in seconds; prefers the (possibly fake) reactor clock
    # when one is supplied, which makes time controllable in tests.
    if _reactor and hasattr(_reactor, "seconds"):
        return _reactor.seconds()
    return time.time()


def formatInterval(eta):
    # Render a number of seconds as e.g. "2 hrs, 5 mins, 12 secs".
    eta_parts = []
    if eta > 3600:
        eta_parts.append("%d hrs" % (eta / 3600))
        eta %= 3600
    if eta > 60:
        eta_parts.append("%d mins" % (eta / 60))
        eta %= 60
    eta_parts.append("%d secs" % eta)
    return ", ".join(eta_parts)


def fuzzyInterval(seconds):
    """
    Convert time interval specified in seconds into fuzzy, human-readable form
    """
    if seconds <= 1:
        return "a moment"
    if seconds < 20:
        return "{:d} seconds".format(seconds)
    if seconds < 55:
        return "{:d} seconds".format(round(seconds / 10.) * 10)
    minutes = round(seconds / 60.)
    if minutes == 1:
        return "a minute"
    if minutes < 20:
        return "{:d} minutes".format(minutes)
    if minutes < 55:
        return "{:d} minutes".format(round(minutes / 10.) * 10)
    hours = round(minutes / 60.)
    if hours == 1:
        return "an hour"
    if hours < 24:
        return "{:d} hours".format(hours)
    days = (hours + 6) // 24
    if days == 1:
        return "a day"
    if days < 30:
        return "{:d} days".format(days)
    months = int((days + 10) / 30.5)
    if months == 1:
        return "a month"
    if months < 12:
        return "{} months".format(months)
    years = round(days / 365.25)
    if years == 1:
        return "a year"
    return "{} years".format(years)


@implementer(IConfigured)
class ComparableMixin:
    # Equality/ordering/hash based on the attributes named in the class's
    # (inheritance-accumulated) `compare_attrs` tuple.
    compare_attrs = ()

    class _None:
        # Sentinel distinguishing "attribute missing" from "attribute is
        # None" during comparisons.
        pass

    def __hash__(self):
        compare_attrs = []
        reflect.accumulateClassList(
            self.__class__, 'compare_attrs', compare_attrs)

        alist = [self.__class__] + \
            [getattr(self, name, self._None) for name in compare_attrs]
        return hash(tuple(map(str, alist)))

    def _cmp_common(self, them):
        # Return (isComparable, self_values, them_values); only instances of
        # exactly the same class are comparable.
        if type(self) != type(them):
            return (False, None, None)

        if self.__class__ != them.__class__:
            return (False, None, None)

        compare_attrs = []
        reflect.accumulateClassList(
            self.__class__, 'compare_attrs', compare_attrs)

        self_list = [getattr(self, name, self._None)
                     for name in compare_attrs]
        them_list = [getattr(them, name, self._None)
                     for name in compare_attrs]
        return (True, self_list, them_list)

    def __eq__(self, them):
        (isComparable, self_list, them_list) = self._cmp_common(them)
        if not isComparable:
            return False
        return self_list == them_list

    @staticmethod
    def isEquivalent(us, them):
        # Recursive equivalence check that tolerates ComparableMixin values
        # appearing on either side.
        if isinstance(them, ComparableMixin):
            them, us = us, them
        if isinstance(us, ComparableMixin):
            (isComparable, us_list, them_list) = us._cmp_common(them)
            if not isComparable:
                return False
            return all(ComparableMixin.isEquivalent(v, them_list[i])
                       for i, v in enumerate(us_list))
        return us == them

    def __ne__(self, them):
        (isComparable, self_list, them_list) = self._cmp_common(them)
        if not isComparable:
            return True
        return self_list != them_list

    def __lt__(self, them):
        (isComparable, self_list, them_list) = self._cmp_common(them)
        if not isComparable:
            return False
        return self_list < them_list

    def __le__(self, them):
        (isComparable, self_list, them_list) = self._cmp_common(them)
        if not isComparable:
            return False
        return self_list <= them_list

    def __gt__(self, them):
        (isComparable, self_list, them_list) = self._cmp_common(them)
        if not isComparable:
            return False
        return self_list > them_list

    def __ge__(self, them):
        (isComparable, self_list, them_list) = self._cmp_common(them)
        if not isComparable:
            return False
        return self_list >= them_list

    def getConfigDict(self):
        # IConfigured: expose compare_attrs values, hiding credentials.
        compare_attrs = []
        reflect.accumulateClassList(
            self.__class__, 'compare_attrs', compare_attrs)
        return {k: getattr(self, k)
                for k in compare_attrs
                if hasattr(self, k) and k not in ("passwd", "password")}


def diffSets(old, new):
    # Return (removed, added) between two iterables treated as sets.
    if not isinstance(old, set):
        old = set(old)
    if not isinstance(new, set):
        new = set(new)
    return old - new, new - old


# Remove potentially harmful characters from builder name if it is to be
# used as the build dir.
badchars_map = bytes.maketrans(b"\t !#$%&'()*+,./:;<=>?@[\\]^{|}~",
                               b"______________________________")


def safeTranslate(s):
    # Replace filesystem-unsafe characters with underscores; returns bytes.
    if isinstance(s, str):
        s = s.encode('utf8')
    return s.translate(badchars_map)


def none_or_str(x):
    # Coerce non-None values to str; pass None through untouched.
    if x is not None and not isinstance(x, str):
        return str(x)
    return x


def unicode2bytes(x, encoding='utf-8', errors='strict'):
    # Encode str to bytes; anything else is returned unchanged.
    if isinstance(x, str):
        x = x.encode(encoding, errors)
    return x


def bytes2unicode(x, encoding='utf-8', errors='strict'):
    # Decode bytes to str; str and None are passed through unchanged.
    if isinstance(x, (str, type(None))):
        return x
    return str(x, encoding, errors)


_hush_pyflakes = [json]

deprecatedModuleAttribute(
    Version("buildbot", 0, 9, 4),
    message="Use json from the standard library instead.",
    moduleName="buildbot.util",
    name="json",
)


def toJson(obj):
    # JSON default-hook: serialize datetimes as epoch seconds.
    if isinstance(obj, datetime.datetime):
        return datetime2epoch(obj)
    return None


# changes and schedulers consider None to be a legitimate name for a branch,
# which makes default function keyword arguments hard to handle. This value
# is always false.
class NotABranch:

    def __bool__(self):
        return False


NotABranch = NotABranch()

# time-handling methods

# this used to be a custom class; now it's just an instance of dateutil's class
UTC = dateutil.tz.tzutc()


def epoch2datetime(epoch):
    """Convert a UNIX epoch time to a datetime object, in the UTC timezone"""
    if epoch is not None:
        return datetime.datetime.fromtimestamp(epoch, tz=UTC)
    return None


def datetime2epoch(dt):
    """Convert a non-naive datetime object to a UNIX epoch timestamp"""
    if dt is not None:
        return calendar.timegm(dt.utctimetuple())
    return None


# TODO: maybe "merge" with formatInterval?
def human_readable_delta(start, end):
    """
    Return a string of human readable time delta.
    """
    start_date = datetime.datetime.fromtimestamp(start)
    end_date = datetime.datetime.fromtimestamp(end)
    delta = end_date - start_date

    result = []
    if delta.days > 0:
        result.append('%d days' % (delta.days,))
    if delta.seconds > 0:
        hours = int(delta.seconds / 3600)
        if hours > 0:
            result.append('%d hours' % (hours,))
        minutes = int((delta.seconds - hours * 3600) / 60)
        if minutes:
            result.append('%d minutes' % (minutes,))
        seconds = delta.seconds % 60
        if seconds > 0:
            result.append('%d seconds' % (seconds,))

    if result:
        return ', '.join(result)
    return 'super fast'


def makeList(input):
    # Normalize a value to a list: str -> [str], None -> [], iterable -> list.
    if isinstance(input, str):
        return [input]
    elif input is None:
        return []
    return list(input)


def in_reactor(f):
    """decorate a function by running it with maybeDeferred in a reactor"""
    def wrap(*args, **kwargs):
        from twisted.internet import reactor, defer
        result = []

        def _async():
            d = defer.maybeDeferred(f, *args, **kwargs)

            @d.addErrback
            def eb(f):
                f.printTraceback(file=sys.stderr)

            @d.addBoth
            def do_stop(r):
                result.append(r)
                reactor.stop()
        reactor.callWhenRunning(_async)
        reactor.run()
        return result[0]
    wrap.__doc__ = f.__doc__
    wrap.__name__ = f.__name__
    wrap._orig = f  # for tests
    return wrap


def string2boolean(str):
    # Map common boolean-ish byte strings to bool; raises KeyError on
    # anything unrecognized (callers rely on that).
    return {
        b'on': True,
        b'true': True,
        b'yes': True,
        b'1': True,
        b'off': False,
        b'false': False,
        b'no': False,
        b'0': False,
    }[str.lower()]


def asyncSleep(delay, reactor=None):
    # Return a Deferred that fires after `delay` seconds on the given (or
    # global) reactor, without blocking.
    from twisted.internet import defer
    from twisted.internet import reactor as internet_reactor

    if reactor is None:
        reactor = internet_reactor
    d = defer.Deferred()
    reactor.callLater(delay, d.callback, None)
    return d


def check_functional_environment(config):
    # Report a config error when the locale environment is broken enough
    # that Python cannot determine a default locale.
    try:
        locale.getdefaultlocale()
    except (KeyError, ValueError) as e:
        config.error("\n".join([
            "Your environment has incorrect locale settings. This means python cannot handle "
            "strings safely.",
            " Please check 'LANG', 'LC_CTYPE', 'LC_ALL' and 'LANGUAGE'"
            " are either unset or set to a valid locale.",
            str(e)
        ]))


_netloc_url_re = re.compile(r':[^@]*@')


def stripUrlPassword(url):
    # Replace any password in the URL's netloc with 'xxxx' for safe logging.
    parts = list(urlsplit(url))
    parts[1] = _netloc_url_re.sub(':xxxx@', parts[1])
    return urlunsplit(parts)


def join_list(maybeList):
    # Join a list/tuple of (possibly bytes) strings with spaces; pass other
    # values through bytes2unicode.
    if isinstance(maybeList, (list, tuple)):
        return ' '.join(bytes2unicode(s) for s in maybeList)
    return bytes2unicode(maybeList)


def command_to_string(command):
    # Produce a short human-readable summary ("'cmd arg ...'") of a shell
    # command which may be a string, bytes, nested lists, or renderables.
    words = command
    if isinstance(words, (bytes, str)):
        words = words.split()

    try:
        len(words)
    except (AttributeError, TypeError):
        # WithProperties and Property don't have __len__
        # For old-style classes instances AttributeError raised,
        # for new-style classes instances - TypeError.
        return None

    # flatten any nested lists
    words = flatten(words, (list, tuple))

    # strip instances and other detritus (which can happen if a
    # description is requested before rendering)
    stringWords = []
    for w in words:
        if isinstance(w, (bytes, str)):
            # If command was bytes, be gentle in
            # trying to covert it.
            w = bytes2unicode(w, errors="replace")
            stringWords.append(w)
    words = stringWords

    if not words:
        return None
    if len(words) < 3:
        rv = "'{}'".format(' '.join(words))
    else:
        rv = "'{} ...'".format(' '.join(words[:2]))

    return rv


def rewrap(text, width=None):
    """
    Rewrap text for output to the console.

    Removes common indentation and rewraps paragraphs according to the
    console width.

    Line feeds between paragraphs preserved.
    Formatting of paragraphs that starts with additional indentation
    preserved.
    """

    if width is None:
        width = 80

    # Remove common indentation.
    text = textwrap.dedent(text)

    def needs_wrapping(line):
        # Line always non-empty.
        return not line[0].isspace()

    # Split text by lines and group lines that comprise paragraphs.
    wrapped_text = ""
    for do_wrap, lines in itertools.groupby(text.splitlines(True),
                                            key=needs_wrapping):
        paragraph = ''.join(lines)

        if do_wrap:
            paragraph = textwrap.fill(paragraph, width)

        wrapped_text += paragraph

    return wrapped_text


def dictionary_merge(a, b):
    """merges dictionary b into a
    Like dict.update, but recursive
    """
    for key, value in b.items():
        if key in a and isinstance(a[key], dict) and isinstance(value, dict):
            dictionary_merge(a[key], b[key])
            continue
        a[key] = b[key]
    return a


__all__ = [
    'naturalSort', 'now', 'formatInterval', 'ComparableMixin',
    'safeTranslate', 'none_or_str',
    'NotABranch', 'deferredLocked', 'UTC',
    'diffSets', 'makeList', 'in_reactor', 'string2boolean',
    'check_functional_environment', 'human_readable_delta',
    'rewrap',
    'Notifier',
    "giturlparse",
]
buildbot-3.4.0/master/buildbot/util/_notifier.py000066400000000000000000000030141413250514000217210ustar00rootroot00000000000000# Copyright Buildbot Team Members
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. from twisted.internet.defer import Deferred class Notifier: def __init__(self): self._waiters = [] def wait(self): d = Deferred() self._waiters.append(d) return d def notify(self, result): if self._waiters: waiters, self._waiters = self._waiters, [] for waiter in waiters: waiter.callback(result) def __bool__(self): return bool(self._waiters) buildbot-3.4.0/master/buildbot/util/backoff.py000066400000000000000000000052311413250514000213370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import time from twisted.internet import defer from buildbot.util import asyncSleep class BackoffTimeoutExceededError(Exception): pass class ExponentialBackoffEngine: def __init__(self, start_seconds, multiplier, max_wait_seconds): if start_seconds < 0: raise ValueError('start_seconds cannot be negative') if multiplier < 0: raise ValueError('multiplier cannot be negative') if max_wait_seconds < 0: raise ValueError('max_wait_seconds cannot be negative') self.start_seconds = start_seconds self.multiplier = multiplier self.max_wait_seconds = max_wait_seconds self.on_success() def on_success(self): self.current_total_wait_seconds = 0 self.current_wait_seconds = self.start_seconds def wait_on_failure(self): raise NotImplementedError() def calculate_wait_on_failure_seconds(self): if self.current_total_wait_seconds >= self.max_wait_seconds: raise BackoffTimeoutExceededError() seconds = self.current_wait_seconds self.current_wait_seconds *= self.multiplier if self.current_total_wait_seconds + seconds < self.max_wait_seconds: self.current_total_wait_seconds += seconds else: seconds = self.max_wait_seconds - self.current_total_wait_seconds self.current_total_wait_seconds = self.max_wait_seconds return seconds class ExponentialBackoffEngineSync(ExponentialBackoffEngine): def wait_on_failure(self): seconds = self.calculate_wait_on_failure_seconds() time.sleep(seconds) class ExponentialBackoffEngineAsync(ExponentialBackoffEngine): def __init__(self, reactor, *args, **kwargs): super().__init__(*args, **kwargs) self.reactor = reactor @defer.inlineCallbacks def wait_on_failure(self): seconds = self.calculate_wait_on_failure_seconds() yield asyncSleep(seconds, reactor=self.reactor) buildbot-3.4.0/master/buildbot/util/bbcollections.py000066400000000000000000000025631413250514000225730ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # this is here for compatibility from collections import defaultdict assert defaultdict class KeyedSets: def __init__(self): self.d = dict() def add(self, key, value): if key not in self.d: self.d[key] = set() self.d[key].add(value) def discard(self, key, value): if key in self.d: self.d[key].discard(value) if not self.d[key]: del self.d[key] def __contains__(self, key): return key in self.d def __getitem__(self, key): return self.d.get(key, set()) def pop(self, key): if key in self.d: return self.d.pop(key) return set() buildbot-3.4.0/master/buildbot/util/codebase.py000066400000000000000000000032571413250514000215170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer class AbsoluteSourceStampsMixin: # record changes and revisions per codebase _lastCodebases = None @defer.inlineCallbacks def getCodebaseDict(self, codebase): assert self.codebases if self._lastCodebases is None: self._lastCodebases = yield self.getState('lastCodebases', {}) # may fail with KeyError return self._lastCodebases.get(codebase, self.codebases[codebase]) @defer.inlineCallbacks def recordChange(self, change): codebase = yield self.getCodebaseDict(change.codebase) lastChange = codebase.get('lastChange', -1) if change.number > lastChange: self._lastCodebases[change.codebase] = { 'repository': change.repository, 'branch': change.branch, 'revision': change.revision, 'lastChange': change.number } yield self.setState('lastCodebases', self._lastCodebases) buildbot-3.4.0/master/buildbot/util/config.py000066400000000000000000000044051413250514000212130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re from twisted.cred.checkers import FilePasswordDB from twisted.python.components import registerAdapter from zope.interface import implementer from buildbot.interfaces import IConfigured @implementer(IConfigured) class _DefaultConfigured: def __init__(self, value): self.value = value def getConfigDict(self): return self.value registerAdapter(_DefaultConfigured, object, IConfigured) @implementer(IConfigured) class _ListConfigured: def __init__(self, value): self.value = value def getConfigDict(self): return [IConfigured(e).getConfigDict() for e in self.value] registerAdapter(_ListConfigured, list, IConfigured) @implementer(IConfigured) class _DictConfigured: def __init__(self, value): self.value = value def getConfigDict(self): return {k: IConfigured(v).getConfigDict() for k, v in self.value.items()} registerAdapter(_DictConfigured, dict, IConfigured) @implementer(IConfigured) class _SREPatternConfigured: def __init__(self, value): self.value = value def getConfigDict(self): return dict(name="re", pattern=self.value.pattern) registerAdapter(_SREPatternConfigured, type(re.compile("")), IConfigured) @implementer(IConfigured) class ConfiguredMixin: def getConfigDict(self): return {'name': self.name} @implementer(IConfigured) class _FilePasswordDBConfigured: def __init__(self, value): pass def getConfigDict(self): return {'type': 'file'} registerAdapter(_FilePasswordDBConfigured, FilePasswordDB, IConfigured) buildbot-3.4.0/master/buildbot/util/croniter.py000066400000000000000000000241741413250514000216000ustar00rootroot00000000000000#!/usr/bin/python # Copied from croniter # https://github.com/taichino/croniter # Licensed under MIT license # Pyflakes warnings corrected # -*- coding: utf-8 -*- import re from datetime import datetime from time import mktime from time import time from dateutil.relativedelta import relativedelta search_re = re.compile(r'^([^-]+)-([^-/]+)(/(.*))?$') only_int_re = re.compile(r'^\d+$') any_int_re = 
re.compile(r'^\d+') star_or_int_re = re.compile(r'^(\d+|\*)$') __all__ = ('croniter',) class croniter: RANGES = ( (0, 59), (0, 23), (1, 31), (1, 12), (0, 6), (0, 59) ) DAYS = ( 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 ) ALPHACONV = ( {}, {}, {}, {'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6, 'jul': 7, 'aug': 8, 'sep': 9, 'oct': 10, 'nov': 11, 'dec': 12}, {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 0}, {} ) LOWMAP = ( {}, {}, {0: 1}, {0: 1}, {7: 0}, {}, ) bad_length = 'Exactly 5 or 6 columns has to be specified for iterator' \ 'expression.' def __init__(self, expr_format, start_time=time()): if isinstance(start_time, datetime): start_time = mktime(start_time.timetuple()) self.cur = start_time self.exprs = expr_format.split() if len(self.exprs) != 5 and len(self.exprs) != 6: raise ValueError(self.bad_length) expanded = [] for i, expr in enumerate(self.exprs): e_list = expr.split(',') res = [] while e_list: e = e_list.pop() t = re.sub(r'^\*(/.+)$', r'%d-%d\1' % (self.RANGES[i][0], self.RANGES[i][1]), str(e)) m = search_re.search(t) if m: (low, high, step) = m.group(1), m.group(2), m.group(4) or 1 if not any_int_re.search(low): low = self.ALPHACONV[i][low.lower()] if not any_int_re.search(high): high = self.ALPHACONV[i][high.lower()] if (not low or not high or int(low) > int(high) or not only_int_re.search(str(step))): raise ValueError("[{}] is not acceptable".format(expr_format)) for j in range(int(low), int(high) + 1): if j % int(step) == 0: e_list.append(j) else: if not star_or_int_re.search(t): t = self.ALPHACONV[i][t.lower()] try: t = int(t) except (ValueError, TypeError): pass if t in self.LOWMAP[i]: t = self.LOWMAP[i][t] if t != '*' and (int(t) < self.RANGES[i][0] or int(t) > self.RANGES[i][1]): raise ValueError("[{}] is not acceptable, out of range".format(expr_format)) res.append(t) res.sort() expanded.append( ['*'] if (len(res) == 1 and res[0] == '*') else res) self.expanded = expanded def get_next(self, 
ret_type=float): return self._get_next(ret_type, is_prev=False) def get_prev(self, ret_type=float): return self._get_next(ret_type, is_prev=True) def _get_next(self, ret_type=float, is_prev=False): expanded = self.expanded[:] if ret_type not in (float, datetime): raise TypeError("Invalid ret_type, only 'float' or 'datetime' " "is acceptable.") if expanded[2][0] != '*' and expanded[4][0] != '*': bak = expanded[4] expanded[4] = ['*'] t1 = self._calc(self.cur, expanded, is_prev) expanded[4] = bak expanded[2] = ['*'] t2 = self._calc(self.cur, expanded, is_prev) if not is_prev: result = t1 if t1 < t2 else t2 else: result = t1 if t1 > t2 else t2 else: result = self._calc(self.cur, expanded, is_prev) self.cur = result if ret_type == datetime: result = datetime.fromtimestamp(result) return result def _calc(self, now, expanded, is_prev): if is_prev: nearest_diff_method = self._get_prev_nearest_diff sign = -1 else: nearest_diff_method = self._get_next_nearest_diff sign = 1 offset = 1 if len(expanded) == 6 else 60 dst = now = datetime.fromtimestamp(now + sign * offset) # BUILDBOT: unused 'day' omitted due to pyflakes warning month, year = dst.month, dst.year current_year = now.year DAYS = self.DAYS def proc_month(d): if expanded[3][0] != '*': diff_month = nearest_diff_method(d.month, expanded[3], 12) days = DAYS[month - 1] if month == 2 and self.is_leap(year): days += 1 reset_day = days if is_prev else 1 if diff_month is not None and diff_month != 0: if is_prev: d += relativedelta(months=diff_month) else: d += relativedelta(months=diff_month, day=reset_day, hour=0, minute=0, second=0) return True, d return False, d def proc_day_of_month(d): if expanded[2][0] != '*': days = DAYS[month - 1] if month == 2 and self.is_leap(year): days += 1 diff_day = nearest_diff_method(d.day, expanded[2], days) if diff_day is not None and diff_day != 0: if is_prev: d += relativedelta(days=diff_day) else: d += relativedelta(days=diff_day, hour=0, minute=0, second=0) return True, d return False, d 
def proc_day_of_week(d): if expanded[4][0] != '*': diff_day_of_week = nearest_diff_method( d.isoweekday() % 7, expanded[4], 7) if diff_day_of_week is not None and diff_day_of_week != 0: if is_prev: d += relativedelta(days=diff_day_of_week) else: d += relativedelta(days=diff_day_of_week, hour=0, minute=0, second=0) return True, d return False, d def proc_hour(d): if expanded[1][0] != '*': diff_hour = nearest_diff_method(d.hour, expanded[1], 24) if diff_hour is not None and diff_hour != 0: if is_prev: d += relativedelta(hours=diff_hour) else: d += relativedelta(hours=diff_hour, minute=0, second=0) return True, d return False, d def proc_minute(d): if expanded[0][0] != '*': diff_min = nearest_diff_method(d.minute, expanded[0], 60) if diff_min is not None and diff_min != 0: if is_prev: d += relativedelta(minutes=diff_min) else: d += relativedelta(minutes=diff_min, second=0) return True, d return False, d def proc_second(d): if len(expanded) == 6: if expanded[5][0] != '*': diff_sec = nearest_diff_method(d.second, expanded[5], 60) if diff_sec is not None and diff_sec != 0: d += relativedelta(seconds=diff_sec) return True, d else: d += relativedelta(second=0) return False, d if is_prev: procs = [proc_second, proc_minute, proc_hour, proc_day_of_week, proc_day_of_month, proc_month] else: procs = [proc_month, proc_day_of_month, proc_day_of_week, proc_hour, proc_minute, proc_second] while abs(year - current_year) <= 1: next = False for proc in procs: (changed, dst) = proc(dst) if changed: next = True break if next: continue return mktime(dst.timetuple()) raise("failed to find prev date") def _get_next_nearest(self, x, to_check): small = [item for item in to_check if item < x] large = [item for item in to_check if item >= x] large.extend(small) return large[0] def _get_prev_nearest(self, x, to_check): small = [item for item in to_check if item <= x] large = [item for item in to_check if item > x] small.reverse() large.reverse() small.extend(large) return small[0] def 
_get_next_nearest_diff(self, x, to_check, range_val): for i, d in enumerate(to_check): if d >= x: return d - x return to_check[0] - x + range_val def _get_prev_nearest_diff(self, x, to_check, range_val): candidates = to_check[:] candidates.reverse() for d in candidates: if d <= x: return d - x return (candidates[0]) - x - range_val def is_leap(self, year): return year % 400 == 0 or (year % 4 == 0 and year % 100 != 0) if __name__ == '__main__': base = datetime(2010, 1, 25) itr = croniter('0 0 1 * *', base) n1 = itr.get_next(datetime) print(n1) buildbot-3.4.0/master/buildbot/util/debounce.py000066400000000000000000000073721413250514000215400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import functools from twisted.internet import defer from twisted.python import log # debounce phases PH_IDLE = 0 PH_WAITING = 1 PH_RUNNING = 2 PH_RUNNING_QUEUED = 3 class Debouncer: __slots__ = ['phase', 'timer', 'wait', 'function', 'stopped', 'completeDeferreds', 'get_reactor'] def __init__(self, wait, function, get_reactor): # time to wait self.wait = wait # zero-argument callable to invoke self.function = function # current phase self.phase = PH_IDLE # Twisted timer for waiting self.timer = None # true if this instance is stopped self.stopped = False # deferreds to fire when the call is complete self.completeDeferreds = None # for tests self.get_reactor = get_reactor def __call__(self): if self.stopped: return phase = self.phase if phase == PH_IDLE: self.timer = self.get_reactor().callLater(self.wait, self.invoke) self.phase = PH_WAITING elif phase == PH_RUNNING: self.phase = PH_RUNNING_QUEUED else: # phase == PH_WAITING or phase == PH_RUNNING_QUEUED: pass def __repr__(self): return "" % (self.function, self.wait, self.phase) def invoke(self): self.phase = PH_RUNNING self.completeDeferreds = [] d = defer.maybeDeferred(self.function) d.addErrback(log.err, 'from debounced function:') @d.addCallback def retry(_): queued = self.phase == PH_RUNNING_QUEUED self.phase = PH_IDLE while self.completeDeferreds: self.completeDeferreds.pop(0).callback(None) if queued: self.__call__() def start(self): self.stopped = False def stop(self): self.stopped = True if self.phase == PH_WAITING: self.timer.cancel() self.invoke() # fall through with PH_RUNNING if self.phase in (PH_RUNNING, PH_RUNNING_QUEUED): d = defer.Deferred() self.completeDeferreds.append(d) return d return defer.succeed(None) class _Descriptor: def __init__(self, fn, wait, attrName, get_reactor): self.fn = fn self.wait = wait self.attrName = attrName self.get_reactor = get_reactor def __get__(self, instance, cls): try: db = getattr(instance, self.attrName) except AttributeError: 
db = Debouncer(self.wait, functools.partial(self.fn, instance), functools.partial(self.get_reactor, instance)) setattr(instance, self.attrName, db) return db def _get_reactor_from_master(o): return o.master.reactor def method(wait, get_reactor=_get_reactor_from_master): def wrap(fn): stateName = "__debounce_" + fn.__name__ + "__" return _Descriptor(fn, wait, stateName, get_reactor) return wrap buildbot-3.4.0/master/buildbot/util/deferwaiter.py000066400000000000000000000067601413250514000222550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from twisted.python import log from buildbot.util import Notifier class DeferWaiter: """ This class manages a set of Deferred objects and allows waiting for their completion """ def __init__(self): self._waited = {} self._finish_notifier = Notifier() def _finished(self, result, d): # most likely nothing is consuming the errors, so do it here if isinstance(result, failure.Failure): log.err(result) self._waited.pop(id(d)) if not self._waited: self._finish_notifier.notify(None) return result def add(self, d): if not isinstance(d, defer.Deferred): return None self._waited[id(d)] = d d.addBoth(self._finished, d) return d def cancel(self): for d in list(self._waited.values()): d.cancel() self._waited.clear() def has_waited(self): return bool(self._waited) @defer.inlineCallbacks def wait(self): if not self._waited: return yield self._finish_notifier.wait() class RepeatedActionHandler: """ This class handles a repeated action such as submitting keepalive requests. It integrates with DeferWaiter to correctly control shutdown of such process. 
""" def __init__(self, reactor, waiter, interval, action, start_timer_after_action_completes=False): self._reactor = reactor self._waiter = waiter self._interval = interval self._action = action self._enabled = False self._timer = None self._start_timer_after_action_completes = start_timer_after_action_completes def setInterval(self, interval): self._interval = interval def start(self): if self._enabled: return self._enabled = True self._start_timer() def stop(self): if not self._enabled: return self._enabled = False if self._timer and self._timer.active(): self._timer.cancel() self._timer = None def _start_timer(self): self._timer = self._reactor.callLater(self._interval, self._handle_timeout) @defer.inlineCallbacks def _do_action(self): try: yield self._action() except Exception as e: log.err(e, 'Got exception in RepeatedActionHandler') def _handle_timeout(self): self._waiter.add(self._handle_action()) @defer.inlineCallbacks def _handle_action(self): if self._start_timer_after_action_completes: yield self._do_action() if self._enabled: self._start_timer() if not self._start_timer_after_action_completes: yield self._do_action() buildbot-3.4.0/master/buildbot/util/eventual.py000066400000000000000000000053271413250514000215750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # # copied from foolscap from twisted.internet import defer from twisted.internet import reactor from twisted.python import log class _SimpleCallQueue: _reactor = reactor def __init__(self): self._events = [] self._flushObservers = [] self._timer = None self._in_turn = False def append(self, cb, args, kwargs): self._events.append((cb, args, kwargs)) if not self._timer: self._timer = self._reactor.callLater(0, self._turn) def _turn(self): self._timer = None self._in_turn = True # flush all the messages that are currently in the queue. If anything # gets added to the queue while we're doing this, those events will # be put off until the next turn. events, self._events = self._events, [] for cb, args, kwargs in events: try: cb(*args, **kwargs) except Exception: log.err() self._in_turn = False if self._events and not self._timer: self._timer = self._reactor.callLater(0, self._turn) if not self._events: observers, self._flushObservers = self._flushObservers, [] for o in observers: o.callback(None) def flush(self): if not self._events and not self._in_turn: return defer.succeed(None) d = defer.Deferred() self._flushObservers.append(d) return d _theSimpleQueue = _SimpleCallQueue() def eventually(cb, *args, **kwargs): _theSimpleQueue.append(cb, args, kwargs) def fireEventually(value=None): d = defer.Deferred() eventually(d.callback, value) return d def flushEventualQueue(_ignored=None): return _theSimpleQueue.flush() def _setReactor(r=None): # This sets the reactor used to schedule future events to r. If r is None # (the default), the reactor is reset to its default value. # This should only be used for unit tests. if r is None: r = reactor _theSimpleQueue._reactor = r buildbot-3.4.0/master/buildbot/util/git.py000066400000000000000000000305351413250514000205340ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from pkg_resources import parse_version from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process.properties import Properties from buildbot.util import bytes2unicode RC_SUCCESS = 0 def getSshArgsForKeys(keyPath, knownHostsPath): args = ['-o', 'BatchMode=yes'] if keyPath is not None: args += ['-i', keyPath] if knownHostsPath is not None: args += ['-o', 'UserKnownHostsFile={0}'.format(knownHostsPath)] return args def escapeShellArgIfNeeded(arg): if re.match(r"^[a-zA-Z0-9_-]+$", arg): return arg return '"{0}"'.format(arg) def getSshCommand(keyPath, knownHostsPath): command = ['ssh'] + getSshArgsForKeys(keyPath, knownHostsPath) command = [escapeShellArgIfNeeded(arg) for arg in command] return ' '.join(command) class GitMixin: def setupGit(self, logname=None): if logname is None: logname = 'GitMixin' if self.sshHostKey is not None and self.sshPrivateKey is None: config.error('{}: sshPrivateKey must be provided in order use sshHostKey'.format( logname)) if self.sshKnownHosts is not None and self.sshPrivateKey is None: config.error('{}: sshPrivateKey must be provided in order use sshKnownHosts'.format( logname)) if self.sshHostKey is not None and self.sshKnownHosts 
is not None: config.error('{}: only one of sshKnownHosts and sshHostKey can be provided'.format( logname)) self.gitInstalled = False self.supportsBranch = False self.supportsProgress = False self.supportsSubmoduleForce = False self.supportsSubmoduleCheckout = False self.supportsSshPrivateKeyAsEnvOption = False self.supportsSshPrivateKeyAsConfigOption = False self.supportsFilters = False def parseGitFeatures(self, version_stdout): match = re.match(r"^git version (\d+(\.\d+)*)", version_stdout) if not match: return version = parse_version(match.group(1)) self.gitInstalled = True if version >= parse_version("1.6.5"): self.supportsBranch = True if version >= parse_version("1.7.2"): self.supportsProgress = True if version >= parse_version("1.7.6"): self.supportsSubmoduleForce = True if version >= parse_version("1.7.8"): self.supportsSubmoduleCheckout = True if version >= parse_version("2.3.0"): self.supportsSshPrivateKeyAsEnvOption = True if version >= parse_version("2.10.0"): self.supportsSshPrivateKeyAsConfigOption = True if version >= parse_version("2.27.0"): self.supportsFilters = True def adjustCommandParamsForSshPrivateKey(self, command, env, keyPath, sshWrapperPath=None, knownHostsPath=None): ssh_command = getSshCommand(keyPath, knownHostsPath) if self.supportsSshPrivateKeyAsConfigOption: command.append('-c') command.append('core.sshCommand={0}'.format(ssh_command)) elif self.supportsSshPrivateKeyAsEnvOption: env['GIT_SSH_COMMAND'] = ssh_command else: if sshWrapperPath is None: raise Exception('Only SSH wrapper script is supported but path ' 'not given') env['GIT_SSH'] = sshWrapperPath def getSshWrapperScriptContents(keyPath, knownHostsPath=None): ssh_command = getSshCommand(keyPath, knownHostsPath) # note that this works on windows if using git with MINGW embedded. 
return '#!/bin/sh\n{0} "$@"\n'.format(ssh_command) def getSshKnownHostsContents(hostKey): host_name = '*' return '{0} {1}'.format(host_name, hostKey) class GitStepMixin(GitMixin): def setupGitStep(self): self.didDownloadSshPrivateKey = False self.setupGit(logname='Git') if not self.repourl: config.error("Git: must provide repourl.") def _isSshPrivateKeyNeededForGitCommand(self, command): if not command or self.sshPrivateKey is None: return False gitCommandsThatNeedSshKey = [ 'clone', 'submodule', 'fetch', 'push' ] if command[0] in gitCommandsThatNeedSshKey: return True return False def _getSshDataPath(self): # we can't use the workdir for temporary ssh-related files, because # it's needed when cloning repositories and git does not like the # destination directory being non-empty. We have to use separate # temporary directory for that data to ensure the confidentiality of it. # So instead of # '{path}/{to}/{workerbuilddir}/{workdir}/.buildbot-ssh-key' # we put the key in # '{path}/{to}/.{workerbuilddir}.{workdir}.buildbot/ssh-key'. 
# basename and dirname interpret the last element being empty for paths # ending with a slash path_module = self.build.path_module workerbuilddir = bytes2unicode(self.build.builder.config.workerbuilddir) workdir = self._getSshDataWorkDir().rstrip('/\\') if path_module.isabs(workdir): parent_path = path_module.dirname(workdir) else: parent_path = path_module.join(self.worker.worker_basedir, path_module.dirname(workdir)) basename = '.{0}.{1}.buildbot'.format(workerbuilddir, path_module.basename(workdir)) return path_module.join(parent_path, basename) def _getSshPrivateKeyPath(self, ssh_data_path): return self.build.path_module.join(ssh_data_path, 'ssh-key') def _getSshHostKeyPath(self, ssh_data_path): return self.build.path_module.join(ssh_data_path, 'ssh-known-hosts') def _getSshWrapperScriptPath(self, ssh_data_path): return self.build.path_module.join(ssh_data_path, 'ssh-wrapper.sh') def _adjustCommandParamsForSshPrivateKey(self, full_command, full_env): ssh_data_path = self._getSshDataPath() key_path = self._getSshPrivateKeyPath(ssh_data_path) ssh_wrapper_path = self._getSshWrapperScriptPath(ssh_data_path) host_key_path = None if self.sshHostKey is not None or self.sshKnownHosts is not None: host_key_path = self._getSshHostKeyPath(ssh_data_path) self.adjustCommandParamsForSshPrivateKey(full_command, full_env, key_path, ssh_wrapper_path, host_key_path) @defer.inlineCallbacks def _dovccmd(self, command, abandonOnFailure=True, collectStdout=False, initialStdin=None): full_command = ['git'] full_env = self.env.copy() if self.env else {} if self.config is not None: for name, value in self.config.items(): full_command.append('-c') full_command.append('{}={}'.format(name, value)) if self._isSshPrivateKeyNeededForGitCommand(command): self._adjustCommandParamsForSshPrivateKey(full_command, full_env) full_command.extend(command) # check for the interruptSignal flag sigtermTime = None interruptSignal = None # If possible prefer to send a SIGTERM to git before we send a 
SIGKILL. # If we send a SIGKILL, git is prone to leaving around stale lockfiles. # By priming it with a SIGTERM first we can ensure that it has a chance to shut-down # gracefully before getting terminated if not self.workerVersionIsOlderThan("shell", "2.16"): # git should shut-down quickly on SIGTERM. If it doesn't don't let it # stick around for too long because this is on top of any timeout # we have hit. sigtermTime = 1 else: # Since sigtermTime is unavailable try to just use SIGTERM by itself instead of # killing. This should be safe. if self.workerVersionIsOlderThan("shell", "2.15"): log.msg( "NOTE: worker does not allow master to specify " "interruptSignal. This may leave a stale lockfile around " "if the command is interrupted/times out\n") else: interruptSignal = 'TERM' cmd = remotecommand.RemoteShellCommand(self.workdir, full_command, env=full_env, logEnviron=self.logEnviron, timeout=self.timeout, sigtermTime=sigtermTime, interruptSignal=interruptSignal, collectStdout=collectStdout, initialStdin=initialStdin) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if abandonOnFailure and cmd.didFail(): log.msg("Source step failed while running command {}".format(cmd)) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout return cmd.rc @defer.inlineCallbacks def checkFeatureSupport(self): stdout = yield self._dovccmd(['--version'], collectStdout=True) self.parseGitFeatures(stdout) return self.gitInstalled @defer.inlineCallbacks def _downloadSshPrivateKeyIfNeeded(self): if self.sshPrivateKey is None: return RC_SUCCESS p = Properties() p.master = self.master private_key = yield p.render(self.sshPrivateKey) host_key = yield p.render(self.sshHostKey) known_hosts_contents = yield p.render(self.sshKnownHosts) # not using self.workdir because it may be changed depending on step # options workdir = self._getSshDataWorkDir() ssh_data_path = self._getSshDataPath() yield self.runMkdir(ssh_data_path) private_key_path = 
self._getSshPrivateKeyPath(ssh_data_path) yield self.downloadFileContentToWorker(private_key_path, private_key, workdir=workdir, mode=0o400) known_hosts_path = None if self.sshHostKey is not None or self.sshKnownHosts is not None: known_hosts_path = self._getSshHostKeyPath(ssh_data_path) if self.sshHostKey is not None: known_hosts_contents = getSshKnownHostsContents(host_key) yield self.downloadFileContentToWorker(known_hosts_path, known_hosts_contents, workdir=workdir, mode=0o400) if not self.supportsSshPrivateKeyAsEnvOption: script_path = self._getSshWrapperScriptPath(ssh_data_path) script_contents = getSshWrapperScriptContents(private_key_path, known_hosts_path) yield self.downloadFileContentToWorker(script_path, script_contents, workdir=workdir, mode=0o700) self.didDownloadSshPrivateKey = True return RC_SUCCESS @defer.inlineCallbacks def _removeSshPrivateKeyIfNeeded(self): if not self.didDownloadSshPrivateKey: return RC_SUCCESS yield self.runRmdir(self._getSshDataPath()) return RC_SUCCESS buildbot-3.4.0/master/buildbot/util/giturlparse.py000066400000000000000000000032401413250514000223030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from collections import namedtuple # The regex is matching more than it should and is not intended to be an url validator. 
# It is intended to efficiently and reliably extract information from the various examples # that are described in the unit tests. _giturlmatcher = re.compile( r'(?P(https?://|ssh://|git://|))' r'((?P.*)@)?' r'(?P[^\/:]+)(:((?P[0-9]+)/)?|/)' r'((?P.+)/)?(?P[^/]+?)(\.git)?$') GitUrl = namedtuple('GitUrl', ['proto', 'user', 'domain', 'port', 'owner', 'repo']) def giturlparse(url): res = _giturlmatcher.match(url) if res is None: return None port = res.group("port") if port is not None: port = int(port) proto = res.group("proto") if proto: proto = proto[:-3] else: proto = 'ssh' # implicit proto is ssh return GitUrl(proto, res.group('user'), res.group("domain"), port, res.group('owner'), res.group('repo')) buildbot-3.4.0/master/buildbot/util/httpclientservice.py000066400000000000000000000172271413250514000235130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can) # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json as jsonmodule import textwrap from twisted.internet import defer from twisted.web.client import Agent from twisted.web.client import HTTPConnectionPool from zope.interface import implementer from buildbot import config from buildbot.interfaces import IHttpResponse from buildbot.util import service from buildbot.util import toJson from buildbot.util import unicode2bytes from buildbot.util.logger import Logger try: import txrequests except ImportError: txrequests = None try: import treq implementer(IHttpResponse)(treq.response._Response) except ImportError: treq = None log = Logger() @implementer(IHttpResponse) class TxRequestsResponseWrapper: def __init__(self, res): self._res = res def content(self): return defer.succeed(self._res.content) def json(self): return defer.succeed(self._res.json()) @property def code(self): return self._res.status_code @property def url(self): return self._res.url class HTTPClientService(service.SharedService): """A SharedService class that can make http requests to remote services. I can use either txrequests or treq, depending on what I find installed I provide minimal get/post/put/delete API with automatic baseurl joining, and json data encoding that is suitable for use from buildbot services. """ TREQ_PROS_AND_CONS = textwrap.dedent(""" txrequests is based on requests and is probably a bit more mature, but it requires threads to run, so has more overhead. treq is better integrated in twisted and is more and more feature equivalent txrequests is 2.8x slower than treq due to the use of threads. 
http://treq.readthedocs.io/en/latest/#feature-parity-w-requests pip install txrequests or pip install treq """) # Those could be in theory be overridden in master.cfg by using # import buildbot.util.httpclientservice.HTTPClientService.PREFER_TREQ = True # We prefer at the moment keeping it simple PREFER_TREQ = False MAX_THREADS = 5 def __init__(self, base_url, auth=None, headers=None, verify=None, debug=False, skipEncoding=False): assert not base_url.endswith( "/"), "baseurl should not end with /: " + base_url super().__init__() self._base_url = base_url self._auth = auth self._headers = headers self._pool = None self._session = None self.verify = verify self.debug = debug self.skipEncoding = skipEncoding def updateHeaders(self, headers): if self._headers is None: self._headers = {} self._headers.update(headers) @staticmethod def checkAvailable(from_module): """Call me at checkConfig time to properly report config error if neither txrequests or treq is installed """ if txrequests is None and treq is None: config.error(("neither txrequests nor treq is installed, but {} is " "requiring it\n\n{}").format(from_module, HTTPClientService.TREQ_PROS_AND_CONS)) def startService(self): # treq only supports basicauth, so we force txrequests if the auth is # something else if self._auth is not None and not isinstance(self._auth, tuple): self.PREFER_TREQ = False if txrequests is not None and not self.PREFER_TREQ: self._session = txrequests.Session() self._doRequest = self._doTxRequest elif treq is None: raise ImportError("{classname} requires either txrequest or treq install." 
" Users should call {classname}.checkAvailable() during checkConfig()" " to properly alert the user.".format( classname=self.__class__.__name__)) else: self._doRequest = self._doTReq self._pool = HTTPConnectionPool(self.master.reactor) self._pool.maxPersistentPerHost = self.MAX_THREADS self._agent = Agent(self.master.reactor, pool=self._pool) return super().startService() @defer.inlineCallbacks def stopService(self): if self._session: yield self._session.close() if self._pool: yield self._pool.closeCachedConnections() yield super().stopService() def _prepareRequest(self, ep, kwargs): assert ep == "" or ep.startswith("/"), "ep should start with /: " + ep url = self._base_url + ep if self._auth is not None and 'auth' not in kwargs: kwargs['auth'] = self._auth headers = kwargs.get('headers', {}) if self._headers is not None: headers.update(self._headers) kwargs['headers'] = headers # we manually do the json encoding in order to automatically convert timestamps # for txrequests and treq json = kwargs.pop('json', None) if isinstance(json, (dict, list)): jsonStr = jsonmodule.dumps(json, default=toJson) kwargs['headers']['Content-Type'] = 'application/json' if self.skipEncoding: kwargs['data'] = jsonStr else: jsonBytes = unicode2bytes(jsonStr) kwargs['data'] = jsonBytes return url, kwargs @defer.inlineCallbacks def _doTxRequest(self, method, ep, **kwargs): url, kwargs = yield self._prepareRequest(ep, kwargs) if self.debug: log.debug("http {url} {kwargs}", url=url, kwargs=kwargs) def readContent(session, res): # this forces reading of the content inside the thread res.content if self.debug: log.debug("==> {code}: {content}", code=res.status_code, content=res.content) return res # read the whole content in the thread kwargs['background_callback'] = readContent if self.verify is False: kwargs['verify'] = False res = yield self._session.request(method, url, **kwargs) return IHttpResponse(TxRequestsResponseWrapper(res)) @defer.inlineCallbacks def _doTReq(self, method, ep, 
**kwargs): url, kwargs = yield self._prepareRequest(ep, kwargs) # treq requires header values to be an array kwargs['headers'] = {k: [v] for k, v in kwargs['headers'].items()} kwargs['agent'] = self._agent res = yield getattr(treq, method)(url, **kwargs) return IHttpResponse(res) # lets be nice to the auto completers, and don't generate that code def get(self, ep, **kwargs): return self._doRequest('get', ep, **kwargs) def put(self, ep, **kwargs): return self._doRequest('put', ep, **kwargs) def delete(self, ep, **kwargs): return self._doRequest('delete', ep, **kwargs) def post(self, ep, **kwargs): return self._doRequest('post', ep, **kwargs) buildbot-3.4.0/master/buildbot/util/identifiers.py000066400000000000000000000037461413250514000222620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re from buildbot import util ident_re = re.compile('^[a-zA-Z\u00a0-\U0010ffff_-][a-zA-Z0-9\u00a0-\U0010ffff_-]*$', flags=re.UNICODE) initial_re = re.compile('^[^a-zA-Z_-]') subsequent_re = re.compile('[^a-zA-Z0-9_-]') trailing_digits_re = re.compile('_([0-9]+)$') def isIdentifier(maxLength, obj): if not isinstance(obj, str): return False elif not ident_re.match(obj): return False elif not obj or len(obj) > maxLength: return False return True def forceIdentifier(maxLength, s): if not isinstance(s, str): raise TypeError("%r cannot be coerced to an identifier" % (str,)) # usually bytes2unicode can handle it s = util.bytes2unicode(s) if isIdentifier(maxLength, s): return s # trim to length and substitute out invalid characters s = s[:maxLength] s = initial_re.sub('_', s) s = subsequent_re.subn('_', s)[0] return s def incrementIdentifier(maxLength, ident): num = 1 mo = trailing_digits_re.search(ident) if mo: ident = ident[:mo.start(1) - 1] num = int(mo.group(1)) num = '_%d' % (num + 1) if len(num) > maxLength: raise ValueError("cannot generate a larger identifier") ident = ident[:maxLength - len(num)] + num return ident buildbot-3.4.0/master/buildbot/util/kubeclientservice.py000066400000000000000000000233161413250514000234560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import abc import base64 import os import time from twisted.internet import defer from twisted.internet import reactor from twisted.internet.error import ProcessExitedAlready from twisted.python.failure import Failure from buildbot import config from buildbot.util import asyncSleep from buildbot.util.httpclientservice import HTTPClientService from buildbot.util.logger import Logger from buildbot.util.protocol import LineProcessProtocol from buildbot.util.service import BuildbotService log = Logger() # this is a BuildbotService, so that it can be started and destroyed. # this is needed to implement kubectl proxy lifecycle class KubeConfigLoaderBase(BuildbotService): name = "KubeConfig" @abc.abstractmethod def getConfig(self): """ @return dictionary with optional params { 'master_url': 'https://kube_master.url', 'namespace': 'default_namespace', 'headers' { 'Authentication': XXX } # todo (quite hard to implement with treq): 'cert': 'optional client certificate used to connect to ssl' 'verify': 'kube master certificate authority to use to connect' } """ def getAuthorization(self): return None def __str__(self): """return unique str for SharedService""" # hash is implemented from ComparableMixin return "{}({})".format(self.__class__.__name__, hash(self)) class KubeHardcodedConfig(KubeConfigLoaderBase): def reconfigService(self, master_url=None, bearerToken=None, basicAuth=None, headers=None, cert=None, verify=None, namespace="default"): self.config = {'master_url': master_url, 'namespace': namespace, 'headers': {}} if headers is not None: self.config['headers'] = headers if basicAuth and bearerToken: raise Exception("set one of basicAuth and bearerToken, not both") self.basicAuth = basicAuth self.bearerToken = bearerToken if cert is not None: self.config['cert'] = cert if verify is not None: self.config['verify'] = verify checkConfig = reconfigService @defer.inlineCallbacks def getAuthorization(self): if self.basicAuth is not None: 
basicAuth = yield self.renderSecrets(self.basicAuth) authstring = "{user}:{password}".format(**basicAuth).encode('utf-8') encoded = base64.b64encode(authstring) return "Basic {0}".format(encoded) if self.bearerToken is not None: bearerToken = yield self.renderSecrets(self.bearerToken) return "Bearer {0}".format(bearerToken) return None def getConfig(self): return self.config class KubeCtlProxyConfigLoader(KubeConfigLoaderBase): """ We use kubectl proxy to connect to kube master. Parsing the config and setting up SSL is complex. So for now, we use kubectl proxy to load the config and connect to master. This will run the kube proxy as a subprocess, and return configuration with http://localhost:PORT """ kube_ctl_proxy_cmd = ['kubectl', 'proxy'] # for tests override class LocalPP(LineProcessProtocol): def __init__(self): super().__init__() self.got_output_deferred = defer.Deferred() self.terminated_deferred = defer.Deferred() self.first_line = b"" def outLineReceived(self, line): if not self.got_output_deferred.called: self.got_output_deferred.callback(line) def errLineReceived(self, line): if not self.got_output_deferred.called: self.got_output_deferred.errback(Failure(RuntimeError(line))) def processEnded(self, status): super().processEnded(status) self.terminated_deferred.callback(None) def checkConfig(self, proxy_port=8001, namespace="default"): self.pp = None self.process = None @defer.inlineCallbacks def ensureSubprocessKilled(self): if self.pp is not None: try: self.process.signalProcess("TERM") except ProcessExitedAlready: pass # oh well yield self.pp.terminated_deferred @defer.inlineCallbacks def reconfigService(self, proxy_port=8001, namespace="default"): self.proxy_port = proxy_port self.namespace = namespace yield self.ensureSubprocessKilled() self.pp = self.LocalPP() self.process = reactor.spawnProcess( self.pp, self.kube_ctl_proxy_cmd[0], self.kube_ctl_proxy_cmd + ["-p", str(self.proxy_port)], env=None) self.kube_proxy_output = yield 
self.pp.got_output_deferred def stopService(self): return self.ensureSubprocessKilled() def getConfig(self): return { 'master_url': "http://localhost:{}".format(self.proxy_port), 'namespace': self.namespace } class KubeInClusterConfigLoader(KubeConfigLoaderBase): kube_dir = '/var/run/secrets/kubernetes.io/serviceaccount/' kube_namespace_file = os.path.join(kube_dir, 'namespace') kube_token_file = os.path.join(kube_dir, 'token') kube_cert_file = os.path.join(kube_dir, 'ca.crt') def checkConfig(self): if not os.path.exists(self.kube_dir): config.error( "Not in kubernetes cluster (kube_dir not found: {})".format( self.kube_dir)) def reconfigService(self): self.config = {} self.config['master_url'] = os.environ['KUBERNETES_PORT'].replace( 'tcp', 'https') self.config['verify'] = self.kube_cert_file with open(self.kube_token_file, encoding="utf-8") as token_content: token = token_content.read().strip() self.config['headers'] = { 'Authorization': 'Bearer {0}'.format(token) } with open(self.kube_namespace_file, encoding="utf-8") as namespace_content: self.config['namespace'] = namespace_content.read().strip() def getConfig(self): return self.config class KubeError(RuntimeError): def __init__(self, response_json): super().__init__(response_json['message']) self.json = response_json self.reason = response_json.get('reason') class KubeClientService(HTTPClientService): def __init__(self, kube_config=None): self.config = kube_config super().__init__('') self._namespace = None kube_config.setServiceParent(self) @defer.inlineCallbacks def _prepareRequest(self, ep, kwargs): config = self.config.getConfig() self._base_url = config['master_url'] url, req_kwargs = super()._prepareRequest(ep, kwargs) if 'headers' not in req_kwargs: req_kwargs['headers'] = {} if 'headers' in config: req_kwargs['headers'].update(config['headers']) auth = yield self.config.getAuthorization() if auth is not None: req_kwargs['headers']['Authorization'] = auth # warning: this only works with txrequests! 
not treq for arg in ['cert', 'verify']: if arg in config: req_kwargs[arg] = config[arg] return (url, req_kwargs) @defer.inlineCallbacks def createPod(self, namespace, spec): url = '/api/v1/namespaces/{namespace}/pods'.format(namespace=namespace) res = yield self.post(url, json=spec) res_json = yield res.json() if res.code not in (200, 201, 202): raise KubeError(res_json) return res_json @defer.inlineCallbacks def deletePod(self, namespace, name, graceperiod=0): url = '/api/v1/namespaces/{namespace}/pods/{name}'.format( namespace=namespace, name=name) res = yield self.delete(url, params={'graceperiod': graceperiod}) res_json = yield res.json() if res.code != 200: raise KubeError(res_json) return res_json @defer.inlineCallbacks def waitForPodDeletion(self, namespace, name, timeout): t1 = time.time() url = '/api/v1/namespaces/{namespace}/pods/{name}/status'.format( namespace=namespace, name=name) while True: if time.time() - t1 > timeout: raise TimeoutError( "Did not see pod {name} terminate after {timeout}s".format( name=name, timeout=timeout)) res = yield self.get(url) res_json = yield res.json() if res.code == 404: break # 404 means the pod has terminated if res.code != 200: raise KubeError(res_json) yield asyncSleep(1) return res_json @property def namespace(self): if self._namespace is None: self._namespace = self.config.getConfig()['namespace'] return self._namespace buildbot-3.4.0/master/buildbot/util/latent.py000066400000000000000000000036521413250514000212400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy from twisted.internet import defer class CompatibleLatentWorkerMixin: builds_may_be_incompatible = True _actual_build_props = None def renderWorkerProps(self, build): # Deriving classes should implement this method to render and return # a Deferred that will have all properties that are needed to start a # worker as its result. The Deferred should result in data that can # be copied via copy.deepcopy # # During actual startup, renderWorkerPropsOnStart should be called # which will invoke renderWorkerProps, store a copy of the results for # later comparison and return them. raise NotImplementedError() @defer.inlineCallbacks def renderWorkerPropsOnStart(self, build): props = yield self.renderWorkerProps(build) self._actual_build_props = copy.deepcopy(props) return props def resetWorkerPropsOnStop(self): self._actual_build_props = None @defer.inlineCallbacks def isCompatibleWithBuild(self, build): if self._actual_build_props is None: return True requested_props = yield self.renderWorkerProps(build) return requested_props == self._actual_build_props buildbot-3.4.0/master/buildbot/util/lineboundaries.py000066400000000000000000000060651413250514000227550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.internet import defer from buildbot.util.logger import Logger log = Logger() class LineBoundaryFinder: __slots__ = ['partialLine', 'callback', 'warned'] # split at reasonable line length. # too big lines will fill master's memory, and slow down the UI too much. MAX_LINELENGTH = 4096 # the lookahead here (`(?=.)`) ensures that `\r` doesn't match at the end # of the buffer # we also convert cursor control sequence to newlines # and ugly \b+ (use of backspace to implement progress bar) newline_re = re.compile(r'(\r\n|\r(?=.)|\033\[u|\033\[[0-9]+;[0-9]+[Hf]|\033\[2J|\x08+)') def __init__(self, callback): self.partialLine = None self.callback = callback self.warned = False def append(self, text): if self.partialLine: if len(self.partialLine) > self.MAX_LINELENGTH: if not self.warned: # Unfortunately we cannot give more hint as per which log that is log.warn("Splitting long line: {line_start} {length} " "(not warning anymore for this log)", line_start=self.partialLine[:30], length=len(self.partialLine)) self.warned = True # switch the variables, and return previous _partialLine_, # split every MAX_LINELENGTH plus a trailing \n self.partialLine, text = text, self.partialLine ret = [] while len(text) > self.MAX_LINELENGTH: ret.append(text[:self.MAX_LINELENGTH]) text = text[self.MAX_LINELENGTH:] ret.append(text) return self.callback("\n".join(ret) + "\n") text = self.partialLine + text self.partialLine = None text = self.newline_re.sub('\n', text) if text: if text[-1] != '\n': i = text.rfind('\n') if i >= 0: i = i + 1 text, self.partialLine = text[:i], text[i:] else: self.partialLine = text return defer.succeed(None) return self.callback(text) return defer.succeed(None) def flush(self): if self.partialLine: 
return self.append('\n') return defer.succeed(None) buildbot-3.4.0/master/buildbot/util/logger.py000066400000000000000000000025711413250514000212270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members try: from twisted.logger import Logger except ImportError: from twisted.python import log class Logger: """A simplistic backporting of the new logger system for old versions of twisted""" def _log(self, format, *args, **kwargs): log.msg(format.format(args, **kwargs)) # legacy logging system do not support log level. # We don't bother inventing something. If needed, user can upgrade debug = _log info = _log warn = _log error = _log critical = _log def failure(self, format, failure, *args, **kwargs): log.error(failure, format.format(args, **kwargs)) __all__ = ["Logger"] buildbot-3.4.0/master/buildbot/util/lru.py000066400000000000000000000160331413250514000205500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from collections import defaultdict from collections import deque from itertools import filterfalse from weakref import WeakValueDictionary from twisted.internet import defer from twisted.python import log class LRUCache: """ A least-recently-used cache, with a fixed maximum size. See buildbot manual for more information. """ __slots__ = ('max_size max_queue miss_fn queue cache weakrefs ' 'refcount hits refhits misses'.split()) sentinel = object() QUEUE_SIZE_FACTOR = 10 def __init__(self, miss_fn, max_size=50): self.max_size = max_size self.max_queue = max_size * self.QUEUE_SIZE_FACTOR self.queue = deque() self.cache = {} self.weakrefs = WeakValueDictionary() self.hits = self.misses = self.refhits = 0 self.refcount = defaultdict(lambda: 0) self.miss_fn = miss_fn def put(self, key, value): cached = key in self.cache or key in self.weakrefs self.cache[key] = value self.weakrefs[key] = value self._ref_key(key) if not cached: self._purge() def get(self, key, **miss_fn_kwargs): try: return self._get_hit(key) except KeyError: pass self.misses += 1 result = self.miss_fn(key, **miss_fn_kwargs) if result is not None: self.cache[key] = result self.weakrefs[key] = result self._ref_key(key) self._purge() return result def keys(self): return list(self.cache) def set_max_size(self, max_size): if self.max_size == max_size: return self.max_size = max_size self.max_queue = max_size * self.QUEUE_SIZE_FACTOR self._purge() def inv(self): global inv_failed # the keys of the queue and cache should be identical cache_keys = set(self.cache.keys()) queue_keys = set(self.queue) if queue_keys - cache_keys: log.msg("INV: uncached keys in queue:", queue_keys - cache_keys) inv_failed = True if 
cache_keys - queue_keys: log.msg("INV: unqueued keys in cache:", cache_keys - queue_keys) inv_failed = True # refcount should always represent the number of times each key appears # in the queue exp_refcount = dict() for k in self.queue: exp_refcount[k] = exp_refcount.get(k, 0) + 1 if exp_refcount != self.refcount: log.msg("INV: refcounts differ:") log.msg(" expected:", sorted(exp_refcount.items())) log.msg(" got:", sorted(self.refcount.items())) inv_failed = True def _ref_key(self, key): """Record a reference to the argument key.""" queue = self.queue refcount = self.refcount queue.append(key) refcount[key] = refcount[key] + 1 # periodically compact the queue by eliminating duplicate keys # while preserving order of most recent access. Note that this # is only required when the cache does not exceed its maximum # size if len(queue) > self.max_queue: refcount.clear() queue_appendleft = queue.appendleft queue_appendleft(self.sentinel) for k in filterfalse(refcount.__contains__, iter(queue.pop, self.sentinel)): queue_appendleft(k) refcount[k] = 1 def _get_hit(self, key): """Try to do a value lookup from the existing cache entries.""" try: result = self.cache[key] self.hits += 1 self._ref_key(key) return result except KeyError: pass result = self.weakrefs[key] self.refhits += 1 self.cache[key] = result self._ref_key(key) return result def _purge(self): """ Trim the cache down to max_size by evicting the least-recently-used entries. 
""" if len(self.cache) <= self.max_size: return cache = self.cache refcount = self.refcount queue = self.queue max_size = self.max_size # purge least recently used entries, using refcount to count entries # that appear multiple times in the queue while len(cache) > max_size: refc = 1 while refc: k = queue.popleft() refc = refcount[k] = refcount[k] - 1 del cache[k] del refcount[k] class AsyncLRUCache(LRUCache): """ An LRU cache with asynchronous locking to ensure that in the common case of multiple concurrent requests for the same key, only one fetch is performed. """ __slots__ = ['concurrent'] def __init__(self, miss_fn, max_size=50): super().__init__(miss_fn, max_size=max_size) self.concurrent = {} def get(self, key, **miss_fn_kwargs): try: result = self._get_hit(key) return defer.succeed(result) except KeyError: pass concurrent = self.concurrent conc = concurrent.get(key) if conc: self.hits += 1 d = defer.Deferred() conc.append(d) return d # if we're here, we've missed and need to fetch self.misses += 1 # create a list of waiting deferreds for this key d = defer.Deferred() assert key not in concurrent concurrent[key] = [d] miss_d = self.miss_fn(key, **miss_fn_kwargs) def handle_result(result): if result is not None: self.cache[key] = result self.weakrefs[key] = result # reference the key once, possibly standing in for multiple # concurrent accesses self._ref_key(key) self._purge() # and fire all of the waiting Deferreds dlist = concurrent.pop(key) for d in dlist: d.callback(result) def handle_failure(f): # errback all of the waiting Deferreds dlist = concurrent.pop(key) for d in dlist: d.errback(f) miss_d.addCallbacks(handle_result, handle_failure) miss_d.addErrback(log.err) return d # for tests inv_failed = False buildbot-3.4.0/master/buildbot/util/maildir.py000066400000000000000000000140441413250514000213670ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ This is a class which watches a maildir for new messages. It uses the linux dirwatcher API (if available) to look for new files. The .messageReceived method is invoked with the filename of the new message, relative to the top of the maildir (so it will look like "new/blahblah"). """ import os from twisted.application import internet from twisted.internet import defer from twisted.internet import reactor # We have to put it here, since we use it to provide feedback from twisted.python import log from twisted.python import runtime from buildbot.util import service dnotify = None try: import dnotify except ImportError: log.msg("unable to import dnotify, so Maildir will use polling instead") class NoSuchMaildir(Exception): pass class MaildirService(service.BuildbotService): pollinterval = 10 # only used if we don't have DNotify name = 'MaildirService' def __init__(self, basedir=None): super().__init__() if basedir: self.setBasedir(basedir) self.files = [] self.dnotify = None self.timerService = None def setBasedir(self, basedir): # some users of MaildirService (scheduler.Try_Jobdir, in particular) # don't know their basedir until setServiceParent, since it is # relative to the buildmaster's basedir. So let them set it late. We # don't actually need it until our own startService. 
self.basedir = basedir self.newdir = os.path.join(self.basedir, "new") self.curdir = os.path.join(self.basedir, "cur") @defer.inlineCallbacks def startService(self): if not os.path.isdir(self.newdir) or not os.path.isdir(self.curdir): raise NoSuchMaildir("invalid maildir '{}'".format(self.basedir)) try: if dnotify: # we must hold an fd open on the directory, so we can get # notified when it changes. self.dnotify = dnotify.DNotify(self.newdir, self.dnotify_callback, [dnotify.DNotify.DN_CREATE]) except (IOError, OverflowError): # IOError is probably linux<2.4.19, which doesn't support # dnotify. OverflowError will occur on some 64-bit machines # because of a python bug log.msg("DNotify failed, falling back to polling") if not self.dnotify: self.timerService = internet.TimerService( self.pollinterval, self.poll) yield self.timerService.setServiceParent(self) self.poll() yield super().startService() def dnotify_callback(self): log.msg("dnotify noticed something, now polling") # give it a moment. I found that qmail had problems when the message # was removed from the maildir instantly. It shouldn't, that's what # maildirs are made for. I wasn't able to eyeball any reason for the # problem, and safecat didn't behave the same way, but qmail reports # "Temporary_error_on_maildir_delivery" (qmail-local.c:165, # maildir_child() process exited with rc not in 0,2,3,4). 
Not sure # why, and I'd have to hack qmail to investigate further, so it's # easier to just wait a second before yanking the message out of new/ reactor.callLater(0.1, self.poll) def stopService(self): if self.dnotify: self.dnotify.remove() self.dnotify = None if self.timerService is not None: self.timerService.disownServiceParent() self.timerService = None return super().stopService() @defer.inlineCallbacks def poll(self): try: assert self.basedir # see what's new for f in self.files: if not os.path.isfile(os.path.join(self.newdir, f)): self.files.remove(f) newfiles = [] for f in os.listdir(self.newdir): if f not in self.files: newfiles.append(f) self.files.extend(newfiles) for n in newfiles: try: yield self.messageReceived(n) except Exception: log.err(None, "while reading '{}' from maildir '{}':".format(n, self.basedir)) except Exception: log.err(None, "while polling maildir '{}':".format(self.basedir)) def moveToCurDir(self, filename): if runtime.platformType == "posix": # open the file before moving it, because I'm afraid that once # it's in cur/, someone might delete it at any moment path = os.path.join(self.newdir, filename) f = open(path, "r") os.rename(os.path.join(self.newdir, filename), os.path.join(self.curdir, filename)) elif runtime.platformType == "win32": # do this backwards under windows, because you can't move a file # that somebody is holding open. This was causing a Permission # Denied error on bear's win32-twisted1.3 worker. os.rename(os.path.join(self.newdir, filename), os.path.join(self.curdir, filename)) path = os.path.join(self.curdir, filename) f = open(path, "r") return f def messageReceived(self, filename): raise NotImplementedError buildbot-3.4.0/master/buildbot/util/misc.py000066400000000000000000000032441413250514000207010ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Miscellaneous utilities; these should be imported from C{buildbot.util}, not directly from this module. """ import os from twisted.internet import reactor def deferredLocked(lock_or_attr): def decorator(fn): def wrapper(*args, **kwargs): lock = lock_or_attr if isinstance(lock, str): lock = getattr(args[0], lock) return lock.run(fn, *args, **kwargs) return wrapper return decorator def cancelAfter(seconds, deferred, _reactor=reactor): delayedCall = _reactor.callLater(seconds, deferred.cancel) # cancel the delayedCall when the underlying deferred fires @deferred.addBoth def cancelTimer(x): if delayedCall.active(): delayedCall.cancel() return x return deferred def writeLocalFile(path, contents, mode=None): # pragma: no cover with open(path, 'w') as file: if mode is not None: os.chmod(path, mode) file.write(contents) buildbot-3.4.0/master/buildbot/util/netstrings.py000066400000000000000000000044611413250514000221500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet.interfaces import IAddress from twisted.internet.interfaces import ITransport from twisted.protocols import basic from zope.interface import implementer from buildbot.util import unicode2bytes @implementer(IAddress) class NullAddress: "an address for NullTransport" @implementer(ITransport) class NullTransport: "a do-nothing transport to make NetstringReceiver happy" def write(self, data): raise NotImplementedError def writeSequence(self, data): raise NotImplementedError def loseConnection(self): pass def getPeer(self): return NullAddress def getHost(self): return NullAddress class NetstringParser(basic.NetstringReceiver): """ Adapts the Twisted netstring support (which assumes it is on a socket) to work on simple strings, too. Call the C{feed} method with arbitrary blocks of data, and override the C{stringReceived} method to get called for each embedded netstring. The default implementation collects the netstrings in the list C{self.strings}. """ def __init__(self): # most of the complexity here is stubbing out the transport code so # that Twisted-10.2.0 and higher believes that this is a valid protocol self.makeConnection(NullTransport()) self.strings = [] def feed(self, data): data = unicode2bytes(data) self.dataReceived(data) # dataReceived handles errors unusually quietly! 
if self.brokenPeer: raise basic.NetstringParseError def stringReceived(self, string): self.strings.append(string) buildbot-3.4.0/master/buildbot/util/pathmatch.py000066400000000000000000000052531413250514000217210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re _ident_re = re.compile('^[a-zA-Z_-][.a-zA-Z0-9_-]*$') def ident(x): if _ident_re.match(x): return x raise TypeError class Matcher: def __init__(self): self._patterns = {} self._dirty = True def __setitem__(self, path, value): assert path not in self._patterns, "duplicate path {}".format(path) self._patterns[path] = value self._dirty = True def __repr__(self): return '' % (self._patterns,) path_elt_re = re.compile('^(.?):([a-z0-9_.]+)$') type_fns = dict(n=int, i=ident) def __getitem__(self, path): if self._dirty: self._compile() patterns = self._by_length.get(len(path), {}) for pattern in patterns: kwargs = {} for pattern_elt, path_elt in zip(pattern, path): mo = self.path_elt_re.match(pattern_elt) if mo: type_flag, arg_name = mo.groups() if type_flag: try: type_fn = self.type_fns[type_flag] except Exception: assert type_flag in self.type_fns, \ "no such type flag {}".format(type_flag) try: path_elt = type_fn(path_elt) except Exception: break kwargs[arg_name] = path_elt else: if pattern_elt != path_elt: break else: # complete 
match return patterns[pattern], kwargs else: raise KeyError('No match for %r' % (path,)) def iterPatterns(self): return list(self._patterns.items()) def _compile(self): self._by_length = {} for k, v in self.iterPatterns(): length = len(k) self._by_length.setdefault(length, {})[k] = v buildbot-3.4.0/master/buildbot/util/poll.py000066400000000000000000000122741413250514000207170ustar00rootroot00000000000000 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from random import randint from twisted.internet import defer from twisted.python import log _poller_instances = None class Poller: def __init__(self, fn, instance, reactor): self.fn = fn self.instance = instance self.running = False self.pending = False # Invariants: # - If self._call is not None or self._currently_executing then it is guaranteed that # self.pending and self._run_complete_deferreds will be handled at some point in the # future. # - If self._call is not None then _run will be executed at some point, but it's not being # executed now. 
self._currently_executing = False self._call = None self._next_call_time = None # valid when self._call is not None self._start_time = 0 self._interval = 0 self._random_delay_min = 0 self._random_delay_max = 0 self._run_complete_deferreds = [] self._reactor = reactor @defer.inlineCallbacks def _run(self): self._call = None self._currently_executing = True try: yield self.fn(self.instance) except Exception as e: log.err(e, 'while executing {}'.format(self.fn)) finally: self._currently_executing = False was_pending = self.pending self.pending = False if self.running: self._schedule(force_now=was_pending) while self._run_complete_deferreds: self._run_complete_deferreds.pop(0).callback(None) def _get_wait_time(self, curr_time, force_now=False, force_initial_now=False): if force_now: return 0 extra_wait = randint(self._random_delay_min, self._random_delay_max) if force_initial_now or self._interval == 0: return extra_wait # note that differently from twisted.internet.task.LoopingCall, we don't care about # floating-point precision issues as we don't have the withCount feature. running_time = curr_time - self._start_time return self._interval - (running_time % self._interval) + extra_wait def _schedule(self, force_now=False, force_initial_now=False): curr_time = self._reactor.seconds() wait_time = self._get_wait_time(curr_time, force_now=force_now, force_initial_now=force_initial_now) next_call_time = curr_time + wait_time if self._call is not None: # Note that self._call can ever be moved to earlier time, so we can always cancel it. 
self._call.cancel() self._next_call_time = next_call_time self._call = self._reactor.callLater(wait_time, self._run) def __call__(self): if not self.running: return if self._currently_executing: self.pending = True else: self._schedule(force_now=True) def start(self, interval, now=False, random_delay_min=0, random_delay_max=0): assert not self.running self._interval = interval self._random_delay_min = random_delay_min self._random_delay_max = random_delay_max self._start_time = self._reactor.seconds() self.running = True self._schedule(force_initial_now=now) @defer.inlineCallbacks def stop(self): self.running = False if self._call is not None: self._call.cancel() self._call = None if self._currently_executing: d = defer.Deferred() self._run_complete_deferreds.append(d) yield d class _Descriptor: def __init__(self, fn, attrName): self.fn = fn self.attrName = attrName def __get__(self, instance, cls): try: poller = getattr(instance, self.attrName) except AttributeError: poller = Poller(self.fn, instance, instance.master.reactor) setattr(instance, self.attrName, poller) # track instances when testing if _poller_instances is not None: _poller_instances.append((instance, self.attrName)) return poller def method(fn): stateName = "__poll_" + fn.__name__ + "__" return _Descriptor(fn, stateName) def track_poll_methods(): global _poller_instances _poller_instances = [] def reset_poll_methods(): global _poller_instances for instance, attrname in _poller_instances: # pylint: disable=not-an-iterable delattr(instance, attrname) _poller_instances = None buildbot-3.4.0/master/buildbot/util/private_tempdir.py000066400000000000000000000033531413250514000231450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import shutil import stat import tempfile class PrivateTemporaryDirectory: """ Works similarly to python 3.2+ TemporaryDirectory except the also sets the permissions of the created directory and Note, that Windows ignores the permissions. """ def __init__(self, suffix=None, prefix=None, dir=None, mode=0o700): self.name = tempfile.mkdtemp(suffix, prefix, dir) self.mode = mode self._cleanup_needed = True def __enter__(self): return self.name def __exit__(self, exc, value, tb): self.cleanup() def cleanup(self): if self._cleanup_needed: def remove_readonly(func, path, _): """ Workaround Permission Error on Windows if any files in path are read-only. See https://docs.python.org/3/library/shutil.html#rmtree-example """ os.chmod(path, stat.S_IWRITE) func(path) shutil.rmtree(self.name, onerror=remove_readonly) self._cleanup_needed = False buildbot-3.4.0/master/buildbot/util/protocol.py000066400000000000000000000050661413250514000216130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members from twisted.internet import protocol class LineBuffer: def __init__(self): self._buffer = b'' def add_data(self, data): # returns lines that have been processed, if any lines = (self._buffer + data).split(b'\n') self._buffer = lines.pop(-1) for l in lines: yield l.rstrip(b'\r') def get_trailing_line(self): if self._buffer: ret = [self._buffer] self._buffer = b'' return ret return [] class LineProcessProtocol(protocol.ProcessProtocol): def __init__(self): self._out_buffer = LineBuffer() self._err_buffer = LineBuffer() def outReceived(self, data): """ Translates bytes into lines, and calls outLineReceived. """ for line in self._out_buffer.add_data(data): self.outLineReceived(line) def errReceived(self, data): """ Translates bytes into lines, and calls errLineReceived. """ for line in self._err_buffer.add_data(data): self.errLineReceived(line) def processEnded(self, status): for line in self._out_buffer.get_trailing_line(): self.outLineReceived(line) for line in self._err_buffer.get_trailing_line(): self.errLineReceived(line) def outLineReceived(self, line): """ Callback to which stdout lines will be sent. Any line that is not terminated by a newline will be processed once the next line comes, or when processEnded is called. """ raise NotImplementedError def errLineReceived(self, line): """ Callback to which stdout lines will be sent. Any line that is not terminated by a newline will be processed once the next line comes, or when processEnded is called. """ raise NotImplementedError buildbot-3.4.0/master/buildbot/util/pullrequest.py000066400000000000000000000026701413250514000223350ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from fnmatch import fnmatch class PullRequestMixin: external_property_whitelist = [] external_property_denylist = [] def extractProperties(self, payload): def flatten(properties, base, info_dict): for k, v in info_dict.items(): name = ".".join([base, k]) if name in self.external_property_denylist: continue if isinstance(v, dict): flatten(properties, name, v) elif any([fnmatch(name, expr) for expr in self.external_property_whitelist]): properties[name] = v properties = {} flatten(properties, self.property_basename, payload) return properties buildbot-3.4.0/master/buildbot/util/queue.py000066400000000000000000000131021413250514000210640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members import queue import threading from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.util import backoff class UndoableQueue(queue.Queue): def unget(self, x): with self.mutex: self.queue.appendleft(x) class _TerminateRequest: pass class ConnectableThreadQueue(threading.Thread): """ This provides worker thread that is processing work given via execute_in_thread() method. The return value of the function submitted to execute_in_thread() is returned via Deferred. All work is performed in a "connection", which is established in create_connection() which is intended to be overridden by user. The user is expected to return an opaque connection object from create_connection(). create_connection() must not throw exceptions. The connection is from the user-side closed by calling close_connection(). The connection is passed as the first argument to the functions submitted to execute_in_thread(). When the thread is joined, it will execute all currently pending items and call on_close_connection() if needed to close the connection. Any work submitted after join() is called will be ignored. 
""" def __init__(self, connect_backoff_start_seconds=1, connect_backoff_multiplier=1.1, connect_backoff_max_wait_seconds=3600): self._queue = UndoableQueue() self._conn = None self._backoff_engine = \ backoff.ExponentialBackoffEngineSync(start_seconds=connect_backoff_start_seconds, multiplier=connect_backoff_multiplier, max_wait_seconds=connect_backoff_max_wait_seconds) super().__init__(daemon=True) self.connecting = False self.start() def join(self, *args, **kwargs): self.execute_in_thread(_TerminateRequest()) super().join(*args, **kwargs) def execute_in_thread(self, cb, *args, **kwargs): d = defer.Deferred() self._queue.put((d, cb, args, kwargs)) return d @property def conn(self): return self._conn def close_connection(self): self._conn = None self.connecting = False def on_close_connection(self, conn): # override to perform any additional connection closing tasks self.close_connection() def create_connection(self): # override to create a new connection raise NotImplementedError() def _handle_backoff(self, msg): # returns True if termination has been requested log.err(msg) try: self._backoff_engine.wait_on_failure() except backoff.BackoffTimeoutExceededError: self._backoff_engine.on_success() # reset the timers if self._drain_queue_with_exception(backoff.BackoffTimeoutExceededError(msg)): return True return False def _drain_queue_with_exception(self, e): # returns True if termination has been requested try: while True: result_d, next_operation, args, kwargs = self._queue.get(block=False) if isinstance(next_operation, _TerminateRequest): self._queue.task_done() reactor.callFromThread(result_d.callback, None) return True else: self._queue.task_done() reactor.callFromThread(result_d.errback, e) except queue.Empty: return False def run(self): while True: result_d, next_operation, args, kwargs = self._queue.get() if isinstance(next_operation, _TerminateRequest): self._queue.task_done() reactor.callFromThread(result_d.callback, None) break if not self._conn: 
self.connecting = True self._queue.unget((result_d, next_operation, args, kwargs)) try: self._conn = self.create_connection() self.connecting = False if self._conn is not None: self._backoff_engine.on_success() elif self._handle_backoff('Did not receive connection'): break except Exception as e: self.connecting = False if self._handle_backoff('Exception received: {}'.format(e)): break continue try: result = next_operation(self._conn, *args, **kwargs) reactor.callFromThread(result_d.callback, result) except Exception as e: reactor.callFromThread(result_d.errback, e) self._queue.task_done() if self._conn is not None: self.on_close_connection(self._conn) buildbot-3.4.0/master/buildbot/util/raml.py000066400000000000000000000104031413250514000206740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy import json import os import yaml try: from collections import OrderedDict except ImportError: # pragma: no cover from ordereddict import OrderedDict # minimalistic raml loader. 
Support !include tags, and mapping as OrderedDict class RamlLoader(yaml.SafeLoader): pass def construct_include(loader, node): path = os.path.join(os.path.dirname(loader.stream.name), node.value) with open(path) as f: return yaml.load(f, Loader=RamlLoader) def construct_mapping(loader, node): loader.flatten_mapping(node) return OrderedDict(loader.construct_pairs(node)) RamlLoader.add_constructor( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping) RamlLoader.add_constructor('!include', construct_include) class RamlSpec: """ This class loads the raml specification, and expose useful aspects of the spec Main usage for now is for the doc, but it can be extended to make sure raml spec matches other spec implemented in the tests """ def __init__(self): fn = os.path.join(os.path.dirname(__file__), os.pardir, 'spec', 'api.raml') with open(fn) as f: self.api = yaml.load(f, Loader=RamlLoader) with open(fn) as f: self.rawraml = f.read() endpoints = {} self.endpoints_by_type = {} self.rawendpoints = {} self.endpoints = self.parse_endpoints(endpoints, "", self.api) self.types = self.parse_types() def parse_endpoints(self, endpoints, base, api, uriParameters=None): if uriParameters is None: uriParameters = OrderedDict() for k, v in api.items(): if k.startswith("/"): ep = base + k p = copy.deepcopy(uriParameters) if v is not None: p.update(v.get("uriParameters", {})) v["uriParameters"] = p endpoints[ep] = v self.parse_endpoints(endpoints, ep, v, p) elif k in ['get', 'post']: if 'is' not in v: continue for _is in v['is']: if not isinstance(_is, dict): raise Exception('Unexpected "is" target {}: {}'.format(type(_is), _is)) if 'bbget' in _is: try: v['eptype'] = _is['bbget']['bbtype'] except TypeError as e: raise Exception('Unexpected "is" target {}'.format(_is['bbget'])) from e self.endpoints_by_type.setdefault(v['eptype'], {}) self.endpoints_by_type[v['eptype']][base] = api if 'bbgetraw' in _is: self.rawendpoints.setdefault(base, {}) self.rawendpoints[base] = api 
return endpoints def reindent(self, s, indent): return s.replace("\n", "\n" + " " * indent) def format_json(self, j, indent): j = json.dumps(j, indent=4).replace(", \n", ",\n") return self.reindent(j, indent) def parse_types(self): types = self.api['types'] return types def iter_actions(self, endpoint): ACTIONS_MAGIC = '/actions/' for k, v in endpoint.items(): if k.startswith(ACTIONS_MAGIC): k = k[len(ACTIONS_MAGIC):] v = v['post'] # simplify the raml tree for easier processing v['body'] = v['body']['application/json'].get('properties', {}) yield (k, v) buildbot-3.4.0/master/buildbot/util/runprocess.py000066400000000000000000000236421413250514000221550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import io import os import subprocess from twisted.internet import defer from twisted.internet import error from twisted.internet import protocol from twisted.python import failure from twisted.python import log from twisted.python import runtime from buildbot.util import unicode2bytes class RunProcessPP(protocol.ProcessProtocol): def __init__(self, run_process, initial_stdin=None): self.run_process = run_process self.initial_stdin = initial_stdin def connectionMade(self): if self.initial_stdin: self.transport.write(self.initial_stdin) self.transport.closeStdin() def outReceived(self, data): self.run_process.add_stdout(data) def errReceived(self, data): self.run_process.add_stderr(data) def processEnded(self, reason): self.run_process.process_ended(reason.value.signal, reason.value.exitCode) class RunProcess: TIMEOUT_KILL = 5 interrupt_signal = "KILL" def __init__(self, reactor, command, workdir=None, env=None, collect_stdout=True, collect_stderr=True, stderr_is_error=False, io_timeout=300, runtime_timeout=3600, sigterm_timeout=5, initial_stdin=None): self._reactor = reactor self.command = command self.workdir = workdir self.process = None self.environ = env self.initial_stdin = initial_stdin self.output_stdout = io.BytesIO() if collect_stdout else None self.output_stderr = io.BytesIO() if collect_stderr else None self.stderr_is_error = stderr_is_error self.io_timeout = io_timeout self.io_timer = None self.sigterm_timeout = sigterm_timeout self.sigterm_timer = None self.runtime_timeout = runtime_timeout self.runtime_timer = None self.killed = False self.kill_timer = None def __repr__(self): return "<{0} '{1}'>".format(self.__class__.__name__, self.command) def get_os_env(self): return os.environ def resolve_environment(self, env): os_env = self.get_os_env() if env is None: return os_env.copy() new_env = {} for key in os_env: if key not in env or env[key] is not None: new_env[key] = os_env[key] for key, value in env.items(): if 
value is not None: new_env[key] = value return new_env def start(self): self.deferred = defer.Deferred() try: self._start_command() except Exception as e: self.deferred.errback(failure.Failure(e)) return self.deferred def _start_command(self): self.pp = RunProcessPP(self, initial_stdin=self.initial_stdin) environ = self.resolve_environment(self.environ) # $PWD usually indicates the current directory; spawnProcess may not # update this value, though, so we set it explicitly here. This causes # weird problems (bug #456) on msys if not environ.get('MACHTYPE', None) == 'i686-pc-msys' and self.workdir is not None: environ['PWD'] = os.path.abspath(self.workdir) argv = unicode2bytes(self.command) self.process = self._reactor.spawnProcess(self.pp, argv[0], argv, environ, self.workdir) if self.io_timeout: self.io_timer = self._reactor.callLater(self.io_timeout, self.io_timed_out) if self.runtime_timeout: self.runtime_timer = self._reactor.callLater(self.runtime_timeout, self.runtime_timed_out) def add_stdout(self, data): if self.output_stdout is not None: self.output_stdout.write(data) if self.io_timer: self.io_timer.reset(self.io_timeout) def add_stderr(self, data): if self.output_stderr is not None: self.output_stderr.write(data) elif self.stderr_is_error: self.kill('command produced stderr which is interpreted as error') if self.io_timer: self.io_timer.reset(self.io_timeout) def _build_result(self, rc): if self.output_stdout is not None and self.output_stderr is not None: return (rc, self.output_stdout.getvalue(), self.output_stderr.getvalue()) if self.output_stdout is not None: return (rc, self.output_stdout.getvalue()) if self.output_stderr is not None: return (rc, self.output_stderr.getvalue()) return rc def process_ended(self, sig, rc): if self.killed and rc == 0: log.msg("process was killed, but exited with status 0; faking a failure") # windows returns '1' even for signalled failures, while POSIX returns -1 if runtime.platformType == 'win32': rc = 1 else: rc = -1 
if sig is not None: rc = -1 self._cancel_timers() d = self.deferred self.deferred = None if d: d.callback(self._build_result(rc)) else: log.err("{}: command finished twice".format(self)) def failed(self, why): self._cancel_timers() d = self.deferred self.deferred = None if d: d.errback(why) else: log.err("{}: command finished twice".format(self)) def io_timed_out(self): self.io_timer = None msg = "{}: command timed out: {} seconds without output".format(self, self.io_timeout) self.kill(msg) def runtime_timed_out(self): self.runtime_timer = None msg = "{}: command timed out: {} seconds elapsed".format(self, self.runtime_timeout) self.kill(msg) def is_dead(self): if self.process.pid is None: return True pid = int(self.process.pid) try: os.kill(pid, 0) except OSError: return True return False def check_process_was_killed(self): self.sigterm_timer = None if not self.is_dead(): if not self.send_signal(self.interrupt_signal): log.msg("{}: failed to kill process again".format(self)) self.cleanup_killed_process() def cleanup_killed_process(self): if runtime.platformType == "posix": # we only do this under posix because the win32eventreactor # blocks here until the process has terminated, while closing # stderr. This is weird. self.pp.transport.loseConnection() if self.deferred: # finished ought to be called momentarily. Just in case it doesn't, # set a timer which will abandon the command. 
self.kill_timer = self._reactor.callLater(self.TIMEOUT_KILL, self.kill_timed_out) def send_signal(self, interrupt_signal): success = False log.msg('{}: killing process using {}'.format(self, interrupt_signal)) if runtime.platformType == "win32": if interrupt_signal is not None and self.process.pid is not None: if interrupt_signal == "TERM": # TODO: blocks subprocess.check_call("TASKKILL /PID {0} /T".format(self.process.pid)) success = True elif interrupt_signal == "KILL": # TODO: blocks subprocess.check_call("TASKKILL /F /PID {0} /T".format(self.process.pid)) success = True # try signalling the process itself (works on Windows too, sorta) if not success: try: self.process.signalProcess(interrupt_signal) success = True except OSError as e: log.err("{}: from process.signalProcess: {}".format(self, e)) # could be no-such-process, because they finished very recently except error.ProcessExitedAlready: log.msg("{}: process exited already - can't kill".format(self)) # the process has already exited, and likely finished() has # been called already or will be called shortly return success def kill(self, msg): log.msg('{}: killing process because {}'.format(self, msg)) self._cancel_timers() self.killed = True if self.sigterm_timeout is not None: self.send_signal("TERM") self.sigterm_timer = self._reactor.callLater(self.sigterm_timeout, self.check_process_was_killed) else: if not self.send_signal(self.interrupt_signal): log.msg("{}: failed to kill process".format(self)) self.cleanup_killed_process() def kill_timed_out(self): self.kill_timer = None log.msg("{}: attempted to kill process, but it wouldn't die".format(self)) self.failed(RuntimeError("SIG{} failed to kill process".format(self.interrupt_signal))) def _cancel_timers(self): for name in ('io_timer', 'kill_timer', 'runtime_timer', 'sigterm_timer'): timer = getattr(self, name, None) if timer: timer.cancel() setattr(self, name, None) def run_process(*args, **kwargs): process = RunProcess(*args, **kwargs) return 
process.start() buildbot-3.4.0/master/buildbot/util/sautils.py000066400000000000000000000055551413250514000214410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from contextlib import contextmanager import sqlalchemy as sa from sqlalchemy.ext import compiler from sqlalchemy.sql.expression import ClauseElement from sqlalchemy.sql.expression import Executable # from http: # //www.sqlalchemy.org/docs/core/compiler.html#compiling-sub-elements-of-a-custom-expression-construct # noqa pylint: disable=line-too-long # _execution_options per # http://docs.sqlalchemy.org/en/rel_0_7/core/compiler.html#enabling-compiled-autocommit # (UpdateBase requires sqlalchemy 0.7.0) class InsertFromSelect(Executable, ClauseElement): _execution_options = \ Executable._execution_options.union({'autocommit': True}) def __init__(self, table, select): self.table = table self.select = select @compiler.compiles(InsertFromSelect) def _visit_insert_from_select(element, compiler, **kw): return "INSERT INTO {} {}".format(compiler.process(element.table, asfrom=True), compiler.process(element.select)) def sa_version(): if hasattr(sa, '__version__'): def tryint(s): try: return int(s) except (ValueError, TypeError): return -1 return tuple(map(tryint, sa.__version__.split('.'))) return (0, 0, 0) # "it's old" def Table(*args, 
**kwargs): """Wrap table creation to add any necessary dialect-specific options""" # work around the case where a database was created for us with # a non-utf8 character set (mysql's default) kwargs['mysql_character_set'] = 'utf8' return sa.Table(*args, **kwargs) @contextmanager def withoutSqliteForeignKeys(engine, connection=None): conn = connection if engine.dialect.name == 'sqlite': if conn is None: conn = engine.connect() # This context is not re-entrant. Ensure it. assert not getattr(engine, 'fk_disabled', False) engine.fk_disabled = True conn.execute('pragma foreign_keys=OFF') try: yield finally: if engine.dialect.name == 'sqlite': engine.fk_disabled = False conn.execute('pragma foreign_keys=ON') if connection is None: conn.close() buildbot-3.4.0/master/buildbot/util/service.py000066400000000000000000000474441413250514000214200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import hashlib from twisted.application import service from twisted.internet import defer from twisted.internet import task from twisted.python import log from twisted.python import reflect from twisted.python.reflect import accumulateClassList from buildbot import util from buildbot.util import bytes2unicode from buildbot.util import config from buildbot.util import unicode2bytes class ReconfigurableServiceMixin: reconfig_priority = 128 @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): if not service.IServiceCollection.providedBy(self): return # get a list of child services to reconfigure reconfigurable_services = [svc for svc in self if isinstance(svc, ReconfigurableServiceMixin)] # sort by priority reconfigurable_services.sort(key=lambda svc: -svc.reconfig_priority) for svc in reconfigurable_services: yield svc.reconfigServiceWithBuildbotConfig(new_config) # twisted 16's Service is now an new style class, better put everybody new style # to catch issues even on twisted < 16 class AsyncService(service.Service): # service.Service.setServiceParent does not wait for neither disownServiceParent nor addService # to complete @defer.inlineCallbacks def setServiceParent(self, parent): if self.parent is not None: yield self.disownServiceParent() parent = service.IServiceCollection(parent, parent) self.parent = parent yield self.parent.addService(self) # service.Service.disownServiceParent does not wait for removeService to complete before # setting parent to None @defer.inlineCallbacks def disownServiceParent(self): yield self.parent.removeService(self) self.parent = None # We recurse over the parent services until we find a MasterService @property def master(self): if self.parent is None: return None return self.parent.master class AsyncMultiService(AsyncService, service.MultiService): def startService(self): # Do NOT use super() here. 
# The method resolution order would cause MultiService.startService() to # be called which we explicitly want to override with this method. service.Service.startService(self) dl = [] # if a service attaches another service during the reconfiguration # then the service will be started twice, so we don't use iter, but rather # copy in a list for svc in list(self): # handle any deferreds, passing up errors and success dl.append(defer.maybeDeferred(svc.startService)) return defer.gatherResults(dl, consumeErrors=True) @defer.inlineCallbacks def stopService(self): # Do NOT use super() here. # The method resolution order would cause MultiService.stopService() to # be called which we explicitly want to override with this method. service.Service.stopService(self) services = list(self) services.reverse() dl = [] for svc in services: if not isinstance(svc, SharedService): dl.append(defer.maybeDeferred(svc.stopService)) # unlike MultiService, consume errors in each individual deferred, and # pass the first error in a child service up to our caller yield defer.gatherResults(dl, consumeErrors=True) for svc in services: if isinstance(svc, SharedService): yield svc.stopService() def addService(self, service): if service.name is not None: if service.name in self.namedServices: raise RuntimeError(("cannot have two services with same name" " '{}'").format(service.name)) self.namedServices[service.name] = service self.services.append(service) if self.running: # It may be too late for that, but we will do our best service.privilegedStartService() return service.startService() return defer.succeed(None) class MasterService(AsyncMultiService): # master service is the service that stops the master property recursion @property def master(self): return self class SharedService(AsyncMultiService): """a service that is created only once per parameter set in a parent service""" @classmethod @defer.inlineCallbacks def getService(cls, parent, *args, **kwargs): name = cls.getName(*args, **kwargs) 
if name in parent.namedServices: return parent.namedServices[name] instance = cls(*args, **kwargs) # The class is not required to initialized its name # but we use the name to identify the instance in the parent service # so we force it with the name we used instance.name = name yield instance.setServiceParent(parent) # we put the service on top of the list, so that it is stopped the last # This make sense as the shared service is used as a dependency # for other service parent.services.remove(instance) parent.services.insert(0, instance) # hook the return value to the instance object return instance @classmethod def getName(cls, *args, **kwargs): _hash = hashlib.sha1() for arg in args: arg = unicode2bytes(str(arg)) _hash.update(arg) for k, v in sorted(kwargs.items()): k = unicode2bytes(str(k)) v = unicode2bytes(str(v)) _hash.update(k) _hash.update(v) return cls.__name__ + "_" + _hash.hexdigest() class BuildbotService(AsyncMultiService, config.ConfiguredMixin, util.ComparableMixin, ReconfigurableServiceMixin): compare_attrs = ('name', '_config_args', '_config_kwargs') name = None configured = False objectid = None def __init__(self, *args, **kwargs): name = kwargs.pop("name", None) if name is not None: self.name = bytes2unicode(name) self.checkConfig(*args, **kwargs) if self.name is None: raise ValueError("{}: must pass a name to constructor".format(type(self))) self._config_args = args self._config_kwargs = kwargs self.rendered = False super().__init__() def getConfigDict(self): _type = type(self) return {'name': self.name, 'class': _type.__module__ + "." + _type.__name__, 'args': self._config_args, 'kwargs': self._config_kwargs} @defer.inlineCallbacks def reconfigServiceWithSibling(self, sibling): # only reconfigure if sibling is configured differently. 
# sibling == self is using ComparableMixin's implementation # only compare compare_attrs if self.configured and util.ComparableMixin.isEquivalent(sibling, self): return None self.configured = True # render renderables in parallel # Properties import to resolve cyclic import issue from buildbot.process.properties import Properties p = Properties() p.master = self.master # render renderables in parallel secrets = [] kwargs = {} accumulateClassList(self.__class__, 'secrets', secrets) for k, v in sibling._config_kwargs.items(): if k in secrets: # for non reconfigurable services, we force the attribute v = yield p.render(v) setattr(sibling, k, v) setattr(self, k, v) kwargs[k] = v d = yield self.reconfigService(*sibling._config_args, **kwargs) return d def canReconfigWithSibling(self, sibling): return reflect.qual(self.__class__) == reflect.qual(sibling.__class__) def configureService(self): # reconfigServiceWithSibling with self, means first configuration return self.reconfigServiceWithSibling(self) @defer.inlineCallbacks def startService(self): if not self.configured: try: yield self.configureService() except NotImplementedError: pass yield super().startService() def checkConfig(self, *args, **kwargs): return defer.succeed(True) def reconfigService(self, name=None, *args, **kwargs): return defer.succeed(None) def renderSecrets(self, *args): # Properties import to resolve cyclic import issue from buildbot.process.properties import Properties p = Properties() p.master = self.master if len(args) == 1: return p.render(args[0]) return defer.gatherResults([p.render(s) for s in args], consumeErrors=True) class ClusteredBuildbotService(BuildbotService): """ ClusteredBuildbotService-es are meant to be executed on a single master only. When starting such a service, by means of "yield startService", it will first try to claim it on the current master and: - return without actually starting it if it was already claimed by another master (self.active == False). 
It will however keep trying to claim it, in case another master stops, and takes the job back. - return after it starts else. """ compare_attrs = ('name',) POLL_INTERVAL_SEC = 5 * 60 # 5 minutes serviceid = None active = False def __init__(self, *args, **kwargs): self.serviceid = None self.active = False self._activityPollCall = None self._activityPollDeferred = None super().__init__(*args, **kwargs) # activity handling def isActive(self): return self.active def activate(self): # will run when this instance becomes THE CHOSEN ONE for the cluster return defer.succeed(None) def deactivate(self): # to be overridden by subclasses # will run when this instance loses its chosen status return defer.succeed(None) # service arbitration hooks def _getServiceId(self): # retrieve the id for this service; we assume that, once we have a valid id, # the id doesn't change. This may return a Deferred. raise NotImplementedError def _claimService(self): # Attempt to claim the service for this master. Should return True or False # (optionally via a Deferred) to indicate whether this master now owns the # service. raise NotImplementedError def _unclaimService(self): # Release the service from this master. This will only be called by a claimed # service, and this really should be robust and release the claim. May return # a Deferred. raise NotImplementedError # default implementation to delegate to the above methods @defer.inlineCallbacks def startService(self): # subclasses should override startService only to perform actions that should # run on all instances, even if they never get activated on this # master. yield super().startService() self._startServiceDeferred = defer.Deferred() self._startActivityPolling() yield self._startServiceDeferred @defer.inlineCallbacks def stopService(self): # subclasses should override stopService only to perform actions that should # run on all instances, even if they never get activated on this # master. 
self._stopActivityPolling() # need to wait for prior activations to finish if self._activityPollDeferred: yield self._activityPollDeferred if self.active: self.active = False try: yield self.deactivate() yield self._unclaimService() except Exception as e: msg = "Caught exception while deactivating ClusteredService({})".format(self.name) log.err(e, _why=msg) yield super().stopService() def _startActivityPolling(self): self._activityPollCall = task.LoopingCall(self._activityPoll) # plug in a clock if we have one, for tests if hasattr(self, 'clock'): self._activityPollCall.clock = self.clock d = self._activityPollCall.start(self.POLL_INTERVAL_SEC, now=True) self._activityPollDeferred = d # this should never happen, but just in case: d.addErrback(log.err, 'while polling for service activity:') def _stopActivityPolling(self): if self._activityPollCall: self._activityPollCall.stop() self._activityPollCall = None return self._activityPollDeferred return None def _callbackStartServiceDeferred(self): if self._startServiceDeferred is not None: self._startServiceDeferred.callback(None) self._startServiceDeferred = None @defer.inlineCallbacks def _activityPoll(self): try: # just in case.. if self.active: return if self.serviceid is None: self.serviceid = yield self._getServiceId() try: claimed = yield self._claimService() except Exception: msg = ('WARNING: ClusteredService({}) got exception while trying to claim' ).format(self.name) log.err(_why=msg) return if not claimed: # this master is not responsible # for this service, we callback for StartService # if it was not callback-ed already, # and keep polling to take back the service # if another one lost it self._callbackStartServiceDeferred() return try: # this master is responsible for this service # we activate it self.active = True yield self.activate() except Exception: # this service is half-active, and noted as such in the db.. 
msg = 'WARNING: ClusteredService({}) is only partially active'.format(self.name) log.err(_why=msg) finally: # cannot wait for its deactivation # with yield self._stopActivityPolling # as we're currently executing the # _activityPollCall callback # we just call it without waiting its stop # (that may open race conditions) self._stopActivityPolling() self._callbackStartServiceDeferred() except Exception: # don't pass exceptions into LoopingCall, which can cause it to # fail msg = 'WARNING: ClusteredService({}) failed during activity poll'.format(self.name) log.err(_why=msg) class BuildbotServiceManager(AsyncMultiService, config.ConfiguredMixin, ReconfigurableServiceMixin): config_attr = "services" name = "services" def getConfigDict(self): return {'name': self.name, 'childs': [v.getConfigDict() for v in self.namedServices.values()]} @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): # arrange childs by name old_by_name = self.namedServices old_set = set(old_by_name) new_config_attr = getattr(new_config, self.config_attr) if isinstance(new_config_attr, list): new_by_name = {s.name: s for s in new_config_attr} elif isinstance(new_config_attr, dict): new_by_name = new_config_attr else: raise TypeError("config.{} should be a list or dictionary".format(self.config_attr)) new_set = set(new_by_name) # calculate new childs, by name, and removed childs removed_names, added_names = util.diffSets(old_set, new_set) # find any children for which the old instance is not # able to do a reconfig with the new sibling # and add them to both removed and added, so that we # run the new version for n in old_set & new_set: old = old_by_name[n] new = new_by_name[n] # check if we are able to reconfig service if not old.canReconfigWithSibling(new): removed_names.add(n) added_names.add(n) if removed_names or added_names: log.msg("adding {} new {}, removing {}".format(len(added_names), self.config_attr, len(removed_names))) for n in removed_names: child = 
old_by_name[n] # disownServiceParent calls stopService after removing the relationship # as child might use self.master.data to stop itself, its better to stop it first # (this is related to the fact that self.master is found by recursively looking at # self.parent for a master) yield child.stopService() # it has already called, so do not call it again child.stopService = lambda: None yield child.disownServiceParent() for n in added_names: child = new_by_name[n] # setup service's objectid if hasattr(child, 'objectid'): class_name = '{}.{}'.format(child.__class__.__module__, child.__class__.__name__) objectid = yield self.master.db.state.getObjectId( child.name, class_name) child.objectid = objectid yield child.setServiceParent(self) # As the services that were just added got # reconfigServiceWithSibling called by # setServiceParent->startService, # we avoid calling it again by selecting # in reconfigurable_services, services # that were not added just now reconfigurable_services = [svc for svc in self if svc.name not in added_names] # sort by priority reconfigurable_services.sort(key=lambda svc: -svc.reconfig_priority) for svc in reconfigurable_services: if not svc.name: raise ValueError( "{}: child {} should have a defined name attribute".format(self, svc)) config_sibling = new_by_name.get(svc.name) try: yield svc.reconfigServiceWithSibling(config_sibling) except NotImplementedError: # legacy support. Its too painful to transition old code to new Service life cycle # so we implement switch of child when the service raises NotImplementedError # Note this means that self will stop, and sibling will take ownership # means that we have a small time where the service is unavailable. yield svc.disownServiceParent() config_sibling.objectid = svc.objectid yield config_sibling.setServiceParent(self) buildbot-3.4.0/master/buildbot/util/ssfilter.py000066400000000000000000000212471413250514000216040ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from buildbot.util import ComparableMixin from buildbot.util import NotABranch def is_re_pattern(obj): # re.Pattern only exists in Python 3.7 return hasattr(obj, 'search') and hasattr(obj, 'match') def extract_filter_values(values, filter_name): if not isinstance(values, (list, str)): raise ValueError("Values of filter {} must be list of strings or a string".format( filter_name)) if isinstance(values, str): values = [values] else: for value in values: if not isinstance(value, str): raise ValueError("Value of filter {} must be string".format(filter_name)) return values def extract_filter_values_branch(values, filter_name): if not isinstance(values, (list, str, type(None))): raise ValueError("Values of filter {} must be list of strings, a string or None".format( filter_name)) if isinstance(values, (str, type(None))): values = [values] else: for value in values: if not isinstance(value, (str, type(None))): raise ValueError("Value of filter {} must be string or None".format(filter_name)) return values def extract_filter_values_regex(values, filter_name): if not isinstance(values, (list, str)) and not is_re_pattern(values): raise ValueError("Values of filter {} must be list of strings, a string or regex".format( filter_name)) if isinstance(values, str) or is_re_pattern(values): values = [values] else: 
for value in values: if not isinstance(value, str) and not is_re_pattern(value): raise ValueError("Value of filter {} must be string or regex".format(filter_name)) return values class _FilterExactMatch: def __init__(self, values): self.values = values def is_matched(self, value): return value in self.values def describe(self, prop): return '{} in {}'.format(prop, self.values) class _FilterExactMatchInverse: def __init__(self, values): self.values = values def is_matched(self, value): return value not in self.values def describe(self, prop): return '{} not in {}'.format(prop, self.values) class _FilterRegex: def __init__(self, regexes): self.regexes = [self._compile(regex) for regex in regexes] def _compile(self, regex): if is_re_pattern(regex): return regex return re.compile(regex) def is_matched(self, value): if value is None: return False for regex in self.regexes: if regex.match(value) is not None: return True return False def describe(self, prop): return '{} matches {}'.format(prop, self.regexes) class _FilterRegexInverse: def __init__(self, regexes): self.regexes = [self._compile(regex) for regex in regexes] def _compile(self, regex): if is_re_pattern(regex): return regex return re.compile(regex) def is_matched(self, value): if value is None: return True for regex in self.regexes: if regex.match(value) is not None: return False return True def describe(self, prop): return '{} does not match {}'.format(prop, self.regexes) class SourceStampFilter(ComparableMixin): compare_attrs = ( 'filter_fn', 'project_filters', 'codebase_filters', 'repository_filters', 'branch_filters' ) def __init__(self, # gets a SourceStamp dictionary, returns boolean filter_fn=None, project_eq=None, project_not_eq=None, project_re=None, project_not_re=None, repository_eq=None, repository_not_eq=None, repository_re=None, repository_not_re=None, branch_eq=NotABranch, branch_not_eq=NotABranch, branch_re=None, branch_not_re=None, codebase_eq=None, codebase_not_eq=None, codebase_re=None, 
codebase_not_re=None): self.filter_fn = filter_fn self.project_filters = self.create_filters(project_eq, project_not_eq, project_re, project_not_re, 'project') self.codebase_filters = self.create_filters(codebase_eq, codebase_not_eq, codebase_re, codebase_not_re, 'codebase') self.repository_filters = self.create_filters(repository_eq, repository_not_eq, repository_re, repository_not_re, 'repository') self.branch_filters = self.create_branch_filters(branch_eq, branch_not_eq, branch_re, branch_not_re, 'branch') def create_branch_filters(self, eq, not_eq, regex, not_regex, filter_name): filters = [] if eq is not NotABranch: values = extract_filter_values_branch(eq, filter_name + '_eq') filters.append(_FilterExactMatch(values)) if not_eq is not NotABranch: values = extract_filter_values_branch(not_eq, filter_name + '_not_eq') filters.append(_FilterExactMatchInverse(values)) if regex is not None: values = extract_filter_values_regex(regex, filter_name + '_re') filters.append(_FilterRegex(values)) if not_regex is not None: values = extract_filter_values_regex(not_regex, filter_name + '_re') filters.append(_FilterRegexInverse(values)) return filters def create_filters(self, eq, not_eq, regex, not_regex, filter_name): filters = [] if eq is not None: values = extract_filter_values(eq, filter_name + '_eq') filters.append(_FilterExactMatch(values)) if not_eq is not None: values = extract_filter_values(not_eq, filter_name + '_not_eq') filters.append(_FilterExactMatchInverse(values)) if regex is not None: values = extract_filter_values_regex(regex, filter_name + '_re') filters.append(_FilterRegex(values)) if not_regex is not None: values = extract_filter_values_regex(not_regex, filter_name + '_re') filters.append(_FilterRegexInverse(values)) return filters def do_prop_match(self, ss, prop, filters): value = ss.get(prop, '') for filter in filters: if not filter.is_matched(value): return False return True def is_matched(self, ss): if self.filter_fn is not None and not 
self.filter_fn(ss): return False if self.project_filters and not self.do_prop_match(ss, 'project', self.project_filters): return False if self.codebase_filters and not self.do_prop_match(ss, 'codebase', self.codebase_filters): return False if self.repository_filters and \ not self.do_prop_match(ss, 'repository', self.repository_filters): return False if self.branch_filters and not self.do_prop_match(ss, 'branch', self.branch_filters): return False return True def is_matched_codebase(self, codebase): for filter in self.codebase_filters: if not filter.is_matched(codebase): return False return True def _repr_filters(self, filters, prop): return [filter.describe(prop) for filter in filters] def __repr__(self): filters = [] if self.filter_fn is not None: filters.append('{}()'.format(self.filter_fn.__name__)) filters += self._repr_filters(self.project_filters, 'project') filters += self._repr_filters(self.codebase_filters, 'codebase') filters += self._repr_filters(self.repository_filters, 'repository') filters += self._repr_filters(self.branch_filters, 'branch') return "<{} on {}>".format(self.__class__.__name__, ' and '.join(filters)) buildbot-3.4.0/master/buildbot/util/ssl.py000066400000000000000000000027171413250514000205530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ This modules acts the same as twisted.internet.ssl except it does not raise ImportError Modules using this should call ensureHasSSL in order to make sure that the user installed buildbot[tls] """ import unittest from buildbot.config import error try: from twisted.internet.ssl import * # noqa pylint: disable=unused-wildcard-import, wildcard-import ssl_import_error = None has_ssl = True except ImportError as e: ssl_import_error = str(e) has_ssl = False def ensureHasSSL(module): if not has_ssl: error(("TLS dependencies required for {} are not installed : " "{}\n pip install 'buildbot[tls]'").format(module, ssl_import_error)) def skipUnless(f): return unittest.skipUnless(has_ssl, "TLS dependencies required")(f) buildbot-3.4.0/master/buildbot/util/state.py000066400000000000000000000031731413250514000210670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer class StateMixin: # state management _objectid = None @defer.inlineCallbacks def getState(self, *args, **kwargs): # get the objectid, if not known if self._objectid is None: self._objectid = yield self.master.db.state.getObjectId(self.name, self.__class__.__name__) rv = yield self.master.db.state.getState(self._objectid, *args, **kwargs) return rv @defer.inlineCallbacks def setState(self, key, value): # get the objectid, if not known if self._objectid is None: self._objectid = yield self.master.db.state.getObjectId(self.name, self.__class__.__name__) yield self.master.db.state.setState(self._objectid, key, value) buildbot-3.4.0/master/buildbot/util/subscription.py000066400000000000000000000054241413250514000224740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.util import Notifier class SubscriptionPoint: def __init__(self, name): self.name = name self.subscriptions = set() self._unfinished_deliveries = [] self._unfinished_notifier = Notifier() self._got_exceptions = [] def __str__(self): return "".format(self.name) def subscribe(self, callback): sub = Subscription(self, callback) self.subscriptions.add(sub) return sub def deliver(self, *args, **kwargs): self._unfinished_deliveries.append(self) for sub in list(self.subscriptions): try: d = sub.callback(*args, **kwargs) if isinstance(d, defer.Deferred): self._unfinished_deliveries.append(d) d.addErrback(self._notify_delivery_exception, sub) d.addBoth(self._notify_delivery_finished, d) except Exception as e: self._notify_delivery_exception(e, sub) self._notify_delivery_finished(None, self) def waitForDeliveriesToFinish(self): # returns a deferred if not self._unfinished_deliveries: return defer.succeed(None) return self._unfinished_notifier.wait() def pop_exceptions(self): exceptions = self._got_exceptions self._got_exceptions = None return exceptions def _unsubscribe(self, subscription): self.subscriptions.remove(subscription) def _notify_delivery_exception(self, e, sub): log.err(e, 'while invoking callback {} to {}'.format(sub.callback, self)) self._got_exceptions.append(e) def _notify_delivery_finished(self, _, d): self._unfinished_deliveries.remove(d) if not self._unfinished_deliveries: self._unfinished_notifier.notify(None) class Subscription: def __init__(self, subpt, callback): self.subpt = subpt self.callback = callback def unsubscribe(self): self.subpt._unsubscribe(self) buildbot-3.4.0/master/buildbot/util/test_result_submitter.py000066400000000000000000000132101413250514000244130ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.util import deferwaiter class TestResultSubmitter: def __init__(self, batch_n=3000): self._batch_n = batch_n self._curr_batch = [] self._pending_batches = [] self._waiter = deferwaiter.DeferWaiter() self._master = None self._builderid = None self._add_pass_fail_result = None # will be set to a callable if enabled self._tests_passed = None self._tests_failed = None @defer.inlineCallbacks def setup(self, step, description, category, value_unit): builderid = yield step.build.getBuilderId() yield self.setup_by_ids(step.master, builderid, step.build.buildid, step.stepid, description, category, value_unit) @defer.inlineCallbacks def setup_by_ids(self, master, builderid, buildid, stepid, description, category, value_unit): self._master = master self._category = category self._value_unit = value_unit self._initialize_pass_fail_recording_if_needed() self._builderid = builderid self._setid = yield self._master.data.updates.addTestResultSet(builderid, buildid, stepid, description, category, value_unit) @defer.inlineCallbacks def finish(self): self._submit_batch() yield self._waiter.wait() yield self._master.data.updates.completeTestResultSet(self._setid, tests_passed=self._tests_passed, tests_failed=self._tests_failed) def 
get_test_result_set_id(self): return self._setid def _submit_batch(self): batch = self._curr_batch self._curr_batch = [] if not batch: return self._pending_batches.append(batch) if self._waiter.has_waited(): return self._waiter.add(self._process_batches()) @defer.inlineCallbacks def _process_batches(self): # at most one instance of this function may be running at the same time while self._pending_batches: batch = self._pending_batches.pop(0) yield self._master.data.updates.addTestResults(self._builderid, self._setid, batch) def _initialize_pass_fail_recording(self, function): self._add_pass_fail_result = function self._compute_pass_fail = True self._tests_passed = 0 self._tests_failed = 0 def _initialize_pass_fail_recording_if_needed(self): if self._category == 'pass_fail' and self._value_unit == 'boolean': self._initialize_pass_fail_recording(self._add_pass_fail_result_category_pass_fail) return if self._category == 'pass_only': self._initialize_pass_fail_recording(self._add_pass_fail_result_category_pass_only) return if self._category == 'fail_only' or self._category == 'code_issue': self._initialize_pass_fail_recording(self._add_pass_fail_result_category_fail_only) return def _add_pass_fail_result_category_fail_only(self, value): self._tests_failed += 1 def _add_pass_fail_result_category_pass_only(self, value): self._tests_passed += 1 def _add_pass_fail_result_category_pass_fail(self, value): try: is_success = bool(int(value)) if is_success: self._tests_passed += 1 else: self._tests_failed += 1 except Exception as e: log.err(e, 'When parsing test result success status') def add_test_result(self, value, test_name=None, test_code_path=None, line=None, duration_ns=None): if not isinstance(value, str): raise TypeError('value must be a string') result = {'value': value} if test_name is not None: if not isinstance(test_name, str): raise TypeError('test_name must be a string') result['test_name'] = test_name if test_code_path is not None: if not 
isinstance(test_code_path, str): raise TypeError('test_code_path must be a string') result['test_code_path'] = test_code_path if line is not None: if not isinstance(line, int): raise TypeError('line must be an integer') result['line'] = line if duration_ns is not None: if not isinstance(duration_ns, int): raise TypeError('duration_ns must be an integer') result['duration_ns'] = duration_ns if self._add_pass_fail_result is not None: self._add_pass_fail_result(value) self._curr_batch.append(result) if len(self._curr_batch) >= self._batch_n: self._submit_batch() buildbot-3.4.0/master/buildbot/util/tuplematch.py000066400000000000000000000016321413250514000221130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members def matchTuple(routingKey, filter): if len(filter) != len(routingKey): return False for k, f in zip(routingKey, filter): if f is not None and f != k: return False return True buildbot-3.4.0/master/buildbot/wamp/000077500000000000000000000000001413250514000173605ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/wamp/__init__.py000066400000000000000000000000001413250514000214570ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/wamp/connector.py000066400000000000000000000136731413250514000217360ustar00rootroot00000000000000# This file is part of . 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Team Members import txaio from autobahn.twisted.wamp import ApplicationSession from autobahn.twisted.wamp import Service from autobahn.wamp.exception import TransportLost from twisted.internet import defer from twisted.python import failure from twisted.python import log from buildbot.util import bytes2unicode from buildbot.util import service class MasterService(ApplicationSession, service.AsyncMultiService): """ concatenation of all the wamp services of buildbot """ def __init__(self, config): # Cannot use super() here. # We must explicitly call both parent constructors. ApplicationSession.__init__(self) service.AsyncMultiService.__init__(self) self.config = config self.leaving = False self.setServiceParent(config.extra['parent']) @defer.inlineCallbacks def onJoin(self, details): log.msg("Wamp connection succeed!") for handler in [self] + self.services: yield self.register(handler) yield self.subscribe(handler) yield self.publish("org.buildbot.{}.connected".format(self.master.masterid)) self.parent.service = self self.parent.serviceDeferred.callback(self) @defer.inlineCallbacks def onLeave(self, details): if self.leaving: return # XXX We don't handle crossbar reboot, or any other disconnection well. 
# this is a tricky problem, as we would have to reconnect with exponential backoff # re-subscribe to subscriptions, queue messages until reconnection. # This is quite complicated, and I believe much better handled in autobahn # It is possible that such failure is practically non-existent # so for now, we just crash the master log.msg("Guru meditation! We have been disconnected from wamp server") log.msg( "We don't know how to recover this without restarting the whole system") log.msg(str(details)) yield self.master.stopService() def onUserError(self, e, msg): log.err(e, msg) def make(config): if config: return MasterService(config) # if no config given, return a description of this WAMPlet .. return {'label': 'Buildbot master wamplet', 'description': 'This contains all the wamp methods provided by a buildbot master'} class WampConnector(service.ReconfigurableServiceMixin, service.AsyncMultiService): serviceClass = Service name = "wamp" def __init__(self): super().__init__() self.app = None self.router_url = None self.realm = None self.wamp_debug_level = None self.serviceDeferred = defer.Deferred() self.service = None def getService(self): if self.service is not None: return defer.succeed(self.service) d = defer.Deferred() @self.serviceDeferred.addCallback def gotService(service): d.callback(service) return service return d def stopService(self): if self.service is not None: self.service.leaving = True super().stopService() @defer.inlineCallbacks def publish(self, topic, data, options=None): service = yield self.getService() try: ret = yield service.publish(topic, data, options=options) except TransportLost: log.err(failure.Failure(), "while publishing event " + topic) return None return ret @defer.inlineCallbacks def subscribe(self, callback, topic=None, options=None): service = yield self.getService() ret = yield service.subscribe(callback, topic, options) return ret @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): if 
new_config.mq.get('type', 'simple') != "wamp": if self.app is not None: raise ValueError("Cannot use different wamp settings when reconfiguring") return wamp = new_config.mq log.msg("Starting wamp with config: %r", wamp) router_url = wamp.get('router_url', None) realm = bytes2unicode(wamp.get('realm', 'buildbot')) wamp_debug_level = wamp.get('wamp_debug_level', 'error') # MQ router can be reconfigured only once. Changes to configuration are not supported. # We can't switch realm nor the URL as that would leave transactions in inconsistent state. # Implementing reconfiguration just for wamp_debug_level does not seem like a good # investment. if self.app is not None: if self.router_url != router_url or self.realm != realm or \ self.wamp_debug_level != wamp_debug_level: raise ValueError("Cannot use different wamp settings when reconfiguring") return if router_url is None: return self.router_url = router_url self.realm = realm self.wamp_debug_level = wamp_debug_level self.app = self.serviceClass( url=self.router_url, extra=dict(master=self.master, parent=self), realm=realm, make=make ) txaio.set_global_log_level(wamp_debug_level) yield self.app.setServiceParent(self) yield super().reconfigServiceWithBuildbotConfig(new_config) buildbot-3.4.0/master/buildbot/warnings.py000066400000000000000000000025631413250514000206240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import warnings class ConfigWarning(Warning): """ Warning for issues in the configuration. Use DeprecatedApiWarning for deprecated APIs """ # DeprecationWarning or PendingDeprecationWarning may be used as # the base class, but by default deprecation warnings are disabled in Python, # so by default old-API usage warnings will be ignored - this is not what # we want. class DeprecatedApiWarning(Warning): """ Warning for deprecated configuration options. """ def warn_deprecated(version, msg, stacklevel=2): warnings.warn("[{} and later] {}".format(version, msg), category=DeprecatedApiWarning, stacklevel=stacklevel) buildbot-3.4.0/master/buildbot/worker/000077500000000000000000000000001413250514000177255ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/worker/__init__.py000066400000000000000000000016421413250514000220410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.worker.base import AbstractWorker from buildbot.worker.base import Worker from buildbot.worker.latent import AbstractLatentWorker _hush_pyflakes = [ AbstractWorker, Worker, AbstractLatentWorker, ] buildbot-3.4.0/master/buildbot/worker/base.py000066400000000000000000000657611413250514000212300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Canonical Ltd. 2009 import time from twisted.internet import defer from twisted.python import log from twisted.python.reflect import namedModule from zope.interface import implementer from buildbot import config from buildbot.interfaces import IWorker from buildbot.process import metrics from buildbot.process.properties import Properties from buildbot.util import Notifier from buildbot.util import bytes2unicode from buildbot.util import service @implementer(IWorker) class AbstractWorker(service.BuildbotService): """This is the master-side representative for a remote buildbot worker. There is exactly one for each worker described in the config file (the c['workers'] list). When buildbots connect in (.attach), they get a reference to this instance. The BotMaster object is stashed as the .botmaster attribute. The BotMaster is also our '.parent' Service. 
I represent a worker -- a remote machine capable of running builds. I am instantiated by the configuration file, and can be subclassed to add extra functionality.""" # reconfig workers after builders reconfig_priority = 64 quarantine_timer = None quarantine_timeout = quarantine_initial_timeout = 10 quarantine_max_timeout = 60 * 60 start_missing_on_startup = True DEFAULT_MISSING_TIMEOUT = 3600 DEFAULT_KEEPALIVE_INTERVAL = 3600 # override to True if isCompatibleWithBuild may return False builds_may_be_incompatible = False def checkConfig(self, name, password, max_builds=None, notify_on_missing=None, missing_timeout=None, properties=None, defaultProperties=None, locks=None, keepalive_interval=DEFAULT_KEEPALIVE_INTERVAL, machine_name=None): """ @param name: botname this machine will supply when it connects @param password: password this machine will supply when it connects @param max_builds: maximum number of simultaneous builds that will be run concurrently on this worker (the default is None for no limit) @param properties: properties that will be applied to builds run on this worker @type properties: dictionary @param defaultProperties: properties that will be applied to builds run on this worker only if the property has not been set by another source @type defaultProperties: dictionary @param locks: A list of locks that must be acquired before this worker can be used @type locks: dictionary @param machine_name: The name of the machine to associate with the worker. 
""" self.name = name = bytes2unicode(name) self.machine_name = machine_name self.password = password # protocol registration self.registration = None self._graceful = False self._paused = False # these are set when the service is started self.manager = None self.workerid = None self.info = Properties() self.worker_commands = None self.workerforbuilders = {} self.max_builds = max_builds self.access = [] if locks: self.access = locks self.lock_subscriptions = [] self.properties = Properties() self.properties.update(properties or {}, "Worker") self.properties.setProperty("workername", name, "Worker") self.defaultProperties = Properties() self.defaultProperties.update(defaultProperties or {}, "Worker") if self.machine_name is not None: self.properties.setProperty('machine_name', self.machine_name, 'Worker') self.machine = None self.lastMessageReceived = 0 if notify_on_missing is None: notify_on_missing = [] if isinstance(notify_on_missing, str): notify_on_missing = [notify_on_missing] self.notify_on_missing = notify_on_missing for i in notify_on_missing: if not isinstance(i, str): config.error( 'notify_on_missing arg %r is not a string' % (i,)) self.missing_timeout = missing_timeout self.missing_timer = None # a protocol connection, if we're currently connected self.conn = None # during disconnection self.conn will be set to None before all disconnection notifications # are delivered. During that period _pending_conn_shutdown_notifier will be set to # a notifier and allows interested users to wait until all disconnection notifications are # delivered. 
self._pending_conn_shutdown_notifier = None self._old_builder_list = None self._configured_builderid_list = None def __repr__(self): return "<{} {}>".format(self.__class__.__name__, repr(self.name)) @property def workername(self): # workername is now an alias to twisted.Service's name return self.name @property def botmaster(self): if self.master is None: return None return self.master.botmaster @defer.inlineCallbacks def updateLocks(self): """Convert the L{LockAccess} objects in C{self.locks} into real lock objects, while also maintaining the subscriptions to lock releases.""" # unsubscribe from any old locks for s in self.lock_subscriptions: s.unsubscribe() # convert locks into their real form locks = yield self.botmaster.getLockFromLockAccesses(self.access, self.config_version) self.locks = [(l.getLockForWorker(self.workername), la) for l, la in locks] self.lock_subscriptions = [l.subscribeToReleases(self._lockReleased) for l, la in self.locks] def locksAvailable(self): """ I am called to see if all the locks I depend on are available, in which I return True, otherwise I return False """ if not self.locks: return True for lock, access in self.locks: if not lock.isAvailable(self, access): return False return True def acquireLocks(self): """ I am called when a build is preparing to run. I try to claim all the locks that are needed for a build to happen. If I can't, then my caller should give up the build and try to get another worker to look at it. 
""" log.msg("acquireLocks(worker {}, locks {})".format(self, self.locks)) if not self.locksAvailable(): log.msg("worker {} can't lock, giving up".format(self)) return False # all locks are available, claim them all for lock, access in self.locks: lock.claim(self, access) return True def releaseLocks(self): """ I am called to release any locks after a build has finished """ log.msg("releaseLocks({}): {}".format(self, self.locks)) for lock, access in self.locks: lock.release(self, access) def _lockReleased(self): """One of the locks for this worker was released; try scheduling builds.""" if not self.botmaster: return # oh well.. self.botmaster.maybeStartBuildsForWorker(self.name) def _applyWorkerInfo(self, info): if not info: return # set defaults self.info.setProperty("version", "(unknown)", "Worker") # store everything as Properties for k, v in info.items(): if k in ('environ', 'worker_commands'): continue self.info.setProperty(k, v, "Worker") @defer.inlineCallbacks def _getWorkerInfo(self): worker = yield self.master.data.get( ('workers', self.workerid)) self._paused = worker["paused"] self._applyWorkerInfo(worker['workerinfo']) def setServiceParent(self, parent): # botmaster needs to set before setServiceParent which calls # startService self.manager = parent return super().setServiceParent(parent) @defer.inlineCallbacks def startService(self): # tracks config version for locks self.config_version = self.master.config_version self.updateLocks() self.workerid = yield self.master.data.updates.findWorkerId( self.name) self.workerActionConsumer = yield self.master.mq.startConsuming(self.controlWorker, ("control", "worker", str(self.workerid), None)) yield self._getWorkerInfo() yield super().startService() # startMissingTimer wants the service to be running to really start if self.start_missing_on_startup: self.startMissingTimer() @defer.inlineCallbacks def reconfigService(self, name, password, max_builds=None, notify_on_missing=None, 
missing_timeout=DEFAULT_MISSING_TIMEOUT, properties=None, defaultProperties=None, locks=None, keepalive_interval=DEFAULT_KEEPALIVE_INTERVAL, machine_name=None): # Given a Worker config arguments, configure this one identically. # Because Worker objects are remotely referenced, we can't replace them # without disconnecting the worker, yet there's no reason to do that. assert self.name == name self.password = yield self.renderSecrets(password) # adopt new instance's configuration parameters self.max_builds = max_builds self.access = [] if locks: self.access = locks if notify_on_missing is None: notify_on_missing = [] if isinstance(notify_on_missing, str): notify_on_missing = [notify_on_missing] self.notify_on_missing = notify_on_missing if self.missing_timeout != missing_timeout: running_missing_timer = self.missing_timer self.stopMissingTimer() self.missing_timeout = missing_timeout if running_missing_timer: self.startMissingTimer() self.properties = Properties() self.properties.update(properties or {}, "Worker") self.properties.setProperty("workername", name, "Worker") self.defaultProperties = Properties() self.defaultProperties.update(defaultProperties or {}, "Worker") # Note that before first reconfig self.machine will always be None and # out of sync with self.machine_name, thus more complex logic is needed. 
if self.machine is not None and self.machine_name != machine_name: self.machine.unregisterWorker(self) self.machine = None self.machine_name = machine_name if self.machine is None and self.machine_name is not None: self.machine = self.master.machine_manager.getMachineByName(self.machine_name) if self.machine is not None: self.machine.registerWorker(self) self.properties.setProperty("machine_name", self.machine_name, "Worker") else: log.err("Unknown machine '{}' for worker '{}'".format( self.machine_name, self.name)) # update our records with the worker manager if not self.registration: self.registration = yield self.master.workers.register(self) yield self.registration.update(self, self.master.config) # tracks config version for locks self.config_version = self.master.config_version self.updateLocks() @defer.inlineCallbacks def reconfigServiceWithSibling(self, sibling): # reconfigServiceWithSibling will only reconfigure the worker when it is configured # differently. # However, the worker configuration depends on which builder it is configured yield super().reconfigServiceWithSibling(sibling) # update the attached worker's notion of which builders are attached. # This assumes that the relevant builders have already been configured, # which is why the reconfig_priority is set low in this class. 
bids = [ b.getBuilderId() for b in self.botmaster.getBuildersForWorker(self.name)] bids = yield defer.gatherResults(bids, consumeErrors=True) if self._configured_builderid_list != bids: yield self.master.data.updates.workerConfigured(self.workerid, self.master.masterid, bids) yield self.updateWorker() self._configured_builderid_list = bids @defer.inlineCallbacks def stopService(self): if self.registration: yield self.registration.unregister() self.registration = None self.workerActionConsumer.stopConsuming() self.stopMissingTimer() self.stopQuarantineTimer() # mark this worker as configured for zero builders in this master yield self.master.data.updates.workerConfigured(self.workerid, self.master.masterid, []) # during master shutdown we need to wait until the disconnection notification deliveries # are completed, otherwise some of the events may still be firing long after the master # is completely shut down. yield self.disconnect() yield self.waitForCompleteShutdown() yield super().stopService() def isCompatibleWithBuild(self, build_props): # given a build properties object, determines whether the build is # compatible with the currently running worker or not. This is most # often useful for latent workers where it's possible to request # different kinds of workers. 
return defer.succeed(True) def startMissingTimer(self): if self.missing_timeout and self.parent and self.running: self.stopMissingTimer() # in case it's already running self.missing_timer = self.master.reactor.callLater(self.missing_timeout, self._missing_timer_fired) def stopMissingTimer(self): if self.missing_timer: if self.missing_timer.active(): self.missing_timer.cancel() self.missing_timer = None def isConnected(self): return self.conn def _missing_timer_fired(self): self.missing_timer = None # notify people, but only if we're still in the config if not self.parent: return last_connection = time.ctime(time.time() - self.missing_timeout) self.master.data.updates.workerMissing( workerid=self.workerid, masterid=self.master.masterid, last_connection=last_connection, notify=self.notify_on_missing ) def updateWorker(self): """Called to add or remove builders after the worker has connected. @return: a Deferred that indicates when an attached worker has accepted the new builders and/or released the old ones.""" if self.conn: return self.sendBuilderList() # else: return defer.succeed(None) @defer.inlineCallbacks def attached(self, conn): """This is called when the worker connects.""" assert self.conn is None metrics.MetricCountEvent.log("AbstractWorker.attached_workers", 1) # now we go through a sequence of calls, gathering information, then # tell the Botmaster that it can finally give this worker to all the # Builders that care about it. # Reset graceful shutdown status self._graceful = False self.conn = conn self._old_builder_list = None # clear builder list before proceed self._applyWorkerInfo(conn.info) self.worker_commands = conn.info.get("worker_commands", {}) self.worker_environ = conn.info.get("environ", {}) self.worker_basedir = conn.info.get("basedir", None) self.worker_system = conn.info.get("system", None) # The _detach_sub member is only ever used from tests. 
self._detached_sub = self.conn.notifyOnDisconnect(self.detached) workerinfo = { 'admin': conn.info.get('admin'), 'host': conn.info.get('host'), 'access_uri': conn.info.get('access_uri'), 'version': conn.info.get('version') } yield self.master.data.updates.workerConnected( workerid=self.workerid, masterid=self.master.masterid, workerinfo=workerinfo ) if self.worker_system == "nt": self.path_module = namedModule("ntpath") else: # most everything accepts / as separator, so posix should be a # reasonable fallback self.path_module = namedModule("posixpath") log.msg("bot attached") self.messageReceivedFromWorker() self.stopMissingTimer() yield self.updateWorker() yield self.botmaster.maybeStartBuildsForWorker(self.name) self.updateState() def messageReceivedFromWorker(self): now = time.time() self.lastMessageReceived = now def setupProperties(self, props): for name in self.properties.properties: props.setProperty( name, self.properties.getProperty(name), "Worker") for name in self.defaultProperties.properties: if name not in props: props.setProperty( name, self.defaultProperties.getProperty(name), "Worker") @defer.inlineCallbacks def _handle_conn_shutdown_notifier(self, conn): self._pending_conn_shutdown_notifier = Notifier() yield conn.waitShutdown() self._pending_conn_shutdown_notifier.notify(None) self._pending_conn_shutdown_notifier = None @defer.inlineCallbacks def detached(self): conn = self.conn self.conn = None self._handle_conn_shutdown_notifier(conn) # Note that _pending_conn_shutdown_notifier will not be fired until detached() # is complete. metrics.MetricCountEvent.log("AbstractWorker.attached_workers", -1) self._old_builder_list = [] log.msg("Worker.detached({})".format(self.name)) self.releaseLocks() yield self.master.data.updates.workerDisconnected( workerid=self.workerid, masterid=self.master.masterid, ) def disconnect(self): """Forcibly disconnect the worker. 
This severs the TCP connection and returns a Deferred that will fire (with None) when the connection is probably gone. If the worker is still alive, they will probably try to reconnect again in a moment. This is called in two circumstances. The first is when a worker is removed from the config file. In this case, when they try to reconnect, they will be rejected as an unknown worker. The second is when we wind up with two connections for the same worker, in which case we disconnect the older connection. """ if self.conn is None: return defer.succeed(None) log.msg("disconnecting old worker {} now".format(self.name)) # When this Deferred fires, we'll be ready to accept the new worker return self._disconnect(self.conn) def waitForCompleteShutdown(self): # This function waits until the disconnection to happen and the disconnection # notifications have been delivered and acted upon. return self._waitForCompleteShutdownImpl(self.conn) @defer.inlineCallbacks def _waitForCompleteShutdownImpl(self, conn): if conn: yield conn.wait_shutdown_started() yield conn.waitShutdown() elif self._pending_conn_shutdown_notifier is not None: yield self._pending_conn_shutdown_notifier.wait() @defer.inlineCallbacks def _disconnect(self, conn): # This function waits until the disconnection to happen and the disconnection # notifications have been delivered and acted upon d = self._waitForCompleteShutdownImpl(conn) conn.loseConnection() log.msg("waiting for worker to finish disconnecting") yield d @defer.inlineCallbacks def sendBuilderList(self): our_builders = self.botmaster.getBuildersForWorker(self.name) blist = [(b.name, b.config.workerbuilddir) for b in our_builders] if blist == self._old_builder_list: return slist = yield self.conn.remoteSetBuilderList(builders=blist) self._old_builder_list = blist # Nothing has changed, so don't need to re-attach to everything if not slist: return dl = [] for name in slist: # use get() since we might have changed our mind since then b = 
self.botmaster.builders.get(name) if b: d1 = self.attachBuilder(b) dl.append(d1) yield defer.DeferredList(dl) def attachBuilder(self, builder): return builder.attached(self, self.worker_commands) def controlWorker(self, key, params): log.msg("worker {} wants to {}: {}".format(self.name, key[-1], params)) if key[-1] == "stop": return self.shutdownRequested() if key[-1] == "pause": self.pause() if key[-1] == "unpause": self.unpause() if key[-1] == "kill": self.shutdown() return None def shutdownRequested(self): self._graceful = True self.maybeShutdown() self.updateState() def addWorkerForBuilder(self, wfb): self.workerforbuilders[wfb.builder_name] = wfb def removeWorkerForBuilder(self, wfb): try: del self.workerforbuilders[wfb.builder_name] except KeyError: pass def buildFinished(self, wfb): """This is called when a build on this worker is finished.""" self.botmaster.maybeStartBuildsForWorker(self.name) def canStartBuild(self): """ I am called when a build is requested to see if this worker can start a build. This function can be used to limit overall concurrency on the worker. Note for subclassers: if a worker can become willing to start a build without any action on that worker (for example, by a resource in use on another worker becoming available), then you must arrange for L{maybeStartBuildsForWorker} to be called at that time, or builds on this worker will not start. """ # If we're waiting to shutdown gracefully, paused or quarantined then we shouldn't # accept any new jobs. 
if self._graceful or self._paused or self.quarantine_timer: return False if self.max_builds: active_builders = [wfb for wfb in self.workerforbuilders.values() if wfb.isBusy()] if len(active_builders) >= self.max_builds: return False if not self.locksAvailable(): return False return True @defer.inlineCallbacks def shutdown(self): """Shutdown the worker""" if not self.conn: log.msg("no remote; worker is already shut down") return yield self.conn.remoteShutdown() def maybeShutdown(self): """Shut down this worker if it has been asked to shut down gracefully, and has no active builders.""" if not self._graceful: return active_builders = [wfb for wfb in self.workerforbuilders.values() if wfb.isBusy()] if active_builders: return d = self.shutdown() d.addErrback(log.err, 'error while shutting down worker') def updateState(self): self.master.data.updates.setWorkerState(self.workerid, self._paused, self._graceful) def pause(self): """Stop running new builds on the worker.""" self._paused = True self.updateState() def unpause(self): """Restart running new builds on the worker.""" self._paused = False self.stopQuarantineTimer() self.botmaster.maybeStartBuildsForWorker(self.name) self.updateState() def isPaused(self): return self._paused def resetQuarantine(self): self.quarantine_timeout = self.quarantine_initial_timeout def putInQuarantine(self): if self.quarantine_timer: # already in quarantine return self.quarantine_timer = self.master.reactor.callLater( self.quarantine_timeout, self.exitQuarantine) log.msg("{} has been put in quarantine for {}s".format( self.name, self.quarantine_timeout)) # next we will wait twice as long self.quarantine_timeout *= 2 if self.quarantine_timeout > self.quarantine_max_timeout: # unless we hit the max timeout self.quarantine_timeout = self.quarantine_max_timeout def exitQuarantine(self): log.msg("{} has left quarantine".format(self.name)) self.quarantine_timer = None self.botmaster.maybeStartBuildsForWorker(self.name) def 
stopQuarantineTimer(self): if self.quarantine_timer is not None: self.quarantine_timer.cancel() self.exitQuarantine() class Worker(AbstractWorker): @defer.inlineCallbacks def detached(self): yield super().detached() self.botmaster.workerLost(self) self.startMissingTimer() @defer.inlineCallbacks def attached(self, bot): try: yield super().attached(bot) except Exception as e: log.err(e, "worker {} cannot attach".format(self.name)) return def buildFinished(self, wfb): """This is called when a build on this worker is finished.""" super().buildFinished(wfb) # If we're gracefully shutting down, and we have no more active # builders, then it's safe to disconnect self.maybeShutdown() buildbot-3.4.0/master/buildbot/worker/docker.py000066400000000000000000000332501413250514000215510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import hashlib import json import socket from io import BytesIO from twisted.internet import defer from twisted.internet import threads from twisted.python import log from buildbot import config from buildbot.interfaces import LatentWorkerCannotSubstantiate from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.util import unicode2bytes from buildbot.util.latent import CompatibleLatentWorkerMixin from buildbot.worker import AbstractLatentWorker try: import docker from docker import client from docker.errors import NotFound _hush_pyflakes = [docker, client] docker_py_version = float(docker.__version__.rsplit(".", 1)[0]) except ImportError: docker = None client = None docker_py_version = 0.0 def _handle_stream_line(line): """\ Input is the json representation of: {'stream': "Content\ncontent"} Output is a generator yield "Content", and then "content" """ # XXX This necessary processing is probably a bug from docker-py, # hence, might break if the bug is fixed, i.e. we should get decoded JSON # directly from the API. line = json.loads(line) if 'error' in line: content = "ERROR: " + line['error'] else: content = line.get('stream', '') for streamline in content.split('\n'): if streamline: yield streamline class DockerBaseWorker(AbstractLatentWorker): def checkConfig(self, name, password=None, image=None, masterFQDN=None, **kwargs): # Set build_wait_timeout to 0 if not explicitly set: Starting a # container is almost immediate, we can afford doing so for each build. 
if 'build_wait_timeout' not in kwargs: kwargs['build_wait_timeout'] = 0 if image is not None and not isinstance(image, str): if not hasattr(image, 'getRenderingFor'): config.error("image must be a string") super().checkConfig(name, password, **kwargs) def reconfigService(self, name, password=None, image=None, masterFQDN=None, **kwargs): # Set build_wait_timeout to 0 if not explicitly set: Starting a # container is almost immediate, we can afford doing so for each build. if 'build_wait_timeout' not in kwargs: kwargs['build_wait_timeout'] = 0 if password is None: password = self.getRandomPass() if masterFQDN is None: masterFQDN = socket.getfqdn() self.masterFQDN = masterFQDN self.image = image masterName = unicode2bytes(self.master.name) self.masterhash = hashlib.sha1(masterName).hexdigest()[:6] return super().reconfigService(name, password, **kwargs) def getContainerName(self): return ('buildbot-{worker}-{hash}'.format(worker=self.workername, hash=self.masterhash)).replace("_", "-") @property def shortid(self): if self.instance is None: return None return self.instance['Id'][:6] def createEnvironment(self, build=None): result = { "BUILDMASTER": self.masterFQDN, "WORKERNAME": self.name, "WORKERPASS": self.password } if self.registration is not None: result["BUILDMASTER_PORT"] = str(self.registration.getPBPort()) if ":" in self.masterFQDN: result["BUILDMASTER"], result["BUILDMASTER_PORT"] = self.masterFQDN.split(":") return result @staticmethod def get_fqdn(): return socket.getfqdn() @staticmethod def get_ip(): fqdn = socket.getfqdn() try: return socket.gethostbyname(fqdn) except socket.gaierror: return fqdn class DockerLatentWorker(CompatibleLatentWorkerMixin, DockerBaseWorker): instance = None def checkConfig(self, name, password, docker_host, image=None, command=None, volumes=None, dockerfile=None, version=None, tls=None, followStartupLogs=False, masterFQDN=None, hostconfig=None, autopull=False, alwaysPull=False, custom_context=False, encoding='gzip', 
buildargs=None, hostname=None, **kwargs): super().checkConfig(name, password, image, masterFQDN, **kwargs) if not client: config.error("The python module 'docker>=2.0' is needed to use a" " DockerLatentWorker") if not image and not dockerfile: config.error("DockerLatentWorker: You need to specify at least" " an image name, or a dockerfile") # Following block is only for checking config errors, # actual parsing happens in self.parse_volumes() # Renderables can be direct volumes definition or list member if isinstance(volumes, list): for volume_string in (volumes or []): if not isinstance(volume_string, str): continue try: bind, volume = volume_string.split(":", 1) except ValueError: config.error(("Invalid volume definition for docker " "{}. Skipping...").format(volume_string)) continue @defer.inlineCallbacks def reconfigService(self, name, password, docker_host, image=None, command=None, volumes=None, dockerfile=None, version=None, tls=None, followStartupLogs=False, masterFQDN=None, hostconfig=None, autopull=False, alwaysPull=False, custom_context=False, encoding='gzip', buildargs=None, hostname=None, **kwargs): yield super().reconfigService(name, password, image, masterFQDN, **kwargs) self.volumes = volumes or [] self.followStartupLogs = followStartupLogs self.command = command or [] self.dockerfile = dockerfile self.hostconfig = hostconfig or {} self.autopull = autopull self.alwaysPull = alwaysPull self.custom_context = custom_context self.encoding = encoding self.buildargs = buildargs # Prepare the parameters for the Docker Client object. self.client_args = {'base_url': docker_host} if version is not None: self.client_args['version'] = version if tls is not None: self.client_args['tls'] = tls self.hostname = hostname def _thd_parse_volumes(self, volumes): volume_list = [] for volume_string in (volumes or []): try: _, volume = volume_string.split(":", 1) except ValueError: config.error(("Invalid volume definition for docker " "{}. 
Skipping...").format(volume_string)) continue if volume.endswith(':ro') or volume.endswith(':rw'): volume = volume[:-3] volume_list.append(volume) return volume_list, volumes def _getDockerClient(self): if docker.version[0] == '1': docker_client = client.Client(**self.client_args) else: docker_client = client.APIClient(**self.client_args) return docker_client def renderWorkerProps(self, build): return build.render((self.image, self.dockerfile, self.volumes, self.custom_context, self.encoding, self.buildargs, self.hostname)) @defer.inlineCallbacks def start_instance(self, build): if self.instance is not None: raise ValueError('instance active') image, dockerfile, volumes, custom_context, encoding, buildargs, hostname = \ yield self.renderWorkerPropsOnStart(build) res = yield threads.deferToThread(self._thd_start_instance, image, dockerfile, volumes, custom_context, encoding, buildargs, hostname) return res def _image_exists(self, client, name): # Make sure the image exists for image in client.images(): for tag in image['RepoTags'] or []: if ':' in name and tag == name: return True if tag.startswith(name + ':'): return True return False def _thd_start_instance(self, image, dockerfile, volumes, custom_context, encoding, buildargs, hostname): docker_client = self._getDockerClient() container_name = self.getContainerName() # cleanup the old instances instances = docker_client.containers( all=1, filters=dict(name=container_name)) container_name = "/{0}".format(container_name) for instance in instances: if container_name not in instance['Names']: continue try: docker_client.remove_container(instance['Id'], v=True, force=True) except NotFound: pass # that's a race condition found = False if image is not None: found = self._image_exists(docker_client, image) else: image = '{}_{}_image'.format(self.workername, id(self)) if (not found) and (dockerfile is not None): log.msg("Image '{}' not found, building it from scratch".format(image)) if (custom_context): with 
open(dockerfile, 'rb') as fin: lines = docker_client.build(fileobj=fin, custom_context=custom_context, encoding=encoding, tag=image, buildargs=buildargs) else: lines = docker_client.build( fileobj=BytesIO(dockerfile.encode('utf-8')), tag=image, ) for line in lines: for streamline in _handle_stream_line(line): log.msg(streamline) imageExists = self._image_exists(docker_client, image) if ((not imageExists) or self.alwaysPull) and self.autopull: if (not imageExists): log.msg("Image '{}' not found, pulling from registry".format(image)) docker_client.pull(image) if (not self._image_exists(docker_client, image)): msg = 'Image "{}" not found on docker host.'.format(image) log.msg(msg) raise LatentWorkerCannotSubstantiate(msg) volumes, binds = self._thd_parse_volumes(volumes) host_conf = self.hostconfig.copy() host_conf['binds'] = binds if docker_py_version >= 2.2 and 'init' not in host_conf: host_conf['init'] = True host_conf = docker_client.create_host_config(**host_conf) instance = docker_client.create_container( image, self.command, name=self.getContainerName(), volumes=volumes, environment=self.createEnvironment(), host_config=host_conf, hostname=hostname ) if instance.get('Id') is None: log.msg('Failed to create the container') raise LatentWorkerFailedToSubstantiate( 'Failed to start container' ) shortid = instance['Id'][:6] log.msg('Container created, Id: {}...'.format(shortid)) instance['image'] = image self.instance = instance docker_client.start(instance) log.msg('Container started') if self.followStartupLogs: logs = docker_client.attach( container=instance, stdout=True, stderr=True, stream=True) for line in logs: log.msg("docker VM {}: {}".format(shortid, line.strip())) if self.conn: break del logs return [instance['Id'], image] def stop_instance(self, fast=False): if self.instance is None: # be gentle. Something may just be trying to alert us that an # instance never attached, and it's because, somehow, we never # started. 
return defer.succeed(None) instance = self.instance self.instance = None self.resetWorkerPropsOnStop() return threads.deferToThread(self._thd_stop_instance, instance, fast) def _thd_stop_instance(self, instance, fast): docker_client = self._getDockerClient() log.msg('Stopping container {}...'.format(instance['Id'][:6])) docker_client.stop(instance['Id']) if not fast: docker_client.wait(instance['Id']) docker_client.remove_container(instance['Id'], v=True, force=True) if self.image is None: try: docker_client.remove_image(image=instance['image']) except docker.errors.APIError as e: log.msg('Error while removing the image: %s', e) buildbot-3.4.0/master/buildbot/worker/ec2.py000066400000000000000000000601261413250514000207550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Canonical Ltd. 2009 """ A latent worker that uses EC2 to instantiate the workers on demand. 
Tested with Python boto 1.5c """ import os import re import time from twisted.internet import defer from twisted.internet import threads from twisted.python import log from buildbot import config from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.worker import AbstractLatentWorker try: import boto3 import botocore from botocore.client import ClientError except ImportError: boto3 = None PENDING = 'pending' RUNNING = 'running' SHUTTINGDOWN = 'shutting-down' TERMINATED = 'terminated' SPOT_REQUEST_PENDING_STATES = ['pending-evaluation', 'pending-fulfillment'] FULFILLED = 'fulfilled' PRICE_TOO_LOW = 'price-too-low' class EC2LatentWorker(AbstractLatentWorker): instance = image = None _poll_resolution = 5 # hook point for tests def __init__(self, name, password, instance_type, ami=None, valid_ami_owners=None, valid_ami_location_regex=None, elastic_ip=None, identifier=None, secret_identifier=None, aws_id_file_path=None, user_data=None, region=None, keypair_name=None, security_name=None, spot_instance=False, max_spot_price=1.6, volumes=None, placement=None, price_multiplier=1.2, tags=None, product_description='Linux/UNIX', subnet_id=None, security_group_ids=None, instance_profile_name=None, block_device_map=None, session=None, **kwargs): if not boto3: config.error("The python module 'boto3' is needed to use a " "EC2LatentWorker") if keypair_name is None: config.error("EC2LatentWorker: 'keypair_name' parameter must be " "specified") if security_name is None and not subnet_id: config.error("EC2LatentWorker: 'security_name' parameter must be " "specified") if volumes is None: volumes = [] if tags is None: tags = {} super().__init__(name, password, **kwargs) if security_name and subnet_id: raise ValueError( 'security_name (EC2 classic security groups) is not supported ' 'in a VPC. 
Use security_group_ids instead.') if not ((ami is not None) ^ (valid_ami_owners is not None or valid_ami_location_regex is not None)): raise ValueError( 'You must provide either a specific ami, or one or both of ' 'valid_ami_location_regex and valid_ami_owners') self.ami = ami if valid_ami_owners is not None: if isinstance(valid_ami_owners, int): valid_ami_owners = (valid_ami_owners,) else: for element in valid_ami_owners: if not isinstance(element, int): raise ValueError( 'valid_ami_owners should be int or iterable ' 'of ints', element) if valid_ami_location_regex is not None: if not isinstance(valid_ami_location_regex, str): raise ValueError( 'valid_ami_location_regex should be a string') # pre-compile the regex valid_ami_location_regex = re.compile(valid_ami_location_regex) if spot_instance and price_multiplier is None and max_spot_price is None: raise ValueError('You must provide either one, or both, of ' 'price_multiplier or max_spot_price') self.valid_ami_owners = None if valid_ami_owners: self.valid_ami_owners = [str(o) for o in valid_ami_owners] self.valid_ami_location_regex = valid_ami_location_regex self.instance_type = instance_type self.keypair_name = keypair_name self.security_name = security_name self.user_data = user_data self.spot_instance = spot_instance self.max_spot_price = max_spot_price self.volumes = volumes self.price_multiplier = price_multiplier self.product_description = product_description if None not in [placement, region]: self.placement = '{}{}'.format(region, placement) else: self.placement = None if identifier is None: assert secret_identifier is None, ( 'supply both or neither of identifier, secret_identifier') if aws_id_file_path is None: home = os.environ['HOME'] default_path = os.path.join(home, '.ec2', 'aws_id') if os.path.exists(default_path): aws_id_file_path = default_path if aws_id_file_path: log.msg('WARNING: EC2LatentWorker is using deprecated ' 'aws_id file') with open(aws_id_file_path, 'r') as aws_file: identifier = 
aws_file.readline().strip() secret_identifier = aws_file.readline().strip() else: assert aws_id_file_path is None, \ 'if you supply the identifier and secret_identifier, ' \ 'do not specify the aws_id_file_path' assert secret_identifier is not None, \ 'supply both or neither of identifier, secret_identifier' region_found = None # Make the EC2 connection. self.session = session if self.session is None: if region is not None: for r in boto3.Session( aws_access_key_id=identifier, aws_secret_access_key=secret_identifier).get_available_regions('ec2'): if r == region: region_found = r if region_found is not None: self.session = boto3.Session( region_name=region, aws_access_key_id=identifier, aws_secret_access_key=secret_identifier) else: raise ValueError( 'The specified region does not exist: ' + region) else: # boto2 defaulted to us-east-1 when region was unset, we # mimic this here in boto3 region = botocore.session.get_session().get_config_variable('region') if region is None: region = 'us-east-1' self.session = boto3.Session( aws_access_key_id=identifier, aws_secret_access_key=secret_identifier, region_name=region ) self.ec2 = self.session.resource('ec2') self.ec2_client = self.session.client('ec2') # Make a keypair # # We currently discard the keypair data because we don't need it. # If we do need it in the future, we will always recreate the keypairs # because there is no way to # programmatically retrieve the private key component, unless we # generate it and store it on the filesystem, which is an unnecessary # usage requirement. 
try: self.ec2.KeyPair(self.keypair_name).load() # key_pair.delete() # would be used to recreate except ClientError as e: if 'InvalidKeyPair.NotFound' not in str(e): if 'AuthFailure' in str(e): log.msg('POSSIBLE CAUSES OF ERROR:\n' ' Did you supply your AWS credentials?\n' ' Did you sign up for EC2?\n' ' Did you put a credit card number in your AWS ' 'account?\n' 'Please doublecheck before reporting a problem.\n') raise # make one; we would always do this, and stash the result, if we # needed the key (for instance, to SSH to the box). We'd then # use paramiko to use the key to connect. self.ec2.create_key_pair(KeyName=keypair_name) # create security group if security_name: try: self.ec2_client.describe_security_groups(GroupNames=[security_name]) except ClientError as e: if 'InvalidGroup.NotFound' in str(e): self.security_group = self.ec2.create_security_group( GroupName=security_name, Description='Authorization to access the buildbot instance.') # Authorize the master as necessary # TODO this is where we'd open the hole to do the reverse pb # connect to the buildbot # ip = urllib.urlopen( # 'http://checkip.amazonaws.com').read().strip() # self.security_group.authorize('tcp', 22, 22, '{}/32'.format(ip)) # self.security_group.authorize('tcp', 80, 80, '{}/32'.format(ip)) else: raise # get the image if self.ami is not None: self.image = self.ec2.Image(self.ami) else: # verify we have access to at least one acceptable image discard = self.get_image() assert discard # get the specified elastic IP, if any if elastic_ip is not None: # Using ec2.vpc_addresses.filter(PublicIps=[elastic_ip]) throws a # NotImplementedError("Filtering not supported in describe_address.") in moto # https://github.com/spulec/moto/blob/100ec4e7c8aa3fde87ff6981e2139768816992e4/moto/ec2/responses/elastic_ip_addresses.py#L52 addresses = self.ec2.meta.client.describe_addresses( PublicIps=[elastic_ip])['Addresses'] if not addresses: raise ValueError( 'Could not find EIP for IP: ' + elastic_ip) 
allocation_id = addresses[0]['AllocationId'] elastic_ip = self.ec2.VpcAddress(allocation_id) self.elastic_ip = elastic_ip self.subnet_id = subnet_id self.security_group_ids = security_group_ids self.classic_security_groups = [ self.security_name] if self.security_name else None self.instance_profile_name = instance_profile_name self.tags = tags self.block_device_map = self.create_block_device_mapping( block_device_map) if block_device_map else None def create_block_device_mapping(self, mapping_definitions): if not isinstance(mapping_definitions, list): config.error("EC2LatentWorker: 'block_device_map' must be a list") for mapping_definition in mapping_definitions: ebs = mapping_definition.get('Ebs') if ebs: ebs.setdefault('DeleteOnTermination', True) return mapping_definitions def get_image(self): # pylint: disable=too-many-nested-blocks if self.image is not None: return self.image images = self.ec2.images.all() if self.valid_ami_owners: images = images.filter(Owners=self.valid_ami_owners) if self.valid_ami_location_regex: level = 0 options = [] get_match = self.valid_ami_location_regex.match for image in images: # Image must be available if image.state != 'available': continue # Image must match regex match = get_match(image.image_location) if not match: continue # Gather sorting information alpha_sort = int_sort = None if level < 2: try: alpha_sort = match.group(1) except IndexError: level = 2 else: if level == 0: try: int_sort = int(alpha_sort) except ValueError: level = 1 options.append([int_sort, alpha_sort, image.image_location, image.id, image]) if level: log.msg('sorting images at level %d' % level) options = [candidate[level:] for candidate in options] else: options = [(image.image_location, image.id, image) for image in images] options.sort() images = ['{} ({})'.format(candidate[-1].id, candidate[-1].image_location) for candidate in options] log.msg('sorted images (last is chosen): {}'.format(', '.join(images))) if not options: raise ValueError('no 
available images match constraints') return options[-1][-1] def dns(self): if self.instance is None: return None return self.instance.public_dns_name dns = property(dns) def start_instance(self, build): if self.instance is not None: raise ValueError('instance active') if self.spot_instance: return threads.deferToThread(self._request_spot_instance) return threads.deferToThread(self._start_instance) def _remove_none_opts(self, *args, **opts): if args: opts = args[0] return dict((k, v) for k, v in opts.items() if v is not None) def _start_instance(self): image = self.get_image() launch_opts = dict( ImageId=image.id, KeyName=self.keypair_name, SecurityGroups=self.classic_security_groups, InstanceType=self.instance_type, UserData=self.user_data, Placement=self._remove_none_opts( AvailabilityZone=self.placement, ), MinCount=1, MaxCount=1, SubnetId=self.subnet_id, SecurityGroupIds=self.security_group_ids, IamInstanceProfile=self._remove_none_opts( Name=self.instance_profile_name, ), BlockDeviceMappings=self.block_device_map ) launch_opts = self._remove_none_opts(launch_opts) reservations = self.ec2.create_instances( **launch_opts ) self.instance = reservations[0] instance_id, start_time = self._wait_for_instance() if None not in [instance_id, image.id, start_time]: return [instance_id, image.id, start_time] else: self.failed_to_start(self.instance.id, self.instance.state['Name']) return None def stop_instance(self, fast=False): if self.instance is None: # be gentle. Something may just be trying to alert us that an # instance never attached, and it's because, somehow, we never # started. 
return defer.succeed(None) instance = self.instance self.output = self.instance = None return threads.deferToThread( self._stop_instance, instance, fast) def _attach_volumes(self): for volume_id, device_node in self.volumes: vol = self.ec2.Volume(volume_id) vol.attach_to_instance( InstanceId=self.instance.id, Device=device_node) log.msg('Attaching EBS volume {} to {}.'.format(volume_id, device_node)) def _stop_instance(self, instance, fast): if self.elastic_ip is not None: self.elastic_ip.association.delete() instance.reload() if instance.state['Name'] not in (SHUTTINGDOWN, TERMINATED): instance.terminate() log.msg('{} {} terminating instance {}'.format(self.__class__.__name__, self.workername, instance.id)) duration = 0 interval = self._poll_resolution if fast: goal = (SHUTTINGDOWN, TERMINATED) instance.reload() else: goal = (TERMINATED,) while instance.state['Name'] not in goal: time.sleep(interval) duration += interval if duration % 60 == 0: log.msg('{} {} has waited {} minutes for instance {} to end'.format( self.__class__.__name__, self.workername, duration // 60, instance.id)) instance.reload() log.msg('{} {} instance {} {} after about {} minutes {} seconds'.format( self.__class__.__name__, self.workername, instance.id, goal, duration // 60, duration % 60)) def _bid_price_from_spot_price_history(self): timestamp_yesterday = time.gmtime(int(time.time() - 86400)) spot_history_starttime = time.strftime( '%Y-%m-%dT%H:%M:%SZ', timestamp_yesterday) spot_prices = self.ec2.meta.client.describe_spot_price_history( StartTime=spot_history_starttime, ProductDescriptions=[self.product_description], AvailabilityZone=self.placement) price_sum = 0.0 price_count = 0 for price in spot_prices['SpotPriceHistory']: if price['InstanceType'] == self.instance_type: price_sum += float(price['SpotPrice']) price_count += 1 if price_count == 0: bid_price = 0.02 else: bid_price = (price_sum / price_count) * self.price_multiplier return bid_price def _request_spot_instance(self): if 
self.price_multiplier is None: bid_price = self.max_spot_price else: bid_price = self._bid_price_from_spot_price_history() if self.max_spot_price is not None \ and bid_price > self.max_spot_price: bid_price = self.max_spot_price log.msg('%s %s requesting spot instance with price %0.4f' % (self.__class__.__name__, self.workername, bid_price)) image = self.get_image() reservations = self.ec2.meta.client.request_spot_instances( SpotPrice=str(bid_price), LaunchSpecification=self._remove_none_opts( ImageId=self.ami, KeyName=self.keypair_name, SecurityGroups=self.classic_security_groups, UserData=self.user_data, InstanceType=self.instance_type, Placement=self._remove_none_opts( AvailabilityZone=self.placement, ), SubnetId=self.subnet_id, SecurityGroupIds=self.security_group_ids, BlockDeviceMappings=self.block_device_map, IamInstanceProfile=self._remove_none_opts( Name=self.instance_profile_name, ) ) ) request, success = self._thd_wait_for_request( reservations['SpotInstanceRequests'][0]) if not success: raise LatentWorkerFailedToSubstantiate() instance_id = request['InstanceId'] self.instance = self.ec2.Instance(instance_id) instance_id, start_time = self._wait_for_instance() return instance_id, image.id, start_time def _wait_for_instance(self): log.msg('{} {} waiting for instance {} to start'.format(self.__class__.__name__, self.workername, self.instance.id)) duration = 0 interval = self._poll_resolution while self.instance.state['Name'] == PENDING: time.sleep(interval) duration += interval if duration % 60 == 0: log.msg('{} {} has waited {} minutes for instance {}'.format( self.__class__.__name__, self.workername, duration // 60, self.instance.id)) self.instance.reload() if self.instance.state['Name'] == RUNNING: self.properties.setProperty("instance", self.instance.id, "Worker") self.output = self.instance.console_output().get('Output') minutes = duration // 60 seconds = duration % 60 log.msg('{} {} instance {} started on {} in about {} minutes {} seconds 
({})'.format( self.__class__.__name__, self.workername, self.instance.id, self.dns, minutes, seconds, self.output)) if self.elastic_ip is not None: self.elastic_ip.associate(InstanceId=self.instance.id) start_time = '%02d:%02d:%02d' % ( minutes // 60, minutes % 60, seconds) if self.volumes: self._attach_volumes() if self.tags: self.instance.create_tags(Tags=[{"Key": k, "Value": v} for k, v in self.tags.items()]) return self.instance.id, start_time else: self.failed_to_start(self.instance.id, self.instance.state['Name']) def _thd_wait_for_request(self, reservation): duration = 0 interval = self._poll_resolution while True: # Sometimes it can take a second or so for the spot request to be # ready. If it isn't ready, you will get a "Spot instance request # ID 'sir-abcd1234' does not exist" exception. try: requests = self.ec2.meta.client.describe_spot_instance_requests( SpotInstanceRequestIds=[reservation['SpotInstanceRequestId']]) except ClientError as e: if 'InvalidSpotInstanceRequestID.NotFound' in str(e): requests = None else: raise if requests is not None: request = requests['SpotInstanceRequests'][0] request_status = request['Status']['Code'] if request_status not in SPOT_REQUEST_PENDING_STATES: break time.sleep(interval) duration += interval if duration % 10 == 0: log.msg('{} {} has waited {} seconds for spot request {}'.format( self.__class__.__name__, self.workername, duration, reservation['SpotInstanceRequestId'])) if request_status == FULFILLED: minutes = duration // 60 seconds = duration % 60 log.msg('{} {} spot request {} fulfilled in about {} minutes {} seconds'.format( self.__class__.__name__, self.workername, request['SpotInstanceRequestId'], minutes, seconds)) return request, True elif request_status == PRICE_TOO_LOW: self.ec2.meta.client.cancel_spot_instance_requests( SpotInstanceRequestIds=[request['SpotInstanceRequestId']]) log.msg('{} {} spot request rejected, spot price too low'.format( self.__class__.__name__, self.workername)) raise 
LatentWorkerFailedToSubstantiate( request['SpotInstanceRequestId'], request_status) else: log.msg('{} {} failed to fulfill spot request {} with status {}'.format( self.__class__.__name__, self.workername, request['SpotInstanceRequestId'], request_status)) # try to cancel, just for good measure self.ec2.meta.client.cancel_spot_instance_requests( SpotInstanceRequestIds=[request['SpotInstanceRequestId']]) raise LatentWorkerFailedToSubstantiate( request['SpotInstanceRequestId'], request_status) buildbot-3.4.0/master/buildbot/worker/kubernetes.py000066400000000000000000000113371413250514000224530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.util import kubeclientservice from buildbot.util.latent import CompatibleLatentWorkerMixin from buildbot.util.logger import Logger from buildbot.worker.docker import DockerBaseWorker log = Logger() class KubeLatentWorker(CompatibleLatentWorkerMixin, DockerBaseWorker): instance = None @defer.inlineCallbacks def getPodSpec(self, build): image = yield build.render(self.image) env = yield self.createEnvironment(build) defer.returnValue({ "apiVersion": "v1", "kind": "Pod", "metadata": { "name": self.getContainerName() }, "spec": { "containers": [{ "name": self.getContainerName(), "image": image, "env": [{ "name": k, "value": v } for k, v in env.items()], "resources": (yield self.getBuildContainerResources(build)) }] + (yield self.getServicesContainers(build)), "restartPolicy": "Never" } }) def getBuildContainerResources(self, build): # customization point to generate Build container resources return {} def getServicesContainers(self, build): # customization point to create services containers around the build container # those containers will run within the same localhost as the build container (aka within # the same pod) return [] def renderWorkerProps(self, build_props): return self.getPodSpec(build_props) def checkConfig(self, name, image='buildbot/buildbot-worker', namespace=None, masterFQDN=None, kube_config=None, **kwargs): super().checkConfig(name, None, **kwargs) kubeclientservice.KubeClientService.checkAvailable( self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, name, image='buildbot/buildbot-worker', namespace=None, masterFQDN=None, kube_config=None, **kwargs): # Set build_wait_timeout to 0 if not explicitly set: Starting a # container is almost immediate, we can afford doing so for each build. 
if 'build_wait_timeout' not in kwargs: kwargs['build_wait_timeout'] = 0 if masterFQDN is None: masterFQDN = self.get_ip if callable(masterFQDN): masterFQDN = masterFQDN() yield super().reconfigService(name, image=image, masterFQDN=masterFQDN, **kwargs) self._kube = yield kubeclientservice.KubeClientService.getService( self.master, kube_config=kube_config) self.namespace = namespace or self._kube.namespace @defer.inlineCallbacks def start_instance(self, build): yield self.stop_instance(reportFailure=False) pod_spec = yield self.renderWorkerPropsOnStart(build) try: yield self._kube.createPod(self.namespace, pod_spec) except kubeclientservice.KubeError as e: raise LatentWorkerFailedToSubstantiate(str(e)) from e return True @defer.inlineCallbacks def stop_instance(self, fast=False, reportFailure=True): self.current_pod_spec = None self.resetWorkerPropsOnStop() try: yield self._kube.deletePod(self.namespace, self.getContainerName()) except kubeclientservice.KubeError as e: if reportFailure and e.reason != 'NotFound': raise if fast: return yield self._kube.waitForPodDeletion( self.namespace, self.getContainerName(), timeout=self.missing_timeout) buildbot-3.4.0/master/buildbot/worker/latent.py000066400000000000000000000543211413250514000215730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members # Portions Copyright Canonical Ltd. 2009 import enum import random import string from twisted.internet import defer from twisted.python import failure from twisted.python import log from zope.interface import implementer from buildbot.interfaces import ILatentMachine from buildbot.interfaces import ILatentWorker from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.interfaces import LatentWorkerSubstantiatiationCancelled from buildbot.util import Notifier from buildbot.util import deferwaiter from buildbot.worker.base import AbstractWorker class States(enum.Enum): # Represents the states of AbstractLatentWorker NOT_SUBSTANTIATED = 0 # When in this state, self._substantiation_notifier is waited on. The # notifier is notified immediately after the state transition out of # SUBSTANTIATING. SUBSTANTIATING = 1 # This is the same as SUBSTANTIATING, the difference is that start_instance # has been called SUBSTANTIATING_STARTING = 2 SUBSTANTIATED = 3 # When in this state, self._start_stop_lock is held. INSUBSTANTIATING = 4 # This state represents the case when insubstantiation is in progress and # we also request substantiation at the same time. Substantiation will be # started as soon as insubstantiation completes. Note, that the opposite # actions are not supported: insubstantiation during substantiation will # cancel the substantiation. # # When in this state, self._start_stop_lock is held. # # When in this state self.substantiation_build is not None. INSUBSTANTIATING_SUBSTANTIATING = 5 # This state represents a worker that is shut down. Effectively, it's NOT_SUBSTANTIATED # plus that we will abort if anyone tries to substantiate it. SHUT_DOWN = 6 @implementer(ILatentWorker) class AbstractLatentWorker(AbstractWorker): """A worker that will start up a worker instance when needed. To use, subclass and implement start_instance and stop_instance. 
Additionally, if the instances render any kind of data affecting instance type from the build properties, set the class variable builds_may_be_incompatible to True and override isCompatibleWithBuild method. See ec2.py for a concrete example. """ substantiation_build = None build_wait_timer = None start_missing_on_startup = False # override if the latent worker may connect without substantiate. Most # often this will be used in workers whose lifetime is managed by # latent machines. starts_without_substantiate = False # Caveats: The handling of latent workers is much more complex than it # might seem. The code must handle at least the following conditions: # # - non-silent disconnection by the worker at any time which generated # TCP resets and in the end resulted in detached() being called # # - silent disconnection by worker at any time by silent TCP connection # failure which did not generate TCP resets, but on the other hand no # response may be received. self.conn is not None is that case. # # - no disconnection by worker during substantiation when # build_wait_timeout param is negative. # # - worker attaching before start_instance returned. # # The above means that the following parts of the state must be tracked separately and can # result in various state combinations: # - connection state of the worker (self.conn) # - intended state of the worker (self.state) # - whether start_instance() has been called and has not yet finished. 
state = States.NOT_SUBSTANTIATED ''' state transitions: substantiate(): either of NOT_SUBSTANTIATED -> SUBSTANTIATING INSUBSTANTIATING -> INSUBSTANTIATING_SUBSTANTIATING _substantiate(): either of: SUBSTANTIATING -> SUBSTANTIATING_STARTING SUBSTANTIATING -> SUBSTANTIATING_STARTING -> SUBSTANTIATED attached(): either of: SUBSTANTIATING -> SUBSTANTIATED SUBSTANTIATING_STARTING -> SUBSTANTIATED then: self.conn -> not None detached(): self.conn -> None errors in any of above will call insubstantiate() insubstantiate(): either of: SUBSTANTIATED -> INSUBSTANTIATING INSUBSTANTIATING_SUBSTANTIATING -> INSUBSTANTIATING (cancels substantiation request) SUBSTANTIATING -> INSUBSTANTIATING SUBSTANTIATING -> INSUBSTANTIATING_SUBSTANTIATING SUBSTANTIATING_STARTING -> INSUBSTANTIATING SUBSTANTIATING_STARTING -> INSUBSTANTIATING_SUBSTANTIATING then: < other state transitions may happen during this time > then either of: INSUBSTANTIATING_SUBSTANTIATING -> SUBSTANTIATING INSUBSTANTIATING -> NOT_SUBSTANTIATED stopService(): NOT_SUBSTANTIATED -> SHUT_DOWN ''' def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._substantiation_notifier = Notifier() self._start_stop_lock = defer.DeferredLock() self._deferwaiter = deferwaiter.DeferWaiter() def checkConfig(self, name, password, build_wait_timeout=60 * 10, **kwargs): super().checkConfig(name, password, **kwargs) def reconfigService(self, name, password, build_wait_timeout=60 * 10, **kwargs): self.build_wait_timeout = build_wait_timeout return super().reconfigService(name, password, **kwargs) def _generate_random_password(self): return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(20)) def getRandomPass(self): """ Compute a random password. Latent workers are started by the master, so master can setup the password too. Backends should prefer to use this API as it handles edge cases. """ # We should return an existing password if we're reconfiguring a substantiated worker. 
# Otherwise the worker may be rejected if its password was changed during substantiation. # To simplify, we only allow changing passwords for workers that aren't substantiated. if self.state not in [States.NOT_SUBSTANTIATED, States.SHUT_DOWN]: if self.password is not None: return self.password # pragma: no cover log.err('{}: could not reuse password of substantiated worker (password == None)', repr(self)) return self._generate_random_password() @property def building(self): # A LatentWorkerForBuilder will only be busy if it is building. return {wfb for wfb in self.workerforbuilders.values() if wfb.isBusy()} def failed_to_start(self, instance_id, instance_state): log.msg('{} {} failed to start instance {} ({})'.format(self.__class__.__name__, self.workername, instance_id, instance_state)) raise LatentWorkerFailedToSubstantiate(instance_id, instance_state) def _log_start_stop_locked(self, action_str): if self._start_stop_lock.locked: log.msg(('while {} worker {}: waiting until previous ' + 'start_instance/stop_instance finishes').format(action_str, self)) def start_instance(self, build): # responsible for starting instance that will try to connect with this # master. Should return deferred with either True (instance started) # or False (instance not started, so don't run a build here). Problems # should use an errback. raise NotImplementedError def stop_instance(self, fast=False): # responsible for shutting down instance. 
raise NotImplementedError @property def substantiated(self): return self.state == States.SUBSTANTIATED and self.conn is not None def substantiate(self, wfb, build): log.msg("substantiating worker {}".format(wfb)) if self.state == States.SHUT_DOWN: return defer.succeed(False) if self.state == States.SUBSTANTIATED and self.conn is not None: self._setBuildWaitTimer() return defer.succeed(True) if self.state in [States.SUBSTANTIATING, States.SUBSTANTIATING_STARTING, States.INSUBSTANTIATING_SUBSTANTIATING]: return self._substantiation_notifier.wait() self.startMissingTimer() # if anything of the following fails synchronously we need to have a # deferred ready to be notified d = self._substantiation_notifier.wait() if self.state == States.SUBSTANTIATED and self.conn is None: # connection dropped while we were substantiated. # insubstantiate to clean up and then substantiate normally. d_ins = self.insubstantiate(force_substantiation_build=build) d_ins.addErrback(log.err, 'while insubstantiating') return d assert self.state in [States.NOT_SUBSTANTIATED, States.INSUBSTANTIATING] if self.state == States.NOT_SUBSTANTIATED: self.state = States.SUBSTANTIATING self._substantiate(build) else: self.state = States.INSUBSTANTIATING_SUBSTANTIATING self.substantiation_build = build return d @defer.inlineCallbacks def _substantiate(self, build): assert self.state == States.SUBSTANTIATING try: # if build_wait_timeout is negative we don't ever disconnect the # worker ourselves, so we don't need to wait for it to attach # to declare it as substantiated. 
dont_wait_to_attach = \ self.build_wait_timeout < 0 and self.conn is not None start_success = True if ILatentMachine.providedBy(self.machine): start_success = yield self.machine.substantiate(self) try: self._log_start_stop_locked('substantiating') yield self._start_stop_lock.acquire() if start_success: self.state = States.SUBSTANTIATING_STARTING start_success = yield self.start_instance(build) finally: self._start_stop_lock.release() if not start_success: # this behaviour is kept as compatibility, but it is better # to just errback with a workable reason msg = "Worker does not want to substantiate at this time" raise LatentWorkerFailedToSubstantiate(self.name, msg) if dont_wait_to_attach and \ self.state == States.SUBSTANTIATING_STARTING and \ self.conn is not None: log.msg(r"Worker {} substantiated (already attached)".format(self.name)) self.state = States.SUBSTANTIATED self._fireSubstantiationNotifier(True) except Exception as e: self.stopMissingTimer() self._substantiation_failed(failure.Failure(e)) # swallow the failure as it is notified def _fireSubstantiationNotifier(self, result): if not self._substantiation_notifier: log.msg("No substantiation deferred for {}".format(self.name)) return result_msg = 'success' if result is True else 'failure' log.msg("Firing {} substantiation deferred with {}".format(self.name, result_msg)) self._substantiation_notifier.notify(result) @defer.inlineCallbacks def attached(self, bot): if self.state != States.SUBSTANTIATING_STARTING and \ self.build_wait_timeout >= 0: msg = ('Worker {} received connection while not trying to substantiate.' 'Disconnecting.').format(self.name) log.msg(msg) self._deferwaiter.add(self._disconnect(bot)) raise RuntimeError(msg) try: yield super().attached(bot) except Exception: self._substantiation_failed(failure.Failure()) return log.msg(r"Worker {} substantiated \o/".format(self.name)) # only change state when we are actually substantiating. 
We could # end up at this point in different state than SUBSTANTIATING_STARTING # if build_wait_timeout is negative. In that case, the worker is never # shut down, but it may reconnect if the connection drops on its side # without master seeing this condition. # # When build_wait_timeout is not negative, we throw an error (see above) if self.state in [States.SUBSTANTIATING, States.SUBSTANTIATING_STARTING]: self.state = States.SUBSTANTIATED self._fireSubstantiationNotifier(True) def attachBuilder(self, builder): wfb = self.workerforbuilders.get(builder.name) return wfb.attached(self, self.worker_commands) def _missing_timer_fired(self): self.missing_timer = None return self._substantiation_failed(defer.TimeoutError()) def _substantiation_failed(self, failure): if self.state in [States.SUBSTANTIATING, States.SUBSTANTIATING_STARTING]: self._fireSubstantiationNotifier(failure) d = self.insubstantiate() d.addErrback(log.err, 'while insubstantiating') self._deferwaiter.add(d) # notify people, but only if we're still in the config if not self.parent or not self.notify_on_missing: return None return self.master.data.updates.workerMissing( workerid=self.workerid, masterid=self.master.masterid, last_connection="Latent worker never connected", notify=self.notify_on_missing ) def canStartBuild(self): # we were disconnected, but all the builds are not yet cleaned up. 
if self.conn is None and self.building: return False return super().canStartBuild() def buildStarted(self, wfb): assert wfb.isBusy() self._clearBuildWaitTimer() if ILatentMachine.providedBy(self.machine): self.machine.notifyBuildStarted() def buildFinished(self, wfb): assert not wfb.isBusy() if not self.building: if self.build_wait_timeout == 0: # we insubstantiate asynchronously to trigger more bugs with # the fake reactor self.master.reactor.callLater(0, self._soft_disconnect) # insubstantiate will automatically retry to create build for # this worker else: self._setBuildWaitTimer() # AbstractWorker.buildFinished() will try to start the next build for # that worker super().buildFinished(wfb) if ILatentMachine.providedBy(self.machine): self.machine.notifyBuildFinished() def _clearBuildWaitTimer(self): if self.build_wait_timer is not None: if self.build_wait_timer.active(): self.build_wait_timer.cancel() self.build_wait_timer = None def _setBuildWaitTimer(self): self._clearBuildWaitTimer() if self.build_wait_timeout <= 0: return self.build_wait_timer = self.master.reactor.callLater( self.build_wait_timeout, self._soft_disconnect) @defer.inlineCallbacks def insubstantiate(self, fast=False, force_substantiation_build=None): # If force_substantiation_build is not None, we'll try to substantiate the given build # after insubstantiation concludes. This parameter allows to go directly to the # SUBSTANTIATING state without going through NOT_SUBSTANTIATED state. log.msg("insubstantiating worker {}".format(self)) if self.state == States.INSUBSTANTIATING_SUBSTANTIATING: # there's another insubstantiation ongoing. 
We'll wait for it to finish by waiting # on self._start_stop_lock self.state = States.INSUBSTANTIATING self.substantiation_build = None self._fireSubstantiationNotifier( failure.Failure(LatentWorkerSubstantiatiationCancelled())) try: self._log_start_stop_locked('insubstantiating') yield self._start_stop_lock.acquire() assert self.state not in [States.INSUBSTANTIATING, States.INSUBSTANTIATING_SUBSTANTIATING] if self.state in [States.NOT_SUBSTANTIATED, States.SHUT_DOWN]: return prev_state = self.state if force_substantiation_build is not None: self.state = States.INSUBSTANTIATING_SUBSTANTIATING self.substantiation_build = force_substantiation_build else: self.state = States.INSUBSTANTIATING if prev_state in [States.SUBSTANTIATING, States.SUBSTANTIATING_STARTING]: self._fireSubstantiationNotifier( failure.Failure(LatentWorkerSubstantiatiationCancelled())) self._clearBuildWaitTimer() if prev_state in [States.SUBSTANTIATING_STARTING, States.SUBSTANTIATED]: try: yield self.stop_instance(fast) except Exception as e: # The case of failure for insubstantiation is bad as we have a # left-over costing resource There is not much thing to do here # generically, so we must put the problem of stop_instance # reliability to the backend driver log.err(e, "while insubstantiating") assert self.state in [States.INSUBSTANTIATING, States.INSUBSTANTIATING_SUBSTANTIATING] if self.state == States.INSUBSTANTIATING_SUBSTANTIATING: build, self.substantiation_build = self.substantiation_build, None self.state = States.SUBSTANTIATING self._substantiate(build) else: # self.state == States.INSUBSTANTIATING: self.state = States.NOT_SUBSTANTIATED finally: self._start_stop_lock.release() self.botmaster.maybeStartBuildsForWorker(self.name) @defer.inlineCallbacks def _soft_disconnect(self, fast=False, stopping_service=False): # a negative build_wait_timeout means the worker should never be shut # down, so just disconnect. 
if not stopping_service and self.build_wait_timeout < 0: yield super().disconnect() return self.stopMissingTimer() # we add the Deferreds to DeferWaiter because we don't wait for a Deferred if # the other Deferred errbacks yield defer.DeferredList([ self._deferwaiter.add(super().disconnect()), self._deferwaiter.add(self.insubstantiate(fast)) ], consumeErrors=True, fireOnOneErrback=True) def disconnect(self): self._deferwaiter.add(self._soft_disconnect()) # this removes the worker from all builders. It won't come back # without a restart (or maybe a sighup) self.botmaster.workerLost(self) @defer.inlineCallbacks def stopService(self): # stops the service. Waits for any pending substantiations, insubstantiations or builds # that are running or about to start to complete. while self.state not in [States.NOT_SUBSTANTIATED, States.SHUT_DOWN]: if self.state in [States.INSUBSTANTIATING, States.INSUBSTANTIATING_SUBSTANTIATING, States.SUBSTANTIATING, States.SUBSTANTIATING_STARTING]: self._log_start_stop_locked('stopService') yield self._start_stop_lock.acquire() self._start_stop_lock.release() if self.conn is not None or self.state in [States.SUBSTANTIATED, States.SUBSTANTIATING_STARTING]: yield self._soft_disconnect(stopping_service=True) yield self._deferwaiter.wait() # prevent any race conditions with any future builds that are in the process of # being started. if self.state == States.NOT_SUBSTANTIATED: self.state = States.SHUT_DOWN self._clearBuildWaitTimer() res = yield super().stopService() return res def updateWorker(self): """Called to add or remove builders after the worker has connected. Also called after botmaster's builders are initially set. 
@return: a Deferred that indicates when an attached worker has accepted the new builders and/or released the old ones.""" for b in self.botmaster.getBuildersForWorker(self.name): if b.name not in self.workerforbuilders: b.addLatentWorker(self) return super().updateWorker() class LocalLatentWorker(AbstractLatentWorker): """ A worker that can be suspended by shutting down or suspending the hardware it runs on. It is intended to be used with LatentMachines. """ starts_without_substantiate = True def checkConfig(self, name, password, **kwargs): super.checkConfig(self, name, password, build_wait_timeout=-1, **kwargs) def reconfigService(self, name, password, **kwargs): return super().reconfigService(name, password, build_wait_timeout=-1, **kwargs) buildbot-3.4.0/master/buildbot/worker/libvirt.py000066400000000000000000000224111413250514000217520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members # Portions Copyright 2010 Isotoma Limited import os import socket from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.util import runprocess from buildbot.util.queue import ConnectableThreadQueue from buildbot.warnings import warn_deprecated from buildbot.worker import AbstractLatentWorker try: import libvirt except ImportError: libvirt = None def handle_connect_close(conn, reason, opaque): opaque.close_connection() class ThreadWithQueue(ConnectableThreadQueue): def __init__(self, pool, uri, *args, **kwargs): self.pool = pool # currently used only for testing self.uri = uri super().__init__(*args, **kwargs) def on_close_connection(self, conn): self.close_connection() def close_connection(self): conn = self.conn super().close_connection() conn.close() def libvirt_open(self): return libvirt.open(self.uri) def create_connection(self): try: log.msg("Connecting to {}".format(self.uri)) conn = self.libvirt_open() conn.registerCloseCallback(handle_connect_close, self) log.msg("Connected to {}".format(self.uri)) return conn except Exception as e: log.err("Error connecting to {}: {}, will retry later".format(self.uri, e)) return None class ServerThreadPool: ThreadClass = ThreadWithQueue def __init__(self): self.threads = {} def do(self, uri, func, *args, **kwargs): # returns a Deferred if uri not in self.threads: self.threads[uri] = self.ThreadClass(self, uri) def logging_func(conn, *args, **kwargs): try: return func(conn, *args, **kwargs) except Exception as e: log.err("libvirt: Exception on {}: {}".format(uri, str(e))) raise return self.threads[uri].execute_in_thread(logging_func, *args, **kwargs) def is_connected(self, uri): if uri in self.threads: return self.threads[uri].conn is not None return False def is_connecting(self, uri): if uri in self.threads: return self.threads[uri].connecting return False 
@defer.inlineCallbacks def get_or_create_connection(self, uri): if uri not in self.threads: yield self.do(uri, lambda: None) return self.threads[uri].conn def reset_connection(self, uri): if uri in self.threads: self.threads[uri].close_connection() else: log.err('libvirt.ServerThreadPool: Unknown connection {}'.format(uri)) # A module is effectively a singleton class, so this is OK threadpool = ServerThreadPool() class Connection: def __init__(self, uri): self.uri = uri class LibVirtWorker(AbstractLatentWorker): pool = threadpool metadata = '' ns = 'http://buildbot.net/' metakey = 'buildbot' def __init__(self, name, password, connection=None, hd_image=None, base_image=None, uri="system:///", xml=None, masterFQDN=None, **kwargs): super().__init__(name, password, **kwargs) if not libvirt: config.error( "The python module 'libvirt' is needed to use a LibVirtWorker") if connection is not None: warn_deprecated('3.2.0', 'LibVirtWorker connection argument has been deprecated: ' + 'please use uri') if uri != "system:///": config.error('connection and uri arguments cannot be used together') uri = connection.uri self.uri = uri self.image = hd_image self.base_image = base_image self.xml = xml if masterFQDN: self.masterFQDN = masterFQDN else: self.masterFQDN = socket.getfqdn() self.cheap_copy = True self.graceful_shutdown = False def _pool_do(self, func): return self.pool.do(self.uri, func) @defer.inlineCallbacks def _get_domain(self): try: domain = yield self._pool_do(lambda conn: conn.lookupByName(self.workername)) return domain except libvirt.libvirtError as e: log.err('LibVirtWorker: got error when accessing domain: {}'.format(e)) try: self.pool.reset_connection(self.uri) except Exception as e1: log.err('LibVirtWorker: got error when resetting connection: {}'.format(e1)) raise e @defer.inlineCallbacks def _get_domain_id(self): domain = yield self._get_domain() if domain is None: return -1 domain_id = yield self._pool_do(lambda conn: domain.ID()) return domain_id 
@defer.inlineCallbacks def _prepare_base_image(self): """ I am a private method for creating (possibly cheap) copies of a base_image for start_instance to boot. """ if not self.base_image: return if self.cheap_copy: clone_cmd = ['qemu-img', 'create', '-b', self.base_image, '-f', 'qcow2', self.image] else: clone_cmd = ['cp', self.base_image, self.image] log.msg("Cloning base image: {}'".format(clone_cmd)) try: rc = yield runprocess.run_process(self.master.reactor, clone_cmd, collect_stdout=False, collect_stderr=False) if rc != 0: raise LatentWorkerFailedToSubstantiate('Failed to clone image (rc={})'.format(rc)) except Exception as e: log.err("Cloning failed: {}".format(e)) raise @defer.inlineCallbacks def start_instance(self, build): """ I start a new instance of a VM. If a base_image is specified, I will make a clone of that otherwise i will use image directly. If i'm not given libvirt domain definition XML, I will look for my name in the list of defined virtual machines and start that. """ try: domain_id = yield self._get_domain_id() if domain_id != -1: raise LatentWorkerFailedToSubstantiate( "{}: Cannot start_instance as it's already active".format(self)) except Exception as e: raise LatentWorkerFailedToSubstantiate( '{}: Got error while retrieving domain ID: {}'.format(self, e)) yield self._prepare_base_image() try: if self.xml: yield self._pool_do(lambda conn: conn.createXML(self.xml, 0)) else: domain = yield self._get_domain() yield self._pool_do(lambda conn: domain.setMetadata( libvirt.VIR_DOMAIN_METADATA_ELEMENT, self.metadata.format(self.workername, self.password, self.masterFQDN), self.metakey, self.ns, libvirt.VIR_DOMAIN_AFFECT_CONFIG)) yield self._pool_do(lambda conn: domain.create()) except Exception as e: raise LatentWorkerFailedToSubstantiate( '{}: Got error while starting VM: {}'.format(self, e)) return True @defer.inlineCallbacks def stop_instance(self, fast=False): """ I attempt to stop a running VM. 
I make sure any connection to the worker is removed. If the VM was using a cloned image, I remove the clone When everything is tidied up, I ask that bbot looks for work to do """ domain_id = yield self._get_domain_id() if domain_id == -1: log.msg("{}: Domain is unexpectedly not running".format(self)) return domain = yield self._get_domain() if self.graceful_shutdown and not fast: log.msg("Graceful shutdown chosen for {}".format(self.workername)) try: yield self._pool_do(lambda conn: domain.shutdown()) except Exception as e: log.msg('{}: Graceful shutdown failed ({}). Force destroying domain'.format( self, e)) # Don't re-throw to stop propagating shutdown error if destroy was successful. yield self._pool_do(lambda conn: domain.destroy()) else: yield self._pool_do(lambda conn: domain.destroy()) if self.base_image: log.msg('{}: Removing image {}'.format(self, self.image)) os.remove(self.image) buildbot-3.4.0/master/buildbot/worker/local.py000066400000000000000000000044221413250514000213730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members import os from twisted.internet import defer from buildbot.config import error from buildbot.worker.base import Worker class LocalWorker(Worker): def checkConfig(self, name, workdir=None, **kwargs): kwargs['password'] = None super().checkConfig(name, **kwargs) self.LocalWorkerFactory = None try: # importing here to avoid dependency on buildbot worker package from buildbot_worker.bot import LocalWorker as RemoteLocalWorker self.LocalWorkerFactory = RemoteLocalWorker except ImportError: error("LocalWorker needs the buildbot-worker package installed " "(pip install buildbot-worker)") self.remote_worker = None @defer.inlineCallbacks def reconfigService(self, name, workdir=None, **kwargs): kwargs['password'] = None yield super().reconfigService(name, **kwargs) if workdir is None: workdir = name workdir = os.path.abspath( os.path.join(self.master.basedir, "workers", workdir)) if not os.path.isdir(workdir): os.makedirs(workdir) if self.remote_worker is None: # create the actual worker as a child service # we only create at reconfig, to avoid polluting memory in case of # reconfig self.remote_worker = self.LocalWorkerFactory(name, workdir) yield self.remote_worker.setServiceParent(self) else: # The case of a reconfig, we forward the parameters self.remote_worker.bot.basedir = workdir buildbot-3.4.0/master/buildbot/worker/manager.py000066400000000000000000000124411413250514000217130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.process.measured_service import MeasuredBuildbotServiceManager from buildbot.util import misc from buildbot.worker.protocols import pb as bbpb class WorkerRegistration: __slots__ = ['master', 'worker', 'pbReg'] def __init__(self, master, worker): self.master = master self.worker = worker def __repr__(self): return "<{} for {}>".format(self.__class__.__name__, repr(self.worker.workername)) @defer.inlineCallbacks def unregister(self): bs = self.worker # update with portStr=None to remove any registration in place yield self.master.workers.pb.updateRegistration( bs.workername, bs.password, None) yield self.master.workers._unregister(self) @defer.inlineCallbacks def update(self, worker_config, global_config): # For most protocols, there's nothing to do, but for PB we must # update the registration in case the port or password has changed. if 'pb' in global_config.protocols: self.pbReg = yield self.master.workers.pb.updateRegistration( worker_config.workername, worker_config.password, global_config.protocols['pb']['port']) def getPBPort(self): return self.pbReg.getPort() class WorkerManager(MeasuredBuildbotServiceManager): name = "WorkerManager" managed_services_name = "workers" config_attr = "workers" PING_TIMEOUT = 10 reconfig_priority = 127 def __init__(self, master): super().__init__() self.pb = bbpb.Listener(master) # WorkerRegistration instances keyed by worker name self.registrations = {} # connection objects keyed by worker name self.connections = {} @property def workers(self): # self.workers contains a ready Worker instance for each # potential worker, i.e. all the ones listed in the config file. 
# If the worker is connected, self.workers[workername].worker will # contain a RemoteReference to their Bot instance. If it is not # connected, that attribute will hold None. # workers attribute is actually just an alias to multiService's # namedService return self.namedServices def getWorkerByName(self, workerName): return self.registrations[workerName].worker def register(self, worker): # TODO: doc that reg.update must be called, too workerName = worker.workername reg = WorkerRegistration(self.master, worker) self.registrations[workerName] = reg return defer.succeed(reg) def _unregister(self, registration): del self.registrations[registration.worker.workername] @defer.inlineCallbacks def newConnection(self, conn, workerName): if workerName in self.connections: log.msg(("Got duplication connection from '{}'" " starting arbitration procedure").format(workerName)) old_conn = self.connections[workerName] try: yield misc.cancelAfter(self.PING_TIMEOUT, old_conn.remotePrint("master got a duplicate connection"), self.master.reactor) # if we get here then old connection is still alive, and new # should be rejected raise RuntimeError("rejecting duplicate worker") except defer.CancelledError: old_conn.loseConnection() log.msg("Connected worker '{}' ping timed out after {} seconds".format(workerName, self.PING_TIMEOUT)) except RuntimeError: raise except Exception as e: old_conn.loseConnection() log.msg("Got error while trying to ping connected worker {}:{}".format(workerName, e)) log.msg("Old connection for '{}' was lost, accepting new".format(workerName)) try: yield conn.remotePrint(message="attached") info = yield conn.remoteGetWorkerInfo() log.msg("Got workerinfo from '{}'".format(workerName)) except Exception as e: log.msg("Failed to communicate with worker '{}'\n{}".format(workerName, e)) raise conn.info = info self.connections[workerName] = conn def remove(): del self.connections[workerName] conn.notifyOnDisconnect(remove) # accept the connection return True 
buildbot-3.4.0/master/buildbot/worker/marathon.py000066400000000000000000000111671413250514000221160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import util from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.util.httpclientservice import HTTPClientService from buildbot.util.latent import CompatibleLatentWorkerMixin from buildbot.util.logger import Logger from buildbot.worker.docker import DockerBaseWorker log = Logger() class MarathonLatentWorker(CompatibleLatentWorkerMixin, DockerBaseWorker): """Marathon is a distributed docker container launcher for Mesos""" instance = None image = None _http = None def checkConfig(self, name, marathon_url, image, marathon_auth=None, marathon_extra_config=None, marathon_app_prefix="buildbot-worker/", masterFQDN=None, **kwargs): super().checkConfig(name, image=image, masterFQDN=masterFQDN, **kwargs) HTTPClientService.checkAvailable(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, name, marathon_url, image, marathon_auth=None, marathon_extra_config=None, marathon_app_prefix="buildbot-worker/", masterFQDN=None, **kwargs): # Set build_wait_timeout to 0s if not explicitly set: Starting a # container is almost immediate, we can afford doing so for each build. 
if 'build_wait_timeout' not in kwargs: kwargs['build_wait_timeout'] = 0 yield super().reconfigService(name, image=image, masterFQDN=masterFQDN, **kwargs) self._http = yield HTTPClientService.getService( self.master, marathon_url, auth=marathon_auth) if marathon_extra_config is None: marathon_extra_config = {} self.marathon_extra_config = marathon_extra_config self.marathon_app_prefix = marathon_app_prefix def getApplicationId(self): return self.marathon_app_prefix + self.getContainerName() def renderWorkerProps(self, build): return build.render((self.image, self.marathon_extra_config)) @defer.inlineCallbacks def start_instance(self, build): yield self.stop_instance(reportFailure=False) image, marathon_extra_config = \ yield self.renderWorkerPropsOnStart(build) marathon_config = { "container": { "docker": { "image": image, "network": "BRIDGE", }, "type": "DOCKER" }, "id": self.getApplicationId(), "instances": 1, "env": self.createEnvironment() } util.dictionary_merge(marathon_config, marathon_extra_config) res = yield self._http.post("/v2/apps", json=marathon_config) res_json = yield res.json() if res.code != 201: raise LatentWorkerFailedToSubstantiate( "Unable to create Marathon app: {} {}: {} {}".format( self.getApplicationId(), res.code, res_json['message'], res_json)) self.instance = res_json return True @defer.inlineCallbacks def stop_instance(self, fast=False, reportFailure=True): res = yield self._http.delete("/v2/apps/{}".format( self.getApplicationId())) self.instance = None self.resetWorkerPropsOnStop() if res.code != 200 and reportFailure: res_json = yield res.json() # the error is not documented :-( log.warn( "Unable to delete Marathon app: {id} {code}: {message} {details}", id=self.getApplicationId(), code=res.code, message=res_json.get('message'), details=res_json) buildbot-3.4.0/master/buildbot/worker/openstack.py000066400000000000000000000365421413250514000223000ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright 2013 Cray Inc. import hashlib import math import time from twisted.internet import defer from twisted.internet import threads from twisted.python import log from buildbot import config from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.util import unicode2bytes from buildbot.util.latent import CompatibleLatentWorkerMixin from buildbot.worker import AbstractLatentWorker try: from keystoneauth1 import loading from keystoneauth1 import session from novaclient import client from novaclient.exceptions import NotFound _hush_pyflakes = [client] except ImportError: NotFound = Exception client = None loading = None session = None ACTIVE = 'ACTIVE' BUILD = 'BUILD' DELETED = 'DELETED' UNKNOWN = 'UNKNOWN' class OpenStackLatentWorker(CompatibleLatentWorkerMixin, AbstractLatentWorker): instance = None _poll_resolution = 5 # hook point for tests def checkConfig(self, name, password, flavor, os_username=None, os_password=None, os_tenant_name=None, os_auth_url=None, os_user_domain=None, os_project_domain=None, os_auth_args=None, block_devices=None, region=None, image=None, meta=None, # Have a nova_args parameter to allow passing things directly # to novaclient. 
nova_args=None, client_version='2', **kwargs): if not client: config.error("The python module 'novaclient' is needed " "to use a OpenStackLatentWorker. " "Please install 'python-novaclient' package.") if not loading or not session: config.error("The python module 'keystoneauth1' is needed " "to use a OpenStackLatentWorker. " "Please install the 'keystoneauth1' package.") if block_devices is None and image is None: raise ValueError('One of block_devices or image must be given') if os_auth_args is None: if os_auth_url is None: config.error("Missing os_auth_url OpenStackLatentWorker " "and os_auth_args not provided.") if os_username is None or os_password is None: config.error("Missing os_username / os_password for OpenStackLatentWorker " "and os_auth_args not provided.") else: # ensure that at least auth_url is provided if os_auth_args.get('auth_url') is None: config.error("Missing 'auth_url' from os_auth_args for OpenStackLatentWorker") super().checkConfig(name, password, **kwargs) @defer.inlineCallbacks def reconfigService(self, name, password, flavor, os_username=None, os_password=None, os_tenant_name=None, os_auth_url=None, os_user_domain=None, os_project_domain=None, os_auth_args=None, block_devices=None, region=None, image=None, meta=None, # Have a nova_args parameter to allow passing things directly # to novaclient. 
nova_args=None, client_version='2', **kwargs): yield super().reconfigService(name, password, **kwargs) if os_auth_args is None: os_auth_args = { 'auth_url': os_auth_url, 'username': os_username, 'password': os_password } if os_tenant_name is not None: os_auth_args['project_name'] = os_tenant_name if os_user_domain is not None: os_auth_args['user_domain_name'] = os_user_domain if os_project_domain is not None: os_auth_args['project_domain_name'] = os_project_domain self.flavor = flavor self.client_version = client_version if client: os_auth_args = yield self.renderSecrets(os_auth_args) self.novaclient = self._constructClient(client_version, os_auth_args) if region is not None: self.novaclient.client.region_name = region if block_devices is not None: self.block_devices = [ self._parseBlockDevice(bd) for bd in block_devices] else: self.block_devices = None self.image = image self.meta = meta self.nova_args = nova_args if nova_args is not None else {} masterName = unicode2bytes(self.master.name) self.masterhash = hashlib.sha1(masterName).hexdigest()[:6] def _constructClient(self, client_version, auth_args): """Return a novaclient from the given args.""" auth_plugin = auth_args.pop('auth_type', 'password') loader = loading.get_plugin_loader(auth_plugin) auth = loader.load_from_options(**auth_args) sess = session.Session(auth=auth) return client.Client(client_version, session=sess) def _parseBlockDevice(self, block_device): """ Parse a higher-level view of the block device mapping into something novaclient wants. This should be similar to how Horizon presents it. Required keys: device_name: The name of the device; e.g. vda or xda. source_type: image, snapshot, volume, or blank/None. destination_type: Destination of block device: volume or local. delete_on_termination: True/False. uuid: The image, snapshot, or volume id. boot_index: Integer used for boot order. volume_size: Size of the device in GiB. 
""" client_block_device = {} client_block_device['device_name'] = block_device.get( 'device_name', 'vda') client_block_device['source_type'] = block_device.get( 'source_type', 'image') client_block_device['destination_type'] = block_device.get( 'destination_type', 'volume') client_block_device['delete_on_termination'] = bool( block_device.get('delete_on_termination', True)) client_block_device['uuid'] = block_device['uuid'] client_block_device['boot_index'] = int( block_device.get('boot_index', 0)) # Allow None here. It will be rendered later. client_block_device['volume_size'] = block_device.get('volume_size') return client_block_device @defer.inlineCallbacks def _renderBlockDevice(self, block_device, build): """Render all of the block device's values.""" rendered_block_device = yield build.render(block_device) if rendered_block_device['volume_size'] is None: source_type = rendered_block_device['source_type'] source_uuid = rendered_block_device['uuid'] volume_size = self._determineVolumeSize(source_type, source_uuid) rendered_block_device['volume_size'] = volume_size return rendered_block_device def _determineVolumeSize(self, source_type, source_uuid): """ Determine the minimum size the volume needs to be for the source. Returns the size in GiB. """ nova = self.novaclient if source_type == 'image': # The size returned for an image is in bytes. Round up to the next # integer GiB. image = nova.glance.get(source_uuid) if hasattr(image, 'OS-EXT-IMG-SIZE:size'): size = getattr(image, 'OS-EXT-IMG-SIZE:size') size_gb = int(math.ceil(size / 1024.0**3)) return size_gb elif source_type == 'volume': # Volumes are easy because they are already in GiB. 
volume = nova.volumes.get(source_uuid) return volume.size elif source_type == 'snapshot': snap = nova.volume_snapshots.get(source_uuid) return snap.size else: unknown_source = ("The source type '{}' for UUID '{}' is unknown".format(source_type, source_uuid)) raise ValueError(unknown_source) return None @defer.inlineCallbacks def _getImage(self, build): image_uuid = yield build.render(self.image) # check if we got name instead of uuid for image in self.novaclient.glance.list(): if image.name == image_uuid: image_uuid = image.id return image_uuid @defer.inlineCallbacks def _getFlavor(self, build): flavor_uuid = yield build.render(self.flavor) # check if we got name instead of uuid for flavor in self.novaclient.flavors.list(): if flavor.name == flavor_uuid: flavor_uuid = flavor.id return flavor_uuid @defer.inlineCallbacks def renderWorkerProps(self, build): image = yield self._getImage(build) flavor = yield self._getFlavor(build) nova_args = yield build.render(self.nova_args) meta = yield build.render(self.meta) worker_meta = { 'BUILDBOT:instance': self.masterhash, } if meta is None: meta = worker_meta else: meta.update(worker_meta) if self.block_devices is not None: block_devices = [] for bd in self.block_devices: rendered_block_device = yield self._renderBlockDevice(bd, build) block_devices.append(rendered_block_device) else: block_devices = None return (image, flavor, block_devices, nova_args, meta) @defer.inlineCallbacks def start_instance(self, build): if self.instance is not None: raise ValueError('instance active') image, flavor, block_devices, nova_args, meta = yield self.renderWorkerPropsOnStart(build) res = yield threads.deferToThread(self._start_instance, image, flavor, block_devices, nova_args, meta) return res def _start_instance(self, image_uuid, flavor_uuid, block_devices, nova_args, meta): # ensure existing, potentially duplicated, workers are stopped self._stop_instance(None, True) # then try to start new one boot_args = [self.workername, image_uuid, 
flavor_uuid] boot_kwargs = dict( meta=meta, block_device_mapping_v2=block_devices, **nova_args) instance = self.novaclient.servers.create(*boot_args, **boot_kwargs) # There is an issue when using sessions that the status is not # available on the first try. Trying again will work fine. Fetch the # instance to avoid that. try: instance = self.novaclient.servers.get(instance.id) except NotFound as e: log.msg('{class_name} {name} instance {instance.id} ' '({instance.name}) never found', class_name=self.__class__.__name__, name=self.workername, instance=instance) raise LatentWorkerFailedToSubstantiate(instance.id, BUILD) from e self.instance = instance log.msg('{} {} starting instance {} (image {})'.format(self.__class__.__name__, self.workername, instance.id, image_uuid)) duration = 0 interval = self._poll_resolution while instance.status.startswith(BUILD): time.sleep(interval) duration += interval if duration % 60 == 0: log.msg(('{} {} has waited {} minutes for instance {}' ).format(self.__class__.__name__, self.workername, duration // 60, instance.id)) try: instance = self.novaclient.servers.get(instance.id) except NotFound as e: log.msg('{} {} instance {} ({}) went missing'.format(self.__class__.__name__, self.workername, instance.id, instance.name)) raise LatentWorkerFailedToSubstantiate(instance.id, instance.status) from e if instance.status == ACTIVE: minutes = duration // 60 seconds = duration % 60 log.msg('{} {} instance {} ({}) started in about {} minutes {} seconds'.format( self.__class__.__name__, self.workername, instance.id, instance.name, minutes, seconds)) return [instance.id, image_uuid, '%02d:%02d:%02d' % (minutes // 60, minutes % 60, seconds)] else: self.failed_to_start(instance.id, instance.status) def stop_instance(self, fast=False): instance = self.instance self.instance = None self.resetWorkerPropsOnStop() self._stop_instance(instance, fast) def _stop_instance(self, instance_param, fast): instances = [] try: if instance_param is None: filter_f = 
lambda instance: \ instance.metadata.get("BUILDBOT:instance", "") == self.masterhash instances = list(filter(filter_f, self.novaclient.servers.findall(name=self.name))) else: instances = [self.novaclient.servers.get(instance_param.id)] except NotFound: # If can't find the instance, then it's already gone. log.msg('{} {} instance {} ({}) already terminated'.format(self.__class__.__name__, self.workername, instance_param.id, instance_param.name)) for instance in instances: if instance.status not in (DELETED, UNKNOWN): instance.delete() log.msg('{} {} terminating instance {} ({})'.format(self.__class__.__name__, self.workername, instance.id, instance.name)) buildbot-3.4.0/master/buildbot/worker/protocols/000077500000000000000000000000001413250514000217515ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/worker/protocols/__init__.py000066400000000000000000000000001413250514000240500ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/worker/protocols/base.py000066400000000000000000000121411413250514000232340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.util import ComparableMixin from buildbot.util import subscription from buildbot.util.eventual import eventually class Listener: pass class UpdateRegistrationListener(Listener): def __init__(self): super().__init__() # username : (password, portstr, PBManager registration) self._registrations = {} @defer.inlineCallbacks def updateRegistration(self, username, password, portStr): # NOTE: this method is only present on the PB protocol; others do not # use registrations if username in self._registrations: currentPassword, currentPortStr, currentReg = \ self._registrations[username] else: currentPassword, currentPortStr, currentReg = None, None, None iseq = (ComparableMixin.isEquivalent(currentPassword, password) and ComparableMixin.isEquivalent(currentPortStr, portStr)) if iseq: return currentReg if currentReg: yield currentReg.unregister() del self._registrations[username] if portStr and password: reg = yield self.get_manager().register(portStr, username, password, self._create_connection) self._registrations[username] = (password, portStr, reg) return reg return currentReg @defer.inlineCallbacks def _create_connection(self, mind, workerName): self.before_connection_setup(mind, workerName) worker = self.master.workers.getWorkerByName(workerName) conn = self.ConnectionClass(self.master, worker, mind) # inform the manager, logging any problems in the deferred accepted = yield self.master.workers.newConnection(conn, workerName) # return the Connection as the perspective if accepted: return conn else: # TODO: return something more useful raise RuntimeError("rejecting duplicate worker") class Connection: proxies = {} def __init__(self, name): self._disconnectSubs = subscription.SubscriptionPoint("disconnections from {}".format(name)) # This method replace all Impl args by their Proxy protocol implementation def createArgsProxies(self, args): newargs = {} for k, v in args.items(): for 
implclass, proxyclass in self.proxies.items(): if isinstance(v, implclass): v = proxyclass(v) newargs[k] = v return newargs # disconnection handling def wait_shutdown_started(self): d = defer.Deferred() self.notifyOnDisconnect(lambda: eventually(d.callback, None)) return d def waitShutdown(self): return self._disconnectSubs.waitForDeliveriesToFinish() def notifyOnDisconnect(self, cb): return self._disconnectSubs.subscribe(cb) def notifyDisconnected(self): self._disconnectSubs.deliver() def loseConnection(self): raise NotImplementedError # methods to send messages to the worker def remotePrint(self, message): raise NotImplementedError def remoteGetWorkerInfo(self): raise NotImplementedError def remoteSetBuilderList(self, builders): raise NotImplementedError def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args): raise NotImplementedError def remoteShutdown(self): raise NotImplementedError def remoteStartBuild(self, builderName): raise NotImplementedError def remoteInterruptCommand(self, builderName, commandId, why): raise NotImplementedError # RemoteCommand base implementation and base proxy class RemoteCommandImpl: def remote_update(self, updates): raise NotImplementedError def remote_complete(self, failure=None): raise NotImplementedError # FileWriter base implementation class FileWriterImpl: def remote_write(self, data): raise NotImplementedError def remote_utime(self, accessed_modified): raise NotImplementedError def remote_unpack(self): raise NotImplementedError def remote_close(self): raise NotImplementedError # FileReader base implementation class FileReaderImpl: def remote_read(self, maxLength): raise NotImplementedError def remote_close(self): raise NotImplementedError buildbot-3.4.0/master/buildbot/worker/protocols/null.py000066400000000000000000000077651413250514000233140ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.util.eventual import fireEventually from buildbot.warnings import warn_deprecated from buildbot.worker.protocols import base class Listener(base.Listener): pass class ProxyMixin(): def __init__(self, impl): assert isinstance(impl, self.ImplClass) self.impl = impl self._disconnect_listeners = [] def callRemote(self, message, *args, **kw): method = getattr(self.impl, "remote_{}".format(message), None) if method is None: raise AttributeError("No such method: remote_{}".format(message)) try: state = method(*args, **kw) except TypeError: log.msg("{} didn't accept {} and {}".format(method, args, kw)) raise # break callback recursion for large transfers by using fireEventually return fireEventually(state) def notifyOnDisconnect(self, cb): pass def dontNotifyOnDisconnect(self, cb): pass # just add ProxyMixin capability to the RemoteCommandProxy # so that callers of callRemote actually directly call the proper method class RemoteCommandProxy(ProxyMixin): ImplClass = base.RemoteCommandImpl class FileReaderProxy(ProxyMixin): ImplClass = base.FileReaderImpl class FileWriterProxy(ProxyMixin): ImplClass = base.FileWriterImpl class Connection(base.Connection): proxies = {base.FileWriterImpl: FileWriterProxy, base.FileReaderImpl: 
FileReaderProxy} def __init__(self, master_or_worker, worker=None): # All the existing code passes just the name to the Connection, however we'll need to # support an older versions of buildbot-worker using two parameter signature for some time. if worker is None: worker = master_or_worker else: warn_deprecated('3.2.0', 'LocalWorker: Using different version of buildbot-worker ' + 'than buildbot is not supported') super().__init__(worker.workername) self.worker = worker def loseConnection(self): self.notifyDisconnected() def remotePrint(self, message): return defer.maybeDeferred(self.worker.bot.remote_print, message) def remoteGetWorkerInfo(self): return defer.maybeDeferred(self.worker.bot.remote_getWorkerInfo) def remoteSetBuilderList(self, builders): return defer.maybeDeferred(self.worker.bot.remote_setBuilderList, builders) def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args): remoteCommand = RemoteCommandProxy(remoteCommand) args = self.createArgsProxies(args) workerforbuilder = self.worker.bot.builders[builderName] return defer.maybeDeferred(workerforbuilder.remote_startCommand, remoteCommand, commandId, commandName, args) def remoteShutdown(self): return defer.maybeDeferred(self.worker.stopService) def remoteStartBuild(self, builderName): return defer.succeed(self.worker.bot.builders[builderName].remote_startBuild()) def remoteInterruptCommand(self, builderName, commandId, why): workerforbuilder = self.worker.bot.builders[builderName] return defer.maybeDeferred(workerforbuilder.remote_interruptCommand, commandId, why) buildbot-3.4.0/master/buildbot/worker/protocols/pb.py000066400000000000000000000246451413250514000227370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import contextlib from twisted.internet import defer from twisted.python import log from twisted.spread import pb from buildbot.pbutil import decode from buildbot.util import deferwaiter from buildbot.worker.protocols import base class Listener(base.UpdateRegistrationListener): name = "pbListener" def __init__(self, master): super().__init__() self.ConnectionClass = Connection self.master = master def get_manager(self): return self.master.pbmanager def before_connection_setup(self, mind, workerName): log.msg("worker '{}' attaching from {}".format(workerName, mind.broker.transport.getPeer())) try: mind.broker.transport.setTcpKeepAlive(1) except Exception: log.err("Can't set TcpKeepAlive") class ReferenceableProxy(pb.Referenceable): def __init__(self, impl): assert isinstance(impl, self.ImplClass) self.impl = impl def __getattr__(self, name): return getattr(self.impl, name) # Proxy are just ReferenceableProxy to the Impl classes class RemoteCommand(ReferenceableProxy): ImplClass = base.RemoteCommandImpl class FileReaderProxy(ReferenceableProxy): ImplClass = base.FileReaderImpl class FileWriterProxy(ReferenceableProxy): ImplClass = base.FileWriterImpl class _NoSuchMethod(Exception): """Rewrapped pb.NoSuchMethod remote exception""" @contextlib.contextmanager def _wrapRemoteException(): try: yield except pb.RemoteError as e: if e.remoteType in (b'twisted.spread.flavors.NoSuchMethod', 'twisted.spread.flavors.NoSuchMethod'): raise _NoSuchMethod(e) from e raise class 
Connection(base.Connection, pb.Avatar): proxies = {base.FileWriterImpl: FileWriterProxy, base.FileReaderImpl: FileReaderProxy} # TODO: configure keepalive_interval in # c['protocols']['pb']['keepalive_interval'] keepalive_timer = None keepalive_interval = 3600 info = None def __init__(self, master, worker, mind): super().__init__(worker.workername) self.master = master self.worker = worker self.mind = mind self._keepalive_waiter = deferwaiter.DeferWaiter() self._keepalive_action_handler = \ deferwaiter.RepeatedActionHandler(master.reactor, self._keepalive_waiter, self.keepalive_interval, self._do_keepalive) # methods called by the PBManager @defer.inlineCallbacks def attached(self, mind): self.startKeepaliveTimer() self.notifyOnDisconnect(self._stop_keepalive_timer) # pbmanager calls perspective.attached; pass this along to the # worker yield self.worker.attached(self) # and then return a reference to the avatar return self def detached(self, mind): self.stopKeepaliveTimer() self.mind = None self.notifyDisconnected() # disconnection handling @defer.inlineCallbacks def _stop_keepalive_timer(self): self.stopKeepaliveTimer() yield self._keepalive_waiter.wait() def loseConnection(self): self.stopKeepaliveTimer() tport = self.mind.broker.transport # this is the polite way to request that a socket be closed tport.loseConnection() try: # but really we don't want to wait for the transmit queue to # drain. The remote end is unlikely to ACK the data, so we'd # probably have to wait for a (20-minute) TCP timeout. # tport._closeSocket() # however, doing _closeSocket (whether before or after # loseConnection) somehow prevents the notifyOnDisconnect # handlers from being run. Bummer. 
tport.offset = 0 tport.dataBuffer = b"" except Exception: # however, these hacks are pretty internal, so don't blow up if # they fail or are unavailable log.msg("failed to accelerate the shutdown process") # keepalive handling def _do_keepalive(self): return self.mind.callRemote('print', message="keepalive") def stopKeepaliveTimer(self): self._keepalive_action_handler.stop() def startKeepaliveTimer(self): assert self.keepalive_interval self._keepalive_action_handler.start() # methods to send messages to the worker def remotePrint(self, message): return self.mind.callRemote('print', message=message) @defer.inlineCallbacks def remoteGetWorkerInfo(self): try: with _wrapRemoteException(): # Try to call buildbot-worker method. info = yield self.mind.callRemote('getWorkerInfo') return decode(info) except _NoSuchMethod: yield self.remotePrint( "buildbot-slave detected, failing back to deprecated buildslave API. " "(Ignoring missing getWorkerInfo method.)") info = {} # Probably this is deprecated buildslave. log.msg("Worker.getWorkerInfo is unavailable - falling back to " "deprecated buildslave API") try: with _wrapRemoteException(): info = yield self.mind.callRemote('getSlaveInfo') except _NoSuchMethod: log.msg("Worker.getSlaveInfo is unavailable - ignoring") # newer workers send all info in one command if "slave_commands" in info: assert "worker_commands" not in info info["worker_commands"] = info.pop("slave_commands") return info # Old version buildslave - need to retrieve list of supported # commands and version using separate requests. 
try: with _wrapRemoteException(): info["worker_commands"] = yield self.mind.callRemote( 'getCommands') except _NoSuchMethod: log.msg("Worker.getCommands is unavailable - ignoring") try: with _wrapRemoteException(): info["version"] = yield self.mind.callRemote('getVersion') except _NoSuchMethod: log.msg("Worker.getVersion is unavailable - ignoring") return decode(info) @defer.inlineCallbacks def remoteSetBuilderList(self, builders): builders = yield self.mind.callRemote('setBuilderList', builders) self.builders = builders return builders def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args): workerforbuilder = self.builders.get(builderName) remoteCommand = RemoteCommand(remoteCommand) args = self.createArgsProxies(args) return workerforbuilder.callRemote('startCommand', remoteCommand, commandId, commandName, args) @defer.inlineCallbacks def remoteShutdown(self): # First, try the "new" way - calling our own remote's shutdown # method. The method was only added in 0.8.3, so ignore NoSuchMethod # failures. @defer.inlineCallbacks def new_way(): try: with _wrapRemoteException(): yield self.mind.callRemote('shutdown') # successful shutdown request return True except _NoSuchMethod: # fall through to the old way return False except pb.PBConnectionLost: # the worker is gone, so call it finished return True if (yield new_way()): return # done! # Now, the old way. Look for a builder with a remote reference to the # client side worker. If we can find one, then call "shutdown" on the # remote builder, which will cause the worker buildbot process to exit. def old_way(): d = None for b in self.worker.workerforbuilders.values(): if b.remote: d = b.mind.callRemote("shutdown") break if d: name = self.worker.workername log.msg("Shutting down (old) worker: {}".format(name)) # The remote shutdown call will not complete successfully since # the buildbot process exits almost immediately after getting # the shutdown request. 
# Here we look at the reason why the remote call failed, and if # it's because the connection was lost, that means the worker # shutdown as expected. @d.addErrback def _errback(why): if why.check(pb.PBConnectionLost): log.msg("Lost connection to {}".format(name)) else: log.err("Unexpected error when trying to shutdown {}".format(name)) return d log.err("Couldn't find remote builder to shut down worker") return defer.succeed(None) yield old_way() def remoteStartBuild(self, builderName): workerforbuilder = self.builders.get(builderName) return workerforbuilder.callRemote('startBuild') def remoteInterruptCommand(self, builderName, commandId, why): workerforbuilder = self.builders.get(builderName) return defer.maybeDeferred(workerforbuilder.callRemote, "interruptCommand", commandId, why) # perspective methods called by the worker def perspective_keepalive(self): self.worker.messageReceivedFromWorker() def perspective_shutdown(self): self.worker.messageReceivedFromWorker() self.worker.shutdownRequested() buildbot-3.4.0/master/buildbot/worker/upcloud.py000066400000000000000000000216741413250514000217640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # -*- Coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function import hashlib import socket from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot import util from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.util.httpclientservice import HTTPClientService from buildbot.worker import AbstractLatentWorker DEFAULT_ZONE = "de-fra1" DEFAULT_PLAN = "1xCPU-1GB" DEFAULT_BASE_URL = "https://api.upcloud.com/1.3" DEFAULT_OS_DISK_SIZE = 10 DEFAULT_CORE_NUMBER = 1 DEFAULT_MEMORY_AMOUNT = 512 class UpcloudLatentWorker(AbstractLatentWorker): instance = None def checkConfig(self, name, password=None, api_username=None, api_password=None, image=None, hostconfig=None, base_url=DEFAULT_BASE_URL, masterFQDN=None, **kwargs): if image is None or api_username is None or api_password is None: config.error("UpcloudLatentWorker: You need to specify at least" " an image name, zone, api_username and api_password") AbstractLatentWorker.checkConfig(self, name, password, **kwargs) @defer.inlineCallbacks def reconfigService(self, name, password=None, zone=None, api_username=None, api_password=None, image=None, hostconfig=None, base_url=DEFAULT_BASE_URL, masterFQDN=None, **kwargs): if password is None: password = self.getRandomPass() if masterFQDN is None: masterFQDN = socket.getfqdn() self.masterFQDN = masterFQDN self.image = image if hostconfig is None: hostconfig = {} self.hostconfig = hostconfig self.client = yield HTTPClientService.getService(self.master, base_url, auth=(api_username, api_password), debug=kwargs.get('debug', False)) masterName = util.unicode2bytes(self.master.name) self.masterhash = hashlib.sha1(masterName).hexdigest()[:6] yield AbstractLatentWorker.reconfigService(self, name, password, **kwargs) @defer.inlineCallbacks def _resolve_image(self, image): # get templates result = yield 
self.client.get("/storage/template") uuid = None if result.code == 200: templates = yield result.json() for template in templates["storages"]["storage"]: if image == template["title"]: uuid = template["uuid"] break return uuid def getContainerName(self): return ('buildbot-{worker}-{hash}'.format(worker=self.workername, hash=self.masterhash)).replace("_", "-") @defer.inlineCallbacks def start_instance(self, build): if self.instance is not None: raise ValueError('instance active') # convert image to UUID image, hostconfig = yield build.render([self.image, self.hostconfig]) image_uuid = yield self._resolve_image(image) if image_uuid is None: log.msg("{} {}: Instance creation failed: Cannot find template {}".format( self.__class__.__name__, self.workername, image)) raise LatentWorkerFailedToSubstantiate(self.getContainerName(), 'resolving image') # compose json req = { "server": { "zone": hostconfig.get('zone', DEFAULT_ZONE), "title": self.getContainerName(), "hostname": hostconfig.get('hostname', self.name), "user_data": hostconfig.get('user_data', ""), "login_user": { "username": "root", "ssh_keys": { "ssh_key": hostconfig.get('ssh_keys', []), }, }, "password_delivery": "none", "storage_devices": { "storage_device": [{ "action": "clone", "storage": image_uuid, "title": self.getContainerName(), "size": hostconfig.get("os_disk_size", DEFAULT_OS_DISK_SIZE), "tier": "maxiops", }], } } } req["server"]["plan"] = hostconfig.get("plan", DEFAULT_PLAN) if req["server"]["plan"] == "custom": req["server"]["core_number"] = hostconfig.get("core_number", DEFAULT_CORE_NUMBER) req["server"]["memory_amount"] = hostconfig.get("memory_amount", DEFAULT_MEMORY_AMOUNT) # request instance result = yield self.client.post("/server", json=req) if result.code // 100 != 2: reason = yield result.content() log.msg("{} {}: Instance creation failed: {} {}".format( self.__class__.__name__, self.workername, result.code, reason)) self.failed_to_start(req['server']['hostname'], 'starting') instance = 
yield result.json() self.instance = instance["server"] self.instance["Id"] = self.instance["uuid"].split("-")[-1] # wait until server is actually up while (yield self._state()) not in ["started"]: yield util.asyncSleep(1, reactor=self.master.reactor) result = yield self.client.get("/server/{}".format(self.instance["uuid"])) instance = yield result.json() log.msg("{} {}: Instance {} created (root password {})".format( self.__class__.__name__, self.workername, self.instance["Id"], self.instance['password'])) # include root password as worker property self.properties.setProperty("root_password", self.instance['password'], "Worker") return [self.instance["Id"], image] @defer.inlineCallbacks def _state(self): result = yield self.client.get("/server/{}".format(self.instance["uuid"])) if result.code == 404: return "absent" else: server = yield result.json() return server["server"]["state"] @defer.inlineCallbacks def stop_instance(self, fast=False): if self.instance is None: # be gentle. Something may just be trying to alert us that an # instance never attached, and it's because, somehow, we never # started. 
return log.msg('{} {}: Stopping instance {}...'.format( self.__class__.__name__, self.workername, self.instance["Id"])) result = yield self.client.post("/server/{}/stop".format(self.instance["uuid"],), json={ "stop_server": { "stop_type": "hard", "timeout": "1" }} ) if result.code // 100 != 2: reason = yield result.content() reason = '{} {} failed to stop instance {} ({}): {}'.format(self.__class__.__name__, self.workername, self.instance["Id"], self._state(), reason.decode()) self.instance = None raise Exception(reason) while (yield self._state()) not in ["stopped", "absent"]: yield util.asyncSleep(1, reactor=self.master.reactor) # destroy it result = yield self.client.delete("/server/{}?storages=1".format(self.instance["uuid"])) if result.code // 100 != 2: reason = yield result.content() reason = '{} {} failed to delete instance {} ({}): {}'.format(self.__class__.__name__, self.workername, self.instance["Id"], self._state(), reason.decode()) self.instance = None raise Exception(reason) buildbot-3.4.0/master/buildbot/www/000077500000000000000000000000001413250514000172405ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/www/__init__.py000066400000000000000000000000001413250514000213370ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/www/auth.py000066400000000000000000000171601413250514000205600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from abc import ABCMeta from abc import abstractmethod from twisted.cred.checkers import FilePasswordDB from twisted.cred.checkers import ICredentialsChecker from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse from twisted.cred.credentials import IUsernamePassword from twisted.cred.error import UnauthorizedLogin from twisted.cred.portal import IRealm from twisted.cred.portal import Portal from twisted.internet import defer from twisted.web.error import Error from twisted.web.guard import BasicCredentialFactory from twisted.web.guard import DigestCredentialFactory from twisted.web.guard import HTTPAuthSessionWrapper from twisted.web.resource import IResource from zope.interface import implementer from buildbot.util import bytes2unicode from buildbot.util import config from buildbot.util import unicode2bytes from buildbot.www import resource class AuthRootResource(resource.Resource): def getChild(self, path, request): # return dynamically generated resources if path == b'login': return self.master.www.auth.getLoginResource() elif path == b'logout': return self.master.www.auth.getLogoutResource() return super().getChild(path, request) class AuthBase(config.ConfiguredMixin): def __init__(self, userInfoProvider=None): self.userInfoProvider = userInfoProvider def reconfigAuth(self, master, new_config): self.master = master def maybeAutoLogin(self, request): return defer.succeed(None) def getLoginResource(self): raise Error(501, b"not implemented") def getLogoutResource(self): return LogoutResource(self.master) @defer.inlineCallbacks def updateUserInfo(self, request): session = request.getSession() if self.userInfoProvider is not None: infos = yield 
self.userInfoProvider.getUserInfo(session.user_info['username']) session.user_info.update(infos) session.updateSession(request) def getConfigDict(self): return {'name': type(self).__name__} class UserInfoProviderBase(config.ConfiguredMixin): name = "noinfo" def getUserInfo(self, username): return defer.succeed({'email': username}) class LoginResource(resource.Resource): def render_GET(self, request): return self.asyncRenderHelper(request, self.renderLogin) @defer.inlineCallbacks def renderLogin(self, request): raise NotImplementedError class NoAuth(AuthBase): pass class RemoteUserAuth(AuthBase): header = b"REMOTE_USER" headerRegex = re.compile(br"(?P[^ @]+)@(?P[^ @]+)") def __init__(self, header=None, headerRegex=None, **kwargs): super().__init__(**kwargs) if self.userInfoProvider is None: self.userInfoProvider = UserInfoProviderBase() if header is not None: self.header = unicode2bytes(header) if headerRegex is not None: self.headerRegex = re.compile(unicode2bytes(headerRegex)) @defer.inlineCallbacks def maybeAutoLogin(self, request): header = request.getHeader(self.header) if header is None: msg = b"missing http header " + self.header + b". Check your reverse proxy config!" raise Error(403, msg) res = self.headerRegex.match(header) if res is None: msg = b'http header does not match regex! 
"' + header + b'" not matching ' + \ self.headerRegex.pattern raise Error(403, msg) session = request.getSession() user_info = {k: bytes2unicode(v) for k, v in res.groupdict().items()} if session.user_info != user_info: session.user_info = user_info yield self.updateUserInfo(request) @implementer(IRealm) class AuthRealm: def __init__(self, master, auth): self.auth = auth self.master = master def requestAvatar(self, avatarId, mind, *interfaces): if IResource in interfaces: return (IResource, PreAuthenticatedLoginResource(self.master, avatarId), lambda: None) raise NotImplementedError() class TwistedICredAuthBase(AuthBase): def __init__(self, credentialFactories, checkers, **kwargs): super().__init__(**kwargs) if self.userInfoProvider is None: self.userInfoProvider = UserInfoProviderBase() self.credentialFactories = credentialFactories self.checkers = checkers def getLoginResource(self): return HTTPAuthSessionWrapper( Portal(AuthRealm(self.master, self), self.checkers), self.credentialFactories) class HTPasswdAuth(TwistedICredAuthBase): def __init__(self, passwdFile, **kwargs): super().__init__([DigestCredentialFactory(b"MD5", b"buildbot"), BasicCredentialFactory(b"buildbot")], [FilePasswordDB(passwdFile)], **kwargs) class UserPasswordAuth(TwistedICredAuthBase): def __init__(self, users, **kwargs): if isinstance(users, dict): users = {user: unicode2bytes(pw) for user, pw in users.items()} elif isinstance(users, list): users = [(user, unicode2bytes(pw)) for user, pw in users] super().__init__([DigestCredentialFactory(b"MD5", b"buildbot"), BasicCredentialFactory(b"buildbot")], [InMemoryUsernamePasswordDatabaseDontUse(**dict(users))], **kwargs) @implementer(ICredentialsChecker) class CustomAuth(TwistedICredAuthBase): __metaclass__ = ABCMeta credentialInterfaces = [IUsernamePassword] def __init__(self, **kwargs): super().__init__([BasicCredentialFactory(b"buildbot")], [self], **kwargs) def requestAvatarId(self, cred): if self.check_credentials(cred.username, 
cred.password): return defer.succeed(cred.username) return defer.fail(UnauthorizedLogin()) @abstractmethod def check_credentials(username, password): return False def _redirect(master, request): url = request.args.get(b"redirect", [b"/"])[0] url = bytes2unicode(url) return resource.Redirect(master.config.buildbotURL + "#" + url) class PreAuthenticatedLoginResource(LoginResource): # a LoginResource which is already authenticated via a # HTTPAuthSessionWrapper def __init__(self, master, username): super().__init__(master) self.username = username @defer.inlineCallbacks def renderLogin(self, request): session = request.getSession() session.user_info = dict(username=bytes2unicode(self.username)) yield self.master.www.auth.updateUserInfo(request) raise _redirect(self.master, request) class LogoutResource(resource.Resource): def render_GET(self, request): session = request.getSession() session.expire() session.updateSession(request) request.redirect(_redirect(self.master, request).url) return b'' buildbot-3.4.0/master/buildbot/www/authz/000077500000000000000000000000001413250514000203735ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/www/authz/__init__.py000066400000000000000000000004121413250514000225010ustar00rootroot00000000000000from buildbot.www.authz.authz import Authz from buildbot.www.authz.authz import Forbidden from buildbot.www.authz.authz import fnmatchStrMatcher from buildbot.www.authz.authz import reStrMatcher __all__ = ["Authz", "fnmatchStrMatcher", "reStrMatcher", "Forbidden"] buildbot-3.4.0/master/buildbot/www/authz/authz.py000066400000000000000000000066161413250514000221110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import fnmatch import re from twisted.internet import defer from twisted.web.error import Error from zope.interface import implementer from buildbot.interfaces import IConfigured from buildbot.util import unicode2bytes from buildbot.www.authz.roles import RolesFromOwner class Forbidden(Error): def __init__(self, msg): super().__init__(403, msg) # fnmatch and re.match are reversed API, we cannot just rename them def fnmatchStrMatcher(value, match): return fnmatch.fnmatch(value, match) def reStrMatcher(value, match): return re.match(match, value) @implementer(IConfigured) class Authz: def getConfigDict(self): return {} def __init__(self, allowRules=None, roleMatchers=None, stringsMatcher=fnmatchStrMatcher): self.match = stringsMatcher if allowRules is None: allowRules = [] if roleMatchers is None: roleMatchers = [] self.allowRules = allowRules self.roleMatchers = [ r for r in roleMatchers if not isinstance(r, RolesFromOwner)] self.ownerRoleMatchers = [ r for r in roleMatchers if isinstance(r, RolesFromOwner)] def setMaster(self, master): self.master = master for r in self.roleMatchers + self.ownerRoleMatchers + self.allowRules: r.setAuthz(self) def getRolesFromUser(self, userDetails): roles = set() for roleMatcher in self.roleMatchers: roles.update(set(roleMatcher.getRolesFromUser(userDetails))) return roles def getOwnerRolesFromUser(self, userDetails, owner): roles = set() for roleMatcher in self.ownerRoleMatchers: roles.update(set(roleMatcher.getRolesFromUser(userDetails, owner))) 
return roles @defer.inlineCallbacks def assertUserAllowed(self, ep, action, options, userDetails): roles = self.getRolesFromUser(userDetails) for rule in self.allowRules: match = yield rule.match(ep, action, options) if match is not None: # only try to get owner if there are owner Matchers if self.ownerRoleMatchers: owner = yield match.getOwner() if owner is not None: roles.update( self.getOwnerRolesFromUser(userDetails, owner)) for role in roles: if self.match(role, rule.role): return None if not rule.defaultDeny: continue # check next suitable rule if not denied error_msg = unicode2bytes("you need to have role '{}'".format(rule.role)) raise Forbidden(error_msg) return None buildbot-3.4.0/master/buildbot/www/authz/endpointmatchers.py000066400000000000000000000157541413250514000243300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import inspect from twisted.internet import defer from buildbot.data.exceptions import InvalidPathError from buildbot.util import bytes2unicode class EndpointMatcherBase: def __init__(self, role, defaultDeny=True): self.role = role self.defaultDeny = defaultDeny self.owner = None def setAuthz(self, authz): self.authz = authz self.master = authz.master def match(self, ep, action="get", options=None): if options is None: options = {} try: epobject, epdict = self.master.data.getEndpoint(ep) for klass in inspect.getmro(epobject.__class__): m = getattr( self, "match_" + klass.__name__ + "_" + action, None) if m is not None: return m(epobject, epdict, options) m = getattr(self, "match_" + klass.__name__, None) if m is not None: return m(epobject, epdict, options) except InvalidPathError: return defer.succeed(None) return defer.succeed(None) def __repr__(self): # a repr for debugging. displays the class, and string attributes args = [] for k, v in self.__dict__.items(): if isinstance(v, str): args.append("{}='{}'".format(k, v)) return "{}({})".format(self.__class__.__name__, ", ".join(args)) class Match: def __init__(self, master, build=None, buildrequest=None, buildset=None): self.master = master self.build = build self.buildrequest = buildrequest self.buildset = buildset def getOwner(self): if self.buildset: return self.getOwnerFromBuildset(self.buildset) elif self.buildrequest: return self.getOwnerFromBuildRequest(self.buildrequest) elif self.build: return self.getOwnerFromBuild(self.build) return defer.succeed(None) @defer.inlineCallbacks def getOwnerFromBuild(self, build): br = yield self.master.data.get(("buildrequests", build['buildrequestid'])) owner = yield self.getOwnerFromBuildRequest(br) return owner @defer.inlineCallbacks def getOwnerFromBuildsetOrBuildRequest(self, buildsetorbuildrequest): props = yield self.master.data.get(("buildsets", buildsetorbuildrequest['buildsetid'], "properties")) if 'owner' in props: return 
props['owner'][0] return None getOwnerFromBuildRequest = getOwnerFromBuildsetOrBuildRequest getOwnerFromBuildSet = getOwnerFromBuildsetOrBuildRequest class AnyEndpointMatcher(EndpointMatcherBase): def match(self, ep, action="get", options=None): return defer.succeed(Match(self.master)) class AnyControlEndpointMatcher(EndpointMatcherBase): def match(self, ep, action="", options=None): if bytes2unicode(action).lower() != "get": return defer.succeed(Match(self.master)) return defer.succeed(None) class StopBuildEndpointMatcher(EndpointMatcherBase): def __init__(self, builder=None, **kwargs): self.builder = builder super().__init__(**kwargs) @defer.inlineCallbacks def matchFromBuilderId(self, builderid): if builderid is not None: builder = yield self.master.data.get(('builders', builderid)) buildername = builder['name'] return self.authz.match(buildername, self.builder) return False @defer.inlineCallbacks def match_BuildEndpoint_stop(self, epobject, epdict, options): build = yield epobject.get({}, epdict) if self.builder is None: # no filtering needed: we match! return Match(self.master, build=build) # if filtering needed, we need to get some more info ret = yield self.matchFromBuilderId(build['builderid']) if ret: return Match(self.master, build=build) return None @defer.inlineCallbacks def match_BuildRequestEndpoint_stop(self, epobject, epdict, options): buildrequest = yield epobject.get({}, epdict) if self.builder is None: # no filtering needed: we match! 
return Match(self.master, buildrequest=buildrequest) # if filtering needed, we need to get some more info ret = yield self.matchFromBuilderId(buildrequest['builderid']) if ret: return Match(self.master, buildrequest=buildrequest) return None class ForceBuildEndpointMatcher(EndpointMatcherBase): def __init__(self, builder=None, **kwargs): self.builder = builder super().__init__(**kwargs) @defer.inlineCallbacks def match_ForceSchedulerEndpoint_force(self, epobject, epdict, options): if self.builder is None: # no filtering needed: we match without querying! return Match(self.master) sched = yield epobject.findForceScheduler(epdict['schedulername']) if sched is not None: builderNames = options.get('builderNames') builderid = options.get('builderid') builderNames = yield sched.computeBuilderNames(builderNames, builderid) for buildername in builderNames: if self.authz.match(buildername, self.builder): return Match(self.master) return None class RebuildBuildEndpointMatcher(EndpointMatcherBase): def __init__(self, builder=None, **kwargs): self.builder = builder super().__init__(**kwargs) @defer.inlineCallbacks def match_BuildEndpoint_rebuild(self, epobject, epdict, options): build = yield epobject.get({}, epdict) return Match(self.master, build=build) class EnableSchedulerEndpointMatcher(EndpointMatcherBase): def match_SchedulerEndpoint_enable(self, epobject, epdict, options): return defer.succeed(Match(self.master)) ##### # not yet implemented class ViewBuildsEndpointMatcher(EndpointMatcherBase): def __init__(self, branch=None, project=None, builder=None, **kwargs): super().__init__(**kwargs) self.branch = branch self.project = project self.builder = builder class BranchEndpointMatcher(EndpointMatcherBase): def __init__(self, branch, **kwargs): self.branch = branch super().__init__(**kwargs) buildbot-3.4.0/master/buildbot/www/authz/roles.py000066400000000000000000000057631413250514000221040ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members class RolesFromBase: def __init__(self): pass def getRolesFromUser(self, userDetails): return [] def setAuthz(self, authz): self.authz = authz self.master = authz.master class RolesFromGroups(RolesFromBase): def __init__(self, groupPrefix=""): super().__init__() self.groupPrefix = groupPrefix def getRolesFromUser(self, userDetails): roles = [] if 'groups' in userDetails: for group in userDetails['groups']: if group.startswith(self.groupPrefix): roles.append(group[len(self.groupPrefix):]) return roles class RolesFromEmails(RolesFromBase): def __init__(self, **kwargs): super().__init__() self.roles = {} for role, emails in kwargs.items(): for email in emails: self.roles.setdefault(email, []).append(role) def getRolesFromUser(self, userDetails): if 'email' in userDetails: return self.roles.get(userDetails['email'], []) return [] class RolesFromDomain(RolesFromEmails): def __init__(self, **kwargs): super().__init__() self.domain_roles = {} for role, domains in kwargs.items(): for domain in domains: self.domain_roles.setdefault(domain, []).append(role) def getRolesFromUser(self, userDetails): if 'email' in userDetails: email = userDetails['email'] edomain = email.split('@')[-1] return self.domain_roles.get(edomain, []) return [] class RolesFromOwner(RolesFromBase): def __init__(self, role): super().__init__() 
self.role = role def getRolesFromUser(self, userDetails, owner): if 'email' in userDetails: if userDetails['email'] == owner and owner is not None: return [self.role] return [] class RolesFromUsername(RolesFromBase): def __init__(self, roles, usernames): self.roles = roles if None in usernames: from buildbot import config config.error('Usernames cannot be None') self.usernames = usernames def getRolesFromUser(self, userDetails): if userDetails.get('username') in self.usernames: return self.roles return [] buildbot-3.4.0/master/buildbot/www/avatar.py000066400000000000000000000216611413250514000210760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import base64 import hashlib from urllib.parse import urlencode from urllib.parse import urljoin from urllib.parse import urlparse from urllib.parse import urlunparse from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.util import httpclientservice from buildbot.util import unicode2bytes from buildbot.util.config import ConfiguredMixin from buildbot.www import resource class AvatarBase(ConfiguredMixin): name = "noavatar" def getUserAvatar(self, email, username, size, defaultAvatarUrl): raise NotImplementedError() class AvatarGitHub(AvatarBase): name = "github" DEFAULT_GITHUB_API_URL = 'https://api.github.com' def __init__(self, github_api_endpoint=None, token=None, client_id=None, client_secret=None, debug=False, verify=False): httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) self.github_api_endpoint = github_api_endpoint if github_api_endpoint is None: self.github_api_endpoint = self.DEFAULT_GITHUB_API_URL self.token = token self.client_creds = None if bool(client_id) != bool(client_secret): config.error('client_id and client_secret must be both provided or none') if client_id: if token: config.error('client_id and client_secret must not be provided when token is') self.client_creds = base64.b64encode(b':'.join( cred.encode('utf-8') for cred in (client_id, client_secret) )).decode('ascii') self.debug = debug self.verify = verify self.master = None self.client = None @defer.inlineCallbacks def _get_http_client(self): if self.client is not None: return self.client headers = { 'User-Agent': 'Buildbot', } if self.token: headers['Authorization'] = 'token ' + self.token elif self.client_creds: headers['Authorization'] = 'basic ' + self.client_creds self.client = yield httpclientservice.HTTPClientService.getService(self.master, self.github_api_endpoint, headers=headers, debug=self.debug, verify=self.verify) return self.client @defer.inlineCallbacks def 
_get_avatar_by_username(self, username): headers = { 'Accept': 'application/vnd.github.v3+json', } url = '/users/{}'.format(username) http = yield self._get_http_client() res = yield http.get(url, headers=headers) if res.code == 404: # Not found return None if 200 <= res.code < 300: data = yield res.json() return data['avatar_url'] log.msg('Failed looking up user: response code {}'.format(res.code)) return None @defer.inlineCallbacks def _search_avatar_by_user_email(self, email): headers = { 'Accept': 'application/vnd.github.v3+json', } query = '{} in:email'.format(email) url = '/search/users?{}'.format(urlencode({ 'q': query, })) http = yield self._get_http_client() res = yield http.get(url, headers=headers) if 200 <= res.code < 300: data = yield res.json() if data['total_count'] == 0: # Not found return None return data['items'][0]['avatar_url'] log.msg('Failed searching user by email: response code {}'.format(res.code)) return None @defer.inlineCallbacks def _search_avatar_by_commit(self, email): headers = { 'Accept': 'application/vnd.github.v3+json,application/vnd.github.cloak-preview', } query = { 'q': 'author-email:{}'.format(email), 'sort': 'committer-date', 'per_page': '1', } sorted_query = sorted(query.items(), key=lambda x: x[0]) url = '/search/commits?{}'.format(urlencode(sorted_query)) http = yield self._get_http_client() res = yield http.get(url, headers=headers) if 200 <= res.code < 300: data = yield res.json() if data['total_count'] == 0: # Not found return None author = data['items'][0]['author'] if author is None: # No Github account found return None return author['avatar_url'] log.msg('Failed searching user by commit: response code {}'.format(res.code)) return None def _add_size_to_url(self, avatar, size): parts = urlparse(avatar) query = parts.query if query: query += '&' query += 's={0}'.format(size) return urlunparse((parts.scheme, parts.netloc, parts.path, parts.params, query, parts.fragment)) @defer.inlineCallbacks def getUserAvatar(self, 
email, username, size, defaultAvatarUrl): avatar = None if username: username = username.decode('utf-8') if email: email = email.decode('utf-8') if username: avatar = yield self._get_avatar_by_username(username) if not avatar and email: # Try searching a user with said mail avatar = yield self._search_avatar_by_user_email(email) if not avatar and email: # No luck, try to find a commit with this email avatar = yield self._search_avatar_by_commit(email) if not avatar: # No luck return None if size: avatar = self._add_size_to_url(avatar, size) raise resource.Redirect(avatar) class AvatarGravatar(AvatarBase): name = "gravatar" # gravatar does not want intranet URL, which is most of where the bots are # just use same default as github (retro) default = "retro" def getUserAvatar(self, email, username, size, defaultAvatarUrl): # construct the url emailBytes = unicode2bytes(email.lower()) emailHash = hashlib.md5(emailBytes) gravatar_url = "//www.gravatar.com/avatar/" gravatar_url += emailHash.hexdigest() + "?" 
if self.default != "url": defaultAvatarUrl = self.default url = {'d': defaultAvatarUrl, 's': str(size)} sorted_url = sorted(url.items(), key=lambda x: x[0]) gravatar_url += urlencode(sorted_url) raise resource.Redirect(gravatar_url) class AvatarResource(resource.Resource): # enable reconfigResource calls needsReconfig = True defaultAvatarUrl = b"img/nobody.png" def reconfigResource(self, new_config): self.avatarMethods = new_config.www.get('avatar_methods', []) self.defaultAvatarFullUrl = urljoin( unicode2bytes(new_config.buildbotURL), unicode2bytes(self.defaultAvatarUrl)) self.cache = {} # ensure the avatarMethods is a iterable if isinstance(self.avatarMethods, AvatarBase): self.avatarMethods = (self.avatarMethods, ) for method in self.avatarMethods: method.master = self.master def render_GET(self, request): return self.asyncRenderHelper(request, self.renderAvatar) @defer.inlineCallbacks def renderAvatar(self, request): email = request.args.get(b"email", [b""])[0] size = request.args.get(b"size", [32])[0] try: size = int(size) except ValueError: size = 32 username = request.args.get(b"username", [None])[0] cache_key = (email, username, size) if self.cache.get(cache_key): raise self.cache[cache_key] for method in self.avatarMethods: try: res = yield method.getUserAvatar(email, username, size, self.defaultAvatarFullUrl) except resource.Redirect as r: self.cache[cache_key] = r raise if res is not None: request.setHeader(b'content-type', res[0]) request.setHeader(b'content-length', unicode2bytes(str(len(res[1])))) request.write(res[1]) return raise resource.Redirect(self.defaultAvatarUrl) buildbot-3.4.0/master/buildbot/www/change_hook.py000066400000000000000000000153001413250514000220560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # code inspired/copied from contrib/github_buildbot # and inspired from code from the Chromium project # otherwise, Andrew Melo wrote the rest # but "the rest" is pretty minimal import re from datetime import datetime from twisted.internet import defer from twisted.python import log from twisted.web import server from buildbot.plugins.db import get_plugins from buildbot.util import bytes2unicode from buildbot.util import datetime2epoch from buildbot.util import unicode2bytes from buildbot.www import resource class ChangeHookResource(resource.Resource): # this is a cheap sort of template thingy contentType = "text/html; charset=utf-8" children = {} needsReconfig = True def __init__(self, dialects=None, master=None): """ The keys of 'dialects' select a modules to load under master/buildbot/www/hooks/ The value is passed to the module's getChanges function, providing configuration options to the dialect. 
""" super().__init__(master) if dialects is None: dialects = {} self.dialects = dialects self._dialect_handlers = {} self.request_dialect = None self._plugins = get_plugins("webhooks") def reconfigResource(self, new_config): self.dialects = new_config.www.get('change_hook_dialects', {}) def getChild(self, name, request): return self def render_GET(self, request): """ Responds to events and starts the build process different implementations can decide on what methods they will accept """ return self.render_POST(request) def render_POST(self, request): """ Responds to events and starts the build process different implementations can decide on what methods they will accept :arguments: request the http request object """ try: d = self.getAndSubmitChanges(request) except Exception: d = defer.fail() def ok(_): request.setResponseCode(202) request.finish() def err(why): code = 500 if why.check(ValueError): code = 400 msg = unicode2bytes(why.getErrorMessage()) else: log.err(why, "adding changes from web hook") msg = b'Error processing changes.' 
request.setResponseCode(code, msg) request.write(msg) request.finish() d.addCallbacks(ok, err) return server.NOT_DONE_YET @defer.inlineCallbacks def getAndSubmitChanges(self, request): changes, src = yield self.getChanges(request) if not changes: request.write(b"no change found") else: yield self.submitChanges(changes, request, src) request.write(unicode2bytes("{} change found".format(len(changes)))) def makeHandler(self, dialect): """create and cache the handler object for this dialect""" if dialect not in self.dialects: m = "The dialect specified, '{}', wasn't whitelisted in change_hook".format(dialect) log.msg(m) log.msg("Note: if dialect is 'base' then it's possible your URL is " "malformed and we didn't regex it properly") raise ValueError(m) if dialect not in self._dialect_handlers: if dialect not in self._plugins: m = ("The dialect specified, '{}', is not registered as " "a buildbot.webhook plugin").format(dialect) log.msg(m) raise ValueError(m) options = self.dialects[dialect] if isinstance(options, dict) and 'custom_class' in options: klass = options['custom_class'] else: klass = self._plugins.get(dialect) self._dialect_handlers[dialect] = klass(self.master, self.dialects[dialect]) return self._dialect_handlers[dialect] @defer.inlineCallbacks def getChanges(self, request): """ Take the logic from the change hook, and then delegate it to the proper handler We use the buildbot plugin mechanisms to find out about dialects and call getChanges() the return value is a list of changes if DIALECT is unspecified, a sample implementation is provided """ uriRE = re.search(r'^/change_hook/?([a-zA-Z0-9_]*)', bytes2unicode(request.uri)) if not uriRE: msg = "URI doesn't match change_hook regex: {}".format(request.uri) log.msg(msg) raise ValueError(msg) changes = [] src = None # Was there a dialect provided? 
if uriRE.group(1): dialect = uriRE.group(1) else: dialect = 'base' handler = self.makeHandler(dialect) changes, src = yield handler.getChanges(request) return (changes, src) @defer.inlineCallbacks def submitChanges(self, changes, request, src): for chdict in changes: when_timestamp = chdict.get('when_timestamp') if isinstance(when_timestamp, datetime): chdict['when_timestamp'] = datetime2epoch(when_timestamp) # unicodify stuff for k in ('comments', 'author', 'committer', 'revision', 'branch', 'category', 'revlink', 'repository', 'codebase', 'project'): if k in chdict: chdict[k] = bytes2unicode(chdict[k]) if chdict.get('files'): chdict['files'] = [bytes2unicode(f) for f in chdict['files']] if chdict.get('properties'): chdict['properties'] = dict((bytes2unicode(k), v) for k, v in chdict['properties'].items()) chid = yield self.master.data.updates.addChange(src=bytes2unicode(src), **chdict) log.msg("injected change {}".format(chid)) buildbot-3.4.0/master/buildbot/www/config.py000066400000000000000000000136431413250514000210660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import os import posixpath import jinja2 from twisted.internet import defer from twisted.python import log from twisted.web.error import Error from buildbot.interfaces import IConfigured from buildbot.util import unicode2bytes from buildbot.www import resource class IndexResource(resource.Resource): # enable reconfigResource calls needsReconfig = True def __init__(self, master, staticdir): super().__init__(master) loader = jinja2.FileSystemLoader(staticdir) self.jinja = jinja2.Environment( loader=loader, undefined=jinja2.StrictUndefined) def reconfigResource(self, new_config): self.config = new_config.www versions = self.getEnvironmentVersions() vs = self.config.get('versions') if isinstance(vs, list): versions += vs self.config['versions'] = versions self.custom_templates = {} template_dir = self.config.pop('custom_templates_dir', None) if template_dir is not None: template_dir = os.path.join(self.master.basedir, template_dir) self.custom_templates = self.parseCustomTemplateDir(template_dir) def render_GET(self, request): return self.asyncRenderHelper(request, self.renderIndex) def parseCustomTemplateDir(self, template_dir): res = {} allowed_ext = [".html"] try: import pypugjs # pylint: disable=import-outside-toplevel allowed_ext.append(".jade") except ImportError: # pragma: no cover log.msg("pypugjs not installed. 
Ignoring .jade files from {}".format(template_dir)) pypugjs = None for root, dirs, files in os.walk(template_dir): if root == template_dir: template_name = posixpath.join("views", "%s.html") else: # template_name is a url, so we really want '/' # root is a os.path, though template_name = posixpath.join( os.path.basename(root), "views", "%s.html") for f in files: fn = os.path.join(root, f) basename, ext = os.path.splitext(f) if ext not in allowed_ext: continue if ext == ".html": with open(fn) as f: html = f.read().strip() elif ext == ".jade": with open(fn) as f: jade = f.read() parser = pypugjs.parser.Parser(jade) block = parser.parse() compiler = pypugjs.ext.html.Compiler( block, pretty=False) html = compiler.compile() res[template_name % (basename,)] = html return res @staticmethod def getEnvironmentVersions(): import sys # pylint: disable=import-outside-toplevel import twisted # pylint: disable=import-outside-toplevel from buildbot import version as bbversion # pylint: disable=import-outside-toplevel pyversion = '.'.join(map(str, sys.version_info[:3])) tx_version_info = (twisted.version.major, twisted.version.minor, twisted.version.micro) txversion = '.'.join(map(str, tx_version_info)) return [ ('Python', pyversion), ('Buildbot', bbversion), ('Twisted', txversion), ] @defer.inlineCallbacks def renderIndex(self, request): config = {} request.setHeader(b"content-type", b'text/html') request.setHeader(b"Cache-Control", b"public,max-age=0") try: yield self.config['auth'].maybeAutoLogin(request) except Error as e: config["on_load_warning"] = e.message user_info = self.master.www.getUserInfos(request) config.update({"user": user_info}) config.update(self.config) config['buildbotURL'] = self.master.config.buildbotURL config['title'] = self.master.config.title config['titleURL'] = self.master.config.titleURL config['multiMaster'] = self.master.config.multiMaster # delete things that may contain secrets if 'change_hook_dialects' in config: del 
config['change_hook_dialects'] def toJson(obj): try: obj = IConfigured(obj).getConfigDict() except TypeError: # this happens for old style classes (not deriving objects) pass if isinstance(obj, dict): return obj # don't leak object memory address obj = obj.__class__.__module__ + "." + obj.__class__.__name__ return repr(obj) + " not yet IConfigured" tpl = self.jinja.get_template('index.html') # we use Jinja in order to render some server side dynamic stuff # For example, custom_templates javascript is generated by the # layout.jade jinja template tpl = tpl.render(configjson=json.dumps(config, default=toJson), custom_templates=self.custom_templates, config=self.config) return unicode2bytes(tpl, encoding='ascii') buildbot-3.4.0/master/buildbot/www/graphql.py000066400000000000000000000072371413250514000212610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json from twisted.internet import defer from twisted.python import log from twisted.web.error import Error from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.www import resource from buildbot.www.rest import RestRootResource class V3RootResource(resource.Resource): isLeaf = True # enable reconfigResource calls needsReconfig = True def reconfigResource(self, new_config): # @todo v2 has cross origin support, which might need to be factorized graphql_config = new_config.www.get("graphql") self.debug = True self.graphql = None if graphql_config is not None: self.graphql = True def render(self, request): def writeError(msg, errcode=400): if isinstance(msg, list): errors = msg else: msg = bytes2unicode(msg) errors = [{"message": msg}] if self.debug: log.msg("HTTP error: {}".format(errors)) request.setResponseCode(errcode) request.setHeader(b"content-type", b"application/json; charset=utf-8") data = json.dumps({"data": None, "errors": errors}) data = unicode2bytes(data) request.write(data) request.finish() return self.asyncRenderHelper(request, self.asyncRender, writeError) @defer.inlineCallbacks def asyncRender(self, request): if self.graphql is None: raise Error(501, "graphql not enabled") # graphql accepts its query either in post data or get query if request.method == b"POST": content_type = request.getHeader(b"content-type") if content_type == b"application/graphql": query = request.content.read().decode() elif content_type == b"application/json": json_query = json.load(request.content) query = json_query.pop('query') if json_query: fields = " ".join(json_query.keys()) raise Error(400, b"json request unsupported fields: " + fields.encode()) elif content_type is None: raise Error(400, b"no content-type") else: raise Error(400, b"unsupported content-type: " + content_type) elif request.method in (b"GET"): if b"query" not in request.args: raise Error(400, b"GET request must contain a 
'query' parameter") query = request.args[b"query"][0].decode() else: raise Error(400, b"invalid HTTP method") res = yield self.master.graphql.query(query) errors = None if res.errors: errors = [e.formatted for e in res.errors] request.setHeader(b"content-type", b"application/json; charset=utf-8") data = json.dumps({"data": res.data, "errors": errors}).encode() request.write(data) RestRootResource.addApiVersion(3, V3RootResource) buildbot-3.4.0/master/buildbot/www/hooks/000077500000000000000000000000001413250514000203635ustar00rootroot00000000000000buildbot-3.4.0/master/buildbot/www/hooks/__init__.py000066400000000000000000000000071413250514000224710ustar00rootroot00000000000000# test buildbot-3.4.0/master/buildbot/www/hooks/base.py000066400000000000000000000067341413250514000216610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # # code inspired/copied from contrib/github_buildbot # and inspired from code from the Chromium project # otherwise, Andrew Melo wrote the rest # but "the rest" is pretty minimal import json from buildbot.util import bytes2unicode class BaseHookHandler: def __init__(self, master, options): self.master = master self.options = options def getChanges(self, request): """ Consumes a naive build notification (the default for now) basically, set POST variables to match commit object parameters: revision, revlink, comments, branch, who, files, links files, links and properties will be de-json'd, the rest are interpreted as strings """ def firstOrNothing(value): """ Small helper function to return the first value (if value is a list) or return the whole thing otherwise. Make sure to properly decode bytes to unicode strings. """ if (isinstance(value, type([]))): value = value[0] return bytes2unicode(value) args = request.args # first, convert files, links and properties files = None if args.get(b'files'): files = json.loads(firstOrNothing(args.get(b'files'))) else: files = [] properties = None if args.get(b'properties'): properties = json.loads(firstOrNothing(args.get(b'properties'))) else: properties = {} revision = firstOrNothing(args.get(b'revision')) when = firstOrNothing(args.get(b'when_timestamp')) if when is None: when = firstOrNothing(args.get(b'when')) if when is not None: when = float(when) author = firstOrNothing(args.get(b'author')) if not author: author = firstOrNothing(args.get(b'who')) committer = firstOrNothing(args.get(b'committer')) comments = firstOrNothing(args.get(b'comments')) branch = firstOrNothing(args.get(b'branch')) category = firstOrNothing(args.get(b'category')) revlink = firstOrNothing(args.get(b'revlink')) repository = firstOrNothing(args.get(b'repository')) or '' project = firstOrNothing(args.get(b'project')) or '' codebase = firstOrNothing(args.get(b'codebase')) chdict = dict(author=author, 
committer=committer, files=files, comments=comments, revision=revision, when_timestamp=when, branch=branch, category=category, revlink=revlink, properties=properties, repository=repository, project=project, codebase=codebase) return ([chdict], None) base = BaseHookHandler # alternate name for buildbot plugin buildbot-3.4.0/master/buildbot/www/hooks/bitbucket.py000066400000000000000000000051421413250514000227130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # Copyright 2013 (c) Mamba Team import json from dateutil.parser import parse as dateparse from twisted.python import log from buildbot.util import bytes2unicode from buildbot.www.hooks.base import BaseHookHandler _HEADER_EVENT = b'X-Event-Key' class BitBucketHandler(BaseHookHandler): def getChanges(self, request): """Catch a POST request from BitBucket and start a build process Check the URL below if you require more information about payload https://confluence.atlassian.com/display/BITBUCKET/POST+Service+Management :param request: the http request Twisted object :param options: additional options """ event_type = request.getHeader(_HEADER_EVENT) event_type = bytes2unicode(event_type) payload = json.loads(bytes2unicode(request.args[b'payload'][0])) repo_url = '{}{}'.format( payload['canon_url'], payload['repository']['absolute_url']) project = request.args.get(b'project', [b''])[0] project = bytes2unicode(project) changes = [] for commit in payload['commits']: changes.append({ 'author': commit['raw_author'], 'files': [f['file'] for f in commit['files']], 'comments': commit['message'], 'revision': commit['raw_node'], 'when_timestamp': dateparse(commit['utctimestamp']), 'branch': commit['branch'], 'revlink': '{}commits/{}'.format(repo_url, commit['raw_node']), 'repository': repo_url, 'project': project, 'properties': { 'event': event_type, }, }) log.msg('New revision: {}'.format(commit['node'])) log.msg('Received {} changes from bitbucket'.format(len(changes))) return (changes, payload['repository']['scm']) bitbucket = BitBucketHandler buildbot-3.4.0/master/buildbot/www/hooks/bitbucketcloud.py000066400000000000000000000144721413250514000237500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Copyright Mamba Team import json from twisted.python import log from buildbot.util import bytes2unicode from buildbot.util.pullrequest import PullRequestMixin GIT_BRANCH_REF = "refs/heads/{}" GIT_MERGE_REF = "refs/pull-requests/{}/merge" GIT_TAG_REF = "refs/tags/{}" _HEADER_EVENT = b'X-Event-Key' class BitbucketCloudEventHandler(PullRequestMixin): property_basename = "bitbucket" def __init__(self, master, options=None): self.master = master if not isinstance(options, dict): options = {} self.options = options self._codebase = self.options.get('codebase', None) self.external_property_whitelist = self.options.get( 'bitbucket_property_whitelist', [] ) def process(self, request): payload = self._get_payload(request) event_type = request.getHeader(_HEADER_EVENT) event_type = bytes2unicode(event_type) log.msg("Processing event {header}: {event}" .format(header=_HEADER_EVENT, event=event_type)) event_type = event_type.replace(":", "_") handler = getattr(self, 'handle_{}'.format(event_type), None) if handler is None: raise ValueError('Unknown event: {}'.format(event_type)) return handler(payload) def _get_payload(self, request): content = request.content.read() content = bytes2unicode(content) content_type = request.getHeader(b'Content-Type') content_type = bytes2unicode(content_type) if content_type.startswith('application/json'): payload = json.loads(content) else: raise ValueError('Unknown content type: {}' .format(content_type)) log.msg("Payload: {}".format(payload)) return payload def 
handle_repo_push(self, payload): changes = [] project = payload['repository'].get('project', {'name': 'none'})['name'] repo_url = payload['repository']['links']['self']['href'] web_url = payload['repository']['links']['html']['href'] for payload_change in payload['push']['changes']: if payload_change['new']: age = 'new' category = 'push' else: # when new is null the ref is deleted age = 'old' category = 'ref-deleted' commit_hash = payload_change[age]['target']['hash'] if payload_change[age]['type'] == 'branch': branch = GIT_BRANCH_REF.format(payload_change[age]['name']) elif payload_change[age]['type'] == 'tag': branch = GIT_TAG_REF.format(payload_change[age]['name']) change = { 'revision': commit_hash, 'revlink': '{}/commits/{}'.format(web_url, commit_hash), 'repository': repo_url, 'author': '{} <{}>'.format(payload['actor']['display_name'], payload['actor']['nickname']), 'comments': 'Bitbucket Cloud commit {}'.format(commit_hash), 'branch': branch, 'project': project, 'category': category } if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase changes.append(change) return (changes, payload['repository']['scm']) def handle_pullrequest_created(self, payload): return self.handle_pullrequest( payload, GIT_MERGE_REF.format(int(payload['pullrequest']['id'])), "pull-created") def handle_pullrequest_updated(self, payload): return self.handle_pullrequest( payload, GIT_MERGE_REF.format(int(payload['pullrequest']['id'])), "pull-updated") def handle_pullrequest_fulfilled(self, payload): return self.handle_pullrequest( payload, GIT_BRANCH_REF.format( payload['pullrequest']['toRef']['branch']['name']), "pull-fulfilled") def handle_pullrequest_rejected(self, payload): return self.handle_pullrequest( payload, GIT_BRANCH_REF.format( payload['pullrequest']['fromRef']['branch']['name']), "pull-rejected") def handle_pullrequest(self, payload, refname, category): pr_number = 
int(payload['pullrequest']['id']) repo_url = payload['repository']['links']['self']['href'] project = payload['repository'].get('project', {'name': 'none'})['name'] revlink = payload['pullrequest']['link'] change = { 'revision': payload['pullrequest']['fromRef']['commit']['hash'], 'revlink': revlink, 'repository': repo_url, 'author': '{} <{}>'.format(payload['actor']['display_name'], payload['actor']['nickname']), 'comments': 'Bitbucket Cloud Pull Request #{}'.format(pr_number), 'branch': refname, 'project': project, 'category': category, 'properties': { 'pullrequesturl': revlink, **self.extractProperties(payload['pullrequest']), } } if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase return [change], payload['repository']['scm'] def getChanges(self, request): return self.process(request) bitbucketcloud = BitbucketCloudEventHandler buildbot-3.4.0/master/buildbot/www/hooks/bitbucketserver.py000066400000000000000000000154631413250514000241510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # Copyright Mamba Team import json from twisted.python import log from buildbot.util import bytes2unicode from buildbot.util.pullrequest import PullRequestMixin GIT_BRANCH_REF = "refs/heads/{}" GIT_MERGE_REF = "refs/pull-requests/{}/merge" GIT_TAG_REF = "refs/tags/{}" _HEADER_EVENT = b'X-Event-Key' class BitbucketServerEventHandler(PullRequestMixin): property_basename = "bitbucket" def __init__(self, master, options=None): if options is None: options = {} self.master = master if not isinstance(options, dict): options = {} self.options = options self._codebase = self.options.get('codebase', None) self.external_property_whitelist = self.options.get( 'bitbucket_property_whitelist', [] ) def process(self, request): payload = self._get_payload(request) event_type = request.getHeader(_HEADER_EVENT) event_type = bytes2unicode(event_type) log.msg("Processing event {header}: {event}" .format(header=_HEADER_EVENT, event=event_type)) event_type = event_type.replace(":", "_") handler = getattr(self, 'handle_{}'.format(event_type), None) if handler is None: raise ValueError('Unknown event: {}'.format(event_type)) return handler(payload) def _get_payload(self, request): content = request.content.read() content = bytes2unicode(content) content_type = request.getHeader(b'Content-Type') content_type = bytes2unicode(content_type) if content_type.startswith('application/json'): payload = json.loads(content) else: raise ValueError('Unknown content type: {}' .format(content_type)) log.msg("Payload: {}".format(payload)) return payload def handle_repo_refs_changed(self, payload): return self._handle_repo_refs_changed_common(payload) def handle_repo_push(self, payload): # repo:push works exactly like repo:refs_changed, but is no longer documented (not even # in the historical documentation of old versions of Bitbucket Server). The old code path # has been preserved for backwards compatibility. 
return self._handle_repo_refs_changed_common(payload) def _handle_repo_refs_changed_common(self, payload): changes = [] project = payload['repository']['project']['name'] repo_url = payload['repository']['links']['self'][0]['href'] repo_url = repo_url.rstrip('browse') for payload_change in payload['push']['changes']: if payload_change['new']: age = 'new' category = 'push' else: # when new is null the ref is deleted age = 'old' category = 'ref-deleted' commit_hash = payload_change[age]['target']['hash'] if payload_change[age]['type'] == 'branch': branch = GIT_BRANCH_REF.format(payload_change[age]['name']) elif payload_change[age]['type'] == 'tag': branch = GIT_TAG_REF.format(payload_change[age]['name']) change = { 'revision': commit_hash, 'revlink': '{}commits/{}'.format(repo_url, commit_hash), 'repository': repo_url, 'author': '{} <{}>'.format(payload['actor']['displayName'], payload['actor']['username']), 'comments': 'Bitbucket Server commit {}'.format(commit_hash), 'branch': branch, 'project': project, 'category': category } if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase changes.append(change) return (changes, payload['repository']['scmId']) def handle_pullrequest_created(self, payload): return self.handle_pullrequest( payload, GIT_MERGE_REF.format(int(payload['pullrequest']['id'])), "pull-created") def handle_pullrequest_updated(self, payload): return self.handle_pullrequest( payload, GIT_MERGE_REF.format(int(payload['pullrequest']['id'])), "pull-updated") def handle_pullrequest_fulfilled(self, payload): return self.handle_pullrequest( payload, GIT_BRANCH_REF.format( payload['pullrequest']['toRef']['branch']['name']), "pull-fulfilled") def handle_pullrequest_rejected(self, payload): return self.handle_pullrequest( payload, GIT_BRANCH_REF.format( payload['pullrequest']['fromRef']['branch']['name']), "pull-rejected") def handle_pullrequest(self, payload, refname, 
category): pr_number = int(payload['pullrequest']['id']) repo_url = payload['repository']['links']['self'][0]['href'] repo_url = repo_url.rstrip('browse') revlink = payload['pullrequest']['link'] change = { 'revision': payload['pullrequest']['fromRef']['commit']['hash'], 'revlink': revlink, 'repository': repo_url, 'author': '{} <{}>'.format(payload['actor']['displayName'], payload['actor']['username']), 'comments': 'Bitbucket Server Pull Request #{}'.format(pr_number), 'branch': refname, 'project': payload['repository']['project']['name'], 'category': category, 'properties': { 'pullrequesturl': revlink, **self.extractProperties(payload['pullrequest']), } } if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase return [change], payload['repository']['scmId'] def getChanges(self, request): return self.process(request) bitbucketserver = BitbucketServerEventHandler buildbot-3.4.0/master/buildbot/www/hooks/github.py000066400000000000000000000353521413250514000222270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import hmac import json import logging import re from hashlib import sha1 from dateutil.parser import parse as dateparse from twisted.internet import defer from twisted.python import log from buildbot.process.properties import Properties from buildbot.util import bytes2unicode from buildbot.util import httpclientservice from buildbot.util import unicode2bytes from buildbot.util.pullrequest import PullRequestMixin from buildbot.www.hooks.base import BaseHookHandler _HEADER_EVENT = b'X-GitHub-Event' _HEADER_SIGNATURE = b'X-Hub-Signature' DEFAULT_SKIPS_PATTERN = (r'\[ *skip *ci *\]', r'\[ *ci *skip *\]') DEFAULT_GITHUB_API_URL = 'https://api.github.com' class GitHubEventHandler(PullRequestMixin): property_basename = "github" def __init__(self, secret, strict, codebase=None, github_property_whitelist=None, master=None, skips=None, github_api_endpoint=None, pullrequest_ref=None, token=None, debug=False, verify=False): if github_property_whitelist is None: github_property_whitelist = [] self._secret = secret self._strict = strict self._token = token self._codebase = codebase self.external_property_whitelist = github_property_whitelist self.pullrequest_ref = pullrequest_ref self.skips = skips self.github_api_endpoint = github_api_endpoint self.master = master if skips is None: self.skips = DEFAULT_SKIPS_PATTERN if github_api_endpoint is None: self.github_api_endpoint = DEFAULT_GITHUB_API_URL if self._strict and not self._secret: raise ValueError('Strict mode is requested ' 'while no secret is provided') self.debug = debug self.verify = verify @defer.inlineCallbacks def process(self, request): payload = yield self._get_payload(request) event_type = request.getHeader(_HEADER_EVENT) event_type = bytes2unicode(event_type) log.msg("X-GitHub-Event: {}".format( event_type), logLevel=logging.DEBUG) handler = getattr(self, 'handle_{}'.format(event_type), None) if handler is None: raise ValueError('Unknown event: {}'.format(event_type)) result = 
yield handler(payload, event_type) return result @defer.inlineCallbacks def _get_payload(self, request): content = request.content.read() content = bytes2unicode(content) signature = request.getHeader(_HEADER_SIGNATURE) signature = bytes2unicode(signature) if not signature and self._strict: raise ValueError('Request has no required signature') if self._secret and signature: try: hash_type, hexdigest = signature.split('=') except ValueError as e: raise ValueError( 'Wrong signature format: {}'.format(signature)) from e if hash_type != 'sha1': raise ValueError('Unknown hash type: {}'.format(hash_type)) p = Properties() p.master = self.master rendered_secret = yield p.render(self._secret) mac = hmac.new(unicode2bytes(rendered_secret), msg=unicode2bytes(content), digestmod=sha1) def _cmp(a, b): try: # try the more secure compare_digest() first from hmac import compare_digest return compare_digest(a, b) except ImportError: # pragma: no cover # and fallback to the insecure simple comparison otherwise return a == b if not _cmp(bytes2unicode(mac.hexdigest()), hexdigest): raise ValueError('Hash mismatch') content_type = request.getHeader(b'Content-Type') if content_type == b'application/json': payload = json.loads(content) elif content_type == b'application/x-www-form-urlencoded': payload = json.loads(bytes2unicode(request.args[b'payload'][0])) else: raise ValueError('Unknown content type: {}'.format(content_type)) log.msg("Payload: {}".format(payload), logLevel=logging.DEBUG) return payload def handle_ping(self, _, __): return [], 'git' def handle_push(self, payload, event): # This field is unused: user = None # user = payload['pusher']['name'] repo = payload['repository']['name'] repo_url = payload['repository']['html_url'] # NOTE: what would be a reasonable value for project? 
# project = request.args.get('project', [''])[0] project = payload['repository']['full_name'] # Inject some additional white-listed event payload properties properties = self.extractProperties(payload) changes = self._process_change(payload, user, repo, repo_url, project, event, properties) log.msg("Received {} changes from github".format(len(changes))) return changes, 'git' @defer.inlineCallbacks def handle_pull_request(self, payload, event): changes = [] number = payload['number'] refname = 'refs/pull/{}/{}'.format(number, self.pullrequest_ref) basename = payload['pull_request']['base']['ref'] commits = payload['pull_request']['commits'] title = payload['pull_request']['title'] comments = payload['pull_request']['body'] repo_full_name = payload['repository']['full_name'] head_sha = payload['pull_request']['head']['sha'] revlink = payload['pull_request']['_links']['html']['href'] log.msg('Processing GitHub PR #{}'.format(number), logLevel=logging.DEBUG) head_msg = yield self._get_commit_msg(repo_full_name, head_sha) if self._has_skip(head_msg): log.msg("GitHub PR #{}, Ignoring: " "head commit message contains skip pattern".format(number)) return ([], 'git') action = payload.get('action') if action not in ('opened', 'reopened', 'synchronize'): log.msg("GitHub PR #{} {}, ignoring".format(number, action)) return (changes, 'git') files = yield self._get_pr_files(repo_full_name, number) properties = { 'pullrequesturl': revlink, 'event': event, 'basename': basename, **self.extractProperties(payload['pull_request']), } change = { 'revision': payload['pull_request']['head']['sha'], 'when_timestamp': dateparse(payload['pull_request']['created_at']), 'branch': refname, 'files': files, 'revlink': payload['pull_request']['_links']['html']['href'], 'repository': payload['repository']['html_url'], 'project': payload['pull_request']['base']['repo']['full_name'], 'category': 'pull', # TODO: Get author name based on login id using txgithub module 'author': 
payload['sender']['login'], 'comments': 'GitHub Pull Request #{0} ({1} commit{2})\n{3}\n{4}'.format( number, commits, 's' if commits != 1 else '', title, comments), 'properties': properties, } if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase changes.append(change) log.msg("Received {} changes from GitHub PR #{}".format( len(changes), number)) return (changes, 'git') @defer.inlineCallbacks def _get_commit_msg(self, repo, sha): ''' :param repo: the repo full name, ``{owner}/{project}``. e.g. ``buildbot/buildbot`` ''' headers = { 'User-Agent': 'Buildbot', } if self._token: p = Properties() p.master = self.master token = yield p.render(self._token) headers['Authorization'] = 'token ' + token url = '/repos/{}/commits/{}'.format(repo, sha) http = yield httpclientservice.HTTPClientService.getService( self.master, self.github_api_endpoint, headers=headers, debug=self.debug, verify=self.verify) res = yield http.get(url) if 200 <= res.code < 300: data = yield res.json() return data['commit']['message'] log.msg('Failed fetching PR commit message: response code {}'.format(res.code)) return 'No message field' @defer.inlineCallbacks def _get_pr_files(self, repo, number): """ Get Files that belong to the Pull Request :param repo: the repo full name, ``{owner}/{project}``. e.g. ``buildbot/buildbot`` :param number: the pull request number. 
""" headers = {"User-Agent": "Buildbot"} if self._token: p = Properties() p.master = self.master token = yield p.render(self._token) headers["Authorization"] = "token " + token url = "/repos/{}/pulls/{}/files".format(repo, number) http = yield httpclientservice.HTTPClientService.getService( self.master, self.github_api_endpoint, headers=headers, debug=self.debug, verify=self.verify, ) res = yield http.get(url) if 200 <= res.code < 300: data = yield res.json() return [f["filename"] for f in data] log.msg('Failed fetching PR files: response code {}'.format(res.code)) return [] def _process_change(self, payload, user, repo, repo_url, project, event, properties): """ Consumes the JSON as a python object and actually starts the build. :arguments: payload Python Object that represents the JSON sent by GitHub Service Hook. """ changes = [] refname = payload['ref'] # We only care about regular heads or tags match = re.match(r"^refs/(heads|tags)/(.+)$", refname) if not match: log.msg("Ignoring refname `{}': Not a branch".format(refname)) return changes category = None # None is the legacy category for when hook only supported push if match.group(1) == "tags": category = "tag" branch = match.group(2) if payload.get('deleted'): log.msg("Branch `{}' deleted, ignoring".format(branch)) return changes # check skip pattern in commit message. 
e.g.: [ci skip] and [skip ci] head_msg = payload['head_commit'].get('message', '') if self._has_skip(head_msg): return changes commits = payload['commits'] if payload.get('created'): commits = [payload['head_commit']] for commit in commits: files = [] for kind in ('added', 'modified', 'removed'): files.extend(commit.get(kind, [])) when_timestamp = dateparse(commit['timestamp']) log.msg("New revision: {}".format(commit['id'][:8])) change = { 'author': '{} <{}>'.format(commit['author']['name'], commit['author']['email']), 'committer': '{} <{}>'.format(commit['committer']['name'], commit['committer']['email']), 'files': files, 'comments': commit['message'], 'revision': commit['id'], 'when_timestamp': when_timestamp, 'branch': branch, 'revlink': commit['url'], 'repository': repo_url, 'project': project, 'properties': { 'github_distinct': commit.get('distinct', True), 'event': event, }, 'category': category } # Update with any white-listed github event properties change['properties'].update(properties) if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase changes.append(change) return changes def _has_skip(self, msg): ''' The message contains the skipping keyword or not. :return type: Bool ''' for skip in self.skips: if re.search(skip, msg): return True return False # for GitHub, we do another level of indirection because # we already had documented API that encouraged people to subclass GitHubEventHandler # so we need to be careful not breaking that API. 
class GitHubHandler(BaseHookHandler): def __init__(self, master, options): if options is None: options = {} super().__init__(master, options) klass = options.get('class', GitHubEventHandler) klass_kwargs = { 'master': master, 'codebase': options.get('codebase', None), 'github_property_whitelist': options.get('github_property_whitelist', None), 'skips': options.get('skips', None), 'github_api_endpoint': options.get('github_api_endpoint', None) or 'https://api.github.com', 'pullrequest_ref': options.get('pullrequest_ref', None) or 'merge', 'token': options.get('token', None), 'debug': options.get('debug', None) or False, 'verify': options.get('verify', None) or False, } handler = klass(options.get('secret', None), options.get('strict', False), **klass_kwargs) self.handler = handler def getChanges(self, request): return self.handler.process(request) github = GitHubHandler buildbot-3.4.0/master/buildbot/www/hooks/gitlab.py000066400000000000000000000174661413250514000222150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import re from dateutil.parser import parse as dateparse from twisted.internet.defer import inlineCallbacks from twisted.python import log from buildbot.process.properties import Properties from buildbot.util import bytes2unicode from buildbot.www.hooks.base import BaseHookHandler _HEADER_EVENT = b'X-Gitlab-Event' _HEADER_GITLAB_TOKEN = b'X-Gitlab-Token' class GitLabHandler(BaseHookHandler): def _process_change(self, payload, user, repo, repo_url, event, codebase=None): """ Consumes the JSON as a python object and actually starts the build. :arguments: payload Python Object that represents the JSON sent by GitLab Service Hook. """ changes = [] refname = payload['ref'] # project name from http headers is empty for me, so get it from repository/name project = payload['repository']['name'] # We only care about regular heads or tags match = re.match(r"^refs/(heads|tags)/(.+)$", refname) if not match: log.msg("Ignoring refname `{}': Not a branch".format(refname)) return changes branch = match.group(2) if payload.get('deleted'): log.msg("Branch `{}' deleted, ignoring".format(branch)) return changes for commit in payload['commits']: if not commit.get('distinct', True): log.msg('Commit `{}` is a non-distinct commit, ignoring...'.format(commit['id'])) continue files = [] for kind in ('added', 'modified', 'removed'): files.extend(commit.get(kind, [])) when_timestamp = dateparse(commit['timestamp']) log.msg("New revision: {}".format(commit['id'][:8])) change = { 'author': '{} <{}>'.format(commit['author']['name'], commit['author']['email']), 'files': files, 'comments': commit['message'], 'revision': commit['id'], 'when_timestamp': when_timestamp, 'branch': branch, 'revlink': commit['url'], 'repository': repo_url, 'project': project, 'category': event, 'properties': { 'event': event, }, } if codebase is not None: change['codebase'] = codebase changes.append(change) return changes def _process_merge_request_change(self, payload, 
event, codebase=None): """ Consumes the merge_request JSON as a python object and turn it into a buildbot change. :arguments: payload Python Object that represents the JSON sent by GitLab Service Hook. """ attrs = payload['object_attributes'] commit = attrs['last_commit'] when_timestamp = dateparse(commit['timestamp']) # @todo provide and document a way to choose between http and ssh url repo_url = attrs['target']['git_http_url'] # project name from http headers is empty for me, so get it from # object_attributes/target/name project = attrs['target']['name'] # Filter out uninteresting events state = attrs['state'] if re.match('^(closed|merged|approved)$', state): log.msg("GitLab MR#{}: Ignoring because state is {}".format(attrs['iid'], state)) return [] action = attrs['action'] if not re.match('^(open|reopen)$', action) and \ not (action == "update" and "oldrev" in attrs): log.msg("GitLab MR#{}: Ignoring because action {} was not open or " "reopen or an update that added code".format(attrs['iid'], action)) return [] changes = [{ 'author': '{} <{}>'.format(commit['author']['name'], commit['author']['email']), 'files': [], # @todo use rest API 'comments': "MR#{}: {}\n\n{}".format(attrs['iid'], attrs['title'], attrs['description']), 'revision': commit['id'], 'when_timestamp': when_timestamp, 'branch': attrs['target_branch'], 'repository': repo_url, 'project': project, 'category': event, 'revlink': attrs['url'], 'properties': { 'source_branch': attrs['source_branch'], 'source_project_id': attrs['source_project_id'], 'source_repository': attrs['source']['git_http_url'], 'source_git_ssh_url': attrs['source']['git_ssh_url'], 'target_branch': attrs['target_branch'], 'target_project_id': attrs['target_project_id'], 'target_repository': attrs['target']['git_http_url'], 'target_git_ssh_url': attrs['target']['git_ssh_url'], 'event': event, }, }] if codebase is not None: changes[0]['codebase'] = codebase return changes @inlineCallbacks def getChanges(self, request): """ Reponds 
only to POST events and starts the build process :arguments: request the http request object """ expected_secret = isinstance(self.options, dict) and self.options.get('secret') if expected_secret: received_secret = request.getHeader(_HEADER_GITLAB_TOKEN) received_secret = bytes2unicode(received_secret) p = Properties() p.master = self.master expected_secret_value = yield p.render(expected_secret) if received_secret != expected_secret_value: raise ValueError("Invalid secret") try: content = request.content.read() payload = json.loads(bytes2unicode(content)) except Exception as e: raise ValueError("Error loading JSON: " + str(e)) from e event_type = request.getHeader(_HEADER_EVENT) event_type = bytes2unicode(event_type) # newer version of gitlab have a object_kind parameter, # which allows not to use the http header event_type = payload.get('object_kind', event_type) codebase = request.args.get(b'codebase', [None])[0] codebase = bytes2unicode(codebase) if event_type in ("push", "tag_push", "Push Hook"): user = payload['user_name'] repo = payload['repository']['name'] repo_url = payload['repository']['url'] changes = self._process_change( payload, user, repo, repo_url, event_type, codebase=codebase) elif event_type == 'merge_request': changes = self._process_merge_request_change( payload, event_type, codebase=codebase) else: changes = [] if changes: log.msg("Received {} changes from {} gitlab event".format( len(changes), event_type)) return (changes, 'git') gitlab = GitLabHandler buildbot-3.4.0/master/buildbot/www/hooks/gitorious.py000066400000000000000000000055121413250514000227640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # note: this file is based on github.py import json import re from dateutil.parser import parse as dateparse from twisted.python import log from buildbot.util import bytes2unicode from buildbot.www.hooks.base import BaseHookHandler class GitoriousHandler(BaseHookHandler): def getChanges(self, request): payload = json.loads(bytes2unicode(request.args[b'payload'][0])) user = payload['repository']['owner']['name'] repo = payload['repository']['name'] repo_url = payload['repository']['url'] project = payload['project']['name'] changes = self.process_change(payload, user, repo, repo_url, project) log.msg("Received {} changes from gitorious".format(len(changes))) return (changes, 'git') def process_change(self, payload, user, repo, repo_url, project): changes = [] newrev = payload['after'] branch = payload['ref'] if re.match(r"^0*$", newrev): log.msg("Branch `{}' deleted, ignoring".format(branch)) return [] else: for commit in payload['commits']: files = [] # Gitorious doesn't send these, maybe later # if 'added' in commit: # files.extend(commit['added']) # if 'modified' in commit: # files.extend(commit['modified']) # if 'removed' in commit: # files.extend(commit['removed']) when_timestamp = dateparse(commit['timestamp']) log.msg("New revision: {}".format(commit['id'][:8])) changes.append({ 'author': '{} <{}>'.format(commit['author']['name'], commit['author']['email']), 'files': files, 'comments': commit['message'], 'revision': commit['id'], 'when_timestamp': when_timestamp, 'branch': 
branch, 'revlink': commit['url'], 'repository': repo_url, 'project': project }) return changes gitorious = GitoriousHandler buildbot-3.4.0/master/buildbot/www/hooks/poller.py000066400000000000000000000043541413250514000222400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # This change hook allows GitHub or a hand crafted curl invocation to "knock on # the door" and trigger a change source to poll. 
from buildbot.changes.base import ReconfigurablePollingChangeSource from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.www.hooks.base import BaseHookHandler class PollingHandler(BaseHookHandler): def getChanges(self, req): change_svc = req.site.master.change_svc poll_all = b"poller" not in req.args allow_all = True allowed = [] if isinstance(self.options, dict) and b"allowed" in self.options: allow_all = False allowed = self.options[b"allowed"] pollers = [] for source in change_svc: if not isinstance(source, ReconfigurablePollingChangeSource): continue if not hasattr(source, "name"): continue if (not poll_all and unicode2bytes(source.name) not in req.args[b'poller']): continue if not allow_all and unicode2bytes(source.name) not in allowed: continue pollers.append(source) if not poll_all: missing = (set(req.args[b'poller']) - set(unicode2bytes(s.name) for s in pollers)) if missing: raise ValueError("Could not find pollers: {}".format( bytes2unicode(b",".join(missing)))) for p in pollers: p.force() return [], None poller = PollingHandler buildbot-3.4.0/master/buildbot/www/ldapuserinfo.py000066400000000000000000000153661413250514000223200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # NOTE regarding LDAP encodings: # # By default the encoding used in ldap3 is utf-8. The encoding is user-configurable, though. # For more information check ldap3's documentation on this topic: # http://ldap3.readthedocs.io/encoding.html # # It is recommended to use ldap3's auto-decoded `attributes` values for # `unicode` and `raw_*` attributes for `bytes`. from urllib.parse import urlparse from twisted.internet import threads from buildbot.util import bytes2unicode from buildbot.util import flatten from buildbot.www import auth from buildbot.www import avatar try: import ldap3 except ImportError: import importlib ldap3 = None class LdapUserInfo(avatar.AvatarBase, auth.UserInfoProviderBase): name = 'ldap' def __init__(self, uri, bindUser, bindPw, accountBase, accountPattern, accountFullName, accountEmail, groupBase=None, groupMemberPattern=None, groupName=None, avatarPattern=None, avatarData=None, accountExtraFields=None): # Throw import error now that this is being used if not ldap3: importlib.import_module('ldap3') self.uri = uri self.bindUser = bindUser self.bindPw = bindPw self.accountBase = accountBase self.accountEmail = accountEmail self.accountPattern = accountPattern self.accountFullName = accountFullName group_params = [p for p in (groupName, groupMemberPattern, groupBase) if p is not None] if len(group_params) not in (0, 3): raise ValueError( "Incomplete LDAP groups configuration. " "To use Ldap groups, you need to specify the three " "parameters (groupName, groupMemberPattern and groupBase). 
") self.groupName = groupName self.groupMemberPattern = groupMemberPattern self.groupBase = groupBase self.avatarPattern = avatarPattern self.avatarData = avatarData if accountExtraFields is None: accountExtraFields = [] self.accountExtraFields = accountExtraFields self.ldap_encoding = ldap3.get_config_parameter('DEFAULT_SERVER_ENCODING') def connectLdap(self): server = urlparse(self.uri) netloc = server.netloc.split(":") # define the server and the connection s = ldap3.Server(netloc[0], port=int(netloc[1]), use_ssl=server.scheme == 'ldaps', get_info=ldap3.ALL) auth = ldap3.SIMPLE if self.bindUser is None and self.bindPw is None: auth = ldap3.ANONYMOUS c = ldap3.Connection(s, auto_bind=True, client_strategy=ldap3.SYNC, user=self.bindUser, password=self.bindPw, authentication=auth) return c def search(self, c, base, filterstr='f', attributes=None): c.search(base, filterstr, ldap3.SUBTREE, attributes=attributes) return c.response def getUserInfo(self, username): username = bytes2unicode(username) def thd(): c = self.connectLdap() infos = {'username': username} pattern = self.accountPattern % dict(username=username) res = self.search(c, self.accountBase, pattern, attributes=[ self.accountEmail, self.accountFullName] + self.accountExtraFields) if len(res) != 1: raise KeyError("ldap search \"{}\" returned {} results".format(pattern, len(res))) dn, ldap_infos = res[0]['dn'], res[0]['attributes'] def getFirstLdapInfo(x): if isinstance(x, list): x = x[0] if x else None return x infos['full_name'] = getFirstLdapInfo(ldap_infos[self.accountFullName]) infos['email'] = getFirstLdapInfo(ldap_infos[self.accountEmail]) for f in self.accountExtraFields: if f in ldap_infos: infos[f] = getFirstLdapInfo(ldap_infos[f]) if self.groupMemberPattern is None: infos['groups'] = [] return infos # needs double quoting of backslashing pattern = self.groupMemberPattern % dict(dn=ldap3.utils.conv.escape_filter_chars(dn)) res = self.search(c, self.groupBase, pattern, attributes=[self.groupName]) 
infos['groups'] = flatten([group_infos['attributes'][self.groupName] for group_infos in res]) return infos return threads.deferToThread(thd) def findAvatarMime(self, data): # http://en.wikipedia.org/wiki/List_of_file_signatures if data.startswith(b"\xff\xd8\xff"): return (b"image/jpeg", data) if data.startswith(b"\x89PNG"): return (b"image/png", data) if data.startswith(b"GIF8"): return (b"image/gif", data) # ignore unknown image format return None def getUserAvatar(self, email, username, size, defaultAvatarUrl): if username: username = bytes2unicode(username) if email: email = bytes2unicode(email) def thd(): c = self.connectLdap() if username: pattern = self.accountPattern % dict(username=username) elif email: pattern = self.avatarPattern % dict(email=email) else: return None res = self.search(c, self.accountBase, pattern, attributes=[self.avatarData]) if not res: return None ldap_infos = res[0]['raw_attributes'] if self.avatarData in ldap_infos and ldap_infos[self.avatarData]: data = ldap_infos[self.avatarData][0] return self.findAvatarMime(data) return None return threads.deferToThread(thd) buildbot-3.4.0/master/buildbot/www/oauth2.py000066400000000000000000000362211413250514000210200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import re import textwrap from posixpath import join from urllib.parse import parse_qs from urllib.parse import urlencode import jinja2 import requests from twisted.internet import defer from twisted.internet import threads import buildbot from buildbot import config from buildbot.process.properties import Properties from buildbot.util import bytes2unicode from buildbot.util.logger import Logger from buildbot.www import auth from buildbot.www import resource log = Logger() class OAuth2LoginResource(auth.LoginResource): # disable reconfigResource calls needsReconfig = False def __init__(self, master, _auth): super().__init__(master) self.auth = _auth def render_POST(self, request): return self.asyncRenderHelper(request, self.renderLogin) @defer.inlineCallbacks def renderLogin(self, request): code = request.args.get(b"code", [b""])[0] if not code: url = request.args.get(b"redirect", [None])[0] url = yield self.auth.getLoginURL(url) raise resource.Redirect(url) details = yield self.auth.verifyCode(code) if self.auth.userInfoProvider is not None: infos = yield self.auth.userInfoProvider.getUserInfo(details['username']) details.update(infos) session = request.getSession() session.user_info = details session.updateSession(request) state = request.args.get(b"state", [b""])[0] if state: for redirect in parse_qs(state).get('redirect', []): raise resource.Redirect(self.auth.homeUri + "#" + redirect) raise resource.Redirect(self.auth.homeUri) class OAuth2Auth(auth.AuthBase): name = 'oauth2' getTokenUseAuthHeaders = False authUri = None tokenUri = None grantType = 'authorization_code' authUriAdditionalParams = {} tokenUriAdditionalParams = {} loginUri = None homeUri = None sslVerify = None def __init__(self, clientId, clientSecret, autologin=False, **kwargs): super().__init__(**kwargs) self.clientId = clientId self.clientSecret = clientSecret self.autologin = autologin def reconfigAuth(self, master, new_config): self.master = 
master self.loginUri = join(new_config.buildbotURL, "auth/login") self.homeUri = new_config.buildbotURL def getConfigDict(self): return dict(name=self.name, oauth2=True, fa_icon=self.faIcon, autologin=self.autologin ) def getLoginResource(self): return OAuth2LoginResource(self.master, self) @defer.inlineCallbacks def getLoginURL(self, redirect_url): """ Returns the url to redirect the user to for user consent """ p = Properties() p.master = self.master clientId = yield p.render(self.clientId) oauth_params = {'redirect_uri': self.loginUri, 'client_id': clientId, 'response_type': 'code'} if redirect_url is not None: oauth_params['state'] = urlencode(dict(redirect=redirect_url)) oauth_params.update(self.authUriAdditionalParams) sorted_oauth_params = sorted(oauth_params.items(), key=lambda val: val[0]) return "{}?{}".format(self.authUri, urlencode(sorted_oauth_params)) def createSessionFromToken(self, token): s = requests.Session() s.params = {'access_token': token['access_token']} s.verify = self.sslVerify return s def get(self, session, path): ret = session.get(self.resourceEndpoint + path) return ret.json() # based on https://github.com/maraujop/requests-oauth # from Miguel Araujo, augmented to support header based clientSecret # passing @defer.inlineCallbacks def verifyCode(self, code): # everything in deferToThread is not counted with trial --coverage :-( def thd(client_id, client_secret): url = self.tokenUri data = {'redirect_uri': self.loginUri, 'code': code, 'grant_type': self.grantType} auth = None if self.getTokenUseAuthHeaders: auth = (client_id, client_secret) else: data.update( {'client_id': client_id, 'client_secret': client_secret}) data.update(self.tokenUriAdditionalParams) response = requests.post( url, data=data, auth=auth, verify=self.sslVerify) response.raise_for_status() responseContent = bytes2unicode(response.content) try: content = json.loads(responseContent) except ValueError: content = parse_qs(responseContent) for k, v in content.items(): 
content[k] = v[0] except TypeError: content = responseContent session = self.createSessionFromToken(content) return self.getUserInfoFromOAuthClient(session) p = Properties() p.master = self.master client_id = yield p.render(self.clientId) client_secret = yield p.render(self.clientSecret) result = yield threads.deferToThread(thd, client_id, client_secret) return result def getUserInfoFromOAuthClient(self, c): return {} class GoogleAuth(OAuth2Auth): name = "Google" faIcon = "fa-google-plus" resourceEndpoint = "https://www.googleapis.com/oauth2/v1" authUri = 'https://accounts.google.com/o/oauth2/auth' tokenUri = 'https://accounts.google.com/o/oauth2/token' authUriAdditionalParams = dict(scope=" ".join([ 'https://www.googleapis.com/auth/userinfo.email', 'https://www.googleapis.com/auth/userinfo.profile' ])) def getUserInfoFromOAuthClient(self, c): data = self.get(c, '/userinfo') return dict(full_name=data["name"], username=data['email'].split("@")[0], email=data["email"], avatar_url=data["picture"]) class GitHubAuth(OAuth2Auth): name = "GitHub" faIcon = "fa-github" authUri = 'https://github.com/login/oauth/authorize' authUriAdditionalParams = {'scope': 'user:email read:org'} tokenUri = 'https://github.com/login/oauth/access_token' resourceEndpoint = 'https://api.github.com' getUserTeamsGraphqlTpl = textwrap.dedent(r''' {%- if organizations %} query getOrgTeamMembership { {%- for org_slug, org_name in organizations.items() %} {{ org_slug }}: organization(login: "{{ org_name }}") { teams(first: 100 userLogins: ["{{ user_info.username }}"]) { edges { node { name, slug } } } } {%- endfor %} } {%- endif %} ''') def __init__(self, clientId, clientSecret, serverURL=None, autologin=False, apiVersion=3, getTeamsMembership=False, debug=False, **kwargs): super().__init__(clientId, clientSecret, autologin, **kwargs) self.apiResourceEndpoint = None if serverURL is not None: # setup for enterprise github serverURL = serverURL.rstrip("/") # v3 is accessible directly at /api/v3 for 
enterprise, but directly for SaaS.. self.resourceEndpoint = serverURL + '/api/v3' # v4 is accessible endpoint for enterprise self.apiResourceEndpoint = serverURL + '/api/graphql' self.authUri = '{0}/login/oauth/authorize'.format(serverURL) self.tokenUri = '{0}/login/oauth/access_token'.format(serverURL) self.serverURL = serverURL or self.resourceEndpoint if apiVersion not in (3, 4): config.error( 'GitHubAuth apiVersion must be 3 or 4 not {}'.format( apiVersion)) self.apiVersion = apiVersion if apiVersion == 3: if getTeamsMembership is True: config.error( 'Retrieving team membership information using GitHubAuth is only ' 'possible using GitHub api v4.') else: defaultGraphqlEndpoint = self.serverURL + '/graphql' self.apiResourceEndpoint = self.apiResourceEndpoint or defaultGraphqlEndpoint if getTeamsMembership: # GraphQL name aliases must comply with /^[_a-zA-Z][_a-zA-Z0-9]*$/ self._orgname_slug_sub_re = re.compile(r'[^_a-zA-Z0-9]') self.getUserTeamsGraphqlTplC = jinja2.Template( self.getUserTeamsGraphqlTpl.strip()) self.getTeamsMembership = getTeamsMembership self.debug = debug def post(self, session, query): if self.debug: log.info('{klass} GraphQL POST Request: {endpoint} -> ' 'DATA:\n----\n{data}\n----', klass=self.__class__.__name__, endpoint=self.apiResourceEndpoint, data=query) ret = session.post(self.apiResourceEndpoint, json={'query': query}) return ret.json() def getUserInfoFromOAuthClient(self, c): if self.apiVersion == 3: return self.getUserInfoFromOAuthClient_v3(c) return self.getUserInfoFromOAuthClient_v4(c) def getUserInfoFromOAuthClient_v3(self, c): user = self.get(c, '/user') emails = self.get(c, '/user/emails') for email in emails: if email.get('primary', False): user['email'] = email['email'] break orgs = self.get(c, '/user/orgs') return dict(full_name=user['name'], email=user['email'], username=user['login'], groups=[org['login'] for org in orgs]) def createSessionFromToken(self, token): s = requests.Session() s.headers = { 'Authorization': 'token 
' + token['access_token'], 'User-Agent': 'buildbot/{}'.format(buildbot.version), } s.verify = self.sslVerify return s def getUserInfoFromOAuthClient_v4(self, c): graphql_query = textwrap.dedent(''' query { viewer { email login name organizations(first: 100) { edges { node { login } } } } } ''') data = self.post(c, graphql_query.strip()) data = data['data'] if self.debug: log.info('{klass} GraphQL Response: {response}', klass=self.__class__.__name__, response=data) user_info = dict(full_name=data['viewer']['name'], email=data['viewer']['email'], username=data['viewer']['login'], groups=[org['node']['login'] for org in data['viewer']['organizations']['edges']]) if self.getTeamsMembership: orgs_name_slug_mapping = { self._orgname_slug_sub_re.sub('_', n): n for n in user_info['groups']} graphql_query = self.getUserTeamsGraphqlTplC.render( {'user_info': user_info, 'organizations': orgs_name_slug_mapping}) if graphql_query: data = self.post(c, graphql_query) if self.debug: log.info('{klass} GraphQL Response: {response}', klass=self.__class__.__name__, response=data) teams = set() for org, team_data in data['data'].items(): if team_data is None: # Organizations can have OAuth App access restrictions enabled, # disallowing team data access to third-parties. continue for node in team_data['teams']['edges']: # On github we can mentions organization teams like # @org-name/team-name. 
Let's keep the team formatting # identical with the inclusion of the organization # since different organizations might share a common # team name teams.add('{}/{}'.format(orgs_name_slug_mapping[org], node['node']['name'])) teams.add('{}/{}'.format(orgs_name_slug_mapping[org], node['node']['slug'])) user_info['groups'].extend(sorted(teams)) if self.debug: log.info('{klass} User Details: {user_info}', klass=self.__class__.__name__, user_info=user_info) return user_info class GitLabAuth(OAuth2Auth): name = "GitLab" faIcon = "fa-git" def __init__(self, instanceUri, clientId, clientSecret, **kwargs): uri = instanceUri.rstrip("/") self.authUri = "{}/oauth/authorize".format(uri) self.tokenUri = "{}/oauth/token".format(uri) self.resourceEndpoint = "{}/api/v4".format(uri) super().__init__(clientId, clientSecret, **kwargs) def getUserInfoFromOAuthClient(self, c): user = self.get(c, "/user") groups = self.get(c, "/groups") return dict(full_name=user["name"], username=user["username"], email=user["email"], avatar_url=user["avatar_url"], groups=[g["path"] for g in groups]) class BitbucketAuth(OAuth2Auth): name = "Bitbucket" faIcon = "fa-bitbucket" authUri = 'https://bitbucket.org/site/oauth2/authorize' tokenUri = 'https://bitbucket.org/site/oauth2/access_token' resourceEndpoint = 'https://api.bitbucket.org/2.0' def getUserInfoFromOAuthClient(self, c): user = self.get(c, '/user') emails = self.get(c, '/user/emails') for email in emails["values"]: if email.get('is_primary', False): user['email'] = email['email'] break orgs = self.get(c, '/teams?role=member') return dict(full_name=user['display_name'], email=user['email'], username=user['username'], groups=[org['username'] for org in orgs["values"]]) buildbot-3.4.0/master/buildbot/www/plugin.py000066400000000000000000000030361413250514000211120ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import pkg_resources from twisted.web import static from buildbot.util import bytes2unicode class Application: def __init__(self, modulename, description, ui=True): self.description = description self.version = pkg_resources.resource_string( modulename, "VERSION").strip() self.version = bytes2unicode(self.version) self.static_dir = pkg_resources.resource_filename( modulename, "static") self.resource = static.File(self.static_dir) self.ui = ui def setMaster(self, master): self.master = master def setConfiguration(self, config): self.config = config def __repr__(self): return ("www.plugin.Application(version=%(version)s, " "description=%(description)s, " "static_dir=%(static_dir)s)") % self.__dict__ buildbot-3.4.0/master/buildbot/www/resource.py000066400000000000000000000074461413250514000214540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.internet import defer from twisted.python import log from twisted.web import resource from twisted.web import server from twisted.web.error import Error from buildbot.util import unicode2bytes _CR_LF_RE = re.compile(br"[\r\n]+.*") def protect_redirect_url(url): return _CR_LF_RE.sub(b"", url) class Redirect(Error): def __init__(self, url): super().__init__(302, "redirect") self.url = protect_redirect_url(unicode2bytes(url)) class Resource(resource.Resource): # if this is true for a class, then instances will have their # reconfigResource(new_config) methods called on reconfig. needsReconfig = False # as a convenience, subclasses have a ``master`` attribute, a # ``base_url`` attribute giving Buildbot's base URL, # and ``static_url`` attribute giving Buildbot's static files URL @property def base_url(self): return self.master.config.buildbotURL def __init__(self, master): super().__init__() self.master = master if self.needsReconfig and master is not None: master.www.resourceNeedsReconfigs(self) def reconfigResource(self, new_config): raise NotImplementedError def asyncRenderHelper(self, request, _callable, writeError=None): def writeErrorDefault(msg, errcode=400): request.setResponseCode(errcode) request.setHeader(b'content-type', b'text/plain; charset=utf-8') request.write(msg) request.finish() if writeError is None: writeError = writeErrorDefault try: d = _callable(request) except Exception as e: d = defer.fail(e) @d.addCallback def finish(s): try: if s is not None: request.write(s) request.finish() except RuntimeError: # pragma: no cover # this occurs when the client has already disconnected; ignore # it (see #2027) log.msg("http client disconnected before results were sent") @d.addErrback def 
failHttpRedirect(f): f.trap(Redirect) request.redirect(f.value.url) request.finish() return None @d.addErrback def failHttpError(f): f.trap(Error) e = f.value message = unicode2bytes(e.message) writeError(message, errcode=int(e.status)) @d.addErrback def fail(f): log.err(f, 'While rendering resource:') try: writeError(b'internal error - see logs', errcode=500) except Exception: try: request.finish() except Exception: pass return server.NOT_DONE_YET class RedirectResource(Resource): def __init__(self, master, basepath): super().__init__(master) self.basepath = basepath def render(self, request): redir = self.base_url + self.basepath request.redirect(protect_redirect_url(redir)) return redir buildbot-3.4.0/master/buildbot/www/rest.py000066400000000000000000000415601413250514000205750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import cgi import datetime import fnmatch import json import re from contextlib import contextmanager from urllib.parse import urlparse from twisted.internet import defer from twisted.python import log from twisted.web.error import Error from buildbot.data import exceptions from buildbot.util import bytes2unicode from buildbot.util import toJson from buildbot.util import unicode2bytes from buildbot.www import resource from buildbot.www.authz import Forbidden class BadJsonRpc2(Exception): def __init__(self, message, jsonrpccode): self.message = message self.jsonrpccode = jsonrpccode class ContentTypeParser: def __init__(self, contenttype): self.typeheader = contenttype def gettype(self): mimetype, options = cgi.parse_header( bytes2unicode(self.typeheader)) return mimetype URL_ENCODED = b"application/x-www-form-urlencoded" JSON_ENCODED = b"application/json" class RestRootResource(resource.Resource): version_classes = {} @classmethod def addApiVersion(cls, version, version_cls): cls.version_classes[version] = version_cls version_cls.apiVersion = version def __init__(self, master): super().__init__(master) min_vers = master.config.www.get('rest_minimum_version', 0) latest = max(list(self.version_classes)) for version, klass in self.version_classes.items(): if version < min_vers: continue child = klass(master) child_path = 'v{}'.format(version) child_path = unicode2bytes(child_path) self.putChild(child_path, child) if version == latest: self.putChild(b'latest', child) def render(self, request): request.setHeader(b"content-type", JSON_ENCODED) min_vers = self.master.config.www.get('rest_minimum_version', 0) api_versions = dict(('v{}'.format(v), '{}api/v{}'.format(self.base_url, v)) for v in self.version_classes if v > min_vers) data = json.dumps(dict(api_versions=api_versions)) return unicode2bytes(data) JSONRPC_CODES = dict(parse_error=-32700, invalid_request=-32600, method_not_found=-32601, invalid_params=-32602, 
internal_error=-32603) class V2RootResource(resource.Resource): # For GETs, this API follows http://jsonapi.org. The getter API does not # permit create, update, or delete, so this is limited to reading. # # Data API control methods can be invoked via a POST to the appropriate # URL. These follow http://www.jsonrpc.org/specification, with a few # limitations: # - params as list is not supported # - rpc call batching is not supported # - jsonrpc2 notifications are not supported (you always get an answer) # rather than construct the entire possible hierarchy of Rest resources, # this is marked as a leaf node, and any remaining path items are parsed # during rendering isLeaf = True # enable reconfigResource calls needsReconfig = True @defer.inlineCallbacks def getEndpoint(self, request, method, params): # note that trailing slashes are not allowed request_postpath = tuple(bytes2unicode(p) for p in request.postpath) yield self.master.www.assertUserAllowed(request, request_postpath, method, params) ret = yield self.master.data.getEndpoint(request_postpath) return ret @contextmanager def handleErrors(self, writeError): try: yield except exceptions.InvalidPathError as e: msg = unicode2bytes(e.args[0]) writeError(msg or b"invalid path", errcode=404, jsonrpccode=JSONRPC_CODES['invalid_request']) return except exceptions.InvalidControlException as e: msg = unicode2bytes(str(e)) writeError(msg or b"invalid control action", errcode=501, jsonrpccode=JSONRPC_CODES["method_not_found"]) return except exceptions.InvalidQueryParameter as e: msg = unicode2bytes(e.args[0]) writeError(msg or b"invalid request", errcode=400, jsonrpccode=JSONRPC_CODES["method_not_found"]) return except BadJsonRpc2 as e: msg = unicode2bytes(e.message) writeError(msg, errcode=400, jsonrpccode=e.jsonrpccode) return except Forbidden as e: # There is nothing in jsonrc spec about forbidden error, so pick # invalid request msg = unicode2bytes(e.message) writeError( msg, errcode=403, 
jsonrpccode=JSONRPC_CODES["invalid_request"]) return except Exception as e: log.err(_why='while handling API request') msg = unicode2bytes(repr(e)) writeError(repr(e), errcode=500, jsonrpccode=JSONRPC_CODES["internal_error"]) return # JSONRPC2 support def decodeJsonRPC2(self, request): # Verify the content-type. Browsers are easily convinced to send # POST data to arbitrary URLs via 'form' elements, but they won't # use the application/json content-type. if ContentTypeParser(request.getHeader(b'content-type')).gettype() != "application/json": raise BadJsonRpc2('Invalid content-type (use application/json)', JSONRPC_CODES["invalid_request"]) try: data = json.loads(bytes2unicode(request.content.read())) except Exception as e: raise BadJsonRpc2("JSON parse error: {}".format( str(e)), JSONRPC_CODES["parse_error"]) from e if isinstance(data, list): raise BadJsonRpc2("JSONRPC batch requests are not supported", JSONRPC_CODES["invalid_request"]) if not isinstance(data, dict): raise BadJsonRpc2("JSONRPC root object must be an object", JSONRPC_CODES["invalid_request"]) def check(name, types, typename): if name not in data: raise BadJsonRpc2("missing key '{}'".format(name), JSONRPC_CODES["invalid_request"]) if not isinstance(data[name], types): raise BadJsonRpc2("'{}' must be {}".format(name, typename), JSONRPC_CODES["invalid_request"]) check("jsonrpc", (str,), "a string") check("method", (str,), "a string") check("id", (str, int, type(None)), "a string, number, or null") check("params", (dict,), "an object") if data['jsonrpc'] != '2.0': raise BadJsonRpc2("only JSONRPC 2.0 is supported", JSONRPC_CODES['invalid_request']) return data["method"], data["id"], data['params'] @defer.inlineCallbacks def renderJsonRpc(self, request): jsonRpcReply = {'jsonrpc': "2.0"} def writeError(msg, errcode=399, jsonrpccode=JSONRPC_CODES["internal_error"]): if isinstance(msg, bytes): msg = bytes2unicode(msg) if self.debug: log.msg("JSONRPC error: {}".format(msg)) request.setResponseCode(errcode) 
request.setHeader(b'content-type', JSON_ENCODED) if "error" not in jsonRpcReply: # already filled in by caller jsonRpcReply['error'] = dict(code=jsonrpccode, message=msg) data = json.dumps(jsonRpcReply) data = unicode2bytes(data) request.write(data) with self.handleErrors(writeError): method, id, params = self.decodeJsonRPC2(request) jsonRpcReply['id'] = id ep, kwargs = yield self.getEndpoint(request, method, params) userinfos = self.master.www.getUserInfos(request) if 'anonymous' in userinfos and userinfos['anonymous']: owner = "anonymous" else: for field in ('email', 'username', 'full_name'): owner = userinfos.get(field, None) if owner: break params['owner'] = owner result = yield ep.control(method, params, kwargs) jsonRpcReply['result'] = result data = json.dumps(jsonRpcReply, default=toJson, sort_keys=True, separators=(',', ':')) request.setHeader(b'content-type', JSON_ENCODED) if request.method == b"HEAD": request.setHeader(b"content-length", unicode2bytes(str(len(data)))) request.write(b'') else: data = unicode2bytes(data) request.write(data) def decodeResultSpec(self, request, endpoint): args = request.args entityType = endpoint.rtype.entityType return self.master.data.resultspec_from_jsonapi(args, entityType, endpoint.isCollection) def encodeRaw(self, data, request): request.setHeader(b"content-type", unicode2bytes(data['mime-type']) + b'; charset=utf-8') request.setHeader(b"content-disposition", b'attachment; filename=' + unicode2bytes(data['filename'])) request.write(unicode2bytes(data['raw'])) return @defer.inlineCallbacks def renderRest(self, request): def writeError(msg, errcode=404, jsonrpccode=None): if self.debug: log.msg("REST error: {}".format(msg)) request.setResponseCode(errcode) request.setHeader(b'content-type', b'text/plain; charset=utf-8') msg = bytes2unicode(msg) data = json.dumps(dict(error=msg)) data = unicode2bytes(data) request.write(data) with self.handleErrors(writeError): ep, kwargs = yield self.getEndpoint(request, 
bytes2unicode(request.method), {}) rspec = self.decodeResultSpec(request, ep) data = yield ep.get(rspec, kwargs) if data is None: msg = ("not found while getting from {} with " "arguments {} and {}").format(repr(ep), repr(rspec), str(kwargs)) msg = unicode2bytes(msg) writeError(msg, errcode=404) return if ep.isRaw: self.encodeRaw(data, request) return # post-process any remaining parts of the resultspec data = rspec.apply(data) # annotate the result with some metadata meta = {} if ep.isCollection: offset, total = data.offset, data.total if offset is None: offset = 0 # add total, if known if total is not None: meta['total'] = total # get the real list instance out of the ListResult data = data.data else: data = [data] typeName = ep.rtype.plural data = { typeName: data, 'meta': meta } # set up the content type and formatting options; if the request # accepts text/html or text/plain, the JSON will be rendered in a # readable, multiline format. if b'application/json' in (request.getHeader(b'accept') or b''): compact = True request.setHeader(b"content-type", b'application/json; charset=utf-8') else: compact = False request.setHeader(b"content-type", b'text/plain; charset=utf-8') # set up caching if self.cache_seconds: now = datetime.datetime.utcnow() expires = now + datetime.timedelta(seconds=self.cache_seconds) expiresBytes = unicode2bytes( expires.strftime("%a, %d %b %Y %H:%M:%S GMT")) request.setHeader(b"Expires", expiresBytes) request.setHeader(b"Pragma", b"no-cache") # filter out blanks if necessary and render the data if compact: data = json.dumps(data, default=toJson, sort_keys=True, separators=(',', ':')) else: data = json.dumps(data, default=toJson, sort_keys=True, indent=2) if request.method == b"HEAD": request.setHeader(b"content-length", unicode2bytes(str(len(data)))) else: data = unicode2bytes(data) request.write(data) def reconfigResource(self, new_config): # buildbotURL may contain reverse proxy path, Origin header is just # scheme + host + port 
buildbotURL = urlparse(unicode2bytes(new_config.buildbotURL)) origin_self = buildbotURL.scheme + b"://" + buildbotURL.netloc # pre-translate the origin entries in the config self.origins = [] for o in new_config.www.get('allowed_origins', [origin_self]): origin = bytes2unicode(o).lower() self.origins.append(re.compile(fnmatch.translate(origin))) # and copy some other flags self.debug = new_config.www.get('debug') self.cache_seconds = new_config.www.get('json_cache_seconds', 0) def render(self, request): def writeError(msg, errcode=400): msg = bytes2unicode(msg) if self.debug: log.msg("HTTP error: {}".format(msg)) request.setResponseCode(errcode) request.setHeader(b'content-type', b'text/plain; charset=utf-8') if request.method == b'POST': # jsonRPC callers want the error message in error.message data = json.dumps(dict(error=dict(message=msg))) data = unicode2bytes(data) request.write(data) else: data = json.dumps(dict(error=msg)) data = unicode2bytes(data) request.write(data) request.finish() return self.asyncRenderHelper(request, self.asyncRender, writeError) @defer.inlineCallbacks def asyncRender(self, request): # Handle CORS, if necessary. origins = self.origins if origins is not None: isPreflight = False reqOrigin = request.getHeader(b'origin') if reqOrigin: err = None reqOrigin = reqOrigin.lower() if not any(o.match(bytes2unicode(reqOrigin)) for o in self.origins): err = b"invalid origin" elif request.method == b'OPTIONS': preflightMethod = request.getHeader( b'access-control-request-method') if preflightMethod not in (b'GET', b'POST', b'HEAD'): err = b'invalid method' isPreflight = True if err: raise Error(400, err) # If it's OK, then let the browser know we checked it out. The # Content-Type header is included here because CORS considers # content types other than form data and text/plain to not be # simple. 
request.setHeader(b"access-control-allow-origin", reqOrigin) request.setHeader(b"access-control-allow-headers", b"Content-Type") request.setHeader(b"access-control-max-age", b'3600') # if this was a preflight request, we're done if isPreflight: return b"" # based on the method, this is either JSONRPC or REST if request.method == b'POST': res = yield self.renderJsonRpc(request) elif request.method in (b'GET', b'HEAD'): res = yield self.renderRest(request) else: raise Error(400, b"invalid HTTP method") return res RestRootResource.addApiVersion(2, V2RootResource) buildbot-3.4.0/master/buildbot/www/service.py000066400000000000000000000351241413250514000212570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import calendar import datetime import os from binascii import hexlify import jwt from twisted.application import strports from twisted.cred.portal import IRealm from twisted.cred.portal import Portal from twisted.internet import defer from twisted.python import components from twisted.python import log from twisted.python.logfile import LogFile from twisted.web import guard from twisted.web import resource from twisted.web import server from zope.interface import implementer from buildbot.plugins.db import get_plugins from buildbot.util import bytes2unicode from buildbot.util import service from buildbot.util import unicode2bytes from buildbot.www import auth from buildbot.www import avatar from buildbot.www import change_hook from buildbot.www import config as wwwconfig from buildbot.www import graphql from buildbot.www import rest from buildbot.www import sse from buildbot.www import ws # as per: # http://security.stackexchange.com/questions/95972/what-are-requirements-for-hmac-secret-key # we need 128 bit key for HS256 SESSION_SECRET_LENGTH = 128 SESSION_SECRET_ALGORITHM = "HS256" class BuildbotSession(server.Session): # We deviate a bit from the twisted API in order to implement that. # We keep it a subclass of server.Session (to be safe against isinstance), # but we re implement all its API. # But as there is no support in twisted web for clustered session management, this leaves # us with few choice. expDelay = datetime.timedelta(weeks=1) def __init__(self, site, token=None): """ Initialize a session with a unique ID for that session. """ self.site = site assert self.site.session_secret is not None, "site.session_secret is not configured yet!" # Cannot use super() here as it would call server.Session.__init__ # which we explicitly want to override. 
However, we still want to call # server.Session parent class constructor components.Componentized.__init__(self) if token: self._fromToken(token) else: self._defaultValue() def _defaultValue(self): self.user_info = {"anonymous": True} def _fromToken(self, token): try: decoded = jwt.decode(token, self.site.session_secret, algorithms=[ SESSION_SECRET_ALGORITHM]) except jwt.exceptions.ExpiredSignatureError as e: raise KeyError(str(e)) from e except Exception as e: log.err(e, "while decoding JWT session") raise KeyError(str(e)) from e # might raise KeyError: will be caught by caller, which makes the token invalid self.user_info = decoded['user_info'] def updateSession(self, request): """ Update the cookie after session object was modified @param request: the request object which should get a new cookie """ # we actually need to copy some hardcoded constants from twisted :-( # Make sure we aren't creating a secure session on a non-secure page secure = request.isSecure() if not secure: cookieString = b"TWISTED_SESSION" else: cookieString = b"TWISTED_SECURE_SESSION" cookiename = b"_".join([cookieString] + request.sitepath) request.addCookie(cookiename, self.uid, path=b"/", secure=secure) def expire(self): # caller must still call self.updateSession() to actually expire it self._defaultValue() def notifyOnExpire(self, callback): raise NotImplementedError( "BuildbotSession can't support notify on session expiration") def touch(self): pass @property def uid(self): """uid is now generated automatically according to the claims. 
This should actually only be used for cookie generation """ exp = datetime.datetime.utcnow() + self.expDelay claims = { 'user_info': self.user_info, # Note that we use JWT standard 'exp' field to implement session expiration # we completely bypass twisted.web session expiration mechanisms 'exp': calendar.timegm(datetime.datetime.timetuple(exp))} return jwt.encode(claims, self.site.session_secret, algorithm=SESSION_SECRET_ALGORITHM) class BuildbotSite(server.Site): """ A custom Site for Buildbot needs. Supports rotating logs, and JWT sessions """ def __init__(self, root, logPath, rotateLength, maxRotatedFiles): super().__init__(root, logPath=logPath) self.rotateLength = rotateLength self.maxRotatedFiles = maxRotatedFiles self.session_secret = None def _openLogFile(self, path): self._nativeize = True return LogFile.fromFullPath( path, rotateLength=self.rotateLength, maxRotatedFiles=self.maxRotatedFiles) def getResourceFor(self, request): request.responseHeaders.removeHeader('Server') return server.Site.getResourceFor(self, request) def setSessionSecret(self, secret): self.session_secret = secret def makeSession(self): """ Generate a new Session instance, but not store it for future reference (because it will be used by another master instance) The session will still be cached by twisted.request """ return BuildbotSession(self) def getSession(self, uid): """ Get a previously generated session. @param uid: Unique ID of the session (a JWT token). @type uid: L{bytes}. @raise: L{KeyError} if the session is not found. 
""" return BuildbotSession(self, uid) class WWWService(service.ReconfigurableServiceMixin, service.AsyncMultiService): name = 'www' def __init__(self): super().__init__() self.port = None self.port_service = None self.site = None # load the apps early, in case something goes wrong in Python land self.apps = get_plugins('www', None, load_now=True) @property def auth(self): return self.master.config.www['auth'] @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): www = new_config.www self.authz = www.get('authz') if self.authz is not None: self.authz.setMaster(self.master) need_new_site = False if self.site: # if config params have changed, set need_new_site to True. # There are none right now. need_new_site = False else: if www['port']: need_new_site = True if need_new_site: self.setupSite(new_config) if self.site: self.reconfigSite(new_config) yield self.makeSessionSecret() if www['port'] != self.port: if self.port_service: yield self.port_service.disownServiceParent() self.port_service = None self.port = www['port'] if self.port: port = self.port if isinstance(port, int): port = "tcp:%d" % port self.port_service = strports.service(port, self.site) # monkey-patch in some code to get the actual Port object # returned by endpoint.listen(). But only for tests. 
if port == "tcp:0:interface=127.0.0.1": if hasattr(self.port_service, 'endpoint'): old_listen = self.port_service.endpoint.listen @defer.inlineCallbacks def listen(factory): port = yield old_listen(factory) self._getPort = lambda: port return port self.port_service.endpoint.listen = listen else: # older twisted's just have the port sitting there # as an instance attribute self._getPort = lambda: self.port_service._port yield self.port_service.setServiceParent(self) if not self.port_service: log.msg("No web server configured on this master") yield super().reconfigServiceWithBuildbotConfig(new_config) def getPortnum(self): # for tests, when the configured port is 0 and the kernel selects a # dynamic port. This will fail if the monkeypatch in reconfigService # was not made. return self._getPort().getHost().port def configPlugins(self, root, new_config): known_plugins = set(new_config.www.get('plugins', {})) | set(['base']) for key, plugin in list(new_config.www.get('plugins', {}).items()): log.msg("initializing www plugin %r" % (key,)) if key not in self.apps: raise RuntimeError("could not find plugin {}; is it installed?".format(key)) app = self.apps.get(key) app.setMaster(self.master) app.setConfiguration(plugin) root.putChild(unicode2bytes(key), app.resource) if not app.ui: del new_config.www['plugins'][key] for plugin_name in set(self.apps.names) - known_plugins: log.msg("NOTE: www plugin %r is installed but not " "configured" % (plugin_name,)) def setupSite(self, new_config): self.reconfigurableResources = [] # we're going to need at least the base plugin (buildbot-www) if 'base' not in self.apps: raise RuntimeError("could not find buildbot-www; is it installed?") root = self.apps.get('base').resource self.configPlugins(root, new_config) # / root.putChild(b'', wwwconfig.IndexResource( self.master, self.apps.get('base').static_dir)) # /auth root.putChild(b'auth', auth.AuthRootResource(self.master)) # /avatar root.putChild(b'avatar', 
avatar.AvatarResource(self.master)) # /api root.putChild(b'api', rest.RestRootResource(self.master)) [graphql] # import is made for side effects # /ws root.putChild(b'ws', ws.WsResource(self.master)) # /sse root.putChild(b'sse', sse.EventResource(self.master)) # /change_hook resource_obj = change_hook.ChangeHookResource(master=self.master) # FIXME: this does not work with reconfig change_hook_auth = new_config.www.get('change_hook_auth') if change_hook_auth is not None: resource_obj = self.setupProtectedResource( resource_obj, change_hook_auth) root.putChild(b"change_hook", resource_obj) self.root = root rotateLength = new_config.www.get( 'logRotateLength') or self.master.log_rotation.rotateLength maxRotatedFiles = new_config.www.get( 'maxRotatedFiles') or self.master.log_rotation.maxRotatedFiles httplog = None if new_config.www['logfileName']: httplog = os.path.abspath( os.path.join(self.master.basedir, new_config.www['logfileName'])) self.site = BuildbotSite(root, logPath=httplog, rotateLength=rotateLength, maxRotatedFiles=maxRotatedFiles) self.site.sessionFactory = None # Make sure site.master is set. 
It is required for poller change_hook self.site.master = self.master # convert this to a tuple so it can't be appended anymore (in # case some dynamically created resources try to get reconfigs) self.reconfigurableResources = tuple(self.reconfigurableResources) def resourceNeedsReconfigs(self, resource): # flag this resource as needing to know when a reconfig occurs self.reconfigurableResources.append(resource) def reconfigSite(self, new_config): root = self.apps.get('base').resource self.configPlugins(root, new_config) new_config.www['auth'].reconfigAuth(self.master, new_config) cookie_expiration_time = new_config.www.get('cookie_expiration_time') if cookie_expiration_time is not None: BuildbotSession.expDelay = cookie_expiration_time for rsrc in self.reconfigurableResources: rsrc.reconfigResource(new_config) @defer.inlineCallbacks def makeSessionSecret(self): state = self.master.db.state objectid = yield state.getObjectId( "www", "buildbot.www.service.WWWService") def create_session_secret(): # Bootstrap: We need to create a key, that will be shared with other masters # and other runs of this master # we encode that in hex for db storage convenience return bytes2unicode(hexlify(os.urandom(int(SESSION_SECRET_LENGTH / 8)))) session_secret = yield state.atomicCreateState(objectid, "session_secret", create_session_secret) self.site.setSessionSecret(session_secret) def setupProtectedResource(self, resource_obj, checkers): @implementer(IRealm) class SimpleRealm: """ A realm which gives out L{ChangeHookResource} instances for authenticated users. 
""" def requestAvatar(self, avatarId, mind, *interfaces): if resource.IResource in interfaces: return (resource.IResource, resource_obj, lambda: None) raise NotImplementedError() portal = Portal(SimpleRealm(), checkers) credentialFactory = guard.BasicCredentialFactory('Protected area') wrapper = guard.HTTPAuthSessionWrapper(portal, [credentialFactory]) return wrapper def getUserInfos(self, request): session = request.getSession() return session.user_info def assertUserAllowed(self, request, ep, action, options): user_info = self.getUserInfos(request) return self.authz.assertUserAllowed(ep, action, options, user_info) buildbot-3.4.0/master/buildbot/www/sse.py000066400000000000000000000112011413250514000203770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import uuid from twisted.python import log from twisted.web import resource from twisted.web import server from buildbot.data.exceptions import InvalidPathError from buildbot.util import bytes2unicode from buildbot.util import toJson from buildbot.util import unicode2bytes class Consumer: def __init__(self, request): self.request = request self.qrefs = {} def stopConsuming(self, key=None): if key is not None: self.qrefs[key].stopConsuming() else: for qref in self.qrefs.values(): qref.stopConsuming() self.qrefs = {} def onMessage(self, event, data): request = self.request key = [bytes2unicode(e) for e in event] msg = dict(key=key, message=data) request.write(b"event: " + b"event" + b"\n") request.write( b"data: " + unicode2bytes(json.dumps(msg, default=toJson)) + b"\n") request.write(b"\n") def registerQref(self, path, qref): self.qrefs[path] = qref class EventResource(resource.Resource): isLeaf = True def __init__(self, master): super().__init__() self.master = master self.consumers = {} def decodePath(self, path): for i, p in enumerate(path): if p == b'*': path[i] = None return path def finish(self, request, code, msg): request.setResponseCode(code) request.setHeader(b'content-type', b'text/plain; charset=utf-8') request.write(msg) return def render(self, request): command = b"listen" path = request.postpath if path and path[-1] == b'': path = path[:-1] if path and path[0] in (b"listen", b"add", b"remove"): command = path[0] path = path[1:] if command == b"listen": cid = unicode2bytes(str(uuid.uuid4())) consumer = Consumer(request) elif command in (b"add", b"remove"): if path: cid = path[0] path = path[1:] if cid not in self.consumers: return self.finish(request, 400, b"unknown uuid") consumer = self.consumers[cid] else: return self.finish(request, 400, b"need uuid") pathref = b"/".join(path) path = self.decodePath(path) if command == b"add" or (command == b"listen" and path): options = request.args for k in options: 
if len(options[k]) == 1: options[k] = options[k][1] try: d = self.master.mq.startConsuming( consumer.onMessage, tuple([bytes2unicode(p) for p in path])) @d.addCallback def register(qref): consumer.registerQref(pathref, qref) d.addErrback(log.err, "while calling startConsuming") except NotImplementedError: return self.finish(request, 404, b"not implemented") except InvalidPathError: return self.finish(request, 404, b"not implemented") elif command == b"remove": try: consumer.stopConsuming(pathref) except KeyError: return self.finish(request, 404, b"consumer is not listening to this event") if command == b"listen": self.consumers[cid] = consumer request.setHeader(b"content-type", b"text/event-stream") request.write(b"") request.write(b"event: handshake\n") request.write(b"data: " + cid + b"\n") request.write(b"\n") d = request.notifyFinish() @d.addBoth def onEndRequest(_): consumer.stopConsuming() del self.consumers[cid] return server.NOT_DONE_YET self.finish(request, 200, b"ok") return None buildbot-3.4.0/master/buildbot/www/ws.py000066400000000000000000000234621413250514000202520ustar00rootroot00000000000000# This file is part of . Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Team Members import hashlib import json from autobahn.twisted.resource import WebSocketResource from autobahn.twisted.websocket import WebSocketServerFactory from autobahn.twisted.websocket import WebSocketServerProtocol from twisted.internet import defer from twisted.python import log from buildbot.util import bytes2unicode from buildbot.util import debounce from buildbot.util import toJson class Subscription: def __init__(self, query, id): self.query = query self.id = id self.last_value_chksum = None class WsProtocol(WebSocketServerProtocol): def __init__(self, master): super().__init__() self.master = master self.qrefs = {} self.debug = self.master.config.www.get("debug", False) self.is_graphql = None self.graphql_subs = {} self.graphql_consumer = None def to_json(self, msg): return json.dumps(msg, default=toJson, separators=(",", ":")).encode() def send_json_message(self, **msg): return self.sendMessage(self.to_json(msg)) def send_error(self, error, code, _id): if self.is_graphql: return self.send_json_message(message=error, type="error", id=_id) return self.send_json_message(error=error, code=code, _id=_id) def onMessage(self, frame, isBinary): """ Parse the incoming request. Can be either a graphql ws: https://github.com/apollographql/subscriptions-transport-ws/blob/master/PROTOCOL.md or legacy "buildbot" protocol (documented in www-server.rst) as they are very similar, we use the same routing method, distinguishing by the presence of _id or type attributes. 
""" if self.debug: log.msg("FRAME {}".format(frame)) frame = json.loads(bytes2unicode(frame)) _id = frame.get("_id") _type = frame.pop("type", None) if _id is None and _type is None: return self.send_error( error="no '_id' or 'type' in websocket frame", code=400, _id=None ) if _type is not None: cmdmeth = "graphql_cmd_" + _type if self.is_graphql is None: self.is_graphql = True elif not self.is_graphql: return self.send_error( error="using 'type' in websocket frame when" " already started using buildbot protocol", code=400, _id=None, ) else: if self.is_graphql is None: self.is_graphql = False elif self.is_graphql: return self.send_error( error="missing 'type' in websocket frame when" " already started using graphql", code=400, _id=None, ) self.is_graphql = False cmd = frame.pop("cmd", None) if cmd is None: return self.send_error( error="no 'cmd' in websocket frame", code=400, _id=None ) cmdmeth = "cmd_" + cmd meth = getattr(self, cmdmeth, None) if meth is None: return self.send_error( error="no such command type '{}'".format(cmd), code=404, _id=_id ) try: return meth(**frame) except TypeError as e: return self.send_error( error="Invalid method argument '{}'".format(str(e)), code=400, _id=_id ) except Exception as e: log.err(e, "while calling command {}".format(cmdmeth)) return self.send_error( error="Internal Error '{}'".format(str(e)), code=500, _id=_id ) # legacy protocol methods def ack(self, _id): return self.send_json_message(msg="OK", code=200, _id=_id) def parsePath(self, path): path = path.split("/") return tuple([str(p) if p != "*" else None for p in path]) def isPath(self, path): if not isinstance(path, str): return False return True @defer.inlineCallbacks def cmd_startConsuming(self, path, _id): if not self.isPath(path): yield self.send_json_message( error="invalid path format '{}'".format(str(path)), code=400, _id=_id ) return # if it's already subscribed, don't leak a subscription if self.qrefs is not None and path in self.qrefs: yield 
self.ack(_id=_id) return def callback(key, message): # protocol is deliberately concise in size return self.send_json_message(k="/".join(key), m=message) qref = yield self.master.mq.startConsuming(callback, self.parsePath(path)) # race conditions handling if self.qrefs is None or path in self.qrefs: qref.stopConsuming() # only store and ack if we were not disconnected in between if self.qrefs is not None: self.qrefs[path] = qref self.ack(_id=_id) @defer.inlineCallbacks def cmd_stopConsuming(self, path, _id): if not self.isPath(path): yield self.send_json_message( error="invalid path format '{}'".format(str(path)), code=400, _id=_id ) return # only succeed if path has been started if path in self.qrefs: qref = self.qrefs.pop(path) yield qref.stopConsuming() yield self.ack(_id=_id) return yield self.send_json_message( error="path was not consumed '{}'".format(str(path)), code=400, _id=_id ) def cmd_ping(self, _id): self.send_json_message(msg="pong", code=200, _id=_id) # graphql methods def graphql_cmd_connection_init(self, payload=None, id=None): return self.send_json_message(type="connection_ack") def graphql_got_event(self, key, message): # for now, we just ignore the events # an optimization would be to only re-run queries that # are impacted by the event self.graphql_dispatch_events() @debounce.method(0.1) @defer.inlineCallbacks def graphql_dispatch_events(self): """We got a bunch of events, dispatch them to the subscriptions For now, we just re-run all queries and see if they changed. We use a debouncer to ensure we only do that once a second per connection """ for sub in self.graphql_subs.values(): yield self.graphql_run_query(sub) @defer.inlineCallbacks def graphql_run_query(self, sub): res = yield self.master.graphql.query(sub.query) if res.data is None: # bad query, better not re-run it! 
self.graphql_cmd_stop(sub.id) errors = None if res.errors: errors = [e.formatted for e in res.errors] data = self.to_json( { "type": "data", "payload": {"data": res.data, "errors": errors}, "id": sub.id, } ) cksum = hashlib.blake2b(data).digest() if cksum != sub.last_value_chksum: sub.last_value_chksum = cksum self.sendMessage(data) @defer.inlineCallbacks def graphql_cmd_start(self, id, payload=None): sub = Subscription(payload.get("query"), id) if not self.graphql_subs: # consume all events! self.graphql_consumer = yield self.master.mq.startConsuming( self.graphql_got_event, (None, None, None) ) self.graphql_subs[id] = sub yield self.graphql_run_query(sub) def graphql_cmd_stop(self, id, payload=None): if id in self.graphql_subs: del self.graphql_subs[id] else: return self.send_error( error="stopping unknown subscription", code=400, _id=id ) if not self.graphql_subs and self.graphql_consumer: self.graphql_consumer.stopConsuming() self.graphql_consumer = None return None def connectionLost(self, reason): if self.debug: log.msg("connection lost", system=self) for qref in self.qrefs.values(): qref.stopConsuming() if self.graphql_consumer: self.graphql_consumer.stopConsuming() self.qrefs = None # to be sure we don't add any more def onConnect(self, request): # we don't mandate graphql-ws subprotocol, but if it is presented # we must acknowledge it if "graphql-ws" in request.protocols: self.is_graphql = True return "graphql-ws" return None class WsProtocolFactory(WebSocketServerFactory): def __init__(self, master): super().__init__() self.master = master pingInterval = self.master.config.www.get("ws_ping_interval", 0) self.setProtocolOptions(webStatus=False, autoPingInterval=pingInterval) def buildProtocol(self, addr): p = WsProtocol(self.master) p.factory = self return p class WsResource(WebSocketResource): def __init__(self, master): super().__init__(WsProtocolFactory(master)) 
buildbot-3.4.0/master/docker/000077500000000000000000000000001413250514000160575ustar00rootroot00000000000000buildbot-3.4.0/master/docker/README.md000066400000000000000000000012101413250514000173300ustar00rootroot00000000000000Buildbot-Master docker container ================================ [Buildbot](http://buildbot.net) is a continuous integration framework written and configured in python. You can look at the [tutorial](http://docs.buildbot.net/latest/tutorial/docker.html) to learn how to use it. This container is based on alpine linux, and thus very lightweight. Another version based on ubuntu exists if you need more custom environment. The container expects a /var/lib/buildbot volume to store its configuration, and will open port 8010 for web server, and 9989 for worker connection. It is also expecting a postgresql container attached for storing state. buildbot-3.4.0/master/docker/buildbot.tac000066400000000000000000000011551413250514000203560ustar00rootroot00000000000000import os import sys from twisted.application import service from twisted.python.log import FileLogObserver from twisted.python.log import ILogObserver from buildbot.master import BuildMaster basedir = os.environ.get("BUILDBOT_BASEDIR", os.path.abspath(os.path.dirname(__file__))) configfile = 'master.cfg' # note: this line is matched against to check that this is a buildmaster # directory; do not edit it. application = service.Application('buildmaster') application.setComponent(ILogObserver, FileLogObserver(sys.stdout).emit) m = BuildMaster(basedir, configfile, umask=None) m.setServiceParent(application) buildbot-3.4.0/master/docker/start_buildbot.sh000077500000000000000000000036021413250514000214400ustar00rootroot00000000000000#!/bin/bash # startup script for purely stateless master # we download the config from an arbitrary curl accessible tar.gz file (which github can generate for us) export PATH="/buildbot_venv/bin:$PATH" B=$(pwd) if [ -z "$BUILDBOT_CONFIG_URL" ] then if [ ! 
-f "$B/master.cfg" ] then echo "No master.cfg found nor \$BUILDBOT_CONFIG_URL!" echo "Please provide a master.cfg file in $B or provide a \$BUILDBOT_CONFIG_URL variable via -e" exit 1 fi else BUILDBOT_CONFIG_DIR=${BUILDBOT_CONFIG_DIR:-config} mkdir -p "$B/$BUILDBOT_CONFIG_DIR" # if it ends with .tar.gz then its a tarball, else its directly the file if echo "$BUILDBOT_CONFIG_URL" | grep '.tar.gz$' >/dev/null then until curl -sL "$BUILDBOT_CONFIG_URL" | tar -xz --strip-components=1 --directory="$B/$BUILDBOT_CONFIG_DIR" do echo "Can't download from \$BUILDBOT_CONFIG_URL: $BUILDBOT_CONFIG_URL" sleep 1 done ln -sf "$B/$BUILDBOT_CONFIG_DIR/master.cfg" "$B/master.cfg" if [ -f "$B/$BUILDBOT_CONFIG_DIR/buildbot.tac" ] then ln -sf "$B/$BUILDBOT_CONFIG_DIR/buildbot.tac" "$B/buildbot.tac" fi else until curl -sL "$BUILDBOT_CONFIG_URL" > "$B/master.cfg" do echo "Can't download from $$BUILDBOT_CONFIG_URL: $BUILDBOT_CONFIG_URL" done fi fi # copy the default buildbot.tac if not provided by the config if [ ! -f "$B/buildbot.tac" ] then cp /usr/src/buildbot/buildbot.tac "$B" fi # Fixed buildbot master not start error in docker rm -f "$B/twistd.pid" # wait for db to start by trying to upgrade the master until buildbot upgrade-master "$B" do echo "Can't upgrade master yet. Waiting for database ready?" sleep 1 done # we use exec so that twistd use the pid 1 of the container, and so that signals are properly forwarded exec twistd -ny "$B/buildbot.tac" buildbot-3.4.0/master/docs/000077500000000000000000000000001413250514000155405ustar00rootroot00000000000000buildbot-3.4.0/master/docs/Makefile000066400000000000000000000131561413250514000172060ustar00rootroot00000000000000all: docs.tgz dev: clean html .PHONY: tutorial manual VERSION := $(shell if [ -n "$$VERSION" ]; then echo $$VERSION; else PYTHONPATH=..:$${PYTHONPATH} python -c 'from buildbot import version; print(version)'; fi) # sphinx is fastest on Buildbot docs when using around 8 cores. 
Anything more does not improve the build speed JOBS := $(shell python -c 'import multiprocessing; print(min(8, multiprocessing.cpu_count()))') TAR_VERSION := $(shell tar --version) TAR_TRANSFORM := $(if $(filter bsdtar,$(TAR_VERSION)),-s /^html/$(VERSION)/,--transform s/^html/$(VERSION)/) docs.tgz: clean html singlehtml sed -e 's!href="index.html#!href="#!g' < _build/singlehtml/index.html > _build/html/full.html tar -C _build $(TAR_TRANSFORM) -zcf $@ html # -- Makefile for Sphinx documentation -- # You can set these variables from the command line. SPHINXOPTS = -q -W -j $(JOBS) SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: conf.py $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. 
The HTML pages are in $(BUILDDIR)/html." dirhtml: conf.py $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: conf.py $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: conf.py $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: conf.py $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: conf.py $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: conf.py $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/BuildbotTutorial.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/BuildbotTutorial.qhc" devhelp: conf.py $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/BuildbotTutorial" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/BuildbotTutorial" @echo "# devhelp" epub: conf.py $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: conf.py $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." 
latexpdf: conf.py $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: conf.py $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: conf.py $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: conf.py $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: conf.py $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." spelling: conf.py $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling @echo @echo "Spelling check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/spelling/output.txt." doctest: conf.py $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." buildbot-3.4.0/master/docs/_images/000077500000000000000000000000001413250514000171445ustar00rootroot00000000000000buildbot-3.4.0/master/docs/_images/Makefile000066400000000000000000000016151413250514000206070ustar00rootroot00000000000000 SOURCES = overview_src.svg changes_src.svg workers_src.svg master_src.svg multimaster_src.svg auth_rules_src.svg SVGS = $(patsubst %_src.svg,%.svg,$(SOURCES)) PNGS = $(patsubst %_src.svg,%.png,$(SOURCES)) .PHONY: images-svg images-png images-eps all: $(SVGS) $(PNGS) $(EPSS) images-svg: $(SVGS) images-png: $(PNGS) # Source SVG files contains text labels in "Noto Sans" font which may not be # installed on end user machine. Render these text labels into paths. 
# "Noto Sans" font can be downloaded from Google Fonts collection. %.svg: %_src.svg cp $< _tmp.svg # Render text as path. inkscape _tmp.svg --verb EditSelectAll --verb=ObjectToPath --verb FileSave --verb FileQuit # Cleanup SVG to reduce its size. scour --enable-comment-stripping --remove-metadata -i _tmp.svg -o $@ rm -f _tmp.svg %.png: %.svg inkscape -b white --export-png $@ $< mogrify -trim +repage $@ clean: rm -f $(PNGS) $(SVGS) buildbot-3.4.0/master/docs/_images/auth_rules.png000066400000000000000000000523551413250514000220370ustar00rootroot00000000000000PNG  IHDR\2gAMA a cHRMz&u0`:pQ<bKGD pHYs+tIME 8$.UmSIDATxw|MWl2DHbv EUG(J-VKŨ=K{Ĉ@#{+ 1/~>m99}>ytT* !%scOP%=uMjyB|$B m)T ڵVHu~)711 ПvmZ ^`[!D6nam#$DgH- O+W6y<66nD0qw8.&KE!O!T*Ӿ7a4 Kb{ӾGB-YS:i"clV i9Bh7ݵ":7^zϋ\⍫[~'qLOZZ;ߴ4Z>Wػ zktT_/8t(/k)Bh B|hيUmk++m7E| ѶuK,_Ul !D>s-N=Kʹkݲ9_Zum7EwvXX!!!$%%i,'$$m7Qg.^ŋ vS rE_\xXm7SnAzXhyiC/nvSК|lwؑZj|ΝԪUAi=r6mJӦM۷#^ϏM2xlLӦM9qℶUٻOxi)oMll,hn !<<\My% c}j)BhE x2]Ǐ$!^˹ק|n"))_DDk@tRWO?R{҆'O݃RRR VZ㭿P\YLMM9w"VkDIIIL2='QQQSxq?~644pR@͚59v|g{CѢEzIII\~###ʕ+իWIOO϶o}}}ʗ/͛7IJJtΜJeWtttř3gԩƲB ѭ[7F;iʕ+3g-[dٲeٶIMMK5߽{7+W|U3QSM❓`9NÇgKq200 $$___ƍDDDsNNjprrzvm9ڲi&oΘ1cy544$..sQzuu2ovSS_>… 8'%Jp;XI\DDD?33&[fϞ>jժŸqhذ!;vLۇܻw… |:VJʊ+WR?YjM4a„WQL2L2%u ѣG޽֭[t;jԨE(RUVח Y~=&&&7oÇԪUK.@zz:zzz|lذS|y.]m-ZW^977&44f͚qq:DÆ _~)O &MZ2d˗q_}tԉǏ^۹]ϢɓϹ3 sMNN&##]]]ttt^) @ jݻ7n˗/1}*UCj8p'OQF>lZn͚5kX~Kkn@=@=\]] ˗/'N0{lO~BCCfĈX~A D pssDEE)9JAZ*{tܙrѢE С/_4lؐ3fÕ+W=z4TTI9رcIHHVϏJ*n:?ŋYjÇGWWƏ>k֬TRӇ?.]sQ7oror+D~"v. 
;v,Ǐ?رc̘1CɗU۾};۷oإvYfΜt|޵t Fo}bcc9w^^^nݚ{~nݻ$$$cƌy6ԫWի+RRR޽;~-7ofݦGsRQzu%˱unΝܹs4!C丝fyek׮l޼ӧO3h 6nȜ9sLȚޒu+d愄pilllhРA;&:>_^뾗wo^]JJJbʔ)ࠤ5ڵOҠA̙3W[laʔ)9eHKKח bnnΨQXf ;v`xyyqI=J///VX}j֬ѬY3VZիWiذ!ʍdŊO]v?+7JOOgٸRvmVXAtt4QQQ.\Ç#ڷo\ÇgРAjtԉ ϵ@<} wޔ-[wҷo_.\DuMO DtFzbbbظq#F]@@ҫOcݥK썶UV='O̶MbOT*ڵkǔ)Sذay9~:Tn'---''5jԠ\rJO_Nc&D)8/,X… 9t[OZnHGMݻiddĆ ׽)0] B ǓNYxR:U=3h58;;ckkKdd$wܡL2CbccPP|yN|ۓc(U%JoR###ݭ[HMM9@2娿FFFX[[NLL ƍ@^&uIV_|N7-9%坕q}m/g~68S૯bǎ,ZH#خQ2Y;wJ*7x̜vU,QQQJ RD V^MRr u|riرǏs)4he_ /9ʕc̙‚-Z_A*U*zuvDDFT||]r}vE.]mFIuoS^ʕӠWuonaA~,,X$_4R>|-ƆH"""r MXXgNGzI`:DFFϟ'::m8/^8/G=jWϲҨ£c޿8ׯ_Դ&\MObfvy!g.VfrMMMaǎ9ɍ-={䧟~b 'zzz,\ڵjrb -?#;w N닿? 4P8(DGGhmnݺ_e_*\\\s'NMTS?W`uqĉ<y~[>TD4u@wՕ-[vZ}r~9uK,H!xBVL:5kiucr>(R~~~̚5 yfׯW\v kOr+ŧz3224Rpppq`BX6mJpp}5y7j/cKJJ8ʕ+M?{4R%CkWzlڴINHH`֭@p P<\go=g11PVD`dhLL {쉡!gΜˋAѠA177C@bwww>s|||[-Zׯc9ׯWwk׮|wJW^D)yj… $&&ҤI,--r 7cǎo9ϟ{t=״ vJjj _ݿ_ s۶m}*UдiS @-߿?׮]SJeN r\ؚ4iWiС>!266VPڷoOǎfݺuSzg̘'G8pSSl%}||޽;{бcGT*+W|xuTTy_gqN7wQN7NNNq"^^^QF4 6TROrw6'۷o~oFɽ"w3˗/Ύ`Xb\Rgmm%Tz֭J½{صkv9gbŨWO>eݺu>ժU˖wₙ9sKKKV\IѢE~:'Ňٳgg0&pa_nvYW]$""I&~w݉`ǎ\pe*NNNaxxxCBBBI?rsu嘲N ͛7)S g֨{cһwoĉн{wI|\]]f׮]:tsss̙-ԩSJr+矣æM4UB+n y{{n:%͢N:۷O?x ZښΝ;5֩ Ɔ+*OAWmF{dQ9r$vz=]pvvĄ׼̍d ώ!xq ߾ܹs)WYfCr7nɓ'tO<ʊJ*iV˗ԩSc5RHuF˖-5~a8pƨ+WRЎ#G2t88s JWڵ9yRQTR9(N8lwIzzƤ1?WмysLMHGo^= .L`` **r9K.u>vrrbgbggDž HHH`ڵAM++˹uקaÆt֍+W2dtBrr2Rn]_ڴiCx)gߵ={h<9s^zC>}߿i|'-x68@قӋ&7}~CC@#}Ws>}dUH]Ͼǣfddk)׭aaaxܾeVhP)o%-[F~022bݛӧOsi 1b:… Yly]?-[65kFڵ9qcǎRJ^:[ZR^زe n֭[>ǍGhh({eҥ4y/B~2TZmwsBرիW+=ٞaiiF0 jʱ溩) 鄕jS#І'-Oltuu̶/ssၻ2SPKKKjժK #%K^zO\٧[ڠLzpqqTI}N߅8D֭epmGN4!<rhQ{{g[KFشi!!!̜9Scr OΝqrrbJ^!>$l !D>Կ7*㕘Ȧ0_n[X1|||u6mQFcʔ)8::ҹsgZextutuȐ VhGF/l2-[I6T*t_s¡Uv-*T`}:G)'uj5tԉ}6666oLƍ)WfRCf9s}BhЉVn׉b+3;w(*:%V4S2O߱cGކhZ,׽x"X|ڤKL7ʮ}jG$U&OΆWZJNNחe˖ql=FFFnݚ[ny> BRTGcyJ7ʊ];ӦUKo~x'/׽kkkIzh)~^:)/#.>a#G1oڷm;"00%K+UU244$%%… Oɒ%IIIa\ +ŻKeJt13vDu!6lHNؼy3Mb3wh3c : ܜ;oC-r!6=]]%ړ?TR53gBFFFa&,c> Џ^,;r_-]Fvm:dPY _|"Fm5ի 9}4 ow|2cؗpƔf!苣o{d*BC1cT֩& ޒ`[hͱc b̘1#N8Aƍ]p .h ԮU7| ڇ=ܡ=%JhiB疿 8IY\LL,!kl HOOĉ8ph?z֭KKKG> BLL>tݻvfͺ?8-FFEDsY9Jkӊmm&Am.\pBtj~W%o[]799Y_&pܹz 
[[1!sQv?6`fjJre(joO`jbאӧONXx8AA7Z*э^^ғ-K`[h:_Zj 8PY^fM~WHLLD>|HJJ ܻwGGGm1-}FfJ+]ΝN!..DT (18+J5']00xwH-B]vm*UҒ'OpIptt$44K2tPnJhh( `[-100Z*TZEMBJPf=rH %۷WKoB!gҳ-޹舣#uɶm۶DEE #aѢEܽ{%Kҷo_[-WWW/,UۧB!9Jv#B!ކ.=gUvSD>s4!B!I#B!HH-B SNڵ+CB!B| \2vvvܻw9s0zhm7+_`[!#gggݻ9wSL`ӦMjlą WeIIIɶܿ__LL]$B!G뇍 *K.)>Lڵi֬۷Ã뼽$ ~MƿJۇu-B`ccCTTЯ_?iӦ ]ŋSpa WZ2e3Cwl !B#)))ܿbŊb i׮-zՋ_~>}]v_ۇސ4!B|"==Yfnnn:t=z(6j;;;"""v횶m!B܃U$%%ajj… '55;wi:::TXQ @TTkJzB!򉌌 .]Jݺux * }}}5/TYi$fϞMʕ\2K,!kB!"EG~ؾ}NGGJQy uu2VXVZѪU+ʖ-C: B! oooزe XXXCzz:qqq?yKKd`[!"pppO?%55UVK:;;pe[J˗P`[!"ӧk֬!66vZecǎF"BԫWs56l۷/;w8::~zYXlfe_puueРA>d`[!"ٳ'ƍcҥݛ%J|rFΝ;A8Aٳgsŋx">Q=;T!#ѥwjnV$%%2[$@jj*aaa-ZeWHٲe_DD)))tttpppѣGk366FۧA+:±~ҳ-B*P%Jȶ  ZoLr]WP!. B!-`[!BDm!B! B!xK$B!-`[!BDJ|kl'OӵsG*/h߸Ap]B#11QO8cLLLptpDqʖ)%Mm/14kڄŋk9"balO,-,ݜRx){p{(Q8ŊRkkk@OOOM ==IIp[Fp]YÓMSٵ:::n EtM, ZnȇNt=ܵݔt~7LBXX:'''j֭֬[Z*ݻgv-W {*:P8xnn$Bȱs,7l177go>r4UV'u~SSS_:u*XWxqƍG>}PTTR 6(G e+H)>n%BW11L7"vvnHc!L[6է{OMMeL:[nistt믿fРAk8,XS'k|Lr"7'<=ܨS">2kӽ,|gddyf*VH^4… 3sL_ód)OhFm!ȇΝӧWOm7E|xsU~D[DDDc(Zh+><[m^ROu&4jmT(n|>}Zc_~%cƌ)ćJm!QF3ŤIprrRߟgf{#=z4dĈDEE)뭬;w. 
:4vݛӧ~;v *Uh5jS~ѡJ*L0Zj0tP_>|ejc)55=3u<&===HȬ;F.]pwwgo帷lݻ4iŊ{#U&K@GG'6iii$$$Ӈ˪o߾?Otߋ4IS5+ ؿ?ƍɓۙ Cm!^gΟ?ÇŅ"E(?*U `ӧO)U...ܾ}+W`gg15kfVxXYY)nɒ%sm[jj*gϞE__4jK֖ Ο?OnP㧟~ɉ~ҥKkԋ7󘙚Ps_GΝs\w֭"vɒ%RF FFbb"Km?1cPn]6mڤSΕq.9˖/g?8~zSSSKF+'/i֬YTX[nѲeK"""ظq# /]v^i&>}J*UUR3^Jѡ^z,_I&QfMVX¶]tj׮͉'cРA9n۫W/G\\]t,_y@5߱N:9#55lIOOWP.TF{HOODf 6[ccc .cbcctIIITPE*∉Q~ht$"""055`ܽ{7466.wԙTJ500L2˞MSz_}ZlO8ݻwڵ+}E(zƍU,X0|p5;*Tq)S2Ó'O`_~L8mۦ@f 9s&֭`ѢEMٳgر#ڵcԭlxRٳhiWzm…ٿJ'$$R8|0_~ƺT?^͋C嘞\W^ĉXJ3 x }ٳg7|MFcƌz8;;sUJzz:{ʊ+VtR۷/-Zƍ,_ŋ?fks|"Q@ٳǏyfhٲeKqDGGG۶mqppȶ}@@2Πv'O'E!##;wRX1U?Õ+W(S ͛7Wnվ}(TM6U(P%KrI5rO:E\\:u*[ؿ?wؘuSwr-ؾ}; O>Q_v{Jrhܸ/gaccCӦM}۷˗/ckkK6m47nڵk4jԈx֭[3-[|xqGb=fٲe4iGG7~ `[נ,?dÆ ߴlْƍdddp)tuu\2xAI b9 66M6=w'9sٓ һwo+ nݺݛ={gYl 4'y}_~O>*Tz|r-- k zUڵkԩǏg2kӽ{(\0˟}o?̾}Xt)П~Gbnnd4oޜ`e}NźuXf /fܸq|wʲ rV\IfHOOgԭ[2eh˗/sq>3e?իg #""O%%_iܸ1t҅ 8p UT2o Gc?ҥK+W0tP峘?>GQt1`zΜ9ؐ;w2{l6nȘ1cs;v|`ۡXQСT·l'{&##ݻwgttthѢEm~7oShtؑU7#FPWcʖ-9@IKKc"M4А?C"~)ժU#55渍Bȑ#=M4ʊ0n޼m޼9* 6.޽GGG|}}5GGGcfjmz7iĈߟ%K`kkKDD~ɒ%/駟r_MY~=k&66V#`ҥ(P9sh" ,ȪU8p {imƼy4-[gVJcΜ9Kf(l~C011qmڴaѢE>|́_5,]cرcIOOg^CҼfKҽ{w.\w`Æ ۷ѣT'ɓ'M۶m5z sc^B|g[Ծ}{RSSIJJBGGѣGSZl۩{)lذ'i&6mđ#GXv-*UzvV 8m ޽{s~~~\v ###J|ȕ+WhҤIj?#F_~ҥKcgg077UVʶ4nܘ &(m{Vʕ1c9y*Ձ)SpuuՕm۶qQ"##2LY|ǸREDm!^III$%%_0|9s&͚5S~‚ӢE [޽;ǎVB0yFFFJ c עE2gΜW..T/_V֥Э[7o` [۷y[l2{ۘexG}@OOVZ~z:FjH$&:>ʾܹsxyyѺukz~ #WNJJܲmrڌz ܹqxyylgMqrrH"J+O077ؘOѣx"yU%G:q]v }}}*W,sss#((˗/kmcl|Im!^Қ5k>ח &d2xfIƍ\27&** 60pnJRիWzjق.](*V܊%/bjjX<]RQ^=+Ν;;!OV$bcc144dȐ!vgoN˗/JJNVίBWWWUƹs?/^y_vvv4k֌kJ}6j߹s@4Cfieeܼ[]]]t¼yزeK_uo=| 6L2*OINNI~6/Aٲe9x ۷#Y+WGEE)5 ,H%Xz5f+W__N9jFFFH7Ӳ%#qYې`ymΫ y۽{رc.]%Jlܹ!C'ժUۤԁziFFSVDlA^ħC#wޜ;w_~=zVϰUA+WN*ÿKΝCRi^oY` {T*G(SQ7hY'̌ŋs]Ν; XKKKSsp$7]taڴiomBVL:5kX!C`kkˏ?TL!66.Oyט:'11Qcz:d4#333>}:S &ŋdɒ?~e0anf͚Ejj*sΥ[nڵ+Okԯ_cǢ˴iӲ x̀011ח^z1zhFDΝ;j1$иq\ڵ+}]OJ{קCr4J:>ѣJo T@ܻwƺelY%ZM\>}##\*3OWs{_w74s,O4--퍜޻Sb6L!^Yx1ϟԩST\/jCsssců4x(44T SOOO.]_sǏT*w{dSvڥ4k +++l߳7 ...TN+Wdȑ$|DSjh?xѣGkLBUn4cQ+WPv͗~mTTƲUVe<ڵ#003fč7=' |4lPٺuk&ۛ+V~zLMM155eerʴk.ۤGYАj֬200t1eׯό3&::ZٮXb#f̘.>>>tؑY&.]N:t1շo_ϣG([,nСCo|jUI-D:u J1ΎK.R@yݺu9sGOo&66{{\?ٓv=7^ 
&0j(%|Ť@flE)Sg[nmmMbŸzFϴ}I(Ǜ֯_n޼Irr2KVf.ܹu/ԩS?~ƾ6lg{WXArr+i&Ow,\JMNN6Ћ*gQ,3 byzMBBBI\\\!88X/ {1wB#8MKKDFF'`ggܘ9;;%׬Yĉӧ&99O?4PT+yj999aaaJҥK@ \WHHO4l|ImM N_|Mۧ"W`{„ ,=5'n"VI!gڶiŞTfNבhۺ"H-Luwرwߙϱ{~ Rw&Jm!ȇͦ?">RO- WJ|Mm!ȇedžMG,]+l7\4mb*!>VRO!_[Ooj׬KnWWW\]] ^ ⟽{Y7m7Em!ȧJ/C5GbccOHy 9|(%BulwjL5'ɘxL~U*ұ};m7GBM8CCCf4m7G|Ҙ=w>L4^!Bs̟3ظ8~?$m7I|`6c6Xt0lV%tEXtdate:create2021-09-07T15:56:36+03:00;t%tEXtdate:modify2021-09-07T15:56:36+03:00JqtEXtSoftwarewww.inkscape.org<IENDB`buildbot-3.4.0/master/docs/_images/auth_rules.svg000066400000000000000000001061001413250514000220360ustar00rootroot00000000000000 buildbot-3.4.0/master/docs/_images/auth_rules_src.svg000066400000000000000000010647231413250514000227240ustar00rootroot00000000000000 image/svg+xmlProcessAny processing function.Auxiliary OperationOffline operation. REST API endpoint Role RoleMatcher EndpointMatcher User Authenticated User Auth buildbot-3.4.0/master/docs/_images/badges-badgeio.png000066400000000000000000000067411413250514000224770ustar00rootroot00000000000000PNG  IHDRTױiCCPICC profile(}=H@_Sb+vqPu *U(BP+`r4iHR\ׂUg]\AIEJ_Rhq?{ܽZiV8鶙tfU B@}EPf1'I _.ʳڟsY>xMAv \f>I6л \\75e ٔ]OS3  ty5q pp){ͻZ{L4rf bKGD pHYstpN-tIME *ake IDAThPTW?khqED0RjDG] TS 5.C%U&8F !n6U;effD7d0&# em_w?:IV{9w}V,˲A+-9t̓DkPt{Y=meYVYYUUU#~4ˍEe PȻ%g~͛#~Юᑗ:B mU[nM;t^fP9['ݝbY`AH4 @{K|OiLQQQzILLR~zƏG)[ݻw믳m6 6lmţؼ' :}t6meee 0>ϟ?ϫzGZ]ܹsg|:|~z{1xG@A2Ct UiŔ)S8vXH𤔤l2jkk rQRRB}}=)))<3lذ4z)Μ9CLL qqqs7:gΝ a޽v}ڵTTT_RXXHmm-NxN>{+Ν;Zl¾}"55\ l&-B4bccQݎFJɔ)S9s&۶m --͛7ģc 222P]]Mo#D ЄQ}cW**ǚ5;^!w. !Mv'v%c4lBŏAKP Vn2/q 7j)X 6~z7Ɉa&vNəWj%)kyhģLj7INNfС={`=JG$%%K/Djj*yyy0a$$$/r~a233y뭷HKKc,[n7tQTT@YY4C 0 ^/6l7ޠիWSXXfu'$33hò,L a/e1NVZEss3YYY;-҂8xb? 
@#m4$UKq#X#0{<Τ]\a 2U徘iLwgE"f0c# U|5m@@DH2VhDCN&>t֬Y8pMӨbڵ )%躎墦Eddd0sa&.FR2aFow8=37u7pǏZcccihh>z*{aڵFv{}~,bҥ/u|\.***-!ܧ^x[h9ױK?:ב&L:_aV5 Lt} iZ|VSɆ^H㯩j ~u֯y9go]>D)^CMc֬Y|>.\9s& !RiZi!k^6-ÔMu{hg4Ci,l6q$$$CPU>cV\0[ŷy]Ci|HS01w~ƌ9ddlUiU4 i M%N!?7o7N®O7{1~.d 273Sζ.>$ԩScѢE,^ŋf̙gG3f sRJf̘ACCmmm]c#GpB4 I';3hO.]Dbb"RJbccIII edd0~xy+g{?36~iq]h8ia#gcI'+x"ؤ4-jL f%UWxtb6#Q C˜$iqر4Eo;|7|"a8=!|U'EH{'>C.4r[,嚯sXB`b3l<ˉ+V\7ߥh>O?̿+E(8P?aq҇a/~LXY{phXK')dPŲ,+0}wvp8!W\!222x4tPf͚5(ӉirׯzQU5l[SS5qDFFwԁ<RuDEE( Ġ*iv6lz vS9n'** Mӂk@˝1]QU͆eݎ/}!S,[%6bHc%9 6T y#hK&v 8cyQ`P:J>0SŵFQYϫYl/RnwulKʝH ciՂ>BNi:Kfݞn[HB)}3 >ߎ?~JN'w}+LK- Is{|=ۯ]n#h Ni-Ż֠vv9@yHÃD^3]q yyyx^>#n- [h#v/ozPZϱCIENDB`buildbot-3.4.0/master/docs/_images/bitbucket-status-push.png000066400000000000000000000217301413250514000241270ustar00rootroot00000000000000PNG  IHDR@> zTXtRaw profile type exifxڭkv sY_yNvCHl7'3&Q?o,*ʹF^מ'_N>z̥±luL H`Oz1`'gNIr=LB?־Nuq}L~όBmTi *9J|~kRmuupR{&>1~-з"S-]#N~A)/Ew۽絺QQO08)yy?y{ޝROTnH54M9b'79/kVZyQ*Vz@nJrKz۟djQEtQ& ;FE4kiNea[f5N(3x؄z#}6]˩AFWl\L*-* pT[woddi%ZyjH3ͭ@4g0Whr}pfKI!0x7:i :ecH18khr8"Y3m:].9ms4ۓe::Ex>1j?-n+p8Fq{I2*f (755yhY^\KA!LARGe9R|]*m23F v3rץ{.HET*ET,(n{D_"E`wii(^kbmh=V=h Cd8A(՝U(6Z] +L쏮HNw3GX,m wٕJN\XR@ ɝY/]Ä6|y%R^O@xFTl 1kD IN $ G[̧ [0jinv(Zd-GQGYi+& PmZ?igD9*liPoI.չB܏wh1 ۱kL@ӾC]&QP\ĉ@[)=YtLf:4&D0#[HI s=UTt~6NJF8G hJS6*Bu.ma"dRY2`gT b,#3AKm̢(<3Ȥ"CµfgojZYZ6[mz-ZYeCC9$8E**t3C9R>>{(!!B!$I !BB!IZ!BIB!$-BIB!$i!BHB!$I !BB!$I !Bqs$Y/ѼVf༡2q)ż*A>/#[+ϑSh'1KoLXjU?d|rC{))\~wb_l}_'wd骕'1.7whR^7Q=.4Ң3>Ka5.nR+#1F\-k-ykxyFb G NO I-R)#1F\-?`cF§I #_<KioљN.<2;2߾juxSlO, SOK/}&7Wc,JM*úy26;k-23Ğ*bPG?`ul6:n@&ڦN!zZ"s B1c$I_M*=NaDMO 7`2wZV<Iew-EgXw(/&(< Wa6X/ңEo507+1?fҽ -BbIz^3;OLUt08a{eDs3ql5{]v7!%3tUSG FۘVUgmIЩIUK1{ցt tc6e0=Gqt'DsBb^ğ>Ax*1Fb$+0[3iS!>Ue^3>f>eH׆!:ǁc狯JPB _JGx7D0(f6.Z:zT>H/atO{ j(5SJxu$՘ϐ>CJEgZăxK#I`*4 h &2cž赊lV`4^QWq!;k`D/t5s$#wt!FJsb'U䗟sKsCfY-۟ެ*SWK9[!Hr bIֽT?:r!3H|;R2k\ퟭn"3[0}k,2Rz'N#1mImAG\Ix`5u}:] C@o=}2nպ JM,1wg' j(]VޛmmcYIc$\UUuce^3_ήxObܷ1b3M?Ț[jؙs7ȶ>1(01U xýn9ht " QIVFqsbEIb϶$ƈ"ILm$}zU 'Ejup + 
znqxS`,mIUI;_; SoPeBCʾP| POt1Jujk_rfZ!*ßU}"?{&d%A׵zwwwlY2>6WK8_70MM5'S-!n$=jIDp媷% :=TZ뒠mKEC R~#Zea"ozBTjo*e9 -!nȖ^Z(Z6YPܓFZQQ~ay]vnm!nL"7mq]&!]5<}V-)ßB>J!$ 0k3 !>?2:@qs'Kvϻ]ݍ1A/_z3>Ery9_Y\H˲:ԙ ޟ"Vo^c;+v.x/QUH[c²[91!$ iY,]||}<7^vh|1擹+/(BQ"3(9̪ ?bX `ܨ{PM>ƴfMG{u2Ӑ˲5[91EQ% њѾM cى9 ##/(bGTXҩjE6rO7ʍ vKquJ=}"V~Oiy9-2ep6na_1@tD XN ޞy~w~rgT$s4&1f.VnYڷ>IM)-+ϖp$!`.ZG>ݸ.%xٵa*oa出 hҟ '#+>YH.:WlƳOh2*[ځ<` 3ku h4aK ?.Ύ|շK[™dwE"[8GNv.~t mCLl nK$Yxq:VֺNQq)GnwͬNŹ 7jN0p"<(--md6g=|-#fuՕ#b4Pf*GWg}Öݼ4y >Mc#QSYG^}vdcX\xª{h|1ޘBYKֳq 7Nҹc;,Wr$|j]k9/Neљ'hb2;bGNGUٰe7Ypsq&_PĿgίu kqT*^ MxXjπ~=ӳs9vq.e֋QZV;3Хc0ZoVlbjGD:+S*3?_FstЖ2(zdelşK{DxͲou]?@X֤edW@v^>#iA>~>^dfU6nSظ5.ڊ}}<;y:B>l ?Z 9ySX\B( zna4AQ 9y$LWθ8ǣ`Ҳnϵ v6x7Qgͤ^@Ւ_HZ71m<1^;Å3\:AQޞ W[yY+w{eU}a[5*Nd;bRQpz&]_^:8L8zBP@s~2yX,Tz *QQ( :TEb6뻷oWVVnLj߬QHTz婏pD~;"F/L@|`sv׷5bq7Ǔݶ9#).)eGɂJS凘kFZY_- m,_ UU##jLҦ:i2[(Uq^\X005`b6̹jm{D-a>>ώbu[{(k+J^_#>[exra~ygw<@Ll~޸JZUU T puqbz܊?&9q*k]UU<tUUQUغ3M}pws/'ORTl7Au:ť]7툉G'G=17_ț/M$<-CJЗZJdp$x ^M8;XmVlٱ؃Dv'=3?]Lф^O>K@Ȩw(Yv?N##!,5|~v@PiҤe޼yL<'BN[-Ud'Nȼyظq#6mvr޴i0x/7nT/SNlŏSR>QbEK/(==]vѮ];֯_ϢEW^dgg3~x0ZnMǎQT>HJJ~`ݺu\xH8p ={~`l6bbbhٲ%&..z ݒ$rQJxi;vয়~b߾}deeFV1b+{Ǣ[|96lPt:VJ׮]౫Ț5kXf 111K^M0LO- +-RJ|t _ڶm \|ҥ {.F 0Ǘ~@Ɨʕ+s)r{r*TP6Enݺٳg#G0o޼|W^XVfϞɓiѢ3f 44Tw1ϕ+WOf͚5Ӈɓ's^piNb۶mL< ЪU+~WZ泹iڴB2e |$i/^$>>/^z)ؽ{w3n8ƌر߱rYJ/C6m|AI^z?8gϞG^wX,B/G\zU~iV^]$Hqݍ/uԑKFir*(]FEE,ӿx rD> / \Ro;w8֯_ٺu+ƍSR?>v-[sN6l@||<&L`Ĉ Fhc:uϩW>(ׯ 3uT9}ԩv;l0vɲeعs'͚5jףF߿N:KCK.U\|rxȐ!T.s|Yoz_XbK/DWՎ;׌3.*U\Fu, s̑=zT>~0b/z?:cccXv([,&LzBBB/:V\ŋyH?ѣGeɓ'ػw/O\~]>J*.]"--M>w7Kpp0/rRx'iӦ _Ν+i__ 9-U4i|G AjŔ˗.###뮻4h|yffvi&k׮^\lnٶmuRE$RRRO~riϯaÆp@ ()|qŊ_~՝r {Q'-Eir*((Tݳgׯ租~bذa̚5K{ꩧ\(>;\]ƍ+qOQ sgɒ%-xݻ:,Ӽysn+`r-ZI^πns 7ݻeС5xZxl|rZ~s#Tp-niԂ_+MVSO1||Z򗒛`J'@nlo,'|ᙞIJJ *Lu1zh}]"""ne~Lw/TZUN_x1ڵcΜ9P EMAYޓ49+" 9N0A>tJ:?υf|Yz2Fшh"!GȩRE7oQFrNj~ΨQݻ7=ޮ];ّ3k@@@uy~zΝKӦM弴4Ǝ}qnͦIr)H{2sLڴi#<";(^f7_>^bF (I[m|yKvv.1B#Tp#Rׂ Fȑ#.{RF :wH*p8b/@~ȫArTPŋ?駟6lO?o¢.a>4<ۛK *tR^z=o<}F_>$Ow1bĈ_|ņ5DDD(O@/b|GȩF-;{уer.^ȢE8p`4hz-!3"##i׮>,ʕ=!{j*R{^hx'۷/O?4[l`ڵ:tui+LkgҶm[:u徿TXVӿ"}.Vp*Ɨŋ_sa_, f5 Ɨ BNo 
!r[|dZdڴiE[O||<7ndĉN$&&DGG3sL=q6?g .yMӦMygxt"F5 %VT>u;+Ɨ;!7SAAm*qar_y[nU|qٰa|ܰakFGe˖y~_7/}ٸq|^/?~ay k֬Y aN`ذarÇnݺr_e۶mX,2b|rZp mSd#""ի|=ܭVZ}׮]UfժUqV[ZlÚ5kd?!7ϟWj9(t~AɡT;{[Mڵl19-8BN嶆zGM6gϞ[~ϼn &LPLSWNSL3y)^_ў+Qݻ.]bܸq85xrP.]PBVo;ByǗwyG1̝;W&t:{(Ɨ&BN SAAl&M2Гe˖Q~}ſ/}ѳgҾ}{ @.]x饗rV8pիӮ];C=vtʕ+k;ĉ 9/:1c3gΤu 4-Z)Z{@uu]gϞU|} wM6U/Y|/y+Ɨ:0`.>=M/SLu/pB7_!Cȩ yxݸX,nƟ%::^a6ټy8\!RƏzɓ'鍠 &M$3f [n`/ja%$$qF^/ȍZ=NYlm_;y|1Ll޼Çe:t /=_  !Pڷo/{ơss}|rknǎTXӶm|TZM6|׬[ .G͚50`rn6mfb֭$%%Bxx8m۶^PvҦM Zn-#p2qD:uČ38pDEEѲeK|I9F'+Wgnz!o.'$$_yz Ł+nd߾}߽Ɨr]s|\2Pm۶L2KZ0`>M/[9{"ƗGȩ![QҜ &99dms/=?+3 g.BNSo IMpp5üt޹uz5w @ B6N;ABSYq7R8qWzJJJ7L>E2-/HMMW_-n8ض@S!(q2% ۶Tm:q4Ҁ^$sz ߤ ؏BꊻPdo8yC. F^7׺x 5C+`vqw*Uw%!!A(uWcmOc|SԝUp?1AEroXy/֣2%uZlf]n%}mZēhk Nwyta?Oa_nG@g(nZp.⹹\mwhqFJeާB"N'9&0b!Y*,ȕ$[fu-i,ٲ;a[?6[ibxb+֥S1< 5kJGVP6-Fz"BnzG ljG7<͋؄VK4 ;`۸}']0qx҅Sw™z}!'8O]A$a7fA۰|Al~GHFS N!?$a߲-Cu~u*. ߆*$ }GPŸ|ZtǺ[dw$`K!YLwH:ۆж{@qt7Ѷ7U*l8k͋W6eAs:]c]Ψj8wz@ c߱ Ǒto?깻R9݆}_е)=3 Pku:Y* }!b\HI/@!˾AJpTe*b[: )#mǁhzo@`۸]ޠNM6hϹJ$%Neߢi9od@2ea[5gf*_.gIl}'uՆhj<{yge{"\ºd*6QGbX] $w#e$#04]k|q&F̳Ǻ39\Kj da穃dǿېғq LS."{s[~x̟S0M~) Γp\rϟW3m]N[}3a.cp&]mOGbYNa=gy&UϹ8/4I[*۟q^:mZ82w"2]}%9̝(,s'g*W?GuNdʳ0FgOa|{_0}<s8q51McztW~󗣑.ıo0O{m`YgJVo嗏q'=þ~2w"Γ o_.Ey 8oL#弗^d=?p^8'0|Ar>#'c?ҊEN7FZdU- j>xhvr;m+0  hvaQX T[YWNGG#u+R,cd$Hdlc'`ߵMryGv hVA:{/7,f ,?M@J*9sQex WPFy=폙h߅k3n%kU؍39)/m~Oq"ES)Oqp3=P&tT ;V řz da]`| 0d=ƅbB[=¾y /EW5Gq`=!MR55.[YJX m>v+-kvߒ~OQr)|]6V=<h <~mط0ȷúl6}{DI¶t]c!9:c]G綱Le 68%qU1i۰UT,jh9$FU xb۲ UD9 tE8GJ.ſKutKb*"^񙧊v-{׹VGz(45=Rku/C~i-\(4[PiucR/+m MC"ie8[V0MӁ#({sm㻊ox!|dK9*,huH)PQE*UCq<}tlj}HYirV]B"n}zE9Mv!w.R997/AcշQ'@ntާaGtͺ8[$qˬ1Up#芾9'ϙ^>3$vJu^:mԵs>4۹UJ; 3yVK叫 @]1|SPW*yG:8O<{Mya[9ǹ8WZӱ~\&V5JvRFDحHYI2W!+#JCCWYm_߈ت]~tRZRnn?;)=ɫg՜ϣ4 nZP q:o u8CþeKq=:8wp&q(*߫Tׇ:J9~ Ta* ҙ5u9՞+7HF׺'^h׳fB@+-c&$y #Ur.ƟǾukCJORȬ.l)g\Os^>m8<LJ')vuRfkyrmPsƩt9P׹FJQ:OqƗ+\g8X,#TNq"].+NX-PiccMgF*h)Tq!Y8Y/|{Y6$9c9VG`x=B;*簣sa]= ]JPlAS1sʶi 8C1w`̦*ݳwÎ}2^T<,RV*?+*3\gZJS~s=YE֙"O8sXpl*`Y)t&UApLQ܇,WG|uP*g*FҮ81]Mk;Tlj&{òyR/e+KU+>9}ɹ7µt!`Ӥ Rfj`L<鑗暖 
];~$%"eNVu:WWmj5{s.p34QApl[:}q8=)}c)Ozlj.|B]Mut>tOё:72Sny C6`™r g**tM 5@ođql{#8Ǚս^yPٕArq=r_Gs֘Hr% îW@] Se7kʫ =%bj*Ķq!;p&%Ěr H*+^ 5׭aGl+zLۉ*8}Agútuq\FסnCL폙`Tgm^hk]@g4u,`cƼ1M}]X}\U9qM:tmza[3uH)q?x* F;V2Y+fTQX7.DW 뢯SZwEa/-KQy+y1}둮&L<ʐ3(Id u\Me^[~* DVsͻccr#^Fשp6Ùᝮ+!X/@1e=X: u]1FA]*?Eѵۊ}F{q#ƟCWomtVDU7.qx' 7u\Mط.CB@+{\Tj5GEK|Ey ;<뒯QEEJ8^VgmXiW0Mm\jCDuH5ߣ qx'RR"(0}1ѵ].6uٷ׌(mr{֢.[nbX-B]|qJIm˞h[tK/PWv-S1X1 *󵪊(*3:]aX~鳧e낮C?t2o!4s*דԱ5dU*rP yaј&sƧ?ȏDo rYG{b\~K5<]AxQN5=m_g+?wA #hwC~}|`et] #p߀SǨT#*_GueP `7_ȻW]sU:Tu e$zm>R:&)/}O47')8 h̝Uc"{Y.@ (REqƴk׎Uh_ >@ n";qD͛ƍٴikזMd*Z Ai}dbŊ|嗲lzz:v]v_E{wM^f^aaanݚ;櫔$%%?n:.^hz 8={{}?~l6111lْ[ojnIXruRJ<;vO?o> UV1ʕ+{ձ|r6lؠHtTZ]RB9}͚5Y^z%&L@ff&E@ ")u%(( Ξ=+9ry{mtt4zj2{le&OL-1ccǎѿ\H?}4k֬O>L<Nȟ3gCN;u۶mc,XVZ믿զ(j5MӦM)SS/^$>>/^z)ؽ{w3n8ƌرc˪T*Pt:Ĵi2Kѽ@ n{2eǷµ`ǎˊT "+:&MTK*;qFEEQZ5ZHƍxWFңGB=ҥKJltt4M6ØfFM|||u sg}Ư]6M4\۲e/YD>ѣ111E*)@ J]FEE,ӿx rD> / \Rv;w8֯_ٺu+ƍSz|>|v;-[dΝlذx&L#0 1B:uS^=}Q֯_/M:UN:u*v]6;wl2vIfZ|Q8|0SNr 0tPxҥ/_.2ʅ@ r[K.*WXH 楗^|˺ce1c\wߥRJ 7j(ڶm baΜ9r٣G<zFe͏pW.Ss=66VN\ne2a!$$/Bcʕ\xT=zTv `v+M6]vwʕEWvm۶]*U!$I"%%p@O.v'6쎟zӟO ;[ٳO?1l0f͚%=SE<}W!W1ٵkָqcE<[7zpY"ݛ5DFFK,m8޽[>С2͛7=1?v.עE$Iv+ 0<@ pK^ijzÇ{j-n}*xź&}c5M LRRR[ѣGq+.EsϘ;v쐧#"" WNfعs'Ǐgrz}˟W 0^ӧO7߼fKRn]ſ\5k.kʚ.ˍ`O>OKKjժrŋi׮s̹~iggyO8}4WW^{e 6l|PXt:z^%,##Ծg{SvRB.]uׯ~;+@PҸ-;{уer.^ȢE8p`4hz-ܡ"##i׮>,ʕ=7 jUly F'o߾x`YU߿H \ @ (YV6mZ) dzqF&NPbAXs'򱿿~̜9SqwڢijϟY… qgvӴiSy~a0}|Mx)w2,A-:t Ço}[lݺUfÆ qÆ ! -[YLoO,qFؽl~<ò+ǬY~@ ۦFDDЫW/s[IZ l6vZ2V[ju:[alÚ5kdY7ϟ8jys7HKKC |#\ @ ([>|i&s׭a„ uVC ƌüyX,rZ||\PDd(*w.O_tqp8/ʛt҅ *\N{~+'p!\ @ (}VEI& s'˖-~_|Ec~gϞ} 0.]K/=p?<իW]vӇ=zȊmڵi+W_;v'ht:ƌ#Ϝ9֭[3h Zh!hZ{y]w+Z 4r7DxG˗+vrcXHJJRh{9Llfeڷo}^o9y$<ĤIdKޘ1cغu|MFF'O_~)T ,!!7 ?nj5>|>eʔB睎p-tPڷo/{cus}|r٪c*VHi۶mVT`VqҦMOk֭[Dž f͚ 0{-6mfb֭$%%Bxx8m۶^PզM Zn-#pMuO8N:1c 8@zz:QQQlْ'|RIʕqz۷ 6oj%2[A@PP5jZ3{yA@P$'09~͛]Wbh4-}}EAPP9KzzzAq"TPZoJJJJq7A^$w-k@ #+'@ J"+-T*yu9r@ AS`E6cPƑ0<2=|g>TqNIq|/0"sD#^^T2oѵU;-Gwa? 
C׺]8aW CބbZ; ,@ 9ׂ #Dq7ؖLi?!e~.Zo->q->@ kAQL@۰ [=o5NQ* v+Ӌi@ cܠEVfT*̔|%f~^ ˤc~%)"HqGq|/{kVۆеJná-Hپ+vՖc{pޙ۶qaWvgD.cRR"XH)8l)3)3awiꕜ’%%_ qVans; %_@2e9QGn8}ńYu b@P!a,s>K67uܘ΄d>%Oy+Ց| /+y/0}Ktwcg5/Fr:6tw?q9׬:M`k+Fl+~@rur^Iqd'{t7)/"̨#uL{L}fATw7bZfVNFmH;6}{++Ķ' @CeHW8_}R3Ӱo_ߋ4a߷ޥ{}&j0}LۉZo q]ѢM1 | ]ط.+| i }L;M7(@ J7doY+TAaX~0wuH߾P6/% vH|I2s<ں ~oviN,3ƣm֍8 ۟s]IHv+Lkt$4j4:a] @g@ۤ q^:&UZ/ \\ԱmYf3?t>8u p:1<: ^uh[t@wy ~!3[q+*󉉜KH$\JrJ2Fle]/l2TTJt7I8u4{xFf4?F!;+AzF:k@Ÿ8ԯGڵsG Q+Trlf tG-[2,Hɨr~H2ndR ȫ=+\[sʆDcTÜs,rQ8/F׮u2Re:27_]2δ+HvHYyn8oTsɣmyTkPb]J,95^S+rj*HIPϭOU ɾ(W"UעLNVpڴjI&B!†Mٺm*+ѸQCvW_~lf3&˯\a4 jS`i԰;u$"<U djH^͍ /YU@W>Ȫ=rJ;7NB0w9Hi9u3鼢n|6p-nʗԭt9+7-%6o20䬀oQlk@(>2xw، 5ySW,];i4j؀Y3~si/^`v-FlNI`@fP%efkZ$6m>G{ӰAhN[tjߞ#8v,][0 7dU@۳w#&zAfM1s (ܘy(qٵSy\ FE?f*g=9VkYK[ncTZMr(>o|<\RXbM>IMMeU̚3ݻv/;9{g"8(UpP<ߠ%i4m˙BV-[wWJb@p'pC)XtFTN_:버y h %ύ:vʙp 4*ǥy ]ljiU ɒ=>N$]>rҥ3C\1Pgy訨B?G$nHo_gPFa۸m ξ}E}ohuRtv1bo\aGt=~1 f3ԡXN#2QWm:*۪h8}d|޶q!(Ɗ xVmhܕEzsGC ??#)Kl{NLk׭'@߶~8ɤϿ?reRR5-Vl `@o%vؐr*]:ӥs"A CDnHIk=dE2d~+_{cEdV{#W͊^лEU-ob2/еqG>IHltvATw?oGIr|BQ누Jg@{]]XEAo7g[ޙxS8ϟ@S)&wބw25Ym.6-"zym9yPQFuJ%'C8^ݡ[PlYdK~}9|lR8N1 hu:tZ-&IaRT z*UH]f-;3~g0G%1s Q5jZ3{®Yq 52]־ S'UxTSQ3qb?r1pf8}utEj֕z}#+7\Jy,ꊵQiHlVnq+bxZ.<bm8ƣTn"?3*}qPiumR86g.S+mzy(*@Ţ4)));S7rݍf3G d d8򏫜; ݊TAahbk1&~OAۺ7RF2+T'+{]BIrr C훋Dh/]˯ItTիQJǓx^'[bcc{)N9ot%v X?aZeKlRr2|1=4q* J:[e)_ʔR8c6[\"6zj7jr˲uȑVCܑmڽw²b>zPVBmD+EV/L?8l =%w΢Pd=Eb˗Cv/*%vGP^Fx<_W[fc/8~d-7&FS\T1s /!YRgQi67CAYEmmެ)<9BST>CxxJfe_3_nq7W0t wu\h2BBܱGp!f54eW߯o+MPzQ.[uLkVPL&3,coڄD^xY_"BZiZ7kJ`@~%5$*2[Ǔh԰QuHo5h RϿgӏ-_+,"ҹ[ϞhҸիU|b"^ݺ4oڄ'}wuj8s:GV C5X' 1ܱ7_qtԯWN h !:**G^7{ʫXFp#iX\koͤO>M/׫壏'q9y㵱|$uey-W,_M_^%IB*v J$x>%p7b\|p7^%22GeRLzh~dܖXOl}%v_Ė/ E7%,>'J"}LyGeP 1W!G|FԮUKluAFFFw9sPݖWSOp}חW???!}7Lnb\ƽF/}-[$.9;-*&0݃d(EPXb6D>:ll  ++K6%dԈGdm^%PBz>JZM>-G'{7yo"ڵ-* 93*J{`{K{4Aan=a2o]y_.*D*炒IF,]8!`Xظy =. *@T@HHgDW=GV:*u@nbi56qf#9hAy33UvZڭ->-ޅMPrHNI!#3:k$q۷ך2}sfIcNV=h 9߾F76`\t` 1nd3K 5%#=eQz.gf~M;cڵ֫!}Fb %jR5jT/.b@ p!>u%5׽=PT.&k++*+r#=s;qjPuN ə-_9NjxQMʯpMrwNܘ8=>'zFsý9ғ~.Qĺ- ś7neWp}}9!BzĚm̈ds8? 
o䗰_>'_:K~Y[WΗ|o sbLu\ G~;VCfJ\MW _3|ĺ-'NR8}k Uw*|o_D\χ́@p-VC iF$tY/`;ﶣ6qQx\ƙa`>F)*Ӂ-7*shLܷ[8ٿ eſI'kcڷ߽M F"W/KlPLzT*-=vC|x'}2u 57e5\~?{e4n% mCG=hr ?2I?'!5B01](;~.Ve\6 NO$1e[3A.b$_Xhc*xu2{>afj1-vƞ|Q.RF 8ҒЖSԥ\Ӂ-Yu5(gzKA_+X)~ *__K.Rv-ógԞt mBEgV:[>TLFcYl4e+WEQgj6)sTVx-7CѠh ŕ@Pd%tECgV:jcj'"mo#Irton`:c'ḟ#9H6+#1lZPXb0ٰq3:G EbZ1=8l>ϖx Nr-Gc[ 9X )-k2>9 kq$ ˱}k5+sDEEʖ6+K-!=vuj9V~pq8Yŕk5s(fʼo\v\*N߅6eDo k@ȪzEVPb_>WqZ)HsCr}N5!Uc햲E|` ~ zե+[cݍ.&N0-[b[!!!hZ1[bQV u`(4ŭrKV鍨tzW5pfa:zmb͘wP]vU))ܖXS=]pFBJ7b@ P"F4A@$yJލ&,> S{nZα4N _IQ$ZK5آt1Y~#IX!bʬi1s#RWpܫ[ufʫd1!٬ V\e;]3z]wpkf&rlݱsukբN:  @J"+(T*HKK긶ޱƫ6,~20Tk@Z1G0FIv?^υ3i"#ݵRQ : H[biBm I'W̐M[< MhkC?bRWP(˴špӾ7Z,,V+۷$( Q#FN+b@ P"YAARѹSGE!a#cȹg:rf\lLNGOqzH=ΌhAڊD=!*Nzqf__bMTY1q>lUly٧1 ́@Dđ(ڷmޤ{hZ܄_x.L6,'? ѷfR~ zcWE:=rQiuo\erj+YmEJg =C@B6<'=\eRW?C͵tIļ 7^LLĖ2"" wR7m b9MpA`ސ3 Y-ļ4Ejyy*фF3v{>5\&Ե`1IH-AZcRҸM[>W,X,Xebyuza9Wb 0}*A}O3,Wzm+9_M$z$2-,3x2jFWAPIb剠y6=cÇwSnKۢ%L˕ J)=]>R!{`oj²u̔io3+sc`=yc8Y5?`9s; @^ʼ=݇aOɾ07$h|ّ-kԑgǹgX1Q~ q&QuIs>&uT,8o"yGoCg+SI޸;w,Z 6+/(A3~ YnFe|'O1yT>Ĉ]^OXa۵)ßϩӧyqB} &*2HȽj5M3Mh$ڈ2uJoD_*QON$Tjt1]hb MhHD6$chcWA`jX>*q\&y7 : FLEFm \/&MxX}InaHl6o. 
FՊ9|b">ebb{~&m+іEs.&{Z]*(:́@B(lf{04iܨ#Gɤ/h⻄cmc5o2$~<?ȡmzRgnޟ0V+b  J4vϾ1xЀ5-\v_UH&8qS~ãÇR^nW\eǟгGwtT#b@_G(R_kױ毵+U*ֶ\p/|Mfxomj?%<,+-T;l̚;ӧ3OU]#( J i|Ï8N>4eoBdf̬LF>(1%̹s LAAA<@ܖfgXx iѬiqw!fU"+(uOLd_1M͚6TdU8zp U*W..3gΞeނѱC;ڶiM@8v˟j׽=hִIq?#fE"+(dddjjvGhh(ڴQTfyV.]LݺգaaȂje-lٶ BiԠ5jT'8(:q$#v7j]: b@_B(;TlBW剌 << /2͘fSHJND6m+Wz ύC%AP`$I3ǎ)`4#))tf AG`@J_.kD/ n1s / Yn\B yQrVEV@XX+[ `25L~ BVv6f BtL ZnEd"+ '%%dl6B: ==](ۊ9i x~4w"*ʕ* c(fȖ`l1ޯHŔX(w庄һa۱Cмr"ZuXįaܝQ./c+aZKfM8aQ&_۬جn@p]"[1ĉ(҂C'@*!d\Ⱥ}T#RNK_ !y~Q(_=Xs|¦-\BX%9D;\LQ ^ UjW"rr, $cZ9-Y[ >" YOpp0䀀ʒ7O >N/e^x^Y!҂XU0'yP^f3ϐ$or0dBBB~Kele*҂:\+l6&WGJ,_}dn[GŊk[u"[_EZQڵ iMRNJtߔVCֶ9''?4W}5߸lS޷ʩ)b6Q>:d>o%33AZAζzɩ(n*cRSy1{FWr}d*a Y,7lN@ Xd؀#3p'}WEH[IriAmܥ\ƺ>_ԩ;=*V/&e,EZH׾=0TW~]d qFpl r(I|'b+ Et:9s&v]N_|9{&$$~ ]LP92Ҽ93ұ]8wr䫘R¿lϓ{"PV9\$ڔjf>]>} I2+@_!aRSQ.RP⍔S6B}R^xRSSgL2GPX4^hBkWʩtz4Ako) VSOGAWlu;->Z~·5GPC|Srj8n;ثr?yK>/GtUj$:upa*Tc0 >5hЀ^@p3HD8rQerqIV NEqZGe0*)D9) vԁhB8vU9)nG6JqhOIžǧHINԁWT]Jz"MSHiã1TH;E/қ)+22*įvC$Ib„ $&&Gr*קVSB*Wǯnce;ϟr<^1VSN._pJY., i$lsj4^ά 驮2~ѡQ}OlV$ E]t1:wM(B-aXN!sV-ZP܏#hݪ%OӉf"55pԩ= )SwהZq"S`YG vm۶q222 N:lN % VdLgb|Gz ݊&,ChovSdmC+RdrS}KiiiWմiӆ6m[Gjj*7ot:Wf#>>^f0 222{dddo~HHʕ8=hZ)[u.]ѣG 5jtkN~@\\K.%::w[{xW}.mܘlGvc9CFEv?z4-.sN; u&8_CS382R jBFUQsa0v Ih+ҝXɉle\CϷ=͊$]L5-¶_ID6蘿|l,&27.)Q&E…  00zJɓ1͊e.](SSS޽bcc8p Gh4*6lȑ#ɏ￟)SЧOz:v믿NZ|rAddd0vX-[jTTtؑ^{ io_mrsyT%:nB>thσ;DSw޽[>裏J+W0m4OtTZx!Cx}3vXziР{/^v⫯9ZnOѣGc:J*q]wѤI|駟rg[L2]vMTT7fhB.ObX #aIX~ oL%{jtu0ɳ?||M4Nsi˧k-ԫhoԁ>#6ۅd_65h+IvB.' 
P!?)..p:FX !=GLsw\}ue* ÑJGT4cGUN 33W%s2qmGz2?̿#9\_T7+WY3n3 ~Y[?~ļu4Ƀ\(mZCHG0iկ_ř3G85Dz> Id ͻmgH_Ȯ[N>֭!!w@4i~ϼ .裏2}t՗IXr% , ,,Hjfn˩Q?)bT>L:uʎ17zKĺux駋Aq%S]>vo_E]4~tرמ8q￟W*wŮ]Xx1gVlKnXXl̙{^bE/&MbL4 F?PSRR8z(+} `$D|P ~Ra>9 /C?ۥjC_I nXSԓu?Е -WqXE<:38rOLG֖I[G_޴3_Ӝ(Z!gP(E6]kPiEZNxi82R.u*#,$=yb;pTԜӔ##E.;Cܰث=fD= wÑzEY@eCZrIw;$]8 Z#ɗ]>gyswJ6l"zuߛL7???;ƬYZdffg1uTEL@@@YYy^͛\[oQzu>IOOw^Ph~pT< m&իedd0tPY5hтt Ν;;v… ˨jY ̋VU(׹X'ϟOݺue I>N'۷oZpy^xY-S qqq9r4Z׬ /+.9,̇|(WkcK8="G76+Z~e%VW0Ƀ.{ÉkՔ};Fp^7oPH6+&+ڈk7ǑvGZVqSiˠ2X7rfPT.O~bBYGv]E^*-FKEtv>aPRвCQdO:EJJŸt/yrРA|Iu۶mԩ \tEl o|zjq֭cݺutr8̀d+Arx]۷˃xN,-[}:ubԨQ9st/!C|T"[z&##Cܴi_;gN> @߈`ʕ9ɲex}Z9z(VÇ{i&$IܹO7-Z`sժUƣ>*߂ ٣Gʊyڵe[z)+Vdr-U;~~~HիٵkW֭+1rX؝,N玑~!͵X%*ڼ\.ߤ3ޟ: by:^vCV#r'ɃmrtmCJl%*LYJg\Q {w~,uz]S1V4b^{9ғeCu&QӤqj`5ʹѝ\&(!(ʪB0eRLj5f4Zb^ԮiMKpO}|UT:=CrWYOtQ(|:ïA[?vsjtYYAAH H6+NK1quJ'c\n)ϖ!ŋsۥRQj]g6Wy:F]9-w-u_Hm\4|O+C.][nZ$W\YRTRO%B>g<=vC>Yf|e%\~w_sI̜9uz4hO?Dj@a 瞓O:U#{t^e=Cu??J{Q,ܼ,&~&W{U?.ڨXʾ8tz+@Pw^75W6ؒo,#\~ƚMF(,:wÍW:5sĵen?)kH% ¸\8Bꖜʶ rR}ł[iGֶFP\*[͇wa=cz.!StxG}SE111>-ewu|k׮|.7mTk VLBaÆB4_EY)3gpFDȗ;*|*!?m۸Ǎ笀NΝ7M xq03erÖ-[E&99YQZ0ktzq5e*KkU(')㼺LE؞|tXgGtTz&*oE6 V;iMه{enXL֎Յs-0nleVPp(j OiFV/YrhOS"˹bE+cq $4nXBw ^Obo?A>TFpNbb"˗nFHJJի {t#q)ጌ p-Zj4nXqٲe Փ{N.Xnݺݑsg/!tЬ-|E7ՍlgOutS8z(^ɾh߾:S, Vؼݒ$)^իGƍ9u;۳gO9X|9O? ˥MH$f@FanE%٣\{O=ίa{*  6y{*>5QRd;[mIDATUQ4A8pd"YLrPkx}#*z ud΀:0gf*++vÙNQlܸ1:N~f˖- +Hn "###y}ZEg`0:ϸyPP~~~L&N'/^,"[\9T*$aHLLrX,||9=+>|Xv[\24i҄ ].yyvb|wOHHP}@@QQQ׽Vp;{y^^VQilա+_Q_6"7Huupb '=*^7FcW$ rCzT8 m%50߲X,8I_5fJRr2"X^~F*s>v4g0چ_k/s߰9 _x~ggg:kaaaiո*p.] ^w}OE!olè(͛9׼i-q+P4|<3l6$Ib…_L0A1e>%B>sY|x* kj DN#((H̻ٵkWdNh5W^wފt_rA=y}lî]֭O</W$)-W{ fA]ɊIcHY0Ӿd˟=s侈?y'i\W{[ӄ|$KFs۷;6_F@ca/̢gL[iESjov,668yZʕ+k&O/#[ӽ{ŪUh0w ⴴ4{| \|TYYYrWS&JEffK֭++;ev]\L&EExϒ9s;qg4g7}_Cz??@!졗]uXL}ÙǛqGNzEݯ^\ȒʥFr{8'ȥzezXSMX@>Z#7qwYϩ]r, t.l`>w{spf+Ip}Uv8C Cʼ/p'q*o &X;ϵ.$r(whK[-=ViH>WECcy/~0yDTfPZt%3/44@m ŗ?U6mn8lgҥ$%%믳xb:tH&>>DW`Ο?VzH+̲e˘={6|l۷ϕv*1aeGq%s1 ӓ=0݂3ІlUat9$$$(ꃧĭ*_rV. 
&MϏǏߒ'|sJiՄQ^4hWΓ: $]^;[VSy,)PaOb-DWõkN j\=yф6y|T+ژ8du_8Mb|@-t'mXN تw})Gkzck8ufEYm 4aՔp*|u%h#a\{'ővӾMVPzA-Gvߤ6]J~[ z0j (*o۶@aUnb.$[ʴZ?b4yw:w+WR>غuZGyVZѻwop8\ѣGIKȧϼdnr4kwjժjeKoX΋UAwTr]ΏnݺMF:5wb0Gc>oݺ]vC[̙=y$ v+Tף/9 ^fzB5# hý}U-}N/g=+hB"k]*LY+υ*QO}HXѮ:R.F_nژ8*~XNDѢ_HwwIFïA;YGEwcp GZ9aGQ]J+TlPu_?LcaP 8!n]nYFJZj0k,9IqđԩSGUO>׽3fl~}$j4ɓc}nPX.  Ο?O*UzZ=t?ȋ`GߥݻwTdK7\T B| i mLaJ6*nQïA|Z &IJ\hgk-,'j&\ Gu+ڵŹw)QlϞ=eEY]^zZw^/Kٺu&MxkA:օt*۔7gѬ,yn-==*|⹂I#SgNRnϲ{GVd.]ooD 72wAɆ Zj]ӗ ?ndR0a) +;r_X:JEvdEIaw]8ҮrzhEXυ[mMʯtݛxő-9"O_ȑ#^,|*(_~<oqTqM:N/bEnǼ… ̘1C>۷o>˗eXXN۷io&[nmٲEsuǓԭGp w Mʨ7H׊%)Au,E2~xʕ+ՋAѨQ#9r˗/>[n<,YIXj#Gۻﰦ>߄$LY Tܫ*.=ꬳjkvپںֺ7ںj]-"[dȆHHy>s=z$OBBVs RSSYd ?\zUówѣGZJXjղmۻw/-憍 ǎu1K$l[75j Xj.\{Ԯ]2epInO$ǖ͛7gԑ.sC(9C.&ʧ(Y4rCj| {+VdȐ!-gh[ϟCCCLxիW_̙)g]Vrl 3 ΝMx^Wsf}2>Ï?ĉsݽ] WQ>D-:PE>ܹsϯJ2e-l޼۷o;5k$!!Ao>}PR*Yf㖖,[,Dn~,iܐH$Y7nRT(Q"J\Ftlxڈoη~-ztnvIjfI:ƨYLΊ-;"wz҃\L8J*1o<ó|e"”)SH$Y1+Vȷ~K֭sڵk3j( P _2-[Y&[2d,3W^E\p!˄DB޽o=o3Q>E44+++oΘ1ctF`˖-xxxcjRj[/u|ffYNkf7qkU02 4vBaނHϟ>}B?ck1g2DMv^DS$G GڰY oހ T*uqqq)SJ*ѬYgC?{s# UVQF٦OOOϲػ)/]zٳgckkKJիWlijnnS3-SL[I+bkkKrr2DDDSJ(Aƍ{xn:ɓXK9<_a 66yF^YD7cǎuţH>}ʕcELL 6664i҄ &diٝ0aK7wذaC›G͈@3@V(r +OE)?Q-]ѤZ#,iS7/$wmbAL\1t,CUY 뒘OYҴH? ܴ LrG/m iǷ -[둔*-zDv="5$DA dA7٫ n&& IW̥zꊶZˤZE@DrGQtP(%@Vgz"Ax PzF肼yWC}9CCR\%yx4OQ "Ń!){'g;vl i;뒘O]IaIKFlF}a.fD kdΜv@V\1? 
BŅ0;{gN8LmUYh⢐&\/h 9t]@Vu UhG.4ƾWq#Yc~q'M&:ԑڔB޶/^d-{$ 5@I|׿P,#FT E޼+]MQ)I?eT}4\ v)#YDz*ikƕ]m2$1-$~Hު7˧$>ô78R#CaOx@hkkiAxsCR.CȬ BF|SHlruϷw*hXgC(ك,6-@:$'w*"Cj2ǟFR-TI#uH&\ W* R1?dYRdݰXMuMaRq.G]L|2C?``RJϻFժA9~30TZgMr!*5a>Owܒ}AE_NxHVz~JlKk;<嵳B(/~(&cQ?D 95R@oFZ_PYcℎ+,VeQhRؗAM!kY#gh4H-;^D:.<@Ԯ4M%kU]C&.dr`Rkt'ݵJ$zWXUX|uDHEw_h'ILw kReDvE7':.&IJvq**Wڠ?>ѫ 2w4Ir6ؔBbSй.^R2+(cNۤjL6x!C&Tȡ8T02} B;3#-[O7|"ꐫhB%{+iW~< LzHZ2wiUJP)XX0 ڄ"NFƶk?,dZ] `R:}Q^QHTĤ^k$293i{Nt-Rk\ô,dP?-R #oiKo)QDT$%0ѤضĊ d{]" [Ib]ynYKCM LͳIi}2CEƽM r ٫p~kAAH- 䛷ng/=;y$R2tAg"!߼m;{ -v_@0 C"52mG\NSrL+H(W,gΞMoѮ5BaѨBot-["7w9ʗ/hԙ3QG!?X^KF&ܺ ⥁,pW^Y)?o]csXŗjG(*/ςܔL;{e^PG!?X^K&:؅Qܿй.r,X… ?> ,0t Q -<(BQ"Ycɾ+L⺳P4te#2Q#BɼK(j^+fzo*9` >t߇ r(W\ҩRxvq'U=(oN("52{TafQ?#=1U⳷>2"H5٫p@tYA!^K  r zL,tO*̬3t"r!h2.+(w=5`CgHT(JD P,=ٽGJ C"52Ovo@L[cY\|x+`߬\cʟHWӉN#3bOF&Nݎ afqKgFsfSDN/t7D[x B$*56TAκM?uX-FFRc4 K+xvvvF?x ?# ?b$f ʝPr*%EAA(D +  I"AA$  EdAA"I  B$YAAH  P$@VAA(D +  I"AA$  EdAA"I  B$YAAH  P$@VAA(D +  I"AA$  EdAA"I  B$YAAH  P$@VAA(D +  I2Cg@2 ‘ߏ|7ԴݻwRg/0~TC?(rgPPJevLʕ Ȃ JQPj [D3vrQ\9ϩ~[0ݓ'O?[3| &MT*:A($FcLؼu;;3l 0/<,RLtL eP$!wݻT\ʕ+=Dρ_D +\ {NL7BjM[# oDGRፈ9&&R doM[7# (>![wq6Lh, Ed sE qMnOJh4jr9*TRD"EVcccM5WWjլ_PB?`oYB!*aeŤ c [.T$G%%'caao׋/ C)5lQ#)U+ύyAW8r88W@-(]ЯI( ޾G*Pw^g,tA8ÑǘEVvիQz5ع{/Nur2c $9%zo߾3rjaaAbR_P@V(j5Ndd3OdW(䕕cFIl,֬Ά^=YoRC nE B^@V(_ޅ c o=[C Zv*jTJ2y+֬e.֦UK>1?@~%B!VPZ5ϛSAfu< 2E B^@V(RB?`^/^WeJŋBVvޭ^ E B^@V(2k7],,b>2tvR+P{ /\N!D + )l޶E뚳̌/xmnŬ⦨WЂCQ/W"ld:6?YXɬe"PC PN!D +?n]8::2aӹbE7k~XF3g{N'N憳3ڵ?_%>>SR%֭˔)SšUC D9JSQ d|ڴiܹscǎqF7nlzKIHLzzzΝ;Yl73FDD0n8ٿ?#Fx}6m*||eV\޽{s\]T sh(YrU}&֑ÿ3rP$I{{{`ڶm3////_Jb„ 6 C2zhڶmKBBzb\x~3336l̙3)Q7ndÆ 2uTJ,ɒ%Ktꫯ_>qcFs!̿u;9s&nnnY>KMM̙3P~}d2?`qpp@Rq5W)7nܠjժbkkKժUIJJٳѬY3r9JK.hhҤ ̙3TJFh4QF R OӦMo{QdIBCCy1mڴ!-- ???W F4 2@6l[JϏÇӪU,$==/"iذ=ݼy۷occcC2er|*3W ˷эYҤI,[r2/+W"VRRQQԭS;4KѣgĈ ><]*P!(/D +ѣK <OOOʗ/ѣu׭[G޽u]iΝcڵt׳eРA$$$b .]̙33bܜ~e{==ݻvɗwѻwo.]=Ç?$-- 777|}}y^yͭ[D%X~=2 ___ mIqҼysNJzR Çg帺0|pvɀ1bU(SvmΟ?kr 'ǏgԨQۗݻsjԨhϞ=Kٳ'۶mcݛyQB N#ڶmˤI/ؽ{7k׮Z/n\@SQN Qnb`]Iƍ^fƌ,\)SЮ];bbbt͚5ӥ{w #""&--/Ҿ},iT*!!!R>kN}nݾob̜9yѧObcc8{,[ 
kkk8sLZTԩ.8]ӦMQ()S*UpMرctЁ 6xrʔ)CDDVZ!Jؾ-ZPretWN H$kNe>$u"]%J^zX[[ӳgOϞ=rO''':ҭ9s...̚5 J%R<--MYZZZc[d2zKRuƹs8wW\O>% ÇYf?~ݻwoq16mڤ۪U+6l@@@K.qƺ1x=rH0b+N8?u|.\/rBA GyhtR:wkܹsx{{s]+'OeI&ҥ $ {# b޽DGGs18u7oaÆ ikkKPP899AXX#1kƲenݺŁر#-MYd tޝgrA9|0iiiXXXP( ۘYq@QNE9}U>b(I%cݹsYj;w/dɨjf͚E߾};w.tWWW6n6cmmMr9r$ AAA̙309+WD0d6oLY[HrIII]v|vƍ'0rHd2/z=???vʜ9sx葮\rXXX߽{w2dVVV[ b /^'|B&Mhٲ%lܸ{aooϒ%Khڴ)2}t9p۷Eߟ#GZ}:;;If͚ ԩnG1ydiٲ%}ժUcҥ,\OҬY3}R)~!s%88yeyϹ@M>wwwd2Y T^ܜM6?+P͛7gݺu/@ [vmerui?!P8c9߮PJK ƥ|f@B+TJLff'&&LMMqtteh|B˖-ٱ~z|}}Yd ƍ-av_\.gƌd2j֬ٳl"FJ 8Iڅ(ǤFB&"lXڼ*4 wI*99aC K9դ&2 o|)TJ F\Y'B:}ҥyޱڵkA`` xxx`ffFpp07oӧvEooo߯;Ϟ=x{{Cll,~~~ 62O>LiüdWNCѲ2o~׽+QZ#~RHUuwF*6mm۶K.=7ߓ'OӧcƌѝKǎ&lذ;@PP;vK.|YvH΋(ZPiY$NkK8ʿXc9`֩N?x`[~gn4M6eرҲeKƎ>>>;^zuΜ9h zn]YЮ#9p@f͚+tyE-ؕ /ig6[dͻVOlEr5ϔޘTo2Ч@[q@ЂVNMrLG-&{oű B^@V0JU*s\1c .dʔ)kN#Mv|||pssI&<077\z5j2˩2O._zin^c>F2S[8ɍ4ehIZ2$m^IIz0ʀ{"O$NmMhHZ<-HI8)?!6;O4S[Q)Ii 8 +~{"uVy4WΠ9ե dcj߾}9Le@ZrXN!D +%T5s^"0k,`ԨQ9e˖4o___qZM-Ϗ-[iii,^yѫW' 3#W@1d.&.X|uFM9 e>FhP]?HKA' aZmZ 驤X0>?aR g5s'uv"RAF$u\?یŒ&>V-1q`:Co /~[Tw!-^]q@ЂbVNTϑvb)=QR`8SA+ F뽶m:ՕEqQJeo޼IXXk֬a޽\|Y7. y9sgwJ",,iӦ|ҮM+CBԏ$RGg"kՍZsT6 % uvw͑  #k YΨnӻ&!ըkclPqf 5Ab9K>Tm&5 5P^?GA)(cZYQ/gOD=V ]r*y%Yh8W$8._<\T*vիF.siϝ;ǻロ庫W棏>'d]AU*~h㑘>߯S H־S5l3Y@J 67BwI4WBy/d: 1D*WWϺzxe/(n(Z^Ndc:3,>70VN![Qэ-v0i¸h4O;2 OOO<</(g@XV^/NFkg +0-jr9B&RIAZʩ hkDGG|7ܹÀڵ+UTaժU\\\tA;!!!XYYQ\9$ ݺuk׮TX ͩT+z>©Q_ԡRJ]+g5TO<;;AVY-SvIu%K^m?{ەARmKP8O_5Fډ(>F_ABb]%TnWIF8JuФ$~|@YFe4 ӦMx{{5vX5jmQ TQZPiup3zU:/wVʩ hސYj }e{rK$9G_ﳹsnܸQwٳ?_~TTr5nj6+CZY-p3֒~gT/bf$VKck-0uZdM?Ǒ Y۾Yc6u%iG6~ wq}ߓ~w$ SH;+TDa{xAArB?*9KTιQJNNs/@e^ݝөS' e0P(^]` K9ޣ#Bo`R fcvLo~*NTJѼD ֘ HKK9w߸yKx@hCg卥 y2eGȚu~5: aY_bb"+׬˱e,T*>1)a5 -ȋ椸P(Xf*1bhP$ԬQ{;;vzUbT(#PYy#βuُ+i߶MMr*y%Yp܉pxWW'}N.5읶y:WP됖/kD TQY /D9*TJB2_- [ :YW& use äzcC?B)(!r\Q/W"}zcnf_~Mrrm2))),[$Կ_P0jE%=r9P$m݊ڶ9s9J|'OpDDFoc̘6*ƱsPxb8-W˩ ! 
;v!%%=(S[0 v~Xe"**oTiQ(u~ưVN!D + ?fOIJqrrAzoI#VŊD|BkqY*PyC!wzm[6 `pS?EXc~߷z4.j R333lqrrTɒ"p}S>sL0w.^z TjU~hS9]'8ž]qqq6 5l۱m~˦x;1||-%(lܲ?dL0ЯA(D +ݐo-bj5`߯J8WJT$$$p/>jÇ2qRD +Vfm"&CN&4>iiNT*ÉyBٲeP$!wݻT\ʕ+=LMR2ݻ0H6oΉS>8W Z!2t9\t m8z!`CgIr$Y(HR|4]p?KJjjݯ0ZEQliݪh YTrqa~ (w K7  EhJ~!:ݻT^ ?i]x F3U(R2]tJJ~dA%Y(j~\wbb"KK_r(WL+ =yؼu;Ç bb ŖX~K0 Wbts̰!* Vrz>/:9QByCgI(B cR9d BACq܉:;[HVivvPzmȽT*s 1@0#GDʰ! jhY|]y,)dG :&r7h԰cFT<7.]DDF\[ti7~{"3gz2KӸQCkז<7::KAA<;QQTBn*r%J\w j"HBPTL2o4'˗ә$6m@"}?Џ/䔔7uOƆܙ8~k_=m{mpm>BlS:uxի9^^^"[8僽=wZU׾FR%^;:kopܩ#U])o$&% Ed"ƏZ5kLn޺ճ;իU5k $<1{3}2%5WFj$$$s^8E< wTVy}+rԮUڵj;w'A/˔1c o  E)_B?`K055-{ר^_/aُ+z;7 Z߉b) ʊ1G$6UkaggC_kZR lLl ^6֌!QQѬ\reDb sb:PܽZٟzAP̏155eێ݆~%B!VPZ5ϛSAlfvv|>S֩W4..zyyoاOnُS>sgRAlfX9ʕ*'$UŜd"eTXC:+ ߗwcΊP>|Ċ5k9}n,kakӪ%m \{nr/mOgҪAС};>6Uk(,ЯD(D +ŹbE0tVtk ν^|xW(S4_-^ľ_r/ݫ^ wCC|vxU/DeYh>;vÇ~5B1%YH~& Ffѭ+iX ׏eȑ8;;S~}fϞm=qnnn8;;Ӯ];BC||<ǏRJԭ[)S}k7mk-,b>ziZ iVUN?xx'6ECgG(D +D9l_x~'vE|4?ٳg~mOOOBBBعs'˖-~{Έƍ;gĈ8Jw߱iӦBǗ_~IPP[laʕ4nݛ۷o)l޶Eۦ̌/xmns^0rUaSܲm' =l@N,-,X9 7$X@0z[dG111ّ#Gݻ7j"!!kkk OT)X]?~~~̜97RSS9s 666ԯ_L:88kkkPT\vիcjjʍ7Z*RjU8{,fff4k \RҥKh44i{sA*ҨQ#4 AAAԨQ`TBzz:ӴiSl ޽{,YP?~L6mHKKϏիhFL� &X1|pZj坤sEr9 6Խ7ormlll('Z\;uLQi sQN SNoY3E%J0}d6l `n''G\\ӓѣGzjbbbtj.^ZQF@ceeZ޽{TPdd-[VE R ̌ڵk[B)_.~fm[T)?~xJJ ]v%)) RIZظq#RYfѡC&NHrr2;wחjժ1tPƌ_'|Bҥҥ * RիiҤ #F ""o(U߲e gΜƍ3c ݻGZZqqq>}B'ܾ}0ڷoOXX ܹsGRF vI`` >D*ɑ#G(_|$==ӧOqww?g<}#F0|?ѣ9z(vvvՋ5kpX͚'ڶyaX iS޾jق g,""QFqI0`˖-+2kԩ]qk y1@0jAs<22Rג辰RRRh߾=;w{@۶m9|0Ϟ=J*ܿ;vPV-}]8}4׬Y3:vH֭;w.ǎf͚iӆ6m4CлwoVZŎ;h4&>>N:E@@֭[ b„ ٳL_E@@͚5ٳܸq#G޽{ ^zTRÇ7|pVG}hp<ȡCW\ĉ,[3p@?N:u8r.76m8;;m6;__PTN:t]v/_|ڵk_$%%qAl?6mwرclܸ1WɣG7.^LBbb sh([NA;7Uwu>Gq ~g4hP(e!'ߟC(>D +{Vnڵko>Juq=BBBw㏯gZ c,\#J '""SDxx8lRw sss֩íw]?3g2oLǎ޽;U6nH˖-i߾=̙3|2qcFs,y-U D9-r !ƌ̜9֭[ӵkW lǎѢE N~;v$&&}ѱcGz_|7uڵhт;WVo޼ h`1\^"։S>i.]ʅ hܸ1#G߿?%Jܜ!C#<<SLLE*=_.$ 'Nۛ(O@tt^ޗ8;?幢9z(5k֤f͚ܺuKEZ|yv… 䯿V///ڴiC ^T8Č!ֺj.%K>2j(|||HNNfҤI|{yylvsDdTu1Mҥy(j <3fƍɓ'8s.J'OÇ fo|'M4Ç3h 4 ?1Bo@U*<{e\@S(mMJJZU888d[V}||5jGfС >???O]ɓ'IMMs@;\c֬Y|FãGt钮:dZ NZ?BcdVIHHAcݺuv&M ///zIhh({?T^s355۷oEÆ ԪU+Kڸ8M… 
u_x%IMKCTꍵ}S...̚5I&T*H$ziii/---˱̭Z2tϥR)ݺucɒ%cH$ >p,Yn޼Ɍ3سg͚5]vzϜZ?SSS˗gܹ o޺u+VVV9]י3t ތh;R^\uttˋZj*2,ܺuFqU{9}JRJ,Iɒ%JSjN}n rDfR^,;Bhh(6662m16ʕ+-[f͚h>x@w͇f".ˑJHR\\\^r.mV\~&4͌3Xp!SL]vַK.ѬI,.AAA~ֈIKKŋoK^RBHH^!@wgb D9-r pN+OΝ;O?qƗ/_+͚5SY}׮]˶*J]Y}yی!3 F4kӣGBBBHOOtҺttt-<:h=W7n **dlll033˶kD||1|5o֌+ZӧcҥtY˹s… tmsuuɓ↓_>2t҅{xw^9v :u͛7?au%((p ,,}D܈e˖-ܺuбcG-Z7gϞeɒ%{ӽ{wΞ=äaaaB 00Polrt,m$3D¬Y mWoy]2!!!ٓoӧJI&MhҤ YUBy"#?_a -)*WryeR)ҥK3~x@;5sYUYuݻwgʕL:X4Me544TWV_T*OsqOAx F),1.ίLyb?CŊnݚCZ>2X"ׯ_똙\vvԉUV!h޼9vzg(W^@&BBB?~}j*,YBӦJСC9rƍEoߞ-Z?0rH:;;G{{{uyk֬tܙM*=zɓ4h%KZj,] ҬY3vءk;w.k׮=^[\h"=RRLY"Z2[D ի5={ԭNNNDGG7F6CK^~޲VrZ8 e^֨QVlSYVZ^VVT+ghѼ9.X#d^#E `̄IⅺcW%_>e?`̨̞7B5+  Ń%%+H$'==ӧӶm[ʖ-ŋٱc*իWSbE@Ok׮I0mڴtwSZnK.tR]^2wѽs#)9AFa鸻#Tիi&~g@ӆ h޼9֭qzCR$ #GfĈ|WԪU +++t.\h[2fgP(e%)]|e¼yxԌIÇgذat FÎ;t=OZbڵ;h"ˤ#Gl2zYhr]nJJfJ"QlY+! F)/ԩ:ue2sl?kݺ5[ hQgn]_~}?(-A_|F K*h;w0`NTVs|L<ZͬY۷/m sθqF@Sr9r$ AAA̙309+WD0d6oLY3?!V- ^nʪT*իlذTwիwܾ}c"JY`ݺu~O5t1ʖ- hر[npB"##ׯVVV]kkkteuϞ=;yzAx BLPTۤe˖T\I^dEӧO׭} bРAvk]֮]e˺j31CƊ"-8]NNˠP(^~ٳ={v8p QF&6,bYϞ=_GQޔv;wp)lmmiݺJľ|*rbhA+rT*ˋTeǿ9r޽{SV-miH/Y8Iڅ(Ǥ-e4`e/VINȿsJJbgC p yj~~~P?Ħ$o|-Mj(Ajꍯ2&&&"DTּ^ ѣ Ν;?~hn߾_/k׮q- ܽ{Wo9Or]IIIݻܽ{vy7$e/e^Yו|sQfmc$:Au+P111xxx`jjnG/C<.P"*)ᘔZOc#kuѿh+fIv!ɳ7 ?T`H$TWV#""ڵ+899e;^ߜ H Fܜ,\sIÆ u?,Y?ݮ>wΎ&M`mmMxxn3gnO?uq1LMMi׮666eݲF9IMME!8I-syrh .f񳜤؊IweII(}P?ޛJz\ӦMΝ;;vWLUX2?1-<@bS M2ԀJ v4IHL-^HFQ#1JӨH4j7^πه_p=O"!{'&ݐ*%ӑYRVLŪ˷:~[V?#=zĉ'Fgh""+]kf߾}z~$%%Qzu~Gpttܹs888͛7u魭9sg-diiIll,gf޼ySkV) (Hݼ$}2e "qi^Y˚x !e' c]"k9#i^O&6Cpt]ĩIݼMbI8I< +/&iV'&W2Q)Ii 8 +~{"uVy4WΠ9%}uRl6mm۶K.=7ߓ'O횮Wcǎz ر#ѱcGte'h4ȴ:xanZSIyp]9M?L$x SOim.굜_ًď~N; /{i"iz[Rw~Y I8IuEmOYrpď#iF{Nl[NSHގmQ^;K}lZ@9>ԡvARռ2̜9֭[ӵkW O?&&&hٳ'&Mҝ1:vȡC[W^Mǎr 7;vݝYf*PVTbBፉ@V0Jԯ˙r~ҥ\pƍ4M4~<UƵkHKKӧ$''gM&MݲEgΜ Kv Ő8`$5`6,UpAuR{G_#-UП/܇kISjJځ5 I;&.\qY3wRwiw-"5ThTJR7X,9Ho@h"cO!M0 >ˊ:R2Eu'R Օ+DȽ\/]tPvm<<< ?`<}vz{{~{ۛbcccذa??yzeJ;~6ڡrS#>/jc`/{E@9a5RPEbFR7/|^N|L#ZLj4r/H{*Iݴ0^Ag1X|väZCF,@{rGR]?GHsf+S4s!KYW?GGG<<<\v ~g[5,, ooo v܉7O>%&& .0tPN>ĉ_!!/gIB&Y(8""֭˵kpuu-ۥc2x{{ӲeK7oFN:p97om뜿?/͏FG882D(lJ"ut)Q8P0 % uv]͑  #k]Wִ3MCPQ? 
FrlPqfډ1&/᤼IՆhR0XWp^W:?w>̷~?kkڴ)cǎ֖-[2vX,,,wŅKwիzT*W^խ+ 5ȬY{eΞ;Out?{yy `$v*29&Eu>T!0(Li:r*ㆴvd  o-ʮ&!Ս (/ 斨nr3>5MrNx,@uk N?d/^͛ue͍cbeeE۶m;v, }]J*15jЕTBBB(U8SSSsUVϜ=WV!/D +ujs߹N舗Ǐq"CRR.\B 8;;=עE Μ9-Z;711[[[_4/Q_!xq&FbNV, E2#uxVhi(_u@bn:1>3>!鳮FY \TkN?c .\Ȕ)Sh׮KS}||pssI&z=VVVs}^J52NMM̌Wt2ի=8eC ^IIԕ%@[NS~l, f:Mj6,Mr4ɉNzgM AW0iEI\Ol'嗚5sO?eܸqLJ͛g)J"%%(iРp )))]qʕ\  o FUK7:уӳQ;{,Ӈɓ'uqΞ=޹ /tCP]_4qB`0t ]IRll w+0k,-__l{j5-Z???ZlȦxb͛G^^'O033[,h@Zꨇ2e9XNPGצxAdYmD1(zN@s{'%yzcS5V)UKd2 Obcs~R͛ǥKfٿO?-[斥תyze5))/ >֦ج,deanc\Mʼl?CŊu{gCfh4h4ʖ-K֭[Ԯ][\DBʕ΍7)Y+KKԡRJ]+g5TO<;;AVY-vIu%K^m?{ەARmKP8O_5Fډ(>F_ABb]%T/ jt&IIB^ڶ׃t+-ѣٶPݼy0֬Y޽{|n:> A{ uA0G}\b:3HN(w\<r9R4۝|||pttW^ <\ӧugȞ;ww}7uW^G}m,CRR2׮ߠk<@j6iXڢzż`aDaokXdj7~^Y5lOQyA(: r? -]eUWRiPݾ&9$RL?ļzZDZ1i9[_{ UZU]NrK.LJ+ңG Z憟4jH\ VZŸq^a||`7n̜9sX|9~7ԪU V\z7 FcDrSL_ `"GSiՆYj]5}!pW&qI,IݖzJKWl~+Q,-ѣ[`҄q9h4L>wwwd2xxxfLؼy3JU5 ^uztsr;wQ#rL¼yx}m … ݻwii߾[___Uӎ_>׮]VZXXXlB] e Œd׬IcVYǿws\SNt}wϻGYƍuWmر\2h4=z;[nܺuFQlYd2ԪU Жs3334 ...K8(O묨,`̄I9INNaU|kkCg9>|>&Ms+1Q{ s>Whʃk/DVYw|nRCSTl.njD6;e +07~7)Whi庆~73mүd#`,ә5{.'{r( ֯yuϗ df%^2177cİ!,XDo1HNNsbT,XЎaKp k^ b_ҒYq.q%۶6jA^Q9ӢXݕ{mqCg奾[;ߓqPd89aP[:7}{\YCMqSFuص8vVV4lNiʪ›y~=L ~;x/Vvڍc2Ԯ DBR|9 <|XpFXx8sͧ޸8W4 {Nk؉ܾ}'qBэÿa!cܩÛ_L^ Y)Ĕ&|*8i8Z c}_f͞RfBAi Y,%, EC)&Mݐ{|G-޵ ujz}]#@NЏ+XJ.̜>=^!Ə@+6QQ~ fL-_d"ZUW>:{끃p4J,=u?cmmSE!gǮ=$ۣGe1fjjʰ!+WaaapTcJ%I,'%%aWiB'YرOHl޺'ObH$H$r9)ɘ햣vTZ}2+h4ܼuA<~\.Q[VK,$&&Btt4Jɱ Co؉S۵uD +܏ְm.m[!?i+^{N :8_atL yGJJ jT6899Rd| \!/4 11?ٳgʪ33Slml([)_Ydzq68~#3i8C`pwCB;ph Rl޶}@*\+QjFqcJŊ@bIDAT`~XM[H փ9s:;yR '& e˖B@ oРz LMR2ݻ`j5+׬e혘\h<-] MD +{8%@;BQ&ZAxU9+L%tEXtdate:create2020-12-28T12:21:30+02:00g%tEXtdate:modify2020-12-28T12:21:30+02:00FtEXtSoftwarewww.inkscape.org<IENDB`buildbot-3.4.0/master/docs/_images/changes.svg000066400000000000000000006714431413250514000213140ustar00rootroot00000000000000 buildbot-3.4.0/master/docs/_images/changes_src.svg000066400000000000000000012544301413250514000221550ustar00rootroot00000000000000 image/svg+xmlProcessAny processing function.Auxiliary OperationOffline operation. 
Commit aaa.com/bb-ui.git aaa.com/ui-lib.git bbb.com/bb-mirror.git aaa.com/bb.git Repository Project A Project B ui-lib bb-ui bb SourceStampSet for project A for project A for project B for project B Stamp Source Stamp Source Stamp Source Stamp Source Stamp Source Stamp Source SourceStampSet SourceStampSet SourceStampSet Commit Commit Repository Repository Repository Codebase Codebase Codebase buildbot-3.4.0/master/docs/_images/forcedialog1.png000066400000000000000000001552611413250514000222230ustar00rootroot00000000000000PNG  IHDRd iCCPICC ProfileHTSiǿ%zAz 7Q Pb !#02CX  2(`ATTK={s?`)yPo7ztL,'@"P4&+XXj$kH3X@'3XBd6?˻ "|x}g&<ax2)H4,V2҇LFؔ^d#ճ܀^?I[qO&3Y2xn?|;ӄ {h I|ByPCj?1˞c'bY fz-05uk'2ĜI\/gs£8a~5b] Ϝ$czl,^p=<:/B\t/Ο-3f"0}<9f.svXn2'D:2^J775`Ϳw$_HDl\V {B>2m.QqtP5g0ȉ2ȩV@!YL @8+ p@: l{A@8N6p\WMG@F+0>ipA  B-yBP(CB96**:]ChBaLe`X6maWW8΃wp-|n/7{~OCP(wT*6 PeZT3ՍQX4 MG>4  ]@7[їwC 7 1chL2f-&SÜ\Ì`>`X9.냍`7`-Nl?v;p8G\.nFp$2ym(~ E&lzB10p0B&Jupb q+LB|L|G"4HvTN:NF"}"SdwrYHI'wQ( %IIi\<|IK0$%*%Z%nK$HjKJ̖,<)yKr\ #.Ŕ$U)uFjPjR&m&$.]$}D *CyCKaIshhiWh#2X]LL1^ Yl:Js"9C.MX܀%*K\$.ٱy%SJ.-?+<Rv+)?> Ș{ُW_ x l A=AOu  yjF [v$C[xqaDWdd\dcTGTI($zcnL{,.62.vrGVXJŕi+ϭ\\u2$ 3Y˜L`$T%LYX.RXcbIhcRIҋd=cgNgέIINJ JOIJkIǧǧQy˫UW[7Ek]3!e@+23e#~'rʪ6ruxz߱~4+ ]99[s6nmJԵYss-[n54$my*y[)_"_?a{رǷvB²/E?PΤwawv vP"]]2'`Ok)U{YU#ڿk NŽJʖ*USnt9\R]XGkkZkujaez~8pO?5)}ՋB.746Q>R7 Ǝ;q٨E88.<N:i{ӴP։6N=ӿRVl9s\Ⱦ0|qkUףKї^{ʵ^W/uv_xugnhi}ǪVmesϮYη/s.{" xìӏ<<.x"kY y < {h5߿= 935 688 Xt@IDATx}pTVOm1c9g|.Şw8LcjmW]kæRwccT3 h&k%!6H`$ZOzyhPK$M>w~_ϷsNA+G4Lcd   ڀ   @@ H^F p אd@a!1fwQ;@g5S"]^E7҈4i" @N 0rGEk"m. 
6#c@ zCjUL,P-]z9*IHp:   \`~wFv8#ĚLohdXǿUkg[LL}   $nEE^MBrF*if]^   5FάͦA/#   Пb¤ns NC@@@6ӯ@@@BPw@@@M ĥs8i2J\R]}QYuRbEyUr=պek,=[J@@@`<8ЉO?։SImc}Q]b@T]oWP_i}:3Xץ/>Ld?[Sf),@@@ <8ux͍h8A"i Ww˓:=8_ę)' $n,؎  / P>^} #kyaAœ5"|R$2{nh_.'_О/2GdsbeslGK_聻vHsӅj&8~)Zy("J(ݯ’2͎]Q!Ļ5\XРɤRFT\te+}v HU.IG\ﴢzVFҚYZy jL3=7j!%/N#RX{,^Жdzz4<1K͵c%pbݱpw4K]upX`\2gJǭ_ܾr{;m/wL螰r.qDze2Hձ:IwHma'gTjտFv?g/a}^mGެ=}n}J(0Mo[U SgX{5_C0@@*T,,8-K9fǖ~zdcwt2=XYx~qGL!gӋ'??|t ӗ*,[G}R!Q,wcN5XH xg H_XǮ]БP~|7LU&fj wVc*[\|c5?o\2o>jltkZJv} -,Ӂ/oӮܩmMwϟ[w=%+vPGJ3'>o>2c9|Xߺ{%sldnl?֥jXʩGꁠ-W~&NR?8$׶+!p(.ן}~eƣH t&ٶ;@UI/yy9 Usvڎl[Ls*Ȣ3j?ovo#{@tQLwkMkMMl O>5OG[`JJRuh?3֔=pNq0bSo]9ӮN:<\rKhV7[Vb LKq͛nipenu ئ%Sji^i*&u3꼜ܝ /[\[h莻@:NgLGF50~9V"Y/+k'?˧4,;-Ϟ=68`.{U~HWi v?b7e(~YN7檞ǻ?/ {Ȕصǡ]~qӈKu!oQYV٩ vy?rffNakXUI>Oꂥ)*,wD rrd_ǿY1f*mϗ숾g+{دr@PڶdkRo.\ߜcWh~A}7TU%l^ /G=6fO7Y #uvnjt ͵i˯5(k; 䔀Jt̋$:8n^Z'f첲>g'j\;f?=c<@5s"9.ԝUﺏvz8=z>6k,iK'c\ 1p=ql-(fLC=.<â'fdȱn_^{gU1>nsmO鳯u ;ΎMoY-|}~a9.zXv3Et#u pu:f {hܱcZV[Xo}.|ѢO^8NT*}*]8֬UqC¯L=hŽz ?< stߣԒ`s^6~C$^W#O-bH|-c :>pCt[*xQǹ׏Q9y¹T#^sY<ײ́ߘl31{ }C^Ԡ@Kw7]w$=23~Ppy^q~Fu^r!u}~~e}'$^.ӵp[nxin[6SιYpnS]!k4ֻTZf.X -:%4emu?v K844`h+۫cw#ZlܥcI5*mղ^ڥK$\6-,n- ]:.wMedm gV1H}˫:[pn 8R*wxucғuE}r"}dIҔ޾u~]i[T{|e{YHx.3KtC%C KQB ΕO-Ͽ9s~m=aPcyOش'ȸ%Cm؝kŧqi;Aݗς UT]':qO;J_zm긵wm%e(?u_&R@%Ec6u(Lp=>ُNVk_g'WW/{Zߔ؍ڍܷ{ˍ{er aO9WThx޹Ix~VUGv^(]A|Iؽv$ 4mnWn6wӗ,YPCvZtg{G'?Ё軖Ig' ^oKᒾn˝%Ͽm~6T/hbG0< w-҅ͷ%UWӵq#Ǧʶ))!7[?tVV/S^Yemzk |vG%u~RQD5w--,-YP]:!A#:]qo5j?-r_R\t;bS沲e}/mw>PiS:nw/QT2%bHP{F\(wQ!e0e?ŒĴ|<-UBC>^ߵT&96|ݾ*~x:.*ՊSûw}X؏J2߭muW.  |õmvq voݔwjfjm= s,%wlH M@'f>o I;rIg+ NޝHwK~~ 3:L/:e?`vNUM5# Ft31z[-ACLM`+nD fxCSJm/J3nuF;*:.Ii][l7TJ=:vcUUy`승=ź1;N)Wb:nYȷh]_;ZdH߶k]7/{ti];*Vx;~K_CrqǧWХsn>O}4M.-h=.+w>umMᰩnoi;e^Hl:e> 5ea/SeWE xu }5w]Cq>Ov:/Y05m {΂j {W&/wkt[=clZiרM:]^;risCl͏ҤoT5nZ' ٥I]#Fux! 
BQR<`ZYf6yO.^û7r=ZqO n_,:ݳCC㧮XW, z>9†F  5,:Վ*rwRw 9>)ukunf( D]F˰ebm#T+ǷOnڅمi9~r-_X ^ݔ*eg (3ݪߥ.S"UDF9X;|A="Su/髯-fc|x\ W/[r}D k)@~3C.[ ʴ_mN\y{/mA_t."-5c݅(S}}2{ϐ\yCU];O~.e!}eD a^`_sMwGseL.:./ZT.ohEb JTn#I'iG\V[m¦dMn!l[+6Zk^>} 4MEch\]*MkWB_\J~=F4o" NwDG.is,WetVأl-8Yo88u |2z}FNdKkqLj9ht]1%T}C:mS;lm v}rwSYe'`DO5^~~5u%-3zK:*{;_ѮH7{sql=e}Szmo͗ }]kw‰awsGz;`w;ce͋'v54>n)ˮ\\?tulgzGcf|byzf~蘨=fwÏɔ)@@\b9@Qi]CwVV=n6zoKgE:gap ؾ";+OOƣ%s Tl|ޝӸ]ik)@ Wwo ?94 b>;48E*j =@=BVΓ{@R~}ם^qW"Nz?\Z{2otm_ .8k>5 R]0P`wuw~t v@D-t_9o-گh6eea[T96e9.w@] wDvݫ91+7>_}2^`[cyϲxPlJ{ADö.l o:X^1kǕ|EZh?ڜe1[u*OO"cO$ke@EmDc|>lꂙۥp1-;woYYbGzߌ-#8No@nskZ9@]+:daQ3{ܣ8v;8r&Uv;tf]s|tAY6Rj^\S+ш]9_ N^`^0>Sf ff9[쑞QwroNv瓾L JςUJ ܻIw;i2~[k=v5}QcMcN-63~Aw Ktaԩ?R<ƈ=}6М@c:톭Nt 9.}g*F_Kן:-[rӞe}==iùUݗ펾Zf»S}:V7Wv{K7.{b6{4&2ل:دj߲:Q -(=)[mn%(=er-;M aMFl*H>IR]- }v2a76ͩV W~}GR=;ZRXѺ{|iU̲ըcpzҾT^TM|0),@yw* X~l#IX6;l?!ꊥ9]mB ;Hf8 Ej}˽ y =gsKރu`;28Xj{ [)~JڽVL3,=yZKnS$b#ntǥI{rb@@@ +\@D"ވh{e֤>kSĞQh?A^V>>@u[orep %X|V L-2?WO=i-Խޫ2|3koVS@@@`*n{$>[ۿhRlgDqU{䝛kajt*H cSKC|@ UTTGܹs LoDc@@@Q 4w.{Ԑ]jŅJZToTFlHxw=ɡB@@5)aG?X`:6@ui*P}cj^/Ի]pzۢ -_߇i &_@@@G{ ϶'I$Zje:nTo{ RvgN@@@p^Mg#6Au?vGOHnnqLҗ!  N{.r=.b҂EFݹ}"3f#.4 513R:   |-Q:ݥnrػ)(YO 9GMuX6,   +a7A`/}z1ۣdl^VX@@@R\p%MuS|-8Q`~mZ]hI5m5ӡAE-}{jpz   䡀JڵE*(b mma ܻ=ݵQ-݋JiF-۴>{s0C@@~=|-Gb1EQU\\H$܍ -Hu00t2L CT؇  侀4]r+6-sJ Tt7P   0a0Rm3Q3H~~qnt&)@@¸/ BFa~wnnj],#  |azQ 8[tX[   /. 
_Mat;UnVR jrTk׊ʱ?ThIUe] ;بUk+v}VvHA־{mN,Y|B1!Р_:h媿ԳFkywoaon7n͊ ~]llowkW^Z<ƷֻM-j_\׳uư-{sw:nuߦZshvVnՑk}͠vza:m[q/Ѧ-zzM(k   3V~Z5hPνj4 6SŪzJI ]>ZyMM޾k+RǛTcSwĀ{-`$MMJWWQ;vkTccTR>m{/ꇯZ]C֦fKD}v=RC޻ܰ^o}flԗZVz)prqM m5Sm,Ǖӫг K6q,ܪCǞҘu5kGKޮ][a{խљ/ Nպ_=R/Hayu o٨-ޏ c-hwͪKۢd"&[cKM /y[}Jh ÷ch  @.L5~lo+ܧF<6zi{~(~,OTBwxˎF v5}?cRF+7>С}*Vo-xusA`j.>vml~ukb-k_ޥC.ݡ}UKaj\lӓi]Gi1~wz_zm}Mj 8Smvim;zgy 0]]xRMz1(e?0,xwfvmjx^ aAޖ?VԽCﲱMze:Tg6}~w5hKCwTV/3yg}!Qvz/7`7@@%iNdFk6ۍG|oC Hk 柽`ӣ˽df=MQ ~^IM}Z?;wX.-K]_\fl[WA_U ϊwpuzu?l۹[W2 =zjV~tj0VBTm2ԗM   [Qo{ʮlO>Uk׫wi:o+nLFUQU9_]Uhj퓮xܮ; W^#@zt"qQ$ҏں]ݼ8A@@|Iځ  iwUG@@E4_zv   9,@pÝG@@@| 8͗   @ pQu@@@ _N'i   9yT@@|Iځ  iwUG@@E / ̔@2Tgg.\*r@@SDWeeua-@p:W.0/TZZE)Uh   DBywMLpCSi$QAAAvVZ! f͚2;wΛWUUJfO_PpSy.]ꍘrvtUD@<"͙3GOiv~)8~VY*1={L  0;sq3d,Lp@rK_@8 %4;Z!  -4@@Nj  RTwX@@@ ;N_   pK RMc@@@ 8~V   -%@pzKu7E@@S4k%_Ԇ 3Ummm+n:{JhMav&,Ɏߴ? A>i#MڽnUAn(ޢ/y'E+{Uյ꿩}WcTJt7 wvo߾*s@IDAT]X+X@@@ "֠mOL5jihV]{w4K %ZO7ZmmjҁO¶vN.5Hk"unRKTC@Um.ɮjcZZGwtuis?֊Uk%<Y6^[:fmҚx\jLХRXܠim[穰m;Su5j?Ċ%5@@'FN7k< W_< LzYQO?0ݼKݫޱЖzc϶'O-Znk?ۣ߭"z}>{ұָ]׵wm6Wv-Mge9vŖ_!7ڻ`w m`ֻϫݲX UVdl"  @ 0rM=W6^mQqUtjWy>V'j튭Џ6תiGkזSskojʪI>]'?EXR([Y]kSmˬiq?[Ůڵ6kYߎ/ W{Nm:rJkvŵf+ڿmgA@@| 8ͪ^iU(t6Y|gfXPJnC R]m o(tdDT+S.M +8֚ uz51i  y%0apޞ ΊzT%*k#۴ZY*mvN \8~[QQ}NcZnm$Aq{pz.}ƍ&{yW]2[_זZ1ӑ:ҞTMynjɭnVkkn@@rNY\]& N2m.8mmmrmn 4\>ntryvvԨ ?^h7-w RSϮŵ[ڴvIFRżA*nSqwi,aj={ܧlܬWyt4iG jϡ5j}osTlց5؂  @6 84sgvZQP~VפRe]ej6l{kZzj\뷾ok6[jhM[3mմDTbջ 5X/z7kKkT{,%-KsGʧvumǫ~`ZVo{k[,Xb 9  (P`##CȐ}~dE{‘e˖eE}qkձfeD.+]Yre}Ǖ[H6 Am:o-&P=" 'd[  @ 088ur dOS?… URRbC{E{ޢ" _ӡt^oV=~a Jv"nh%Vn {ن  _g?dϮ^TV+M'd    _Y՟].jDe@@@H3L    NN@@@ 8 e@@@TtRv"  ̄7D e~Mh  p8w>2*Lz{{;00D"!S|UXXsNGXC ;ϟ/fGwP @@.]$w>2*Yfyi,{mEEE32w_XCV{{  _;osox\|%;֛BTl2uvvĉ,*B@T^7bKv fgP,pUUUy,&UC@@ ֛SEe@@@ 8~U   @N TwQY@@@ ?N_i   S9]T@@OWZ  iNuE@@S4?V!  
9%@pSEe@@@ 8~U   @N TwQY@@@ ?N_i   S9]T@@OWZ  @$jKed2N]pAYP#  0@IIϟJEɒk 8):\`_T-R,˽FPc@@H$n,,n4 L# @@ %0k,ܹs >VG4{䀀ʻtRoĴKs˨"  iΜ9:}NKuv RwٳE`D@@ ;!ef,[Lͭ  @(y\(/ @@No   @v fgP+@@@ 8"  )@pB@@@[Jn  d@$;E@@@[ LOCCCDR/Xr _Q/P%O@@@ JJJTZZ*żW4UqqrAjQQьĹ/MbU7mЦWرzQ==%#:{Q۠wP{.ؿˣxnߤuTE໯-M݆WHg?_ԆM~9o=۷ki  y it5!-rkn |n=aZmm-S:>J-ֆmZ:`oӓ2ںF-ۻjkФW74I{鉚^m˫P[K^q:v3QuoR*UQ!uuܓz-:iW,Q]]bAyC@@`4?Z SWk׾}:g 9{}BD5ԠvlTm*ol٢7r[Im-uloj߮ 6H*a~`J|`מzwWq[ T]߯C^ʗFgc+-fPumE*   @^0rx}x-c,tKvn5y\]]ՕآuRm|WwJ MA4j)XJwlS|F8UmMZ߳To|Fުj֫U-jVkh>Vh}vQ{լ)l@@R4/u*Z74:fRkRpd|V(_ror_ Z_륝pM]oצr3[*egyЄ8‹Ya@@V4oj "F/Y,7̈́iG3I$yf{C=$0b@@{{ l'NF1FӟT?ϵpBؠ+{ޢ" _ӡtre,W]e>m|YP:Qv@@{"  L&@p:@@@fDtF)@@@`2t؇   0#3L!    N>@@@ 8f |i@@n)wXS4;Zeu,B@LҥKrs,)@pBTRjooW___֒j!  6wXS բVd@4ղe٩'N07;Z! cT^7bKv fgP,pUUUy,&UC@@ ֛SEe@@@ 8~U   @N TwQY@@@ ?N_i   S9]T@@OWZ  iNuE@@S4?V!  9%@pSEe@@@ 8~U   @N TwQY@@@ ?N_i   S9]T@@OWZ  iNuE@@S4?V!  9%@pSEe@@@ 8~U   @N DrT@@@ WP"P4U$I ^M@ZFi"   Dg͚XL{/Hpʴ[G[@@@, 8ҎZ   $@pz+6mE@@T4K;j!  ۴@@R,  J9'[_h+  䑀g\\2@NA@@X{hWkZrioo.\   0U7bΞ=[.a,ʮѨ͛gΜ`ְ@@"7DӉ;'SW"C5gL"   Y"Qr1{L,LLӉ;=   }a}5ˮDpѩ   @~ d   d 8ͤ6@@@ 8Qn C@@$@pIm   3*@p:   I4 @@@fTtF) @@@ i&!  ̨rS   @&L*lC@@Q0@@@LT؆   03Ma   N3 @@@`FNg@@@2 fRa   (7!  
ddژueh`` H@@@1Ś={̙H$g¯1m9Nbl(@@a!;wNr*KfFL]`ZTTlE@@T1.qq YxSy L'D   @v xW.a,ޔ@@ew"=Ug   @KO@@@ Ns:   /ғ@@axiRu@@@ kɤ:<<,w3ZwH$zʽ Rht'   %%%*--{b+{ =g&S*"  .@p=L@@@ 8́N   @ {>@@@ n4ut'蒫{X"ޥxb/Az  WisWxImbgn_n/W۞)M?.Ǵ#ѡ_n}Cԑ(*N}@@@ nTQ-Cîq_o3j)I:ŘvĒd2L@@@ OnTJN=o4fl0`^N>}{߳pPǿԮ_'No;}_iƇ.yyy][-sU&;|P/T$~O~_k}lT׽O3$I!9$DN-i$)XBE.&7sOmbvdcc“Mf̞ZEdg[Gb]?s+z6h;"ZԳ:jgm^~`l4 k{,O@ۏ$=Gުh[ k   @ 'ly uN]ߪY,!XX[2P^~YmuZ# {NĦ7k%oz;TެSI`j}V+SQ"-PeUnoto=wY@@@K tsLYwLW҆w˱ okFyFku':^]9_(sʷ7*6mIn=ZqjN >?֛6U~]!w?Rfat}Y~XKTrg^]>/5u<+w%}}4Z|L~w}imӇ6U==t=ƣc׎yZM+c9~?ުFI~5+YϬb%-%[u!e>ЃwhP@@NNmjoA ,ۮy t `hS{?1zmo!oC#?]-t/FgM/%pPK~\.٠;Wj.IHpD.pWģmCF ޣ{=TYʪ֦go?V8֑6|=e;l'x0k99t@]]a)GؾhꀦANAJM=Q`ȃF@@@Fȩ5 I/9R`t=Jm'Xo^JmgTych/ ~>T'= kٗzlT*'>+'rWu'봤y]fVtW5z֦F;Un~혤* Ll.뿞֩%zXY3TjqӠ#5;V~ƚCx#nl/   `&}…t^.ٞϵz;o;Bɍ]"6R=H-pD躷aH"Vݭ˴ hSgoLFqلܧk'{`[v]vjpSZr\[7U@@\رc =:9&K/nMǏr>Vqycj̘1^Gk8jD^)d<Ə``rg=}3^O;T;d`[ϼ}C:z~*_}rŲ@@ R$JVѣ6& 'OOݮvsnQ˚4SEj  I*@8MҎV˯*V&u +J$#   $d+,!  4=V!  )%@8M   @z Nӳ_i   RӔ.*  4=V!  )%@8M   @z Nӳ_i   RӔ.*  4=V!  )%@8M   @z Mf;v,UJ=@@@R eɓi  $~;5zhרQz_Q/pR&   .tM^ r/mܸq˅1cܐpʴ*"  .@8M}   @ NS"   taڇ  4:*"  .zӽi߰Gnwuua" \{-ܢI&i] tl)9M>s}'/[o~)MJ@@p^2e 5{pĝCՒO`:yd/8jJ@@Ν7ԩSzgN/I ޿܏5*jM@@-ns 8\0 IzCScE0MV  `n@{96!lKitUHw#g@p77cINir B@@!`aA8R   Nތ3@@@Xp:Ġ   pӫ7 @@HRSN%iͨJ:;;yeZU\ sozrΝWr/n5So[<= E-Oցh;^oLJ]Sw!}EZx+;܉/nq:-Dw  @ N3LL{}56M.)*.., De$\{,SwwV$c.8oʸx峏7kqgOjUv[n׏oo޵Wi-7OMOgb뮻4 k\;ةlC@RRM? )`P-Jz NsnXv=Cc'4E:jxPnFP6V]8H`amSߊ7jK(}zogXmӁGMuujj)M~J~pǩhvնZߝԊ_dOEu?ё۞=̬KE9m-{?s=OڝZWm RZx~'*AFeT?X])  @Z L0A/&R5M5[.?p|u?دIϟɓꪤ# j$rO$;`te^dDa߫ܘ!P5vψw?q4:!k#o_l I_/#@@pOu3foٳg?Qb޵O?ԛ~[[|L5 w拇 eH{Ӥ}f   W+@0j\@@@!  #   \z8@@@`HCH!   #@8=qQgo  ;n8~0 „$z"HrVZ! |_|!Kr N_U q… IZC  07iҤv- I T!uƏo~:x37u" pSym)SPo"5D n&M:U޿Ξ=\6  S7F0eIiRv Jv/HjR?@@TGΝ; 傽{={5Jחz; i   @ nfO7^>Oƍ^.3憄S9M?   4D  4{#  i @8MN    @ NS?   4D  4{#  i @8MN    @ NS?   4D  4{#  i @8MN    @ NS?   4D  4{#  i @8MN    @ NS?   4D  4{#  i @8MN    @ NS?   4D  4{#  i @8MN    @ NS?   
)Nǎ4    0\q)Nof@@@H"7A@@Lp: @@@nrs1@@@ @@@ No(7C@@Lp: @@@nrs1@@@RwNk@@@M/ӧuY9sFHD>Ocǎ}=Z5jԨ׵]"~{@@@pAn{mƍ^.3憄S?f4 @@Hi5E@@Vp]K@@@ N_QS@@@ mi۵4 @@Hi5E@@Vp]K@@@ N_QS@@@ mƦJΞ=:uΜ9*զ   0ƍ &[nر)K F? "  "p9/ĉ*)1׍`:f̘[V@@@ I\qyK $}8p7`zNd  $3O7, D8u   ,nW$^>^A@@Hi$@@@RXpG@@@t KO@@@ )yT@@Hi$@@@RXpG@@@t KO@@@ )yT@@Hi$@@@RX`l ʫ ~|>2_]]R$AO)M9p}rNuf$]E$|FuB"YE@@Y`D77 J9t/o}7gX~WMiJ-ao4Fyw`jݯVt}ƍG5r&TD*zuU3/   W(0"©l-~wg(ԡf?[˹xexgzw;[h|Mҥn {\FW#QIktdWն7{NMYQ@@@ F=Y{et߃)z:Zot$[DoZ7:\F؉}mUSVnס]6oZ{Dֽ7@yUj~m^kxe?#^qgm=Veu u{X4vêX]^s\ XVoWعvHb~jhbA@@X`D3+^6JVcNߌ2دM=~艆k~]aӆ?[Zb%V}+݃nh^' cUl6k^VujV%Ɲ_4UA.,CwXVx[x[mn'*^u:1e@IDATf}|GlӒ.SV:l]5o{ˑ^(/SOT]7@@@`Fƴ׶TKVQfp7`"όe:+ՂǼYnjOwz4Q<qY@@@Qw{5X q')(bЋ>rc8oס7K<7g{Md6`,*y0:qւoy}˱ٗ;eJ~جAuVJ֦̒7,~3aZp>|tLT|~TjӜ7X@@@!Qq>ב#+To9{wGI}m${mmRIW5ÛE"uo?Qc^iiv>UW=={jWӒ\ '쮏Ғ*Z+`=鈕spr_!P]GUhGOWɃ/s K?qT[puĉlF@@R`<):ou2=NV9OThZ?z,*Y/|VOвeϪ1zJն7.}}?ӺnˌjUۃ<0DeNzuZWq_[`*gd}ͥ/ELQkT=K˶dP ϲn-(׮#5YUӌVw,k   ʚ Οy{]pNg{>꺟^⯿+ ;Y"76$-69L+> iw^D[z\=~PkFt~ꗈ<~W_g   0R;`0ѣcK/_6>/+ǍƎ1cxu5j2rzut~|sirU.vΗd#ѧZzYJMq׎   tMĬGMCZ&!  &@8MWXo _@@@tH(K@@@X+@@@@ zzz9:{Μ9# =ɽCrñNC2@@@?~nfOկiLM`"   taڇ  4:*"  .@8M}   @ NS"   taڇ  4:*"  .@8M}   @ M:zU$I*@8MҎZ)?G7|nv(B@D":}w+\+jn4LǎQF%y  M7ݤ /fegg$n*7 othnNޞf  @1c[nџ'N$7N)!10a)]T@pxfH/IҭNKP 07{!  w.ɲp,=A=@@@,@8O@@@d &KOP@@@` NGpt@@@ Y@@iRu~D;#u;lW hb_㝲 d˾2ZCZ8!  
@J 0rUvT_6E\="z@O#oW.ԮWڹ]@!x{B'^qbA@@9k'$UV+jqe*gG-/8?vu(yd[} [p*N' U  i"@8 ٢ j8e*ΟMǍtQMM|3g+W-ڕ2W\essnkVii][TQڦ+U횢giPڳ^~j]f)Dў5|zx>jzE-'aTVWiVv4v+֐+!<=WUֻ?Z=GSzrzzaOWQ~0Z~ߙa_-m)ZUkw@Uk*W,   @ NGC;UTꝝ65(ήF5/QОn~{YSNNPSM{;vRstRe0WU lWX;˿eeҊ]j|Gr}nose*zvw So~P)&6x+.&tkw&U-`.RܠU#mj\8i`0,Ҋ]6j o]v$Z,9- *muo_i865[sҨ0]k+}}CǍU@@V/ӧuY9sFHfQ4vѣ^F} ^j$Xb|BbssVѰJm..nk֎5mi$]۪ʭvkn8{Rtqzw+c[uw@f/ŏ)^wk56vuE,VNnЪJZƢ[^|(ZM{bt3~̦y,Vos&}u׈3>mb"m6:❟W 366 ZN‚  +M70a+##C[nэxN7#]mLі@)%3*-to[rkvlkB6wm5~|g[N\F/`^q4\v*.=./A]\ WjKmfFTo[#X}mG_P>jqcs.:>-7Jyhe{Oy;Z+yaV"ݛQWWA_(@@@ {M=\/>7$nL81ዥhlqe&2HiBbomq4fQq54DCfK~Z:Ӧx쩃"a/hĚ0%g(Mni3F< :B=jS2MK` $@@RZk>7: mQzzݏ-LOm-jE{ P4&ϯ3{`J]n;B3GXmW,ǻ7Ԣشx[]uu^s }a6b;5:*ݠWuy( XRky)=WѪU⼙lX@@@F NE;^q'vbalh˭SrE`wnRiAA65=uZP\{i̜ϲI 뵳^Jzά^4hHt;{xēv'i}zwnthb-Λxxߺ?h7:TQ@u`S)n9>ۚ5fmj*-RR_ Xae^ux@@@ 5oVbٴ{?بJɫҎsɵ,MiS]M^3˴zo`a#QWVQ6۞[SSը5sp+}2S_더vSiIhwI{`R 4[@F}eODGp=wؗڼJyyʛS{l}<9Z477*`vێi mM~Nƞꀵ~G}lC@@5[.?p|u? /9B&OpO=n9S 5끢:jmk?ӮKoD|~]:^j=NRkr5vkf6m{˰'\PӴ.jMܭ-PEWwm_Fg ޢ`~X!("@@n?ʵ_*O?T`YK/nvc=wܸq̘1cnO@=t߭2K|ZŞ뢕{ xu c~.P4N~ ^;2x=l`7W}>@@@ G\Z* -lk$;Q.vEڑO{Zn<~&olM˾h:e؄  i(@8N͜U)ׂu~~𪮕=m؋@@t5Z   0,aaP@@@ ^"   tXX)@@@jWű#^`/F  @* s,)z_nH"ȗKhxeJNPy:C ceSs3}#pB;~RuF@XMu#8Kr N_ܨbC&<`q s~5tQMM|3g[d;V۟*kSa#S_ZHHKn#Uk*{;jWb1#˫"'ѻ*<%SgN^?oms34eZ*hjfo+{F^#;3WKmxfeu9ESP*h}mi<]`o7z -;;{ks=@@Up,=k lTꓓWwݹY֨vo{:ȕGI;+:xq]'c='GY.^AH搻-69gjhnƗYT[ڢ5[0עiK|~y.q@@@ ֛$]iعGr} 4~[/d*)X)ji`TY6pT~tB̀mu*Z)kŏx_6vJba\,׋6F}]{//S+ 6`QY/Z^7~fA˫hvE@@RW$`a@dG*JJb_@sfFG&CA[ށ3MuLl[`fvT>[)L}v oj퀰k!1p],5FЩ-m=כǭMkէvs`Y7U[gOiN֬y1xX=SbulԵ~sl©"  @J ʚ Οy{]pNg{>꺟^H?^Pܤ0W"̼}P(Ajm+l"nfũʌX6>˴k_}#k\f;n!ڙyÓ?  ~`0ѣc^/~ ~; qycj̘1^Gk8iáze-]Q6n4 PjBLʃʮx]o_/$@@@ )ڇ\Z* ^E)@  #Jpݝ9USS@@'U/   #Mp:z"  I(@8MNJ   H    @ NS   0#LJTgqEm5bxxHJ2CX4E!  
@ NKRBv`-F7ۺ}_SCO\#T"  \:F>e{>(Evqq:  $4gjў*-_;4_竰^mITcv=*mCvŮWZݡhT_^nu-\my~8ӯph뵩N   ؔ=&Im@hm!m޻ApZSivv;M6]#;ϕQ{kMΌV[nw8ܷAMMVI*,J=@@Hyiw7kjcUn$HMݭNUO+3~bv'⵾2No~ƿIŵ\nP^` YC@@`$ ?׹sty={VgΜرc{_G{55VPM2{nvtTaL5մ{it +Eϋ_o|"  P?^7|ܧߞ%^>Oƍ^.3憄S9M21˼2Lzq) [FH6& ne@@Ft$%ز/w&FbÖ]CQOAI]'cԠů;%"Cڷo&@@Huz^G.*jږng<}nТݚkW.{Q .,+[MYjohR?5JU/vOৰ@@RTv9jk]n4;2s<-ML}*^v'H.1 8Uԡ& * ze{[k-D@@Z`T֤o]p=.\8=kuO /|u 4y䤨OW"hն* Jp|ч#%6Цن]K< #a&l7Wg O0@@R\O?U0. My饗_Rv[R<iO CPJm~ߐe_m,De  `ZՆjGMF@@ &G?ZE +r)@@@r<rBG@@vs@@@ N/'~@@@a ;1@@@rBG@@vs@@@ N/'~@@@a ;1@@@rBG@@vs@@@ N/'~@@@a ;1@@@rBG@@vs@@@ N/'~@@@a ;1@@@rBG@@vs@@@ N/'~@@@a ;1@@@rBG@@vs@@@ N/'~@@@a ;1@@@rBG@@vs@@@ N/'~@@@a;WSt!}_:h˾kɺ5a„A1PHae*̖-;;@2G&f" @* NSרW&|;׾6h=>3ܱq_}*mS6mִ"ZXPhD!^hgA֖mq  p='ե3g*++KFn[oUkko[ō2w8CY&7V) ?3 h @@I{Nө7i˰ ={K .ctT_X:4mYHw+b'D:T_^^ZټϦƖT^ZÝU_j m_XTw*,W[+)Dˏ_6zԼ4vJ<-ڭ;upߖصm6=M]WY_Xޯ.ywjBkEkQ6lVBieiͅٶS].U^+mk^ʷL(U@@`j8v }=39j֪gV͖mZPP@jکuj YtwIM6A}T_M .]9VĮ*-i ʛYԽՔ(-6p6EksިlM+TZ݅ {R5;^_ъRJkqqb痬sյs*ֶEڴ\uצ2N!d#OY`nYҵVz+5*NsU;X7ΑBTQS*ц%|p Jjfqg略fem4_HKּոqBOV]kiܸQ6iEML۫AEXC@X/ӧ3g(iر/w{Ak8D8UL[KM?vf]*-PX%/=;R {2ONz5[XU Wtkf6UjLݴQ-"VYyezgâhmwJcQUVW4C- }sfMPTVWq6bgtA25l}mB5,p]56t4as; ͹(޹JΰwEOvT_8 eZ7jCO[rߔB {BZ?:smՂX94hwdmvu\4k[q-'?O@@ \馛@^n۸q㼗 cƌ!&]Q!\食w(b,Ʈ=KpZS[">s|VeNS`;ɴ[޻ŭ{_rwڴZ4kW{nWRLl {NWMŝ}<}v=jjggou% r_ -j6J봼moJ{`b+" V*?>-h ܆T+8I˗7ۻ埛ݳjۧ惭ڳFG)b< &|  \#W @ת_.]rȌωۙ#нޫ rl46bZb,V5~צʦiNrQI]Bx:斫dm)9ZZJȩ;+`Sh_Ȧ/Y{m@`k= 9OArR߲Aݔ|䞰sq'x ,U]8րK ˕K%*  '@8FI6H=0{/ecuݏٿ~ VaMC9p;:т'FB{pLj|gܽv\iĩsCnzݟJV6)w"MʵEώW!=ͻώo+{aM-ze;;'~lMoGmqo ٩Á=VcA܉_VF|䴯$@@@ʟ2R] ##CppǸcv-k~:{TQTg?Qrg*Gj(̆ wjwrYL[XfOZ-xpg=F>xl֞0VT5}{iz,/XՕX79Sk͡A5;CR6*((׾qnmKAQuh٦>gXgOvm>~x*Ldq6gzh=YEKkQո]^sVJ՝89ko  /RVWWKLu㎽oۨưJ)n ؓv>6]h/6mShi=e%1WDot孷**ؓ|yUQfioњ-b5!O@o^%2[['b*Uj53{G'y|m|?TbO7sP ODԪ#GVۥjɵ~ڷ5Z߽TE:ʫjT R" kSysUTwT-߻߾o|A@QYus:o lZ]S _rڍuyB!M<]+!p 펎uvvɓFL]0=J6x[L+9b{XUyӽ3w"+۩4ʮҭZ!*MeWQBC.vʦt " JkEEia0;g0sp'9|VЕ8 KOt u zë@@@+Kx[E   $ NEG@@\p89 "  $ NEG@@\p89 "  $ NEG@@\p89 "  $ NEG@@\p89 "  $ NEG@@\p89 "  $ NEG@@\p89 "  $ NEG@@\p89 "  $ NEG@@\p89 "  $ 
'`@@68qB'OTkkk֑j*//W(JyDo|Wh(BߺmƖ^ev2{`t@@`\0=x4yd BKK̝3HP]0OVs ˻;юc^ߴKW|@eZ#  1u*+#uݿsp7bi8V^^^S8v6ugo]DMǥ9K@@T^7EHn1o4YdžNJ`Z5N/3  &cSy=;'1M6j6|p:p@@@`Ӂq    N@@@.@8W    tA@@@`Ӂq    q}T  @ 4Ӿ:5ֵU gJRJЙ@MY:tzzש@cѮMtz=IzgqiۛGu e:HOZzpz>Xn @@D nwbqۣ^x<^[@O-۬w95Ȇk濛~Bi2chͽ^wm<i[֦7d+Sɪlג_mgk6^WmQ]o|U,p^` ]_j6N $N)}5ᐅdI.ɦClz+  0 4y ië+TSK:"-]WYaO&' ]0,=i{^r@X+e=5*_mВߞRIYD. #O[՟ߢx䇽o\f{ծn;Ze kloT?RmW++UN葵i~Oz'-rOeVZfa86?5zxծ֋;c׮Z\HkmUm45wSsFpDWR z޿|/+o>ߜ҃_ E=# m85|xBm5X:}.W#]ՑrQzW)F+յ ɶ?ܬ#sdި^'h]oP}Zd#wq?sN+Sw]~Np  XzHiOMUk۽hkZVszL-4]vm{]S5|s|,];UO.g42Gk:vK-̆Sv|4QwD{[-@Fho]^zw5aLw/:L֟ՎQ?R7DFFiLJ!;7W*N}T4ւl9ohQX'#h-՞=ܨvڬ ϗƄ T,ȐӮ9U߷`*k3 ׸zո:mpmOE@=J%݊yLk6}j~Š6[-@9OhÊmCjںV6jiZ0屗bn;oԾKoէZm~SfM#ۮ#h/'׌Hf\V.]o/EzOҖTyoWD~wNlZ_64Q^h@ş玻"mX9MSNRLh]+޻ߦ뺥nI IDAT& vk^d#Rq)@@:m4mxYZ깏Oh[kΝz-(͛Y^-%ZΝշVN׆wޱ^Ya5Eluz{ϢoSZmiRfL)*\ea2K?)Z5eLHGX0u 6%szuD|UVkueyuz? eLtSlw7Z&]h "  a}4Л~ïkւ̦.ɢZE՛k޸f:{w\%~b=ػUѴiFb-hN({ҥۺzxf^{$[]lu ˮ bؿknZC=?u$Gl:vhhO+5Әl?ZJumJ<Џ:i=ln@@`o*XOnӳ4_~n6 ,.ޠwVtSE{b;O#\ڹQSߥ~iV^=e]ۮ-Eb|{t[1E}8=n_N%)w,|LNk/*:bڳ9?M!V$QS8J׻`,U*R&=}]fwN[[[ܬv;wNhTPH=knnܚӳz74C Ju  Y*`/=zbqD+,ңm9+۾NKkwkC5{%k iwiǥ4KխZes|kmu4QڦR_LZR=^R;- /yLkJ4嚫kzjPfMڌ׶fEDW5(c$ϛ vIPbDoZTm /h۶[p~ژ޴^;.ҬIn~Emw3{ '[5謁FM?zڢ>n֌#t}SvTmtߤ+© EE( ՅԼ<©g  !+^3hyrmG6hށyO>^,^eY/ќz㥩O>V.7nnFm~"6z^}QK/bő92 3^Ke/+kL>[4z}ܨ3-39fi{vF[SP'T꓌~hD{ ,[riBG퍏;ϵwd`߇$۷[sAb،Jt?S=}lt3uա{jn@@ {vޭHhXI&ZT6ZVv~5_&h!~UT3j2eu=WWMclo~.)V۞۰Q)#, 6{S*Ӝ9̡>zMU=L*lĴQ'   %py>B tsuЩ::t_h@@@ 2)[SKnԴ;x}|Hh@@@ hRê?TߺdͱQ;w    y.رCpX66 jވҊG%AǏ ⢰ۧry&@@@P}ѣڲe>3ݻW~JJJӂӍՅ 9:aٴCML5̎u;;}څ=掱   #0g(EQڵKǎSYY SWC?71ǝDFK.Z@ nEnFUVEopP5vmgUp!   
Um#.hfŎ1'~8unu]­[ܶ[ðw`ط_7w^eHytt#PFKcsm/v<%>L8g   N?O6]çێ_P_K|*$7GJ{k;sՕi,-{ԝ宲%]ٽtƂPލ lp:0/F@@SCC?"#~~ J47:caTtЭrQ.U/?݉]we,v!V@@@ [åPSSxyz?HC9uqOFI-`@T[FJ]XtQRkinΨ^{1}@@@ @g|8Bgi_J-mwjaeIENDB`buildbot-3.4.0/master/docs/_images/full_logo.png000066400000000000000000000221721413250514000216400ustar00rootroot00000000000000PNG  IHDR1y- sBIT|d pHYs+tEXtSoftwarewww.inkscape.org< IDATx]w|TU63o&I)iREeQ"kkYA\u)]P\g)BB $Nʔ73!~>~>[ν{=s"d,1GeM6Jem э?"*~7 kV{ s%KOky?BuhG;ڊi(Jw]xdϲaQݯl5ԘڽYs`Ο=O0юv\q4i 7[nD!7"&̸vhD i9]@DdIE7(m~:5W 0  3ne)3c uMaS2@dPUVo,u6[c!kG;.4&s4 )-=<:-itE*ú%][>=`k ~E iG;bҿy9*Ufu/8P{HԇrjPO`9rc^w&?i~PbӠR503O5ԍP$L&Ӓ:dREj5PeUUu Y~+<<͸8 YUU|D$HMMuxf:}4+IR_h8c6z[]]]q\2QQQ Ƭ׿_ Lߨ]5kNfeeI9mUUIQJ+}QLr555tFgjܹsjjjBPd~oݪ*,XO=m4SΝ/dڴi6nx ^:EQ}y_(KPNLɸ\!#9- ^:"N{%D~:]K! )M+կ?~MҰw0exD } SUq*d]LؿuVo8ADTTݭ[7SNNi6{KuIIIHIIAIIx߄Rvڵ޽{7S+Xf;BDnY&n_#2LR'ec4mΟ*oDa=ޠdk`y+>}gNH5DF3gΨNZ`Oq1ׯ7zNm>y`Q$KQs _IM6^|!K"}iH0/=k&@/=-=q=t'+}J:fzć2'('ދXr]ݜX{7WZܹs ^YUl%l :,δq9r +kt`z/ċiO}HƷN-)k Qj6K^=13e(ou!NdMo q$ɳG4-#_$yn4,A/ t:opMӟҤ(㸜ґeJ4199YYkjjcǎ1$I:!E:˲%Iҹsg.>>tTee%1=0;,eiQaJ955Uu: TUUNp83N;j8N^" 0O$IwXl`0M3J:s5lJ.c!yÐP Ⱦ?_io6/XeyϞ=sW.33S.9 /t)E%3>|RX"7~ԤI.tmm-|͠/(HQg#Fp d,#??ϗ_~㸒`{fĉ &u:(Ջm۶]q,)hdfMvv63n8SrroGmܸ:It:p(z- $ [Z9r$rssiU!"=Ș-fh]]U'`=Ȓ¨.aщYKrO1I#s3Ŀ澽%0/OvUy!T+? 
<5; iZ53JgNp(j<0fϞmt+jHjRN1gCǎ 05jnzp:cS:n#hr_JDi&ʀp!'EQLWy1]g6p $o*;~][8J< @o ˲FM<$ |$d?:b(Zc5cqaՁeYtqqq3gK D޽ ;vDCC9łlٹsg̘1ôdɒc[ccN󯁖߄`Xl۷O]r%G Nt, 2e!UeqӦM"0[EQ,|G@vv`$Ina0{8|'G:I( z>GQӧO7B;PZZ:OE7($%%E\H7N~ BKЍEdI%ļ4,lUؿZY`y,˫eGw{>w}Չ1#g#:n _p6E^w/KJJBR%Ͱj*$b57eYk̘1^bX^a<.-`x~~[o#v!m޼(DQ,=fѢEk͛g~C= 5htbttwoo%KO8VpunҥK>nf( &M 裏^w:]4L&S~Qi5}45Ԁқۺjۂ`O8ur]_y6ƁsZՠ]e&25bΜ9hX, .awN:IފUV5 0 :8O?IVknvZ }Hڷ~u#!!F&ѣG{EEE8}t ,3&,t8hW$EL3hB^( PE@ye"[՞9.<ϟa-++|q)]gƍؽ{7*+/zt a:uò[7ꁦ!^uee%.A Ir?b$->tk̤\Ẍ oƼ<'GdYܹsD}鍬,$mGP ݳEMHXNrM"o[+vy8ݪp}IRIһQPPlݺ{|H4,)={ӦM37 `ƌAK iWzWBBjXQQt>wm \8ٳa$OxΒ$4MFDxKPeee(-y:窚߿,:GZzE(Er=Nę^*I u\,;Vuߏ(/;AH8tЏk֬8L& >  b<թEQNo%M3TUU9[MDg20Lh|444P*ZنpPq zӭ}Oi򋱓ѲB=:R?ϏҔC,'lmXp2DQ~*Ȋ"BjmQIDpXX,E$I^RY@QTBX=g5FGGay ,@HUU},Wq:͞WxxVEQɾbf(exgO  _ 7[VJ]cv+U&Pů[K"R:]s(ޯKQToG%DQ,Sll,dYn$!MAб ˲ZHB X__/<_p%[B!tA e9-:[gU[[+ p<6\HĥSq~rШ;{-q#kXD̏?8j⃿n&,;VHa$ڵW__a]5i9i(5xK# u333,%& -仺=z Cח8q+gϞ,Nih_ۡCl$R]>imb: Iubju#aCowh9A)<쇝qԴ5鰁҇CtZEEfjh3dѸeمw}w3W"YeTMSSS30`a|I|0!!j}b PRR+ ĉY70#}=bA=(^h`X2qD#ǡWy{z˰,q7Պr~<^HZ DyZbOvOytCs>u9*n-$7Q59[#Ftt:0e5,˾u}5c6F 0w%-\wudNNN`8hšz#eΜ93ԩSMXH; áZo.3 $Iɞ63gzw4M0ȃnAzzz7Zlڴ#]} h4n4ǭ*\hBeL ĔL<&e'(A#H$ʪܝbt^DzO T%\'룻1UA_C}C/8B;E+--_~?[VVv;EQDZZ177Eeeefp8O_~hb=#mΝ˫`4l|e1o<ӏ?8av;Ç{444`}O>& 6/v37|33vXjر<(>(o(_YY9bxmTɓpGbٲevAn&pۻw޽{{ٚÇC>Z|H|-{i9?iCRj$wFBkBf8^P$qVj1q6l{9Gaa*9Peyyyy+8_'V+y{eee(my/xjmgϞd:UUuVy A]*Gؼy3~GUUUXpA$W\\W^I ͆իW ~=SYQoٲEI͎bؘf%Jpx䅕Ty%s:@YFDD,NNNA v;XUUвIJ$Iev IZ8x𠣠 ph4~vakʣYHHHj^@ȡ&[mtXT ; \c(Sf;'jQ.SS\qwDV# 6T-M ٯ ')vHJ*A*>}@2͌֨,Xଥ"9hh;_%'ᑎG7o2|I+8^'rl) VSjND/M,X?Bme06)nڍ+hŇmrMއvc ƅXJ].n:w80_-4e w @ Ζ8Oe{Y33]\ F|(.mtS(*)ܩVW(I"ӨN#t=_= :U 2#g]&5&G\)bi<ա/.p/x'wICJ _vJve+ ~<4+v9a?]m`Ji,m3r'@;A\HPs]v4]i#M4uʖ*vy\Ѵy6QY\Œij4#p64⢡MX743ڄVjs>Lt=,WA8- 4":ʎg.8߷ꑈ=Qd3|RNtLӐ稈NCx]@-zh0#]$4c DBc;4fZg=cЎOu_ m qw.z?jknws1qBx 3x`d<m p4?Ww.G\`+uѻEnWYs}/sA4WOw/0>j۽]Wh5xUW=r+; 4gχ1DIИ-$ea?W }ׇ'zųC PUHBHJ'+En66ɻIDATB2H29o$h#hIm]Js#ۍ"h!W7hʍ|4h+hj}*qљ13DC6λ@{NA> S"\- j`GY??C[E7YИW=_x}h} m9L+ߖmϚh\U-\~`;o!AQ5nnj 镸Оϓ.pnE@c@OiJf&h+ot+:iJhKVMQQ sA3㟸?m3 
>oJ-*@c|, 9pBh3Mɰc{ 93#JSoΪBAl*~/ Ж̫s[;GI")=-{hqeشd?{IENDB`buildbot-3.4.0/master/docs/_images/full_logo.svg000066400000000000000000000565441413250514000216650ustar00rootroot00000000000000 image/svg+xml buildbot-3.4.0/master/docs/_images/header-text-transparent.png000066400000000000000000000336301413250514000244300ustar00rootroot00000000000000PNG  IHDRDٗsRGBgAMA a cHRMz&u0`:pQ< pHYs  ~tIME9'dqtEXtCommentCreated with The GIMPd%ntEXtSoftwarePaint.NET v3.5.5I6IDATx^ՕW 0"0AdLT (I4yg'zrNJBA!16ذkcaAzpH6sWU{~5h 8{SYx_2/%%sk1 +~w9cwoFh DWƌ؃sy,ޓ(>aqڤY-LkJLI1K3LZ|I41ML- Zy~2wNʎ@.&n<M^4sGZM̯JZ1aq;IN.01=!פehBM[84m(ny8f7gQqѢsj`mI#d AQvJ欤<2CsR MjlZ@}%^ke*izT7Cp 1SLT;r7.~sg!ր'!|Z`\ D})`AeRO5K奲2u}$.!xQQT q֪jl~m)9/ɾy-_ 1<&Cf%-E&G\ֱ8rx,A{\m0:ʼ-z IK2B'ηY.rm< =h l7kdz|A BZ`-6y饦0~VGYye&[fd[Mcki%Hg d9I+@D(Wi0ͱ9E-Lo,9=zA5@Fro3/‰f&+<`nKy3(U]J hX׫mY6V5.bg+laΔ*g&i_R MYr.9%.o%ŏc/Z]?Z ۢL@-55_mE= 3+:Geq-  ʕ )B =񮠗":E9Ra敁eYab'̲dȩjck,̎wwޙ:239PY`E1t4XUV.ʪ2UELee.$PWf=``*uUC.sEmYwmB+A|Le{ՋWEd;ʢ(KrM@,<o$W.7-7'ֽa6=C̦mY~fY:S_iaOX-ͣ@tЂPeMR2e?IM99g{ gzʎ-ΪPdU:S|v)!Xֳ<,e_fh`{7}ϙzlxmooy̷_/7ϼl4uŭ 8ׁ2@POqSKM,+%fowr @BL¬j LU^RZTP*,@i]`-R2ĵ$k7E?hzoY`M3 +~jOO[/¼O,o4ļ.p_{m)-/>sֺ>⢲6oò,-A]JZ~\vrRsg=a\HU]xւX!hCZgj M@- $*MWzbM4gt@znw['d†w~gU s?6{'P v]Y1`Wb~U&_L/s@[י>D%R_dsr?~_ (/̫|S-PQU恑L0*Vh90(.płE&6%} mI0{&Y/7Ui{~f#h_y]=WO RG_6/{<'͚~SGƬNUW`Zߙ@}yUVu3]zRFk@^V 5Mk(mZWh*:6*cۢv;K\_fj MlL , fkjbųLYRt= ny7 Ro>)5}V&)wѼr_ګfê]ͲV]Vgڋ*Ls~bjSj;uק21}9Xh\XURXi✟=X7ޑ1J1+j2\iJ$*BJkUIHE5f]i6, c_qQWDJX(۬]M$7/?캗ýMwUi*(7"S+Դش2RPjj|BRQ2ĸL9O;UUJU*Kg͌}b8^9 ( ,(iKE-ĚLFQkZNʻdhs|Y[!kLbJ@mŲxhS[4.)3e6i^/M4O1S)[hQՖRӔW"+E6Njv̔ @ZUkRZ6]vs0Sn5=^n+7صպ %-vחjyuؖTa1;;/_mhI8+q'L|r̙3Sǜ|Q#Gr,6=[TB -ږe٤WFQLQ<)ռbrFSuR[Ư..jΞ4mpf5pmޑ;rUu[7mRwTUPLQ#Yj\_nI6J2ܗcōM4NR\bK]--50B~csv `Me[gamlԵ!Ȕx2_q}Qb+̼O&Ÿ6в>G5f3'Nh] ͂;Zkat]F 7[t f f][nOt`dok8b3fAFsϗkjʤXƴ0)@a}Ae8$@kYk-ICI)<3ܫ6p =aTyp{mI*ZT&Av(i{M], fq[[of[%Dx 5v)wMMl䈑fg:3:o~R)f |/w-[J:f><[{؝{~:떙>^cmS N\dTN&w[͂YX2ӮcN:ygHGaN88]t-bu^Yl*-2 *ju}Z ̔R/Ŝ3qʆaw'4jRnBb*jP~l! 
(@qQ&Xv]\zs[̑Go{;#Gc4b2ru1b/yuĨѯLj=ȣL=uK4-05D6PVoeA)[Ldcrͤ6gMR;nl] zl)'V+7ttMr /1+ڭ,CUc(lsE]@e>~{wZ{y#=h.hpenTU F`|M] l_oY~)ŝfKISY.qaF|We}Cv.^O;G"{T',4!pLNV/;pǿns6Jz2*2\c~^a:j[[kԖiW\)RWF@GN;L>,Yİq|6-J Kr1|S+qKsimY)xYEX.-o5qKr̤s2L|fu9ȕ7p6uuumVAf…WL;ٿQG,[c~|wBbZ[[?Kc;xQzUW8S__}_{ׇ\p;wLٴ4ʾ>A Ua&V^Y`*ZI,+({,R[AI&),6WNAݬZŽ)fν&qDzjYY(8*Y'VU{C2=mUprMmIy6~={•2iPyg(7/ſ{퍕&??ߠXc_r%ن(==ݜs9 ]Gyq=襤$?5:1G~.(.[YOv{slv iH"2 F_n]cMVvIU[p;Z5rU\\M5KC5Bj*[/ԕv]\خ9.nR%PkK^n*ĎwUT)ī(o ^f:ŜyOr@jyɁwnn{;8='%ɓ*Gnz/ZE*fcc7nsm'Fj+w;D@ζu_R_7G*&tmog(ǑF"C7Z .[/oM}eڑDU B6xZ& }6Xiz`JeV0,qw`&Ε6UVP-WFwvjR3fg̽O,꼵ryw緵%HWH髖1J|IUCP֗;]Vu2L8mY Üw5f+!i璓]=øuِ̪;spȑ#Q]2 5wkx #9b; ,?"E;Xp?~89SMllYhucpY$5 o3@ƨ݁eRX*xumAm<[1Ƹ̨mRur*k[ vͷT/3M}(Q@|].E|eoU귴 }U5+wdKiMaY[md}WMe6̲fsϛj?ٜvUm6ԕ}0jԨ&1͡ {qDž+s< oq^t`V/Gןn%x4538>COȂ~MA׸+/w@n,[la\ d$R^UZ2u,I|gVpkXnW`TYwpS _c.k[ AUR`q9X,x).pOTK@!.%%qsgptD:kwx 9/:{ˣXyg{?=E, -mUOEWVn +{fe3e>(lW)JmA(ig`@Jgxk/j Mkߥڊ_lr rYpL@&ƭ- മv=w%Zw B(u>*//wuCʤ!,rv'\* ܸ;k؉?=6)'_*\\d)/o^c&# E?mmvl*#xy6^;r;PY@$E<%porr(O" $sX0HA7*Gs0yCc Ⱦ.KʍW\wb\bFeQ*(6ߒ|]yiloIC͂ a}d#ύFI  Rlk8%$~ |ТkF!]%;8qrBlYyOf-MzQX)N*#p7ºU1mCv8e]FۺZZWX7 W[iv,@,%ehx^p`mHUPh;Xn6RcmblNz)ܖKݶ].U '`oțxz [;g 0ln7 n9ꫯfi;,u111I>b<01qot1bA|?h"됲w e>E,J/A4@Ї->Nտ[*%ϟoaph6i~9IʍK +pQY=nSŭ⶗7dX0vP`]xJt1*Xt$PUvĪ,g;e6'f\o_ k3%M7͗;|C8u{F ^hfϞm\HJ'$$ MCa$.x]7p"of`R঍,,OIݯVݠAz8 %rߌP'@r P`lA~6CL1cew1J-V.>S엨|tIZV,#x&MU^B)+Y f|=\FD9]?N%H(pB]WT]f6n-dZv71VAĽ@h'Z%seMmլ1uST\1Ā| IMi84m'h>m4kĆȶȨ~s]4l!KYbMcdvA^d9uwP.r_җ ʓq>=o@뤡Qpe&+z5\~LWl_=&Oܼ‹7_tݟ~k3tW&N,skIz[8N7on%9Dn`w1wGD:r; ,!1`DŽ*OxžĴnYs Zϛ6![o"&`=Jӳe{ۻsij "|pgGZKAE+PVkqa)|]9{絎r,ƣpC`*ST6GS2YpgN6\xֹ/fʮ],Э(/2e+d8uÿ28/K!q `'{͵ΕeG)[Wk(!uTkvF`J۹{ں?xMDljgB,P_&ק$ס6= !:)PvZ:1WU/ e^gDeQMܒUUgRF} lwNUms-]+WvciNwt),_N? 
ؽ}z: {j]I6 ӱ z4u+ kJNC<=H7쩲dnJB}˼i/V:㐁ٟU/X[LN}=٤{LvVZLKog͵* d@廅;laB-GE^yTpY@]<媙fgiй2bdȎ; zcp8y9 XOn]E<ê/2ԑ@UUU9]5Q{W@]7;=%AnV7(qWdeRd2n˟KLį.cbjP[;:J@ƹ&Cfެ,G5\2> ('%N6 6"Z1@LmPZD1!^f!tŗN7 Y&e 2bZ"cS>NB7z]XZ/j&Z0 L2dAXm{@<*jLy^ r9J<orJ@Ͽ^eŪ5\d2Ylzٹce`y˃HR1,u^7}*rvK~ p6I'NwL1s\Z7ڊ4ȡĞcL0lFo ŀ{hr@ oALmNޝldݲeO^XR@kX\d7<_>zQشz *)n.pgI%QU\Ntlzԕy`QKN*:mYSbinܸй<jh2d dNE)D_YJYkF\/l\ˍ>$p#o Zs7Gnȇ̶`*@~F{P| 6Ӌncm#^ʴZT(%[ f~uDŽJB}҇Rv{dKG2eeı2tq2hǩçAM7+n|LQOtscŭ,c|$v m|0c UoAn.4O3 Ϲ/Ѿ6L5 G oq!B5 s-[d<ֺ?[5$ 6A2pSPJȋCOt<SOKq4Z^һu?F9h^IKTGЈ{Lt)1nkgt#tsIF}t{I%[!C3V-+;6i39N#K#@ : Ny'`KQ)dIL05e ̳uƵfHQ(/\g\4H .v.> įJYV$K-&/;R<ǬUt~S(cRWA.p";7c eTpi[Bzo-# skD8nS9+jJ2XCh(1Sveh!K#ss[}d'̱Ps|xĜ'NP/,g߀zŭO?#7IþyЃs^/^`9*22dJɘ9ۃQ۰?[GJ&ɜT92B &SH%>F':l)oKjtczy eQqE<Ǿ܁=.҈St!]?V6 ==e}d<n3:ybVPbBt]|Dٖ.E]2(17O)R͵+3{օt󣺣edqQ/f2 Kd| Cq# e(%e(640F“ͦW@y4ibjѣ5 0xcn3 ,j@'iKπE/;07Ha[4 s'&Fdkfg5 hP`2͌y[@&~$>"ͣ@3r2`Q7}DP!G@2ǜ﬋v|.hw[ zIENDB`buildbot-3.4.0/master/docs/_images/icon.blend000066400000000000000000002737241413250514000211210ustar00rootroot00000000000000BLENDER_v236REND SceneSRd+WRSR1-AnimationL, /L/33 M5DATAL,X,DATA,X,L,DATA,X -,DATA -XL-,DATAL-X- -DATA-X-L-DATA-X .-DATA .XL.-DATAL.X. .DATA.X.L.DATA.X /.DATA /X.DATAL/Y/,,DATA/Y/L/L, -DATA/Y 0/L--DATA 0YL0/,-DATAL0Y0 0, .DATA0Y0L0- .DATA0Y 10L-L.DATA 1YL10-L.DATAL1Y1 1-.DATA1Y1L1 ..DATA1Y 21L..DATA 2YL21- .DATAL2Y2 2L--DATA2Y2L2L,.DATA2Y 32L-.DATA 3YL32- /DATAL3Y3 3 - /DATA3YL3. /DATA3[<L,. / -J9:48DATA4Z5Link and MaterialsEditing>DATA5ZT64MeshEditingF>DATAT6Z$75Anim settingsObject>DATA$7Z7T6DrawObjectF>DATA7Z8$7ConstraintsObject>DATA8Z7EffectsObjectDATA9I:333?\<@DhC)DhCC(BDC?z?DATAT:K9333?\</9DATA<[<3-,, .DATA<[DC<L--.L.|+9=U_=o?  #$S?A=>DATA=Z>Transform PropertiesView3d>DATA>Z=3D Viewport propertiesView3d>"DATA?DA333?e|????????|+9=U_=o?;AkA?|+9=U_=o??????;A B?=CFF DATATAK?333?e|/9DATADC[ M<L.. 
.-??Pף  #$DK,D,DDATA,DZTransform PropertiesIpo!>DATADH F333?kzC̽̌?zC@ #< #<`jFzD OBzC̽̌?DATA FLHD333?k@zAAQAQAB A@CC #<@G\HDATA,GnHETADATA,Hn\HGBO`ADATA,\HnHBOp=ADATApHOLI F 333?k6 j>DATALIDKH333?k??? ???? A???PA A!O?j?}GCHB? A B? #<C@h@hDATATKKLI333?k/9DATA M[DC.L-- / 'O\QMNDATAMZNLink and MaterialsEditing>DATANZMMeshEditingF>DATAO\P 333?v<zCCHBC'?CFC= ADATA\PI\QO333?v<#DhC`DpJgChCC(BDC?z?DATAT\QK\P333?v</9SRdRW$+SR2-Model lS,UlUWWDATAlSXSDATASXSlSDATASX,TSDATA,TXlTSDATAlTXT,TDATATXTlTDATATX,UTDATA,UXTDATAlUYUSSDATAUYUlUlS,TDATAUY,VUlSlTDATA,VYlVU,TTDATAlVYV,VlTTDATAVYVlVSTDATAVY,WVS,UDATA,WYlWVT,UDATAlWYW,WlTTDATAWYlWT,UDATAW[lSlTT,T?@Pף4{mX4~DATAXZYPreviewLamp>DATAYZtZXLampLampF>DATAtZZD[YSpotLamp>DATAD[Z\tZTexture and InputLamp>DATA\Z\D[Map ToLamp>D[DATA\Z]\PreviewMaterial>DATA]Z^\MaterialMaterialF>DATA^ZT_]PreviewWorld>DATAT_Z$`^WorldWorldF>DATA$`Z`T_Mist Stars PhysicsWorld>DATA`Za$`Texture and InputWorld>DATAaZb`Map ToWorld>`DATAbZdcaOutputRender>DATAdcZ4dbRenderRenderF>DATA4dZedcAnimRender>DATAeZe4dFormatRender>DATAeZfeLink and MaterialsEditing>DATAfZtgeMeshEditingF>DATAtgZDhfMesh ToolsEditing>DATADhZitgMesh Tools 1Editing>DATAiZiDhCameraEditingF>DATAiZjiShadersMaterial>DATAjZkiTextureMaterial>oDATAkZTljAnim settingsObject>DATATlZ$mkDrawObjectF>DATA$mZmTlConstraintsObject>DATAmZn$mScriptlinksScript>DATAnZomEffectsObject$mDATAoZdpnMap InputMaterial>DATAdpZ4qoMap ToMaterial>oDATA4qZrdpAnimAnim>DATArZr4qSoundSound>DATArZsrListenerSoundF>DATAsZttrSequencerSound>DATAttZDusRampsMaterialF>]DATADuZvttMirror TranspMaterial>iDATAvZvDuShadow and SpotLamp>DATAvZwvMist / Stars / PhysicsWorld>DATAwZxvAmb OccWorld>vDATAxZTywPreviewTexture>DATATyZ$zxTextureTextureF>DATA$zZzTyColorsTextureF>TyDATAzZ{$zVoronoiTexture>DATA{Z|zRadio RenderRadio>DATA|Zd}{Radio ToolRadioF>DATAd}Z4~|HooksObjectF>TlDATA4~Zd}Particle InteractionObject>$mDATAI333?WDhC?e1D~>pCC(BDC?z?>mDATADD333?W??? ???? A??@PA Aj?c3>}GCHB? A B? 
#<CzzDATADL4333?W@̌AR|B1@lA A@CC #<@DATAT4KD333?WSave PNG/usr/home/warner/stuff/Projects/BuildBot/sourceforge/docs/images/logo.png :DATA[WTSS,ŰDATA[lTT,UT?p? JL  gTT̀T̀DATAZTransform PropertiesView3d>"DATATD333?DwF?Oؾ>!?Dt?jCl^1?F7?3?o̿b?CwF?!?ClDؾ>t?`1?>j@7?r@Z orQA?rY?8U^𾺚,0?v?x?j?410?7F7?2JhւAbAt5?8?W4[gy>d>Q{Z,BžA^*i~B@?p? JLDwF?Oؾ>!?Dt?jCl^1?F7?3?o̿b?`?! d:IA B?=C=r @lH3@DD DATAIT333?DdC8CnD fCC(BDC?z?DATAH333?zC AzC A #< #<`jFzD OBDATATK333?Save PNGTPUT PICTURES/usr/home/warner/stuff/Projects/BuildBot/sourceforge/docs/images/logo.pngicsblend >SRd$W|ŁRSR3-Materialodel Sing4ľ5DATAXDATAX4DATA4XtDATAtX4DATAXt,DATAX4,DATA4XtDATAtX4DATAXt,DATAX4DATA4Xt DATAtX4 DATAXt DATAXDATA4Yt4DATAtY4tDATAYtDATAY4tDATA4YtDATAtY44DATAYt4tDATAY44tDATA4YttDATAtY4DATAYtDATAY4tDATA4YtDATAtY44DATAYt44DATAY4tDATA4YttDATAtY44tDATAYt4DATAY4tDATA4Yt4DATAtY4DATAYtDATA[t?@Pף++ ,ܗLDATAܗZOutputRender>DATAZ|ܗRenderRenderF>DATA|ZLAnimRender>DATALZ|FormatRender>DATA䛁 333?zCCHBC,?CFC= ADATA䛁I䜁333?DhC?DhCC(BDC?z?mDATA䜁D$䛁333???? ???? A??@PA Aj?c3>}GCHB? A B? #<CzzDATA$L䜁333?@̌AR|B1@lA A@CC #<@DATATK$333?/9DATA[|44tDATA|[TtG@? JL - -G Hh~ԯdDATAdZ4OutputRender>DATA4ZdRenderRender>DATAZԥ4AnimRender0>DATAԥZFormatRenderH>DATAZtԥPreviewMaterial>DATAtZDMaterialMaterial>DATADZtShadersMaterial0>DATAZ䩁DTextureMaterialH>DATA䩁ZMap InputMaterialD>DATAZ䩁Map ToMaterial`>DATAI333?lCqhCC~qLmCC(BDC?z?hyDATADĮ333?lJ>H!?)xu?i6>Pbܗ=(?^L?a?滎z?J>#xu?ܗ=Hi6>(? ?Pb^L?\FAA?4[?[!?@jG>gb>Pb>4Im>k&?]M^L<0@AzA=>Kc<2֍7&> ?:x¸A1vB,jB~@G@? JLJ>H!?)xu?i6>Pbܗ=(?^L?a?滎z?@?$$OA  B?=Ch?j(4?t'>SSdDATAĮHԯ333?lzC AzC A #< #<`jFzD OBDATATԯKĮ333?l;SAVE FILE/usr/home/intrr/blender/blend/untitled.blendDATAT[ |4tU.=z=o?- -GH S<DATA<D|333?\???h?j(4?t'>????hj(4t'?U.=z=o??OA  B?=Ch?j(4?t'>dDATA|I|<333?\DdC>9C,DeCC(BDC?z?DATA|H|333?\zC AzC A #< #<`jFzD OBDATATK|333?\ SAVE FILE/Users/ton/Desktop/der/blend/untitled.blendDATA [ľTte?8?AHM    SDDATAD4333?,L?В ?K?ȳ>?M?K?<ȳ>T5L?В ?ZI? 
@μ@?ƾ\?lU В (??3>>]`b4AAVl>K?x<4쓾S>>3ApןAAA)@e?8?AHML?В ?K?ȳ>?L?ՙξAE;OA  B?=Ch?j(4?t'>dDATA4I4333?,DdC>9C,DeCC(BDC?z?DATA4HD4333?,zC AzC A #< #<`jFzD OBDATATDK4333?, SAVE FILE/Users/ton/Desktop/der/blend/untitled.blendDATAľ[ 44(.=^=o?    SÁDATAD333??3?3^I2(o(4?? 3?3^=i(4J2(A?(.=0n;^=ꉖW5jOT{:?OA2 5AC^=k(4J2(A?(.=^=o??3?3^I2(o(4?5?5OA  B?=C^j(4?J2( Z ZdDATAI333?DdC>9C,DeCC(BDC?z?DATAHÁ333?zC AzC A #< #<`jFzD OBDATATÁK333? SAVE FILE/Users/ton/Desktop/der/blend/untitled.blendSRd|ŁW$SR4-Sequence Ɓȁ ɁĹ́ 5DATA ƁXLƁDATALƁXƁ ƁDATAƁXƁLƁDATAƁX ǁƁDATA ǁXLǁƁDATALǁXǁ ǁDATAǁXǁLǁDATAǁX ȁǁDATA ȁXLȁǁ\DATALȁXȁ ȁ\DATAȁXȁLȁ\DATAȁXȁDATA ɁYLɁLƁƁDATALɁYɁ Ɂ ƁƁDATAɁYɁLɁ Ɓ ǁDATAɁY ʁɁƁLǁDATA ʁYLʁɁ ǁLǁDATALʁYʁ ʁLƁǁDATAʁYʁLʁƁǁDATAʁY ˁʁǁǁDATA ˁYLˁʁ ǁ ȁDATALˁYˁ ˁǁ ȁDATAˁYˁLˁǁLȁDATAˁY ́ˁLǁLȁDATA ́YĹˁ ȁLȁDATAĹÝ ́ ȁȁDATÁÝĹLȁȁDATÁY ́́ǁȁDATA ́YĹ́ǁȁDATAĹY ́ȁȁDATÁ[4ԁ Ɓ ǁLǁƁсҁt΁ЁDATAt΁ZDρOutputRender>DATADρZЁt΁RenderRenderF>DATAЁZЁDρAnimRender>DATAЁZЁFormatRender>DATAсIҁ333?uDhC&ԓDhCC(BDC?z?DATATҁKс333?u/9DATA4ԁ[Ձ́ǁLƁƁǁDATAՁ[ځ4ԁ ǁ ȁLȁLǁ8=i>o?[  [P ցفDATAցJց333?}|zCAzCAPP A@FB= A DATAցDفց333?}|????????8=i>o?fffAD&@??fffA B? #<CDATATفKց333?}|AVE TARGA/t1.blend9DATAځ[ ՁȁȁǁLȁ8=H>o?]]]]S |ہށDATA|ہJL܁333? zCAzCA1||1 A@FB= A DATAL܁Dށ|ہ333? ????????8=H>o?fffA*@??fffA B? #<C>>DATATށKL܁333? AVE TARGA/t1.blend9DATA [ځ ȁǁȁȁ8=>o?]]wx8DDATAH333?ሜB̽̌?B̽̌?88 #< #<`jFzD SQB̽̌?DATADD333?ሜ????????8=>o?fffA@??fffA B? #<CzzDATATDK333?ሜOAD FILE/9SCBSCScenetageainT Ǿ,e^R@<dd??< dXdd?? 
Z@@???//backbuf/usr/home/warner/stuff/Projects/BuildBot/sourceforge/docs/images/pics//ftype@&#@^@&^#DDATA< DATA<$lDATA<d 2dDATAd< 2DATA<dDATA<'(DATA(<;,d'=A@CAlCACameraamera.001=BA?LA$<LASpot?[?d??+r?AB>??@ AA4B?@@???LA<$LASpot.001*`@?coF???+r?AB>??@ AA4B?@@???WOT:WOWorldg=pb>>===??A@pA A?L=DATAX??????????L>OB8dOBCameraamera.001 뽾-@???B?;ļb"N???Nb<9?Z/?Lռh/]9?뽾-@??????/N0\0??2ޯn?^1_ S_3?OBd??)d??>)d????$DATA$7OBd8lOBCylinder.001 A=A> =G>G>???G>G>?A=A> =?????G @˄<`D?|>Uj>=?@???OBl8dOBCylinder.002dOtdOttA=A> =Ǣu?Ǣu?z?Fӿ??{t{t?z?A=A> =?????dH?ݐ<*@ U>O7?E=+?@/c> K@?DOBd?? #=?>=?@???DATAtOB8lOBLamp $L'XE@???[M{?[⽈ r??P?JNv=>>IS?54߾_ ?L'XE@?????$?l>0uyZ?k=s? >s98?x@!@=i?DOBd?? #=?>=@???OB8OBCylinderA=A> =????????A=A> =??????Nc<9?[/?Lռh/\9?q9&̝&@?DOBd?? #=?>=?@???OB8OBLamp.001 </{@@??? {?9?>D>??)3j?>>şsI ?RP?~>r_>z?9\%iϾP,L>gkd?B`7@V?DOBd?? #=?>=@???MA`!MAmetal"h?"h?"h???????????L????2 ?????@?=?=??D?DATAXD ??????????L>TETETex.001\>@???????@@????? @ ??<?ME+ MECylinder  ٮ?ٮ??DATA0??GGٮ?gag>ٮ@IIٮg>@gٮ?a???GGNٮ?g?aNg>ٮ?@N?IINٮg>?@Ngٮ??aN?DATAh ,             @@   @ @ DATA  1H;;;<<<555;;;CCC<<<;;;@@@CCC;;;888@@@;;;444888;;;555444555<<`[-<?I3*]<?B`[<u=?Ћ VY<?L#IwM><?A <L#I?3<Ћ ?u=#<B>-<%e֢<B<ދ r=Y<Y#I>̉< <?#I><x=‹ Y<B<gH5֢<B>-<o= ?#<f#I?3< ?A<3#I ?>wM< {=?YV<hB?`[<[6?*]<B>?,`[< ?l=?#V<r#I? ?3wM<???AAW%?&#I??wM3W~=? ??V#W?JB>?`[-W?RU?*]W?C?`[Wn=? ?VYW?c#I?wM>W??A WI#I??3W׋ ?t=?#WB>?-W ~5?֢WqB?W |=?YW$#I%?>̉W? W#I?>We= ?YW}[C?W¶?֢WA>?,W=h ??#W:"I??3W0??AW#I??>wMWb W=??YVWCu??`[W,??*]WJA>??,`[W# ?=??#VW"I?O??3wMW?DATA,,`@A! @A"!@A#"@A$#@A%$@A&%@A'&@A('@ A)(@ A*)@ A+*@ A,+@ A-,@ A.-@A/.@A0/@A10@A21@A32@A43@A54@A65@A76@A87@A98@A:9@A;:@A<;@A=<@A>=@A?>@A ? !@!"@"#@#$@$%@%&@&'@'(@()  )*  *+  +,  ,-  -. ./ /0 01@12@23@34@45@56@67@78@89 9: :; ;< <= => >?  ? ME+ MEMeshQt48c48c<2Q( ?@ ?@6,?DATADATA 0(?hFǾ6,ʶ?V6,?@N)36,?B>6,į?E>6,ɤ?ɤ?6,T @T @6,GG?@uM6,aɤ?ɤ?6,?į?%F>6,??B>6,??@K6,?ʶ?6,??WFǾ6,? ?@suM6,?aNT @T @6,?GGN}?Ry6,鷼?V6,`?~ 6,?跼?V6,?{?Ry6,?_? 6,??Z?@6,llW?@6,?llWZz=? ?6,!?0#I?6,? 
i?6,??6,? i?6,?%?*#I?6,?s=?̋ ?6,???6,?fw??6,P:>)/@6,llWfw??6,?:>"/@6,?llWZ۾?6,@6,?puM?@6,a۾?6,?uM?@6,?aNȊ@6,??Z*-?D?6,S ?=?6,UA>?6,>ʶ?6,w>ʶ?6,?MA>?6,? ?~=?6,?+-?:?6,?6,1p>6,B>6,9>?6,uuM? ?6,@T T 6,IIQ>=6,?B>6,?1p>6,?6,?T T 6,?IINuM??6,?@N]; 6,X; 6,?ZӋ ?u=6,]+-?6,^+-?6,?֋ ?u=6,?Ý?6,zt1p?6,̿9>6,?uM?6,@s?6,?̿>6,?y࿁1p?6,?Ý?6,?q?6,??Z!?SuM?6,?@N5ys6,GpDp6,2yt6,?ApMp6,?Ay6,+C6,j= 6,n+-6, +-6,?g= 6,?B6,?Aܿ6,? *x4?6,gB?6,r]?6,aB?6,?@_ ?6,?@x]?6,?V淼6,i6,Lʶ6,56,$v56,?Zʶ6,?i6,?V緼6,???6,??6,?_#I??6, #I?3?6,?J#I?6,?6,??-Q6,?=-Q6,??6,?J#I?6,?5@?6, }=?6,Ipep?6,Mpkp?6,? =?6,?tB?6,??B6,=?B6,?B6,?@?66,??|+-6,n=? 6,s=?Ӌ 6,??z+-6,?K#I6,6,|#I6,? 6,?#I-6,O+-6,L+-6,?2#I"6,?ºfw?6,~+-?6,t=׋ ?6,?U>6,=~ ?6,?+-?6,?ƺfw?6,?H">6,?B>6,ʶ>6,ʶ>6,?B>6,?5#I!?6, i?6, i?6,?>#I%?6,??6,6 #I?6,a#I?6,??6,?>56,|56,?׋ t=6,B6,B6,? {=6,??P#I6,?_#I6,?`? *>Z];  &^͊@ =Z?@[> !{ڒ?R?6,?ͮ?^w?6,ͮ?F#I?6,#?.#I?6,???6,???6,1? ?6,?f;?>?6,u=?͋ ?6,}=? ?6,?ݯ?MX>6,?6,?B>6,w?[B>6,?w,t?7=6,?uM?ml>6,u?j6,?]'16,?J?l6,? ,t?76, ?B6,w?B6,?w6?6,?'%ݯ?X6,'%t=?֋ 6,0p=? 6,?0^;?>6,?V<(?/6,V<?P#I6,G?_#I6,?G ?kw6,?3Q ڒ?R6,3Q?6,Z?6,?ZR? ڒ6,?ͮbcw?6,ͮbL#I?6,ljJ#I?6,?lj1?)6,?p>?d;6,pҋ ?v=6,@v֋ ?u=6,?@vX>ݯ6,?|z><꨿6,|zB>6,}B>6,?}a7=+t6,?ual>N6,ua56,$v56,?+lP6,? a]7,t6, aB6,}B6,?}yA꨿6,?'%|zXݯ6,'%|z׋ t=6,0@v {=6,?0@v>n;6,?V6,pV<u=ы 6,@v0i= 6,?@v0ݯ,Y6,?|z'%=꨿6,|z'%B6,}9C6,?},t*76,?a Pl6,a Y46,t6,?V>l>6,?au.tj7=6,auB>6,}B>6,?}O꨿>6,?|zݯX>6,|zt=׋ ?6,@v=~ ?6,?@v;>?6,?p+2?6,pQ#I?6,lj3"I?6,?lj6w?6,?bͮ ڒR?6,bͮ?6,Z'?6,?ZLRْ?6,?3Qdw?6,3QK#I?6,G#I?6,?G?6,?V<>f;?6,V<ы v=?6,0F ]=?6,?0Yݯ?6,?'%??6,'%B?6,wCy?6,?w7,t?6,? lP?6,  *x4?6,@_ ?6,?Ql>\?6,?ut7=/t?6,uB>?6,wA>?6,?w>Y?6,?ڄX>ݯ?6,ڄ׋ ?t=?6,G ?=?6,?n>?;?6,?4?+?6,R#I??6,且"I?D?6,?且R?ْ?6,ͮw??6,?ͮDATA<2,         ! !"##"%&$)'(! 
*$&!!*+-$!!+,,-!('.1"##(.01##.//0#672456623346=89:;<<=99:<7>2<;?65A5@@ABC8==BBC=GHDEFGGDEMIJKLMMJK2ONN32;:PPQ;>GFO2>>FRUO>>RSTU>>ST?;QYIMM?QXYMMQVWXMMVW$-Z[\$$Z[^.''_]]^'H%$\DHH$\LK__'))L_3N`c433`abc33abgP::9dfg::deef:hihj* j*"1kki"knAlmnlmqBopqopED\\rstE\\st_KJvw__Juuv_\[rw]_yxx zz{ ny}n|}|o {~{~OOUVQQO``NOQPggQ`a``gfgghiFEtFFtFFuJJIJJJFRFFIYIItsttvuuux z RSRYYXA@lqCBabaaffefnm|npooUTWV*j+0k1-,Z^/.bcde|}~ @@=6  6 @=@@6<7 <6= >7@MG>@>M@?M@7<?@?7@HG (&% H( (%H )(@GML@)G@GL)@ &(# & &# !&  ! ! @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@                                @@@@@@@@@@@@@@@@@@@@@@@@@@@@ @ @@@                                 !  !   %"# #$%  #" "!  $&' '%$ '& & DATAQ1 ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss
ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss
ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssGLOB\R 
DNA1V?SDNANAME*next*prev*first*lastxyzwxminxmaxyminymax*newid*libname[24]usflagpadid*idblock*filedataname[160]totcurvecurblocktypeshowkeypostyperttotelem*dataname[32]sliderminslidermax*refkeyelemstr[32]elemsizecurvalblock*ipo*fromtotkeyslurphactkey**scripts*flagactscripttotscript*linelenblen*nameflagsnlineslines*curl*sellcurcselc*undo_bufundo_posundo_len*compiledsizeseekdrawzoomholdclipstaclipendlensdrawsizeYF_dofdistYF_apertureYF_bkhtypeYF_bkhbiasYF_bkhrotscriptlink*anim*ibuf*mipmap[10]oklastframelastqualitytpageflagtotbindxrepyreptwstatwendbindcode*repbind*packedfilelastupdateanimspeedreserved1texcomaptomaptonegblendtype*object*texprojxprojyprojzmappingofs[3]size[3]texflagcolormodelrgbkdef_varcolfacnorfacvarfacdispfacwarpfac*handle*pname*stnamesstypesvars*varstr*result*cfradata[32](*doit)()(*callback)()versionaipotypedata[16]*ima*cube[6]imat[4][4]stypenotlaycuberesdepthrecalclastsizepad1noisesizeturbulbrightcontrastrfacgfacbfacfiltersizemg_Hmg_lacunaritymg_octavesmg_offsetmg_gaindist_amountns_outscalevn_w1vn_w2vn_w3vn_w4vn_mexpvn_distmvn_coltypenoisedepthnoisetypenoisebasisnoisebasis2imaflagcropxmincropymincropxmaxcropymaxxrepeatyrepeatextendcheckerdistnablaframesoffsetsfrafie_ima*nor*plugin*coba*envfradur[4][2]modetotexenergydistspotsizespotblendhaintatt1att2bufsizesampshadspotsizebiassoftray_sampray_sampyray_sampzray_samp_typearea_shapearea_sizearea_sizeyarea_sizeztexactshadhalostepYF_numphotonsYF_numsearchYF_phdepthYF_useqmcYF_bufsizeYF_padYF_causticblurYF_ltradius*mtex[10]layspecrspecgspecbmirrmirgmirbambrambbambgambemitangspectraray_mirroralpharefspeczoffsaddtranslucencyfresnel_mirfresnel_mir_ifresnel_trafresnel_tra_iray_depthray_depth_traharseed1seed2mode2flarecstarclinecringchasizeflaresizesubsizeflareboostrgbselpr_typeseptexpr_backpr_lampdiff_shaderspec_shaderroughnessrefracparam[4]*ramp_col*ramp_specrampin_colrampin_specrampblend_colrampblend_specramp_showpad3rampfac_colrampfac_spec*renfrictionfhreflectfhdistxyfrictdynamodepad2name[256]scale*bbi1j1k1i2j2k
2selcolexpxexpyexpzradrad2smaxrad2*mat*imatelemsdisp**mattotcolloc[3]rot[3]wiresizerendersizethreshvec[3][3]alfas[3][2]h1h2f1f2f3hidevec[4]s[2]mat_nrpntsupntsvresoluresolvorderuordervflaguflagv*knotsu*knotsv*bp*beztnurb*bevobj*taperobj*textoncurve*path*keybev*orcopathlenbevresolwidthext1ext2spacemodespacinglinedistshearfsizexofyof*strfamily[24]*vfontmaxrcttotrctadrcodevartypetotvertipoextrapbitmask*tpageuv[4][2]col[4]transptileunwrapeffect*mface*dface*tface*mvert*medge*dvert*mcol*msticky*texcomesh*oc*sumohandletotedgetotfacesmoothreshsubdivsubdivrsubdivdonesubsurftypecubemapsizev1v2v3v4punoedcodecreasedef_nrweight*dwtotweightco[3]no[3]co[2]pntswtypeutypevtypew*defdvec[3]max**obdeflectforcefieldpdef_damppdef_rdamppdef_permf_strengthf_powerpartypepar1par2par3parsubstr[32]*pardata*parent*track*action*pose*activeconconstraintChannelsnetworkdefbasedloc[3]orig[3]dsize[3]drot[3]quat[4]dquat[4]obmat[4][4]parentinv[4][4]colbitstransflagipoflagtrackflagupflagipowinscaflagscavisflagboundtypedupondupoffdupstadupendsfctimemassdampinginertiaformfactorspringfrdampingsizefacdtdtxactcolpropsensorscontrollersactuatorsbbsize[3]dfrasactdefgameflaggameflag2softflaganisotropicFriction[3]constraintsnlastripshooks*pd*soft*lifelbufporttoonedgemat[4][4]cent[3]falloff*indexartotindexcurindexactiveforcemistypehorrhorghorbhorkzenrzengzenbzenkambkfastcolexposureexprangelinfaclogfacgravityactivityBoxRadiusskytypemisimiststamistdistmisthistarrstargstarbstarkstarsizestarmindiststardiststarcolnoisedofstadofenddofmindofmaxaodistaodistfacaoenergyaobiasaomodeaosampaomixaocolorphysicsEnginehemiresmaxiterdrawtypesubshootpsubshootenodelimmaxsublamppamapamielmaelmimaxnodeconvergenceradfacgammasxsy*lpFormat*lpParmscbFormatcbParmsfccTypefccHandlerdwKeyFrameEverydwQualitydwBytesPerSeconddwFlagsdwInterleaveEveryavicodecname[128]*cdParms*padcdSizeqtcodecname[128]mixratemainpad[3]*avicodecdata*qtcodecdatacfraefraimagesframaptoframelenblurfacedgeRedgeGedgeBfullscreenxplayyplayfreqplayattribrt1rt2stereomodemaximsi
zexschyschxaspyaspxpartsypartssafetyborderwinposplanesimtypebufflagqualityscemoderendererocresrpad[2]alphamodedogammaosafrs_secedgeintsame_mat_reduxgausspostmulpostgammapostaddpostigammadither_intensitypad_ditherGIqualityGIcacheGImethodGIphotonsGIdirectYF_AAYFexportxmlyfpad1[3]GIdepthGIcausdepthGIpixelspersampleGIphotoncountGImixphotonsGIphotonradiusYF_numprocsYF_raydepthYF_AApassesYF_AAsamplesGIshadowqualityGIrefinementGIpowerGIindirpowerYF_gammaYF_exposureYF_raybiasYF_AApixelsizeYF_AAthresholdbackbuf[160]pic[160]ftype[160]col[3]*camera*world*setbase*basact*groupcursor[3]selectmode*ed*radioframingaudiozoomblendximyim*rectspacetypeblockscale*areablockhandler[8]viewmat[4][4]viewinv[4][4]persmat[4][4]persinv[4][4]winmat1[4][4]viewmat1[4][4]viewquat[4]perspview*bgpic*localvdlocalviewlayactscenelockaroundcamzoomgridnearfarmxmymxomyogridlinesviewbutgridflagmodeselectmenunrtexnrverthormaskmin[2]max[2]minzoommaxzoomscrollkeeptotkeepaspectkeepzoomoldwinxoldwinyrowbutv2d*editipoipokeytotipopinbutofschannellockmedian[3]cursenscuractaligntabomainbmainbo*lockpointexfromshowgrouprectxrectycurymodeltypescriptblockre_aligntab[7]*filelisttotfiletitle[24]dir[160]file[80]ofssortmaxnamelencollums*libfiledataretvalmenuact(*returnfunc)()*menupoopsvisiflagtree*treestoreoutlinevisstoreflag*imageimanrcurtile*texttopviewlinesfont_idlheightleftshowlinenrstabnumbercurrtab_setpix_per_linetxtscrolltxtbar*scripttitle[28]fasesubfasemouse_move_redrawimafasedirslidirsli_linesdirsli_sxdirsli_eydirsli_exdirsli_himaslifileselmenuitemimasli_sximasli_eyimasli_eximasli_hdssxdssydsexdseydesxdesydeexdeeyfssxfssyfsexfseydsdhfsdhfesxfesyfeexfeeyinfsxinfsyinfexinfeydnsxdnsydnwdnhfnsxfnsyfnwfnhfole[128]dor[128]file[128]dir[128]*firstdir*firstfiletopdirtotaldirshilitetopfiletotalfilesimage_sliderslider_heightslider_spacetopimatotalimacurimaxcurimay*first_sel_ima*hilite_imatotal_selectedima_redraw*cmap*arg1outline[4]neutral[4]action[4]setting[4]setting1[4]setting2[4]num[4]textfield[4]popup[4]text[4]text_hi[4]men
u_back[4]menu_item[4]menu_hilite[4]menu_text[4]menu_text_hi[4]but_drawtypeback[4]header[4]panel[4]shade1[4]shade2[4]hilite[4]grid[4]wire[4]select[4]active[4]transform[4]vertex[4]vertex_select[4]edge[4]edge_select[4]edge_seam[4]edge_facesel[4]face[4]face_select[4]face_dot[4]normal[4]vertex_sizefacedot_sizepad1[2]tuitbutstv3dtfiletipotinfotsndtacttnlatseqtimatimaseltexttoopsspec[4]dupflagsavetimetempdir[160]fontdir[160]renderdir[160]textudir[160]plugtexdir[160]plugseqdir[160]pythondir[160]sounddir[160]yfexportdir[160]versionsvrmlflaggameflagswheellinescrolluiflaglanguageuserprefviewzoomconsole_bufferconsole_outmixbufsizefontsizeencodingtransoptsmenuthreshold1menuthreshold2fontname[256]themesundostepscurssizetb_leftmousetb_rightmouselight[3]vertbaseedgebaseareabase*scenestartxendxstartyendysizexsizeyscenenrscreennrfullmainwinwinakt*newvvec*v1*v2panelname[64]tabname[64]ofsxofsycontrolold_ofsxold_ofsysortcounter*paneltab*v3*v4*fullwinmat[4][4]headrctwinrctheadwinwinheadertypebutspacetypewinxwinyhead_swaphead_equalwin_swapwin_equalheadbutlenheadbutofscursorspacedatauiblockspanels*curscreen*curscenedisplaymodefileflagsglobalfname[40]*se1*se2*se3nrdone*stripdatadir[80]orxoryname[80]*newseqstartstartofsendofsstartstillendstillmachinestartdispenddispmulhandsize*strip*curelemfacf0facf1*seq1*seq2*seq3seqbase*soundlevelpancurpos*effectdata*oldbasep*parseq*seqbasepmetastackedgeWidthangleforwardwipetypefMinifClampfBoostdDistdQualitybNoCompbuttypestaendlifetimetotpartseednormfacobfacrandfactexfacrandlifeforce[3]dampvectsizedefvec[3]mult[4]life[4]child[4]mat[4]texmapcurmultstaticstep*keysheightnarrowspeedminfactimeoffs*obpremat[4][4]postmat[4][4]vec[3]faclenoalphaoeff[2]iterlastfralimbbaseeff[3]effg[3]effn[3]memslowtotytotxxyconstrainttotdefdef_scrolllimb_scrollused*idusedelemdxdylinkotypedataold*poin*oldpoinresetdistlastval*makeyqualqual2targetName[32]toggleName[32]value[32]maxvalue[32]materialName[32]damptimeraxisdelaypropname[32]matname[32]axisflag*fromObjectsubject[32]body[32]pu
lsefreqtotlinks**linksinvertfreq2str[128]*mynewinputstotslinks**slinksvalvalopad5time*actblendinprioritystridelengthstrideaxisreserved2reserved3sndnrmakecopycopymadepad[1]trackvolume*melinVelocity[3]localflagforceloc[3]forcerot[3]linearvelocity[3]angularvelocity[3]addedlinearvelocity[3]anotherpad[4]butstabutendminvisifacminloc[3]maxloc[3]minrot[3]maxrot[3]distributionint_arg_1int_arg_2float_arg_1float_arg_2toPropName[32]*toObjectbodyTypefilename[64]loadaniname[64]goaccellerationmaxspeedmaxrotspeedmaxtiltspeedrotdamptiltdampspeeddamp*sample*stream*newpackedfile*snd_soundpanningattenuationpitchmin_gainmax_gaindistancestreamlenloopstartloopendchannelshighpriopad[10]gaindopplerfactordopplervelocitynumsoundsblendernumsoundsgameengine*gkeypadfokeygobjectgkey*activechildbaserollhead[3]tail[3]parmat[4][4]defmat[4][4]irestmat[4][4]posemat[4][4]boneclassfiller1filler2filler3bonebasechainbaseres1res2res3chanbase*achan*pchanactnrname[30]enforceoffset[3]orient[3]roll[3]*tartoleranceiterationssubtarget[32]cacheeff[3]cachemat[4][4]lockflagfollowflagzminzmaxvolmodeplaneorglengthbulgeactstartactendstridelenrepeatblendoutTYPEcharucharshortushortintlongulongfloatdoublevoidLinkListBasevec2svec2ivec2fvec2dvec3ivec3fvec3dvec4ivec4fvec4drctirctfIDLibraryFileDataIpoKeyBlockKeyScriptLinkTextLineTextPackedFileCameraImageanimImBufMTexObjectTexPluginTexCBDataColorBandEnvMapLampWaveMaterialVFontVFontDataMetaElemBoundBoxMetaBallBezTripleBPointNurbCurvePathIpoCurveTFaceMeshMVertMEdgeMDeformVertMColMStickyOcInfoMFaceMDeformWeightBoneLatticebDeformGroupLBufPartDeflectbActionbPosebConstraintChannelSoftBodyLifeObHookWorldRadioBaseAviCodecDataQuicktimeCodecDataAudioDataRenderDataGameFramingSceneGroupBGpicView3DSpaceLinkScrAreaView2DSpaceInfoSpaceIpoSpaceButsSpaceSeqSpaceFiledirentryBlendHandleSpaceOopsTreeStoreSpaceImageSpaceNlaSpaceTextSpaceScriptScriptSpaceImaSelImaDirOneSelectableImaThemeUIThemeSpacebThemeSolidLightUserDefbScreenScrVertScrEdgePanelFileGlobalStripElemStripPluginSeqSequencebSoundMeta
StackEditingWipeVarsGlowVarsEffectBuildEffPartEffParticleWaveEffDeformLimbIkaTreeStoreElemOopsbPropertybNearSensorbMouseSensorbTouchSensorbKeyboardSensorbPropertySensorbCollisionSensorbRadarSensorbRandomSensorbRaySensorbMessageSensorbSensorbControllerbExpressionContbPythonContbActuatorbAddObjectActuatorbActionActuatorbSoundActuatorbCDActuatorbEditObjectActuatorbSceneActuatorbPropertyActuatorbObjectActuatorbIpoActuatorbCameraActuatorbConstraintActuatorbGroupActuatorbRandomActuatorbMessageActuatorbGameActuatorbVisibilityActuatorFreeCamerabSamplebSoundListenerSpaceSoundGroupKeyObjectKeyGroupObjectbArmaturebPoseChannelbActionChannelSpaceActionbConstraintbKinematicConstraintbTrackToConstraintbRotateLikeConstraintbLocateLikeConstraintbActionConstraintbLockTrackConstraintbFollowPathConstraintbDistanceLimitConstraintbRotationConstraintbStretchToConstraintbActionStripTLEN  0PDtdl(XP4`@d<0P<  L, @,(D$$x$T p H`P8d@l  8( ,@0,HhH,(lDLP< <@Lx0848l(@0pP<`|,,,8,<48STRC                    !" #$%& '()*+,-./0123 456 789:;<=> ?!@A5 "BCDEFGHIJKL(M#$N%O%PQRSTUVWXYZ[!\]^_&`abc'd(efghijklmnopqrstuvw) xyz{| }~ *nop+*,'d##DEQR(52t(#)+,--mnopqDE&(M.(/Omnop     `a++&(/ !M0"#1!\2223$%&'()*+,-./01223443$ 5 6(/78l9k:;<=5 >?@ABCDEF6G?HCF777IFJKLMNOPQRS6T5U8'3$ V 6'W'X'Y(9Z[/7 \]9k:l^8_`abLM27cdefghijk0l:::6T5Umnopqrs  t&; uvwxyz<#3$ { 6([/7 | } ~=>?@A<]B ql9k:8_CI>DE?D_=I@nopAF JK6([GGG BkH'I'X ''(9Z3$JK L  {  6 /79k:8M     w   IMNHHOOO' P7m      !"(&MQ#$%&'()*+,-./01RRR+23'dS 4 56789:;<=>?T @ AB!CUDEFVXSGTHIJKLMNOPQRSTUVWXYF@Z[\]^_`abcdefghijklmnopq1rstuvwxyz{|}~W!X'PX# RY Q WVnUMZ #(ehi@[+\\]'Z[%Fj^\\\]_\\]`\\]!^  ()a\\]^ !b \\]^!c\\]de f\\]^   gh\\]^#!hii \\]^j\\]   5  k \\]l!5mQ\\] !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHnInJKLMNOPQRSTUVoWoXYZ!%[  \p]^_`abcdefghijklm!qnfgopqrstuvwxyz{|}~rrr pqqqqqqqqqqqqqswGt# su   Xvvvv wwwvvbxxxx]]]vvvvun   y  cz%OzzzQ{ {{2z|  xy| } }&}}} 2{z(X$N|}} }  ~    }   2 !"#*$%&'()*+,-./0123456`789+:" ';<=>? 2@?ABCD EFGHIJKLM('NOPQRgS FTUR V  WXY Z [ \]! 
/^_`abc de fg+! h $i jkl'mnoWpqr! s';tuv wxrWy! sz{|}~'; J ! _  !~  ! ~';< X' d'; 9:  ! ';6?h+J  ! Y$j'n!o ! wW '; ~ !\! (5\\]^~ hi!JI ''(9k:'; Y  EEEE  9k    9k _K (  _J \\]^J_!LLL( (W_~'_'_'_'_'!J'''!   '_(J!ENDBbuildbot-3.4.0/master/docs/_images/js-data-module-mvvm.svg000066400000000000000000000041361413250514000234620ustar00rootroot00000000000000 Buildbot Javascript Data Module View Model Server buildbot-3.4.0/master/docs/_images/js-data-module-wrappers.svg000066400000000000000000000074541413250514000243460ustar00rootroot00000000000000 Buildbot Javascript Data Module Wrappers data.getBuilds Collection Builds Steps b.getSteps buildbot-3.4.0/master/docs/_images/master.png000066400000000000000000004233421413250514000211550ustar00rootroot00000000000000PNG  IHDRa8gAMA a cHRMz&u0`:pQ<bKGD pHYs+tIME :*ѩIDATxu\&.҂`cbwwwyӻ8쎳۳n;P@BBk0 >378jZ B@l\AhReIedHJND"Hdfpqvky8;lm>B\.LJP"&6RTH2$HD H Vpqvx<B! q( #_+..||1>Fȇr=`aa #004D$BjjdiHMMCrr аp|R yEzuQϫ.%eP*x5ٳ FDGT*E"@"PsψE"hTt9QkD=p.iB!$/sx%D^*jԨU @+JGիWx%^|W];cvb2EpܺsJOQQ a ˗oSSSxyF-кesD"OB!PF G8y6b7kMbEw6R qMDGŠA=/mZ'Ēu.N9w C횞 "?fTt4^x'Ooji ];wW:tB!%%aD'Z7oc뎝xCV-U6xR/^7nî-F;}H2wvهԩU[@x]{@޽ ~jB!%a$WnĆ!mZ@=Qήˊs`bjcF{.&1) ƁCGЫGtb'Oً015aCѫG7BC#BIаp,YsM7/ӧΎ!o%aY!GE-p<~Mw@ `P!2HRaرk`#akcvX_?=6nڎ1m2zvvHB7o܍0g_prvČ'xL*a݆͸!&)C۬9xM7`;BQT8yvڴ΀XLB!DI7JVc߷ _8{lߍ6-[Јp?xL\*~/KP(IJ ^!2oBy[שS&!M@91\]\zRH$C"ԉSg0bt F )"HSXo<{+-F}lD!I}cRSS1헙xs nlw>Ea հf;$R?x+VwglWj;wbYЮ-!B!e%aT'NBԧ(̟˱RILL[6ڊH)Vjz:zӦL@&9v{ǯӧ_lC!)}#r9&M,]8lTdT̚3r;l!nط jd;bwU۰];wb;B!([#1\j_}}߾ D"C̛;T*M LvH;|8v3M&`о]k8v4 W`;B!̠$jz\v{{aX"Ɯٿ"2fΚ JvH:,_ ~WmaUNп_o̜5O>c;B!L$t*9x3*N`ei?Gؽ!%73FmZN0lj fh!BJ=Jʰ0k1 6 ƏņM[>JJsа~=эpJƎ]96k6A&B (0 `"n޼C!QvmSjPZ~3ilC!ZQ7nNbݚ0/"<< wӹ%fڶmeWzz:<Xp>X͵|c|~eDZჶw.XXX}Yr!.ZM֢^:lC!J {ُƌ(w4`H Ā} D(bğp5H5cİAE@xXB?j)si񑹍BeUѾ]kh r9B!%aeЂEKQj4o֤LNJƄXngW7DJahh011AfMپ `UHOOg;‚Ukݻtf;Ra!OL}B)JGR`.^Ƴϱ~"w˖mTst歗\S@HMMŲ%c#7ƍ^ۈÏ3j^AJO܅KX`x|ZFtT4a`KKb;F [wcvBȗ$ QTXi3ztR_r99X6Թ#c@G=s{κ .CgȐzKŒ0| ܵ^CJZKm^p%˱m>tNҥ3~i>k ._?%!9br%DEEޏ|-ۣWڗ\بD@׾M+H8xۡbrmc~l/T'c90 
obIbA|}y!R2BVcǮ=ԱLMM;w2-[Wf[b/XާO/\rNTb_`FJJ_@G*=ܵIՊKojkׯޯ_Бqؿ1lHט0WgO< eHf͚ 1!_c'Ю}b^sb|?z$پTBH@IXq1hzW(!!Q.WNӧOi} 0x / ֵ;C[.RvۏfMޮ}MG۟>yQ8i<;(޷`jͭ<>Ӧ!RPVDEE㡷.7lʱYLmVa<{j`;QQxΟ>xEgmѥkV֥c6w0N>B!gΟ%U\r!<|4OY,-,Y*Μ;v(DOΞgUa;|#<a97.ZnYc$W]9{͛$BHYEIX)cz;{/Ġ͚Dbb"ۗH4i6nhܰ"DbXtQY=A``Ρ޳'ffZ\7B9j KT*XߑL !Ҏ#rNNz;FJ}z'P;}{E7>eK/1!rUVMj<}P!R1j12ׯ݀RdF O !RQVGxZ*܇m[3  5 ]vގ0ʌ_~Fn]ݎcw̓`Vc BJ:' g*zLXxc`hhkkXXX |AJ"̹S:복e޸tm9cbZ8:…ˬ@NPpjhiּ)b>jhf׸ICT-0|,X8vƿO 2fN.Ԫ] -Z6K_Vv $B %aXtt IR,,,Ьy|r_ڊ%T*u#ZǏ(-/35mf2@u5n7*s(({;;x0!#b))9&r%EK$2\!L R #ccC)$ B)( +RSeDlQID$79%PH!e)̏-RzhA!䉒RJP =]CBWb͛<>)&> EdPlB!XR2Y*hDDI=/2߄=O"*J7B!9QVJ rPH!ы1oZb;B!Ģ$200ǣfrzJ$C!1M,P4LCU !K셓Vqر}'B?js\T>}zk/NƖ-8V nhߡ333_DHbۙd6|2V\-¼srvЀHLLЖn {{{6]]f``uO>hӦ\|۶lvBjժS٫^ JSR.!JJ1ggGՐ-?| ZaaaDNڲBƹsCRx֭M֭݀m[W@{l{30iD_p899":. [DD~BM=ǣBCô唔ܓ0~Z[T*8v{{N9wmݎuk7^!Wgx1axH8dK$ !DK1gng~!LJJBHHH^<(׫H֮YkUjU_"?~v_˖-rمs_ygF...֖;6n5˪J*{ӧX|kѢ^?,<]z B!7a3;zE/r F˫0o?c}V-0d`H$x)ދN;j>t1ɫ- oEQXqXX8Z4kZ,"@n:8K8880މ;v27hX D,OpAt^C{{7oނ9{V빇]zƄBHiFIX)Z HJJqU$a^Bn]ۼx|UKmۡPdc֨qC^RZ߿/3{o/?O/_kjaa(BOˊ.θuޏUx:0~͒ؾ,szyMhaÇ@̜3fRf*ÊHzZBOQ9B!#b (xQV}V[ky:/\fl?i}ʄB!d/s&&&hܸjHLJS?")9^]F9Å77W`ٛN6Ee022هR劌mmmѺu+w@@ T*<*Tq!ҎRL gu<ַ8:9ή;rm;eN|OHEUTFFQ^OYϞ= @Jժ/^.|eypuue { Ylewew|"))IիWq潣V/h6O@77 !҄RW]<Jo׭[GȪvڟ-w2ﳧO1U^7n\?z?Ϭ^|!BCIX)W? .>^/$Ef/tifsv.0ݺwcC?b@{&F}STx5խ[HR5!fwyltΝ;1~лg_T*U( \W}QnWF!qrUT}YݲiP6mZser7mA_ENyAAHNJ˗/w Erbb" +r<@ S4B!PVTTpMOodV<D|}cѵkg0={N/ wCfMv ®n]:#<"~~z=NݺMT*|߾9Is=fW##102i ;-1ꕫE~wGnB!,$գ;.\˜6ѻwGU626‚ѪuKپZϡKǎ~AJRٕC5q'{m{BQ O_yXZZbhа>cOyM9ڵoXI]~} ((Æ K6h\W͵wwFɓgP*_u|N|,a߼yW}1?\CB!,$D<`? Ke/, YeS5`ll-'$$KyC.W@*.{S\#GEN`oIѢySwqƿ'N$}NZ5 g6Iy75V:pgȸhMpষH!5!,5(e/|@ u֌e+DPPcǏqE@rru遝;v!SvDw?c? 
PԞ={70jİ"<ߍ >]T/_100@V˖-Ycxܺ1..zc!%%EMJJ <ե?=<}tB!_`m޺ǖWC,޿D}r,777ǵs44#)ћ-Z9&McՈCmٕ)>|xCrq))JL+VsgYdSTw jG~зπwޣ_:Ν{hԸ!VYOQhۦf` SHbB!e +cF {*|eYgy!GY9:9b䉌eiiix"8O,"#޾}FiΜG(LcKJ6._O3>P/ppu c˒pqlۺW\Cjjj* 'Ǩ#G /{x LF!e1 >sHTf(C96|(&Ms_gC"YߠOcc#2 64n͚6e-:ujX߽wcF嘴<+$/y@=1nOErNR ۶Z|_3B!e","/+:޽+/n7(('O'SSS88ءUh٪ dRR.^O"""Q1015{75jzZd>}ƁKʮ=`0~3ڷ)|ܾu׮^g,+k{>}XZDxD}™3g3|DtL4LMQ"z쁆l۱w=@$BB!CIX)*  GV-(4uNڶU*Wb;¢]{a]Xr ˱N+>kV.]͛BB!D(kk,;8NvaI 25W_We >!WE}(#B 2IF1t0֬߄hMH[Ie2,Z-5E\.͟$عpJJ+S&c;B!T$XWpß#9A2BEWb)A,ͱh8s"._v8%ʞ/+.P(d;B!T$xXb)|>]sXoZu;#xխӦ`xpJgd &B!Q 022†50&l۱xﵫv8ЯFW7lê7`]54nXt#,B!2J¾V{*aɲՐlTT*nۅ.`%\"!;b_pX7Vـơ{.lC!4D7}` ~06֘=s1!B5#o,_g;$RJT*s>]?fNGM=SWR]| 6lGb߱!RP 1?N?gτ!L#((֮7`䋩jY7<ǡy&lw'NŶ1ex arF7n`[Ѱa=̛= &lDʀ.aŨZ MC*2Xt%dԩU!2Vjyͱgf Q&:yW"Q009> AmbTc4kEpwsńqcQΖ퐾Z|B6o݉{bؿo|cAxLyWvHvm{ j_܌!2VժxwֹQ(6pťmBM[.9ui;Q-{\IG`i]hh /^B>=ѯo/*0Js/aテƼ9PJe"eTZZ\OBv1r`·##e.x?~Q#BHI# VP&DCR9؎eO¬-):״o% 49w֬ƌ JW`˶~+o/I .Yh ݺv,IRT"f3<BfH\v=[u1"f_ gnv_$# Aqp\u!mUdt@ Ur~|B -~> ?Gcغm4nVZCR[\~ 7nށ@(~})"BJ|0y̍m { T;[A%M UoLYCRAo8|OHt@ҵ#H>$L8<>DK:gbELC2<%ۃ4gYq#C$= $~ZȌq<Ӯa2Wc(ӺkP˒wx oD4Z$XYYb4'9|pUr]@ҰGAs$]9C_&A{ÁA>a f3pES!{I$"H_g$awmbmAHt]!4SDk.Y!,i ˃a:Ju#T)=fQrhPԯS'x/^`jj.NanaX" T$'K*!>>a GXX8>|CTt4,--P ?&Q/R_ק7B>|spmc<|GT*ǃ `hhHD .RTY*SdCTT4j5Z<׭5k@$}ʄBC$,j FY` %' KI mF[/OҠ#!f혤AIua0\W Mo:]Um:b;I~V* ))HJJWo;nX,$b^fRuhݲvYO'@&St4ΞR={1`aaggwq !R(7_.#329iH}cnn5v[j+áVlbaIݷ)ü$bg5o*x"=3<&8b(>iެ}ڸeۀ7AZUꏔgeKr694 jyWk_Rt QFԎJ!(RB!BHMÁ[;B-OGlǫNO SX⺭A y"}~jy:'c$Ōf,G͆+TZ!G[aae/1Zy&2> QnbMB!BDq8<> ܪA yxj)4 |Y+IHq)CҸ R^FY;)/"Htxj̟:s WxnSp sO!B)J3ʟJ&I2RS>oS>Z_K:U̳ an$O!o!zCL,Os fdH =fYB4҂^؇0PSGfEX"2ȏC!B)%aP?^<ʘƶ^Os՗[;A; ɧ)*%i~O?o*}|M54JvMxV0jj)!B!MREM^{ɷO1\krJN3N#NkNXJ¸M?V'ls3T9v]w۴B!BJ}’CP+Hz;@\v;C }| 'gnQͦH{wNHTR @l/;yD d,mAqo!)jvKt@J5 =[o<#濐=T'U!|K{;UͶ[~qHv$fQN@qV:#=¬׏_u}!B!# ٵ Fk~!F+Ðt8RECfhGKƇq-ó#9S}}?0ؽ3Wr80>6~<+1*%AVKrUC&LP& Xά*iLqY{so(DoG¬|ύB!R|b+9V^[Z\.G|4TIq[;Hvn3y]3*7yȍЩ"eYAR=Ε t{7X 5>!x=x&׃TJHt~{5۫ ڃ  eB Ff0n mܷb-~.szIv̵oХgYC#`oy#ELVB!_1q!_[ > `blW.!%ZLl,v̜k00(B)!B!#J!B! 
b0} < 6LGzOqV#v2]Wb뺤i4:ߘMs_9 *u0}/IS p@EZ A)hp(Ss]gv 'йm:qs}#s~>BBv된$p=NqViF/b\%]>O~ֹo>sv\ץÇ-uGPIr]gn뾖&Fz\׉kt"?/Ͽb#xs߸+{@gpk:>uk7w?ĵsQχ,tk1hNչ>pP%O[a~0[\!:':WӺ:Z3/u]?߂+2u]Z?]NC:눘;Dy~>BÄV:msۤ&!pPeZ \_VHrqVOsCF 0_X؃+uw=\CID.=->Lhs_ +`v` X*YJ|Nunk BE|P%A) yVJIԹZ0dj*P7-5}JuMOR2Ty߼^(RrKrhݿ㤸<-χR%FFVk:(}e󑖚 󡌏JGbab ~W?Pχ~>t&d}>y}>L-WχJC]G>>|>Rv>*>y[Gχχ17?eċ@ 36˳]UGh羆kôh twB:Ws_^1}N:qW{F@\L: :U뺌au1nj sٵZv1kcϾa'0:!RSSqY; u:WC`Sχ>?ÿѪ/Qa\χ}j6cL}<5L:>~*psV"Tq?ro5_χ}>aA~{nWҨs]Wχ8} K^}>֎:|nIQ$m^ϟ\yo]kZ-Oi 2yݳ&O舩4u:LMMtтBү!^*fs(ǜymB!_ z{?XnC CoC$D9q l͡T*3.\d;DB!Hٷ{?\VSFHN:4fߐ?,…KBJ Jp8)6Л07VڟO9Y*10kk/:9peM!%bv(JˆlX́͠P*-aqlJHyR9\f ̝;ۡB!( #zS[7mcWqڟ6i%-@ `;TBJSlްժV.zԬkVB"EB))( #zU[7sݦMaŒ )#$+cc#l\lUB!eylBʨ%`-{"F !Rpj ȷcd$vS'M&@bRm؄QF)>.oelCH$B!0-!Bk$ ;onĔP)6&B!B L&_( C!$ USǖx5پ !W v(Ba& @TACGCh(ۡźuys(c*J3/_f bya5< B/}rP (Caų/)!R`O0%v`P0^z]7ڊ@Hcğڲ!o7.' k)#JJXQ"aI"15%*.B!Bȷ0B!B)FB!D]P)64D=!Ba2%ʸOk)vЧ+ \pwoǏWzhժbK} TXJ+qƨ]S(J\~ׯ@xx\.lmmQNi&lXDFF'K$@$2+5n @ ?BHIE  ̱`jj glZ*޵kV1w}N`֍+v+ED|ӦW9֝:y˖.eЬy3C-N:ysΝطb5X"ۡB!$|E*%nlR*Q!>>A6))R88ڳjpy\:mbbc`eEsQV=r un#JQΎx̚Ѱ!/ߞJ?H|>ԭ k++x !!_,,,STXf-cG9rL&4mڄVb ZH8jժ{ros/v-1ǃ]9'$ 9)PfMD!` ܻ{Ϟ>Ӗ6nڀzjW^PS|<}489;1o `AcT*L0r9QлOO2\~=G'|!x{?X[[_'11ǎǓ'O,66Y ]uFJ2 M5sppȵ~CDGm섇GT*yw]65:q \ѿ?xcR0czf..N0q|<|-tXY[!44 Wqlc#T^ ۷L7oj ժUE] 2ija(p%`E1øfGӖ{T@6gJ*as@t:59=OHRƲ#1;{W\kтG GJJ2,,-PZ5t5kn_53Hp_c4 زu֭@Tz}\9fzx=`ii]нGwl"G̚]wE\QIIIsn#QJ%kVV1< 7o 'g'TT tBjUsCll:֮[ B!$$ NvjAT1Pkbv('r||k}q3Xeu9{,p^޾}nj)sZ5o e~ ߁s$`Yj^W^+tIJJSg :*|9\]5,{([ɑD)ST|鲶9l~"">bİ9QNB[{&@]|%Gݾ}qBǘ4̝'ˏ>owfffY#?nߡܬݰP)6&^|R {{$Gsr1/^2'O滭ZƢ2f988sNp8x>7@Ϟ=uQ|ymZ[k@ k>OTRE>ضurcPv-\v]%RVcߛѪU"}"^nj4M2O>}];w3ӧׯ]C.sήt\'Y-,,L"FMO7 DGGkBx{S!JqڍmrjiVVֽ o{n9 6ѣ,tښf4((wCƍtVMe Bvhm[ww^(*ӧNw BŌ_4Mnz~:GŊ˗C7e,`BJ:J ($8QPAIj<~D[nа!Ξ9\~gbr_>O:o3Cf_$w~ڟ< \--"# ooo_Wgg\]]˱wf߾=qMK˜̙|0F;33S8lllRuQm疖X"޽}@7bh|7\y ͚ji6j?ԩ3O>hޢ !Рa,rϜ8q$UXt2)HMͽIg!|;5V3y,?ǭX.Ys}m.^knݼ(2UMD0nOe)))XxF}Ӕ'm[3˭}<~Z$f(g&]wpݘQm}#%d+kc&$" s> npuuՖ}dyFŸ۷0݋75yR'c$rތj4e4˃hӆyϴV~ٙQNq?a!nZZK(d(/>>z^GތrŊ>{%Mq< FU_|~R B>he >}z•qn 
AI#^\&ψ!<,Q---cʕQ\'NKOc\IxisK=s<}țbET\I[ ʳ_Xz^H$4MΟ;^x(eMsӬy3t̷^1tpD{zr&YPpf9,#$UV)pJqK !#3Iﭒ.+*Yp8TT+W31xu{N>t \9[DFFb˦9U]tʱ{ڥǟy':ukc_R)~6۶o%JH|p`ll̨+/'Nb=^^u0k`gW ЌL 4HϡdoN̶; c曮ʕ+HV||cVѦmkC1ש)RP0Y5[ȸ~Z5+ ?!J ({Z,1:9;=3~3 RѥKgmv<'f"djj .=z<NIjܸ{-KҰrjڣC9hM_##fR\d1{3CʕѲeK@R?y!V։GYD"xWg27o=?ɐ#9%9{yf&a_˳Fu>r+ıcjE*J,[Miބd-2mT ͛7?гWwYt]MB!! ښٟ%..+k*{~nvWZUƓ)Ji\.}zEaaʥ'O ,LӔ0 uZZZe2NP1W\lЎ&6FZZT[&XYfg⾲ILLd4͸W`mwfY ԫŘ39 |sob1fx{!??CT*. hA988`έfRo!]1;#zK+#7aVQ{Jsi]ۍ*;0F '[r# 0TSPgV!HЪuK;ixTT)cfmڗ\ n$ұo|ϯeXh,OvG Gͬ>&1T*HRe|ozMJ3yS*2\.W;cbb}OI:1r``bbb`cΝ;a.M3ϢWnkl&..I٧Tիb˶;uG1ڵk$Ĥ$FMq79FF%L8H$_udfXBcBi))MXyTtg:eOzk6m[Y3pQ[633c$o^aq]jJׯzx{/(=g&aj:Ctͼg>~īWs.ȋkD"L<&O,Ch~3 ManglbbߠթS+We&vtm4Çu}9W\BvPV@hڬ)cن P]MG\ߨq#Ɛt%Ч˗.$o0jY9[aܹ(רQ#K ADx-Zu"|--1$lߺ]FfDܼu+GٛQUm۶alƘޏ w>VT oݽv5j0h^x-=r|~Je~1=üGfgjfa&<==1X{4{VcϾyQM5ͦsJ3B \`;}[-|meܠAgOa¸Ix -X쭑.aF7R/cԳ y1sTD]wЎ/#}fw~'*((?^=`58yc1J=5M7nxSp`A<~j/_Ź,_PvMm9kT*S5=uʸ=ztז߿ĭ-Ô/7DXXW3s[&Ke\Q߇ cdzϰjݿPINJ۷oˆ g:waNW :wiּc.˗.ԩ3PP(JӧH<tK/ D"A:bzY\z 7P(QFrZ5_">}{iʼnNj˭Z3kް19zV q4>!R]FCU.ڶm˗jݽ{/ۜJ:\ԣlVR &OԖ}}}qz|IO?ͷI.q`<€Ͼ4O4ffHI {}2U8zPg1lm.+k+Gi4jx<(>*Cbtѳv;ڹٳgٷ_ܱK{̭[i^{bmD2&&'LfߤI\}5YX %+.{e/:Γ'O[[[<}|ݺwuL a54ok9 K/CZZZ3~ffqZ3iv6544ĐaF7MS&Mޣv;ڭ vح1|Cet.];cHIIڹGV-QAHQ=z?~d\?x狆'B5GBjժ=<^ ѱS{ή6l-xk`9FVM6EgYT7/>!ZƓ|ؠN{!h׾cYbb"+#@:u0~OeG;~TTLd,suuŘx+UĐK"C˗479,[SowJB!J¾D"=;09&4# lK=a~9̞|F xTUٛdF$a rBdmc}{g֕ڠKNڡĤlŒO% 6n^=@"`XvU̚&Nw'0x ^#^ׇQޱ}5=q"蚧, Xa &OK#F+17ɭZZm<ʯ_tp|>K.`emcuC[W~4l}{zo`eۦ}aA9  ٫'nۜȕ?;lll膭6x0>}޷oߩB7l߱^^us^ 7R2 +CT9D2BJ :ߨ7nsqО/O!W"`bjr`gW.6,C; B|ZxZiFK6X$1*L&c 000@RRf/G lhq\h~c Btt $ %3sq|) DDD *: Iɰk#eCll,lll`kk胤RD``0bcajjW7\6Rm>q3pT*$g %6>>BR`ooAm}i3~[r̷ObȈ8rpw/yQTǏ!Q\91_BF 0ex<$%%1H$12g|>b:f&߄Xt9NKRH| 񉰱[> ?F"*,Pή@j#::b9'θY!7ccZ!`gg6y6-tTt4F'SM!%a$$QF!$戄B!R*Y2V##('B!-tG Dov,B!R|T~H? wTpo?f \:: ?O\m(-! a:5!;#`-ͷP\<5xj@;pK!ҁTA!7T!3~ w</qGm(^)WP'1P:q A'pͭF. 
JBHAoH!i_B1utrĴ\̷]c2׏B8DF@-MydIWmn*ς Mf B߂_9ϖu -s}^Pʠzp׽偐@eS-_*ðsB{i&\V-A\P?C :cd3: JgpY>ʗw9W:9ʗe_W[(_݇:1sN9g/!;R+XlCH$0(?c\i} |= `0j.:!4)@8j5gnjTAulRS1B0~m*RWvPf| 2eԝSɖP mo8A<y8yR̔/0@!0m'5ؖW ץ2O uTpmNND-8 mTH s}u KTaH]fPAGVA% 6:.S 3KF)"bCIaPyUsAPc i@jt¾4uEY$( @xup̑sQgp %_FUh@T!;'!h=Lajik'{!TA8澃"]_H!m\[ˠ&AkX *?\ *ȏI{B!>aA [}AP ǩ"VC{cdvjY2X@`r) Ž3H۳@OQN UD BC׮<8ܟ)_8ݡx8;p"?^P+PD}I !2o1|&T@٘XHI.Bm=uB4hWgǡ||SKZ:> p+ks@yUJ*荦.':ub TqXc oS&\*WՊt[uD ޗo+8\ iүNT k\pyCB![Ʒ;1#64R(>1 ~X[U#>Vn0퇁cbmcm K׎@q툦SW*9oܕKL?Bf,gUr"8bS˃mk: WP˒10ߚh+&uŏ^F Fxv6}I !2N92=xN@R~u~SמyͼJ ) Z50jsm|zjG#-s 68Uo P9gn#4}9jU2=}KNTY[s DpHsB!P0\ppLm!ԟ4Mղdq5W f߸+$$\r< pp< ?pL- /PҷQykmaugr\j0q)8"ȯfB8- we;B %ac^6U}.j p6 | WFH!6B/Vq' o;2!d|:ۡRl9"p|Hglzgjj`)}A1H^#? }!;W(pp,4ͭ/>6 9dK8pV⇗ l{7 H 6G,a#vC!? JK7#B JH ;.hڭ)TaYM퇂Q;mkqx.oelCHɚ aż9ХS{bϿ --Dl\40B!( #ee9HkQF!oߤH@!alB7+#m֦BSxlB@(Z* lBHǷ!([%`̜5/^b; G=ܱwv;j/)4#&6HNIL&CJR IHJ.GJJ RTgF- BbH"X,X,D, `nfM~^htDBJQFMZZ.^gfMO!zG[v8Zh#<#h!66119F562@d(HdC!$b D">. ,-annr033LML`fj 33Sb#)9 OLDB\'`o''G\K#RD0B!_alQ⤧^?> x+gg'88ڣN5B (`ldG8九T&ç(D~(DFF@\qR`aa ^*\] M$4%bB!_OV#(8^Wk\.\]]PREthNpttm|D(_;X EH눊H$B^U`ogh}=BJҐQF!|аp<} O=ǭw)* =*fMO+UikjW\ ^+T`,;❟N>DXXXnZhP7l;rNI!%\IN(#B F*ν{}b<QJet˗/d锜m;w011FѾ]+3GG~4gcӶظu;F!|q1J)rKHĶn߉5>~ aaa֥v%`ߍ~]}瘘h~Z1H]p'; c7 )}XXpulڲk֮KLJyWaXb 7n;|CJj:sMHHعs~ק(UeJN`-040O?~I)}1h`?/]}pؿ2q<ڶiUz iBl!+d$b]:u.HIJ6Չ?N\y&LvYn 0BH[&K-c<|۷ԖJ%&#*55MF|||_d_8r~~ y) Uk֣gشaZlF 7ݺv֍РfΞΔl! F,&&?8[6kfSL*iJ~ۧK!8FJwݽ\n\1]߿ǟ.]3fᷙP$šk#@RR?/:Iؼy+B?qut:Ckn &&9,s9gWWW O[6225ڵk~8t0>} @d9}LﯤS*شe;vރ6Z``nnvXH3z8:wlvaȈј2q<s?jHH_"!&6N.&J!ΥKWUܶehּYuk*U«W?~DBB"LMMp9U.[vm:@.WΜ93 pG1ߵ?V1e~$lܸ-+ Fl1h|tDwW;H=/"">biѼYC"% W[?w·CR䕈TJ ! 
FZz_'Njx1y 4lX&X}7ynihԸ!ۡyNNX|,[QcMݝ %aARQ7k60BH r1ud >4VZ(p9[j<=.ۚF̔QNLLDn|#--b,oؠ~˗1jDfsaÇSslm[O07hР1| >Ǐc!qfffe/իWslӸM?|KWr[(`!O#WZ6r2'm6f9֮?lFw0nݨ!l\3Zujĺ+!>EBA`oopKW10htĬ43k066fOHLʱObeFƹQIIQ,E~zش|+D @L@)mwuz9 F  nڳqX`vjה 11XzmdL2ū\kޢy$NclیQ C All,֮Y]>``R@?O gҩR+Pa^!E֯G:mW(w^;MvB6$f7oa_ _gmVz9CnzvދY/dKQ|yؖ͑tYXX x-j5{}.sO ;vs ̛;۷nqX0|(@rR2>QG[ EazؽxJ_,Zgk$$&b?0z԰oW͛7q˽0u`9zt넍c XvȄRrX`M~}(fSz9f~yhp';Xnd? :OOOܾ}) o޼'j+ݣv+Brێ5 jRJq0d`Fs@"`qdm"v銦i\^عahhvmZ} %›ׯe_߷/&&1pvPooƼw7c%Q<|?G gUɳw! B!^ذv$MT*1g]㙙}ڲ\.tn/ܹUmמ9hşsBYHKKӮkۮmubtQ[~|!IDAT1>}Zk8׬CWڟcbbpB%d n$];Rz21,=.]2^M *>>OHYem o^X>>Pgi}ӰQfOi'0Q53mצafjj:jڍ&B)ɔJ%sL⛜}A"ȈX---1g;o26 ӧjˇFڬl_~С_>C]yVm>N(bXF߿/`]Xnu˛7ok8{uի"55sN/&3.j% 'Ɗ .* "CC>7bj޼A }̒F!WE3yYX noeeC*B,#%%>|cbbt;KSD-0x%jժe)UQFȷ&&62Y*R)J8. 24ELHi+=nEǏPTlr6E~5=QZUܾ3JǺk0jox<-^#/+UDUn޸?xxT9dgdl䉓0\|)iHR  o| "'\H$ŸY @*MXDG@Ru7ڿ=nL|?;;;g1%aST%aUax^ERr8c##$$CRXU*Vpvrbt&`5}^+c ޽y3i뎯G$2If%vfXt|i1 Fy~Q?O ww֮ܽýM><=k`Un100<`d9Gⷙhq.lo8N*1h΍c 2_>~zf GuVRDEg& mZdW(svq6[}Ze G>+V򀣓#$ RRRکu<>J|k5@]k%FRܸy qprrD510RRR;!`bb͚aze.^.`)4oU6|yrr S5GK`l^s I Ќں|_>/k\߽G7t-`?udL6rwwÚQP(YfuC7 Ý{7Qg 5jz\~3 K011ҩ }w~صg6Xd!7m\.q pK{IfJ2@Y' ͲkȈ]1?XrR6q{)rz33S]=d;BH!$&%wFH$_ yм)v&fΚ]0o} Uk_d\.w]/4ž"@Z[&_VVlPԭSxASvm ?ރسo I^+uj8@fM իW+cxz YuO(ux5j֪a;T\#RZHؽg>F~}1z~u555Eг{7cߛ`bb#՛6B7bZmV wB\t%`nHJJ.D̈́ރEEayujĚSR啀o ֵ3vۏߠz*lT$a* {R;*oKHH?c٩Sgrv-rkӚ0BJL֠,H\y8q f͝Ӧ(k TpqvKJexmJrƠ3ۡRj<_uK2EZ<ڵi;aْoQc_|>}x֧OA!W R#4?( #e+│sqѭ jת9`ڔpg;$BJ8{^3?~I霨RE,]T CʹQj!%,4nX(fig@k7ەc:tnP~y:H NPFdKnP\a*WM={glS$rz'nns,*&MC6j8|.]X~xQh޴%Eؾve\z >0$.== ,o3~%f+W!E*e;BJ3ltؑp>'s,V<3ZϜR1t@pyIľǃGE@w ???Ʋժ25MXwL4P.]KBP@Ҩ#[c;NBH6ތÇȲw 9L:Yleffqk_g!%VxDۓUQʷ>P'1*^ϽiyԱm_Gra0fRi7uqLn:_>a_k\@P?pm!0 <7O(aƶ *Յ⑦*^jDږo5 ai-Ũpj(^Yp}f4sqڋ]hܕQނiK}V::0͑-dO )t7Pyh34;~%fUh@Ѻ?RW/[ D#ڈKXС=wyv-EDw3 #P؉fRoAb`D( 9s.`İAtIגeߠ>b(v}V-]kkIhqQ>Xw_a=j?F5VMD #sB n|;o>@QyUǕwE$&y_ШaBPB%&&bkk 蛨.Ǜ0K?!$fP}bg #fJ2ݞLey]]nR\έUS|B3QG"zb|oc,,,(MdT_JA(ϻS<“Uo^jOERe" ԯ"ڋ> :퇡~ݵ36k/CP- ʽȽF0wUtȺߓb9/Tt}'7~Yc0A(=v3 X?ƓB@ENŮu?6 6MCvFܨk/c> 0IlxX5iܐ#/ 8R<<;£ɫ7#޺hwuz/ 
짋BpipiIh|sJC cö2yc,H\<ݾ +$V6H< Gw7΂C&;P>= ε " ߅'aiV#w"sWϗ+ǷK3T\#$/݊U܉_e`*Zr*١I0dR?]@uj/Rk; ^mQ'uK+k\GL$,<9+U k Bqػ ^t) ".!ެ~ڋA.GBMtwOIx*AVHkZXnO{d} K>(z'} }v^Z]\#kaC& ) ƿ?[ o!,))殚PyL8`6A=8B&%TZz&n;y65Kך8nYbRI\5M\$r_98tIʦe8' IJ so Q F7pWP<Ax՛9'RWo^Ѧ%죻pt:2_:a7TTfOd>=d_}F2<>4!ec¤ne ']Z ͭ?_/cr"FzƍзOOz{= R~ߴ}RbVo ՞lj:4.OS~iȝ= ?V2o5.KeRُCQ(43o7pCdN1)S!Rҏs]GOGr/s콱gV찹_6A5-X*  yhS Kh.>}Ew|vVDfxviO-&GQgԛYOCի[?{rƼ6G.< ̙sǟ:}ujyԯWʒlx\FL1u yOOnpgnA;R ڑk3s#.M$8 YLh*stöI7R7g^jNeX%?huYA;R4n8 uFEǔ$ k4/]>)yNtDޠ#Ҋ=$9>;н1%6!- \C {}V:?ݹnXِnM[@u#MxIRڴj5onj.4sWQ(FI)l/Ǝ{IƖ+EO&%VJpѸR2o, /S{A"A )vNN+pCd]zE wown`Ө8N4Sai܅&A#IWIeȪ6Y!༔޲ E@_D5nWWjԛzZZLŀP C'(:Bڴ꫷,G{0S]hMbnq}ΊId JưMԿ2[WziԨʢ_iָ16maȈ˯8uHSJ9i256oF6ǯ|5qg! B UN- ?2'wlw%u  VNŢl eRӧoeu}Hq0ڭIݺ Յ#O` Axc0h9l2^(mWF9{3RrHߌ߸Md!wjzXX?\Γd9z&c5:&]ݾ  o2&oiH}ù\8qQNPO(0)WES~5ٕ(ǻoFNRI:5Wj@z ENNgΞ9zjfM3l4jX9 LJ({I9pq6໿%c96;We!s}܂ :{:t_N#Scfע)Kݰm]VFS]9  KH,J"JW!%%w~/ڵjPNMW3MxVw9{=~w Ƚyڵe2ǗePu:Dm.'Dz\eDaePu4m!C*{_$':bLñcwG\j & <//Oʕ3wuh O5wUAđJW"BGrbΜ0~㱰 ( <@*2iL67_rRDG3">H- A&Zdw&r1,ap1D$ /ʔg''ܢ/#VKbbq ܉!*2ȨDFn B m[rH%TU/ѱ!h) $&%ݻ$%'LzFYY*22{7L2JYYYdfe=Ѥ +KKͽ@O vv8:8b][[[quqg''\]5KB LJըV M]ִlt1t8x-۶SFu5Cr93^Bff(8Ӌ AOH]>mz{/+KKlmmpwׇUT4 Pqsrss8zp/殒 0Axܾ}v2^^(GquqɱcSSSIȭ[l{QQT i  JaHMMe_9{<~4kژ~}z=q+0\r;v~!Ա.lAARIi^bU殏 O 66~"33.^zh~΋aشy+RAQ_A(DwDA.yWѫsq?İ!zaR!gXZZ0jPlmm  ; 2/\d6dUT6[=m"5:sƀ#| ժVy%mj^a(s'cbΝhHNN!9%RIRpt+k++llpZiZk++dr\Jkkd~5Z-\23QeRPTs+"*̬LRSHMM3M&耓e<=«L/wA ߯2tUyڡPYl9o#B Fao'Z:^V{7Bh4DFE~7~7}'e![;~ظxa'\9Jܦ"ST*dXӒ#7#spŦqg~殦Q֭{iղ# Nvv6._BX.qu"nh7~~>4o2xxᎵRi?1T#NΎSLzFqq'KddAdmrrrϗ AATD!T D(cE&%J⧟eCu Q,|$őq`#ee:h ::ujނ ""9} .qEoBSۛ Aiղ~>5 ;[[lmM2CKZ\]M͛Gbb㰰PP!(*CBZ50 _}#s4w]Axbdw}J]eӢI5w5 H$:j~Xøo: Pq 8ɱc'H{g+Ѩa}DP`j'J)NOw֩e\ի7r:׮_;HKKlYoթMڵSْ^ Nr-FNAq:He %GE@+Hu1~?1y\9׬?PVk@e$ocq.?-Du^3~@p!? a])WKxOH ;;+++s `6:ϰg>:wppZӝjժ9? ]k Sdܺs9w"މJ|@96iLT񅍗:},ͻv *ۆ'$"{JC;oY>$JN1ad7Ie1N}ƃT6=hntZ]D*EuR+\4Ql WJM8r-55) /TNN=ƞ}wiiiTBvZ ?? 
e'H(WΏrֵZ9sJMiټj0DzD&%hӪq9%OCuz]P &YUDAѿiqi;֓y5 u}krY:?Y+0A?•WLY) s Un:ؿ$)lvCQR}t2$WJJ՝Mh d2o$ܽˉg8~$c~mZgnT\A Px{1.k3RP~Tbu{}n6 C7A%v-zw;P= ggRSR P9/w)r%ͱcJl4luƶQgsWdze66mʝhV1__Jҡ]k:kMzFˮ]{MW[Nth߮)A PB"@Q#,<mmDi61d bc*GQk43š(nk&i=⣐{s"{@,&[j܍ ^z^9v+WԨɟPz5x b`ӃGҳZl!TS+<z @,+:AϏ7b*ؐCu cY*΃?  ,~i_ 2;SLw<bnqܭ,x<_Ʉw7}~Hc"LT ,a;iؠ.˗,³r_Z ժpF8+WwAݓWGʐ"A(\@.]^7p5չC}9ɾ/GzdYY\XĶq\T'uĮiw7kWɍR[Ⱦt+L;~Ax|}L[m239/+WujX~*Ǝ#%%ϓ·|DFz x Vk1}&7n?{a/ N[~\vY''0|>s }{sl2cDkִ wCط.5'y΃>*0?mZ9mݸNYHedهd#I۱ָ,kOEXV@Z-qsF &hcCԪYT<HII6Yvws+rߞ=Xڅ؟46oj\yWg?\Ï>ښE_-ĉa^Ziߡ VpΝ{דʞܹ#Zrew}IJJ2ۺMklm =\\] O>=vvbQd Ɩ4V˖[l |:3._hݪM հ~]֮lfs8t_&~ˆ LJWڷ iؠ1%c8t:&2{gdN.9`rS8z LN9&ۤ6T؛m\? @ڄ &oOFjc>G?hw'HffeqfΞiP?X4ZlNMYf]\vXRSppgm&O9*UBٴkZ֭ۊ ‚ֽr~7o{]*ǼwܐMoF1)'|䵖SW o/%,nޛk]z.}2c&^,Z82c bG.ӧwwj֬/{`fϘFho P4D7`̘:Nl<KHDZD&GQƿzX׬RViw9u}XWT=g^u^iO /]8d]HHm۵~au()ٳ%˾ޥQz^sVkÏL0LFmQZHKLL,t`*uVg?8\A !*2e|rdM9|> Pft`V\9?8B@q6GGF TY,ɲu!Zahӊ5J&2 A,[-rTbb">M=J|er^Dw.& qqpB6|(YYq#0Q5=JdD$ 1.mۚISռ8ΝZnR)vfꔂXXZP?-ZO^fGʯgŧ?㧵c =v?LMi߶ tg7;vCۻ*5k{'o9_"YpceG̯V|2#ʔ1${`ִBJOO7Y.jֲ>eV{7IQ~].`Tr;61%: '$Wiec蛥:;3o/s_cÆ -!8ߊ{_VVV]sssURxF}V5σ,븛pN|rY 31o1zp1Y Va(|2i W".>311Uk~ t̝=Ќ? ܽБҹ{/߆nL&geVV=&ٻV Am&\.8y+ :޽{W^u+W9ts{ S^f̜j\NOO/p"XhCطeܾM߁]zgGw[ƈaSW:PwBRG,IΜ9æM] #[uA fRV-3wL6nď6)f-._A=S:ko L}5kЮRТ2uDsr}'wbs;g^MpF&}rRhZyDB*U\|ٸawرd+WznWQjԨAPPy?B`~,YO]F,^8ߘyOVфI eQtgx!d9(([歛LZ|>| Mzn*߶~)L 15?@L֟8~U3ѦuK={EwDAxYW"Wr)m=z֔g''Q(,P(C~z:IIs+",zԮU /˗|1orn"zпob?mڶ⯭MxkBr* E^fYr~z۴СSf0st,;g999y~xskkkڷo˯ɓ8{,Ϙ(5%5.+V-G̙=0jn=zv{:@쭷%+Ke |>Ȭ.{ACrO\ƃP%Ҽ5kRfM38|q[zuۯC˾t2'M5YÝjU|6yRBn:eT1eRI|B7or;:[d;[\]V ;vܗ!黉&]u/=t vZV z?x搒juz3lX3Ç֮kx7&|>=y]:l2bc {ׂb zdy}1a LKXX:Ƹ_ۋչK'ZlVXInz|7z&7W@~oJX0d:g'goG?.)S'Rb^6m=%lu)J 6oَ<:rgn^ %JT1Ȩ(V`Q&1|ȣoAxq.]!bbbh4So!*. 
] ]222_>bsB,bQp'ɘ5{cU@!q>{ڵ&]7;;;ڶkß܎͞{hRկ]a,---2K@є̾$kUe[xPQS%,9{6}R)]v2w prv"9%i덈]_N v~#?uT`zY:V'Wd?QkXYY'ciiirL豮AX 7il2.M.Z=o|MmLPMciiiַ_5ۯkeeiR$^Ri8L*3Ji,W&ΕVjF ^GNk>6[[^*d4';;;*VhU <2niiɤ 6KRNE* P{/?fvT* 4˙YYZblm{qmmmH -YY*nߎB{/9T*ӏ>`'O޵[WvZ`}?Y}m}'W`P |NG||.2(e}YՊ*Vݳ-\!../cpNMʊw ,`> {{{㿝1rk/~ufy&Μ9#^h=t:wFӕ$b w"0AA0_~_-6f4JҾ];wzhR5y;]l)4jੂN*>U;ޤnZf9wzzcOffq]N2B[+W , 5k\MvqqaCpf \W/ǎ%66ZwWF}]=.]J JrU.]̍[$DJQHRtz&}\.7f?T*(PqdPJO5ISZ2 O`۵)qݦ&wdN|˹] d\WTv_qXZL jbaa%Ljڽ8'tzܼ2,;~.n[l-QAXFf&N䋙3D&/?sJeT @Jk扺(effrV.Q5TBM(SF<v?Z|]0AxMb Qא2 ^Ўo-gCl/#GDD999N:wh=Ƹ,'%->͚кeN_,BA>>eYR%&%uTRٸwjU#_@-hղ6֏[L>Xz׭/ZiɊ7.kԯ]KgggllɸR~E222vmc0999&is;Q("Jٻ͚2eT*Z*TZFI4jP;.Pz)~T\?wv@}d+X$NkD_R:m7Σr+Þ :-R CbXÓR* X>6lP ?ձGg?Yl/dž?ѯ@zA2$%%s"OO0IO@RT*y&HS6{v ;;WGN&N}+zR˗ܱJe\T!_Sa-|M tbH/N'ct +V@-b֌rڶnEV-ٳo?|)zh5s,PMr\^]jլa*]Y9 'R4ga. w#z,v? 2  yil%qHݐ;{ fh?Ԭ]_Q!QڡOCrZeKhIT*=c#_{___z>Gnn{Llq]B|K]c}~Xc pڴjϔ&ZE01x18o}գ26l,WoOĭYlwİ{6]3XMt|SR>eYZ'Hhټ6`:/:RtQǤ=~}g(墽q>Efçkg({rFV6ʉ?\>.Z ,M -b9d"&t#wX}t4#KgUr?ĄIiۻP=zv###.~RX/Q?==ic0j[KGO<&}an_P>:]fkf카iݒ:F{s ǎ`i|ھy(REsWPC'0iPutdx-Cޢ7!*[|7a -kdZa9dCE 7t8>69Fc=t.a>X}_AnE1 9/@C{zݸ_:O,?ZǤzmў;dOKh/#{h_lEMΊIhـ^CrhN/OI kl>Ц[aiiɇL$+3aC<29 : k8r_& EZ5iּAA##()Jռq..ZhU9ؘS.\}7jժaO\p4^=Szu )؋֚:v@hPIբ%=~o/Z*&E&@vv6`a:[6L6iS&bcA셼 t:!oAIβ H+@Ѣ7RhnD/N>3jtW]?"\Hh%Pڡ2LVwќ؁zZ' X٠j䍱tD-209?D&Y8 cbbyװ+*1c[ZFaiYtI=ѣgSrk~>rf͛Ҭ 6a2|9w%T*x&F B 1kF>?__ƍôjZ^JZDr]Z"$neDix#GZ6y꣮"}e꣮yWV.{_焕^X1hP.?ZA\lc|ϚJEP<4^C2A"ah ouj:6՟s"2;gZƾ`sWחݻl xUsWG$+ C^z"/ yfOTq>`RcWٙSA&gL6a5v1*N&(C͟.iĥ ;Q,dlVhOAs?~tΎ" _vr">9];wd@^X۔y$'5WGgԈa}& B Ý;ѴhfhW 'rT]NAY!R{TgԭSo4wUA(M%K.> ݼE*z㟢kOt>=ՌAdfa˷x%dTENhoAVuB{SL UkBβ^?3Cq#]fW1u ]1W59Y㛣bls/%~c~}miYPrss=-"#KxuԈGpkAGA,}^~Ց#Eb"w6W{ 'ڴ$X{26#r!H,eMKFx>Gx.;HeXR^::_%b[#2bN/ 21u sh*Sba#/abX*QV9yv-\w۠ޱ:.6d1F>1}l$*Xb7-#ׯ8?Ae>@iM䮛Ce".c*6 E ;fz/JfMӠ^]֬]U 2Zj0\:G{o{Ώݽ\ޫzu.N9A@v/[&10)'+Hfq;u&7*^m.'y$, : ߥZYw3}(ڔdnDm,_beMЎT&!AR'S^OSDޠ46}a`B(-[]LD^<9*77ӄAQT&SV:x/uD&ǢǛHPWN"а}-S gc˞^"EѪIq GJΝvr>eңG5nL.gals+))ӋÇ>qB2QJOШa}rACYkFy/S{MI w.3qIDOut[bx[Vy_6e p["769y&_JAJy.h/Bw 
9]|qY8檀ATOC^fV mEJˠ{HD'$c6C]|MXpG^]mcR]m$296Y{e&4quua_hP..Y1oϭdffzs0l7vĶ?g7*#H!%ŋa 7Zs {٧*Ouf +$.m嫢I%p2d[U<>Z^7ۦLZZ#/pjU=t/ 82zr9Ϣ`4{F5{8}E.6:׸>Nz 3yKoC@5RRNڄ P4J/ UD?RVDsO$29IV' Vo| J$ qtz@V|yod|Fΰ!8p0nʔipqqU4i?ߧ{x*^p?Ȟ}Vвy3j>uj*ZaPB 8Gf c-i;3ZBY!tiwù4j:$id_>iR69EYCzԭߓ}=搉zӐ2ڪR|6K'B)'uAJdª4$N6YhNъ):D/[wF4@{}V;gXW^^+#LѪqrގumEɤZYy}v&Kks[/硯E7F2h=*IDm6Q8a|qF9:W") Z6oFHLLd_ټ/oo//ׯCzu-d%V 9zGODJ}5ڵi]'/A Pvv`o'w@s7 ̦Q'\M }ZR2PD}}nYg#s"^c(uVn^ø^jmgWeI%lwAbrU*MDk4c#ӓ  :ד@b(Ӣǡ.nF&Ju5= ?8 `yt  )^A^v~ w>cϱƹK{2DyaLZ&""?s PHR*UH>dj.q1.ܾ}BAr~ק,~>t-z=qDF&"6QQ߼EDa ?_T̀~}Z%ĬA׃DwDA(˪5k{e|O#巯9]*B}PVo6-E@lv5nPmѦ%(㏲J|dG}$afvt᜶8|ˠUXh Էo2 nn]:%ABAԨ7@jZ/qe]?uF >>xY xҍ3$%'Ol\DGETڹC*QRElKnN^o>AJ9_.kT `<["XG>"%V͍S8=rAVmj:sұKo}k=1+qOdܗPi4"~߼EDĒj?‚2{䄣8::䄃ٚ? KJj*餦Ljji'& pvvƫ'>>(G@r(Wiz\%LJc0agLNN^sW|)]Ap1S]RA.P|t[fV1D?]^Nrr IIeTbckR %6X)eF&WLNlei\@&'|,:azFJ"+3̬,sPeVeIfV:0L舓#θRJzU ^^ezV LJ8+++>]f|>ɟ~c fqؖ'l VVXX(8}殎ddfV^,kkXt-ZMrJ I$''EzzYdefRIFF& wUfdd::222{rYYZbmm;vvXX>klll'G'J]KֳA ||6s&~%/EtL s-qoR#B P(xgX}Jeρ\.WW2wU"( pws3wUD&- x~}]%M+gsWG_A0;鵶\mne]AsYt9cz7ZȨ(.,&6.罏>6AA ՀN^5w]A(b%:3lkf/N""9{.?S&~BZ5:"1AA\mi׫sq.no1w}AȧlЀ~8rRr2~(5OfͰ+;pI}pss )s?d"{MܸQ͙/ H B B ɺ t>rA`W%bIZǕ˥W8{YǞouK пqXn߾c  Pr_j }-sW=k  (%ȗ*AAD&%J2{Ƞ/Ev\.giެqVL/ #CfUFdGfX:rh|h6Pꋕ[oǝ;&r9j$]MA^0^ϵ7H y^J666WBP~//+3f' '}6sܲJYݲ'FaPB>O/@LS&PV sWGxS4t4\j殎4 YYgEff&*̬,z=}zʰ4  JkklmZ5Jܗ]" L̤ET Pv?REˉ}u Uptt0wUUv򟖕":&8HLJ&))d&DRr &ܧP( Z4`ieBnqoQfdsQӓFA*YH$舓.θ\quq'...~9Щsq2"u (8:::AOH <&Qo˝{1`a''כjwp+k+VVX[`kDT"oMFf&,YlU*T*Njj*Qos1$&%uoUKKKxz]o//_r~{]_$:.:m$+  %EbRW]ƍpn޺ߺIFz&rww< r(G`|}JK~>  KBRqU.]B˄]VDRw7W|}\>&c^&J~>=#3H"ɱ'_IIIZIPP !+R"*V|@9s_R"AA&cd_>:/Lzz'O؉?q-KP`y:҆ E&% C*Q9]׸?7ReL5Su)\|D&  M᭤m[rs՜<}cNpq\BrH%5ϨC(u)OeNnѨQ}t:Qwz*/tw̜nnԭSzukӠ^]$A  %?^`-]殦 *;;OJEr }\S9Jnm۴ &63gv2 Ôi3Diߦ5/&kAA(vݻ]T2pP^: /3+]aǮ8q$rڵj루]֥tϗEOwxCt:._ʑ疭,[h٢9;CnۤzU  <JwWr|1z=Naڽ\Nu *PRJ#֭H=z]滕V;ѦuKl9xl?%55}ȹ殎wӭKb+3''wO}KʊGd殊P!ˈDo7ݺuA殢Ưn&&&5B75ԯ'R_7عk/.y hӪ%bpb9t`oLbb_Ι\!23|9!]:Rlcs\eaer,f zt &M:uksx-nݺeܶcc[S ԏ6m۰~W`'M6e~9r_DV)[֛ꡌ5>eM?s K]VGGGBCJ&OQQ|=&Nr9"qswcc 
lRH%^//Dbcq'oۚVjX<3j{wCè_ÇnT^TLyK폍|l9VV|x5gj fVӝ7^E.Yj s:%d~AAaӱ\Eۺu 4m2uBpp0W\uChҰg˖L1sUU`9k0dmEaAAKKK&Nd\ޜhczøn&A/Pј]aÇ>ZxyХkg*үO^:bbbtT\F=֥H!*ä r[ dҧӪEG+# Pj܊̝%Ѹ^*ҹsb;O`PrFF&z39;;ruuj)"""L”)iVkxʔ)c1CJvaT*-K]E&)ZΟMǎLl*ȣƵBž15Iְc.OdD$9T!f͚аQCd2GY(t:{c= ';'khܤ͚5?n <f~ay\DdNWxNlIII1 BB*!HL\%Ę$C~Z1m۵5wuA(eR)o}?ts4YV3g8~q ;;;"""Lsp(8d9.\dUa3P ,<*+/ u}@!cJZÛo/!>OsWW`,V1ؠV 3͚5)"_`]{c233M+ _~sߞ{ٳ{ɺ>t\[qp"_4YWbEڵmPR'9{:޽_ܿmmgM+Jiݦ%+V|7۷4sYF Y7lPPPA>Ĥ$eoGz5`1oS,ZCߛIS&X-,, ,OM^ŰKdgg l'ּyS<<<`-E,sv%mj^wTe\rT㲭-C;OƮ{;ge|߭DV׹tٷUzN&hDV,[bAX)}'do>&O٤BU|G?܎>|=Ú PgIJ.ؗ&}mC .{_/>}{{\kFzɻqw(pnjrtt4h4&߭(w>zrqNI۳{/={vI^)羹ǽtzVeg3@@Mtt4ǎgӦlj~y[[ݺafefA.\tw׮%tܑ&`eeeܖmr\aN>KttqYӧ=<899呟{%y=`Un݊Ysfbaa rH%f͜[okl zOb Z4kZӓ V˚5kdG\i)_>2^e 쌽=iiq^W ꕫ&˾~>{8880c+W8|1uաw^ jxoΐ_X%cL3{~}z=X}Xꉬ{.Yf\V(j23@xy(r>mx=mA"+uiq=}AѰnz7g/?f^X &}󚑞~>cVAJ`"^?/})]}{ Ԭ󍁘FO&>@W,[p`{ԢT -;D"J>|x^J !+S{xx>eiب! 6%%9:xP5lܸKzF:^Ջ*UB_8EP'Y#n@aaǟ|X\O&&&;{;6OP*̞z7nҘ:uk`B:{{|}}iݦvvv\v6r bH:s6oZ\Z\riҤ!͛7ŋiskyݷ+Pi2h@_[=ݺtU5(< sIXX^R G=I&J?/UT(<ŋiߡFaҍ!I T"=7ˮؼyk5oZ` Cvڅڵk:tظ ?3rbL^=yoQVBwE~u=zvNDGGsq{.];=zwޣ\z3?q$?YaO׵8ծS&M+*+\V-hѲ{7wNj031痜JmB0{ƔO'ٌT&X5\NNNnӊmm =֯[,tkn$1vg~ڴ2aSNg֬sMzzjeY[[Ӯ}~m#`8{,CIKM3keie2كaϖ[ޣ34KJNfLsICu_è#AP c7īW믿Ч_o<==s'ڸqTW r*׭غ/bbb kc^Eid&[f0N7brnߎ*y؂0W:7疿Яr[עUKvm\y&}zg7ҵK_"߯dy[o}ܺu$YBqջW$Ayu]UV˃3.=[n|u <֭]Ϝs֬Y^ UZaÇ>־ѹK':vz.i(;;Bǻ +!.[o#\zg/ԌbS'O|з_ocaO >.2Y4hkorx /ҹS)ջ1Xjԧub#6.{{{Eߵkg1`Xjvԁ7G{.!䢯=Q5nĶ;QT v'GGjx(GMsТE.GulԸ!OƎ>e\ ev}!J 5\[n5`5jԠfo4m֤Їa&!>=̢ IDAT5j(J:uHDDΞm&y{{Sz5p_*!8:9sAXf-|>~~~DDDGJJ \rx\u^O>͛7y_ݻL:N -[6'::hj֪ys5l''GV'BNMCV ]9{q]bb"S&O೩S(_>䘌 Ξʊ~(ܗW#Y'cW׮]IP(4yJhĒA4kޔ}ʤŜLY`GrhޏT*eЂ72%گ_~XXYB|\}쀮$J-k t:yj,[kכ}h4Zͳ5jPR% ~P(XyhӦ7o֭[޽֏h+;v=cx %/$Qг\Wzzz3gZM|-Ѝ[-Hdaoj :SN!Yz遲=.]5>>>|6eiiiФ'B!Gv6m祽(Qߋ53qc|(RF(3g0.߿jλiѬ5)))\nYN-<}׮^N\jL@@9$ e}rMsnc{'Oh7^ZhiwUVrrrDVd2oӧM;{3yDtl\BI ={Ua232 IH/^ #66/IcuQ*ԯ_ktك;wM^kcdddp㲥%.fa9׮3dPyw[ܴrG&xZ.y_r:&%aR)M6ᗟ5tك8ϫ%+?Njpk| trrrxO9vDmVVVX(gHbkgSd6)33㲳_̚ɰQzy7tXCd${ʕCX8ǎ7RUBO4GM3az?a) ZqH$ڳw:+V>uVRmII5vjz~ӣKbooL݄]\Jt:6'-Z6C;x٪^qʄƍmfrF8aa|c 
ۜ:u}{ӬyB錭.Xzڵ]^FJCkשС'u͵uVDU璝T*}.]}|h4Ɖ 4rpg[ŋ1V2y*RN^ 112eF*11ɓ>u{}g|(܀'1EZnq(׮/ٳgul۶ݤ[QV-ӷDTlڸ/.0I߬jr|$R[PRiyϦ,2@:ˤr/c9mݺ[˔d +[mløs k&X*קkJ~~'3f1.;;9x<+]zA^= 0?Q>SeU fɲoh4šh)Ud0ԪU Ξ/|޽{һw"ϵbE&q1ϯ:ԨQmOIF?bz~E/SWew1c.!x|oeffaee?6N`xh[ZΞ9ǭ[գ#FF=rZlps{O\\]]r}7\ ^eCau̙3dgg3iݺ'O(lk⯿r_dIֱ5Pnm>W^ڵka k?|ޏp~'^䬬,VRAҼEsf͜yt:fN&M`OH QR=j%w&8sW텑H$tލƍ3wdƾQ`_k2'booGZZhgʖ;{,>cΝ=Zaos([erT替 @"r wDs%llٽGpl u:iԩS, z(8.ի\}`'1~a&ƼftEٴ:L]'9z$MjqvΎV5&8HJJ2k_գg7N?nEk$-7Y7lsc+z׃c%]ux _'HF|A㵼zyyTZqㆡIFzB񱵵1y$J(sW\α'hݲ`:j勘oKۓ`jVgh4'zPj֪ifm]q8 wM"0dYqiI|ycNqib 0M>y&mؘ5䕎YoMƵ,JܐJq{q4%I`|>/fY3t /Tݻ3͢ ZhNnn.\Ա+(%ٰ'֭]޽&߼֬Y˱Ǎd_dwػgQQƬNNՠa0ц?q|0WխW1!Z®^120C`q#ܘQ)i\tK.DBff47[nܸY`~k^glop/I^gϸp!cG3}÷n2M.ތYYY?p I3F_ɹ~7Z7s۴Z-Xndz߁Bϡ.ܳZ{Ly1B&aaa&IOT PM24/yuM}^ 2|W>0Iۙu$[^KXٲevt7ӧރIU*W~ʒAZ2mZ45_лGwCR'1dը5̙={سg`ha#G=5cӡ};lmmtݳޡSǮ^!!!> 4v9^iߙnӺ+cUT|2S&OG^̘9`SܼYS8H>g o(PN@@9>hZveX.ӴicrYS8j֬ARb׮]c͌~mIr9λᴶQ,w^$ f玝&/ƆJ!p"=] LzF.69~RTWQQ B>4髯6j"V_ڵ???5nG>ylޢ߯tloyb\23iԸ)))gt }.z_t{s{轉%ta.q l68x@ckT*V9z=_F曯kNNvנ A+W!66b`lmm<ǨQ#sfSE_-Q4\0Rɴ{S)KBcF]EѪq>)ͱ7Rv?.О?J[d! g/)6|HH˔CXve6o{#G@.R8J"0oLx"'O2no޼j2@ |1wcӓo]iӹv0J.J|:B2vc1ƍ{^c”&4<^{pooNZDF9;;zE2o=_ɓO0$xQYo݊62e㽢J׮s"w|ZFaLΌi3_X}?xc+/ wioߡ]9K ?5.8~ЖB{{{vRdn߾SӦOkg~ɺ6SlVB͚59u/ɓ8| uxEDD'Y,L˖-Lߗ\GBa<`NN\/wnnBrB׫_^0aaa3ZZZҧo/Zu9`?}׬*2(S-[^־knmt xT&}YiN@sII)Hj"+ ,wlr7-EiH"6#+oMB>AzDdykgPF TbS =#~!NHJ>oD"a 4[7oV8^{#{K.X(")J/ (*|"Ei"JH{$I6!>sgM6{9CPP]_bccn``kO\\89i@K0LYeV*?n@Lt Iitlq|hD*"P Py璚JTTn%W;O L?{UQб=͚[)X'q܄7r`#`,[{ew܉ڽò*4RRS536nDcNCk⃱ryxxŒmg DBϞݙ9sz};c<^3edR05ǟ[`W(*__8? 
=T*eC[xi/*JU% kBE߼ e8-܃=hhb'"2if:l#{Op`zIW^rP5X}uVT;'A"MWfB_V\킕c5kբkm 2ctרΚr5vBN:6l(U|e^qL9]mҴ ˗MXt۷Cv?5{&3[ 4*=e˕c%?vՓ\]]iѲXpRi{O.۽4}S*]oooרFqFI Ytb2U7h/NJˤdFrWwXnmamH$TȾٳ3 r:(TH|~IWdZ AP `6#"44ުyU}Fs ;W#Mz3, kzR } ^ߴΝ1߂nه8}}D~6cHWnzL뼀XLWOky/]@KvD1/(vC5d6GcIM@uxNt_Dt?/ { ^Ϥ)3phjլ萄B.%5SfWfJEVH$ڴmCmX,`?__Q|y}9h5S#=doɗ_u]l9R)_Ezn3r+5{}ٳ;={vQA0hlg7 yԸq#7~#:: |}l"PD ΛX'&$$텇G;wm{d_SNbԬ1s3fN#'/]vҩS:u@bb1d|||9GҢe|wѕ=feK ֥- &>!Q { uQ = #qE恋'o -9DVb:݊JWCdh} YUQ KZ eQtxYZJX+ǙG~TDON@D"+SIXS= XR0#[|5i61.XҬs,`x`xCKL^4SAJ"愻˿!かd&# JuF"0T*E&K(DD1ml4j5+}{6ro1,z=2g'~q$ ^x{?wDo#/yzzR,iɤjctLt,Y6 _0^#c]o^ĒKTcQ Rt4FøG/'֤?jʬ_իWeԘ ̛ K(f3Û|bxc\$LAHT0An5ψ"ّH x>SBX tD&Gi0!1_:_a:IXb }$Lv6duڠ[1'W 8}uEʑA3o;Ґ d,%yhv!4GNNN̘2% sMATl еsG~\=ժ權(! ֱ};~ݲ#Ga2pT[ 77Q#?hglDXI6e[5ɲKe6c:wKJtP"AV:^Xi4UP;#2z` tҙ7Ơa=IIIOfQp4<~rM0w]֭Xv=^PP]\#i5kژ.8 ϡM)0}DFl9fAT>9f<#qB^eM # t6pGZ&Cb"Vؓh4݋^=?i/?ZjAM +G?9z~~t܉ݻ ~m<=yGq-H _Omʦ_߭Jv\*<ڲot7 +wĒShڪ-ϟCR%|`IKƢϰKr9LF$5| )}@X H4yy۱K/ZksTa7"V2 ץ^x{=ݺB?~pz=Z6Ν]\s,q|JT_<=OHxle-JŢpuuqtEJ͍=ض3NdhC~jrst7ñc\C1LFHmc~0}jT3Q*[ %%m% sfMX G]ԪYJCy+3gҺUB zV|Y8H*$j}z7/ Om  pCkffk)T> _9l۴lތ&::gְA}5i̞ͶMG׫KV-ǟٶX<]j ۶C]C.x~a-%!=V MN*RVMԪɄr=Ukpwr T^ի萋 Nǹs8qg]h׭ˇТYS[&Rkxϰ9>7B.V&M>ԭ{ԐQJOn FDDҭw_2222dne˔vtO%2*iT*e˨\`>b$v6[ q߭%b -Xa(<hÔDDU_By:zDZ˗l2 |BB'p%.^gr%$ +U^ԩS+9dԡm͙}0}9Pҥգki?7 oܖ{ԫ&``-Z2hv_ٽkOƎ~^}_Azѷ)@w+;ށxbgX,._¡G9|߭^K\\ZRKRLiʕ)MR%c$''su.^¥Kt QѨT*ʕ-CZjTZ GIsJ"0n(Fn]VCFѿˎ-׽jn Yxyy1dG^lZXmÆ vth9R<8W^}ibAwG(ܕU", Z2KSti} HΜ9ǙgEjj**wŃ)QE29KNIܸq7nrf8ׯ >!LFhHU*WEfTTҥJ=E$aϱR%CyWa`6wu׈1qt۶GT'ҥ2)f:ãT*45NZ-..xxIP` >>x{遏^^^xz,V?) 䉳g e5K`yXaK^y}_EjZCZ ZMHH B(cRRRHKO%f30N)j5w'V qHA*L&rvvqt$EC*" ϑˆwKp?0qqV;rC|#0AA(D"&vuqi]  B!'1AE$a  B!߰LF꠩{,$ g切{蝈^y}CWo-2޿75MZDI( kAFx,=}F$l6sҟBDs9G!d !\t! 
#DL+V0ACA 6N0e4/]+fI+%53gϘ: ¸'3 wGkXn]G##&iH2d~GH€͛= G:;;ے0'[9:$᮸x$UjG'Nš6nD@HĄ j(};;:AEr8  bh B#0AAxΉDL`I  "A(8/łTXAACb B { DXAAc⎘ ሂ PĈDLDzAA(DpiUh%s AȊ& E#&N^Hܺ vt(oDJOO+8y.ڶzt:/Hy Ν@\|D.\`mt2._!5- @$b ObX,"Фe[=եpX,w ZXH$h5ZprrB HOH 5%1LPFuթ-3gqQ._D"EVB JIZZ:;'`6Vԫ[F, 55=FV8|G ""L+wwwf3iid2mM,J/NZ5V R~<ߪնCA$i fPxݼy~%"2*+Qv-J HrGtt GrڷkKZ5ҞwDӦ_NWJ>ظ8?hۦ5 } Ic8y߶m'#CG:U~OGXu='{.8 L$b Crb13asO(Iعa<=<٣+.55-[qIڶnE- k^NNNޕozz:[ôhڔ;" s;\"/vꄋKņ~&**zJJ~B. BI# ˯'c6mvݷB Dz9 7^돛kެgX}nv~}_Fj~9Y$a|b%ujdy<"ýO|wM(D"&DFOv;w!s7j z5rޤDzoWd2֛Q*> IX0D0l B. B)c(_p+<&[hxԨ^'ٰX,,kN=-P* 2&1IDG8t̄S([4cFo /3N")99s?h4:t@TMK\ljPG"F$aTFFMFuxoS&6g>-``Y/WAoZ*39s?ˎ>-ܾsf0A4lP!1H$zvF-pTQ9!1!?XfXbvt(oDVt:LE~}_NNߏ>d/yOO`4:cvLMf{v|`6K<^^^L,_+W}z|ɧ-`҄q}}"-ի1dLIZZO ^ 3C {}+DVfKw!d2M5|d;L\M[Fw( c[ȏum_Dx)He]^==?/b`Kz7:t9ĩN@6P*CU2./'I,=:LyIy)mހurv.}c1pssc믲pRG.!}b%:XG(T85yuźd;@k򲭍)9Q8s]?~Wϱ{%7il"&%uL]z5<=]B. B t63o?Xz$* 8ҏƢ#> (^eҤ nopÝkvcH?MܽYb#+SI?nHa7./t5ҏƽ]:%LݍLu>~3ɖX?BrpaG6!\|ظxۜќH~^n_#I9X45р)N{Xڋ9~! K'VoYΝ{ۊLrl۾Ĥ$G6!DLى$XU xeBꞟqjmj' J}~c{Bb6o ȊVaX}kѧwcAkqj$G(ըJ[7ru=8$S7^7bΧ.BvJ58ӏAc87ضr~ő,Vm.xGM8M72=J ZMzOmWUc,{ ^gq9D"#HPЦuKms'Qrv뭆\mIx!./gq)> CLBCBHH'm;!wDL-.^yx1ѡBIXfz^;Gm]wRޜI?:>H0DrNضh|M=Gm]Zw}Qbʳ$J52O?QY/кe [y7zt{1GmLw2z '~Ǥ!Y*7EgZ~[S/!oDL$L&7nݢTɜP=xy4%Ř",>\ZW2R%CpZX,N9Kʕr^U:{{^m]1&!`HRjT'ruV8(msz/|j&+dS>0 ظ8z}8!ODLIG C4WUf Y7>ڼ ϳ-=GVܩ[pȹ/'OG՟Xeh%X0FeIZł)6oQ/m\ܱS*Yk` .XL} *|ؼigΜD"&dkͷ[(gQOu42?w50G# xҏehl㠳??@|wfI={Y#S; Ƹg{N9#Uٶqsu%99[(^5FB 2/) My?uliv"QkQdW~<| z  ` xzx丽)!Z^_$0u:}QDOŘIef]j˜!" 
CD8vC l#j+A]#c!*:ѧ2τ߾صdJG%QT@UArP>x@k0k#"R Rwb )[:t k9N´Z,ʽm 5p n]a*RgwuCSRvX_zի9 yH$b“^ɻ7 ޡ " sh|sX*C 24Um4U<ʧ>$GO䆦}*Q2+:Aȏ=**chҸaJ}e*#stGT*r|ADE=×D"&)):UIl8iJ ."Q@]!2xԴL&HJ~QW2C}$Tlqt3HIMŸӰtH5Bi㻣h dM$aT_qNsgp]:| nR)&yJסR춥Qp; sz2>ǭ[WxL% j u7[jN0|$1A̤8GڿXE\||ŅϞ;ϕ}QZMFF/# 0!?^C].1KƁ 2&q.=vKdsJMF<֜bMkیLo11LO\Шdgdڮ Hׇ) M͖~96QZ=H˴Ȕw?.# CUs[tYGX >/\;A)4ĭy6)> C,.m`JhH[ ,L1"[׾#9=&jFTMA'wj1t:̱'G*Ꮏ/Gà 7osܾsN eKNZT(_ɆIj]piQ s>eڔ=BI[>A1Kqn[Ú2ORv'rPl$m[:i]#_?o>@{4NNٗ.̜}'N8 <~_‘yKbâKGqqynj2%Z>yfz`w&,^ߝ"yD%#Zį߭[x=ɉ\\ܯaGҏ @ꞍDL]U`1=]?X *JUO^s^D)9hYfӽ;x=yH-`g8o+7^)&iP4duG `'I"繻}aXﲖaoP2^9@n6t f)^ݻ_ΪT`@gϝd^{jNEOWB7PWg8ž܇TAbNM"zzI? Eh%>T"C]n݇! bm~,0 mUMm>A[ `-tq|B>R}rV.hܼ(UX罍kxe,',U1%5{rx+[ |x  m>dwbKAqUGLX)U(!jP"N(ep@I=$mikQAځ(JIqODS9ʐھժ蹄ka0XԚw??LJ>pth If3۶^:juT*\2+k-v+W}N~} )T>,8#݂;o1 Q hkee0ы&RX{9ERVbZw M6Ƚpig.~(UȽT.}6J>yžǛظGIٻ;b1*( {D[A*罅8%G( &Si+V}JUZdߣ=?QQQd(gϝv{YUZvە%!ur=NںV}pn>5fGPٵK(T1ܶpg1 v)8Р7^'#CǷ+qUTsMpuX2Ґ>TUgEvI֭Z0{ƴ\R8cFGBB;$ o3+ɲf B̒.#}}¶oVt|tT㌺\M2} h5~s;fM)/Pfu]>IBGwT DD]> Oɼ($ml9zS)<ҥJ͙$Ҏœ#zSr#D$|$; w3r.m^ ~c\#Zc}-._j#jڸ*58E |F.͙und#7g3r֢J_;qwDb9{nsn~+rb$Cc|+pC,&c# g"|F. w>#njEF ;4 Zf)Hػo?ptX Bj5LIq[>x2s>qcFRJe 2h!!L5TTRNr{讝!nJqjMX0-3E s,-u :Ը 2oۖb_bГ,cܼe+:w[oJ ҕ+͙Bs6;avu85n[dc#cdnHPay`hx=zήnwA]mF3-;D iǪ,eobsCغ}G.IޱS\>m5w̥R|^XtS"`yQDyBuqwAwT[Ldޟen=+{. 
-~ 4J\SsK\|oWdxyyn2.#1tNbJͷxZhF/0}LȡU*sL55*Yyb=ҏeKLqd\Z,.np}qI侸4F UkqnMD~2Uxk-}d<)EP"sFMʞh6Bw(4՛emͺكSv8&fm(3^'ŭʥKuRVq8s^KҶU` si֍;|*r`~[[vϓOȜܐ:!*E¯=ڸ^X%_ao .sEO7}GQɻ!Uiym9{1_FYq>F]&r֪NͺK%0ܺY|_dϔ@v]WEH cDZ#պ,1vLΪ E=wrY~ JV.9oBмmܼm}/[M{Ϸ:*EyJWs42NCҦ: 4՛b@r[9BM.ӬIcv5} :7AºODTûH; mN(J0m1K>Gf336 {IWL!u85G?9R3R$n\XZa,Gv£x ;bӧQ(ʔ.6¿]Igno/iv"պ*E(C+mv1FRq<|mYRr vwF(:wW9vw"?OYXQ~۶%\&RvgQ*I߶Ѿ][O_ ^+=-|G/ͲDpi2DZde]n;}Դ'cvDGG=ODE]&rܽSuV8߮X~ ͚b6ٺmS!QO* TdT_,cF*tEezS!sA]f_ _(l'T*UCG B}sӮ%¿ѢYyo~cĹE/O,{bngWnleS;~穎 oKەʢl7ƶ;MTt4[G=;b!2*s^|7o=zB` |[9:瞿G"F7oQb{sArܽGǐGQV-$,X(NM!Qpn3ĭ9KD~w~)/5kTgyOo["aXK"""ӻg:y?~XI!ϛwoWb*xCc9w+VJ5[KK{O_TĹR%HxwP6Ys>eba݆y3MF6x}Sg|{wȗMns̜6!w)F#ٙF 0l`sΜ=˦_pv8+VΝDҟCNNN̜:ϿXr|ObW(^\/p9Ѡ~=tDE[9q_,44nD-ɞ #$b_ŒQ7{X_?iqQ}xd_ӗN;uKm4jy$잮]:Qr%&OIݨ_3לyK-Zu6P3?Ѭig4܉`鲯iP'׭YO>}-[09RAըQ` qqvWﯯd's=vmA(_OPIJrҵK'yruqiF4m{fGV ztU8 )e2+GŪ*I+W)YcRGw~_@]!q"yh#eB+!s)>CuI3߸@ھHc-o~dU^oQ;th[aR<89q/L8W^L$&%u$&%?Z;wѻggBjUPjn޼狖R8wcsiFTRoVd2zWʓUk ai(JG| \= x\_DPжu+ڶn#G$:V-; Oȳ8@üd6\tG˳w*G lpRz잣DLqr@kCUL Q(KVFs4%DcNK@T!q bS0[3{ Qs:^=痫Gӱs^x<ɾQv9YHOOg8z87oSTI:wh,ԅXr>-_k69Ul\[sRfMUy{?ycOB5iۺ5Z/M(~ۺ'O;o;g,Z%C LR> Y: X *ri3\(? 
ʖjy);q'N r VT&#-54qu^tjZ2jpؗ$5-Or J\.ɉT=*+QF\ٻwfܘ9W{L&8 h=ƩN 45fR`h4a5kTFj"H_~-:4tf۵az>- H.Z,=}F3l㱙4uS&N=҉[9Pu>0FA}︻LJj*&O~iOpWl\u=/ǿ{1f{ᇆc(U"5de6n߉}z / &ƺӢhX(1/o~?\]\ \vIƌB6{(x>̗:uhϦ_7jٱWܟ ?NZH'.fD|CUc?cm!cJ>)OG /g} EPInآ ~Xڴ:qzWw(;  ./Gw r?ec wCXRQBƩ51HRF=E: BpR>4W}VHj,fGwccۯvH*v#nt@.'3I?7}E/d EMM>eB2!IݿѡB)IXeߔCƙ׺Q"EU2bГ{n[4U5h*GuAO[yevT턦ZT*UM&HNNj5tB%>>s.вy`6~mݶm.WR22G'yJ  TL Sd(r"埽TB0&רJUA uPWͮ^罅ބ Rvg.Bնu+?IbRO BM*V3g3mOg?[C"urţl_?%# M/k5@wSťAڡmk6A N*:sYϓN85y߻G6j$b_bǶM:s?>96+EY߅P?sZa=C J%n yEs"n[%Tekp{܋DsjWΝ@e}*!ǒSHHHdhc=DU3p',bS}HJu#<}PdF "Qi(o<%irLz)c_4n(9۶mtl =pi26b֥y\sI@ؾs#N 9w]9j*s&&?`4ӒX'y_h9AxPbovvǹK$][LSqj"yZǩ7XDb?b#Q?OyJNݍ!Jf3oSx pԬQj]P}w= ԮІb b#yXɂVA&s(ϻDZ X~EY `,]~lwM:9th{i8x>ձ(1rO?$j-R'WLQ;Ƚ:6S|K'Pf0T*U(@dn# z;:Ա=lBTTmL f1Hޱ oq5}(q}+_O©Qg]2M7 RE=Spn>ѧOr#ԭS;-Fװ $o(rbBƙI}  du tB)5ݵ3$mݥ~2}ԭS͚:t sȱ7&c} sZ21K!urCswR}wƹM_ w8gۗŠU7ȕf(2$돐hqi1թ]#GѪesGN6as 5m܈  Eb{1ǐq2^}7 m6hk4ãxLIq$Srd.A2baU Ctgby 1ʊsȿ)Ikl=_"QOt :0{$j-){~"rTc֫mn!⺭Ϡy[QƝ]l_ ;ڪ7_c餦C!O8;9hAtM+qgz Vk{='m[ݕH5MF8M87[JXtĭsQV"85dOLz;(\+y= Bx{{1sdjUҥS|!11 ӤqCڴjS"Uer;^2-%f9=b`"r.Orkq6`pr&ƨp:95cDjM Z;3?l)F# B^pvv~e dȽlk{B]Y T!"[Û._W>¹i7t05sQ*k>ES̖e?95S F_UZYe6 `Z]$mwH„"P&ajM`ӯ[4mÆ /_ap χ""" rq1ױmJU=߸@Ou%k%Snoe9oQ2w> .2uCy_ WPWih&Avmu㣐y @lj?/O QH?=3J;\;GٶNP-@]tCvJ.+0rY&LB\\̚!0RByN=i{/_5oGG].mfۇuAUu4՛aJG7azR .CcS)5WWAh#HXֹfO=3gQ\:e H]۾ ._{s??&NGMY ,\̵\[7瞿0i Gcℱ vZ9zS\I "hD&߼/ԪY݁gQN7kGǐk*/&p(^<ڵjRbꜭ ɱ'9q$:F 3mDʂ> < kL&d2mנiHJnYû6hۧk4aa|h6:^Ҕ4ZzM݇'f(b)1fy쑣Ǚ8aO ސ߄H$CDbpvvBLPid2 xxgFQNM^$E$SlekN ;_"gNԼh$vY<^{7g B~{.xp0]tmu Gn'CkݪS {aDLsQW E iv"EWSu0Cd#o`1Q|w,ffTJKoJlH.G/_ocyz9p*4@"27/XLF$29k^?0mDGB 2 A( d֡R8;Ln-S2 sjO wuW$X)85~د'+W}Loq4$Tj/AIsQM>u4!vه$o_E$>KMM͛T(_ѧPU*s%RέۘRH޾:60۰emkO1l&mo+ e=?v*U (W%7<]'A(D!>{P uN[/ÔDĭA=^VKbGSll}O[fsĜM Hari} $RbI/!sw[R'Z/}W*³%V}#(FS% [bp\<q)a=B5©i7+E۰#^ՉY:nC ~=!!i7l6vԳO ?Q'hԾܤ}Gw=lNeO~1 ŢKGށVAfAOx e{= ]+[ѧN*+uN_HrePw%lL)8mmܺ1puԽ1 CU -mk7g~cDZ)S?@ļQA  IM2}&aa )x͍WՇZq̚6q'K)ә9m2* WڵQVBu7)TmUe'jRYB=cf=͹K]zO1`x"~BG"B2f,r3-.)F7p<(X,|8@qrrb̝CVRr2_,#qt( 
6RݕS.XA(@Z>Ys>E=u?R&Q|9G&HXGҶ]:p|RlԪŗ_P2`OCA\" sL1fo(BpW¦ګ$wB>6wb48:4!{ ,fC`ټ9p9[C!$LG*W ^SX ̥tɒtѧF+^XiAʓvI. .Vٟh4:4h&N7^LR>5cNܚ$lѡB9AxeJ3̞;^ݻQv|Vm~JJޡk,ER8:a} GjpOGSr1!ڮ}чCvs.tm:&̺VIUZvu)R:v#IDAThG``Nʈ13õKֻj+gRk}Cl'jXyUQg7Y(0Q%JQjUvToSz \]=c2;A#b82%štcmG"N&1d@:ulϬ9sy3bw}8 NP$`hm9Ɵ_~>M&i5@wŦ,V;m˴FsqkÙ)pwsc܇P=^g݆ӹع#!0A L3h4h5g؎/_, H$~I OM$a9&[ukW]1=?]N0[BXbm#Lc׏ϰ9vxk} Ķ/ e$n_E‘vLZKkQ}I*_Thc{~*Q6<>۞OgQMRWaڇ>;2®L"Y3F\GqFꔹ2;Sb 'GUMToj+S5m><l6DB3iެI==2-vsz R׬|X?‘,  lc. @ Of͠Ev==Ã%uWO /{;d+(TO|MA GLIMeػd|BC2?wCA@@ce2$w dw`ҎƔhMu"|zʾ-X87n7̲~eF}0Ĥ,0/Ӕd'NYvߟgQQccb|( ؐ%`JA{ج9+/?vĝi5v>ĵkɫ3e"fY?e];ܙKܼp9Pϻu؎{$LA([k."RRRxm#nj#)9r_2b!Qcj]les*pO8{\)b)l?RUlf|hqZjHAȒB`GxM&>,){|*,ji6&ta(KUlV,FAw';7Fz߸DEYܾ gX|uAz>K KR)\]Y2X,kݔy4ضIZ̺tv]R}'LD&uuDEXрlBXݱdqdubbm ^ExJ'δ֔&uXt]p0w`FYjCVkB϶(VߠRC;0$*[uv.MzdG}/q"{f5!QiiJ;wCŠҎF3RAGڑ?lk;u(kڑ?P[V wkB9og$,bYPTx;: A+b#ut,mVTT)u!+lwioI;VUvJueehk9N>o0򻋝>wwymyW$TylfO?Rn]ByE`(k=y>mf;9>(r߆6ҽkrnÏ !8y0 jUortEl&"2DFmW(CAU+O8:ya}G7D/S\  7…8!bffrƍ,0SR2׬d.话ŢϰtwБ|<=8?矽D&Ḙ#O PHlu3Çi^e6TWh1ґ$ `ᑳMɴo⥬Yb)]$3LrtgVإ떭:}6O/T|(sBB'NKѼiZ4oj[FI3pi=Pmu3znی?aێ9iOb*6l9OC^^^Y.!y!6.')).:ҧtwwy&4ob܇Y:=wE._kA(ħUJJ qrra)u-w{0x/ggJCDx:"r@뜲.27k!k}5<=yyz"/Q#k0G2 ^kaz?GQ*kۆvm۰!NHNhִɳw.+7&}8($E" uV>l(pw F2{T.._ζM6ib~%H?ct8r v>CuT1J$R0;mwqnӮ0).?k6Xg[cqDFFql_khHsģ/# sdM=W^cɗݫנ^]թͺ ?19pŜB_ G!J |7_-BTPv¹zSXH=n{dܶM)!b2bJ/7{p8i,Y8ڵjf?DqZ//)B}|:L@l~T*KR/&M}*AU$5_OLBB;4DW\9o m+//>5c]Z"zX|sycѥb:oڪJV"俘w M(C*>:@ eڅ9#ډ@di4~)ÑlCJgْ//9& s<TG?pX !!L<ٟΥsԫ[;Ae޶XŹYwG#EFRr-If;nwDMd\:1{4[#pkT{AT#_H\ Yu ;Iۿ#b@TekؖXN:}*2iG@TAPj61_ODS)h*72t["&0Ax>-] Atѡ֊2[DLGrb13asO9& s8^BB.f#h͛Ii1[1D\ǹi7˴Hs?{ a(KT{dA"i"n]TIۿÜBL eLٳk|e, IMK㓹yw[bY9w̋;9:;f?GTT;A%E" [v(̑Qpsx32.'S(BP>4m҈L]琴;ށxB(IXHH BB+`*iH*G"^^^ta})_,~%GڵmÇҦU'+DJ(̑wwwGQDFEa0ҁG'BptEN#:&a8*R!¦[>pQ}~ZÆ vt( qrLD^Aeo0GDawq(p}e˔vtEƉDa,9AթRY.)]Uk֒VpA(¤DRzgB  ٺc'/m0Jڲ]C"H GlXӧLttNB޻IMMutNaS(]L̂(̑IIh5B{'^L2n/ 8NHDahѬCԡ Ѡ^];CB!>Rpsuut8 QE" 0͎J$H$GQ}aI313 Rc'xgؐ7''~o̩@V:8rZ4wt(]vE8:A7E" +FӥsGV0Q#k0GJOOgނY:ڽ0 4r?Cu2NDA[R}xSR,bԣ*Wep,&#GvaNI@Ym{N/W_lIX*S 
UjCUHAAxv$;"s͛b_ܧ'p^Xzj'LI?A=Gx` c(C+yt+IЉ9d 9# sdM_jZ|&fA~~j-))бŠ#j0w*Lqz& )DL釺ZcATq2{(xJ]< CAp8yܪXLF4 LQC($LJjJ*ιeڋ1.._#y`6/c>R 99w 5E"&#c7AGmfX0IHRG(f,bd29:G"P)җ>UmecHys*Xe* Շ^ToOcAH G~&&Nqtΰ!hܨ(R~Xkaa7fry8*RSS:,wznEL@;X{];vmƺ݅؁t 5ct``.~g9qL'i֤ t24))r.yRI( !LƵurHgW 9ZكkiD01j4^&;E`0qCʫ0l5Gזp;tQ ğ=\}a:9Rq\X[3 B C;!fHjCt O By5FHy z,?~|aqrJ-8LJBH)V*D1"&&&L'T+5ɄC(\bL'`:Ebccajj㢶 ¤hD_ 4@X.X|!3#*x 8j㴼~ݙRTa'O2"k玘3s:(UV /^f:E["#Oa L'^ ;oJ8_o<|-[lDe ZS[1eٕC `ᏳjlG6 X<΄a''"~ A?*>~OBsOPţI* !5559CF6&]FähY<>LYəYO+OH$S $\ANm_,7d9NNL'=SNF㕊J" =5073c: łlv$7oAzN!+o>ʖEeN!05>xyЮ- Xpؿ/Iɵ+׮c9L'Rʕ B!ǣz5;pII0/>C?Cʕjcz|3`:)f+$BH.x wÈCNJrL=K̃@@ !̢q !kժVA\|<>|tRrhۺ`"!5ar{ċ lƟ X,3_X)j 3y1GMP(pqbC7{I!+ tNѼYp7)xo޾D"Ł=.aehؠ-I8zp_{  F AP_PIpIkv޾{GaL1R,; L'BB"YrGU 6<=wQ{=UcG1R¼}΂X,U7o'c: CrM[EԬm_ a֨III+0aܯB|Ua@ X<5qU(T&N5S]T(T&N,WEeh[] ]JMM¥1|`8)tr!D #$BHu VXj-d2YOQ3\5$ d()L'BCA!B DТY̚aaIcXz OLX1QhanL'BC5B!ԬᅲNXn#k@vm eDl۱Xx!\z!]% zZ?vB6cc88أzбN.)6n؄eZ𡯯GxxVG۶mtrI)pq<ۡrJhӦ5\䃅/`ѭ |kZΜ? n>!da)))׸->>߾~Ãώe05p":L@З ܽ{۶ èT&n%D-Xϟ?{{?ޣϘJ $X,Zl&8z$7j&ȰQ8}?|vmZcLgBr cbtZ T*M[`oo:0B J-AnN!P(8|LL1nXC9>>zWnq6V, 5> Zaoo6c`LаLLMMҕQBBbcb7?Xu3*m4lM!Q$»p%2o^ ,22J؀g/J!6.NljbC#Le5#[[p\ %%[YYj ʹY;)))8'C#CS*U떐w鞃ѣTϜbv}=}T"ERrjC("99QQZamm>H2objOl `X0222qqYcbj  DtyI 0>IrqfѤQC4iPlp#)) zzzOD?ۊ\Q}{,~f+%IUѽ{Wrհtry~45k{~qc†S-Ϟ; =ztþyu:1}L<~DsxLLt ڵ\KqQ:6mZe#tq^6{s%XXX`׿}ۡy0UJT\ILz`3>|T"U.\7Tm?|*p ji8r(֮Y55fϜ۷֟>sBkߴDe]vE6-3S'OcYޛs6jX͛7aCLWW 6XܦM+1ZoPlYuw~UkDMU˷n 3ΝS>hԸ! 
kkkJ===ԩ:|UR)Сs$cN8;;jM)qJg P(7ROaYx$%%b׿ѻg_DG0Qc2P( Ix6H$}{g*Θ6} uwa̹:;?au:Fi%Ԛ#X,J5jxaZZZ`XAwٽzLDqqXpOHIgذ ,Y3B MmD"Q[׬y9=MHS'Oϙ3r cJd| vڣŻ| Z}5оC;uL>:cDZd2Wbb},@2 Wm]tͺgLT*CGTg͞䖓V\T\r{vUɫW;_c3 ]vV[gay`3[8y/ZZu`w7Ҵ`F89&NSSzjMtC `箿!Cl߶C8vz؎:b#YZj.ÄpmL2]u?Gxx85K$#% ܹxE"|֤Cєzz\2G5ȤRٳOZ_affkx\[nW3Q"($k/]ƧOSBBBT۬ˈaL'1Lzyy;U:5344Ջ*GݱUë 66VLߊb)~< _|A@G6===L8$VIOOO}L:t @v9GMSS/qHHaBrr9xvw*9~~Ν0~X_ދ^{b&O tM5l6B.>~?eZߺM+L4L'1zwRՆ=y6MAjB7,BBB5z[SLVб=֭ݠjGԤL@sbB!7xzg($b V /~P }˜cc#հߑQ%46:} ^<Zvq)Afy{w#,,mrʪ N ֍7aeeۉqaai=GTT&]/\B``js]>o<Ǿ}0mb;5*V | S ğʔ)t !/kG5|wA.#%%wAjj*f͞t2Rm8A?U>Zm[a޽1Pm`=35jz"%%xp!r9{aL'3,-,՚k 2մa™g{diuAe 89;޿{ޫ̴aW._yڶkӁ!,%6S2T|UL6αc1Z*\fI:C>nP/~3J|u9N< U8pdADsuM6nPZU̙7[+ 4D5x{d:y dhhBCURS3B)JlQM/_P#bEDD-iۧ&ʗ/Z.[6/..x"Zb5i\.yas1h{Y˻sބiBhR0V uOmqCcc͓|CjTˆ9QXB!|$\R(JM( ?М 5I'8)2>Rjj,,,i&C@b'O|[7oe++K ,<<\on>Ie299Ym`DEaFZBhժn <Oڵh֬ cy-,-G=Lr8e,9s1)CdmppڲcGժ] gL9kשWWWٽWyqIЏѼRɓE%Ā%(}P ɋR9Y*u}}}տMԿ~ϰTĔ\8QVuj8[ʕ+ڵT#ޑ%9ں $e(aDiy* /_U5ajժHmOq jsY~@LOSzlffX>^<l@Fuuuo't۶ Q(g%Rڔ L&aÆ!::Zm߮jێ= ** nQ+_Qm] &ܥjC={d ۷o[ZPڶS'OdBBnܸ~;3É|ĚUkֵk.gݻwS-YU3F s釩/Z>U"K$R\pI؊̔Ɉ̟Pm]I`Xܹ x>@mϟ ..BH:8(ږe:-Cr*2)BBB2}\2\+^M6 S0ݾu}zKrx TVVvvis:Ǐjw>*U:s yxVLjd703Fx}Vjhܸp)~PoY4``ۻ_mFi^|F TLQȾ>>W5=ó:^x}[G7oު˗hݪƑ]`8v:++KM% ~< "Q4J2 ÃGKH$L1bPpy<$'S\rqqB333wMRq1W_-6l.hӶ;{^ݻwxڱ, ƥ ]|`A,_R-0͛`}p?(=|iWK?3Q=ȴW,˲3x\mI lllЦm:yrC,2"R–üs~NNhߡo޼7o2?fտO.Gvm2m1b8:tlݺuAij-wIIIH%@T;n::9>=ՂԣG!KP>X2b݆MXjw)% aY#׵hִ1fϘS'#AiȱWR===! ^^^Yba``Mm[qU~TJ*ܹcѵ*W|>иq#f̄BskA9*Vh?UZEmr.i+WƎt5jg`ee +>6nZ[7oҥ+9I011EjUѱSGeN0fxNQNmxt1+k+UrWNe1a8׻ ~\ByY 3ssTZZqxBt cU6d>ZYZP>ڷoeҗMdf`ff+++Zj-~:VQ"mТe zt} 1a8Iz}@pLN !VBHHO *Izf^wruܚ{̝3,ݻ6e&ܾÞ})aw09|aRظi wZ>?ad2?x_c_!H+15aBaWp̛=P'p8߷7>~ Ĝ0n(+[A!>aBэ_0{ƴB +R ƶw'LBш+ \KpmN!B-5:-5bt\3+p-6'1}},7KWOB2 fկ+N!BR-! 
N€k]6Swsfc)Xt Qr%o!PsDB!.:hnf ʝӿCPHRsya٘1u2Zi$&9cX#O+%\qX(ˤ<AdsW.Y,6$:\QYpi1\>,#3akdQEj2'A5վ1@iPS*"$$l (ޞtݰ cF:a& 2\'W!m685`q0&r2d_O'#U4 bEpx,n5׭Ϡs9K+)4QχIpm?GգH`u霣}6$t ۄq9cFt`9;9v aaѾkރ񮑣&wX\ Q "р\qmBq[%w7 )){G!loQ7b@ Iݏ`4e8bPgaaÇ25ב073vaCr1V-h>ԔK Oi2*Gn]NnfIfôc:s:x.0l fv$?Gt6Tu{-pBg۸{ȰIhX^ճ0!'Aаd762fONTŒӧduJJؾ-,MeS&oRiѺUlAIչxBX~OX?kˁ|$|}¿qNkӔ&jm2gaaÇ0bM Iڛ@l ۷E /Os\>g''y 63|$"#ӺSvYe;Y6:x@.X<=xujXidkf Pl}#bqbka"AYаl!>!!g&UlD0d?xS&M(q54LhܨGeKlSϬj5i0DÇ.\V>zbL덍0nhW8;9o/ɿAOO$'~OSFWHR!Kk[z^JH(w!VWb•%ĤO!5KÖYVOA*n=L'BSl0:h ԛ~D͘NZ1}dBuAL'M*GhB!~Ba>[I~0Wݼ=L'B!>O˜(;EϞ10_aQ#p5DDDF vM͛5A7@.K@Ѿm>{G$# q2cCBXGۻTWHy)l f~ڴl=CX\ md-_;[[}gvBJbky(X L5c$*BKGqt@H9h2L'RHos7wqV^8q,1ފ"TDS$D!(W%}Fja%B7&\2Zt|rbjnB)h\( l,A{H\.GJrNXϳ/bDwLw_2LUV% )䊴g[eJRbR"ej$Ng:-B)RRR nOX]<\`:QM!I^B!9""9%%,~YA OJ@̮H g:RvB3}DlӫJ>0lC3TDH"m?l񗷈?2 ׇAH~~)n|"5.a蝋_5HITKo+ҘF #00B!䈑!D1 @H~t JɞNn~·Q (_ l3D}ߖH}G6aTBwoebpLTAX EHy\Kp/[ rp89I.cb ;N!0B!B il8֪uaj9_PH%_u,\_~)f00 ffh788)BR?>G Z3B M  >|DpttłD*Ê?W3*j L&÷`z, ))saog["NMMśSŰB R+Ta:D(|_ rxʖ[E¢[|O ,ϢK9TrwZ@}^z cH%>P N"..&&&Y'KA°q7;*\L ,^yJ BHb\|w=!sss3U[Eԩ  T}+׮#..]ЮmkX[?(w>{?}W hۺK$HJL·x 4n ף,} ƙs7|yTZ[7mP(Drr}N=$UDV-aiitu7Z4k͚6s U*3/1) 7mfL=O/S?kˢA\MBS F2VV>@ @V-Ѻe \vSgƘQ# ,JTlڲ q0629VU*#<"w섃= Wb_}},X><'74脸cC-5@V믨oب+CArj4R6U(,߼}e6BJ"¥ؽw?͞|\.E1ob|,2.22 gE1g|wRZKƼKt5X~#>~ƲE eTv-,Y83O3o!t#kfLjU*c칈b:*5wlmm0g<`X,4kfKLgHe0<`Y[Ya)XgEtL Y,0۶3e[F{#(R='#@ q;p!Or@&U<_i|6i-$rE2;|>~Ɯa``P.E`?;Kʘ_a?0yxԩ]+'L]Я7[hF"`eI n&F=ρky=7&OW۷`X]ÇFh``f 7w o޾6o93Qɽ`lS&EK#8$/Oz&ۏm&]@{#ُ칖 eHzptX\y<JnnLB|gwT`:)"=w1q9|ήsfaCF,44 k7fτNRLV >!,Pր-Y];wi;ͭ6otK7oX0w¼3jzDDD2D,^'GYg']gA+nK}X8oFδY3u,:ѷwO?p(}jCkUV{? @ ĝ؂+'4à_x.V }ߖ_ȍäXpӉޱ_zWė,"˱a hewL'BÙ?|7oq-L;ZpPV-\.5_Œpr̞1&&?a PkmD&g;ڼejת:Vy߂Q+. ~f̛=C翫5hҨQS(XdFZ(TV/^ǃ}7)..^ygB(B5UhڸVAW`gk s33LE+l" up\r*{àV+pL16Q˾,6 01&]VЯzCz}*kf k> ʡ#GQɄ"("mRSS1c<,Y8Ю;xgLc+D.Qݻ߼/Æ0?eX^we>dʔqd,ŝB91n(ux7oa1-5-kJR̜;sgw?fEeؐ,G,hϞ[w0aܯLg?Dh ,>Nڻb:)5Gn ߷P0([x ӷ@=M`?DTt ?f$! 
JQo`p%** o߽+ P\4flݾP˴+WzGjZtÇƟk2𿿶Ҽ`"Z#rۜ-bbbժ2r~{c0( ?tzbF ;w1rOkNebbU*G佸H$~&ڵi ޹ Çf$NN`X6Ls.K\ؐسd R NJ&Xl%ƍ~lEjPHČ%ݓ<5<=ciеn^:M&lld{{{Pוx)j38IwNpƮ_w"ڵnXB}}=Tp-/_ZQѐH$*"?} PR\|-7el.}}=TPČ۾m`=L'EE,c|bly:LLL t^n߼y R^?D`B-3<az{ίä(XZZ q)z;y:LTtM7y =<4<{'QVLğۅ+F bS$a?w'}00¨YoğS%'!UȓE}S^=kj[H~v A-J](RU*Wr7o%V[gyC٣VBzUĻJNIQv>-9Ǎ!F¥YwHBU*R! B&,;$!B.BI@$ UY텹ky|Tˏv-.`y zkr\YB 2j_yR)ImEmX/öJx򚘔##*\2 n>Be /@IgC;?E%dz@꧗Hy7O͚x_gy- >~qpXuN^^D\ܠlSʖ,!F-%Q ^TmU*U«o%ʧf Lm"o܌3+듒~f('W(0]h1ًVM6Oz TvE{}BwFĚqX-M'DjLyX_`[ =Q(r604\9A_j{PD^/}4oBһ"jl8uǗn`X`yҊWߠJʚ7XlY|"~zH}6iȔv8@|{[DnM)"-;S |Uʾ`իUŋ;i.p F*?N'\؃!sQ:^"|a3:Oghw"%WQ&,k4Q/)v &B:+VB&AjL}*[[#xlc##zTV(} ..iS]K*Xt9Zl_G`3!ABȲct{ݲs(>Os(w+`pvv/_%.::ffZKcc`t_}"Swt2;~JPH$k=y,g =*|eglYL8Iٕ\VBRè,կN_bB݈أ戊*Kcl^#]a3}̶9A!IEP&n_TFvWeM%s-9O!sW~`kE⭓:cQ T$ `g==s#;PmS'% dz'(RTОbmVX`޽Ysq3XtL ݳs,9V,]JLgBrka8R f$F=Kzt rQ,R?@۪?rcf "h']qHLLVIG_!vt%G|ee?@/嫁o6@`Р3= DEGRmB&Q($?e*°y.[2/3)#6s 3D¢P_iz6 ~z9=ϓf'|'7XOߎB`ظN >PAIc1l>L{N42a xR+8zq̭! U JUFq'Bآ72oK (T*Y~9o1b Ig|_<[g:K,lFzWխZ'K JsH%N8{[[ !%O!{ђ1Brr2B!ӷiZclaZϐyASH` w!Cìk v/DW.?Fc*9O?R#ZV=L$#Rd'iHIDg uIZAt OJGpbb"ug&eٕʲJwCS60J>s33i< )+=7)&W[HkyJj}^H#!O_é#9։^:vyB4X55JWq ҨPp SN=g]3BJ  12[!a-Y~14U#%,OZk$A=vtp@pH",B}(ie20 q ؂Movdռolc娪?d RH`TS!ILt@TT,-|.BJ6|4)y# elmu{ݳ`;gS`sViḾU`qnZ[Rlm]vɏaeI~P@\ICA%| r %Ne (H{HI82a5:uhZo$],*/|\|6jQ[fA5R^B.E#|_ ¸ @d _zW,: 86x6[R<==¤~zF f UrMOp-a0 S.Tl8Z:'? dNj٢P2:O(Rx,q01`ش'b%C t`Բ/e-LI'b#T=? 5{&G$Ao}>Y\,ΣyP~OaogZw Pĝ- @95@.YI7HIAPȤ)|=F졵;X?#?$ pG} v-X}9iD΂<% OclCF!tf71 K?/rR(12p:[un]Cxw$>LwVBa U /Ox6tґ[B!1^Y:X\; FT'a>hj$HB?CP7[hALOAY9 ATU6idbtqaZCϳ!]iq%HKdx4mL^@Ī1;?j i'o!;!߰ D!we3D_9 +G!V8\pg }a0b!,*hn+[v\ ;\"Q$= AدAyD객ҐO?zrr3kfyl*G҃P$' !V_nݹk1]Ox$ j@7" Umaڈw1 <[ge-ϝ@C()H  m !tg7!#zᰶ]sD022BtL @P .U0l Ӡn{X/o:,~YNʏ vC҈`~zMy4@ʛG@X. 
Ѓ4"qmpp-4_NyVeaF(0+Wm}n5y:R4uWx<{^@>qˎL&y b"PX s3MQ6dӷHiܼ5/1 `B)yؒTkbUu7B]N\v͚4f4 Թc9v| 5qBS$05b6mZ%[hצ5ӷ@ -f[_ aڮ]+}@_Fia!ؼe{OK2 ;w uƛQ-ٵ*Cӧ7b˖\.f uQ; BH)2mТiS޻?'!\?VÄq!_333ܾ{P{qԭ]/Gǖ;Px uXK{{Tu7oݎ];COp'0WZ2Ю)$b1<= 5LsGWWpPc.tlߎFL%Sڹ12B M Y%&rی[ޞaKX({][fk0vH,cu܊D"zO\jtwx_,띻pF]˷jii^ݻk e@ׯ8|8~Ux%{v'OeL{ r4WG$"v8f<{t) l*VpEzurb٘5c*6oَ/AA:Wq>i<lإS,YR'$`0WQfL:k7; 4z׬X~#rήK6/̝5\.F2f߱a~V׹s>>zQ c:ERdd>B&OP7De&]bPH%0jE51 6o[Mhܨ4jtV#`2իoܼna洩֨Ϟ1 |_n&L0^m"XR0b ,_F qznB"!^_GR$(߼}\Ôa_~ǎYөY,Z͚6F Gׯ8~ ՎC9}G $,-adl  ryH%"2* "P^YM-s˱/L'BRHp \tG G2>Gq1u$\ʕc:{%ذi30o2%%[ Pd_6BCðjztl6TP3xS&OY, vދH1@Hlش C͙΢Oظi ꁚ5}sX,F^G-\B\\oޠw\=Ow:͛ɾ}NxDvmT㚐 G#֦P X]{"88{@>D"sq ժVa:[E֛o]WڵiII8z|Kux\\vݏж j*)%%.]Ɲ{QNmtlߎ>*SҨa?} C/Ox􂳓}R)޾{G}G044d:ED"p}xTfM(K"Gh͛6)v_$D8p>~OS7˚WB?{x>>5ЩC"체}Ξ;KK 4iժVE,%%_Hٽ[#(+jxyOeU,ًucкU 4WH4, n޺s.M4F%w,o/^•kב kNQFLO&aCx= ꯳( nЭkgԭ]鬓\ F&I*Wx  U"Fhpd 9r\ʕC Fyjv[_76SSpy<!Q$T&CTT4d28iy2_r<{>BpH,6""#!<W<|r[6*Vpe:䇰q6޼} LT16fa`` /O4_&9>~o`HTT1zz[Nv-87n›wfP(𑒒@T *+I`:ٌs[v\S*b= }}3V\()PH% ҡa}6ܿ]jb2BNcђNض,/ WƷ|BjQ  FfN=ue:Y:{~^дq#Z$X>~D>3\޸ jr:n޾I2?6lٶ !0%Sղ@ (5)X,I zŢ,Ǐ;k:cUpu93ֿ)ӷB4@ߐ 궃ZOp $4nVZ0/jƳuZ޻! A#C Wwv'NnѺqE5H{Q;k=n47Kyi=jjUUǂԸMX'zlIHyy_0yc@ܩmZ;iDTYvc'RE4:hܖ|M4C8lfhhfc? ȢY{߫ѻAtml=C8ظ#vU>Ϯm) |D|Sm$=?)ɸ&E'Gk܁_6Qğ^ KY}5n'ey$4)o#U9pYFk.(dRdY+OIҾMXidy,&BrY~C>S+% )o_eS>>@j+U>.XtY> j~wz,ˇIfY><ʇFYˇcEGYޯO/ <974c%A:*zȪ+?CϏ&|\Я^ѽo_3M%ˡJnmY'β|g]G~ʇ"Yv#wȲy~iBJ+.˃b ;#)Zs[ܲFP &ye+C߻9R?>ڿ̰r<60"c9;γY˵>1Xcփj\Q{5Wߖa:XiޖQ&ch3[uKW>D@LW}+~>ˇ:9ٕKaV~&'i~iOPd\HCA۲,VE|Um9*r4|!Up{0a#{ HEA׀e7Yӧ`9m dS>lGe#^w僕ŵ,Y>?]@HiEsl]~U{e:I(L w)| B\.d:I򕫰t"7,;6l Kͧ)!EB}bB!H[n#ƎY0A:pL'BPF4JHMMѾL'B?x !'+ڴƽ 2Bɧb]x9} .\~?YJJ ~? IIYw=ܵ7g-n޾mHH?E͍R`e߾70b/'wr9.^ G@]B4_odBH 6mنmuBUkq Zl=ǸI$i!NjzL$LݽL,Q3cL6ܽ3gCS~qQc'WZŨ1{ko?ֺ]P`KMsǻ&IɕJF!T,0@{ 3;p0I,~bꁖ*Kxs@4%+,Ia4boSKNDu$XLL,FpfmƎ2W(Xr=t /m?ZhD\lԶpVb=Gllj911D`? 
ז=PV x?5J\uh6ߎD,LG6lK` 000P/.XN1BH)V,k?wݯ-}V\@0M__ S-Jה)8$D+ ѨA}[ ӕg%Hk>Ur}~[?۷oUGP+\/zzD"83̐}TjXi>BZ9_!SCl6 iBer"5)%|IDATب+SF}r.0wtZ,lb)7jiiB!֭5ghۦ-VX KK ue8BH-wX3Nf::|j$=yB IWY1 BH)V,0GGغvvu_~SlYgq=̘NjrbϗX_L'q\\[P\|ycԭ] x<'} PV,1e]Xh>8?jb UۻvS&lYglTKwǭblz0TKJJaO ѕH{}9I HzpOA=(9^G⭓}5+n!! l^m}ٲضic/$4b%9fcyj3[T=> 0oXZh% v3QVmF@ (]B(2" 6/ր4*T==|Й]x>qk #dZGLUco%_v RЊm(m71 ^1xn ~Q;@H#SH_9>-Qd0 B,F9)I>&ƹ:.jpSy4I0 w,42_:WS u@Hy,P5sH|xW$? so½6E):ݹ{KW_W"Y$ȓHeum0$vރCGv఩!!U,Ƨ8r܇N~Z>x1Z4k9V0l] ?.3Я6§݈'\z01;B緑28涰 OQM!@`LRi¤i`:ZEDDz*2^r)lEddN0XvC0SE߫qÅi`7z0<<'6Y)R)+{C!9E}!c?'kOH`:)yr]ԫK;BEA!Br}8}?'\CMN! B!R޼AJM\X=GŊ2B !kwNF) ?t{tc:)BA!BrtRrȱhռ9h@B(#BH3 lD"a:)Yoޢy3 F)(#BHaXj I*1) 7m&0BQYs 66-U[7eoѸpղ%M>}JkѬYi3޽OEk=xjۻzq_\Y3gC" >*Ub:ۄUOs ؟X|%:FFL'BT(SS<pպM}?u,.]Zf9fW&a}%Tڵkt !`em;#GGGۻ!!a?|gϫY,f̜t QөC;8tǀ~}N@,`د/ʕ-tr!D 5G!ojO5P(ur=ָ@@ZpPӻ!Ň={q/_`'= quuY&$={ko\.g4-11=ʕܙ5 a9㫶T ևʴG篶cc,"H/AfB-kPP˳Cj$''-"( )(ݻvF:1{BDDD2gϱlş} !E5G!__o>!..&&Ӄ5ԯSK ֫Wo>ܻ Sgaa ~P/]kkfbb2FV-3 ޽{&Z666{1}^xZWݣ.[^="&&&{yP ؽkZ}ѼEsg{nBt'OT-?ݿSgD`P_vGLZh~jE[n.i=͆9ʕ+zjIӦ28qm[[U'>>GAlLj]1x bgPgN]Ǐ2( A`ca]..z4W[u]`XXx!|ӷB ,8a/=ϟ=C KYXX/_ǧڲ\.5{;p 9}Fdo߂5[0^z /b;oZlOjXxSLٺeΟn?VIiC'$$d_̙n,]jf{~BtV(>fPiw՚.lx{?#D"M۷hM[З :x G3 th]p,5kDEj$$Ԏ/AHf6^xZWzUW]&߳@m922Rk|???>t*bUppp$lϞ>={ӣN%pܹwEU>!d> BFjR)5Slޢa!! 
K{r᥶Ϛ]9j_/JylkkiSg׹:غejfcيJddƏe]jYPdXrgDhhퟱrŊՂ:Tc'f)xM}ѣ >>>WmknߺZ655?+ڊcE~>bCX4o^| g=`022ʥ)#T >8uj9 WoԚկ_/\R5{:vڞQ*```Z~ux9j8*W xlުyhޢ\]]ŵ+Ld|{vCJJ #6كe˗"'BBB1}LdiCh+֭qcƎFٲΙgy/ GbR?zCC*RRRp!L}R_o|EF3k߾g5k`붿MgJpwwqU"#pMjRk^Uk-Z@JJ*.^H3MΞʏ43rFyJw60t/K T߼~KAC\.ZhV-C$S;xBdrHJI/@l\?ʏKٲ];0%B3 rekWQ _z O`ieTAXxx8>?+q;-ȓJqA`?988[nպnfeq>#6;g,^ׯӚ.:qe 68dJ7%R}ڰժWBjUq=P(b 55Wծ?axFó:?W~zAXFe!1qoSKKN>G{ M$khԠ>5Zظ8 "Gcё.BHCAX.TarOb5tWV[T>xe"!!?|TZV{q:ukaGoDP̙UX!.O֬FFF9]ċ/YZZM9:^*U}  /_>|P+xyyիWj@Yk(?@ExZQ042T-~o=iM[ڵՂvʁ}^-]ޥ]DZ#,]@hTRI½{ֹgn @kki`\ű'TϞ)_jC,OYYϞ=WlZj3"#35fkk5]6EEEi ‚f:DGGڀ\.C >bejiĴw Ӻժd\h<|X/P(D70WSgÇ~NGQ/6[#TthƦ`2zG㏕jM-,ѩs,nݼn9H\i]} nܸ`|/MBPK>>͍CPdsBU͑2`eZ>Á1l%'h?,;vo|>K-[!99jN:!CAOOOW>ӧL v,UB OM Ϟ)?~Zgemj\J%~aO~&i8:kb3$tgxF$&&>jL5Հ2۲eSinؑcWnn?8/6n.Ba5%K+K+WV"$$ϟ5q,{{;ղ*Bhh^|vތ~Pd2gZLs7>ť\MHH(oƏ.7P,˗/ciCG:O& y{{fj_|B6?VtA^q& 57'cc*9b2l766u(rw?S/_k7иIy}_[hBHa ,}jc޽{wo߫kebѻ]|>իlXXd#44Dk~͵5˗U# b\r..rtZn //O,_Rߝж][/s舎eQ/=z <&&CΝru.Bt\xdD$?{,.cͷ*rtUTTdLLXXڲTtժ[~ƛos4):ŰaCÇFq\|%jQ`;Z.\R.|A_r\B%J.jexzW͆*W֡JU|>_{L#!޼qKknR{RN9;9aμX~M[r4Q)VOZ D _V9fU+Ԛ-_ڀ)0)c/-hP޹s2^S^jƒciVuY~3o6ϟZP(0{\e?QsݺuQ0qPlk[Zj9fcquӧTK!0<]Sk7TfXQap8jd~aC7jPm]{5pIu9mcggQGR)̚H$;, 3gPN*໮[EKٳ橍&GS2Nwpp4IkW@gZ ԛY.S_M8ufm7͛ñbيl422IԶ޵'Sz Z]j?~mܬkB!9EAXkOHH..`ie_ SS՚%$$oؼi n\n`#+l6s/]GOrtX*UhVb:f46ߨZ6oAV&>I-8Ŝy0 UM޽QA,&K1pEUߑxY z?;vj\_Z} 9{:ƌNʾq&ԫW7G+FpwwS\'|}qB4t 7nPi VCk|>,YdceKj-\9TYc>}{P=c^<O[.Z[pIutQ]]]0h@l߶ 1sl=~(WCB!jHSֻVp8؆ZS2M>?|̚=}u~T^{ Y~ʉb'LSYQ)CUʾ3a2>54/'_-@s&K+KlП,'zV;u襁nݚuَ_FTM(>cj^BՄ呷wM8q2x{kX,5owo8~DfMqaܿ߿왙uF=ԆOicVV;W?}总݃~@W;^ӜD۷Kjwڍ5fQ1B~J?Hu58}|Qxcy F )(KiJ =!:J=(]vXd!cGO Crr28T M6Bn]`hw,0l6sĉUϽA_q54oFFjk}q1{t=~DžS5cuԧx1s-\Zc<}^^^yB!Ra'Oaع:93E,b1€ M:x Ihei1R+)) @H$ܭ/m5Bָykhqyq^:B)ʨ&8=!q^1B!'B!B aB!R(#B!BDA!B!" 
!B!QF!B!0B!B)Dy'qظi+,--tc\})Ŗm;`nfXL'RA*#$;X,V~FBHiQ3rx ?gLD"`˃I"f`mmAC6L'G>~NBH+KKDr̄kyC!D BP0Rx} F=8}0I"!r5kl<$Rqֈ\|&L'B  +eXfa 9߄B!6 J6+G.WP3RB!B a ՄB!, J6;]Mj!B)l2B!fQVPsDB!BEAX)C5aB!0R&LN5aB!: JV940!B!RJW0B!B a ՄB!, J&B!&QVp\տR)!A4;aR{ӏK!% =J>CCCD"AB$B6 +KK,<"$R$%%"Q"@ Ѐ$BNPV XGC)bU{trH)+(~4w+`RRPV yyz}=C)b֯gN)ENI+ouj2B Jzuk})0$BHҺU U;L'o޾ŹUڴb:I3B Յ &&[`:I"=uQ-/\aL'` 9wj F ;" b>djy}p2"!Gs33@TTNLщxL<?B>i"̟?>Ӊ ϭb}_P(p p8lTZB&Q]LXtNePٙ&6P~$\h<=14B)BA3VqqqeX|H!ڶAڵ`gk ss39!+0k|պ5йc{xzxZmABsڃWpmhl6S~={0LB9 JL<~ϘN !R@_CF N ! r98L'BH)bкe L?VVL'B aD%55w?k7'DDF"::F5Z!l6VpCZhӪj^B)Mƕ#H<%%tEXtdate:create2020-12-28T17:58:21+02:00vǛ%tEXtdate:modify2020-12-28T17:58:21+02:00#tEXtSoftwarewww.inkscape.org<IENDB`buildbot-3.4.0/master/docs/_images/master.svg000066400000000000000000005376311413250514000211770ustar00rootroot00000000000000 buildbot-3.4.0/master/docs/_images/master_src.svg000066400000000000000000007037701413250514000220450ustar00rootroot00000000000000 image/svg+xml Build Request Scheduler Queue Worker Change Change Change Build Request Build Request Build Request Build Request Queue Queue Build Build Build Build Build Build Worker Scheduler Builder Builder Builder Change Source GitPoller Change Source HgPoller Hg VCS Git VCS Build Master Reporter GitHubStatusPush Reporter MailNotifier Reporter HttpStatusPush Email GitHub Custom service Build Result Logs Build Result Logs buildbot-3.4.0/master/docs/_images/multimaster.png000066400000000000000000001136671413250514000222360ustar00rootroot00000000000000PNG  IHDRMUFgAMA a cHRMz&u0`:pQ<bKGD pHYs+tIME 8$.UmIDATxuX]SJEEALPOVN=<쎳PIoveQP}v;;3Hd2 <Qy7Gqۧ#""""R{.X=<"*"NzF:s-rU-ለ04~27opDDD<;ZZ gzv z8"""""Dbb"IIIhkkS\۷ zX"""y@ @"Pb6l\C) 6oĹgJTMzzΟszLMM111_SSOCD?<|bcc$,, =}U>mmmLLL?lrDMM/7<0IIIϓgx6(HOO/衉(6V+sy'J(QJhiiHNN&88>CffSDrB" H͓dJ"##)U*Ub 6'Xd9eJ3}Dʕu(!|jjjwr#?L'h, z""d^xAXX(ϟp%HOO'..8IKKX222HMM%!!233III!11hd2III$''&&&$&&~VD&eS>ݻwQu5(衋|LjʃH>YssÇF5ׂFKK[ҨA}A?R2I}ѣ%"RJ G)MMMD"Fzz:ddd'^bffFff&ꨩ.522"44+Vkٲ%G=?OU?/_q̙3֭SLMS蔇+W 
033)7oKR`P$&&{`ooOѢE D{O?BE BjըLK&OABb"~_C*,X7n(ԤXbԫWFe2EѡD$&&mQ455jHhhh.^bCCCDOOSRR'""o7ox-o޼!44TRSS9cccX[[SH033ŚbbbB+VXbRH:?L&ѣG޽[DM 0~x={F9sL˗/g۶m@Vl7ɓ'9r$6m!>>GRb-Md ;c'`fjJN zHÇs>nڴիuV'c lXYsPNO?x+=NDEƆbŊ ʁX[[ciiWE̙34jcό9djjj; 9z(QQQ\~?y|$''3?΋/) )V/^lmm)VօtiS FFFXZZRn]&MĆ YUvE~TF\~PaɉRJ iii<9Qre_V#MLLSN222V'J144^>|(L[ZZRzחd\zwB"EQygddd+QDSSWׯSY(}Ɠ'Opvv#a/GA*<|Xx$Bҥc'Nf_:!222;w.WfW*2dazȑˡC={_D&!H }+kjj~7NPPJﴌC*"H>s_)IMM {{{?]vͳg?+@KK ;;;(Q_gmm]+ 'Oɓۙ9su۷o.]da:22N:ΝQ޸q#k׮ yAQjj*]vUODӦMQZl{-?۷-Qfё)/*JAiiiJ^z5=@Vw.^Hpp0VVVҥKlV ,LQbEjժdE><>>>3Fx~zʗ/O֭sUT-Zp;&dkM T*A7oرc<{ }}}TBǎ133+?MLl,K`!?WDefxy_eM 2?11gϞkÅ8pyːʟ͛񭬬pwwwܡEX l޼qp5͕prV\Ivhݺ5oŅ#GG PөS'Q ҥK;w0}q*WNϞ=p0<%%J* B7n߼yQF3a^2e4h5g}ر >e ߳"M4aӦM:u~Ii XZ[r8661cp233N:1f̘OOKKcܘ?d)^cmm ŸcSHРgϞ,t1ëWʅRM/}!d2^^^oߞ'ORdleC9~ŋ+OMM{J"ׯ_/T'9NypvvF]]lԝ;wR ׯ_IE!55l?]vsNvޭ_lcsqqa˖-d5*zF"p222X|֭-Z_j LB߾}yL4Ixx:t pBCy+Qե"R~:z,ݺt"p׮]ܻ͛wnMtt4͛7Ĕ hG1ASQTV7o h;;;+cJNNÇL>sαuVs͛7L&rƍJ=]FFF+z葭ӧOP vK8s 4HI1з}1yO:f[A)]4%KTammAR^=Ay #888wxUwīW%;GGGEjLL``Vy{{TyXf 2fRRRcŊ,X@i]EH8>>>ٔC ֖}vs,qpB%Y^]KSttt(S ٔPfMnܸ۷ol+ZeM&(?nnn\~˸q\br^:w___&M_WUNZR%ҥK+TmZB"py'N֛>}FFFJߪ\r̘1͛7½zK*|*COT# e[GVFV+ܢT~}Ν;Gll,׮]#===׊jjՄϪBӲeK>Lzzb$'::Zlkkmya-[h;wW(Y$III<}Ȋ,UT6Ay4O>-X P.ٿK=y$S9555ի/Ʉ,ӧ6lRPfMtuuILL^%lllφD8\r+WRJ lvvve*H?Y?~rJŋ*'Ϝ9 6P^bǡLiMO^Z]vaX"5WWWU~GGG3uT6l璞.胃 @"ЪU+€:=z`ܸq$$$Eƍ?zJ)BRyHNNqvvݻw\rW2hп0rlTT5Ғ8Ǽ+""9/(^|)*U 777S~§(WTJ*qeS ŜXU3Eσ+e˖¯C\(eOOOOG[[ҥKSBVCy(L&<-AJ"d d eP Vӧ#(V"B—S>^L_xy f޼y 8MRlYttt(Z(uʻ;w T&o15}2iɠA8y$vՕDƌSfQMJJ ۷oVZa%AKK6mڰzj… ޽;eʔ1iTKKsq̙/8@\|gϞ!ɨP?s,{)7@J޵HNN e^bEn޼)*V(Y_zta=w0"##C⮗.]J\\Wȷ(]bE d䠠 ^|Ǎg+) y>,|{|R1H"֫WhWXu\pjժK M8qݣM6̞=^z}c} 2@DDGOMMŅK֭IJJ/^}2_G(ʃʕ޽{_]XXZ۷OJ .]U^|aÆ 'ksd뱯[NݨYR9K.qZli& \L֭[9888!!!9rDCGN,gΜrʤo߾lǞ>}:ٮӢE9sg=Kۗ@RRR^x[8<'O^hiiѥKL͚5?y@B؏ѮF||/^<'91PVXAll,111DGGCll'҅ t._.xfΜzzzL4=zg%TJVXjٔ*VHŊ166[nl۶U':* hӦMټy3>>>yJ8s n")))[hMʕsW\Q}5d֬YhhhPti0.R (9fYs##Cbrx&D||rJ Ǩ d[z5͚5B^xſ3s:C;w.k֬aΝ?~+Wp=^zM)ZZZlӎ.]?x)XZZ*owssCCCC=]6d:u`llLTTĪUh֬ѣG?^R\]])W\ǐo_B*WLZZRbmc4n K~yP +xXY.ހOEÜϪUY[ʦ=( !D-N_{{{z%2L 
>JR2Xf\矼~:APH%σⲷo2sLfΜɜ9sO)#7^YtyY[CCC ?#xpA&1w\8]s:T*G8p@iMKKT(y坤$1 Wx\i ˝;w=dҖ7P>={ۛׯR3UWW^z(Q/_~z n*UD c9p*V*wt!DOOmmmAh[>!455155H"X[[ ŋSHlll044X/"JYx1ÇACsL-7ԑOhh(!!!jժQjU$ >>>ٺ*"J&::`%%}ԩCJ ĉ̞=TFd=4WWWW\\\;ׂBtt4ϟG*|U1%KXB*vDWE&MwܡUV,Z:PD >}ʻwjXUVeӦMKE?~.NNNEZ7==ݻ3{lۗ͝-|qTTPD %… +V &OgbRk׮rٖqeN>޽{^AAAرҵkW'x>hF<(ѣGoggg8r}|ܾ}[Sf| [ d=ӳg44T+'={dʕeFͩSuhjj)<%Ik>X"Epg69OJy/V #={,֭> [nJǎg ܽ{ rq @F頠 SJ@@)jVy, <ȓ{ʔ)CF8s [nEOO:up !444ԩk֬رcٚ|(ҭ[7.\HZZk׮ͦ<ܻwO)ֵlٲ٬r:vȌ3gݺuZUVqyڴiaaaMeoo_*ȩVURREʕ+޽{\rGrƍ<-,,PΔ/_ҥK B~h َaiaw[.ث.'_G{+V(^^^TaOy \2eʔו(QBxb !Tr!k׮$&&fSvٶm[6^X}&}2e*Ą%Kr-7o. ~*G`5-ZТE -Z?ͮ]qxzzIٲe۷/ݻw" -C2e%fذaҤI,,,AB"/Y222Xt4Õ:f;ԩS矅gf[IbŊaaa!53@v\X_~4=122Ύϟ ̟ew:uJnР˖-S˗f߾})R555Ҕ2e}c>T*eԩ,Y;w:m?~Ϥ Vk.N8/ӧJ=j,,,OJ*ޱNNNB;dr}3g~rҴb <==Gp*W֦W^ՋׯqFv)x# +Y~ٲe,[ ccc\]]quuB BWtVXmۘ7o?'ѣU577lY,H7nݻwҥKPHիd׮]ٶL6-Ǫz" K-[lSjU - ruJFv F"Uѣܹ3e۱c7ntuuqssS9,RJZn9<WWW*Yf|333S^^^DDDDrhذa+}}}͟/S^|AjصkWȷӧOyi[.[x]^T *W,[sR %*[)WWW)!K`}_RuOTVMP8tR(Çn~SSSQy,ac ҋ*͙1cFgwWe77l>zٴiSI*Tȶ/Uq 2|_?9QX1ԩ5k֤bŊJ/o 'r$''1t}>}TaGGGvʺu;v,6m"%%E`B常8֬YbRRلv[[[%ǮVVM >4}(UUCC#|kkk:uY˲7 iڴ)3fV@ECCgggڶmK^5!}RONM.Ǽ~cP!-Z`̘1Be2EC GЦM=r4556lXⅅB<|H$R-,zFWdIRRY`>us|vaF]]:7?-&Yή],]veŌ9sR ?Lq+s>}֭[ O{?OGGe˖#-,,_ׯO>%$$x Zj^azzzܸq#k54DA$߰)JneF6-[tn~AMn}466[[nA5ӧ:::ФIp- 177kРCS.Ŋm۶-ZzyxxdOQp̨(6lRyÃ%K*=+W,lNݺu100PǭX~}N8'_Ԕ~MMM,,,TClJ%B_o,-,(P擶:::4jԈFtR^z%]=y'Okjj76b[X3g|<7:o޼ׯwf}ʆ !ʛA򊍪MJ555cm9Tu544TGDx4;wOb4/,X kV2}jK^L:wC4pkD}d2C^=iצ'((ẁĄ]2gm[ĭf?8:uh1cOU/LLM;v<Ç aȐ!lܸQXА7ұcǂzOȕ411ڵk)S:/,Z""R}zcn W"m.\LJJ*Z|^E__j׮MڵUVܽ{+Wh"#8aK.eݺul޼YWSN 8gwRRR8p]v333X(XZ? 
"iѬ ffܳw&M[=j'wz*r""y@OOKKcر4mڔFtnڵԩSW^o޼aTP---СwDFFTQq)+"9ʃD"0iX>ʃ=BL&cTT W8gW 9wʝ+UׯUt \]]#<Rxq~w߿'OT*eoa zEQy”.eo#G0g"}EivNj:iBAEDPQxqwB]"mۖCɒ%KhтSYlRSS9u=z@__ggg6l@TTTۨcjjs[&##UЧ&#*""@vmhѴ)@<|K``EDER)tٳJضnJթU=Y+3226mmڴ!22I\v`bbBfIHHPUTa…FDD~~~,ZR”JQ' "O1.>n"!=̮=زm-Y}ɒ=BqwwO׌?kرcP^=aٱcpqqQjUfڵTX5k~&khhЪU+HII֭[5 333/ٳgy'JDAD$H$L-[4c6^r"GBB-x_M=$BKjj* .uű}v6l OOOgܹjՊ3e˗/[n cd:u.]`iiq*Uw^8rnnnlddDÆb3G<#,_9E>?W4l$jjjܾY8|yYGҥKݻ7~~~Jqss#99]v 5ә6m͛7'44O*e*T@fسgOIxxxp}nݺEǎŠ<|խá;111aa^A eNHOO*#~M[cH,aPEDDEtqLL +իW'$$3gΰf^iӦm6ԩ#l{i*Vȉ' \˜:u*ݛ| ˗/ٺu+ˋJoD"A&+a|od21S'-df/^iӦjjj<~+Wbcc#Q:bD}KK1FpE}(aǟS'1fHΞU>JtL VJ͓D iii$%%JbbeP ͧbSL'LjZ=$ohtt)ȳeeʔ֭[,X3gL&c֭8qsrY֯_oF||<̟? *j*MFPP̞=gϲm6ܹìYػwo`R);vd̙)S&_(׉.\t){$2LvYV@HH551IӑQ=C‚Z*U*Udبx]|fÆ l2cbҤI\p!W0|p&N(${|O(y|ԩSs UV,[bfld(רǏL=Fge/vmRC#^>05Yݵmׯ_gȑx{{+wqqaҤI۷/[3aaa ~Pyׯ_3sL6l@FFFΥhѢ^VZewT*Ob%DGElm2sڔ2OrMs'P|}=]׫Yd֭? aD"K.nݚ?Ǐ 444(QO>IRŋaaa̘15kM;wf՘%*<FY|93f >>^? ˗/WRWZɉիWsϟc+VkҬY""_Qy&)BWWnݺq=n޼)̗H$͛W]]:w̚5k0E<|\zq)033qƜ:u(a6)))INN8lܸƍ) """"""]pYƎ|kkkzj%H۳n:r!"="*""""""" 2{2a$iȮ===/^̀ DD  """"""ݑ8q"ٖF0004lذOGD -|i9uJȕ"..Ν;aÆ<{,DDWDADDDDDD!--KRB:$766t7T^=O9""+bؒwŋ2d>I$ڷoOHHHP&D ,¢OWD@o`ƌ֭[W\-[|rbccJ"000DzRgbb_PWW/aK"""""""$,]r))FFF̞=GGGfΜ)(ԬYAqӣM6 DQQQ 6 *p邾 ""_ """"""qey-Z0j(eeʔؘ7nʕ+޽{)____.]Ν;IKK6 4ÇcjjUx|i_QyƘ (Q8ڴ*衈|GLs?еSG]3tP߯4sά\:uǏee˖eٲe7[n [l122"̚5kXrR!ׯGXbzW$?6c6͚6EDDDD.֯ddd2ob7iϵ*בdlٲggg%Ɔ{nN:E͚5իWӻwo%a9r)L:ׯ_a\\\>MBB˖-ޞ}oYE RSDADDDDDD렭קs/̶4hЀ޽{ d5l:t(?iӦ 8giӆ-[&6mb̙LѦo߾ܼy0f̘ViJOOӓӴiS;FF *"""""""_ Nf=Ϊde*UŋJ*ٳgY|9vZayٲetEdnnιsݻW;''''̙Û7oعs'M4`hL&Ӵj KKKƍNj/7!"wDADDDDDD@x-aÆ[hΦΘ1c>ѣTRk׮ уk׮b &MͫW4iӲeK8 i)R~} rn߾Mffmbbb8~8Ǐ'333lllpttD& \z==>UQy)${΋/ &66^&"޾$bX<#1{̋B?Cniٲ%[l!&&777JKÇgɒ%)S ƍcܸq$$$Ç9t_q ͛J߿>=zh>EQy) 233{'OsM#"ĶhQ.!GbbIIImQj֨NMT@D;'55ݻv !H*..xxk2tP 'NdܺuVZ*,[|9 *SdhԨ5bٲeq 99z&Tc@#tKCTDD2i>zHMMŽ^]S.d?[:LMק͚6FMM'"R(IOO55شɓGO"%%!Caa"Em64i™3gر#qqq@V֭[ܹsAƆ~ѯ_?a^PPСC<uܹs=p85 "Ü ^ԪY]VE 'ڸTK._aO6m1Sr?wqY OݝGOݝW ֨Q}QhQ6m?,˦:t**}mlll6lÆ S/ӒJ,]CʃHR%66THNN&5-,KFBBBbuutPWWG[K M-M DGG=== 144f/E򗔔,^ʙs4u9rFk 
8@B(IO/~+o߼aߞ]ٓuܹ֡s7n(Q'N\r}:6͛7W-"|ʃL&]x8AA{GxxaaDDDFLL 1"H024MMMFSCCC +P-PINI!5%xRSRHJN!>!bⲔ#C055ŢHaaa)VVVF[[o^f诣PWWgŒI+jjjkӊ\=!SYTo2~4,^1 zHHNN.Ta>9 Ϟ=@VOBo߾M||Һ 4(N 0%K@9s挰_~aŊbq ,,ӧOK (Y$5j ?z6:::,[9ʩS#&& WN:u> ɓ=)RD9 P@4>{9bffFj.ԭF5W:S;pXxG1lhjլNz?ضm;v`Æ ,YITV3rH8ӧqvv.衉|!C`ZMMMhٲ%>>>1c0{lSZs JTJΝYx1 8P:::ٓǫ4P2p@-R k֬6۲u1cƌlej544С/L0duiѢ+V@MM/2~x!iT%%%1l011cd[رc̟?_q:u*usu r͛73g0b֛;w.gŊ'Oqy.sȨhCvm9} Fbâ"s. QԦ()̸SU@*\͘#B俁)Q+ŋҽ{w̙Mghjjbnn.L|pL_dUyJNBB0-,%%EL$y))SFWZZ0 Tד BU"uuZLy%111/^\6PWWg„ ^ׯݻwSNeʔ)_m<"rJf̘rYff&GRRׯ˗lٲEiٳg rn߾MΝ9u=uiiiDEE .\`Hzz:aaaIKKc׮]z}eSd_M5gϞڵZj}t}[[[8;;Pڵk 0 x<<<8p@ _嘙<}g&<cd K~x n" 9.U)^?ҤqC1&Б,?"R٭ 3.{|+zp]?Ι3gU ?Fjj*!!!5 8+[Зqln߾-2 ڴi9Ҽ$=$iiiܼy}_ѡC<Ȅ u޽{czLAQSScÆ A_-Z$m۶U R ϱCZ044qƼ~+WYm燗Do۶ kkkZlo۶#GCBB±fΜ)%KŅ L&SM& T\}*%+bee%5l# "00=/^"&&lڶk>n\T"0GY/PhQ?^Ч ooo~7aZ*bee SNXYZpZlEӧ9pǏ۷/MMMLMM8q"/ׯ__P4nܘ%K z*'>>^ȟU(|IIIJD@sR)+V$ ܻwO8FZسgHʕ"%%LFppy+k.r!UӧPt()))lуơÛ7oYbW@|H$,HV,55/^s"ܼ}2pt~1;N?iT޽{WhsTx͋/ɓ:uJ,! '?[yxlܸQ`UaeeEʕ)S Kssso믘(<ǸT_ }6@sssUȽRMiӦlrʍjjjbeeE~MVZd Bq<' )B͹r zwQH>} dU*^8MR)jjjPz쬲bPnQ|( JyNz _~}%755'OՅ~y(Vq{_ѹqㆠTPA)2/ymmmRRRX]kmmmWιsnjlYvmB"y3!u+@lǚgϸ{Ϗ7|s@TLXx8!aa 2зX3 Tw{ٳABBB4K*^P*֭[tܙgϞqavǓIOO/PfBRRRLN<_ũSTh֬-[~h'^]]]ݿjK3AAA /XU.\P\8(UժUcӦM<{P"_Ŋ5 J9 T*PPLXwrr}={L(t+*-.(QB8p ;v&C0$xΝ; ; 7n\QAX"eʔA__xݻGBBG@V9٘_璖&(`֊ƎY1+.σ>̙1CCV^K?SGYshݾ3rPft#}Wth׆cc'F ^: q,Wn]:hΟ>Όi+^sCVII`jl=ylD۔u(iTTÆ ޞrҥKU&vGGG=:u$,R ;wȑ#yOժUiԨ8pϜ9 q߷n?Yqb(dŜ:::*_~M޽)UJnݺB2_vprrR[z5?pu2`=Q@QyQjjjA1Ҙ6m0]J >+^gE kmbbZOV߸9Yl1-[4# y۷ڶ>?z g)‘{Tʖu`t#G!==p!&Cz?Tc'׮ޝԭU)&bh UWz闫ku^ʂ == } `O۷"EиqcҘ;w.-RZ766ׯg%(%tA6&&mmmbBu^W/ybbb… JTJe˗edddULaJ/8鈈:uę3gB ~ZVVVXYY "IHHرctؑ.KٗYi޼9=aT*yݻ7o0|jժ҃ݻ (Yk+W555n߾-(eK9)_KUF%XQy(WUVϭfթS'o蜛޾}[\p\zɨQr-8|(L?h۱cGhӦ ׮][(=7c> 8m Ӊ̞=-Z(%SCo\QHIIa4i?ζm8v옠w-[ҠtCKGI&8;; D"aԨQ)nE!6>Iy c̟=rebg1y7151}6,_=oJJ$'%C33i׹LR;wׯShQΜ9͛ٲe RUV) ׮]###Cx]zU>׭[ÇYp!Rslؓ\v [<\a\2v>99Ye'M RRRؿYr%o߾VZ?јgw?D@oU1?ҧ>WaSۏ?o<[0tJLL~111B,X4bcc_tttF&)O4Ax ?#xzzQyw.X ܨR ?nպukݻ72Le^ݻw\|#G1T~=WXA\\Ν#((Ν;+ÇXXXPvmn߾7e͛7xyyQfM9<۶m㧟~R/b1n8h J5kۼ~Kc#CVXqyH,X >k 
_>UV___x˗1^Y]ʊ9s ϟ?'$$D[.Z(fݺu,^x\/ݻ?iSN>&dʕJu% 'OYf7ݻ7ϟDYoE,"""ۻ%xj֬\"/U6tT|?۷oO1cٳٰaҺ͚5v̟? 6 HLL?r iiժxyy~!OQ1HMM.+#>>^eMkkkJ.۷G]]V:S̉R$?vUg)9--Sgp=sf 2j0ZhFJJ +WV,%)۲̤82 >qk#HpUZ5{Ϗ͛ܕ=;xAxr #DDDFZu<rU/Ϧ<ԬYeC?~cdU|pbmJ*1tP%DPQyU  P9ԫWs޽l!&^"44}}}%kЧ"C[[G~P<ȭeg:h%RI Kyؾ};XZZ"ɔG~%$$P|>>>oo5uuu DVڵ+/_ʕ+<}2ey?7hЀ^z;n޼IVWzuN:رcٱcܹc~k+h7n$99jժƆ 8pһLPr:WNQPؘ `ffˁ\;U O9LJ)Z(,\ҪR?~.'==ׯӠAl˚6mʬY6mk֬!..;v/|IV(+dUknkؠϢ"KMFe`eYCarTXu9z ,]n>v ###J,IMgP7 kkkS|y\\\x!>>>T fɒ%$''se6nH-8x D6\]]ۛ\'ժUrhhhKrrRBzJJ ݻwر# ovUHO>gbʕJ^7np%:T*rH _uA(IDDDEw*JUZ>{ 3K??ooo%ANb6l<| *UD׮]믿رcFR)<3k,GJJRn g䄅7{CѥKc<u5~lPL4)tP!%%H,,,Tۯ_?fϞMFF}j1[| >>69*9]LƺuׯgB3Y]0F^O\f1G7u ; pbOm#|xu7j– jVTGr.zRy."eYMyrB"кe jըP!Ԅ[[[[(mihhRTkii Hਘ$$GqhhhݻwYx1阘0k,ˠ#HLL̷k!WEE^A5РjժlݺyǔE]]]rFƍaϞ=Z P---۷TRԬY9vڵSZ'33ʕ[%jbv-Fϭ%UHu,kLfr"Vku JgikQH za^d&sb I~>yBIܟIvwnyMBzz/000 Y_022b…L8Q%aaJJAsv*Ʒ浫eÇ7oBGyd5ڼy`VR aީS'֬YѣGru$7֭QX4hgϞӧَ){ꘘ`bbBũ]6={ss!o>GR駟ݻR-R<4jԈ3f ɘ;wP8%%ɓ' /L*Rre._̃011dɒ*sΝ;+5rqqӂwѡCtuud߾}uPΎڵkse֭[GIJJb[m8s,M(sss6l(7nS˫+V,ԫWO#._,()))ܿG*-_\09887oiӦJ/ 9&&&C->>>BGYqaaZ~+>ʖ-Mjܸ<}4}___='ɘ7ons]29ʃ͛6c…-R=#l܍DCS)6nzTno1l!R}c-E$ݻN7Dץ!#`B?K0tw#ْJv^ٳg<~ի %gA,ѣG8::ܿQn<ȓJϿ*y%&9u떰 +|͚5رCEaggT*ݻw￳f-[I60W`hkkӲeKZlIRRbڵJ/ʀ&NȔ)ShҤ ;vEyJС)S۳~^~MÆ 177'&&F+%GFFsNc0eڵkGÆ s}U Ն\\\rT ڮ>}ZX/22)S0n8+FjjPAY9*\|=;k֬a͚5QH)bȐ!;wLƃhР[F]]/_ryO.:T!Hڵ+sb`nnN׮]ٲe LJҥKs +/aXXƆڵkSLlDBll, Ғڵk@RR6mmڴ{1sLҸr` Ojk(BO$UjՊ)S{nذA0Z#G QO<իW~n޼Iƍ" s222ѣzƆg*5K'vߗK.60PX['#* :P'jB^Hu]AVG]]3`r(:sy'WgK":tHhݻGE}kAiҤIuy_k\oܸp|y E"'cQƍqppŅׯ 1LffPSi׮:t(Ol'##GSAEmNЭ[7Ο?Ç1bR]FF'N-ZWWWL¥K>KԩSԬY3ϥtӾ}{Ahkkkl٢ieƌBY*U鸊99;xBwpuu!U@z{{cii|R-[VH>t_5kLYz5tDD="33S("/5j`̘1tPPk֬aŊ;vLOÇ޽ _xQI?~P}/55OOO&L8ȋ OPP{a֬YBHdCL儆~̙ҥK!@gŊ]Vi ŅX|Uksss9hjjұcGaZިD×N8R^DsqqqX &()N:^s,aά?8= ({ 49p47cآ''#:KQ30F!uIDATIyz2 4s ͚4SwJggbbbymQOOttt6iڵ+-ۛcRn]lBll,kצ\rBaCCCF-ZPj#Hԩs1ܹsٚAVɴi޽;тرcJnu% ?+++ m_ƍc$''m6 mڴ͛9wwwڶmK֭Yf U2J*µy M0OOOcM>ӠA=zgȑTRE]dI>|HF000͛Z##Cmd͉W^~;w^W[9s&=\YZ}E/D '7!4$mñ$%%MMM*9oO+ PH$J\O4O~sxrȓM>c'""QÓ䤤BD"QǃHJJggV'((pRSSWHbb"ܮl9NNNH"Bd`D{.aaaJƓ{SSS\ry5k֩ycǙ`5ߍ6!)OUkWJ09vn޼ 
xΝ#22;;;4i">>ue˖JϻwKff&ԪU Tʕ+W%:88PtiΜ9CrrXN:Ejj*5jȳ61)pr[nInӜ>}k׮*HnԤs "*"_?#ӂF!"e2dy 333D" JyiiiqY>&*"_/<;xD©10(eo_Ƙt"(-{կ)Op1jaTM"j"t]a脠<+[´\y {Iu=( QaC3if.XE~QC)d2:wM4nw(]gAg_III<|'OuaaaɻwMMM=z4a89/J:uy\#ߔw_GE<|Lݎڢشux_NVAFTrHu&E!>A= W >X`jw.E\ ĩھRH$ ЏR /\P%:&xY%2˗/{ٳ σM)iWL:-sVLL1d]Z^WմL&S{z:33SBPIOO'--M/L;ꈊTR7+Ӧe Xh)kդj}n,|K!( iIB+ MmykS%> uheźɕHzz:IrE سɀ} z8"7SV \V}Ad2-Z`ժU."%ܜhikcgg59*%55U(̻w %--M»wxRRia@"FFF顥hkkcdd$RRSScΜ9B蒙׮]TR/|rĈQvrX/>W5 xF; F@IAK.{4lKx,cH3_$ˊV32C.˻Fwn7k{buۡfjA, N}iTi3ᗟ OoҽW*U@u{!|9w~~ھw_>|xACDD%IIIFXXX. #akkKb(VŋY^͛T*e˖-t޽OS?o0Цe tut8x8GM}JX\JZ3$Na~6w,L_Gu´%+ ,|֫݊d;a3N$j"&o%Dn/,vQ˟ d#<jY%k5GcQ?!ڴxZC4fhFLE%w<"wִmit#Q2G|jwD>JF ĩ2k!!9=~Ÿ30QTHGi/=j)LLL᳙ɄyRc`ffFٲe)]4KTRg0HAoK([ٸy+]z£{Ww|-$jj^9f`{G-nZԲw{)ڎ)װ)MI #e*0/\ق7O/L; zH"_n`2FV<""M{055ͦ laa:AAA<|ٳg.ڹEEprr|{{l""N)u}P!d>"!(JBݔZsjguywީlGvkmS{ZaDC=`$@Ф yJ+G~_{u,?}og">߷ 0 y!6֯]͂yOц~v*RRTl{y'E,eO,KɓܾB %ilfJSsVQh4RSSQ(_z}ww7TVVRZZj UDRa01 T^<|yM6;f3L)(`ْEMb>}4p=.RV\Ob,?l޲s/Lʝ간~ )pୃT*JDՄ:,A[jmGRR ?|ݎj=H:22QшdBV63(CV66`o[̝=әchk|45̞$;oSiɸz?W^)y [SĈzC2;,^8_ #hjjbPQQbj3|$&Mĉ%77 6~2CYBE(.ZF;>;~ mno3uJPߟ!ں:,>{kLZE (ٽ D;|;ϲt"=ߎ׿} 9&d32- Mj*qqDEBz_O3>q?ƳrX0o+]vaXX,>]h$//U+:lvN9ˑغ}:|Sd (GaSW~E*䛌_|c؄7nPaDU\(76/!jԌ3 cV4pC4P! 
3ޑۻ&&&N99 CRXWX)i@ @u.^RYu+H$1dOяmN]jC#5WՆFF8!S^&S^X+/;>\(|M=wJA>1]K=?~lz}،tFN{hJAn67cƧMM CKK+FN> C}x1w^~S[WωSxaVkG+ajAII%pigl6c6b #r]]P" q(,DT ~N'WivOh7l$$$ըIV*INNFHBJFP\&C.;IF ӉtpBAss ihZtMFF:i:S)56m$[pa  r);y}{Jے aȏCry1M{--yKk+VZp>rՅL&r≎B#%..DEE!}S@w7ϝ|>^Ϗrс\~'qd(z$F )*tZB[6Wp:ai5l鋡Ca'$9Y|>?.N'~s|~?^C Ѝ}DȻ>DDMB%>>>ԷF̚ɜY3)y}/uϭ uH0٧jӂ aLYy@WCȨ(fNC%P_m|PS%tEXtdate:create2021-09-07T15:56:36+03:00;t%tEXtdate:modify2021-09-07T15:56:36+03:00JqtEXtSoftwarewww.inkscape.org<IENDB`buildbot-3.4.0/master/docs/_images/multimaster.svg000066400000000000000000001422561413250514000222450ustar00rootroot00000000000000 buildbot-3.4.0/master/docs/_images/multimaster_src.svg000066400000000000000000011270341413250514000231120ustar00rootroot00000000000000 image/svg+xmlProcessAny processing function.Auxiliary OperationOffline operation. Users crossbar.io Database Build masters UI masters Workers Load Balancer Load Balancer buildbot-3.4.0/master/docs/_images/overview.png000066400000000000000000002022441413250514000215240ustar00rootroot00000000000000PNG  IHDRa<gAMA a cHRMz&u0`:pQ<bKGD pHYs+tIME QIDATxwxSU$MgJ)m){/٠EDsoE(){ U({tI?6tPPysϺ7w~K))=vj ^_c} =2sǎ*kr@ xd),,̹h kO{u9AQ(Gy00rK/A IIU@q9^`#;+M$66:uk 5*j{nXM^@ T|r9r"x011 _*2@ @pB @ 1B@ wɽ(99+XZYBޒ@ @ xgkWOUT <'Ǝ@ C|#GcRѡC{RDzU|ΝiHIM#&:.YJ\\,vm@ A h #406oW3Ʀ&!jExgZͼf˖ۻԸ@ ݧ=?‚B9|([͵9=ڞu#+++^ye=Fzz:Ǐ+WsǮ ۹3h@VXENNtNPR^ڴmy###ٺe;R]tI&֏gmDEEzhѢ9-Z@.m6tЁ"".Vy6mJvm㜜6nH222qqqQ`́i4 He7o%1!otը^gh4jZv@ j %* T@Fxb?^+KKK IMM7:gcʪys| )퍽}z"-I)Fe*%7$U璖Vfff8;;CFFFuTht̴9鈺M|BZ_-QDAzuIOO'!DDzf 5͛P} =V^/6nbQ<=aQ'Ng_RPP`Tv:?cͩS[4hKHIvYdrIp} 999gϱ{6mW34RF]Ί*cL{{*JWaeeE03+Mx& -F.ӯ} @ xiۮ -Fr5;]aioE] ?ˇX|}mڶ.wne,] F#rz^1HHL^*ׇ;Cad2?O?\uغe+Sa=3uڛ t:vߟ12kf7\eTϯ!Lۻ"o, ]Es6U2rF/)Zi| ) Ybr#ӭ{z=vb,_www @QQڶk'GsE6oJ.iݦ fʒ66֒`%|D:F㸸pb$˵/zrS&V ?æ9x ueScջ /";;Cӽ{7[бnnnS@$}HC鉛λѱ AM(hv:W\O?/?iR.`4 nO}9v۷ WA&Gu\rx~;3 5KQףt`ieQ* ?2ˍ,)ŒKHLLd[S> =-wwWZiCVVgϜkս`dѝc0#rvv.w̌ߴFR&A9AA),,d=O?%mРdY%ҳg& .G^&==\[ޔTu~~ 8h66Bq͂,ђ`ѷ_rJ+W"W_|U^:9s'NѲe9̓Ąa޽{j,fffׇ+VuVIC 
6FSun.EEE+@p)kq?ZjBT|==fT}X[[ŗ3oõX|$XİbJ^zE?6HjI><̱ǍKdT*:ٿf歬X#ѱc{:v,]>>z,bM@ z=a 4rY8WKptRa mΝ=GQQQؖZhU׭Ir9*ka"-)))ݠV|,&6XP0A< ivQCj 23ps7 $&Ý]\*mňR]OOOꕫ|ٗL|SJe2.NC.\.͚/VZ?-5W_dT‹3dcjZcǟziTlⓝmdggWXv^Yߊ|'$''/ߪx* r%G-N#77:͛-*"< @.3l0>GąO ~l}c@𑙙IffQٍ7if=fT7_U[XX[hܸYْή␭vd2z=Y\oyfFk[ʕL}sQ3N4RRN+ AAر=fu;zqO=En]jէVGWڤɜ:y+5255y`}F I^%Bwӗx{coxR6@||"_|=zͩo(|qnLO~U*/R9'o0h-1T׿_ "uʸ s-_ʕUINN&99٨ƵZ旟~A3gIMMEP0rl) ˊ󚙘T*),,$??10:vssc듸]rsspQYVVvzoN1Nhmmʹ6Vt,^ƪOd7߲aF=rYj>iٲ!!'Xd)}anڟfۀdž>_C|=Ze J WJ"<`ies?ːm=r];wѼE0=۹ZmZtry9[[[ Z͊P*DұcG瑚JI@ &GO>cΝ3Z*1fp3v 7b[cGGG^y%igaYgSbB,+uuuE.7$S(ou:]9 ƹ9::󯰶fLxfHOOg-0Q pW%Z2w%B @P;d(`D_m˗WZ{{;7baa_~_~/?Ѯ}馪^ 7k_4 nHˎ&&宫"E߾}زek}:;;3yw$'pd'/wXʶdһ:V`sj-҉ U\.D**uԑ4GΑG s Çrr%66(XXXB ZD.@dR;vpT nL}6]MLL_ߐ|L 8w`R7]+bxꪕͽgkrvEMBܶkNr+sggg̐nFF6=ŋ8M5N-ʝߴi :{{{WxO,aow갲d2)2-ʅ<#8}Dpjߡ=nngeVj rB! @ twwbʻ:VF}*̩w~4 4x@V-W.l޴+wwWڶmNcŊU7oR| vP( b3Gq%#tL~ӨMPPc91r8Qףصk7zJ?\6oRB˖ѳ96бƎg괩Fe 777{> BBNr輳}A+Jӷ7.&==[UM ^һOo/^JzZ:+Ƒ֯PΏgӲe[ى=8t>IHHe{}iڬMޣ;/$=--2X8._0*kݦ5O6( /rJ&>;v~quuE+P4 ~~ iqg:w|.8u*|"/ 'gG#'ԫWWܪuK222q.θXrޤSNl߶XrۇmZoߡ=99jBCHOGG:tAEJ*7W`aa!թ,n֭o"#/3:lHN.t 'Gr333~iBOP,M4: @  J%C ៿a3(@ZjiiFm+Z?6++Kx " 2qO=Y~J%b X? ˑ$k>|(_~1mw0j(8r+Wp^֭xcڄ6lܬk6r } S(@ ÅE E h0QVkB^^^U֓_ќKO33ji7*뻢9P͵y[ @ <B @ 1B@ wL^^/ A͡@ $Xff\)II=@ @^vVmOA𐑣V+Q@ rfj~SBG^o=33L23 u:evT*r KKKlmm @@IHL$"#/szqƒV>MLLpuq wիK kgiGK@# @ xH)((ٳ tN!3f#6.NI 5!iSVV BD8CG8rJ섋#ΎبVYam3S4U)_!?/sr&95TRΖO`+++ڶnEҩ#NNB!BT;?sB:7W|}hPu<[ U%;;1DEru.\$%%Zͮ={ٵg/rӫzN@# @ xt<|Urrvv4oDp7Į'뻁LFDz ϑNS8yo;1|ci BQ۷W ,BVߚu,^Ds.Noӊv[R^F`ooKvmԮ :Ws ?AJZvcn<;f4C⾹@ !X@Zz:.XĪkBZӡm+wDk{7E.ӰA}6ϸGpevϑ'(,7?2j0ƍ}[!`,>&3+,bF؞ӃN`an^Ӽ-d2|i˄'glڶĤd,_cFcPTd2'C S`!!EEE,Y?MvvT}h4Z\[YZЯWSߴKcUDF *|0>E@pqaGGEIe }31*r-[жe Nau\vLf)Ӫe rC%\ N 'Nr1b(*䄇۵\lZؾ]m^mfϜG5['--of};2wWz|-5s7 "icdU$y2/ɯacc- @ OB׳ظyK/[κߖϹSPPHEytt 3gٺq @ zmw2 A# onL&]떴 nʆ-;Xa3۰it\^SGZ,? 
ffhxcݳ#"urrw=^N133) Z~aRYfM8a,=RɰԾ 3HMMcп_}-B jZ,:*}5n,'$&(,,,X\rssș`km ci۲EmOɑwߚG{2Թl޲s/0Ok( @ %jE-2蜛{OҊaCK;v!!1kkk X8Ga&rj0|cXT'Y51n\ʡ#]6h@~AltspwwSmJj{=*FX|% ߇tl۾' koLfV&.蚎=}HJNA$Y3닙TW0zpbbYv883x@jlشϿ :wlGF~:](&<"ߚJB{G YaYٴoۊ}WWe11Fz<=4`J~/ٶ}i 7c?fr=!wK(5m"rƷlڲըݎ{Xj5?}?GGG X\ZfUJm6oV_@ EEE̘9K2}R(<5f$zw[ޞ Yf| "/_^D>{Iz</ƚuؽg/_~=~\aCaog@rr })y;kd.6`̨xyz/M ;MRr2`x8t; |GLL, Ç[S$-HHL*7Gpsu// Ya# }}yɼDڷkK.MHB'O=m5%׮GTn%8;91?I/(`=5 joM ML>=Pq2F<6ޞAxrޚ.jNWSG,/\,iJXv=jڨёedsu>s^ym ))w]ZdVUbB/Of^r Ə{oN`vA 173_&̯h Uw{-9C*OH{,ObR{7|祸WGPa#i߁k$'B 5*X$%+t+]~?|R3viR >H,Yֵؾc{ӓV- Ѫ.E^4)EZ-kod9v\+''zvnd(+Y _bRz"hڴ vvlLF.psrm۶i-SKyGW Tx& "/кE0oa4ofF L".>NWcq@ TL  ˕Y[[Qr9{vgޟŧJ 戋8Vjee@TP(VH\ξ>14!e||R {{Ą/>[["/_~'&**vh4E=Ƶs>dnn4 V '\vg{8^{T^n:ËFTt4z^@pQ϶^ȹ$gsssy{jrRtڅLG1*2x _Haa!km =#33S#)^i[N$(~&֬@LL,I| 6(rZ]9sl鳥!::^5 Зz /M$.>^gD ..,OBˣIP ePTT$9p#}w0$+$s33ӹx`R|(kWlڲ[ЫgO)+@)wU1=r8oN!'NqJr{'eNubȉSR |h^戏OW&IBŘC>x@mOA&Q*LXi+IILs~ [$KdlEv:Fx%ݺd t:CNr<$ W_~ٿIVVEEEظ8f>G:dRٲ5i N!?壴L{s2MZfF眜7vMM_ThX#>^eۑÇr1_9::2m+PPPK&dd{ǸG_XȎ{|*y2LF :,Iyn^yywأFjEسo/\$5- zhS Uό/?eݺ $%ΨnNGvvyyyiЬobIM5]ب ߯wg,_ /ޖfM2_IW$$װ!_"..ӫ': 4 `bb¬obmT,--h4ѣF)&55FF~~%K Z>g޹UzdL|q9z$'O3w111@pѽ)6̍ 9BN(//ھ="Xx]۷cy*oLp+m݊ *ݝ7^&V_|s N 葕W0xP_e{Ҥ9sssJ%1D^JEf(r4"NRPPHݺ^ԫ[¾INN-ģorVNN`C_t@ !qq't:,yKl:E>b&?,[ߓ|ELJ"".af2__r+VFVTHũ0^|uQ*M^_ȘQ%\/h dŌoi׶ Xpw!1j|deg#xix>9s;Co[Fp!?Jrqݺ, xծxyyʟ3;VE5DJHL"!!̬,L@p*4Nԩ)зA11(vvl߮ .ժ۹c{1^g~=(k4Elڲ hPۗ$!NIǶ*ض$%_H^A! 9* -io w7zvĶ{9|ai<\. 
NLGm_O VVVpuV]Ǩ1׬@tt vvgdTGDEvKll>;;[ΝR!ˀ~}| N6bMXZXܳP*Mغm;EZ^l b0_ٳHIM%;;[[[tx~Paii~kߋbYp.M~j1#vb԰Nwe ;;[zvm8rgnT@P j4A^p}VM[_ӯUZ_Юm8|EK>=rR*||y}ˌ5BNLL,,-fͺ եw=X(*LMMڥ=wř} 7oAS^]TRjۦt֕Q#1h`J%z^jApbnnF>Xnޜ!ѷWK7WDD:[۷ƱÉu\*\O7gH~Ft"i@ Tw"VZN02M*K}znՂ!'Yq35xqdTbaaw̔=Iw'ˇ6S_S4զ-ۥ_mZkaOP/?V+rSNڥј& km 6.V'r!yRٹ(#yȇ$1)us); ٶ`ooKvms{'::oz‘[ nBTʕt:RRR+mxI(&hZSrsscUKtekodcˇ kh$TD^6& P3qիSp%BÕ9t%.>08; =q3ٱ V~'!)ȳt z=rzx:ѣ[aŞ'噧A 4lq⮍;o/d2Yd ZVh-&Ԩ¯aGETVxxWλ^]F΢%%gIX}NAHHL:ޮ.VE‚Zm`_dj<7~ **9B!b?"(J>{w*v8=زs/))*HQQ.NJM99XYZbffjwVv6:b#Ac4og;џɩwWj˄,=p't/g&VԭI#?]`˶ ( *K/<Ǚs/i733e[S(~ٹ{/ t ff4՗_(w޾z/\;[kHMKV}iKjj*oM|| pwgټe;w>TI]8p$xhX-L(ݭ3=tҕ[aұ_ 棷'=PlݾuaWε6!ʂ꽹 H#!1 wǻ2^Ϯ9w!LO>@ TA nn,w.'O"//@\]\̌W^|O_i')9ƍ=gg'g\N||Bh\^lD(**TiNTa74{N6{|S_fv={U! ¯/je+V1 ࡣtbka=jT^=Shoe>0xbXXXd؝KLJ?rdmL^~>E+׊2t:NeC"z=]B2_ Wokccbk{%w۵g㦤e~g,A&1x@;kRh]QfOգCCPFÞEY U*orsûi߁=vM hӪe5G>W_*'ThZlCNh nZ͞}Yb={A2x NUR\./?c ?p̨_ߛݺұC֤wdgg]33S~;._c'ՅF<6?`XϜ% /!**{{{À~}[.EEҩ |qsЀh462DR*MY,].RY1?ZF&SXX@VVw߅e/+KW ߛ8NoӢNlڱǨ;}waeF& >9btn>\!9-qcN1zӻ[g.Z^k6m#gjT4gW0-w5*X9{iF lU$$%STt?l$#a:f@EpbQym011aCF {ӶU0 ]Oʲ6:A9KˏPq>޴iьE+,-;rV'3;1~ Wʻvh\.g cԧU,^nנ>;v͕z7D z=kbڌO}Pﶀ;;[222ٵg/C$L*hݪg}aOeк''p%8&m)>.^oеs' c߁ O;?Y|%?)%LU^4W$(1FԊ`80aX;uhGV)&>Tٯ_|󖖖<;a:Ѹ[MB0pavFھIŹ{eFr)2^OݺurDcbƯM7 Rlmml7WVvN9h405-5W$-Օ %>(xQ7KKK 99ףT*1777rظ8] WgARGdyG5 _i0b̓l߹ۨBEsCظ8RRRɑ"ivd.^VBDMkCgA! 0 135e""9*wsDŽ^dq+( .^2hVT*+TŚd>|]yfܽ-[qQ?*iXM̾G)(,ZHI ;7~)ŹY,-]!KZ4 {' 8q+ܽ JT* J}VKo4 JKϩ< +TNgFeu˦P`0m FՊGr<4`L~~>3EQ۴jh4EaaaAϞϹt:<`X_vB6tY|%"//\'ǎ&΋NBB"&++[҄Cq[ +Wٗ_`x}[L]MX\M[?UO_b'IV,]Է%**V֓^tIEq=*ZvG33S,+O(y~4kSٱw?V<3vdcP`iaA\B'B9N9CXd s3&&RpR Lrrl&!! 
ca֦GL99y]a_O^2:^gҝ\J4*ѭmx43^~zZ9N+e eN!]68CvahhVr*4Lj?ҦG6įZL4 ''G[\WG|BB$y̜9 |J׮GI\|=]pw*33SthzQ0(J<ݥ 33SasOߴ >9NNW~ׇOy¶/?Tm_éҗo;戋Oe")&zy:agcEFV9~KwߴI͘{i4^OinD\eCFzѡ^_\~];wgc  v;:8{yyF||1$% ݺݺv"^NTaEf;-55MkȗV9%5Vymڷ} 9oťHt:3];win"֬!7'"HUAXa =!)W^  D.!?߻EI҄ʑIKʜ \Pn[&X[d2iŲጓ3`oB[@8)=kh?)9}03€/6֖h:2;kڪxA4Ao) t\~_ 5ͨؽgZN*k׶R,[`݆M$'bH8Y[24 /4+_!<3LCՁJ-e2m[ULyk:'N͵Z0طߐe`r5'a"v_~ez:S~9,KD n$ֺ{$KOhHDu7'z+l@[\܅("3*}*,u/$/.o;g'GS, I7ן~5kΡIPc=BmV۰h4 ۵Ū8+~ }xI ݶu+A۠yr옛e$խ[* {{sdhqMl)kLl$T>)'zzI0!+ 99 u[''GM}IKp1޳O@p?p1BUInҌ1d` _v&& ~s.L*k}Ծqm_M07e (2ֽnOSrEEE( إs'".^Bjhݪ%s2h>m6VVVh4E޳VZTB6&&gg'=Ǝ;CY7Wʦ+r:۶V3gXq`ȣUg9ѣ!lNt3$>>LX4yoIr~r" TV899̹ ;?<[Hhiu팅bsZy:'$$rE)5%xP Z!33TS`{ q`]ƎNrRY9y+( 3<=mI n070bHG\ J<9j051ɖ:tjE/<1tb2wwu4:_]J"#S:7B i03{[k5O`=*K=/OBNKa^5~ 9w>BNօ? @Dy%6}>r}2IMKC葼K|)DJRT^gJўn义gO:M3f QxU\]ILJ# bȦQQsR^^^;Fg I#uLΝ ??W_/#""٥Z2|78::^EPо]Zk)v:~Uh'wsejW{ lw݀->]lnGcFR}p+ IHL@j  UЦu+Νi&խ[zuv=֭W|{gvMQQId2ӢETYӦ>}WJ#Sd2i\IT\6 "+;ls^^Lxjzvu8s<`0Ą/?/fʕkrڷmOW&G+SƍSRd+KKK}B&<=3gx)sssƏK=KddFstssӓX/k& }k'aB5 ͍\NϵQ|L~ڞgja VPsiZ{qKYihP:y 3?G489h4XZZ;giiNgNյ\>=ӻgwHO ;;GG 3s߈3 0XdfBºH e133_~$>!z,/j5&&Jljiʥ D{?=s&))uR-V3?GvNT8;;jZ,8*Ĥ$~sr8{EEE" CØKXZ5E^! ")M1oi;TPK`!(٩4f||tIN7/{b';5l?a5TG ҺteXZZV gg' [U(T\.72qJ0̥zZsk(Aamr_]]\WgΞE41239qxRP6 KN*%azuiִ i鄄$!1tLMM lD"u:/\ g3ݻv1KKKcXҹ36mʕt(%Vk^&")1ڌdI^!rb;pBmOOp$'`aa ‚;\Ng }FP ܿf~^!/l? ]mN Ը`qu6mJvN6m[{;;]:uV/EF2W&Тy0?BNTt |}2fMp*4קL3/sS>җe?%!&6o33Syؾco5JF#C?w a/a;yXU=;%*8j%(ddd`ޮA.aeeEvvgΞe2yݙF޹"%u?F)(lP܀6;\;VPVT]5"gZ0\[:X%kbLܱ0P:>XḻBw 2S3cټ4χteB*7j,Μ=ˤoIo̓ODK/Tfæ<F|B@ѩc{9YS{{{\ݨSK K !&6B ;}̓ϨL Mw5Ғ |pwœC)ζlsh)iݷ o:U*|@w}rxƏ٧;_CceJi Q/̷)o!] p}kTG2 <&sO%e jLX Y[Pg>dJCDB@&ǬAOEM٘7nG%.6%7j,ZHemzF&D,V Ǥ7ҩC{4\]\|mԻbD8H˵3m* :c H~2̔rmto$~2ͻ +~&v: ܒ?#m7F]ݰQ>{~V Ϧ~.@P]jTBI(ah|~ zt c䩆b$bc1r0yHioM3LEJ(((?qO}NMξ؏zxǻ 4 +(JEw72r2ȏ8 @qz3Rb ڶ UAȔ䆔DQp9Uǐ[ !Q?Y @P c.1r9~>$\p^|S"'Oe%UQYDC4(\6 |72q1$=mV* c_#w!gjҗ݈W> E0{t+&hQ" _E_!s/qX6VXĬ~c<݀^!gXY۷ѷ!fp˯! 
}I eHNN4apuq&;'DGǟ8tynR$'G+WPĕqjʴ }QRIVV6\!7/WI 3Aq=*sVwR4jbbbymTtio^S)!&ph3S?udYȖr}*ꑹM7ziR]50Eڎ}Ot`tNTӢQnW7d7}ss Ecjκd|3L}ajZ!eP/ٲu͚-m[b޸-gR'^YfDtEӎymVy玡0xrCv\?oԇ7g]?mQJM禉 z=f-0q69?1o]G}6m@T.%/ +Eju|ٻ38v*=HAnm/".ǷN(A}l+|`TvsYnqKDƺHI !/v ?T*Xo}u-r{I hȟ# "F'mݽyk4E: SGWe{ѻcoi9lrc/atN.jOҤ7.իY {=jkce~\ cHI"##M++sTxy:ѪyC: #!I] ߏ\.G_T^x)g_x {no{[(B.sgLξ5Ľ7Vp{/Lo_*6ihҁD'!bZrr'/l?:WݑH/z5|WN…ྤFWp/0҂^T2~A+r:oÞrڶ~aӖ(r&Gv#-81z[Ξ~}6t8_~=T*yvx7%)9?@r?B)ߏqOÝ/#y5*%͚V)Xu:|w?Rѯ,-A#>r,`ܕKf$s̯9+~aMEI(&.+l]E,?\ R}B?wB}jᒥ\x/*ᢨ؜SarVjZgI/ҼNje{B"z G68Şp4<ܫg^~tŧʾgP,~="r5ظjss3!NZZOLl G]`1i(>n5geL#ˑd2#W^x15νm !@}d3?O%7dѓQo VUP58m[AQj<˾'֚,pz#Rz|&ĵn~}o0'!\jT΢sⳏ,^0>{V^Сq CF,=w#ߟͲ~ G:u&*d2drfq:6CaEn#ʠ49]Aaso7eļُ7C_>?}~.8{Q׽XN\ߔS{kSRRS,v6WÚ;jXzUg#ժ{#"bHN6GUmjz6RFZR=ax%Wu M=2߸6yjןzN`M527̫N6*;LL0 ԇ7؜ƿ7Qz?^HQ :9s= /(({[9""gΞ'&6RINN9ʹd jҒn]زu;"/SPPȱǥ4*:ؒIK_BUbW׼055ՍEܬ)}zښtBNhr"XU? N*[p qCɄhۀ;OCLX\aIj/MOB.V}d ֽ7G}&OlY*8Tj (~Cw\{IlJ$~9& U6|"EGpRth-AWս9TYoŚ٫1߉ #vw#1e4oKffJ|E%q>"\v.}̱)77gf¬FeVV(p A}wx9!HJhpju>.,2+Tsss,q~,2ygSxj5?tRPճ%FUYonXd2yV"ę/u!/+=iz*y!{G kzVY&ِ 9sYhIJ m۴f/V\>%OҳGھ\A1 oێ]Pa(ֆ>;;Vbch4E\ckcKˤ߉utaa!7o1W&ӧwO<6]@~~> Iőjo߶ .$&%s*4Sa(rMIKK3DywNNL 離c_KƪU U>jmZꅉaak%cۤO%&؏|_!(51kDIM8&/[6ǒS-k޵\S,ZHǎg|W*Mp;_~ ryMbr;vVEEZ/onNx6Z ;}ẍ. \>N'e5|}x}>%ۚ1#hChS72jxg7+]Hŧ,<r-v"t`m=:fզdv3Q.E#rs+óH3(B%-Afj̀ dm]6;yھwLzz/Z9}߿7K((X3qaϜ߹sgoٺ/Ya.3VW "=jE_ߛ=!EX273ݍF~(5QñecFqxǎ`og#CtfM ;Nx W9BAA!̓2fv3 cTV1g.^(lm61Q(XT88KcyQ(9W_z+#!uI[4IlMDӒ!UA(=o R 2npp@I_72VB,AZ苤9dq3%gevBd2̃ڑ??Xw.(9v;sXƁb, R"T\,=hBō_ y$$$kITT43fΒ8::Ņ4MAS+_C_>[jӧwrݺt62SǫOEErAMʜ2yR>M}ӳZc Jib;f~- :-2K _-W9c5<5zM2)=ǬA,iV1 ϐ{b824Zq0u~EAi(|0ZaT9JHٙGkIOOoi3N-?NÆff<ԓv% ;r9aڔV/}\EQ/,ٲUhirQu0hy@rJwJȚ7nyvv}[ѫ{smh턄tl*ΏabjBO[Oӱ{Fuoya53y+ ϴ] m1oܶj^z)]^qS|b[qAC U}XؿomOA>C ~+++ޝ>n]v:JMJJ&33 /O\QG{B`ޝ>Ɓj{*A8:Sչj2-*<ڶ@Nc],]GᄷY8P J.ӬfXYK9-H~~!ht 4*a=}z`Ƞvϼ*=bOV'#3 rrrMc\V*d2 hsɑ>{I9,T@p+,ZL*j5_V-[R? 01q1hG*3+KRۗ.&Bu^yy&>4ryM nZz4& {igN 3ߺuI#zj^vEW~֥N4*W*J&8O _ dd1k9!'/-5y~{f4en2n ;vŦ7b_IyULd*˩[fJ$$&V?v @ (Cل I4JԿ-{K. 
r>.k%uĒM|̡NJFdve &56m=N+F gN9iw,{fUL>Q䑔)ܔo¿O,Xd2#T xT5b({3)Ѫe "8CpΝ`?i@3Egll&*:d\oyE@PggU*sr>ML<;_|W3wuMzTyer4 18|aq#@ 1wS)MӦqu+K3Fh*n0m/ȱ F25>wxuwǕKTcnҬ-:nPs[EE 3(#Kf|!RP(nϥqRY^~e%<=^BfnQF tKh4q[ )^yy8D^^={抏7o*3:ᄝ62⌿_YPԊ`qy~mN[fڛoܖ͞@ )'&.^5,rra,[W{lq}\CrdoNNWTeRӳϕ[Ptz1FW{nM!2n8Lqsv?^B6l@M011A&abrtK {$ Yj8Y*w 8vJ٩hռ~#vd3ddfk4E,XiSFU e-6 BPB ]ʇ}<&Oz?Ʉgnۺm["##8d2.N8::Z, 2r^r3gq23ok{û^]lmm$ezwZc}+&|[m9^c'h::4f٪}RĤt._M0 )gi^MUC9\ʂdUw7޾cuuؼ5~^$XX.>tr*F>UqSv>r~``Ŭ\n|>CwLG]NmO\ݴMAF#g?r<|DQhܨSm߆ۢ$_ȇ/!xCv䎸I^AkӪJQ&MBA!bȪk-O}os\|Rp&877dw 99j/]SO>Q.Q(8;;UjϾPajjJfMtV-Xh)ZKKKF Kԭ+GJ7EBO`Ѫͥ`p~}ZѻGs鸲,F>ggcFL3,5GLFun>r/'ݒ@1_kڶ)5rqZs/~) I^xQ{ﺮk'3#bjz`i@'iղXA%Ԩ`1%1~Xɼ:HssKUuxej^Ǭ~B)".>>>[}Xat_<ؐ[Hd^N{v p%<= /DDԄXr599ꚼ]>gn\|Ĥ;K`r=:xc{ΝP(RD( O Qd1^ צԷY?鼹i sɱS^͠N?ʨ'qe>EK`) jCG d2]:uD.K-B Q&PĄ7ߘtKڵZ-v쒎[47ht:ʊ^}ンOꚙUk k2*¢xW >Dh.GLFCx3XmOIpt!+Wп_o=h'f,HIIҥH^N}z373cԈ>e+VhTk,@"ؿ/>"\÷xA~)]VA B 9ƣbggK9315UbogMi4E\~ w7wT*ھۢZT٭7lsP9Njjϴ7'Zo{{EKrMZciݒ"tL~m 9ǎo O~~>͚bccKbbsM6@p" kEEEܻ_ ^=z`bb"2n ʕk|9k&[803x`ϕYz-K !!蜭 +,ʊ4>}ztS&4463\IDATf';~g{ߕ`ԭKgOBb_^=o! IMM=m;v@jj*W:wQ||p*4 BA6d0sh J5]:w ?ƍccc͜~9q$9j5nn4iҘN#l޺QѤLyjXL&cԈaٷvǐj{Z;)*2xl p%gnLxjfJ8TxIޘ6_}m 90jİ{.XXZ*,,,? 
ڶi4r7~ ;IPc6 !T7'&&NԓKAp<=iղQ]Z}fddL.n/ BVs=*W+ihw"`_\|חOJN&66SS3Zt_BwVM3L~G&c/HOd#ک%Klߵun|+`!#==4ӂEKߴ7+ǟk?yp3~qqqO0JlR14а0h  ƏO`XYYmZ{|ž}aӺ[n~1_{tY23 F۷kk$T _|5M[Tg<>>/B2҄ÇKKg{"y\~WJƒ'?-.Μ cR_Gбk/\|׫l+F ۻoF4>yۼ 0uhN,GFû|'+>I8T^.]{;Y[ZéC%¼8@vNwJZz:_=99j RYViZ.Ikڄ)'Gͤ7JBvvh4E,Y֬-~gFXTn" @ <{ڞ.i.sx'P*R$(<,^H|רok^~e6>M/8,}+/2ol50~X`{wna綍a]q~[%37g6޺W),,ohW.lڲ\Lz lX/?ΒL.^4@~ 7g6NN`DA1ol͙̓ U,d2@/Cnk֓{ko6n)VtQpԩre99j.+_h"X*KKK˯gϿt?h={t^ݺE23m{{ܯ/~ų֢9p0Z?LMMo^=9a綶6 j0R*krm&bmmMf()i5IVVͯ*@ T^ /=?>llk{jb굒o ֦r?~[.DD9rԐ~ŪݫG ?5pYjJ#ލ iӺǎpUN{R:S_e鲕$;ɤ$6l_~ӡmLK;w /'///Fj)Z>$h8rrrII K ͿX,`1e6zM C,\Baa!:ƨsssc+/:Mmwk^.[[_s'77O0^}ݏLfڃa4Xtyzaj/3xtO,K9j}G;vޕfĴGcǎO@!wB``@KW1tB=EQxX~07t\\PBB@I`!Dw1WtX>{ҽ[WBCBPy ;o̚=ztc򕤦seWxo// (((_ۢb6_-|ŤKyڥ3`K;;$ GQZZdWx [STTQѷO 'tõܵ-[c2۟O}nZBCC!V3_fٴ=codff_2_%)9 ф 8e$;N~A&_0ᾮX:˯őj ̞#-[P߸2 4c˯|%B3>><==R !ӝf͚Mg1sWHH0N9U3Fn՞`yq,[y Q1ٿ²/ðCX?~}³Oq]B≓D`06:CHNe\h41kR{jL~/` bJJmDslGvޫ?,Ue^^|[8p\rrrQݺ)cu^Eܮ]B5yX?)ޭrwC+)AVVnnnǯ?2h'-=6uI)<,]0R QY,2(;j%j9sm<&^<^/kWFe <ؓմ VTӱX,ܼ,F`[-==@Zl𷗟OqI a28r4ZYfBA $Ee3_Ulp<1l<=iT,fCF=qIMMCu!"?6:pRbWR#}w;\|g80nrrr ӏdzi4jR'{qHJN_]t"#]4q{UD86t::QSBCCjX˖kZT퍷iji6CB Uy|}}ECVlu}KVPZvyyy96)5-[ծcxghX͚5sQloEq^HP!BQye󸸸`4yyG~bA̓ J!Dݝ"%%~'?g-~f.EZ }}&7X,gC?FINYﻇvmꪦBqf\wm|2w~)*8yRmO7& ^èq?9HPSN7 Ϋh4c!gα())q?s5WV!OqA8ˎoڼ9hزE(wu?#!8-F >ͱlj?teвE(ƌrvDc t؁[ *dBsXT쉰3ͤ*|2kقP(I≓X11ٟBӃ{[ni0_g[?g>` !*:C"#ls`rqꊗ^2?~&:sB:h4{oba߱ygI[~#Θjח^~@BswqC% >poŝ:uTo/]n[9[ܼ9:ݦ?!8#1ݷpssbg_=n$mܼ_}bӓ7^ypYO!΃sX`OC_2U1ll,Zv 猢(ލwxWWWF~7osvњ56gz_x11j(YO!έs@^t}5k5W]ɏ?b/ѭkbڵ\BHЯo^{E}% ~wnb~.^p ~9ł/,ݻuEөCdN;p$Ͼ&{tvqyWՠ{.L=ZEDquuUTHP!Bq(FyP_~:)/ '/Wb6(F폦SPXqcxW hL$=Bqa8}(B4%G"M#  ,Б{-;ēL[%͝GÆ !ԸBqهF]~DV%=Q~3Oz/j`6Eh7͍ѣFȰ'!piB'Xh O}@3^#[p,/o0 gשRR`GjP:~L&M}+eU!pB le<7e5:Ì1JKK{1j`.x~eMAFV/+Vl6΍_]{Gd23K/2vhg!hR$B hɲ^4GDEErAhٲ%Voϖ $S'c:LZZ4kOt6tEF !.%˖s/VT@yJZEQ Ws_|-[m``%,\}{qQDF;m3$0R6lނbUۇovmqqqQWЮ]h .ɜX޳Wx =:vϧsx"/11Oxrzb;u"//ǟz{TW^q9=p3߾ 9]Pa(OУ{7>6o7qL&Vd w77g:+,*f,_C=k *gW-B8={ÏRZjl=7l2MXrAN8IXXKc\Z=dgjPFxXddfbX2x޺mPQj( }WYcfIБt@>=۫;zoF{aV6mNiiVe@\sՕthwPT\SSp( c..S 
oɃ݃/%׻2t+QG] ߗfXVX@HH0~ Ŭ^ݺ: !3 **܃۩#/wq;scŤc4q;fL:uhO.9g cn03v(]t =J'[]p S$B)H8vNx'2rq6~h5XlC`jm.5()k R ehgm!DsAEEsqͬ۰%Kvz)))e8n 0FӮmڴ$ey(.)!D&p 0έ'eaM]>✓ws?\!pt/YF~~}GnUټyzvuԮ]:ӲE N&%q&Ss{{{3?4'+;]w;oiiYl9 **`0|DQQko` lشl22X56ҲE(̓ yPx{yW jB '-=LR9LzFfCݫwbęU칐B!οsX:|{B~ wq+7^~'N&[8qccGV\Eo$;v0j0t:L/`sYl9MkЃ!e+xyPQQšAVF7#08Ϧ[ع{7{#'k`XHIM#%5wuEb4 _fء=;uWDDѨ쓱+N>iV !@i`ߩA7UWNFwUj}¸1| V+K֛odj]Lglfssw1a I ! TTVb\`Zjt؁cX0?CIHȑ$8IrJ c q;777BC'*2ִjMPuovE]U\<7et:F z޾!hji`q*L&))(* kɱ9r(ʲA۩p2vc՘fywѺhJqN9#~1^ժmDѺus, YY$%%C^^yyM&  t텟/̓q7428dg_B!ΑsXDx<`dokO}dQQkuqcF#n';waמe/9""xT>d:׬l=)X!gձ_{1\\\ 1Z<(H-{]!U|)aB5B!ίs݊w5sb;vRo/[>l-*.n'y(R,O!!KܝBQ ʽ#VNCףqss?s\bT>~M9=7Wrvф;ETV̜O<0?S.4~C Du,[ V٧~OOO ЭkBCC[yغczZ!8S׋.+f86*>v5+V !Dvί 'ysLgj2oBth4rx". oj3KT;xg1JѽAnzѿ_ a !V/VTD;hgL@Juâx9~U !D&;o4X ]ٹkfكAW{HL<={Z8_@Ngu!D#`*L&[摒7?zMBqnӡPujygy Џ@< Gnu-Xai.ã><=݋AAxzz@lNx͗; !eWV *>ڷwvDU7,ꉧaQBq:@QвyuS)5>'1I|{BFh< T4"s!z,ٵ{O>T%hbڵO>ۖ>{.B4X!D-޳Wgݮ̩h,:o?R BBQK^Ā~}ۉ/?DFC|'vȠ3ZRBڒ19o_hr9hZ9`O ^___?Qj6Qi1x+6q<2mvkg=!hp$BK}$>0sN:pk ñ}/Eo:xJ!+1$:t7B&I !DmsivS mL:g--hsхF;4<'c.ȵhG:!1KQ>;60m@hK)=]PK\6$`5JZUg8~h+BI1mA㸎. G>..8sn')+߉sXM 7,k!gD !DS}qmݑ^rڢkZ]wINƔnӷJ˷imKat߻OyP},g_>ֳk>xp,EwDWoƔJG ڹxN]ݖ޽z_qu#utvWvAezSV >co$=;g8oRrpNG!4)\Pϟ{5x >dVg ߕܹ۟Pe)!OVRoSzp;ރgQ[K9`=J|/M}p< (4t:v~~Kģp΢`?-u5nzDΟRw,FgxKфm!DSv3*އ,%|rLiV{靔ǑM;+ V勞aJ~{9~` >&X 䐿wWN//1SqJnSZ[Jw VpQy E[;2&,;oBnL&(=qk(ڱ>'s~xOŔm KyMBzpl?5l6T. [>m[hyPo\mAѸy: Os o)+҄UUwl|F_ 7,[Lи'~x=b 7=K{ˀmϨkp<Sl- jLiR @ײMe|/9c??uWMU`1ۊ@]5nY/=sNznܻ V9/OJNރ6 1 uIޒYh~V1ny_BQXp qx .5Z-%ExtJQm+p6,*vk 4.4J*nXGGwh*x Hs0֒" 7-BqW7<RW}/ٔ0ljEm+xTR~.q%X,n^Re1aJ!oZ}n{?!fڣe϶Fe=|Ojf!8$B[3e$m]xyV)Mkd1S?c8n54}V7j4^r_E!o B4iX!}t^c \F㟓sd14tZ<=SyiA-ՅFbLN*nZSc+~l~p"hp kKе?~.Z\lrE{dn~ߔQ_`hڧL{5gE42I)<,]0?BTbƔ mHD՞*c W=f!.,gN`b <ؓ4gE42v#̞#mm J۰q~ÆpKpۿ4.=Xdx~>ͪM+BFmPT|Gt,,Dt˧FǞ|F !D$QYn=<T|T.x) .BFrPe *b;u<# qv*BGh,f!!U}"A?j .V]좉LQK!#wUPr4kklGt:(,J c6J\\Z(4Y ѐŽ>Baa!:} 좉F(%%WLaPx{;8I$n^;wjݲuVh42SZZYzCFnnnhg̉7DPgzjbZ9y2ǎLRR2ɩ)dg琓CNn.REEX,/OOtx닿! 
ePZEDIP!),*$ך뙿`!&l6c00 %Q)NCןuMIѭk[Ozf3`0N* OOϳ;~vs빺պ51ڥ3]tudCOHCe *`5HP!\د JKK9DFF&99&Պ\鉇~HP\5&IF\5 www|||oV٬ FA"9Xv6o!+;t:퍗^^^x{yZbB /$-#]m}zd@~ ח̓n~) *D"8@𑣜8quamѲE(mhE6tooW5<$= (h4t:yp^ (o(aWRtXɢ%Kٵ{O֭"kIDx-CiY9yygrI8IbI;NQQ՞EQhӎ1G1jpZuAŧ@N.u؂ܼ<6m¶;ؾ#C|wuU{3%%uzF!Uݻu[׮ׇf뼆KQ7p !R+V⯿a˶UNtԑ1miM04 wrX,LJa!?ν/pGQv̥LQ#qwww'` *DcЃcǏt V]Wcc <0@]3텋KϵX,PPP@~AYY٤gdAjZ: 'aTi4bڵe1dqH`!YJIIe毳;9995 _օz5brhv~VSb؋+iQ/NTƬrp轷ۧVtEKr`|]]ulEDG*+[ձ} wnt 7ba뎝,]]{c޽;9x}# c6 *DS#n'=);Xz )``e|$$sx,E ɀ=iYӋgdeyoC귀f͸˸+du%‡Ӧt Ņ}zqѴn"s'p)mh,45d nZGϾ~U *DStb8nnn\_lrnn כ9IJUkoFL|7p-~~~\QXqEEE| fRX]Fð}p}_tZ-S&_ނgO~Oj`!AhN\ ;n{w9}풒ffLnnȐ挿h$CM0ݸr<Ãk//I]+DmH`!)YzfEQЧS.HHpsg⏿r:צ?>c/}V ÷cOJP! WWW5d͊%V+/iZСud+&]2ݻի$je}Ͽ;P>ߏ{I^FCSBjE6#h9]D~9OxsX:oˎL4=5 Ꝼ2?|T6 <8]t,RB@ !*Ya#/*-uWMfLٰfJ^m|7>FsEBө===%MZFfg_|ˋ'0f N}eN~W3[}v%xQBT!e #Mά_S3эn_g*((Yzuċ'#Q`b`a'Am}=Jqq ,?3O%-ӓFⱣ񸻻;- y ׼ :*$$y}zP! ,Rxgٵ{`xrՓ5l`l޶/ݪUmju­X>^^^yoWnm|99gXXVf~2]knfy:Rؽw?` $LO]SD I`!-[3ϓ @658ٹL[/>ϰ!ZM'z,*햛6`~i&|<5^d̀-2&#=Ojt̘;es:uL˖-$MIg|^y-L&[ QsW;}؆jς, p7s2 *DSÌ5>^b<$%'<(ﺕvm6T>-/H~}ehh$Mj/oj:hlںOLGNWm?\¶-BhHg%!f%$WHZNu\uc,Xf[ԯwr=z}XB3L.]l*CW^M/hR$Mbg_`2[uxX !|}%VXT}.6էWOM<<SGPgQ?DztƇヘsɐB4 V_{9 *2g}ool4FM hڽGxٌ/<0e5zUGάݸR6lD kR 4;"[IOhӻ&$PR\B޽E4Z2cR4JYY<Գ FZ->?ɄRwuj˯qdOB[?>x<xɐOӻC矟fbjJ}' ,Dc6yIMKkTTd+x(..qvhP`Xpws㉇陵%xzx#l݆bqvф8$~ϖmԿ/cFsv) ׇqGp`p7ZS_!-=]9H`!]շgI WOmY٬BTeZy79v8O@Ϯ]],QvQ|32x~+FD"h4y1tZm],Q ...qwsjo%'\!Na޿ Xd)ݻr.8#2t`nΏ?ϔѨH`!i}ɓI\;r"œ]$QArdffG0Lr'x$+7x -BعkѐZH4 ٿ汣;H ؟=hR֮ '\!*X,ҫ( u:1XzMooR\\,YD hf3/Nݷ,-w -7\mAwɞ"Dmw0f0vtvDlK/h1'\}9sa.8^b S?N@rJd9銦.5-} ւoE4=;H>FȽ`Ip!,ʻ|jEqUW8Hr%xyzɧ_PZZ*h$ 7HII S&Mtvqyph0|ÏX&kٵ{MP`$/OOd}I;O攉K edd眿ک#b:H<oJ=Xt9 ǤB4=VϿ%bxK،>@'5 Ae &Kǣ( V_f!Edr`j:fxp3k!" 
,DSRR m48H< kт]:`b222d+~V;Jl Gh-]_SτhH$ μ 1]q\<= ̝'-yشe+`^h4\26-55%KK]' ,D_Aj ѸulCV̛(CDϳлr!.8φ 苟3-sD#hP߯ 2Fqv2b`RRSٸidMɤ$֮rvy3jsZE Ѡ9g.`2>x#.Awu`Ғ']4Bz+C?zN4(Xd2l uD3?gI\@7_ -yѲX,̝mZ*:B΁UDi-51}EQ׸q)+**u]s/5@"Y9A@3oڷ C;l!3+|J0L(_O|< A=Fi3pu*.1riqqq\rr '"<󢙿)?SkIJj= Q F۶9}luF]q)baa}ww=RH$7b||< !mi)Lfv>YYb4h|}=$(зJ}RYIs:]Z}]JNN!P\\ ;>4>SQݸRk*E$׬V+GNa-Oe}օo?ʾ>yVٯ`{>TӚLc֯+ٸF1\ЯWOGeYfTꪥsH.߇[UW/b]ke _RrOYk4 ֕C"06mNrJ Ÿ֩B7Gd=F~7ǜ5}=|pAhS\\F']5Tn^wz_Qjxy_Wu!CLbnՖsظyÅ~ezd|EW߰4틹lzPˍ1vT*%OwBj8N@NN!{:}_sX7?.Yiʨ=20:}hFBK2Jk햡ݿKc+dj?|R *n?X~G8\lTړ#O~ɚ{j<X2GNTO~A) xY3\-+_|3ߡ[seNTq$!_o\v 'N&nه}DD8]b[;/**jݵ'AmAXl;\~N:܏n!ؼ =56(,*eҭ<$OvS`ٴ/CUW/!h%1SrX?g_w~=AU#=OLXzdRzyׯoHö}w6@bCDۥsz$7Jkم^PXxbzjY3z[?Wq3z9u{ŵ18vIj_/OOmY ǰX,2L[2JkZc#Y|(Ё՟h7ou<’*]*h;Z M&3}/IW o22rYb;S/..ן+_F!;uU3wwSxzz.ökFD'J[&_:NGaQ N *?9nOtN_+Iꉶ/B4Iɶfh>!h01Y,* 'qb4Gc%6||V:0'||*"KAӒ_X̾}Yqur`KJj̯^KKPo6hXğb8\p& 9%yf0_GG օ#ذy?'{\1+.oۦdvuC'wݢk|Gw^ԓ?uRR)KSW-t؋z7_?aXD]4cú%6>߾ ,|ѻRj0hN]3F֕C-$7a[I=ѭK@vzlCVEZz1*ghۦvdsYn=nğ m 8td@w޼lQ%O+X|=,=eЮz;^<0V?zxw:D ʷ  8}`ȾHMM QIӞײlz}-'vr8H8O~UKVVz]tnnrTԦu(O:IZQnv|vw> EQ?O9>ƊU;}D]!ua[vnߏ-͓hRRl-eeW *Ϊ۷9b8JM|uþ;EV;IZQxy96@fu(ӻZc/ݹֱu*ǹ4rx*DrjYه~gd`4-{^ɱ Yd2sRkl) aG|1UMj2^UYS ߷pQ'Ow}GO"?Ip!*BvNj*8pfkYXXBB$#{X&w7Wui<sjmw<__Z=|h*m7aZs{t$Z^K`PU]O3Z|!Çts-?ʐ-k;]7'd׊mG.g~ޅ[\ܭ 'Z//w"[Z3h.L eTba[QZxoͺ=lRj̇nWkQ oZbA>N$)Njۚ+vWTag7mnv6m*f %$6r}^eSe5JjZ6QO6y0gzm=GjY$$؟YϿ :]X&Jp۫|< ,D=&׌eEiuoUWHrJ!&:F24N@l~ob%xyFHVTTzӞ|}=Qš寨w>JILJj6phh4\Yu'/L^^zg}^Tyk| ,2r{Z)dێCUP^:n_Egt5ḏh xzo3ngXKN"7_.<< }7o;Ȕ+PTTJRroͅ)/7T;mj-ٹ&SٸyC=N[ơ߷w2V=)I=Vn'#1 ҁNU>Є$}Z]W鴴 #nWyx: uս-:3ddp< pb8Piڤ8:PZv˶*+=ww/]'Y2]u|JI[A;UB\lKjW_]WySvniΑZ8HtT VԳG[]aVe2),*9ťLSuׁf>wL-( mBkJ~yo(.-)5xś 7^C0~ ہR5c3Z{~bbBWVաC?֣sRҲP.:WYªNՐ2dP,^^ޓDۤmFC F`[[#C+z;'igS;NM8r4q-Bi>eӺ&JO-It:[ יЄ:. 
3<{P -`zu;̬<22[QBBfH?M*Bjܿ}pnf=u->>{o㺫;l;z,_Ni@xXP*QY3ol=xC= *-j~1Vס#B73ɚӶCCEVvꤊ]7Q(2H,߷r}ZXdd:[Rc7EQi^jם;kGܓת Lf{-fg2)qv ]WTz}ps?\>Ȗx_`~: A)3 ,Dv6(ㄳɬpgdQ!=]UZw縖Fp_V`%-XcSO܊.ׇS|ך:כ׍t96'BBk3K'Z1 0L*[.(ȗEzlD7ҍ޻ #{ŰbzPuU[uh:V^*> ͚Ϫ5;4!+V6cذi٫׫}RàXnm^봙>l...5GٖCjwwq!Lb-n6o=XeFM=.,%uDY,5 ѰV h"Y2NOxՎ?8.WMօnkorH%@oe͚ys tHv9Nt-r5#toˁ'+8Ĵ ;$OyܨȺK;8Xw3 B`sDB8 ZHHyŝyFԫWL)|iݪ5OֺU0O?~tyitf-N|x|_mWU9=w>!))14ol)׍r>22ssrr9-!Of6?z,jjn&B`*s1*j.֋7gU+kQhÅymxpm/|ռ:8[M׍rvd l9s+>XzL Q UoY`1&!S^mO pF6yL0>xN]Իϋ.{oYcam]vq7u ͱOS2*(h.uWy+ ah*sXeNJei Q(k>tmPg9rf:]cx5[k96ayOƕrLd"7@m>q^GYx+`KZNsvlU;UTeMtZbuŨqp5S?f'#+ON =ePj0kk1pӭVdePXX;} sl L;~ ۠h4Y鶻athg?ddb.;7=CI|Jޞu`X8z,B KryY7>%6o%<,?~u^9^k___rssIL:["[4"/%Z-!j&9ъel۾􌺥]b鸹vf"h4ڴnqFua뱗aP^S6Ʉ'N:89V{V(t̴RXKEzVN%%.'V+)$ QI`!5J6?bqvD=qℭǢud+Eq'DCe`X9!23@#[EH'5 ,D( 1lKKK9rGANN.Y"mZKP!%K(sdo!u#)I`!꽮W|@A$BےE$?z"mVEQjZk*B](gw0( NK1rFc?z@Y}ֲ>>޸H'- ,D( ;u`.6:^/'Y((B8AQٸztY,:"$(,N&';H‰rss9hЫg\\\$h4 X=';NԱZI}'+ ,D0p@?.p)eFB4ݺtlE睻:8*u"$ B5]Dl߿7;;Ѩx{{[6s=.pl! )BW ( 2" '())eGYU}$+EQIId,Ruv Bg  # [;8 숣``2X4:V7nڶcF ~}z|2 H`!JNvfgI8ˋ=jD+ۢm`-.p6u!B4V1lye$''m;màVFIQF# 2 *..V'n^/àD R`(¤K'Vqvb:, ǎ|Q3z$`ogG\`6mh40|PNwXj֒^=jzbe@2u$;aѲZ}L;\buv|jӫ̯ \>R oFgK\o'5= V<(ד.,vb xII`EԹdB4K jeԈl 1` `8D+àDc7nEprgG\ ھkEQ?"~' ,D( ...\y$%=Nk#Lx<^^^ J4zcؾs7'],q=e2 J4(X>jǍo@sK !;EQẫh4XV]EչS&_.Ahp$ ']{G%i,"w=pɄq ѤD3t V@Z\#hJǴk;+ ,D( ^}~~~c̵hdV+1ww7J[!EQQĜy ]$q_¢"n:t:c!DC!V`yzxp p")%+sv9vfune/!(0P&c <k֓"s,?&miǀ} Rkp~o ],q Ff~~\wI(Fl88=KJdP`YZ4h:¢"f񏳋$΁?OFf|#>>>rM(nEeqoܲesDw")+l uڅ}z\2`I`!4EQ1l(}zl;xgX .X @L3Z&OQnu]f&bŅKmDZɯV4x̓bfDbXfZ-UD87][!wBظxh4 | @}3rN4xXFCjWߓbZ(,*Ͽbiy'P3AI6гGw&]:=0o jh[u70__ TH}'2FΣ?@NN.}9f`ZI-[QoR@IY!oKtvD-gYmԣ=VU}Joh-EQd8:9X/n_>L2g,)('Z{|&4FIqI CL hY[4*VEQxhEKdUzjkN~k.-[$+'Z!RN;pɧ_ -o~фc\~D ^YhȯX4* Q777ze|}}_ؽw'*8|4jݝ^x\]]BZPo@ݞA?KbutMmD# )3hE!eK}5\]]1ͼ?s8 5-?N =mꪶYVgSzMQ4 /0c; )R׉eԩS]!ηN۶ҭs,>.Z˫oɤ.b2X3( !! 
e5F6nA.s,^z}[ǡI' ,DgXѭ+Rq;wQRRʦ- __gKMKgqcpuuuhg& ??_֭߈``Ӗ҈YRwkF94H`!{޳Gwt:[n57ҺU!͝]F^}Csd85gE%ٰ;ӓ7QRZ >f.b+oOjZ:MyC}'s+Dc%h2'.cpwgӖM&oJP@"œ]Fg˶8p:EEmNM *\]]h42@sĞ)*SGٴy  [ҮMA.bp<̲OW^1K)Ah$MJe۰Ėq vh/jy 1͸wsSz*Ah4tD3m؈`d4Ff4u&_w݁^NJJxYV, q;wO @<|zKJٸe;n< 4P(j q.7`0( /^,e_X,h4wm\y$ETB dYVV+Ǐ'rq!Elp/Q ]hj*=ؾ#Ǟz\[#J[pv473~akt<Ì1\&j&I Y,xͷYx)`Fp\uDZXY,V_obЯoyQ V \9rG|' nIdD g_q$8v(E%؂ Ÿs惏SRR@xX Fڶi"[OogpH:[o)/WwB8l&ƪv&]2+& :o1Rb;vg"((H ѤI`!DՊlhBO}( cFIqwwwv1R9s0DSӮ-Z*g61L| GL&;u[nQǷ3fq6P2˸qsssXN<I`!D!F7`ˋ'0fp4}زc'  ֛V -Db, ;wf+sI\q\2&_Ug|LmYx߯CC̩MBT`sP$M!n.uRuuvq/09x谺]h}^  Dcf3|'w=*\8±|l?n2h ͚5B=єI`!D dbLKE.KlGg;e6??nƍaLq!j'Deόg2شy o>'O&}zq핓lãr,[vA;d}8S=єI`!D *^_5w "#0v4jT-U-㘻`C/uWO!X,f᧙L=!l$f3fl~+5 h➱\V]ϲUk(\]]0n WOLP`LT *N{a6IJJO?gUja0Q7wvqϋ,YΒ(.){=wNTTku}RH !j~⵷eee_לQh4t؞л{7<<&RڍK]ˋK&粉 TONj( Ev_M4 {teti(.I`Ѳ۸E͒йSGn:zt{h4BSB:`[d)stԑݺЭszՓ]{ؾs;vSZ6.u$ƍeܘxzz(C*'W!9f;vݏ3شy!9tkpä XqVxقv]:rS6ސ"8= ,8C C=wW"''s[ұ} h6Ne|<@<OPӓA0a:vrC(B4!GY,9_~eɲ Fu?FCv Ӌ=W?6oٵwCϬ 皫Cޫ8B= qzXq*OX, 6o+YaP|| Uxa-Bi<4W-ԅb%++LR3HJNر;y@ۛ}z1l`z^wh8IѨC&M=qw!cǏ;( atKj)6M=vcMP;ٵ aQL7hԈVIDAT@uEF ,8O&߸-[A5HM4oO/D[*)-" +(phN]۶t֕~}zөcb+̟BTd_ۮ{Xl+[퐎ŅFG*<0[if3'9xc'8xh4VϏ0bPvVTQ{XqT 0b{3Ϟ?t MHh4Vj  #u$mԱcb]rRBTVy} ={Ya#6o`94  y@5֐N6b2((("2I"-=Lfsjݚ={ЯoouRmDŀBqf$<<>@Quh$99dSRHMK''' (,,BQlc<==w|}|׏@BCC nNhHxaqP!N,Rvf,v".n#Z֑Ա#:нkW:bܒB ź$\qrލsָ'P9 !TavZvH±>|CGBrJ ))5A;___BL"[uHZEVeXS;!Ĺ'Nv ; "J@ޫF!Kܼ<ɥJn^VՖ냟_ .*]@!/ ,'PZŞڊ+K禙L&ף9a=׮r )ą!B!λ\W|T|~B!R^y~?`qGA%tEXtdate:create2020-12-28T12:21:29+02:00)X*%tEXtdate:modify2020-12-28T12:21:29+02:00ttEXtSoftwarewww.inkscape.org<IENDB`buildbot-3.4.0/master/docs/_images/overview.svg000066400000000000000000003247131413250514000215450ustar00rootroot00000000000000 buildbot-3.4.0/master/docs/_images/overview_src.svg000066400000000000000000002064611413250514000224130ustar00rootroot00000000000000 image/svg+xmlStatus BUILDMASTER • Subversion• Mercurial• Bazaar• Darcs• GIT• CVS Changes • Email• Web Status• IRC• Status Client WORKER Commands WORKER Commands Repository Reporters 
Poll buildbot-3.4.0/master/docs/_images/success_normal.png000066400000000000000000000037501413250514000226770ustar00rootroot00000000000000PNG  IHDRTjsBIT|d pHYs B(xtEXtSoftwarewww.inkscape.org<eIDATXXiTW@B-X2ZSZZ3*V a ȾDd4"BA VăEajUhG8LgL̏gnLC9x9wZXs9 r 9SB?}9x0BƿV<# ,U~ F?wU 9p\ wןgCL-5AI,9i zXuyЇd)w# ;y9Q“/$H8iMO }W'=-{XXjIu-|9Ź4({ ^H!*=}gYtr${mqA=98tdch(_LP!!opZ\A1]E\笀(lpDXrD|j5vL˯=.nvQF5T;"OY|iBܩuZubBS- ZstE5e9^?E> W;povӷRYkhӧ42{ G+ BZ}59ԮjLuܛ@W&.ca_jظO# `b ƪq7 -[O^=Ͼb7S/s'ѭǟLRE[H E@:s(/ Q5f5f_`zpn{Ń;p XMITP{~~v&wYS~h?/ձ~'_9nMB{Wca_j1ՇQ`q*|{i`nާvF.|;™jIߊ'OGxֻZ|TF<|4 IZob2]ZjT!~!~`ٸq~v1R Z;+_ک=;;&6=3&HYe1:p4Tgr㏻⢪ na,O xL͌gP";ʅf8hLRUu:udrO~g:)lc0}&d|ЕVܸ5Ze 376i6Pʵnt">4#-H[&`̣9q+ظ#>(u,":)BqE2S'*ݽMoߧpQcp cC~1+q5xIx4n*rjGF̖줼WsGD\R2Wb|tVјNHe qA}h& šغQ?$rjlB1ڒGa#YBI(ʦ|Rv8n$VȫO}lr Mlᱷ!P(guHFc=H9!_(1fUJx]f/0dwb7-Vl `Ún [:ˑ?|Sgs^&6o_]DPg\ ߗ{d;=CKm_6~%AsC5|[ {bٿ"?|6Ya_zas$]0R /IENDB`buildbot-3.4.0/master/docs/_images/vault_multipart_key.png000066400000000000000000001715021413250514000237640ustar00rootroot00000000000000PNG  IHDRnl@sRGB8eXIfMM*inP@IDATxXG٣w^l(]޻F{I4_4EcLLR45޻w@w88TEyݙٙ}-p<3, @(UP @$~ @J((YB8T @@ @@ %Kj @ @(d P  @Q? @%@,!A  Jg @%C5@ DI @ PQp@ (@  Q2#].OK8fT @(Gɐw^GeBF @.P(I>QHrG!@@KV^ /˯fz43ckϫ.ܦ!j㞌},NׂYR6m@|c.ܺ%!wKۑ|,cSsKEbf Wnja_Ez3h]#vbh G  @% a/$w헟K8uO9Ogθϯ-ьf"_DHe8TR]~r4ZȚ&uUؾ^ٴ]j6,6lXIU)PQ @l%i*Tȑ:$"6-1aQ˶I%m%̞G2UӸA9.3G:-j,۳GlPGa:fՒ@ N%d^ԙ]>1n`;n6Im$HhVU: g Ku ȑbkf;K-.7@ !P(B2/m[Zp֚DIzܭj.^K3Im6N(g4]g<,qqsd[{ aC k"9.JH|^6w!j$&Uk缐MHy{U`΂Y s[Yr X @(g Ɔ\X4YVK޼{)I~ ߿Ն }74`3^hsZUX}~C^_%^A| ]IĄ[Ռ;r?Wf½&U>ȹdu0 䝭r͢%!@J#PYIENr%P.Oaޡ&ֹty%` ٔ\WṘ%լd|S^ _UؘQS5˝5,މj2[t$qlYi1Vg5Lk߀.Tԅ @(o<}Ltwi;J(@ $ :rE@!@J"P()au-KR$D@ *PDXŒYk侵 @(+Y.A @@ g%߷a @e/(Yh TDJr1L@ Peo!@@%@$'Ä @e/(Yh TDJr1L@ Peo!@@%@$'Ä @e/(Yh TDJr1L@ Peo!@@%@$'Ä @e/(Yh TDJr1L@ Peo!@@%@$'Ä @e/(Yh TDJr1L@ Peo!@@%@$'Ä @e/(Yh TDJr1L@ Peo!@@%@$'Ä @e/(Yh TDJr1L@ Peo!@@%P/8ܷإ޾Qi vgn&WfKS+6/|}L?NPy]>%~}^0N B P8GG^{χ_Ѽ u{A.ugNF cyu*}L{[~NSV3)މFi@ &bJf,٢[ϑo%C @HhlgȧƇ=~,ʡh @ wҕ)& 4v\EFJu 53p™3nzyyGħiW:_U<pe. 
JM4>|T5\_TNj͆-?N;KTw&.W͡x}8s+ ,7n°voeGHSi f65~j?ծ8ƽgox'Tơa' nf#jn=jPhDN-;ax}s3 ս+Μ|7 ROG4݌D7KjL/xTc WQ2ȴ_/m6і<^#[ٯruH5v]Rr_,8g˪ѵ[Ɲd򑼗9?Hg19Jq|oׂsuԜזVg}N|~舥Yߍ{"~ Fl?[fc^z/?[~9:gIF?.e!co>^ZF6}^DŠv,D\@@ e@U9NLj~Bi^SȈ K͑+?_Ϝҵn34| w!y#O~( wRM }Isf)ͽrniSHz/>ٖHڮ^iC3EzVV]ѯԞWяZ @%H@xxaѻ`ܾKw޿v-'Ֆ0yЦwU{{cvb>^Ud;ڽ}GϩؽG4"KxrA\=AOpxF%ūKk~&4}7x|ꯑZbn牰wy୘[w>qtۜ?-lxD{dYTGZ@|̉+*f.m}䥫owf_FCV_Itgm˶4YcA=\V: @(zLM;Ƭ+ځ%FvUkp<{нx6 wY4`%Iˏ2}K7{[#-I3t ; ZROLJk2kZōn3<.5chY975xngp=Z_7/ڋO.{gFGǦ -pget;'; ss;F7*lϹլik.mnLE?VΖ  @洴XD}LJR"v0WIG^v65S`,E>־-] 81 yݐA7sA{[D3˼W ^cgT%ܦߨ z64<Jْ4y9~M 1{|XRԿ[m"E\/ A  yV357#y=U\.فRigMŌ([7n)e w ڜ /VH2VV^yYsҺTS2Vk)8UzPUe :V @g%{ v(D8./;xX%|1maSHfSz9.Ft^kӰ{Nn '7b+T c=3v+tm 7?}/*3&\=a~32h+O se_xڕ%L*|ƭ7K~5u X @e P(j\Odaz/ʊ89q;n2;çJӴ,w&5$gˢ՝ne+f~w!Fl?p/!dzZZhrZbVKoǫT!jZEusvo̹{)KҪnm !["[uӿ )K=kh%7KbhA ~Iw 'g@J)PDV署 /ëxJo(!@@1^QN3zq G3URIS?OCFuBt%z#je^zc}8ʱ}s.">WZHƝ=:>Wu<>A 7 7y1- u'"x{;YA++O+U{|抽||#sUl8zvZ6}juzR*kczгqY#?Y9t`/@jDzAuϸEO)gͧrYkC<*8y J)Z-LX_HyM3X @@,XPF$zm ۬&&%%&2(ViXUsi~~?>qv Oի׷NZԭt֭<9>Q*#60Q^zQ*2Y}ܭxi4>1I8)-Sk{M;vԧ ԥw zB<1oԾgjOpz:NS8T/S}>ѱq #u+P8u5 ! U<:id^^c,[:rPG4MJSu#3+M9aV&=t3KMRO%ͫoթGYϮugUnl՗E>E>b¢#bdlҾ[-(вw@ P].P @C(U @(d P @@@ @@ %Kj @ @(d P  @Q? @%@,!A  Jg @%C5@ "DQ _#Ø8xE% tfp,@ . { @ JVsA .(YO@*d=7 @{ @ JVsA .(YO@*d=7 @{ @ JVsA .(YO@*d=7 @{ @ JVsA .^ NMYR IC" :y `; @@Ex/J"c qjvV6V Fy=ϵQ#Jzj@ P7HCC9Om HHJ.@ )PC")8,hUm̃C# K۩zhDIAed^']C܁s_GpzmeQĐGNzL[vsݿl r8[M[7fAGv@ 7 A ͼR?2KY>0((,|mjl"I{i92Ԫ7i[#u+Gm?rhF̌v)TZb V .pӜbi{91YFRcb%U6anblj]az3G7?[Mk*]kox'cЙ}^ˊr @(@DI0vA+dWAU ڮcTKb!5=E{u)Vۆ ͓**ҭNK*cFu!@e%tu,db#ʏpu?l`S}_ze?)䓥 LL/G%NKWKMJLLݶiwI`H9 L   X37byEľΚԿ~ʹOq:L8kTrMu?s4i2>@ A6{={M+6_TgnSRb4ytr52VܾQW"AETm`Ruط|11E @*v%m5pĤdan=Hw;wSfdrN^s#TL_==mX>vguj;7nui}j{96ӡ{m%s @(@DI[5MܜݐAa( t]H>5=٥{"uZDoO}vCQcK+ :]8v֖[&WO<}Qvn8RN @@Ae j!@>`2W @ @, @ @,L @ @, @ @,L @ @, @ @,L @ @, @ @,L @ @, @ @,L @ @, @ @,L @ @, @ @,L @ @, @ P/lʾ|* |&F1qsl P, @p?! @('DrE @@1F@IQ`, @%?sB r@,'X4 @>|Dc @% B _Q?! @('DrE @@1F@IQ`, @%?sB r@,'X4 @>|Dc @% B _Q?! 
@('DrE @@1F@IQ`, @F茶 ϟStQ-]9m>~mkC6i_}JOVC ZQ2Mk;?''-<훜)ܡO޸+zd [$>zԯԨeɡNW\ۿ+mQQߕK=Fu;~_wrqkڒ|'ZrԳc-:s,T,/~9ĢuNѴ#$v}afm>jvg;_s'S֪Ӵ%gDt+̴^F5MR%Q+ @=(^0sF:3w6|Gv,X"Wwzyz32&cr#-XU"3\dwSvFfCm&Mq%C~%67.jnGc|ɷ _Sy2o;tFzwϴ[һ&cqz-?~9ۋ'oǵ6:~w- -#vϢ1켵:X %s@ \xYJ/㫸wduҼBׅu:QTK§T*#28J.7wpa {[uYTdD5:p,)^vشQ_m_?vߏUL:̚8Z8y\dp3[NxYlT^W̑ʕ|k)b |`śTs~q$y K죥}h;ɱH)ܑ+ܘjf6I=L#og+}iif2iHRʋ9dri/_qkixV1{}f3E -TL- o }z4w( Q + @=(^;@-_|40\ܫnXC[Kwt_] "l?ٽ+l.}0I~u$\@j9H?^fW1;L2/{a'+q -z̨uv'O^1kuCYL3kzj-X} _h)ZAxp| @@1/pn~_ ئ/F[ڻ:2Τ1}\Uș ^^e&Xw/oƶ}MWBJ i;=Od8t?N8pV[4IוK7AT6.ݼI+Dĥ>bJ!a7 @x_;eql@ !PYɊ1 @ Jv |Y @;@|'8( @%?1@ w"(NqP@ ! J~gc @D&&.JjA=t @xE#jjk @ Vz##A @{#P(  @x[Ÿ`q@ P%544&+D @[(F^oq8 @;(ƽ @*@1d<ϫ>ڢ ^ J |o#ۮqs]٠He ]Dɷ덣U~b/=$* 9Ss({*h EF,oe=fOCvKl2ނw{QZS-u@(HEIy̓]w܉Y^-|oJ-j |y|ێxFEX}ĝWiy ~w~#-V-I/8--ca>,X8֫whj󎔖( ES.\h%.(֢[KB͵K"hkk V Yd}ҢQ+窨(+?KY+E;  Q$t׊yc>3~)W=}"O{qGeyY0#7c5wc)c' 6=wki>L:Ϟz7NG^^6sdWmOļc%tTе͋.8.ym~~ebZ|ܟ %1ݠ<-M,hD,)Y̎dA/ kږ5Rt,,-^eaA̡k֎Midw9sre:N}X]7auVae"#t h͛<i7FLՔzb%ەe 7bw1?ăuzg,YZGvƌL[ ކ t ;i(L]b?1 \)s'  A<]{q|çkgvqy$q7onВxj8d#Apyqs7wJ+GTlNrxLqv_k_6G&LuoǙ1KMagu]co"kBw. 
&` q //Aʝ-2**EIʑt[X]?''s'6ٯK>|ٷ3yjP[V"B1]}wda]54.Q e%Y,-޺91&2C$q/nrX-vG͛"c9*ReÞ^M۫!%_\4uWLp}qdtgAf߿jҽ m_ қk]KG-^Rtφ%k6h7`UL %H}xgW(>ev+"Yzntkeׯ?=kIoEj|y'k(mr]z71iiqfNuZ;CGes뮋^i::ӳʔԀSwy4XK/6MyBӞ_ضUj41qdK[-yG&sƎXbKtCλ.o^]eX Y:Yt5fSi RTe Vߝ'''S)LI.YFٗS(I -턏1e5)J.v5_=-l yt؈(lf)J:vqbK ]@b (GuSs;mr4 %KK~.sOn],Q; K?/'4Z 1"-ɑ}Yϔlimg ~NkQ%%g)#\Md :ua`,#^v3_zމ4won[Q4lmc &عӬMF(R[Q_5O ~OsOu/<ӎ\u^f&~鬗u 8W-7cOS9$),ZvJ37¬QΉ<(:pn󦇚hL^$=:R#9J)6dj4ѕi%lMk_u8-=S~cȢo{kN)Vuؤ&qONϮKF9kF>1mV۶nm>dLkNOK1Sk׭^ctn p/$)xf+3', #?U>K *1zn@İ@(@$cVvKWO)]\j}A{ts!VM@נg ^K\U_yו-ZU;Zpu;j㏕ߩZ N 0Q&;e}67\£nHkO^Ik um]9.cm؎4OV{BIo8>NF.9פA׶|UmʹktBTOЪ,m5q- hMlfy>IϔvS>.kr~Hy٣?;m5:s^w(G{\gGh&q$u;}?18IGqn.uQY60>:>Igږܜk+ :dсqsqNwWhBizP>dHi]W^>[4syҧɓahNץ{?]ѡk4ě><;nz ]-ENO%'h8sƤwk.G|>Є5]BblX 6K63a8 ųa&^e^ߴФiŠ>,Zr$-~oh0gXv͌U%HZ,NC5GOca',!3_yIqq'|DŅg*FI|Ww*zo*@DqcMj;f:k7S($R!G\^H~7NYvn]N;ϊ< ]n߬z=j_nb_\ўE(Sa!Kͬl'=e`Jdmk%[m 0:]彤NM ,-sGg33Q(Y~TMQF,)|I{?sv2Nbß̜L{xf!sY3:ԻmSM$ZAUH>fZTRI4 +Kq,!+J|?gd' K#o=l"Zc+|B@e *,2T~ ~psVA mSqY;~@>"Vd~mq+~4t$]?Y 7>^9biY*Ceo8V\p at* ]Sh#]g-Ldʎ-Z(O2O: mmsQ k;kAQ:v.*jH~m՟wj3]etu[֜jZװQBx-8ֲq|zV}b;XӴm-:βk\_ >w;L=i4q:?N_w׆ҦM*6ׂN+=u!OJJ_9sfd]kHWXH9#[kǡfrP!fVU#,ez BiFM0n)I/Y{ o*q 5/@C((X]k]ڶrȎImfRkQ<}Z;SK1K}3dp˜ӯjˆ7B"4hSy^~ܹom`ۦ-p{"fFoߡsT旿Yf}c'9Ijd)U}|򨟹̪/_tF0߫B1>m =vsvߞ zz n ԶIf_Iݪ~Kzh:sXR? n. I=C;i|e6M=Xn/6u[fQX35iLHol_KOLҩ۸)ׂNiѷlةzFDU;g$FV1ص4$9 t~? umu)XBeƦ}zyqQByxX׈V'>΢j'&E1,WʌّV)W [rzzz&kND֡[@'i=#Nҳ2fGۛL y!a^Cą8Y Р?ؘΌP3yiF3O{m{%e(y]³".hc #p:YEl TNr %@IDAT 93eo8xaycg||.VܩkIz틉y5kOɆW-KS3?VˢG ;&nk<ǥUu) {6anWCt%Va>ZP/M;ײd5=#: IN턯%v޻h\ǧD}?G<^攕s==Ymgm~9uQ\b~XCјoCBwR.~4O:.$odx^IOsg'ku)ܠ͉ҼgBwL~:{=N E7w& 7;etzIzK -|\]J^#vT a (#:{NV URB~U/X!;{}(gw)ϗLZ9z%.@ w;>-Bc3Iчb Z.]5|raU%?mקI1^vM G! 
P "#(?6^tvv㲠W«0M tLܹCwww7]+fgj8^ppեy ŗ/_VREZn޼IfziT`R_-̄w+!Qdd$%ZjEV,44LyBs04wƳ~EgUn~^^^F]cnnNQƨ%ekX Q34cL\{e¦tKGڝ59mQ  % ֫/Me./ߠն+qB 8Dbu(魍BBB(/R0)UR U+E.Etuu)+SJkT Gz?]%HGC}2 6bT^Pk5):SuN׾4|Hі'D*IXT&**"P2#E-Sa*CvCQh/PSNǢ{HǥAQ.400 P?iTڧKqZ-$FЬX1?EG@Pg~OwOӞ񻮶L<`Ihv|SBu5>م H+e.<ʑZj&%t/Yr(DG},H}PHGiD*\TP &D(p:}h#}KL55B*t8*CSjJ R]wT*R˴!6+++jjQڣ&uR ԂE TQ"D)RIj UE֩:QPB5v ĄG%:@(H !Pb:l$J ^:/uwi;_Q\.ۣe&e vDU(QHH:FYRSB.U:G{)A eDj pBR1.Jt\qjA3t,H)GQT"&`Q.K%5T>RSP*IBm: ZӡUNzeX yMB"m&+BZU11z[iJ%JN+ 5Z(Qy-^(L{0E4q jQlMkbZPTj }EǢE%&EZ*L[-F:}{T SIqTC^ѡiQV,F[ıJ-=@Wr !f) -ii9{\S^)M㥬+$̄D{BeXy THu/F!V (ڨܛ<"DHĊGJ%i%WWUwk]lFAq{Ba@VQҞz (J~TARh)h%[Zͻ([ WQSyQ(@$(D 5T pe|l4@xoGLtŶ2 c (_uc @L%Rh @(ˀb>ac @(j, @&P ذ1@ J-@ J*(YIO< @J/(YzC@*d%=6 @(d  @c @7D  @ JVaC J-@ J*(YIO< @J/(YzC@*d%=6 @(d  @c @7D  @ JVaC J-@ J*(YIO< @J/(YzC@*d%=6 @(d  @c @7D  @ JVaC J-@ J*(YIO< @J/(YzC@*d%=6 @(d  @c @7D  @ JVaC J-@ J*(YIO< @J/(YzC@*d%=6 @(d  @c @7D  @ JVaC J-@ J*(N|rgo8 @(Olmg ^ՈLsȚ luw`  ś|y}û֫avy[/_qlxOٷG8c.٠! o8  @(@1f%}~6h֙˳);w7ߦ&WCJUOr%o53"lgt{Ԣ*MI6 N7蘔]P @d?~H9z(6͚v=4Mc[OmLM1NP?3{wխ_{v*=˷z/;u4a[qI@gZ]o~<?/[sƋxδzNg|=X;KzX"lE3N̙Qi\|ozX*M矼ظ;h%{WKx>iZڻ~49rsCPYFg y*G: @(oFIt{ ;Mʃ>}1E (M>ِ/%(Ƈz Pa4k {CkY wqыYc>Xv֍ 3!VI\:l̺d[Yڋ>AsYp`>^+AM3 @(CFIθէ=n~>}tuU;0T?-8MALj+>8b͕f3k vCYHBIj+G« =Qz+֪83C\rL*Ƹ'_ouϹה1t.oWbNM%?\>w>Ϊ9g5Kw>,9> JtW5Lr13ԎMע3o9a{MyЎ{AݨH|gُ(Ijtd<ڕ;ɮtܷu2-O3~@[sN+B; @xE3smCwL&n:]ҕ}M3'+ yNMߞx44Uo֦=')Wm˷9DPmk@/Յ&V9:mVQ՚k:iJrz~:ngssc%d27{N9p ;'|z“Fu:Q&m۪5=bg'G=I y缄g,snlj̳謦ctǏ<{QsyNԔ\Eϧ:o`4*"Yס:~E~1CϭS#ս󅵖=Z3隲u 'K̼.3鞩*-FGLCeʪ H`,vf ׬Ӄ^kկzcvm>_Vl% 4ai)T[fo)A6@ PVʼnԱp7ٵwO9p$;q}z+ijewӲ4/[mmL>@״4ͬnpZZ&rJTOخXx]3zQV4!+f-t7由?] 605by5QjG.K4F; ٫rJYɲU+c  @1dTmhĶmp}&8';2fmх tdlh$+SZ4+AZhq||x0Ũ8F)Ys,ikZ 1xi$c koEFO7RIk7?ETuіNx~JJr(Gj4wrZ>ZxGYBK3DDR{b|Se]@ &P()[2xIu׶039KΠZMs&miCO'} p0L?N¹vmkyhFF\m{\Fb[xk6Vu5H0f.}|qO z8|:LvrސO|B. 
htSڽ C+ԎrJӮ?]qh,"z3nҷٍaIViwg3՗j i;=Zv:ɪVǚ.XúUM٭IUTc F4#}B33S&}ܚΉe No4i_l?ܾN>c4VmM^xkν7ܺN\_}Oo徔 5:gxz^AݗTx[j^ P?͏ k =}W') #"#'~ŽjY+[T/gZ9fH_2ժ1ռ[4uu#\N:\!S?ߩRf|CW hڡ,)>1qG%@.QQR&}pAcyԤRum]H±o萂ȓaO9^-]v!2kݹ>8v༢ھeƁ]Bޤ+T@ Jr΂;7[{6$)훹9{57iۃNp`S9ܳK[ѫߏ+?Ÿqk5jʜ/qq%Y3ᦄ0YTpO_cd L9ׇvub֔:sj<`~N.zW?5~]d\T Nֺ-|3{ڐ^ [7\bQ$wHҒuwG\9#ޥpT~FGٺz7;qOTˉv5xEY-54yxrl~Kcg,wia-VĈSգ 3|=Y?K:l,NSѫoK?sU۾{|h]yy6~w汷?@ހdzFgaw둬 zx'erLlonٱkm3ߕ &[fgѬͱ6~DTsJCεyso#hW3ީ7F` WBR =:4ykKY 3 PE*3MU&U ܳE?ެ]y-k7ߓk"9ۥ 훶I)6jְ,-Y(Xx[ TvAu=], ۛw㗏ǭ>Ta#W镝c?洲йmwl_S(JlӦK89)ǜ^s^^EځSػy]+e'Zx #S|&EqvZ܍{q5 |e焩(.r;4ywVe @P;HpGupx|lˎݛ{P7/#Dsկy#kDFQ5{Oe_?`̋'uݪ?k"bN]q#.B3䉲k3fŚUI|ZjÜwMXFĵb)eV\ԷfƖ3Ǫq}7o;vfSFD(;;L\ۊm+>\8z1'CrK'\8&ԁoO~G9~;R l޼rl;Hj48^>"NQB/hZ^*r iϙˢvo 1j׶۷7~r|sH+74 5",*J#RbiK)f @v/B?Ьz/Ιa6C25%Rdƕ 7bKp€ءg@}mn WrR&ґIs^$"yCSggNn)c"s>g;EfQcRZխ$bTZlPMg{gizzZgb`}?vSr֤2]?f:,7!>h5ݛXE?~l翱5Z hj!VDeoƵxu- S}R\rTbә\Ed]l`[XWW148iRpK @6CIzqһ_oCingׁff₌,YEB}Ӻ? QoqpѱSfr\DbL[R2#B4mYxqX?xª:mfFztN7j?1٬GDf}x#rǡ]c%Oe?܅{\^}sޱ9xxI1rS}leTdբZl+ydhȮ6C[zIVIھ\42M_^,aקK'y)K˧߷jGaf{p/L#4uS# @ +IS9#S>DmJJ$"47#jФ=u/}2φnd6޵4~X}ƍЋmhBtٸzOߐ)~~օSX>^4#?EoȬH 9mZYCm1wib{Tȕ'V,cvXd&xAɷN8NW/S狵GXe^g\ˢg?wc/z@["CiuJ䤢萝;8~7ռ 6鑡Q2-d[*G[rȶ'?mCu*a qeOm֣u @!á @@JjG!@k | < @[v Bá @@JjG!@k | < @[@oII˸CÍ @ruCI#ݜ $/ @ ;M#5݀B r J+S$ @r XD"m@A!@(G(hRS @Z"PPH]rA4 h@9־Kp@ P@9BRF@ @(  @+Puq< @ZZ{p@ %_WC P2ʟ}q"Y.U >Ѡ|+va  @-P{ux? @%A @@@(Yj@B*|rP5@ PJVA *,P T  TmUv @ 'U @U[d>? @%A @@@(Yj@B*|rP5@ PJVA *,P T  TmUv @ 'U @U[d>? @%A @@@(Yj@B*|rP5@ PJVA *,P T  Tm]w^쓤inهH]j(;B "iőϲz8藝>;'VdKmSb%}Pjy{y8;zB PꆒN}i)TKW#կغ"J 33kͺM?Tcb[j6AFUP. 
@jƑJq(]}#ϞGgMSSy[kDfOѤ+ 7w3պUIVQ(đ>?B/i# i @0% j7hu""%SWm&r NVQo5tʰ|c'NV4MOHL~W(s;'_'uu^\6@iXK=5qcruL:];};MU%wVO_׵2>2 MзӰ4$qeޝ=ݜ\ {< zqK+Qe gC~i̎L7?zKߺ>7lѱ9J\ec;SjI\:գ[[2&|fN:޽uJ/J\rofo-llw{|ojE9g-ʟ.)9 ޷Ͽqs=b@6"H&b mBpdiQQnhfƲC5,˯} 'gt6N Am SzXH=Vyv~Vy7~`̪sv[kş;-q~vy(22P1+AnǖMZzn4jDFn/ݍ?~]8ʲ謘_}1ق 2Ajvo[Gt .[QWvN)d O9JUӈtufP'dzzs>_^;WY?恟fVK>wZ6(ٵs;3pD lj5@F z.CjQffV"V9gD$t58gJc,:M"D¸E[_&sQ^)C36ZM(v_ްtGE}ctff\<7XS¢9ػ<7>ĹE2=>92䋯۽n53 5l3yH+xxAVGhEǫo/ .->Z7=ao?yUwSe<}!CH5Q|KpDr|BbfyucGZ0Qm>k\]y=r4WΉ3o1hFwa0GR$=|{y4MGn @6 vŽͺ:/KOm yio 2=)m'歃Wd|ԉs4cZ*Ķ.oyąO<a!³m]s6{Af/6.!ɓ)cˏѩoݶ%߉{faU|Ka>/^uZ'/D?8_?icE?y>2='=CuU+ ٩_;VQyIO~ɩUW3tUuY4'~'umP4=6޵Wc lF"6% E'a44褗`F.q&}69?ZY|ڈ8!@( TB2b>o8V}K edX! O.mW!"@ স0=t5"GJוP#˪A T_@ {[,T @JV1J @*PRCO,@*_d@ %5ĢY @JV1J @*PRCO,@*_d@ %5ĢY @JV1J @*PRCO,@*_d@ %5ĢY @JV1J @*PRCO,@*_d@ %5ĢY @JV1J @*PRCO,@*_d@ %5ĢY @JV1J @*PRCO,@*_d@ %5ĢY @JV1J @*PRCO,@*_d@ %5ĢY @JV1J @*PRCO,@*_d@ %5ĢY @JV1J @*PRCO,@*_d@ %5ĢY @JV1J @*PRCO,@*_d@ %5ĢY @JV1J @*PRCO,@*_d@ %5ĢY @t,ѣGjD2@ -q%ME3!@*V @ ԢB @(Y  hBI-:h* @X @ ԢB @(Y  hBI-:h* @X @ ԢB @(Y  hBI-:h* @X @ ԢB @(Y  hBI-:h* @X @ ԢB @(Y  hBI-:h* @X @ ԢB @(Y  hBI-:h* @X @ TĨ^䣩 @p(w<tvoGKhָc!_H@ P/wn=htj8d^4Ugiћք$pU0>7`uZ!@[.^()2k)D{\6\6Ȫ7`5\YR&"*,QV~lD|Vo @V :u?3~rwnzH_%]x gkF54*6d$? )dͤ TqobbRN~N\G)kV?pRh|vis <O,yw^zJi/p/tô~[%AW8d&@rKA @IDATJsWA%7o޼[G?Acܭh;/п|^>}m-19Ȯy\\hݥoqmahz;Lg/^0Ndv܏dދ )LS6SйÚX޼d]sglMQLfV=ڿWT2:7FYq|y{7,=ro:?]fme K8{0{vywmO ĜF80w@' ELGAAmU[)JFd %Ql1F>Fۜ}OvF_{HH/Z2%3nA3E3v}tsb2.Ig4=ETݧq[ʑ~C`N+m|YʅڼK{t Թrҕkף$r+RvtJOcZG;r" 7k9||a=׈oed~)aXJXk"eX GJs(dI (tEOO?RB)dTDO=CbL(̥K$bApcO:KZ`G]e4Ɲ_pUqҽ>OBg4 n<<ő|Ҥ'/YuN5 RB oa;n+QYB49# [צH˽'lQ)n`NO/nȏyÏz8K7wmCJXIiu+ܮ李t E%KWR2-%7!rQ.qU3: P^JR]z7p.371m~kEh׭ xN6بs =pYƍ:ZYч㼂*1޺1g3&Mve&=$Tc

L·(I尻Gø=QG <7ֹfVRsFeF@ P/])ePt]~Wf),![0]>♛ z#ڟɾj(}6ffm3&;2Obi<}ٴ)"?f5je߱-_Ђ ؒ}ԕQM_̯ o8_eZi9szcY/hؾ,>gX6L13&PP>s[c(7s Eg~mDzSQoOV:M<ঘתvm NFg fҋL{NIeJ6?[Z}r 1yK~{2apH\nԂ{??¾W3W[ Ty_ q`XF,;rï0vQPx2b6el&+`v5-]`z(vMGn2K+>7F˳4>۷ /;] _Q.X8%,}ٙ^v q-1v|"X{Z:;.!u}A"ʹHHo(w5zѡޔާq>/BEw`SO3^Jc-TnVLwZ֩OiMk72}ͮETcB䥯ȋb&0TXfͭ< _Հ1pDbZӵ~\ Vm{-^5shinه^l lXh4d1Z8ҿ3xu ;6j:}da"-gC`.c299Iʖ5ma+HZ:g?u[GՏllqo9kFV/w\b #""BJ|tco`{eSa>f[W/;)7,;/dH"䱩UI{}O|M>zNMiX9-q MN_1lps~c56+~hz@=<ӧիWץNV,@^WT#^4F7b2apjG aS,%OQY,KJ4B&^ҿ(72e@Z'@AЧH# Fq$-)1KZb7SR2-tC$-t8"I#4Ą²T]f(yM7Dr|?Vט+bFlD>|%LTm k >a a}YYf:ߓJapy|q \@BIds)t+FHSZJRSgQRg#^lܨ8JA؅9<%@\79Sd )pn`at -]яD|mddD)Ș'P,!_n^fC[mG>?n.| 5IEV֙ԗS?oSoC[I=h/ h]~ *ncVVT_3B;tcЙtj㿼aǶ/,. |&KIf˿gW9O)"|9BMhk8-7&8zK7:),i)|I+3v|0Mi$ jl ZD% eP=G>󻱊h -;ְd~__̛/K]h4B5b?Q] ?O}PůP8KJ>MdauI٭J^>C,ZJ>W:1))ܼD;i~ .^qeafclR=eYYO%!U0h0 '?!EtYe~sN?׿1Rֺۿ0-Aٺh7'P2Z6d4sꕤ uP$-t17mziA!Z}I eKߎ&CKԳXӅڤ9&Xd>r2wrG,"߶8|FP'O!%\|[x  bwi͞JH%Elh~ޢ0K.7.jattfI ,tE{3e:aTp?@<i icGֵq$mƞMwIRH]P$ p3ݒ\]н2֨ZQ)T.=3o8W/d۱%߹Ȯg=}Y@EIqjMw'F ґW$đ^ XGfff>`\x.$=xi7?_4qQ F.Cȯr]:i\C>㟬6cEz&!7};+0ĜXF>6u/2z:-gW -Gw)3Mܶ'>SUSuZ# G`O-}9/.~B@Sw\1Kgu(; 9ʅ.KB. ~2H"nS쨧't5ƸB73aDգ1Ȼ/ Q}oEZhz5-4-,3ȟC$_6DKWΥXT;҅}v|~&ܹCP+@y~[Hl/*H'HCB,UF&3,i.p5?1^}t}&݇_yf>a\$jN7h VFNK63~#?/D>|xAۄ>ҿ%}oN ;ut@L"]x Zk;G )@.IִB/)v߈E][n?zD3Nw+ҕвzaBz1@M*,Lʨ ^kMO/K2==? u{+Nx ˻q2]'at[K@CI0'0 HjK0n~F~MɅ+,}9½4BBb򐢕HզF+9EƃORŊK@@(޺~LLN=vHEzD u( o>BɌ`.##}\u#;2}G )趰2*vvJeр2}ަ!!!vvv5\d)iiPG2GLNU200KWH@,驖5j0s.OnܢQ>tClmmgoJI#)T=BV  P^%+g#= H3Q9/s_Jn (*--3 Ph%/H[((lu~=FGX P( Sn ;)l:($QORdll,eK򹿴NUjHRIyR JQ)?`|`E|(O!NSeuj}Lj2MZDQR U` /dL`,>R9/ʄ 7.te]+@q5ENN[QFѺKJ#\(ꢅ"9C2=ң>c-W4۔!\c58PǡJRJ/)ȣG"<ʖ<S5hp9J-T/F K)3ʢm)g*KKu*=9sOM`3tǡr=;i#S.4_=%&0*xɊFX"(L)TB(Is7.c/(0*0ZTgF%=ak4/U #X4Hb& (,hQG P|F['hOHm8C77W\F5k֤`NT(Vm"h#-4v!|=7ȫASGŵP(a^Zh]H@? )&vQVM"*K+Tc^Y@BIЕvFh] aiW '0e!M`~e Ph#剮qp},Ї{xSlJg/t9JE\H@WeH{TN$MG)0!4푲t1bZCB CЕtţK#pMc %I (̢!] 
]%i%"*zI4E66j()kUSޠ oaaa4 a"HQESrPF5-Kzz9u* $-B/:StYt#-'mB&p=DhQ62Ch/ţP@Yb "\v\^!кAQH)ޢk"[PR(~K!&7\?Ȍ 2)gBih:P)4v AJtզ±N[h/]iT=a#FQr#|IJL]PBEPY@$-L*@EBHMX^@z%2 h]2ۀ4Zh2H?%ExF h Hn4Vq hŠ0FU0ҁTPeKy m 2ޖ*Q)ʗBN% P(ůPєJ)E {8Xx @^*u Bq* bnQH1HOӥq p!,&0 sKv@Q[JVFHZ7zJ*w +Uv! z)R[,C @e ^:%-fDtv.Jiii)@GC'czcH=-^y+@@xK?F W#ݜ $Kuʮ6 ӝ4s~RdI}4@LwҸ6ő+[͑ӻyPש~/^GB(PW!GjyI^;<QbJ %[_tЪ:{{U4Hfe() %h8b,[h> @@ Ă@9BI6o4ԯ\!-E0"@x[+3eM-g,C~ 0F]P @@J-_$v5M&TJbI E\&~| 4T P_9BIA @@U!ErT5C^ PA%+@%d)v<P 6@r&@ Jqr@4EҔ&x\ro+v%eU۪sccgUsn6󡍪_6v3vw+6e2:d]/vä|cwPHD)#/ řڻKژtsU!-MUm:y;1_ߴv37]3M7?>_{J_@SO#9]SE<ee:x92VefrFn)!ҵ7K[xފ*0Տy/ykb%ޫ1y%|`~YxSe('^ygTXuZ@*PdZ>C'Dزsܼu??>Ky+/Y .cCX{M3-Sj)A4\@dda/f&\^fn.gSw幈LȈռu(=}kr{<MJ%L{6שq{ɘ%'{sޞk8wۑ"M~݌HumcңZq/G=W|)LJ*KޣG"Ua4Y/>~u=62iVq]%5s%1&q{G5*e$Ljz{rvrmJ}̏i.s^|3qo{TdvNT]YE/9kd9/?{ɬ!Wv=`>/xq"KoWcߒ ~3iĥG |C TB("ciYF8)=ٻcsrޑSK>ŇIC{nn釾\ihȻ˨*¯osL:-f-Uݲ'[onD@۶]sEՃ6  x)őf'E*!AmfڹM<Ѝ/..i:V5hgvLޑn(3l>DDK}ŤQVzt&y @xBI%\,BIs&~ww +%{G<]ܥX* .]]K>kk#VjX瞙Z>X5<@jH+++*5jG} PNhY}.lxjbquq1bG% @őYYͨ^Ɋ:r6 ̵Ym \XYn^ @x5bqjne>N5!!@F@8 G /4F!@Xq$=pMB" A Gb ! @ Gv^ h@qr* BIU C -PFlG {ffIDz?H_exM@@UD d)ٔ@"QV[T;;e܌ΑێxK(t47}up, B %Ʀ @. JSSS'{%+B mwpk˙F;!@@b}}|BL @$J=qQ[xN@ 0F=' @(!i7%H @@=9! @@ %H @@=u[=DFv֜*THRꆒnuW&.(E GOAK٧&^M !@};M߾8RCh\ϯ Mfe([n(<+ @, ɔ/ǀH$RUH{Q!j-,[~ N^^ d %WRW5 MK~/^ Wۚ&vUγVC['P_9Bɷ0 @@ T^d@^ASߊZ ~˹d)άZ ZMD(Z|8@pYj`eغ?<VdsnɈg/\ܷVhoM :ť? o@KkMQʢ8]1SgУZLR5aM=olt,ljy6ޣi\IxXyHA#Z*2Ҏv=",jGC:87FV)ۦL;S˟s/<6/7 MN9`=hXz]([k`O>h("7fGy_:c5&;wz7Ѯdriavuڽ4_ůU߁X+ :[DOZ9zp D͂~ȻpS)*w[ [5/71Tξ\_ɳEWѫn(tH_b|Dѣ|ZЅ߭zΝԶj*̘vopYШ{ Vmcg]h6J(,؞!љ~A;PwHnYl{hl[/Ԭ|&16rmj_:&<^گpc+l:z5&)fCGs%?璏êSg~m]oua+/%};Nz/ui7jT_P֥1f'3yciռ^/|ޟ;S[~.w]vzo⢏| 䞞v6/tS-.-sY쪉o7vg届4x(?s~?]ǸOk.J/0mv\.?LLkxAׄl?oBI Ӧ(s KH˾\J 2@MАW"=#G "޿hֆ[}L/ѭ y|̴\&>km4SN.n_ozR͇cV~s[dlTX+? ;x.ѷr,%짙/:1}{ci ]. 
{_ qu{ $73=>qC:LH-?}4 O}/Ms\նFUUu.󙦆)ӡo |l|ؽɹ  ~OG70|+H.-=ǚvY6 ==uFM:;R{'}2ub;'> %VD:5i7xloGƔ4m&cv~"3tJ'!T*Xye_}wpQ]߽JWi"A#^{hbFŚ+{7V *( ED;\hzFqSgg7~?y͏>bIޝ$.Kiq規SM(AmO?~Mߦj*Ra4y4Յ\\)v:Kv9l"{B0ߺ/jίtSض%t 6줷:jrD_׃ U[{6zefġ#a-A:k#yԾž" vW/g3k]a῍Xɳ`fgM(>Ղ8șo#De҉K. 56({NP`trkVG _'bD 62Fupv1$>u?x D{v]-g+V;<;"PMl텏1Wg6=_C _'NEh0OoY$^9r5G+V1]s(# ov;Ctr5AI*5}GV+q B/fQא)*?,ڀ9S]{21POׁlכ FQqXLm]Ozj9͈v*,fDzZD=6dʡb= }ʵ5}ov+zJ5Kqe,/BTw7q5篽5Ъ`UeJwrWe=[:jC]5#e4)_.>m c7;P=_l9[D)_f=Z9YbeBdž6VJA.e:~1 K54m}WDx󑉣}ojZnQb[st] ZY/V2yRzaܸ*$2~#U^"*UI%!XYG 3J|6YJ g-ZR=CoMuF\w)9nla5 sv0G_EL7s8:s'H{$W73ӑb:2ӎ:ϐ.oL';RYLFNZ mӞd l [=&!hPdiz'5sY3m9dF }q9,L:hԒI>v*{< ̤sKW5MI[eٕ;Sj%`QP}fjcB\|ONrG?Hxp3lJ+|sQ-A] u5KZ>#0pt.y4&3nFBU:~Xp7( YCVBt<q%yWnf[=RjyY0ٯ&; Aּ21sޯ~nXWĩ45 :(y7wђ_4ɽK^P-[X2[HLm̶MsQjwq>VrB a}U6[gJ#59p#;5yDvGdb&߾]2~~0:G-À&2:&V:ܼzEV^xbf.C+wDpyjKZΞE 4ߎ\T=Ozݒ9}-^}E9}Vt=3%W2 >L'GL6Rcg#=U8(S3%o-MqN#:>xL<|:Ȳ>{ov\7Ҩ;_6M MYlW]"r=`<|eg萺HbΫԻ z{Bdݭg/&T raWH+ꑽbFqҹn䱐KVȘSE/MOZ[wxy ҋz6#!<v.e.r6i7q|dag4<eejdp0%) BBtF5 4O5_"w16{u ѽ_S%,{r( C]CNHc'ڍh9S1{'7,ݑY<ŦN3f'ګJKq=qB$uP1z !:+;%goN+q=EYSLL(ܟ;6S ֌7f ŵUE% 6%N?9iI⇥]Qq Å I6(]տ-95;O]"C@c]k.]#@5S)#'O7gT.5 ։/QR>{#M>w?tRI=Oճ&l Ϯ &r0>Gb'{X{`YbUhAk6OV ~.?L+E-ܬ'^s1 rQRd+4wsWY4^4E,>郎ͨPMQC]8GJ)/g KɆ^EIx{;RBnB\,)Ūz2IDAT;t{&JsdK+3Yx+0b)m O%V%ywIs;'Gb.$wЛz~}.qn]VBvE]ޘZ] ۵uBFMcZwȖ-70'JHG- 5'8z%̛#g:h!Ɲk#~{#iDžkzҎ"n\ ?0sfB1#vm@_OׁWyefCj? )G4<]ycPnհYB̥h2S\?tN2"oܼr`ɳ= ;GvWCX2#Vp*dPw!瘏DkòCg.=l놞iu跿Զ. a00U Gt{;,9zkԋ V2˟ * .EOSq{?]dj~k\g+\fިpY5}vr.ߡT'Yz8hʮ k-ˠ>MKWa"T]5- 85`ª  ?]U ֠*o C'F? 
O+ -`6+6V|~e(DU"KQ#V\.A{Z7j=W^0W➁+T,C:g>qOؤeaY2FVS!U < $dn~lg7_A{͒n/=%ta6eĄٷv)HmfVIWÎ%XhOQRzRuzOoNb;Wzl.vPB&}Pe~Cx{ձ IkJ'҉ T7„j%71Y, )IΘ&(X"T>.쿟G~ >llJ4D1wi(tui}a!9r ҨoQ~dl!\qC{Gɳ-PՌMWR}$]פ71't6sNzonXvJ!僎H+o=) 8{a7〮kI l4!(HcK~5lm0rѐ{G^mb%׋!rfz};t~['# &IY2 yRFɪ1ҳޛ!6bˢGV3c(+'vKWLZy׎fE:w*iDj rߓwpkr|Lz"xn^@,F5Bb^=|vqNOh<JpZSiř}:\ФJLEq> I'pua#?xfk; C o am GF9)Q`k=0^8|otn?`ٹ3)$Q}4S:H`*d!%V8), !~$I*Q>HR~d/uQB}N-cƈ'ݾƐ7̏.LR׷rD^~4[BWsݿv5+d.B-TE߻۩_V>kpUlZIjOJ+Y wÄ - i&6ގV4eC}(/l88xRd}+V4%F%&wy{ޓrOln%6cھW R|z&DY:BB0Rr1 ljgL|T6# f0EctM(yZ6(J%Le>oP+'H@]C4?]*ڹhɦC"\m@^ zn 6D5ؙ'5f8~#oQYS^Đj9/cqU9~!/Ȯ]on++eFXXb+yZJPУ)mڀd//5>!4,m̭:<օqә"%slaF/+9'w܍\VMKIaXhbϛ+"cl'ny \ߢv =9wͱ?Nw0gfC& X#dbt5Y׻?}q@$U* V!զ4S"y'~^eZ@M*M#/zk$; ME^*L͛#ߙq'Rby!"c35~v/Znq㩛 2%rc1%cSFյ U+zPХun%<9<40 +US #z4M)KfwWMذX}]tUzJ%_^*jvꂢbQ oVߖ}UM_2ԅllf5Т _ osuCJЄBu["n+"fQ;%qp,v ZQGOf9/X;ӛy{LJm*>*Qb'ki%j]G8a>NW{F%jTnYnZ&-`|w$5"Ey0`0zfyuW .\HSwaA}{62ǾBx3 q?^|QP#'%:$\o i$=wA<&K06̅1qĔZyNڔ*-)pghg%we`D:Pv .G!l 3 GYG܇Գs2޻SFuzh#rӡA4^r!TF X%lޚ:4WDEȐ7C `䨋r!2lJ$Va޽@j6~=fٹ)D^cxl𻹽4[9H*YEe.{ _I}[K/!z`f{@3(|tdţrǮ[g߬/r gfԵiћVGK(nlrѿ/_h~C~ttNow0L]5ޣP%Q䓴xޅnuJE;oGd6+˅;kÌ:sg%$T+w^)F0ˤ܌zIZEȝNXL\JY~]tK|VػaMEn{ʫgF WGq\37g~C: I˭^8}G%s˖(d2_qf*D%EP松(SjW54u{dϙǹZ6^,.MKUCScD"l,?%LVJՓЂ2zfgO(6m\[ciAth+=FQ7&e:htm _Ey>~]Ĥ9 =]0҆9w~_yc`9twAYk%QG )DcD=f>~!7aj%75څ,fN%4üaH3g|˯w޲xO&"QW.qT͜/\r੫bIHʤomwġ" Qd'o@x-ҩJ:xsg1 vzirBV1784\(Ŧ뷮yX%Wsp'֥xi=|Q⁚rd  siٱOVl+]Rd$&H2]G)dm]$F-9|αofki[{n9xb/\ s -6ixZn;oZh;SRRעdRwތًL~ _;dcCff~1Zs2j :n_?cQ5ƠWh4pںO;K7ܧEW>+S8y<}1A9}kfMZkj|<_UO8f;v+/>HV ?sBeҞ=?m󎪌5L:㦼-b^;`zk~IWoynۧʘ?4q*տ+^ӟdpؐ4&&VhI3pq&%g<4Ux։\L}^h(A!oQK|Ϳ?qߓ_EasjTЃFu=G[#1>SԍG@/yx+g?'ȏ['0P[27~"߱Ӽy/cf:h 3K̾qk0K%@yB\*m`RzO&O8"ϭ>bo{XjMFo')Gb4YmYTwa@YHYG"{ժ Udq/) gtU.>G@'܏Is@׮] /**B37Tp+:ToE[ }%+#_OwjgO5eJ~"N_: @]Htpڍru@ @%PMg< @ G)gSP4` m@=ű!%lkŀd@ 8;lJ*]@ $)Mʗ < @ @%Д(.РRT)L[>%&g:##Br B HЏ@ @"ͻU%Wt]2FeR6 0>#ݜmu5?&C 2@P[ĔLXU @ rw  @(;X H[@ g%@"h4|P%?+z(  N@J"^m @y  @ i7m@ h^J6/O  ms(FUaPK hfh3,EUIG[sYj476]A;|L/>@e$ }zWt}2Helo)zBh / @k%YU%e ` @-5Lq\vR Y~,! 
PUI* OMeifP7Y h@ 4|~UBTz~^J Z+zz5\VZ UR  @|RJ~R9 7N\K8XQn%WH R^uD * %ї<47ebzqbGp)Rٵ! |H9{J2SUBan!~H&:IϾ.!(NLέL @(S%Y/omY 5(,43~Z2ٲly7m9jUJH {Y>^$u- kSc5a9Q׆/.kc@.-6#2Ytq\P !)q)$iDDٳt %uI i@@*l@^ q~V`v^=MV]Żim4Li7w݊yFyͽ&UT*&dDu,2GӴb? HʍNPq"|(LcDɭ]ǿ-bTz]z&ߍN*v7s~벻XBUM;glWEs %ܚV[fg4<~;w87^UZ:^NpJATIh&5j}T 3Sn!wY8R$h?̛CiZF8*鍊܅>KgbXs'lO4;cy<XNp 'c$x[+4-`%gн/ 5bDy3L(fc^zTBjEs-f-B%M20`5sZ4ӐysAua[[6߂x{>:@(V%I&nqzQzoSk+"y[\봛t~{B"\0Y8aޕY5 M,%>.p5#3S VJ7#'O7gww1Y˙5 4 U%%8W4%~8Qs0-}{Bï U|nX]9ϙ{׬l 4jcrq5mMFhhk\&Ch8S>Lar)E$Wbմs`Ս'lTOF_ R |.U_hi K%rOMbvOR,!7#ymKϻ]Kta⚴{k:o/Q1 ֓)milT FH %0tqr\] 5MPFbIP93:Ht\fYkm:)@/V BbLKV_xU3AW,-SK俈U 3kg7ֱqV&C%XU|T ͧdtnj>^b⟦]s|$B'jjaJ-)Q07jT  L"-ip}?:CZ i.N6UpNԛo숋M^I~J@,E nRDK_vYB\qsw쯶`X6&7 ~$b׷T)}|>ܫ5+>.:]&B:q]4/0>a1RL4u}sC46h6Hu7*>4O\@3[%QXkzHHU2@iڟ&o PO?FA8~]wS$n 8@ hIH١L>F>s@ PJ2Q)>D57TIl] @z$`#vS[2,` @z$v#@ JDzdi7`TQ@ [TIy@6J@)ϳG"4q Lam-T5@׳ 5~!9@|!dLL@ @"ͻUt؊~Q͑ l1~1 -@(#u5KL *U@ .//GQ+x!C @P[ii'@ D$ U%cl @@ |ZR7%@$-I@ RUR@ @KU%C@ PjJ*u@ hIJ$}( JMTIn> -ITɖe @@ * @% *ْl @(5P%@x @$P%[> @&R7@$dK҇@ @T@ В@lIP6@ J| <@ Z-I@ R((Y L`mO %>I)lZY:l@ 'Hlչ< Y0qEⶢ8{ȇ\TZQ_&EjFpB @Px#ۚUYC,G5=AUnkVzj V%{ @uUu+  2@ITٮP+ @|J~P@ Z'P%[gB@ * C@ h@l @ g geAp/K$ PRJ*iGKns(Lɀ@ G=_IvU(H#x^6tO4 JSN=pы儦]~~֬9&6k@ v *)OOO)+¬(*prl]u,4* RAPe @@y*`DՒ+NWho{P_ŝ͞@Y/Ի~z Q/H: Rl԰gTBOCzI}cHqN8jF Ӳr9#= %7M yT1q?}Φ=ܾuӧ@R m@KpU3mgks)6//u>BeA<ŻZTc<T_M{ Ͱu3Ցv'Lp0>. ;#Q-\CWTttl=C'{NDe΋l" ^$~T)j7[}>J_OO+>eZV_]\>A() 2 UIYz=5M\Z[,)ۋJWUgk4| G|gx;2 [ЙBT-ܞuum_w"9e~TDiA@@ҳ4'9WOYra2^as7N^x-5-Zr-2o;`^awr0 Eo^D|@@ ZRTGe_9봰1}Mhƽ_YGl1)z`##FǢc4ltWW N*(.$0Cʛ6}5H|NpCSlj 6,-dԽSPZ ,yFN2*Y@'Т$NP"QskXkOypZU+a9!$|)zHUc\ ] zզ,4_#OEr HxZοGnvJbZv!Q%`!izR_:\UCűשܯE$MWM`|5m Ytճ@BEUI`e59_EMI|J8FRMl8F>~ZC*|RqZ)g|?T1NY|)A_anw%<41<9lЎX8QPR]oWN.l /+vwP尘hXԁ oIO5 @@2!d߰N4gmgTdy(}zM簒@ԊjhܮfUp2ʽe EQZ:d-M(`-:<2[ɩc':ۙ µoE0ILJiRm+Rc<UrAh NYZ SpB @ b*l&"c j0N btmLp*Q N)I4LEDb|.Q]L8]WB%!8h#(0-] #5aUo~O7#b: -=t @xikOTt D D" :&V%q N -'㸪! 
DJu8U'T4KE"柶 w @4MEUIQ8^_<2 _D$\gO @ ;~)@ @TD@ KTɺ< @@aJ* " @%d]p@ 0P%F@ U.8@ P @ u |*IMP묊xd2UL54[//@H@ѯlϺD`mMı-}ޯKz9$d9x4Y'G95yN mNl6oXU:::"8P8@ @+#!ܭP @#7*D"h4x<gbʢ*+(   p >"Z]C7+M+.@ >@T0G@ F(:Fp@5@ ' &@ @J@ ?`uIENDB`buildbot-3.4.0/master/docs/_images/vault_simple_key.png000066400000000000000000001352531413250514000232370ustar00rootroot00000000000000PNG  IHDRsPdsRGB8eXIfMM*isP_{b@IDATxtU; -!j{&(*EE_i;RK% RH/Ivlͦd 3f63xg @ @ @@@ @@ Y#z @@ @@ Y#z @@ @@ Y#z @@ @@ Y#z @@ @@ Y#z @@ @@ Y#z @@ @@ Y#z @@ @@ Y#z @@ @@ Y#z @@ @@ Y#z @@ @@27]|(OK@ h-P:iBnw˟GC@ g,i08N~1p: @ _E7"yYB*yξܖ1 Y,ـیYR27 >ֵI^e$>-ld+p oe>3}ՙKmCJn @^7E? _ܾG~\9^qOy|{K~hq<7s/]xu\!왜Qܼ&ih's+ݽ$Onr: mݬ걒gjJ @)Zɒ̲9e1{SbX&,L)IVɾ }Ur@|wlYBs瓜 (h6dw!#CVəC` @^+YS G!n@[\9[IՓК(uF'lXck/VT?FOL᮷u @ʍ2!=Uwv\ Iu+ /q!gDε9[7AZ_t<ͯ:SD̬/ @@9(A+b)+͊~Ѣw=O= ,Ő7NMf^& ۑE1m/Ś,V+DB@ 207ov]ZsℷS2s|P*DJ|ݸ[yk+n0"32~T~+:p" ,5IxҮ"839i[+|-|SSC+@ -_+<$",#od=@yt.l& ,d3mBy lnEIC[F]ckέخ [(,NF 3~k}v_ksia& @#k2]i&יB qݹ¾qs!@^WuČt~N^$ @B(\4,)@ Tud1 @@9(5r|v @,ug @~ YA  Y#A @닳 @@ԝ5F @-dg@t'd;k@[R/ NRw  ~__ @Н1 @o$K8;@ ;$KYc$@ H}}qv @@wHH @@, @,ug @~ YA  Y#A @닳 @@ԝ5F @-dg@t'd;k@[R/ NRw  ~__ @Н1 @o?[Gvظl#+G'w;wٻǽCRĭ7CCkLCU\{7o {fe:: @xӿ9SOws?wun#υ~؜o}̾s>.S,kEGK@ PkYf +DVxd3nO籲E1 @@ h,C̚+muxoּ4YgΜޯSC mg12<8X=#I)Ɗ3M3¢bRm+V=5oeD۸ϽEH,+n}=ꨯzkB"#SSŪ7i"WnP}'2P)7w^p u_ryEc h!e=0唃}p&es1oÖ˝ GsF8Kκ{7,e|T3شFfӦPd'^G?_֘3: Zh}NfȰEwym{,2G麛igY`Ūi_de>S>XryrzI'կٹn[m$yi+iߧNi,]+iXzkVQX)4O=۴'} ?BLW VRW0cY''\ @,J||Xd8ugfTo3Wo߼p Kޯi*a?W+9 v{0mnwq!IZfǩW=j7֠X-[jOoYgB+θݣ Жj=a|+HckWo]z͟$V?;1v;4]*QN}^1/=\4u;#g\V|57%y^q5;{oSgR?z7\7p+\ vR*}qCXjWyHl<<ݭ+zm]QN}7R֜q[_x6'IuN^d4lDhaW],ս̨j6}w򼙝\LI.3vQ> OX1.u习d&.[be1[n*jJ#f(VՔ::(9fP]UE 8@JO@5KfRݿ T/;гC|1S+aTq{?mh=/7Rŗ(KSMgYdVw /{n3xt;~Tm_>v˦̈́|&e|oX(bs⢟z3w?/67%ūA^pZ?Oٻ(KNjѼOƄ-]Wjn<kH5B7U/KbmoAX,~D;*B@^KC8A{˄O=ÐV&~ۖǎ=ppӚ%Ԣioh~eSxS;y#$*.."4[? 
zYGEm!ô+w):%++%+SxY/Ζdܞ5lآ]GE^pO9叽gQIbӮ8/Hف2BO K7_w }g'6kʎ1gAa4'9WMП%U|󝱻W|&F]QgS  @@C@oJ^zѣGXMZͻ~_;6ˤ=gW*"v}aqCKU &<%*8JxГ7p'~Ÿؑ]_z1 3<`Iwscax(>—/~sEo4b2e0?w#Hn5oݶa^ħjȘM :oJ W.Xuϋ;qpga_ | @/(,?Pm?4S镳;U:vf7]^V;D;{|I3GQ>5&,80(HM돞K߽*ԜQ~Fx2ǜSb%=!=p0埩4iT=Yby!_Z[O{A#'2 b @!`0o!.|oPC[ⱌX$) :zn׭Ghy]_@ wõ @U@* @NR;7 @@g@ ,sC+@ M$KM| @NR;7 @@g@ ,sC+@ M$KM| @NR;7 @0,(ԛw"@,`gc\@N\Sŀ @Zw @$d],L k$r}y09@  YA S @Zɲ\_L AHoT!@@@,ח @ot0U@ P, @$d],L k$r}y09@  YA S @Zɲ\_L AHoT!@@0,=JeNvV4DjZƓاűBQP@ P8_u񉩶VB^,tɽya%7{ 1 5 ?Rmhh^}@ 7BS,+ \qIȤT!@ r+Pw#S:SWQ*1q.vEՑ8wI'$!3p?hH 7c?+әgZ{[Q9'Z&uj@,wdpMFD9 @@IJ9Y(,tbGF=+&YF_X7R~ֲd%?mXzt;nc( ~'>ęu~v1#N%v @@)pZq|IiRBbFUl]+ׯ[łc{ Xݿqo7\&>QjPݯz'^ @@%PRW S!/h% ٳTq6'.,$Ͽb{#*8忻eK/*@ VQFD$e1T@Dκ4ٽ+ K|fJ3w׈319b) d4p  @,]h*Ns_Ttw.ض;vߛ=sbY'7n9DƙY[2Ybz4#71P[,~|Z@(d 8|Xuv#tԼw2PxWGqF6uZwM ydY&IE7*PkCWN#'C3lG!@k-F/MqazFrY )VR|Fؙk2O짧.Uq9O/zz8X$< NTw9]o۳ڠCm 3"7^>U Ѿ{w, @N%F~>Q(_pZ)&xQ4'NSx7Ӈ253['%HlkwԳ𗁬jthǯlr(YywD~Ain($Q  @ C @z,PYN  @x$}p @H%B=@ ,Q@ YT @@,G!@J*dYR)ԃ @(^ɲx @(eIP @x$}p @H%B=@ ,Q@ YT @@,G!@J*dYR)ԃ @(^ɲx @(aI+ջy'B X'& x/7 @"o˕yB @,ka@$˷J @@ih,3N}ԮqOoǯ̀f:Jr;hr|L|NĞ>[w>ө^ֵ|:j.wCx,IYyiBTtJN&qߏէ @ 20b^45ky m?fN< m_A͒T췍X> |S|+f>؟wXVG_~!@@hfa3d5V`>vBf&>Y==o]0x*5aX" Gݪ> |^r^%RL}g*^gȐ'ҿwo"/TNilTXū;K_!Ci5*{fx"{>SV ?G~gi?Ή=[V]I/?٫I]o{CEЌf F]YodQYu9[4ds  xtdVCg3ݫ:v\^dLnXu Tcv?o]ݓl=̎ma):j5iv>ŝ\#ϱvbs e.YyB(&UU{WP   gY׮zZ?,;pח]&gs.LV/`KM$4Z r㬫x7_ъej$NUOfg~v1'?߲ՠn&˓xvgՐz~ ;~y\˒SZ5Xj)(v @ hfi7(~;ġӢCm~-*猍9BQ3i~ִ5ppHy[GQjp&&DB6 Q[To6eaWvWn-ە^{|9{ہ΢ryWs&v֗;a =kBZqϊQ ; @>Y<_'_<{>2V?.xq_V yqV?!@,w5ڼk?֜~m8n_yɳ5s3{ՕOs0vO^SQpcT;q9q|J15Cc{w 4YT r{^˴/{s헮~Psg.+R)O.ƹ3⒳4G,뀼p Փ1< @@(5wn @.,u @> YŹA ] YRcA }@立s @@ԥ6Ƃ @,dW@t)ILN%-/~r  @h,)VzWu75Ѧmy9o @N}! @xIoI @@@;Ǖ|% @*e422B|S9 @F@dIP˶es" @?9K  @O@dYN3ϫ}. 
@:@4ёGv3Fh8 @vh\~`+߳.J^9992*ɓ:Dz}B[%$K>-cJ3%>p.<}f<<˪ &>=BK'>tqY+}w%l(rԼL>y(GRVHiլ 6Yic3Z͐ourdFIS`IFS%f,3YS;fȸ'EMu1IV)1gW!&7 Lg<_Ju,'G{G2s~_'OZ'ΤE ԰h7;;l῵KR)%΢VLZGcB:Q?Zpª%SU; PNm^i3G:-cS4>gߞY_E|l5ӜG|;-I߶DlkGk.d7n:Lu߯pY5S9=빕 $QM+HQg^W*V} TN%TYQ\T14ڤ1EӅ)2fKtЄBŠ)vԏ͋^N>gBqn @J,<ô]{sΎybEg[5Ou1Ŀ5>V)۵[d<8lncFؾuڪKM@sRl}Lk DžR۞tkҎӐUUf.J \FhSe5QxDIv@m@ MMC{ݴ)KYWҸ4=_Tv'V RIMK+и@2Y Yv΍՟_͘  $?.;[lӬqh VO! ?c U>f|ƽôa*OW˫ n?Izv1f+ zm' ]D٠C⍙GcުenK1vQ$N>#gLN[S´>:z6پmLʍMӥrN-cr. 'G''g^^3'?D%Yy5ф&N\ȸ tkj/=7]j oH Ҙ"΃+2|B|Y| PFe1C6c#lTg-uڭIvbCE XQ4gQNYYY,0mhe3 ]/6S\+߳;C"e~̈́s܏3QDXV_]ku.lt|mZn0ce7Wg Ťmal PTl3uR+MFVy왅?ltḢG6\nfv|z;5}Q5\k'.W: |J-BrW/#]ټlٮʋs\L͇O?oi2 l9F_.kDt1T4^N uhإD*2q.Wޜj&NYhw3Z;[D<&&uf>Fg͢6m{a+]n$zzZU|L#Y3_?v5ѯ؊ꎚej|0ۢN=>.79#ǧi^\ꐊc&#Zmfe|1l4rְzЍ$YAMS83́+WV4B-gY1=m姭k,U (̤9흤Qg֬ݼŽ٨] ۴m++R/N%{,߹Ջ$qO_R\M慮^cֶ̊QOj/h=O 7.g~*%H9l?^#F<.xQ!,ľ;QO_߿{Qia瘙[s(tCnٽklBMx9۴uLɒSA2&;ECc+)S~I9 ٓm47y'成G`¡'Y3a6,=E,|]!Y)zҋYe @\ qdýx ]( ^ϾX|=0g^^B㟟9r-[ǚOyGAsmNGO^f[ES:~6IOw!ؿYl\]=@8_w,ڼ?jt|>㛽BZy̙cm?OmEsbv{3~s5^Âo|wz\'L\xYVWVضM-[OlJ­ #irJOU:ZEѦӯ\v?}WLBeyCMۡ%j2LJ^ ENJk|2JL\j e|}y@pR}=1{>84"U5ʜC+j/_n٨ctROr)*fCkh(fR+}$1-BZe)jdgKje/ߥcaa!,R.M2,1eU ي¢*VzbI B7om(޹*-r'|>ʬ:1V5z]֡IŭS?֬Ro xa1j |SY|td.>emLʮ\8m*/5輨bNij6[!?ZŧE_/t #Wך$$ɮZu!V Q^X)/$K-iSιS% 2Ů,&ivHywgZ#9oAEvc\{h^k4"(:ZMRѷO"Rxo@Isj 4&]|ͣݬ*̘"YT;҈KzgZB>Sl{t+]ɤfmXט|?JfJNnT;t<_5`=ݧ J<А31fR|rK>;]&ڪDN$f|dhΰ?g;dfHFZ ݼ,űS_S66kԋ)Ytsrب,PO!Yъ_ =&= NKlVpw=mv;{kޑ1*<pKOOpv~W \8ڌ٦ +Xnp$,SZ3BFKc>cS3z0`Roeq3ճBITH,_fB򾹲/@H@ɲc(s!gldix^/NBӺrO\+:o?Ҙ}ui{D]HWVͤ_/lv;f8<`!B'~$$'//qYʁNVGqrvEי{Um[oi/oL#C&Rbh 0b؏7/y986166S\\S\S39=)iU*511(Nj#:ejA5ld wV~cBkUH[H3##r0|֤PBK=g9h|FF<ݪ7T߸qں@_!VVןѻPt6N?[n^TJ::팞lߋ[ FVߊUpbc}Z-x+e}l bbEiTdxAX̾'sm!\TO Nj祿K5Wɹ'ν^}YݼlpJުFcM#nY׮HwiU:箇e6Ϳbgjv7&ǡbkmOn<﫞K0O|)ڥp+\YFO46ݿ=i zͿ%l M/+rUۣA^j677OY?[5ouW\3JsNҡ?jnj9Ϟ:TPļYvJh^cx*52w5޼Ģp<"fkq?QfnfXK\:tʵKwu3u/qv]_W;iƕ-dI1Y5f8nh8%Ųfc\&/g_g+0~ȡ._1kR,,jZ]u|cZ.b=ly%5Fh6%y޾\Q w'̨</DM׫itkڞ\-V=I e׋^žG 6zoZ`h2gZأPM#F «޹*j+[U+R|2,bk?ھ7kЂm_\kr%Z>˟gIb^C_^=}xGԪ^}qaS+vl P^ <_^'[ᗂLk61-{be^/Z |IQHWPՕ%I0fgmK!`*6-yCރr%Y 
9ֹ?%9ZjMwaF}o\eִ)ˆ4zw]F"=ś-}pCùoShߌѼAz6}*]+)wmƆHkiWSCmy0[޹s)ҕ,5 @xؘAi ߛMRNKhĤhv$=y{^zk7OBﶤ/uJzt-666 2/bXXoQ98P׀6lhgk+TxxWŖ.=AtH̼{.D:ZJ///n)P޿?88ѱqƱO/f~l+ЄFEAzI|5&9}=dᖺu)KPH˪ iYrzIff h  |mzÃ[T؁ P,|^Y@/)FNN?Is)\}O$=HR21QQ8Zp SʔlG9&G)WQT΋6ZBJ5UI|N%DӡDjH)Q4(Tr4O#*W΂Sj"G'OuLGXLL(>i,>=HCSt뉈jlllEW-A@ЉN1H %%' 4( Q*5GlDRH VGJ(3d({(9Ql%$ O.ZDnӉP@PktQ^Pʤd);[SfNRs:*I;}L}BKh4LN~A@y@,WzDVTTy◘R`:WOh^To(S1QlUUPoV+ĺo'8*ݳEs&wv1QOi?rzsf\ ?jޡM}ȸlc[w7Nmb'O)f+zةO*'K\*986z^Dg_m/ų'.rt|glGBUoq+χ/;بȑtbJ{[o,~1MߗwǕzmed%(q6>i$y x&ɉyZTߛg}5K|nioc3\z$4W1-l_`zm{oOπe^2{;@ΥYd]]kq+meC^w}}NEd^ї~:fiMK_xeIˣ.ImG}4$Цm'In#T7vt0?;mwewl?nN Eҥir/3𜵏J$o H@d8nV-1eFDo,>t[y]gZ+?vr6pgR2Mwl@uMTc F@dIS30knٽ}W#eN:[tpX6Un_miDeаh*̵Fh|qX+q᥶=eɩtK3SR{Hq\;?RA3#]yr%K7Ƭ,Y:̕ 7sGg wɽTtEk>~r!4,.A[ gK[ئEºA @(Emb fN}'}ӆ!I8r5Oo8c+ksLM8&B{;3^ P=cz3o AY;[ʍ7woTʫ-DǼJ5\{$ɾlJ֭bF|jno @@g$Bbrm<>QƓ'Vꌜѿ\O 9b+2f0,L7Ǜe,EqGgl꽩ͨVA7.zSVUFWxW>l[~5l^zfэ3z~p]z/9蘁ߴzuolu|R[90pwg^[=XqNiju+_\cNNʫ!ѡ_%W.&Y*Sl؇ @:мU] nME!X9{50cuiIRg]LӑRONZ*#ӫ]޸ㇱ*Zѝ^çfeo[I<'mJ|jW{G5{[4Mm7 ZiڡU{LZ|◱:N8kh^Zw:״T88? {oP,. wU[V_:jںժֽp/p#q!!/^<%=ݥ[%V}o?VߐzTʄ @4,%>oQs$9]Zu>47b&ݿŽ:9Q+רζ{B!wƏ?Y)Ͷ-v+ɑo&m<ޯmz :\֏jy6v\JNLB2R% @z"Qd)d[B|T%[?V?+9Iy9AT7 8/$`[-m"KZߕVomg]#أ1K9ybdWv3rm4h65h6 @@4,lr!Oy̧>K뺞u}Z;m_x&'92Os3ԫթlwwMo{&Q3oH'nuĖChn .ӰZqdry̿Shڰռofּ~]>éNw[gG5&Re3jU}hַ;SzKS/ͽM?jaCҒ GhD.켑hZd&EIC-bz0``Dʉ3\۾ [|1Yv-j$.I!fj^аÈw(wj8)?t7o^v(Rz̷闏Y߻wԭX @ЎferJ gee۷]7a LKٻ[ϞM]ӂ`إǷ;Z홳⏥p>j{KQXC96mL^k0Fճ={j@"O~r=(4̫kio}y4N}'xTup}1RR5Υ^|+,աu{~ؤ~3۾Z}Q=ˬZvA?NvYf콑ߥӋKreNC-.Yw~d¦ qyJsT3N ӥ*s[dRRБ9}:[dFۤ#gvOTm|=W6jɑSG>W "=4ެs@&Ea zڲ_= @@!Qd)M8>?g]Gqef8];6i/?!zx?jVLtxМZZnѷw,g)b~u̹C%yC}[7oEIIk6nϒ܊,Sn8tS|=\%Yz.Yua릙 "BB]Z~.tc,[ݢS:$\>$oMۦwPN_C{v>D%pO;̪h#>"}[SN-r}[۔F&2:0ճC 4o}7ŋSOD*[|l1M I$4LibWY W/!@ P0+<9" 3kwqeߢ\J$K1˔GtiF~W˸ aTg`]q."3G{H*0w&\Gy< 1!aqG&qbǚy'dt23m*|7ˮ\ @ 5P$)Y2ǻ2a- EObNq1H{c>된_( [U6tkܲWmPS >)Ai"n&{aYռcǾ_-U^/̚ovcjwpowp>Ӟ6i-MhxWCVjOEQmkq} U18/#ħvVGK ?~%ٴyu򘃟U\I"s ,"=&xwճ!imuI/fPKZy8H2_HB YM̜1V'wrLLEr4%qȴ)= ykrWu|}ߙ0Ec[6hzO;q)Sܻa߰t/ьH;k^5Ɖ. 
[ޤ@[fNGgO\b-sl"elnt+CeGMn=}ks]YF6v{Ⱥ$5r4#āH6q:e/;9' {tunɳk'8c_4cZ5!@"+e)@ 2SH@ ,r @ Y* @z)R/w;: @@eH@ ,r @ Y* @z)o$$ ˤ@ \M"K +븻hmn @ h2N++ @o,Idƕ@ J(ጶH$ @FFFF.55v @r hYRXihᶕ @z/u O @@A #˂5 @ @@) ,%H@^@JIe)A@ ڈ,cWG7.C>{ @tSl#K.5OYtyju{O/:}4Y C!@6 cj_O7c|1ʧIatzQ)X @:)P3d*dGU~0"˾Km'!ͳaJ @:!Pcj l3sś66\rlڼH @r,,4 Tr,Eֶ DYRrɦ' @@?!4ZRR??5 @G"KfZ@w9k>kڳeT' @\hTQvaC~ @2 W:nd``P @ 7(7l26 @,+nA @ &C )ȲB4  "Kih2 @,+nA @ &C )ȲB4  "Kih2 @Di 4  @4uDŽ!aiC T<M"ˊ  @4 /V @Oe'h @MDj@ PYV}A @d  @Oe'h @MDj@ PYV}A @d  @Oe'h @MDj@ PYV}A @d  @Oe'h @MDj@ PYV}A @d  @Oe'h @MDj@ PYV}A @d  @Oe'h @MDj@ P 5k҃/2%7751Vɣf]@ J 8uAaeJjqfdJYxWž I85V~>^jVZ@J]@͛2J՛!m7(rHKK߼uժY'O#:o=!*T @@Ed6<[]°zH9_9i^ ІPX9{B)|ܹy.zk.,X )=ڗ!OSUkݴeQ+)Q DZ2nu@G@o"KYҾ-lF@' ^5CרZoHyi}ΒO}۪a?/ՖCV<}n)o iޢ /bnD@*@ŋ,z7WˣN]]ߛRL)ɳ[%Y^Z;僎jy{gҺ L8,6wj){6YVϢS$[^u=xgǙ+~oD ؼ˻BJI )s[r5e V @+M,[ N",<^j\׿y{L|##Qk~.r6{`K<;e'=X9x$F: ɘUTndGv  =Y6];7unRK[{F%ZLyųaracW1ɉ&E֎V9+TOuL\3me>{sڨ%Gbu7˯ф?#VEsjv[??i֋WN1Me&^-:T[?ݖ˓"=4ެs@&q#_N\mجS@MӬUE, 3]41TkgHِ%2^:=`Pr@% @z#PA#KCI-<uu4k=v,\Ze[zJ'G(piVeO_kluy}>?jճ{WɸŨKlG5uWNn6 f$i5 YЫq񿢳SbyeMm3v([9kO^-|{kwۦvŅ=ɽg6lahĐ\r?WdoՓQq kƯo1 F ud¯4.~J-;޻/[!Yc[X @@*u9ءewg ˪ҹ6T^?zJKN2r̫Oxb$*6Y$sr Rr#K82.4c 1sd,#U¸Шlq7ʜRq|w})"u2W<М,N#Dat Q&'Q߫<o~޺MMu?Iӡ]kz̐={t* ⟠eepvrk @' Yʌ<ޛѧfZp_Z[;Xs E+Jd.oLLDٱ sK-jđEY=g,{,`bNaWt@΄$9Y[,-g6Q,l0ylM5Hqy{؇G7C|;.yΎ [?~‚s+RqA kL[ᒓRmtU]}k]M(ƘIKW#ǡCt'CiRx%ek @3 Y {IC]|n3oivZ:iÚ҄w5t ĥGB(ثyC}76w_ۥʯZuL$9kFMI?f]=U+=EE%O?2Cފπ~;_]={ars qJpkF&M[6igVfy0Q1i֭ [ pJ{nSKi`cqаn-SӇ缗Ǫ[K%疔_zh% Xп0$V8=&iJO>/ @t0;[V-wMM^9VDa>Ol[sMelnt/D‰]_2Sڕӗs<.~gaͫ8q68ųA}w"*-߹֮ٷΜKvpw65ݺsG7r,xLn>Y5ؓ \8~&e+> …:s~rfr"7'6aR{[hIOٍkן4Fmٶ!>vO{,wl皗5˺n. 
o]!sn0xImE Ev=}ˠgŽpђsɉG+kV,D @SE itBbWWz8?Ykǒ3kWѸl@ &&7C2%tm^q VV⨅F=@ h2f@ :K" @ DZaF% @@YNF!@VYj@ =@d;] @Z@dfT@@dt hEVQ  @DzE@ DZaF% @@YNF!@VYj@ =@d;] @Z@dfT@@dt hEVQ  @DzE@ DZaF% @@YNF!@VYj@ =@d;] @Z@dfT@@dt hEVQ  @DzE@ DZaF% @@YNF!@VYj@ =@d;] @Z@dfT@@dt hEVQ  @DzE@ DZaF% @@YNF!@VYj@ =@d;] @Z@dfT@@dt hEVQ  @DzE@ DZaF% @@YNF!@VYj@ =@d;] @Z0ԠgϞi6 @"*w;@ `6\;Ψ T~D @@;,Z @@@dY1z@#R;Ψ T~D @@;,Z @@@dY1z@#R;Ψ T~D @@;,Z @@@dY1z@#R;Ψ T~D @@;,Z @@@dY1z@#`jPK pWdHT /!@vDCTZPv\.700P-RXhU@U-k|O@ 0fxiVV" :Ͳ4D"TjbbR*+@Q'Z^aOd2Y3C1H ~F%,uuW'''SI p`I/n\M[PU# )P25Rj3mjbTMPQ@fhhTND$^DEGfiI;n _CQSeiiVRF 2?%:%*Sh%'o1՜BQ̩\Q[~-A(yB]!QZǢY}Oy-(&%Ѣ'廥N(9;;;##,~BCo0nRNjp [,K_(@D@t%y(*FѥhТ̠\Q(\#PNyb],u)s 2:. ۾iVNkR:9PԾZ+OSW,|}(Q5RxЧVݼN(#(ʡhE5pH P,ZX?d+氜[yX6lHd;װ'ߌv[vۿ_11e (vbF Ԍ~hpFSٽloѽB5fk:b#'13sEXgӇΑdZET-C;}Yj2{̷v( ]8^ 5auեU(nZNR}GncǠ{ܒŪw)(566.p`+kZ@ҊfNA3^9vGzfl¯cߌQ$YlXgY-=|Tg;+؀Z"βb @(cY16eCa5U` +=}Yb =NTs~mWhdGsNkXE"hQXIX4H[5<_Wdmg*1+gYOݫlJ<*cBn*6s*Y= : IDAT3~2, eSqU/!m j3bC]<~+*,68|6.Vrv.;ɚW#-43.,YB~|{ͻR&L)r ٯA %湿9Ķ`N2KE z.ή gYΊdk;(eA ,"%1мpb+t5݂'ZOh\D.˟h uB'Z%..US䧀RX(fXYV@ָ],K5)9YkJG@G"ˌU:x5՟OKE"u7~GpFѹ*;w+ )!F~$"KNg?R\IBspμ6cXrIV:|]XYf|MY: -X>bY~YjNpNK*f uWlK5MH+IGtecEDIgdS Ή,㧌D{*YV֫βhؒ gJ^:rBx#ܡ%s‘LdOT6z{y-.{M4B.t3:DtL՜_Q[@PuK͆Uԯ[X)LaQ>ҏvwD")#9hkNRc)OE'ZK‰Vԓ|'Z/^ЉF1tHLLGw/X|tSɔt -^ W}~βL:a2,ƅ"ϲ%X .F{!DN\v\PdJY#KuM-IO$X$٭?O#dr6o5͹8]֛y4oTBwSo VRBllNaˡy=Za;V㙕m{ k."KQFZhB旝iYMĖ%E ,V:4g=}9_x!*IGh ЕQVZ28ѢȒy9`zz4!<1,)gY%%8ˢ߅r" PY2ԦkۣE9|+kS3X1]UkX*-_([^ [*EWBV?m|~=T3f(c}v v48zc.ϞS W=Edf3² kچ_]@?%kP6`+(-c>"pV^Y]'*,42VNtNl0A(+M"Km-Oy.?{JfY#9 ٧{pu/>J9l_F$!o巓7F'gY9Nv.\abKg7cYz4-~JXˑkRTqks=>͊F;oBC#_HVu[4c\{73-\˿wSjr&s3ʂ7NZꝰdIG.#>EIL)'uQe v^M0JYnOe 6m;i=e|jI9]=뇢RmyVT8} EG@}tOyO RNd+ ^Ցۤ<"T^Є8]lGv;.9=uϲr/,yHQZDh傳,%V )ѥ">-)RNЄ1 ?^U$Lǟ){N?cg^{s<,'M4bw7A%Q?F)ݸ:?OypW_qOv:}34Jju؆Ͼԫʛ}7$jWYݼJл4qY W5ؤp!qB~߸ȾYÄc]s9u8!!sb#&R۷o}!Jw@wl8]UVЊҚS9nŶW -u ք <{N&Ep%[ٮlD6y?{Ą-!7DBID>f4`I#yE5;˙ o e}We)Q|ؙ_Ⱦx'_%β ]"KlڼΈ4MZV"0γKTxƭ%3MQJo7ݿsf~,7pfgsjh}Hp__わפEo<"_9Bk>*;9gZXekeѓ~j48aVP"}"eqa Ю&| ͛ 'aB'i*3'g*<ƯK3IwvH 
2B37>j襸Jwvr\INLIDZ4jܢZK ؖW]V$Ԅ%˽Xd\K^ z\k7zP6XsIA' WCs;)I؅۽g\rO;6?a?G_BΝ][^NKB1f)"Om`qt_ZΉܱBNn-DKxpYD"Kv(&=?.ըVSҶV|Q5L,kls,~Uh~⫦,˜JLM6)qYŵ9-,"zYXݽsGRw!G  m hY2Vx0nP^ +&I*?'E/Tz%{ϱR { Ÿ/贩vAN_=+1a|44SHqhؤ҇#Δ|pd HUIJFgÜ} Wė]ҝK9+#>$_&|MИDrr3ghNؒGsy!!!ʃ ͋Q6zrPժUjVĔOh9BXIhCu* mi,$<9f 7Q[NE =WztdH,B*H4(:_#(oD:MzQթp*^ =RJ5 P6*A3*aTƄ0Wh9܅ªޥ:.RbښS$. R\K%Bt[ #<dRRFMt} (R|:9f<s6mK_^I_[Z%}Y ?D-($&&ҷ8((H89LH{Jцtꅳ,!r<DF},XR:}IsQ:}(qMS~zWCY𩦗L_ӕ'͢t&TSi9'4] eQpE#"T,% տŲӤqR]:~褸V·9胻û7t'ET=D@^J-qE3_7Gc#Gk:ϒc; -V: ?4݃ݫ2O|3G6- mַc//or7>uڄQk7EF<^U.&q֭sֱR{4+~-59%[tZBG*ᘤ>8ёFDG+ ZWʧ>Q UGKCQca)[jSjX:##BE扖RXRKV'}Ng_MPBLZ/pGoǕ>ɔM(ATpEp#gʯ͕y(lY-UGMi| }7,{)z^;奊"6_ut!NPtg#QF5ɿz/*y%ت$2fӿ|e̶aȪݝjEΞ|/own^aNѮOqOLz":oD˾8JaA}$o7[c?hQ<xJ<QG.hDYIw)aDTˡT(C…e2(Օm.** WX>Z>0GA-NQ}3O_o B|)+?>6Πn! Fv[x0EHLi[_}YξJN2c[WֽOM:vע7c_2~m4] <%\7gۡj?/YݨxNή}vtҪ- z>8ʦ{&if]Q:۵gnD?=GMJتgMjS $኎tpr"""cЄ-N / ST"eSSmX8s!EЧ"'މB:aޢ/}(T(^Q~DK_Xe!|K@O_+j,J۴Mf+8A"4BI7 дEBI?V 0E5dAY- jNDi  @dYzߠo#Q/e]M6,4&\(D4 B/Q w!6YhC!%~ƳbQ?ZC7 aG",s*SJnG*ǭ)%C$\f #]MTۧ$y"е4NI!&N%@z+ H&۾Y5Q}GsI_6BIeJK"@{ CDwlFIz<-Z 8 W G G,}MP3$z&6Y@]:B{PspW-Q"@ ,=K2_ =2o R fHsx٨ Mk H5FA'jxeJO]07(AKlAT'GЅDS!"rd-lMGqN@Tz~<юBO隐݄+\VBIeyMzh I""KTxyԭuYFQ"x.]B_XZ |#7٘i~p<8G8*E~|,' ;q;mYe}~/᲼yZ8^A!5&Di@&Ȳ|<;[<}83]Y}8X'Yz̏E⹕Ml:}@@dYd?/2yAY7]a)kJ?trzׇRO> TJDޭwFpMU9<vRv7z얝0`  @d]Fς9L86K%VD@h8#맽&6=& PYYjux'Ukx}L`]r +;ժ N X ,l%'(n3:Y5=ŹmwMUM^ŸخKtQP]LNv7*>^h-(w1u9]@F^ff4.K[.r]Htp oУjՋTc#>2ܚ!@UƆRdGYM;^_.J\M}<,~" QipVfy-$LHJ)5J%7oи˗.E&UdϞWZ? 
{,%em451vҬ>Mfspw)wKCF4%VH8[eb,߳e[23#xjC@!Hv)*_4,)jjɶ*/h2--P6 [lQR.ظbF2%nӼf͕BVn.N3Xx lg `MM]+v#]U9K@Tn7|WM}*4Zx-L&JQL$+igM]kы,wc4ZRuWQRAS(<ἱyh=8 ^@/!Td:MRs{bkȲ bZ)]YTC@wh^ks/ѽ/  gyӎ,,,p-T+tr01Kh0# NKw(EkکHj!UYBˆA'+q[Y?),X r5kY!,jSda^\d)70y뜕cs>9BՕ >^O)T]NsfZҀ%'e FVb͏Y@]mwlwnFMuPL~ȒKJ(/peJx hru c:fŏY*7L{lߓ6Q&bE<:8Vr>7OMd]P!薀cId$7^3-k{;H$4GZR tHND^7βgiڜY5m˦.Ά@ ,u{WH, oti-)oYJBdR#d[ڤmaE[A!T&Y^^vO9988: (.-~KYʌLJ86;gYďb @YF\E4xxS\9GEiasϨĀ@iI[ypYBe"@߂̆5hHD|dgP@Z(2퐤Zz8ja~M\VRĔZ* Pp)NM.,#;ߏƆFy!8<1޽/os\,\C6:rLsΘt UU|ʰȡASxrv nBE5}s3iw&oծiy= yfW?XxWss޹ާS51TSq 9z4ƣa-9,wgvw)"h&iZ9C4/ea8G@@YL;ZGa%~aU7.KvT]>oґ,+a-X@N;{&4hZ/k#i}@{.ApI%Q7nڱӵb+6֚e}tCgwvbf U N?\ɒ7ィ,Cs.#FVr;9ȞgpOFgoK&So'Oe+F>-Q][=ȸX/TՑ%~gLj!;Ԉ'S^dIDEٺ룸]U*q+*@G"I C3:%8|):۟nq57ulb)z(4ɋ{v ވl(gxVԊQt8cjW}yf0S-g;?Z%])%aИxQ,qJ*10S:Z~b_}:=wlwEW"{Elk-<3k*2,l]o7FKRWgUldjk^cZ =fK5ܬSRG SeYrV]6=},^vܓ̤{s?;$7KJ6p1$IS!>oӬ_G9 v,S3-MX66\VTU;*Ȓ$ *4HI1%* ~? xu֗4f"<"S-\;sz-zuhaLo95]ۄyu]q8WՒE6UmRϴoֲel6|іgXmoVtmZU̢*"E^$ē(5Ӈm G].nLkm"sת.z2bE0`*뢳!_nhFU6RZ<΅KBWY}NܻVB7d P^|Ia%]ڈ,bÇ_ٽu&e[غ+DLѫMO;uGGT4|DOgV[[;Wȼ2a B@(|rb)*DUY {T` @@дiSХPCN5ysұuW^ۡI)U,Jx,N}%ߪ$%'Wd2wnD5X.- 㖷kbb P@|Kjreg&ٺ3dʣl^x.`b[+)6 P yOᠭ}*{<*YUebim,+ @@ ʗjYT&վD@ }PƑµKQ T4@ jJ[2 T˂D@ ŇER?4< @(,P|XIw9sVR- !@ V*UR- !@> (G[vV Kich`.Gl ;lF/Za PtdPP\.~e^֢Uk I)wC2%!(@af~16}sR3El@$k-Iju@5, s4z)@!蝀T*IMNb%uRDdwt,"I\,F (`,Z$BeEUh @YYYRi# 5hO4 @ TL&HdlaRU @ yaK@ UDX @\v @@U!@4@d @P@du@ Yjn-!@TYj` @@sDaK@ UDX @\v @@U!@4@d @P@du@ Yjn-!@TYj` @@sDaK@ UC%\_qL^%-)$РLL*fkblaokU@ Z"^kl6)Xu$gK_xT{zOKϼw?mrC P4 J} +iSK0LKV uY߆i+@@hYN( @R T @(ȲQ5 @T,+Dg @@9 ,G|T @*"J;@ P,UC J%ȲRNt "r/9ij@ P9YVI/g!?Odcl@ W /SCW}C \-D"~$f"=IY2dφ_t/2rh{/sYZ$  @@*Ld)c090!6|8Yð,qLdKpܬc2m 7R] @@*Ndݴ!RgݔbMLLm'6V$DlG}D_-!^ܳ~n|xpkԎٍٟg9迳w#UOX1HĥL0bҋ'/<άk8~0Gys6]|OKV3fǭ'P7sҹ焅_v8 ~.qk7~UtGbh, @@*JxeweLd9 M\l LIc }~d~M:elIf I3vJN?(Qs==\Gw{qjgVR/8ں lYC~9.%^HD[.X{)E2,<`PO5n:g46M82{ށ>h6 @VYDu;b?3Yldwetegvhzle'`#iz2ljkCci2L~ޫ>G- ^\ø1)rؾskL~O4w~1*2)ȩy]X  h/CŨH ~AJ4<) ¶]߽xIu@i_sν.sO[w~C+ec7nL-_h4;i. 
@@ْ,dY~,ěSj.{nUWLzYm2c8Pȑcb/@n ]'i$Svˉ'*2ԯ7QS,v׆AU@?$6oaϖ^ٰzAC?*AYޱ_ @$KʜjA'3_hgʩ^\Qkܵy5f&:~c I9HffqrL$1&qsNDM殽%J%P_*SbnB<Xx@R&YRFL 5)=P"\2-yռgK @xPbE~!@ YbC#@ $@ @4ĆF @)H)$( @0$di @R,SHP@ `HA MO (C3^ण: 䞀ѻ'A ?#wc,5=Ѿk}4ɨrM&A c뺮i# y",fN`8 )`0Y;A @wxG# @0&di @z 0DIENDB`buildbot-3.4.0/master/docs/_images/workers.png000066400000000000000000001463571413250514000213660ustar00rootroot00000000000000PNG  IHDRQZgAMA a cHRMz&u0`:pQ<bKGD pHYs+tIME YoIDATxuxՇE133,33tIWLNi4a9qٖe33vWZXi-y%yϝ;wgW3sfԮq˛tw8${HH0;hxZ-m"nswC=BF0MY_{mQ^Nx_w;oD${CFl/Tk@44_ZC(#J@@@@`XN^>mT<\H"I}\{* pQ|Îb74_|n+ցHn~8W{`D  ?fĎNviN<DB0D0һLc}E2{{E`՗(.ſED buf?{S~YP(FrD|L6ZEûAkw?K{*QH[[ȅbLHH0Y2#J@@@@* >/~ߏU 8R&4my[1tUT>K'#BeGЗl4{P"$ꕏ,F؀B?'ⅷp83'yYPCˁ`D  H l\%ꟛ>k>]"c {=䡑6nu]|V'MuSTjuW77cE,;% `C ROo{A@@ oSWW羠 ya&N`a , 01uhi:_lPBW`2{(㌩Sr ױrJ&OX,{{O`CJ%~Y477GdT+V,#::|>ߺ VXo9ˤZz=|-GWBYf5&Ouߞ^gݖdDDp|||O-{fǮ]9~8~~w-Qc[+J_f61R4b1tz,Iwu p'bg{U@@` N ///Q*}sTTVr^K,ǯkъ'N3@ill$7'={LXXu_KK ůr9=ƛod ++R ,FRq?\.[T2k[CC#8p~|/7n9ȑ#h4ݻ{fEff---$~z&}z#J@@@``6QFk<4 )3=ˆH.'4Ma^Oع Mw:Դ^m?p?RSlڂmyvpaK?3Xt <̚=モ@yRR\SO>^Re죏>@~GH$"99?xoF~_`] 55 N=ͺO? 8$Ȧ 7'<ߐ 9v8nҧChh(rwR8w.3sպ|ɧGbbmz рdx._ xQނkX #>&ꕏ\DsFJ%lE [)#v6l\o5aѢF]I\3**j@kLT*Հ}y{{@KK,\ӏ#X~#Nd2c6#J@@@`{O:ȗ&gO FRo_ {H\i7EE8pݻw;O@6m~خc2Œ4i>dgX~6yTMVVfsRR"2 Dvvkor.]}5D"هZfǎ Ff̜Nhh}.p>q̓,0C8&WUW qp(;vش͚=UP|r0m''>z `5||}<]Tjqttdddܹs׿{!ᠡNH$<==ihhqOֆX,&11 ={={ذa(q`@ yHG! 0III&<<vp79[|h믿jӦ蠦{r%hE8GG>+օRёRUu+|~0k,G}Qxxx{ (z8u$ M8pUVZeλظqgϞ㝷An'2*ɓ'r9(QQBppX:i|o*V, 00 -mmi%=7o'Ygmhdeesݶk׮aڴH;=`_...[`Μ6 xAz>U-싎O'NAKK xW^Vڪ֖>o^Dd[?J^^>^3"8[4FŎa QF_ S%dl#{=+=g)yFZw| f%. 
0^HLL %pǗWTTs# >kjRL&󘴴4?ZN[g_@`P&0gg~?p!ňD"-koxa:ʊm$\nHBٯ6?C%ܰxF oo/kT(;Ӻ>|W,0u{G@Ϟc:_+f3Cq;=T;su?:?7*++ FkӖ6}ZyM l߾~rM }HbR"3fv_u=,[||7`ɸ`3wi(b ;fҤI(U*>jsdtke|fXv͈A0׺hs'Cշsms"_|;hx?Z~' 0 7OuU5nǟO>9DDD :...XOg"k۫ZV鳿.Pbbp+oƎv_BχD,]KhEP '9)UW֩\msOW'&&>&Jy7HXX8K-!0{n mگZ",Q[[k-: 7\}} 2zEޘt} \VQK ؇?ydl{E`qQx% 0*j"x <u>BW]N/`bh_{c|ʥw, pٹb---)P(Pc4hz$D"qww www<=D6j'ʹF WqG(`'6>z˺m/c"7jjk)))jk먫J@P` QDGE?gQh {3 hq<$^F=ߡ:vk 6Jea3Z[[WFh$+;̬,efK[[۰_`0PSSCMMM}!$%¤ l6 ^+1 *.\'''\\/Acc#* D" //OǷ)K"#pvvF" -M4myyE",ﮇ,mW,1Dh}v5 oo>p_z!0ty#Jh#?q'Q텿>>x{။3..8#,hDi;:P(UjTj5ͭ467B]}==B++b;b̜1S&hooȑ#unLL I;::Ρ JDFFa59uԀxyyg+>&LvǸ$? "ϕpͯ6#:Z} thnn`0 bLQYY|vOGՅD&$'e^^LLMd6SSSGVN.s),*h4RvݾV\+B<*++@{{;Gq9א 󩬬сk(&6w #\}5܋y|FT*2!/ROGGB9}'H !:*DRE*SA# '~&_!2Iָ`0pD g2յϰj;(,,3f>H}}=...,7``מ|pwcIMDLtԨE"B dEhĩ3dfb0w73\6]w- 151QPXdy//2sttQurrl6s^H$"00f3---444b6 pppCJيK˕{˽ȃ=K`0p9OdpD%%\upw7W߁D*hVF#ڎTj5jZCSK ZڗhrXX)I̚1iӈwNΓ;z ;f'O^Ѐ`hHOOg}t"""IHHgvtdV% '2LGсiS6FS8|JL&`LHM[n&mT@05deg͛gӦRسgM[xx S[[KssMD"aƌj ǵa#a2!::QzNDZ#ǎ+ '4$А`Bj5-7PYYMeu U(::::8y4'OAP` cɢōJ$"q|c OO^HBa'Օٓeeeѿ'drj55hAyF7T*m,--%..d2SRRBUUqqqDEE߶xeQz~LSS7g&ncEyY8o6e{SFefGτ&M <@dfe]lpQ\\ҫ-))mҤC3'Nd6--Ê&,RS>SBYrV.ʩ0i$C 5L9u!g\.gΜa4CRRͻ699Ay=6m`tSPP`sLuuՠ.t:YYYTVVeeTQW_ `EL:#BfZv;ރj;8kfϜ?K8`P.Z-yW\0r֐DD ;bF<űwCR&LEja :W۾$L@rb|-*3[&uu ԙs[ y㭷y=s\ɓ&g/6ci܆׆MB} PYY+ۻ<7E" ֢TvP*U!P(>|RSS7) `˨JJKxn`__֮YiSDޥ5ֲb"vÞt9v ް;oUxL&e]H˘ F< q"DG!y) zZNbX<3fwCoh] E_|_|ͦPX,fbj23Ӧ2!%|T$  Yx!-- N>cTU`49x!>.E ZfosQص %m7j]166f nHe6wx]]DEE7*vK4 |Cu2†X0wֈLF3.\*-^ȗ۾t |dzuS[WGffV@_===FZzրttt-hTGSk{eblkEiaJǟ|gał;k:#Iwv+-D2N%fEGag]U990-p2ܘL^^zL6bʩ#awUVVHh efDǿ>Oii`Y]`֬*;]xq͛X4.~E%cOly\\\c2T̢~Nޒn#?XWW׫m,Jʩ3z:I!v[Mg[OQT0V,Yȴ)"UTDQ\*<ĞP*UTVVܟ[>=KF$oZs oLۢ)=uekc jUz#kaxZ-NԔ^bnDf> VSHHwxYOB#?Xz~%ܵ2y׿`?SRzag{W].\j2$++׾@{߂!alkgw`r1P俯NMm-:2 W")!1\]]j V.]CG.ڔJJJKg߽c;!:?=4ouTрb׷xmY0:62z=MM NNNjmmC4lF๬FTkk+Ϝ:}ׯYɊep 5#x19_?mm2nBUFf rrs];$z2\Ydgj4&-wOC{ nnczeu4ȹ,@_pyR}J|o;1:1rf2ּ'WW֯Yłǽ0OrV-[ܙ3bv9`[ HMMMT׺p-:~F}-0I$3ijjb׮h> D"G6h4OUU,\#jy>fHM׎ͬi7ޡ<#<̓DFX 
ұ>)j>U}4i@\NZڴ1/vv!W"@1>CCC#VeSHY3uW|ninnv,7w7Lye엿n";ydB*}-$ hPh(::wSKVAY/)))ݚc5f3>`Y޸n5-_?SgxS&{xc.գ>W 2YFux'TPEoV/t=/y%~3~ؽNg`qͯT;Pi^`DBFOxxc=hU(ԂL*˓`͙Mll̐㫯`]DUDVHH0bF`/{v~t1Q6 owRy'/~ŋA`6";;g\K ??K;55d2y/ˇh䍷aG[fO͛/X,fL|@yE%yyǂ=D@.mD.$ y%ef||IgxzzRټRNdzT[z7dfjdz4V\a[RZ;w79r(ڎBCt5]c0xg9v<.c`f/z J|WWZHՑ5x!66*)B ȢT__Ouu5{ÎH$.Bq1zt[3~!F~>1Iz=5'2N"XpK,ub8smmJ~s-71k>!wڃlljDu%/#7uK_P=X,ڇ+7t`Y0OQT$'Y v7߶y>TUUs"$'ֱeg{^***)--~JœVkUU>}pMVaՈɧ`GVW@^~ǎ'~g7sd2EV @uu %%%aYf9r`>dr\& ̳z:lU>3a;a6 ׿97w^~"9g2yD<εP_3O>΃E 2yhU(D\1:s| ^zN:eKMx{[ʓ465Q\R2b#7f}%#ؑА`y>0L/r1-=t:Y?p+ŐL@1w+FLOﻛiS=qD"nb"Kxm]?o9O0# 55WTd#G;NAAjYp±V#׫-:*|hk[]]=/io{aUl:VX}mm/((A.gŨ/Fyzx0sFӭ s'iӦ2qB*\f) <w"ZeF||, ۧ2f-de{_ܚbp>ϽfF.6oty|lD};6hs rF;H$t:|9;cpF9L&3%ڽ⒒q9/X,"--A6;88{#g[׿dB.O!%)ú"Dlv#lX X^ʿT`2sW4ߕgDyxxXzS]]Þ={˿b]/F^1O lxKO=W\n !x̜`pʪj9ng Xrlk&''YC} ?om,S^ѽ8#7Q&hw55}՗|%Q9Kr.t#0|$'sP^墡}5nBd2QQQ6yyy \XLRR"K,&))???prrՕYx͕̓`eDi,M|;3⁒O#1>úXl WZ@Cc#c455{X d֬\XJT3{qb;T՞ܿ H$bi<ޘC kMʞ)3=eTKXAhC,tbqZm9{g=ܵSO[?yww[>׿}Wͬ:ߚEd2YC"|ܯknnቧ`0 J}{yG/pAΞAm}=;vob5NVE]]2888zx Lll,>{a`Ds`0 H=w-(׮Boгc^Oe=?ǛDhhMT:l___-ZD}}=uuuj::t"qww>144 N7<//jHMM֛7 ٳfr<=C`2PT888 Y^Kz}g}=O+^'PTݻiSmJbt-$D"^?;mD$mT9r|JjjkW}m}s!ŗ_Kcc#X!!Lj+:$Xr9&z^Jypu݌J?9@z vM)/ɧe4BBilhS$,,\]\;{ǟzؘhN9c-r ^ߞ و/(7M׍W.{-<ß^NNcaDffmmma4%΀"j''G3 Q|=!KJJUz4.3vw"$U,[W>ihlTWWȌ?'O$QNn.UU5D'f\r򨭭G.ys9Z,11QDgi29z8Y٨j}6Bg&OdR*;~BB~  xQooֱr٢c2 X/Q9 0QPX8#J? ya0_WWKX`W^d2+NNNz!J`3(EDDL6zÈ2iD z\\\‡@dD˗,b1wv#R_zQgTWװ{^zV˳U)PcxVu闗WoQ@WݻwS\|g2e6y`0 p1L`?-fmU(8x0'W7^P{I`_H:_}MQ:,8KZ@`2(랽8% :kV.%=u C,^|iQ^^1&# E'2N{[bYd$0b1? hZ^˼ʿt>+BC  55,X,ppA#Cǫo kW.Tu׬{JŻoo^.L&3a0}[F%aS@KGlF.q׭7 1׬_??o\ny? FPo%;hll$**!+\W_RWWƫh1 RSHIJ +'@hhe쭭#++ ޷B@qX:-zPׯCH̛Mz) 9z<(#J@nq* BQ>@fϚ>,5|u Mm(*T*-X3Nzƴ)88!yÝ6_ls߆ޙE֠=#DĻiy24BhcPLBѡd4a)∋$%DӰfZs1 ~_b6GtRʦ~D?hCӧ`ɂ% y`0wn{K@`Tj9u4i!  
Q[VuW d=W,>[rHQkW׈*+^Fdl]dcD8K{dAQu]HݮVJJk8z<LESYr&a8upM#JדOIi{ `0 麍B8 0c^k/( !>ur!BBBHNNC$Ut|2eD ]oCfv`tz8_^BKjĮn D"5Wczf3eeڽAUʤ"fNFdd$0L]΂,0:OFfJvC^PD`lӯ'ji_ryB"biNNrf365gf=d27IM=V\ A*蠡AZm+]Uȿ9#䉩:s=s0,ިF2hkS^{* 09|rkׯpGGG6]{~粘8~)dL1˨j222(/%%%NX$+; YWW['`@. #jǎ8} _:v3njg8_ɓt|AށE\nMlC7>e{~IcxW7no ndd81QNwQ(F|'P9+; ] /bWuQdfgc2 rsYt:s|m }{㮤ˬY3 $0 ŋ>m~\.eάv#NWF" 7`;R4.8,o1Zmm:|ih1ܵ|!Y@:X4n.1#mE3N̈DQdO޽ⱝ?y{~⤽7$ƄT{+OhMJlCl74!oO7[K2)nD7m$>%*E  \ ʹ,k@7'N,F,lB6 bcodeeQVVFjj ~~~Ґ񦩩ǎMjj2`YLy1joװ9{6JKK+lX{D"Vkef̦믣/E ,'OBV% ˗keDܽ^92(Em`Κً3rU HRfMe>sHhZsr *iSr;sfMoCRs)b=$1Jc]a0oIA.g,Z8ő b mM;՛u>} BC.XoyTUUUUlf;&/#c)m&>EG3szn +'n+/M %Ϗ:ĵWod2Yj̢/& 01Y99DFri ͞wa69"!S@0LPSSC||wlfGYdx}}} leyJJg,?V<]WB@[ܰa U#I|\h˯SS̔1xhUP !A_PX».g_`t0o sj5%&Z[[9x$''02B///iOINӦ1udN:Mkk+~] (|{yu݆`M&3{s ؆ j9 @jRDLy{O?m3KѦ,-o_fS}chll kRSL6\n>9{ʪF]5ؘoAJTdzZ!t \ AP]ۄ#!6`4@.Zu]wƹ,z=r2Yswʵl`սq/xMwwX}!s:{k|Yظ*VZͣW{9QN8KIRLCgm6FG9*(m ]ъFV.2b")M?ə%((BJJJǽG5f3w /W4-{eúلYX̤&6;q2dg̺|ko$]ebTwܶoooAo-1?{n9sMm|qr޳>y2XBjon dFL&aX6 DVcG{j8ܢb$fσˁlLJdk[GGAR 2t:5me2&$qi KXf4^x+H~~>UU$&&t ::_|Z29~~ k6RXT<WWWT*equ\`0 憃\NNm$02w鬆 2"k]KY\]txzzҪPڪ@* ]kolJ,8] _k?/G8jmU7bqb?J`0Z%1'\RRIIN&,,sf'j--JJr*Xr:a F>zߟVZWdQ\bD]] 2q+D%x;Ӯ頴 Q6f2K6śX\.%7N9d産TjU8qE.uJwR̙3{HTII9NM9㏴)8q4:*"#=$qZNFFee}] АzA'Xp۾ByOL:H~~ڎG,3sfF?Ijj :R^KH$,/=!dd7qBf5~"#"8{5WX;ɺ>l+G㞻n0Xd&+̦6fxJ=Q)(D"BFo3c6m&~y[c'9 ww͝)FҒ}ɩϓ0R g&TijCUunkkGN`m"l&4ćzk[yEAeKKk %(ЇҲފIxhk#'7ٌlCCC'_h°~#\捍 bmW:Y\^^nֵPQHTdeuy#=O)l6GqI-3f$`HK!ҡQS„ QZ30L61&F]FvN)^ %/@/'u'^TҧAٝz-`=RMM( )N>8::IAQ%,?2 \cb$&#\8/k(mŶ'''{ >Bvv.'GGbcc5k.^O/OCGhkSeKBס!rz^  ݍO8pƦ&7w6:N6a~WG!! 
oXH ())CEG^pRlצTS[׌hiOf itvLG&"aM [>}ce%sgj߱?@..\bQ:s¢"}rBBBVJ5$#ʱ3!m''[/\.E* 󧼼JRx?gimUc0}]wT(9t$&B=r9G (NOR!&:Y8~<M.x!D‚X0^Ǹ8;lU+<&>.jTdy$ɀsK1899qU ӓzܯYnj˫jVG\a(nND"mJ%+ɄhB&]3PQYOD}@ee#eu!:OeLJvN D"JK{ӦҢꂟCLRD"g&qǭK bSV+uH$dVUUၿ`vZ~7J@@@`cDe5ot dRJf͚#?Ec+|46ߺ-RfLKdúy-|B-FlOω6!}fEZ})Q4͐wqq˕JNsBsEl͈p?~ؙAaa5& DOiy-*bcm-A2BDQFqI-Mqq!?BVHݝmοz\}r*_]NeՅ^^UOsj,!q$4ZD"TUwE,H wŝFT`@e # 5Rjhv؄AdR<=] '%)bP5VRx{+ںf4D"pss&2")b/hH$eMqsuBӡաvD9Ldx IIe5 ZV^ JQ[ɓcؽ41eRo7wTd 'iiUi1e ]:Y+:F%hqjjXfq(s 2LOg=h; zJrHA,", ?C 67D@?l cǏJBY=Z0xy#C),*GtϿтRV3QHKxQ[ׂ^o wVQـX::9f \&z":'+.HN 9bQK=>CHʾ=F"`4l6YEHs )ײwYJ@h2jES&ǒ[Χ 1!v 93њcԓpN."m0v2pssQRjnxwm "<̟^FԾg::1oœ{it:^^n45ؤ`޼{vˀP]02ll <a#*3 9x8 ɌvMa\wn8AB\H/#h4փ̟4۔9Flt0˗M.Rlhs 'b^465SW_Ouu5hQ7-fY;WJQq5%8hQtrȧ \Ίi$'uOECiWwY(98؇Ln%..Lmn9;ʈ E."YU) ;1L;'BjjhniCѡCс\* w7'BIMP᳨Zi}KooײGu`b,^٦6f(,;Z^YOAA% Q4yf\]t׃HBm0Q(xyy }Α⅓ɤ(U4K|QUDKrHaÇX,֛hg iSc(Y@>>>465SZZʹs2b1&E3iRt]]XysAA>Q)mZ߁ެ]3úݳOO/7ٳ{++U&{cbY %$ `DDлRE88m(Z*:R'gyWmOO~r~y<)^9yD|iu[ְ=g$7QZVY&nX¤~{Nm΅J%,?Ϸ=~ ʨ>>cc$:;{whwi>7m7M-54;4Zֆ'xlTkHnϥYɚUϛGQ5X6f%;~ȰsX,FnyE=hmQDjr'Dwxz0WI|V ΢GG aHK˯mq|݈ `>k`'KM^d荷Ňk:Ào۝i;TijQpwfBjd/WUU#{#ߋK,=r4>ΚHGSgmW%|{`2 $lw{{;Gz~Mݫ^(7P(Jd2S^QAYYP-c.l&'Dڞ۪k9[hh";Jjl)U?sP6<ɥ`0ٹ$/sL *ee2 񇿾wAݪ..w'-JDPh8OUEc^oOBH Q/-eE̘@HvjZv=M`n(۵̞DTTGoب@jjnCjсp?"mF&_J¹&3uu-x{OIY-|`_7"P(| 96}VFVN`;~8;;P[LP~8Rik3|GAgRgggfϞ.mmJ놫PB߄ch*wW""><6.6\j#RSPTblLfME}zJJl%|vmD"ppjv]Mtd0!Hpsx- u/#-/βhjh:pM٩L?~H$ \Fk h_ǻy՟.Tj&88s2iiKmA^^(\kr);s4Ol6sp4$' 1#g)f#2ܿWԢ=JLtq{Wo;.6@fJ⋯wYB!%5DžSNeUDžXۊkHo++l63mjEw4gO)gggf͚Gjҵ!mk(q묎1HU tz::_>dRbB+U2kFqeJ˲Hs #ݡ>NDEX2N哙UbGLtwݶ ?ObW^#v_2pM7{I>ꠍ1uh損_`ӵ.x;]}q٬[3Lvv |McVv 6=b>ިTjjk3nnn̛7 JyFh-&2J.f%-JVHЀ/뮞\nyǔWh59vWU^QGqI 1{ˤZ>V H?Y@N@iqZ~k;smvy hSnFAiY=kWϸΣ!q3KL oKJ%Kqi xD"v>eœe2ċW%NF!~$s?"<,'{w&?➊'WhSSZZKN^%57c4=Aa4P546*hlTRdpOYsK uP$Flֵ8}ۈZKH_OJKmλJ=(* mVYxZ"3r|?bm'=k^ȻwxV,M#!~iq=Ѧlc4U/a20 FLfF@JK˥TT6Zf3F y+S10w1V K>ρCYw>͌Nat~.]WII-!>H; u/der9^L'׭eױ|\;#z+*/=4aDڥ؄ =O붻 x>U/~*Ϻ}KHMyu<ۏ?za!< =:I}Ck*ѻp$<]8uz^ =qeǮA#yJTghljM+@.NbB8 O(I.=4ހF1(Ʀ68ƙ T"\.[ Шw77h 
VY|m¬CZxtpMTUאFcSHļ9l~wE &J]1LfA(qt™%@GZyiS9u'Xx2R#j1#mjJG  %=#f9}}K"+OO.ϙ%UFcӈŢ!?z)*BRS"pvrLʬIoC,z"h̖%2oOh478y LY0pqo,ao9'2NRRRObv5&MHTd$_|/TVUq7uK=O<,ϟ̬,?_,AAERvCQQ UM4FR^Fily]*No`ѝk<.g ٫-y崞P(( !aEEUbzFE%UT2uJuBgR`( \#^H/vgXR)j^Fc)CV'7]3GG-?ɶoqˍ=,;S;urb"QwXRyy:VGiYdyLt!6Ǟ9WU(.g04A%_ JP4h^g%|aumۏX-εLsl6ȵA?p'9r< ]O_w6 13++ /O!">. &rp"L&3idD}\8ӦƱt~qϿ:D"h46-Rmuvv`(N)&mj1!LCY>d͙WͲNӦQTRÇE,c241j1LH1gV2 .;N`6H,^8*1ٞX NH`тIfJJ 񵙋sL1 DEl/ɀxwصۈ9cEFAiiqW3yr͌i>r|}1|} w\FLt4˖+Ndޏ.Y3"59oo/ _mVvm܈H$B&1oT*5aQXT=#p56+)-E,3y4-x% )&at\Λ+ '[^t=Wm{rDB2bnӃu`Ę>_E-m7|˄(}͠?`nd2'{/gʤzQ"r _?[SS3+qqus??Oj ǎҡ(gS{Qxy`zOQYшL.!$ؗiSP(=˼yVoVKÇ=; o/7v9;&Y8t8׸|ݙ77gTjpww&5% ^p%8ЇKdںV$<)OOW#eulBb ql=&$D"͛͞0Tf3[¼9b 'vmFXDx?z\\f3t4NKRb55ttp!vԬIS >nܴi.q''97ߴfTv\] x|Va[3+0d*M`h88 1.rܸVT* >> HMKh0rAr6]ںZZdq9;w 89ɹxx^pss."<=\-0̆fR= ԹM}By{ ި-bR)u+{ҳ夣={j~ǎl::D[E9N$;a2hjj$((iӘ6z?P-`BeA"::?>MO>FR*;m[dǎ@N}HQ'̝3.hEhbDf>t}'s)+->'Ow{bcB(,|na週m%FEd}h%b]ȩ!)H/*O&1kL<{WPSS;F3/LZrhoߵ4%LNik:/dzZz@/bcoY0F"hIIV# ,9$=s""&OQf * 8;:٪X F7i.CO\xྍF>-6-I:?obcBݏ\nyD"_% *lw=jz Qj@LtܵO6Ljw>"s\.v_B,fPfQ3Q$obB"X,sT*iwrd{mG\\d2Q^>z>e.D" EǎSZVϿqz*1deu; ʶٽw_} qs粨`Г_OAZrsQYKLII1TRrwbB:zslk֭IVN頯q6F{܉V#Jq6x&&粊ذvK].lSbB5S|d20Μ=pٛ6bX b~Z5iS9nxz|Tˉ b,K֜rt:=kVRpqqa„Tm:Qh&ɉݝcҪ)2;uRӦMm6PI6}lܪٖH$8 ߄?`A\xk9?m5L&3 Ap]7>kVbd{a61l0R=CPoO@`,E@?uu"' .Yds_=ee8tgg'Ξ_/( Ҝ\^m<=<)M~ T Pn^Ͽ_Gs&B] J!j qo jm=47[&ΎD Rc'jW&g,}z&)յ̟%;DG~C7!ˆu%PMMm6sT!g3!5gѠk~\sp+DBBB<-YrCcfڪ6ebnҢѓhz(CBիTV5lh"E6BU5|y }ĭ7-iknUCRpwadO,k:xh4!71Rr>O/.6[oqd2-lf:Z-/OZ BCyyW[9U_^xD"?{FMK/JELL4zi|\mwne˯q1f3|#?xayv;\ĂR/+KcuLj ۾;ʦ|Jw?tljΚJ=xu"ZCfVu;"WW'ݭٹb۵b6%OD44ҿ>%)!Ҭޗ̬+GG4}'m \FLjP[(G!e@)J )ɣ%FyEŠzQ뿧%0ggolRHJrNb2;'e"qrr૯3{f2&DL*HEH3=W F=rmˤĄg cinQhQ!AB|N3ܯHDmyTDl|[l6\&eúvkۉ̝]Jy<13S8r;lRvܡp>[NghS#p!c4 P>/*һƢ o/okjQwίf̘FB%GCW?yJKOᅬkUuYX55Y ,ML5H$7! 
A#[[/xYm]+&Twi̝DP7l?j]D̟7/>ˤ}{ż䕑WFX?cBʅդr/~7 W֦&' ZZd'3 &]THWKK+..ΈDS8ˀpss͍LMM8KIDAT퐌[n\w߅HLzZKmE2^p"nN9KQQ5KM&%e-8:ʹw{Os\1^3o!Qy%bc@8;uiZa}29ױS&7ggQq=)1oÕ?6-ř'rmi2kzY|x M/&º@DJ}teՊ|퐵m LzO9s\:*sF&9_xe\]hnn"+;j̙= wlfwߣThhh>t:=NNN|;-Ɛ;vc믽_(Ͽzj̲o&шD"r^+!;NNyݡ{گT>>VkSbQ) 0MܝNopU9R E5c'.1ӈJlu.R"y'ُiEU=|e+o-:݅Mn~/S^^T|l(ν-RH(u2$Gpt1;GFrƹ\qUQOLǝ/'4ďOnP v9/"wwg6 sQ*5s޷lbտf_)ľIWI?MPټ^׊H$bӵm fсݱ(lt;LdQ1]ri=Lf>xא’O`>s^8ˏww7VX@{řg[KAOAcS23mSyyRd;;oz;[fΖE̬lvOx.:?khljκ'_sYEVӱl26gs΅hjVlH &LGGnM.,IO LI<=|3;B=R\{QjLF&xd6^5jQDQ[7"` C\n#Oꏛ3>>6ubMJ{഍w?a!~8:ʩo$ y>ڽpmaf%x1!,Y4Ftwj~kjR^{Zk72k7.o|mm+,"=#o^GG(Ѹ K,JN~~LJpw΄dr J̜Fttn.4Zs&Mcs0"׃iSPXX#g$<<TJFY&(0 А̟Kk\k'",~~HE"AATWP_?<\wZ5O>uz׺ʙ}scsl^8S{xD"S&wfgzˤ\z6KMswF ;}NxDCyEeUM&ryl]qp4xҗ!FKK͙ĄPN.bX 4,X]ӌH$7WDٳV̬ݝmKsWWGr!˷mIbBRER9D"I ?-3P.L5m%㻯o/}L]Cwāls6]!xzr͆:S`]LCg,[%i:W~Mlh'| [L.w7gV/X3rY0gD*p_7[uF Zk̟޵*l_}a&o]A$H"L 74p8b7oZ^f|͆^ KiWH$,_#|^W#_{ cqcqgϭBsD}݉txx- -wDz*썈0_OkO.v#>8r(vD>R(6&M<;R;); maoROw ηN#]Ii)lm䨮FYy֗T '38?b냴 (--ӱt\NB QT\|#Vѡ}+Hrcd2)IKdȉc@!3+{@@ųQbvƎ.e䢺=Y9jq.5^UUy&_W#t" JHZ^_wtϠ6=35yYYc)1ض=ն'?!۴)/@u1`8b'iiiG*gBF vv' ֫=V;9q_K!#0AHH0W慅 5- ytH^zRSƍ;;AjlGN\0 NF?yfapS;/C~~y'N5;' \b^o߉[h3:!!A&(8k.i2)LYPXdJP(*ZNFFxx8 "EDyz\]>=fc{7~㓧3L P7m)~2@$i^Ŝ^rprqd@ 6??_LIIϹúܼ8&Y9ckS[DP(;qqq@Qccb*ɩ齏 @;z^hU Ӯ0 Fc{#1%yR.S( :]scY "# z d2)"#@:Ɉ&ad^9t:Qg!}$1zh$%M$>#d pd2BqY^`^/H$xecV.f*Wn/Xnr5iXMM]^'07;f'e(-BSS3!BԽWOgC7I/:JDdΓ}{ꉱ v_U0x&xcX{&&: 걍*&ţihhh!# ˲pu*dff]ې%3+溎E$10pwwGXX.1,gP__}kY9\]]}+K,j 8},*PPPwS  \n=%:ppPC\z qwU@'OR(P(fߙJ;U)mR|vty'p777W8ʕLd\kF`pE54(n3 Q.PR\Z터0(ç$i8th/“? ر&C&I ܗg6“'pE${* SÃDȐn%N`S R/0.i<rDcĉ*,$$cD#0ҥxWPWOYI c6c'O6ƒܼ|3NRw\&Øh "!R)UpP8ux0o'  ”))Vc?>W_y o<[?Syy9~a##d:˲HI.Wsw:z8BCF䄤ɉ/(@z455Q ,Μ٨khwu>-lk:BIˡ1Pȭ:9Ozga0ػ/[ʍPiZ>sW3Q]]NN(8ضml Lv3!1!٘i\^{妩Hhi_@cc pq}eD1 >GKK :BAq丱@odD8\] xՆaxzx` 2\իY^XaSg@7f?e-:Jҁ aaptt "߷nɓ <9ct 77WKC~~S}O={GJdk/A `p%h1l,SVV^|UUU&IKxױ'QWW+M2ώ3X4d2$C4%Bqjb ///NȀuu8y,Q#P]C.\2 $=T2 AAA k~mwrϜ9&.1 .\4,炨qQ[EpPQSo6Iö;R`_s/"ւaD"qTxX(fLZm.]tQwg:Nc'29%}Jbc! 
TQ"v#5- ZS@Ȁ`p):)% oN^T H$&;v경7rYtŎ:&#aRɾE8tu;'O=}?kllSxrIh[KZƘhBLI -7n,PPX{!)a7wXddSNMI\.#FɓK/1!$1 C,*"gϝ7N=v7OIId8t `g;z(Da1<`џNښQTT `!2":RXT̵s1](@V{Cjj&@{9!r J(`xPVV3cLt$m%h=H[gL ^ ` w\lUGCC^ ₍{@NN.:RQPTh8C( hbPo àhnJ899Y2!"/?͍yX&yjjj~Vv  ~)JJʐ?O \r"xyzB"t;L`!te1`b13;>[7ZlIqE<`Lq5+}XELt4ܱj]B&u|R\^Qq7GaȸrvDd2̟;%rq|b"n/  @@$rR0iR=_xo}왷r?gd\fL6>0 (-5[ Ø;gU[f0Ř0 &W_TJO=[0o.6jjj˦0Q q1 oxx{yA,C e?YYٸZ/E*Jo6s(*.(E}i9sf̈́P(?O?smSEZFxxnʀH7{rTW OEb$#/t\tWkuԔdP]]gDA!7/ou,"(0~>`Yy1 z` l9v¬cϿeP zL=j# ))VQɜ ο<-[`z}|2tw,p={{رc1qbUPK.sX,Ƃsc\ b (**o~G^~Xt;ٛ+AR0֔ڷ="K(**}K;}ʆ^sLΟ;? +/1&*2ys4eKMNEBu&0&&>HMMõkn!@86erIB |}}1e Bկ!X<Z~#s,:~ٴi.&bά9d0rqdn+?_,wH4555! #C.6iѨ1ydA,"!}F<܌? =$)%9~4~oM~"7n~~pssť˗7,b%8~/`ރ ADhE?p}dKgkk08TTT 33 Z(J8ث::IMݽgX%%ؽ{Y|놺""C?Du~Ϯzr9YpY-XBJ :dRPYy&aD"<Ïkq4'Gza>z0:$s6M%ծM`0ty>.A#Gq9 ?Y0(/mٳ`\@aQ1S^O~A!>^nUO _{{{$LqcB.2¸♧Wa54>f ?Uê'[kB4>bRROW|_~Pn?n,Ə _z XҎV$s6W7<V GIi)<̫px{h OK{#!&: bzK..θr%WtۃD` QPP}^Zj|hiiH$=4MkZ_J'''ATv5+6o`'a9BGcpssg4u VXG%+B$zc/^ǟ}85ԊKJ{C[[ap=+4bĪP LBɐhѿ޻ 8y,~eM#466FeUKDBTbr9bb?!mv.HZ.`yY;ɍ\DW^+"#gS>}CyCTWW0.xy H$@\.ǘhOp֐vU[nub غc7?w^|Bi`mB"s$BAAAB``evcOv߹si)㓓d?0ɤx!"< p)#|VFϕ,P]SX8$ $Etj&'"&&>޽%kI=RWco߈TWϾ+1&&{wk%!))+Y%ΝO'7ƾM1 fũѨi1 ګ#G!#*^xyҩ3f`'0kR̝= .t;aQ..ȸrWfzӯ{_m v|ksn^{A&b\Lq ,/YBSOB*rQw8rYdT*ёX۶}혜0{8O*rۿ_0ymrdLv3 A馐Jx// o>rrVeYlٱ :q#>@ =ILu5tշ/bT`xtH+0X^KM⋯s? T*Zv5$2d2)"##@ jvִi#J[ X_PYܿkU|,2@KDǠ]EP(w->Hf=t:_%x?ĩZ2b'LT*1jBѨ0JoN W~ӗƦLTlfvmwkG11x/C.eY|u6Rkh3R>>_ RtPj 99tl J՞d ˗eߐQ{5yNPL#aP@ 3W:]!j6[ ^|aȱjt:@Xh(INnp%0󅇇;._@Vv6}@"'<,TthH7v>0B!}]@ @TdګxŗPSS_ߊJ,_r[c+GQ\R {~UK#BP8Ӕ!5:$B,۫k$8h̪pwwCLt4|S6b= }Td$>xmHMKGqI)w0yJFosm/;/5ST[G D"Axx<==p>5 |7tC >D(m(OۏfH  ,t4/GYy%g9TJK1]O>_mq ĸX< N:>1lbׇ>xoſ g|nŲ,v=~߇ow Lj9e}3Ya剐*1L<`{^*"0f2iE"ΟА7QPP?`jr"nv3r" ?RF&mR|z~ثTiҌrqFƕ+y$HWHͷŎ]P]]Μ&C@׀>˯l@D"ߋ)ɐH$DV@Faj5 aE*bՓp Bk irR"\ݍxT*,}! 
0ПH: f$O>x?l---ضs;3g`R\숝⧭/~CGx{;~U୍P(Dpp<==PRZws (~HP/}[^0LkN{i>Yfl7^{-5,_}'wqq3OB?WH:hM԰`kkrh־z|^Rb9|$ ^Һݜr9g%''>DZhXؾk/f͸cDֺl߹Gc΀H$´o%PA,6O4}ox t2E+p.BNg!8((((ĥko`ܙHK׆oh?GqI`&7}oدN7;bIB!`X!1X[,ٻ?_rnwQ~m٨Zw@}u4Q=QB"Gl~md!pICxnz}6n{p?-..^giN:~Fd2){%NIXܖGhx9{2"8"!@}+\wSٹg?:> #ea0Ԅ~a#15%ca+;Y¢b{G@ss3=(0K/BLt-;5 DvvgM G!ָFG;u@ BM3P3RFڅKܶHD#X ` ˲O ׮UqϋbDb|}]PԄȼm\P`-\qcǘL+87>/!]c~ SYYcvۦMŤ żsUVUa˶(\$a-7c `ooor+!և(2X,:eYn^3gm8|7P) A -SXKJ~2R/^ t-&;88`Jr$ݍSD246oPqe|7jw~|kp)29 V7/vSgaL7K,D"!D"1B>b("eQmڂ)cOUؽw?8 (gpu j/,D^~!\FfV.r;  XzS7&hG7vB,[Ky ʾ oXN[Sss3v܅[4Xqc ['ϜǁCqUDc?!zQE˰ a`0 #8qΧ  4NjhP;: *;Bak BB ˢ-kjQ[W*UT%ejn"1nL&F͝P_ f\W[0g~l'df\7~,DClmEƕ8q4N<MB!&NٳNuFA,2md 0bYHMKCjZ:/544; pxqYڂYfNC^c≶5SN6hi1fbjt:\|pl*RDJR"n>FH}G]o rsp!d䠡 J;;8kH?i3jj &ˣ1ΧyָX gF NN Ą~+:Q6]SwBƏq׷ /?Y9F^~>JJJQRZzo{{{8k4puOOxAc0uB_X9{ !{Σ `;&ؾs7vރ\@N^>rag@HP}}77Csjlld^̫ȼ%DÔ$B*O4Djzk~;vu^?TTTb[w.n_@ [n:|!-[HOÈ1l:Qm=EbϏ|uM QSS-jkk0P(uj\.BRJR pQRvwN4l{"`ݏi}כk ,w,ZlٷGEv1jkq?yq(nF #4NNH$jV[ W /DYE9#HPOE|\J%D"!!,c'B5_`_oy $%%rAԑp&׮-.\4ֶ#Ϡ!# 2b:鈕QuXX~1;NԨШ]vMT0L^ΣMtC'dt, ];y Wq0 @y:q@?|r28u /\䊞뚛ܼ.ǕdP(lPB.A& 0ꚛѤkFm]-PW%jwussExX(D#*"r 70L{}Gr{39K T9,B+\ec#V=PA}템\hpnߣǎs9&'N__o\z[bRn!-@.jsUG4m[w "SAfhl@WzЬ|v)ֶveYaYuۭ0 /(@zd\d䢶4hcc#Q^^qCmqh o/ObtH0J(SwӣZۻGQGjʭD2mt&Z/sW}FG!<`BOe42sc_-勼 ]G]5ls3Z*L4`z)t\VtSEq#vRQf\Ps}>!#QQQi=0@ JµkP[[-Gb1&NP_߀cOpYK:gg n,7~'Ob wZHBA!Oh@+-h&j|iDc5\} uN<^z1l3.=_b?Z-cKtwtH >ص{7Ιm{$'%ry;  4+۞zqu0 (/5dPE!À٫VB2N W:An\_^7ꏺǻ c CP 33 2>v3jQo $DSSd + p9Ąnr5+>^&)|(Bʿ[BT|AAMwZhx9^G<>h(BM.wYTd&cO}ps:a۰`.MJL뿇`H$ĸ*ex~P)QYUc>U# Qbtڼ<ϰ{ӗ&Z _Xg<(`7.nBKy Dj}v*U& ZU ڙn 鎕 S!{"Ur> f> Xc?aWT\bKX0&'b?``1~X(]>;j8y3oqc} F !1E u!>Nw.Дi?th.m2%ɡ75sNY&z.~jڳpIF=x؎!dr駐cO:]3 Ostt IgpwwEjZ:JKK{ΈO %x@A!X8'<_~Gw+cz\jnӒ{0vk]X?/a:OzAQGQ'<,a}z͂sz|^* .vbq$ZYJ!Ä"6>t iydQ;\\pCDɂ# 6VC B!(BFNY<6Dھ`@Ư}< <$ܘC<} zxB!< B! _6&s͓fYD Utmջ6w)k ah4-+@n2BDB0&KoA~b5 p{Iac?}ۯuή9iH猩 !X: ! 
0B-|7!q]|m?C:6ss BQ22^|,bX]i \{5hTjɶ{m¢M_`0tIeO!PE!#D܅Ǒ](-+ighjH}a5乪MPTfֱNXӌ{GDNGu} !Fu!d$` (-e-ٿeBuL5@#Z9Pa:V7B2ìC՟9nX C䤆Vm6)[wj=[є=vZ|ف !("5]~I8}5O\ۼ= 1|7բD<8 Ҹm~)b2dhl=MYz={ǎDBHQ#!PwLf2c(|y\໹a&[*=5d\t>\ėq!QE!#IU?3YӦI0"БĤ.5-ڦ_a"0 !FPE!#, ^}gn!S<8oɦbz}//p .yQr#BF*ݤ)~nOw,2eʿ]rguMO?kM O7twqo;^i$tNB!}C#Q2 28.X7>5hHÈpSOs;ׄ&4˶GA! $ !1fXv` 3C`0kߺǺl . E,{!¡ BHhDCP.d@nڷLH} %$&tyYh.+wI,ABt%b6Vׄ/?@Ke9J>x9 X3ًH=RQlmB} t3ݯ`_Dt>B4B*ͥ+#PNə&<8dDMv}*h=`R|{V h.+]|sR^!ŧ@66t&| "'MQ56Bʽg'3҄BԐv%F]ps@*㻩@}+\wSٹg?:>B1<,>oǞ9CcʿYG!J!  ! #{8q/-|B4DBL{z(Ssm"@w!AC%!OUʭ(]8Y+_X3Rk?lVqز/?D7t 4+}7Eٺ{?#zCfπu ! QBM8Wl 0D=sRrWnTό@冯z߉13kQ浏 )"#F4?yju>"Q)"B!Vc6^?`5C,0`̘A㍇QB!Ċ(JKv buildbot-3.4.0/master/docs/_images/workers_src.svg000066400000000000000000004323601413250514000222400ustar00rootroot00000000000000 image/svg+xmlResults Checkout / Update WORKER Commands BUILDMASTER Repository • Subversion• Mercurial• Bazaar• Darcs• GIT• CVS TCP NAT TCP buildbot-3.4.0/master/docs/_static/000077500000000000000000000000001413250514000171665ustar00rootroot00000000000000buildbot-3.4.0/master/docs/_static/buildbot_rtd.css000066400000000000000000000020641413250514000223570ustar00rootroot00000000000000 /* The RTD default uses a lot of horizontal space for the "Parameters" heading which is placed to a separate column. The descriptions of parameters are often very long in Buildbot, so just some indentation is fine */ html.writer-html5 .rst-content dl.field-list { display: initial; } html.writer-html5 .rst-content dl.field-list dt { padding-left: 0; } html.writer-html5 .rst-content dl.field-list dd { margin-left: 0; } /* The left and right padding for the contents seems excessive */ .wy-nav-content { /* the values used are the same as for top and bottom padding */ padding-right: 1.618em; padding-left: 1.618em; } /* The vertical margin seems excessive on descriptions of parameters when there are many parameter sets. The structure of the page becomes visually unclear. 
*/ .rst-content dl dd, .rst-content dl dt, .rst-content dl ol, .rst-content dl p, .rst-content dl table, .rst-content dl ul { margin-bottom: 4px; } /* The default is one line per paragraph spacing which seems excessive. */ p { margin-bottom: 8px; } buildbot-3.4.0/master/docs/_static/icon.png000066400000000000000000000517241413250514000206350ustar00rootroot00000000000000PNG  IHDR,,y}ubKGD IDATxuxՇ3ZВ, 1DZ8ܠmh_M4fj8 98qL%[`ɲ,b\-}H;3ռ}qzY (((( oRETh9 \`,SPPdK7}vPR/pW,UPPVh@=;( ö+((8!^`!@7XDV8BB##Fu#7Gy(x`0 XL\0,2)d;_?}i?g[(K$s  #ȝLθ-{슥o[cۤ#jG}(Kd"8Y @Oed5zQɃ:/܉j.N:kQ+y6:ujyGwc鴡{vN҃&oR Z`MfNyȈ|2r'[@Va.0ULe^մKuߺ CBqX !oSـљ eO$=2s]`h ]K2Dǥ8{Iay;شKC.̞J'>Xj qId3n&Y^n'#BiR8#;xGFP ɑ3B]`O%(+pFXs:5 rzC#F;QfSNԦ_mw_MRݥIZ5Qo#9 B5s5VS;7 6z֪' wFR9uɀ6RZ5|%uRei88,!ߜ\$| QI^rXī^C-Rv~A78,$QcfSzm'od_QL݈<& +f+vGr| \tyΪC(0& =kI @2LS8,bp9MCU R3$ᜱ3<o7!`PYa #;x.O>"0"'l_=o?s\iBig L_$wI³%µBid zT`g <%I弥 >=ȕrx֪BqXHΛLPp`& +2n p [kLo AHkqipI=v&9֧t5cP^Y[ 0uఎ$ LWIVnS'o>u6/;i("V"Srco*6_5>;B:%IXg@O͞gu9phZ!)5S̝BF"b\ca 0,=kk !دHKfT2O<OuR}XRJWBF|țLVTFfV2 EI:^}z:갊QRA!dN&3o*yIZy TW#W(Yâ8ڳVr=A,Z|$ +<WR{T\Qz\гaԅr_Q 0&:4Ջ垶͗ǡɎ y  g}U=;u @:Z!o/znyϕ;#L a-A/mDE@Q59[9a~؋O y 3ԫXA]yJ waiz?8/;?)X1zmrX3^93QJB1vJVJCi͒ ta0f|oءvҲ'po#Y6DK*2)xI+ |,U_V߳` &kT/£̛"խ^U_֩ 9f)DA!\}ǫ?DCnCЉz*/Kb:8 f((xN| ϓ;rC<r CgPԜs}޳JAèTj'C7gdOL}aI)AP5}=|-r뽧?~+T*9 `\1#=ks:A>(iN9 ԹgpM-F0Y:,SpMBãLzԨ!KPJ:5zdHSZc/F`N6lV(%轄jaߞ-TRW]NC]͍56ъbnav;vjVVg`Jph #H˙DvTFx 3Zky+,"A|޳V _tXwlvY,^2gxa)ھ]SQ :;{:JJDT#24cf-+8AgG =~=%pM0"қfdN`cٽmk~ 6yۤT*#-`g3v\lg¶u2p-`Uk+ښVVc0xϪ߽öuQsէj⓳zYXXptƧo<ޑ; Z%Ɨ8~;=1V6/{憚AL܈č#69yJFkc5MWspvj*>ĔQ|-Jصy9o>u&I9IvZ_rX'K{6$每~%s|ʗ=F[G:}0);3 r*WSYMX̦iy *nGQ 8ٳV/9b@rܩWrzbͲH k$)}"j:Un+1wl8"* {1Ǹ*."K HpoUU8+xQiz74F:oq-ݝn | H/"5k1 @hiGR6O<88̍CB#mu۳cG;4FM\!8yC\l{ԥR<4~hu>UV^{;xˁ=M= 򥹇5~ =~=M}:gE7:6 Rsf?4܃[聊"~MbRIJAK's`9:d]K=8SӁ/܂/8l}EL9);|__v n'!ome/wdc7.ɩο,ˏ^M[KTw3BOGXWlMLs%s||o$*9S&,:3*i>M:[DX-f.đ$)NKMYȮ?.;T==dw5lXd1${Vm-L@SlXyn}F2r 9C#8}{vα[6݄qbz6,:Dn?$r9ҧIeZ#u\b9wx*,f9cgzB` f̓,NC~nC+M۳YݍZ)#Cf݋)/,ٟ=O,I#>m"Wb5wˊ6Bބ9^?BiM+H2tX@Ph8 8CT0'm<}TJvTL\p 
O*%EmF&6f>6H"ۻSg;tb M=v"u`*90d3o DGU$ )nŇhO0̻ɝv,~ad?ڊ-t4wقn#;e JE !P$3-~[tZ9Wއ$V7sxW1HP,wHC$}+ݵHI&Qy9vh8AS+``ZH=bщ1XG=(/y9FԮ蘻!RGQ hA̻q2I/W(dv`|φ / 3o,G'jW>RrjxVkhktyWL=,A+&$wlvnQJv98X7 4 @/{%w#}FPXb]JE_@֐w,{[feW9a=X£7x Lg{c!1ya7Eo)j;ϤC̸yW3\/X ZcD*w-ljg` f%&XµL5 @v:>mX{+VMw'܈S;IȘ¾aN=tp1q#HV` `֩/A}M!R`[_xdžPidžپ{Q{tRSO;*젼|͌3ॻhi+1//0yir;\AJ` ؖy/v}P83;*ݮ|5cmV_@r+om:O;,jؙhuzYzfETL;oG$y};o QI暪~mQ4uim&-\;y6o_EѶ_EO%1{v;O^*cθ^[;.jWSi\s=BRɟ8σ&x,9 Lb‚?o( LaE;ψ?k1 \O*wq1rsـ^E<-/8hAر{*bh݉ƑS;G-:czc:c8ZMXD8?Q]YAgG*@VV}=eBҧb_e6\溽t4Z:[lmZCo CNht*# IXL1#R '²/n1y멛_y4gX!)DE'{Cy)>q *Kw'vTSLbjhk.f5@W{-EH㲈1c1'Xx|gQ^5U%o"#GQ*)5H loaOGM;cH+Nr5Ԕ~A\}۷9#F>iĤmWLE>|.e%fh )% ˟4/`k a!8)x JM:t--}W6vE4,x[$f"9g.qh=|v.W{MU %I%|M9WSK4.~Q{能6jkbWT\*Ju@%B : I7!EQ!=Z ;Ag #eR|Ix{־CwgO^+6*vpX)Ğ :Qcf߬_:C(iNBŁ1ĹhU/qIAHxrT]m͟If٤䟀ƃ;]Se,im>DXDgׯ3LGom)-}ҙ R#̦jE'AmBHHu=D ~$? ]*ewR?)1DfI2&Mɺzvߜ{=bS_[IM^ 5l5U{흉R?lt) Kfe! ,=A7mpnz_'Zٻ]6}5|i/Z}0cG-[,Ua\1; `AJEceo-.n:)]~,Rv6'K/G-p0A?"$VMo(=!mFY$rX]mo_E8a\rw,@pZGIKdLocRǝLJ (~ʊ0=B!ub#p'H+[6f5S]z",.qb􏟿;sL].wkh_/IB\tw'h,\ٓ:U5@|\ \sEtw4(1ش.7(ehݫ\F߰hJsB* 0ۥEfhq̎ej hO`9+G!zuՂ+?Խ]\FBRvN5a(1 `] ǺT@OTlq x@\\0EXeGk#*y^kfƮ-aC;8`̬JӡTj-).!Mla|u/ݝm@$pGK@BqɿEW[?]??bvg&gXR֭o֗ٷwXDK&-D&lwRU^IUj>"D+CJ}dvM%5sM}77a6ڇ6/i CVBQ4ZgvE|t$ջcɱQʁ* `.B CuWlt3 P_nhxZ4(pXAQC)Ο8oR^Q8z vW<Ƕoj0ـ z|T_91R<^tBu@"Q{ptpn}֯Ml^mMBPo!Rap#6 ;x__D3046X[ IDATKtʒg>F墥.viypXT/U]P!41r_V6|r3%O` G7)|l!-ƇfX/~:R8N]8GIL&&^6¯/Dd sW ? 
wJ<0v_ q!Z Κپ>6 ,U4dM] w.kcC&; #HpdʠȦ6|'Z,ngeM7ppwJeayc 4n{v,Zt!qN]kؤ〠.1|Qͦ6|q; Ro@ C`DVMC4H(DTWA]s \"DX%84Q\|Aꩩb -l_iNBЂrN[a0|0CmfT_n jy-xή-X˟av&EYlZͽ쒺>sD8!ͧU?c4VZRU^HsCCQN_K2'Gh~ .Y sMOP$1PU+J9,[ "H5F0D^aE#vqs\x lv>eoK]!$ܽV+ !m~w:ho:.+"/#+J,fΏZk.;(YĕhE>+uS+;ލ%9K &,.҄eZ*/ċU,+tp /H֨C;zzaEuvsˋ_h/x#,ɯ0pOv5EX\t+@jXذSLuR+;Tp?kkHmҋXpJH*5`QbF!sx eYDP!;mTppk/JW=KwW}V`V:?lbώբ`MuK43yqN/N4wj=HH^ \n\BaҕϊlUzCz=;mwRkЇJ W9, u.ocKXLm%>U\' sՓT vK7wAab8D;#jv965ƎCcw]jwP9fɪtVpazYD&t9SoX3&{c)=8>_KWh [8D#YL~IA㦊Rm1uީT a)Ȍ)n3EKyHf=+:j )C/HJCKa (+A(zCӆx9&4<cpX_<)[xU3.)]R#,E߇Mwȷ[FX2T%]匛Vɝ}=Ak J pe`c.)h)$6OJLN.sXqw'3'ץ2`=lg]iaO35jj E(pJ1OCQRtHXѮW%$KG?nԲ_gCqX~)c LPTN,qmQH@LMylQі `@;pwH-.7jPc ER3gܽu6kB4zq#Ȅ3o0R#u /$$3ocXs<5 y.!?HEQ^T-:@/£.:ܘzRbiR@!ԱqF_cZٽu=8N>݆E,ui`K!?i^Ƙ OR.9 zͨD$LxY&:Uj t5K52`바~X d{ ѱqIVk$~e֯p8`bȨ3{πg JѦ{rk<($ 9G*-j ځo{8q( zp',"/7@"ձq b0|֯L-ۭ`>Df* }@I{e^2HI!k!nwt^1*JYxeWťR` *uX I#s[Ups".度gTw.E]E}i밙EE @,/A!ph ̈xwJ- ڻ:JC$W/}u@ Đ<$6=l#;~6sZ`Yîp-J0sy3\s8- A"Jǝzհ)%,.VW=QSU ez "oq B1XtRg*ƚJE+{N+ B&<T*S?)#>9+n}^p*B5\yUW'$y aYA}H:Δ!O crHy+jxfKw':ep =gW9]/@`G%y*$CYrK'I"v-EA}TvY:196\aSkXD܉F'=D{D] ==oz p<gV,V"ŵ{}XZ_&lsѕ(y9.*(A !Z1wJk~1j 5B$ʠJI)8 Ak)BC/#ܳ;x4zf)+cJ H5~[.Q3!t2wILM:R8}6L}aR:fB%(+O!BRs)m|> ],3!4δ \;ش4RUKޗX_wVc.`ryTKLVSU=m@uw՗R:Dc8(I8R";zS䰢prkQjmj@pKflޙDJhn83_H u7??)*gw ! "v"(.ZsK*6*6a[oo@T|&\"` ]߻JS亿IDcBTg~0'!S( wÌ=jbbբ>-]XL5FPV]IL#*4ěOU?N4CTݓF[}G 2- rM;,nǧ* Gٻs ݦ޻*O9.iod3 $!R5DF?BZciZ/*hٵg6+YǠ)Fs" #()\'&!L{bEI](qE`*2.?*Z%m'|:;zC͕=jTvWvBӲJwo R3$bڼiokr{z-$GVnTjf,8n~F8Ȱnѫa|’ dm-W0jEt}]s}kO\/* bw j EZ^(_Ax0>&rpa VB٤8P憃*bGM- K7<$=:Qit͹j<b dpӃqu$*p"rf%(Bʏ2a}x %ML{+Mdg!\ȢO8);]HS7>-ڪ7I'ep/'* g/Fk#.pAE3a̓~J"9m4y0z\FMT]Lɮ"gRidVv^ǒ6.*V8/e_DKS.;Owhuǘ.cώՔox`]Z!!9|27q.ᑮ+p2Q(-'5EGnooX͇ßVKM|RI:@8ʨ*47Ը*"cLj|RI(&x?ڌ1d[ni݆9,FٷS9:_E~?+JuwwGxd9c{lv?t7"&k1w IT*AJ","£≌I$,2N)率N#Rdj/QD='>mQ1blb{A XǏ? sgcs'BF;5F6nHK߈춀[tz:;Uj ᒵ:-ԭhwy%tuz2px: ;Q&#AcBSꌛ -ٿp-OwK)ul6 ݭv;K vX*'/!WR:#`̋].ի0(,Q>bd瘚ޓ!9zzx^'|4&_%`ū_Ϛex2ߠ͢xCX͵K<@iݮOi( 渆Ѥ̸F"fWQL-{-Jѣ5FȞ#S <\ii K3iػڝHP a#Hw'?* ; %E! 
}ڃ}1ٵ ;L-1 4~,*UkD΢zT4*Ks+L ǯ܃!>f փ[&ŻZ Ne ÉΎʊ6 ,!Ҵ 18zbQlh KSQQg\ȴᓱZ]MeTNJZɬ.e h6qIDATJk!JS TWT-&aɒ}-W-.3`xC7s:Ò?FBwSQ +>Koz1 *"6eox*몵 a#MHL)CGLAFj)>#\ *EbHH\ajl#ٛ9+<}VLlV+*vSce7b0P i&M"z"q:o0#/n=| Ьv-^pY|;vht$\12V moħWM֛m|fʊ6Q^ҢXkAcAxTbr m>Pevsp뛎e@"rvXF8v5z&\BH[5@ꋿGɾXˋd )]mTlcՔmt;V*pbF2̅.[l(I]^ Hz_XA0avSUFMDoJ䜂IݮOpThm:s\}3W$-Mu]JIP("ӎb%z_(E@[ 3m~Sa[DIDuM]صLy,T =wʊ6QS%4*mp,cX@Xr19ݎW*'"X#V,,CmBǒr̟Z(j1γT_q^% mTR^,=sI$Z>4ЄDg/"(ƷNkjwV ځ3%-U밚;H1*0F1rTzLv|1k8;|Aٹ6`QkB T8brNDf:?O@ؕ(v;.‡&ђ\cǒٱҢ$a]p,yDe.Xe/e&@ 6)A0 Q6+5^KtI^1?NBkv85?.7>IDIɰaUj-xB ׳}Z;:+ rXc*N;x&; %bt`hTo~Imǯ+yxBbZ(ryJDhxG-F&vo ~+pcgӾ_t50b\#eM_v[3ʇ/ESAN</Y>L]ۻIjCHB& ҿI.v8`Y2]{Ð9MҤQk%:|0n\tchu$a@%(z#}6m՛!p' p "HDVحTԶ2YӸg1`Vw$ R5DMT|"F4,Ba_C$su:>s@qXׅ[vI&bm/a~+Q0K*ɵGFaEf̧LTS !C @rXo MXڝeɾ\uK'gzw% ! u!|rG7rXAt6F_>ߔd5^ԣRw&#}WL;~#L޳}5eś0w;FRЇ%0+wrXjAdJ)>K~s/ܚw[Q}6j#O=$ ,$aCD*>ɇ<RKt@RW_k­To|]Za|ɷ&Ik!*J8IZ#ƨL’&u<:t4| zayuӄXZ0E XM-$hRgLlV>zkeLݒ$Ic\ *x<ɍZkO%yUh6`jfߪNw6~M.ݵCI"%b,"FF3RS c#jaA?['_.d`)=MG0w6HF,R?$>Є;8u͂fqR)IYU(MJQ_x!IQRoHT7jKK)K5 MbcvoIp gÙ'3c̽3EIs/PKL+zZ#,P_ R-;Զ-&=Vݐa랏q>3=qe_z^,O^o l|*Mgh l 4A=B}v@52׀sUZ#ubMu?[U)`$CϜ?Ŧ }nQ|#|\'ݰG>Ǿ=۸Uxm$F*2H,t3tZUPV1w~8Eb!&~ۍ[EMXFAe>UȮڅE]F[6<;r=~$[vVYAu+{xpGV+3uE7n1 7"CB}FyӏjהAn,gSqmL_;ˮƎ=\ʼnIChP5JpNXo$`97|xh¶ #=ݸU2CBbnL;ݛ{,w,_bƭc1qv?xB*EUeLߤHV}&=/i :*[wPzxpަR jAVet>_!qk2:nzco'}PAVAQhDI_sL;i8]mkzf*bЋ ׂb}$`U8Ei+?e'ö]jzM1P;K㸜bp!ߢ$`UT_XfP})Z}&T,D,ԧ2ݩbH Hx}㿨 od~^a[_H,I1" R HFN}$Aw:< X3'Li bA1@ոy;Nؼꋫ43v  + X-y1&ϰӼo,=֜NʬHX7 Xbƭs7ƭ uȅI*WWűǼErR QyOR\N7%on~?ФCb uEH.* 4n-Gꐿ1吀}K,<ՃG QA}]Dne: nW{zxAByUɭ LqJbr6PWo^!*@j 8 Jxե%7@ /"@zAa*`pu{F'NIENDB`buildbot-3.4.0/master/docs/_static/icon.svg000066400000000000000000000022341413250514000206400ustar00rootroot00000000000000 buildbot-3.4.0/master/docs/_templates/000077500000000000000000000000001413250514000176755ustar00rootroot00000000000000buildbot-3.4.0/master/docs/_templates/localtoc.html000066400000000000000000000001421413250514000223600ustar00rootroot00000000000000

{{ _('Table Of Contents') }}

{{ toctree(collapse=True, maxdepth=-1, titles_only=True) }} buildbot-3.4.0/master/docs/bbdocs/000077500000000000000000000000001413250514000167745ustar00rootroot00000000000000buildbot-3.4.0/master/docs/bbdocs/__init__.py000066400000000000000000000000001413250514000210730ustar00rootroot00000000000000buildbot-3.4.0/master/docs/bbdocs/api_index.py000066400000000000000000000037431413250514000213150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from sphinx.domains import Index from sphinx.domains.std import StandardDomain class PythonAPIIndex(Index): objecttype = 'class' name = 'apiindex' localname = 'Public API Index' shortname = 'classes' def generate(self, docnames=None): unsorted_objects = [(refname, entry.docname, entry.objtype) for (refname, entry) in self.domain.data['objects'].items() if entry.objtype in ['class', 'function']] objects = sorted(unsorted_objects, key=lambda x: x[0].lower()) entries = [] for refname, docname, objtype in objects: if docnames and docname not in docnames: continue extra_info = objtype display_name = refname if objtype == 'function': display_name += '()' entries.append([display_name, 0, docname, refname, extra_info, '', '']) return [('', entries)], False def setup(app): app.add_index_to_domain('py', PythonAPIIndex) StandardDomain.initial_data['labels']['apiindex'] = ('py-apiindex', '', 'Public API Index') StandardDomain.initial_data['anonlabels']['apiindex'] = ('py-apiindex', '') return {'parallel_read_safe': True, 'parallel_write_safe': True} buildbot-3.4.0/master/docs/bbdocs/ext.py000066400000000000000000000426361413250514000201610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from docutils import nodes from docutils.parsers.rst import Directive from sphinx import addnodes from sphinx.domains import Domain from sphinx.domains import Index from sphinx.domains import ObjType from sphinx.roles import XRefRole from sphinx.util import logging from sphinx.util import ws_re from sphinx.util.docfields import DocFieldTransformer from sphinx.util.docfields import Field from sphinx.util.docfields import TypedField from sphinx.util.nodes import make_refnode logger = logging.getLogger(__name__) class BBRefTargetDirective(Directive): """ A directive that can be a target for references. Attributes: @cvar ref_type: same as directive name @cvar indextemplates: templates for main index entries, if any """ has_content = False name_annotation = None required_arguments = 1 optional_arguments = 0 final_argument_whitespace = True option_spec = {} domain = 'bb' doc_field_types = [] def get_field_type_map(self): # This is the same as DocFieldTransformer.preprocess_fieldtype which got removed in # Sphinx 4.0 typemap = {} for fieldtype in self.doc_field_types: for name in fieldtype.names: typemap[name] = fieldtype, False if fieldtype.is_typed: for name in fieldtype.typenames: typemap[name] = fieldtype, True return typemap def run(self): self.env = env = self.state.document.settings.env # normalize whitespace in fullname like XRefRole does fullname = ws_re.sub(' ', self.arguments[0].strip()) targetname = '{}-{}'.format(self.ref_type, fullname) # keep the target; this may be used to generate a BBIndex later targets = env.domaindata['bb']['targets'].setdefault(self.ref_type, {}) targets[fullname] = env.docname, targetname # make up the descriptor: a target and potentially an index descriptor node = nodes.target('', '', ids=[targetname]) ret = [node] # add the target to the document self.state.document.note_explicit_target(node) # append the index node if necessary entries = [] for tpl in self.indextemplates: colon = tpl.find(':') 
if colon != -1: indextype = tpl[:colon].strip() indexentry = tpl[colon + 1:].strip() % (fullname,) else: indextype = 'single' indexentry = tpl % (fullname,) entries.append( (indextype, indexentry, targetname, targetname, None)) if entries: inode = addnodes.index(entries=entries) ret.insert(0, inode) # if the node has content, set up a signature and parse the content if self.has_content: descnode = addnodes.desc() descnode['domain'] = 'bb' descnode['objtype'] = self.ref_type descnode['noindex'] = True signode = addnodes.desc_signature(fullname, '') if self.name_annotation: annotation = "{} ".format(self.name_annotation) signode += addnodes.desc_annotation(annotation, annotation) signode += addnodes.desc_name(fullname, fullname) descnode += signode contentnode = addnodes.desc_content() self.state.nested_parse(self.content, 0, contentnode) DocFieldTransformer(self).transform_all(contentnode) descnode += contentnode ret.append(descnode) return ret @classmethod def resolve_ref(cls, domain, env, fromdocname, builder, typ, target, node, contnode): """ Resolve a reference to a directive of this class """ targets = domain.data['targets'].get(cls.ref_type, {}) try: todocname, targetname = targets[target] except KeyError: logger.warning((f"{fromdocname}:{node.line}: " f"Missing BB reference: bb:{cls.ref_type}:{target}")) return None return make_refnode(builder, fromdocname, todocname, targetname, contnode, target) def make_ref_target_directive(ref_type, indextemplates=None, **kwargs): """ Create and return a L{BBRefTargetDirective} subclass. """ class_vars = dict(ref_type=ref_type, indextemplates=indextemplates) class_vars.update(kwargs) return type("BB{}RefTargetDirective".format(ref_type.capitalize()), (BBRefTargetDirective,), class_vars) class BBIndex(Index): """ A Buildbot-specific index. 
@cvar name: same name as the directive and xref role @cvar localname: name of the index document """ def generate(self, docnames=None): content = {} idx_targets = self.domain.data['targets'].get(self.name, {}) for name, (docname, targetname) in idx_targets.items(): letter = name[0].upper() content.setdefault(letter, []).append( (name, 0, docname, targetname, '', '', '')) content = [(l, sorted(content[l], key=lambda tup: tup[0].lower())) for l in sorted(content.keys())] return (content, False) @classmethod def resolve_ref(cls, domain, env, fromdocname, builder, typ, target, node, contnode): """ Resolve a reference to an index to the document containing the index, using the index's C{localname} as the content of the link. """ # indexes appear to be automatically generated at doc DOMAIN-NAME todocname = "bb-{}".format(target) node = nodes.reference('', '', internal=True) node['refuri'] = builder.get_relative_uri(fromdocname, todocname) node['reftitle'] = cls.localname node.append(nodes.emphasis(cls.localname, cls.localname)) return node def make_index(name, localname): """ Create and return a L{BBIndex} subclass, for use in the domain's C{indices} """ return type("BB{}Index".format(name.capitalize()), (BBIndex,), dict(name=name, localname=localname)) class BBDomain(Domain): name = 'bb' label = 'Buildbot' object_types = { 'cfg': ObjType('cfg', 'cfg'), 'sched': ObjType('sched', 'sched'), 'chsrc': ObjType('chsrc', 'chsrc'), 'step': ObjType('step', 'step'), 'reportgen': ObjType('reportgen', 'reportgen'), 'reporter': ObjType('reporter', 'reporter'), 'configurator': ObjType('configurator', 'configurator'), 'worker': ObjType('worker', 'worker'), 'cmdline': ObjType('cmdline', 'cmdline'), 'msg': ObjType('msg', 'msg'), 'event': ObjType('event', 'event'), 'rtype': ObjType('rtype', 'rtype'), 'rpath': ObjType('rpath', 'rpath'), 'raction': ObjType('raction', 'raction'), } directives = { 'cfg': make_ref_target_directive('cfg', indextemplates=[ 'single: Buildmaster Config; %s', 
'single: %s (Buildmaster Config)', ]), 'sched': make_ref_target_directive('sched', indextemplates=[ 'single: Schedulers; %s', 'single: %s Scheduler', ]), 'chsrc': make_ref_target_directive('chsrc', indextemplates=[ 'single: Change Sources; %s', 'single: %s Change Source', ]), 'step': make_ref_target_directive('step', indextemplates=[ 'single: Build Steps; %s', 'single: %s Build Step', ]), 'reportgen': make_ref_target_directive('reportgen', indextemplates=[ 'single: Report Generators; %s', 'single: %s Report Generator', ]), 'reporter': make_ref_target_directive('reporter', indextemplates=[ 'single: Reporter Targets; %s', 'single: %s Reporter Target', ]), 'configurator': make_ref_target_directive('configurator', indextemplates=[ 'single: Configurators; %s', 'single: %s Configurators', ]), 'worker': make_ref_target_directive('worker', indextemplates=[ 'single: Build Workers; %s', 'single: %s Build Worker', ]), 'cmdline': make_ref_target_directive('cmdline', indextemplates=[ 'single: Command Line Subcommands; %s', 'single: %s Command Line Subcommand', ]), 'msg': make_ref_target_directive('msg', indextemplates=[ 'single: Message Schema; %s', ], has_content=True, name_annotation='routing key:', doc_field_types=[ TypedField('key', label='Keys', names=('key',), typenames=('type',), can_collapse=True), Field('var', label='Variable', names=('var',)), ]), 'event': make_ref_target_directive('event', indextemplates=[ 'single: event; %s', ], has_content=True, name_annotation='event:', doc_field_types=[ ]), 'rtype': make_ref_target_directive('rtype', indextemplates=[ 'single: Resource Type; %s', ], has_content=True, name_annotation='resource type:', doc_field_types=[ TypedField('attr', label='Attributes', names=('attr',), typenames=('type',), can_collapse=True), ]), 'rpath': make_ref_target_directive('rpath', indextemplates=[ 'single: Resource Path; %s', ], name_annotation='path:', has_content=True, doc_field_types=[ TypedField('pathkey', label='Path Keys', names=('pathkey',), 
typenames=('type',), can_collapse=True), ]), 'raction': make_ref_target_directive('raction', indextemplates=[ 'single: Resource Action; %s', ], name_annotation='POST with method:', has_content=True, doc_field_types=[ TypedField('body', label='Body keys', names=('body',), typenames=('type',), can_collapse=True), ]), } roles = { 'cfg': XRefRole(), 'sched': XRefRole(), 'chsrc': XRefRole(), 'step': XRefRole(), 'reportgen': XRefRole(), 'reporter': XRefRole(), 'configurator': XRefRole(), 'worker': XRefRole(), 'cmdline': XRefRole(), 'msg': XRefRole(), 'event': XRefRole(), 'rtype': XRefRole(), 'rpath': XRefRole(), 'index': XRefRole() } initial_data = { 'targets': {}, # type -> target -> (docname, targetname) } indices = [ make_index("cfg", "Buildmaster Configuration Index"), make_index("sched", "Scheduler Index"), make_index("chsrc", "Change Source Index"), make_index("step", "Build Step Index"), make_index("reportgen", "Reporter Generator Index"), make_index("reporter", "Reporter Target Index"), make_index("configurator", "Configurator Target Index"), make_index("worker", "Build Worker Index"), make_index("cmdline", "Command Line Index"), make_index("msg", "MQ Routing Key Index"), make_index("event", "Data API Event Index"), make_index("rtype", "REST/Data API Resource Type Index"), make_index("rpath", "REST/Data API Path Index"), make_index("raction", "REST/Data API Actions Index"), ] def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode): if typ == 'index': for idx in self.indices: if idx.name == target: break else: raise KeyError("no index named '{}'".format(target)) return idx.resolve_ref(self, env, fromdocname, builder, typ, target, node, contnode) elif typ in self.directives: dir = self.directives[typ] return dir.resolve_ref(self, env, fromdocname, builder, typ, target, node, contnode) def merge_domaindata(self, docnames, otherdata): for typ in self.object_types: if typ not in otherdata['targets']: continue if typ not in self.data['targets']: 
self.data['targets'][typ] = otherdata['targets'][typ] continue self_data = self.data['targets'][typ] other_data = otherdata['targets'][typ] for target_name, target_data in other_data.items(): if target_name in self_data: # for some reason we end up with multiple references to the same things in # multiple domains. If both instances point to the same location, ignore it, # otherwise issue a warning. if other_data[target_name] == self_data[target_name]: continue self_path = '{0}#{1}'.format(self.env.doc2path(self_data[target_name][0]), self_data[target_name][1]) other_path = '{0}#{1}'.format(self.env.doc2path(other_data[target_name][0]), other_data[target_name][1]) logger.warning(('Duplicate index {} reference {} in {}, ' 'other instance in {}').format(typ, target_name, self_path, other_path)) else: self_data[target_name] = target_data def setup(app): app.add_domain(BBDomain) return {'parallel_read_safe': True, 'parallel_write_safe': True} buildbot-3.4.0/master/docs/bbdocs/test/000077500000000000000000000000001413250514000177535ustar00rootroot00000000000000buildbot-3.4.0/master/docs/bbdocs/test/__init__.py000066400000000000000000000000001413250514000220520ustar00rootroot00000000000000buildbot-3.4.0/master/docs/buildbot.1000066400000000000000000000204071413250514000174310ustar00rootroot00000000000000.\" This file is part of Buildbot. Buildbot is free software: you can .\" redistribute it and/or modify it under the terms of the GNU General Public .\" License as published by the Free Software Foundation, version 2. .\" .\" This program is distributed in the hope that it will be useful, but WITHOUT .\" ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS .\" FOR A PARTICULAR PURPOSE. See the GNU General Public License for more .\" details. 
.\" .\" You should have received a copy of the GNU General Public License along with .\" this program; if not, write to the Free Software Foundation, Inc., 51 .\" Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. .\" .\" Copyright Buildbot Team Members .TH BUILDBOT "1" "August 2010" "Buildbot" "User Commands" .SH NAME buildbot \- a tool for managing buildbot master instances .SH SYNOPSIS .SS General Invocation .PP .B buildbot [ .BR "global options" ] .I command [ .BR "command options" ] .PP .B buildbot .I command .BR \-h | \-\-help .SS Command Options .PP .B buildbot create-master [ .BR \-q | \-\-quiet ] [ .BR \-f | \-\-force ] [ .BR \-r | \-\-relocatable ] [ .BR \-n | \-\-no-logrotate ] [ .BR \-s | \-\-log-size .I SIZE ] [ .BR \-l | \-\-log-count .I COUNT ] [ .BR \-c | \-\-config .I CONFIG ] [ .BR \-\-db .I DATABASE ] [ .I PATH ] .PP .B buildbot upgrade-master [ .BR \-q | \-\-quiet ] [ .BR \-r | \-\-replace ] [ .BR \-\-db .I DATABASE ] [ .I PATH ] .PP .B buildbot [ .BR \-\-verbose ] { .BR start | stop | restart | sighup | reconfig } [ .I PATH ] .PP .B buildbot sendchange [ .BR \-m | \-\-master .I MASTER ] [ .BR \-u | \-\-username .I USERNAME ] [ .BR \-R | \-\-repository .I REPOSITORY ] [ .BR \-P | \-\-project .I PROJECT ] [ .BR \-b | \-\-branch .I BRANCH ] [ .BR \-C | \-\-category .I CATEGORY ] [ .BR \-r | \-\-revision .I REVISION ] [ .BR \-\-revision-file .I REVISIONFILE ] [ .BR \-p | \-\-property .I PROPERTY ] [ .BR \-c | \-\-comments .I MESSAGE ] [ .BR \-F | \-\-logfile .I LOGFILE ] [ .BR \-w | \-\-when .I TIMESTAMP ] .IR FILES ... 
.PP .B buildbot try [ .BR \-\-wait ] [ .BR \-n | \-\-dry-run ] [ .BR \-\-get-builder-names ] [ .BR \-c | \-\-connect {ssh|pb} ] [ .BR \-\-tryhost .I HOSTNAME ] [ .BR \-\-trydir .I PATH ] [ .BR \-m | \-\-master .I MASTER ] [ .BR \-u | \-\-username .I USERNAME ] [ .BR \-\-passwd .I PASSWORD ] [ .BR \-\-diff .I DIFF ] [ .BR \-\-patchlevel .I PATCHLEVEL ] [ .BR \-\-baserev .I BASEREV ] [ .BR \-\-vc {cvs|svn|tla|baz|darcs|p4} ] [ .BR \-\-branch .I BRANCH ] [ .BR \-b | \-\-builder .I BUILDER ] [ .BR \-\-properties .I PROPERTIES ] [ .BR \-\-try-topfile .I FILE ] [ .BR \-\-try-topdir .I PATH ] .PP .B buildbot tryserver [ .BR \-\-jobdir .I PATH ] .PP .B buildbot checkconfig [ .I CONFIGFILE ] .PP .B buildbot [ .BR \-\-verbose ] { .BR start | stop | restart | sighup | reconfig } [ .I PATH ] .PP .B buildbot [ .BR \-\-verbose ] { .BR \-\-help | \-\-version } .SH DESCRIPTION The `buildbot' command-line tool can be used to start or stop a buildmaster and to interact with a running buildmaster instance. Some of its subcommands are intended for buildmaster admins, while some are for developers who are editing the code that the buildbot is monitoring. .SH OPTIONS .SS Commands .TP .BR create-master Create and populate a directory for a new buildmaster .TP .BR upgrade-master Upgrade an existing buildmaster directory for the current version .TP .BR start Start a buildmaster .TP .BR stop Stop a buildmaster .TP .BR restart Restart a buildmaster .TP .BR sighup | reconfig Send SIGHUP signal to buildmaster to make it re-read the config file .TP .BR sendchange Send a change to the buildmaster .TP .BR try Run a build with your local changes. This command requires in-advance configuration of the buildmaster to accept such build requests. Please see the documentation for details about this command. .TP .BR tryserver buildmaster-side \'try\' support function, not for users .TP .BR checkconfig Validate buildbot master config file. 
.SS Global options .TP .BR \-h | \-\-help Print the list of available commands and global options. All subsequent commands are ignored. .TP .BR --version Print Buildbot and Twisted versions. All subsequent commands are ignored. .TP .BR --verbose Verbose output. .SS create-master command options .TP .BR \-q | \-\-quiet Do not emit the commands being run .TP .BR \-f | \-\-force Re-use an existing directory (will not overwrite master.cfg file) .TP .BR \-r | \-\-relocatable Create a relocatable buildbot.tac .TP .BR \-n | \-\-no-logrotate Do not permit buildmaster rotate logs by itself. .TP .BR \-c | \-\-config Set name of the buildbot master config file to .IR CONFIG . Default file name is master.cfg. .TP .BR \-s | \-\-log-size Set size at which twisted lof file is rotated to .I SIZE bytes. Default value is 1000000 bytes. .TP .BR \-l | \-\-log-count Limit the number of kept old twisted log files to .IR COUNT . All files are kept by default. .TP .BR \-\-db Set the database connection for storing scheduler/status state to .IR DATABASE . Default value is .BR "sqlite:///state.sqlite" . .TP .I PATH Directory where buildbot master files will be stored. .SS upgrade-master command options .TP .BR \-q | \-\-quiet Do not emit the commands being run. .TP .BR \-r | \-\-replace Replace any modified files without confirmation. .TP .BR \-\-db Set the database connection for storing scheduler/status state to .IR DATABASE . Default value is .BR "sqlite:///state.sqlite" . .TP .I PATH Directory where buildbot master files are stored. .SS sendchange command options .TP .B \-\-master Set the location of buildmaster's PBChangeSource to attach to in form .IR HOST : PORT . .TP .BR \-u | \-\-username Set committer's username to .IR USERNAME . .TP .BR \-R | \-\-repository Set repository URL to .IR REPOSITORY . .TP .BR \-P | \-\-project Set project specifier to .IR PROJECT . .TP .BR \-b | \-\-branch Set branch name to .IR BRANCH . 
.TP .BR \-c | \-\-category Set category of repository to .IR CATEGORY . .TP .BR \-r | \-\-revision Set revision being built to .IR REVISION . .TP .BR \-\-revision-file Use .I REVISIONFILE file to read revision spec data from. .TP .BR \-p | \-\-property Set property for the change to .IR PROPERTY . It should be in format .IR NAME : VALUE . .TP .BR \-m | \-\-comments Set log message to .IR MESSAGE . .TP .BR \-F | \-\-logfile Set logfile to .IR LOGFILE . .TP .BR \-w | \-\-when Set timestamp used as the change time to .IR TIMESTAMP . .TP .I FILES Lis of files have been changed. .SS try command options .TP .BR \-\-wait Wait until the builds have finished. .TP .BR \-n | \-\-dry-run Gather info, but don't actually submit. .TP .BR \-\-get-builder-names Get the names of available builders. Doesn't submit anything. Only supported for 'pb' connections. .TP .BR \-c | \-\-connect Connection type. Can be either \'ssh\' or \'pb\'. .TP .BR \-\-tryhost Set the hostname (used by ssh) for the buildmaster to .IR HOSTNAME . .TP .BR \-\-trydir Specify trydir (on the tryhost) where tryjobs are deposited. .TP .BR \-m | \-\-master Set the location of the buildmaster's try scheduler in form .IR HOST : PORT .TP .BR \-u | \-\-username Set the username performing the trial build to .IR USERNAME . .TP .BR \-\-passwd Set password for PB authentication to .IR PASSWORD . .TP .BR \-\-diff Use .I DIFF file to use as a patch instead of scanning a local tree. Use \'-\' for stdin. .TP .BR \-\-patchlevel Specify the patchlevel to apply with. Defaults to 0. See .BR patch for details. .TP .BR \-\-baserev Use .I BASEREV revision instead of scanning a local tree. .TP .BR \-\-vc Specify version control system in use. Possible values: cvs, svn, tla, baz, darcs, p4. .TP .BR \-\-branch Specify the branch in use, for VC systems that can't figure it out themselves. .TP .BR \-b | \-\-builder Run the trial build on the specified Builder. Can be used multiple times. 
.TP .BR \-\-properties Specify the set of properties made available in the build environment in format .IR prop1 = value1 , prop2 = value2 ... .TP .BR \-\-try-topfile Specify name of a file at the top of the tree. This option is used to find the top. Only needed for SVN and CVS. .TP .BR \-\-try-topdir Specify the path to the top of the working copy. Only needed for SVN and CVS. .SS tryserver command options .TP .BR \-\-jobdir The jobdir (maildir) for submitting jobs .SH FILES .TP master.cfg Buildbot master configuration file .SH "SEE ALSO" .BR buildbot-worker (1), .BR patch (1) buildbot-3.4.0/master/docs/conf.py000077500000000000000000000270721413250514000170520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Buildbot documentation build configuration file, created by # sphinx-quickstart on Tue Aug 10 15:13:31 2010. # # This file is exec()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import pkg_resources import sys import textwrap # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
sys.path.insert(1, os.path.dirname(os.path.abspath(__file__))) try: from buildbot.util.raml import RamlSpec from buildbot.reporters.telegram import TelegramContact except ImportError: sys.path.insert(2, os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir)) from buildbot.util.raml import RamlSpec from buildbot.reporters.telegram import TelegramContact # -- General configuration ----------------------------------------------- try: pkg_resources.require('docutils>=0.8') except pkg_resources.ResolutionError as e: raise RuntimeError("docutils is not installed or has incompatible version. " "Please install documentation dependencies with `pip " "install buildbot[docs]`") from e # If your documentation needs a minimal Sphinx version, state it here. needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.extlinks', 'bbdocs.ext', 'bbdocs.api_index', 'sphinxcontrib.jinja', 'sphinx_rtd_theme', ] todo_include_todos = True # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Buildbot' copyright = u'Buildbot Team Members' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. if 'VERSION' in os.environ: version = os.environ['VERSION'] else: gl = {'__file__': '../buildbot/__init__.py'} with open('../buildbot/__init__.py') as f: exec(f.read(), gl) version = gl['version'] # The full version, including alpha/beta/rc tags. 
release = version # add a loud note about python 2 rst_prolog = textwrap.dedent("""\ .. caution:: Buildbot no longer supports Python 2.7 on the Buildbot master. """) # add a loud note for anyone looking at the latest docs if release == 'latest': rst_prolog += textwrap.dedent("""\ .. caution:: This page documents the latest, unreleased version of Buildbot. For documentation for released versions, see http://docs.buildbot.net/current/. """) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build', 'release-notes/*.rst'] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = False # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'trac' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] intersphinx_mapping = { 'python': ('https://python.readthedocs.io/en/latest/', None), 'sqlalchemy': ('https://sqlalchemy.readthedocs.io/en/latest/', None), } extlinks = { 'pull': ('https://github.com/buildbot/buildbot/pull/%s', 'pull request '), 'issue': ('https://github.com/buildbot/buildbot/issues/%s', 'issue # '), # deprecated. 
Use issue instead, and point to Github 'bug': ('http://trac.buildbot.net/ticket/%s', 'bug #'), # Renders as link with whole url, e.g. # :src-link:`master` # renders as # "https://github.com/buildbot/buildbot/blob/master/master". # Explicit title can be used for customizing how link looks like: # :src-link:`master's directory ` 'src-link': ('https://github.com/buildbot/buildbot/tree/master/%s', None), # "pretty" reference that looks like relative path in Buildbot source tree # by default. 'src': ('https://github.com/buildbot/buildbot/tree/master/%s', ''), 'contrib-src': ('https://github.com/buildbot/buildbot-contrib/tree/master/%s', ''), } # Sphinx' link checker. linkcheck_ignore = [ # Local URLs: r'^http://localhost.*', # Available only to logged-in users: r'^https://github\.com/settings/applications$', # Sites which uses SSL that Python 2 can't handle: r'^https://opensource\.org/licenses/gpl-2.0\.php$', r'^https://docs\.docker\.com/engine/installation/$', # Looks like server doesn't like user agent: r'^https://www\.microsoft\.com/en-us/download/details\.aspx\?id=17657$', # Example domain. r'^https?://(.+\.)?example\.org', # Anchor check fails on rendered user files on GitHub, since GitHub uses # custom prefix for anchors in user generated content. r'https://github\.com/buildbot/guanlecoja-ui/tree/master#changelog', r'http://mesosphere.github.io/marathon/docs/rest-api.html#post-v2-apps', ] linkcheck_timeout = 10 linkcheck_retries = 3 linkcheck_workers = 20 # -- Options for HTML output --------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {'stickysidebar': 'true'} # Add any paths that contain custom themes here, relative to this directory. 
html_theme_path = [ '_themes' ] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. html_logo = os.path.join('_images', 'full_logo.png') # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large or a png. html_favicon = os.path.join('_static', 'icon.png') # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # We customize the rtd theme slightly html_css_files = ['buildbot_rtd.css'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # Custom sidebar templates, maps document names to template names. html_sidebars = { '**': ['searchbox.html', 'localtoc.html', 'relations.html', 'sourcelink.html'] } # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True html_use_index = True html_use_modindex = False # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'Buildbotdoc' # -- Options for LaTeX output -------------------------------------------- latex_elements = {} # The paper size ('letter' or 'a4'). latex_elements['papersize'] = 'a4' # The font size ('10pt', '11pt' or '12pt'). # latex_font_size = '11pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'Buildbot.tex', u'Buildbot Documentation', u'Brian Warner', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. latex_logo = os.path.join('_images', 'header-text-transparent.png') # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # Three possible values for this option (see sphinx config manual) are: # 1. 'no' - do not display URLs (default) # 2. 'footnote' - display URLs in footnotes # 3. 'inline' - display URLs inline in parentheses latex_show_urls = 'inline' # Additional stuff for the LaTeX preamble. # latex_preamble = '' # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output -------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ ('index', 'buildbot', u'Buildbot Documentation', [u'Brian Warner'], 1) ] jinja_contexts = { "data_api": {'raml': RamlSpec()}, "telegram": {'commands': TelegramContact.describe_commands()}, } raml_spec = RamlSpec() for raml_typename, raml_type in sorted(raml_spec.types.items()): jinja_contexts['data_api_' + raml_typename] = { 'raml': raml_spec, 'name': raml_typename, 'type': raml_type, } doc_path = 'developer/raml/{}.rst'.format(raml_typename) if not os.path.exists(doc_path): raise Exception('File {} for RAML type {} does not exist'.format(doc_path, raml_typename)) # Spell checker. try: import enchant # noqa # pylint: disable=unused-import except ImportError as ex: print("enchant module import failed:\n" "{0}\n" "Spell checking disabled.".format(ex), file=sys.stderr) else: extensions.append('sphinxcontrib.spelling') spelling_show_suggestions = True buildbot-3.4.0/master/docs/developer/000077500000000000000000000000001413250514000175255ustar00rootroot00000000000000buildbot-3.4.0/master/docs/developer/_images/000077500000000000000000000000001413250514000211315ustar00rootroot00000000000000buildbot-3.4.0/master/docs/developer/_images/stats-service.png000066400000000000000000001504711413250514000244430ustar00rootroot00000000000000PNG  IHDRzTXtRaw profile type exifxڭkv$F* ${<3UʪT& <_3'?謏;9_eE},A8f 2}5^50 b - ?1{xX+rD|}}wJ1/B\EBy4u?xb {!F_7å)J>d`QsH8VәhAb~,f)IHN6x, jZHMNRJ 'kN9g%r7d̊9N%\RK+jM5WjoOVZmC{չsbБF6ʨ>)f6ˬ;de;nJinjI';N;߳ss;krוq!I1I-PcJ&faٓC12v|g2y{͛.sSC~o[ .$ ~\k_qup013E񳌶{w<<bZ1>F?o>#E'lm{K;a2Fs4m]rOC?'~8S?gױTf'ePaE^ v[`Fۋqf̡d9+5Y utشE]*h.ϡN 8Zn*Z״(Th˻Ts B(n>n'ӊOI`KΡ"VMg(GcޱlD iG{ڶh(V +Nwiӈ(%>ęNKh! 
3dӸ  qZI@0Mc;ssw*vhX<: >˅bTFK2ΤWIT41r{+S1`rp$X )`#4q ]̃0V P( CoQ=1 6s\JU aV0ƢlU kFX| OAX+3}!.":,|n4dcxlepoGQ09҆dB(r+fn)S$[a.Zӎ9+7Epyk+wK#,9-+/$r=)aZ3k鞎:>`j@wdm[=7#Lp%ƥyk4" "x 8{TD2˭gQ j:Y=J&9ݵ 7kޢ։Qm^DO^ǰk{U_Q3Q ?wUpڇDk$;ܰxe4tbḘ"nQs F,`8{hpЅZ45 %V d=(%&\/͓ vQ55PUHS")xu:+Մ:舆uoaEX;\ "CۍyP'QN/ykrB5w_c.8CD`ԖkI Lj_@ DyP.(E- 2dч,G})X$2F$DԀmG> ԦjEDaʛV7:- r JŭQ'\qeEp1.R|WN% ,ז^eȏa /):ύcti-A9O݅ؽVB0̈ z)@Md%f +qxr H R}glP@q\9Q4H#֠ N_2COCpԉ, =0`hفEz懦un^1LIE|3Čg +_c5ZPHio5 fxO T, Y递a C ΢.s~z}sA^vn5W)ZJ$džgևb/1 U&`2[*aa #>Ro'V.dq6ML܁vUCnȯR|nB A?Cq<:)䢋b3'p?*7=,guyK赈MB© b[L'r d.Nn;SGwy98[Wn(o I? dzI|;N3eRMQYx\؋ڮ6sFj1Nu4gy U\Ɔ^ts}$]z^#ݿ_\ܣbx(iEKLGT2TJ[pƉ  CsgHl( ,M0 VIب2,fg`33#Z׆1.G  LpbFЙg6x[/WR$ zN"fG29 UtH9Q 赹( Rk[ǹ}E׾kil9݃ǁ }(hAI/(L(Eq㋙,m$i"V@ }4Zt9Ej^F ^VXn8_8uD #8\QYp!ox$|<Юhb9ƵphC&k `X,{&kz\+5 l9jrfUeYh ''9=mU }yqU =ɧW_٢EZVkxDy):|_Xd]hA_ < yTo(mIT!X&Y卌D9`#7*q\I ԅ> ؤ)c.I'V-BT7uIW)یG}S8"b" Q:9vL<7-BC2p{zm&,utl8E=RE.G:YSrI }BxQpWIhffaV%S2Ʒ6k뛙C "#2nT=8C_y<F{Q!ɿ#(R>^5VAT%_!vEw+Lvq qM0P칻,|2}owSL "&jy-=Obbw[ $(˂Ɗ&Pյ3s+;0jBP*FΰS Mq,ͿAHSR k4[{/oyvuxo0h ;B KT NܫQPE֛0ͩ wTo@EvxeBs5ړ0St.Kpdǝ#/ CU33O.vZ*a|(joo:ҨQP aBoE/2a6Z n ;Ami< Rb#  h6g U *#JH^=u7,kTbf_fAY^PI1ZD>@9D΁P᧏+F,!E1Zr5) 1B6c]AuL~ꓹ9E)%O2ݺ^U,~H^_"vL${:R߼ᖀ8$r04KL_`voL 3ƍZZ궩.DĞgE2YHPV/|,d2`V `VHPHAt3!XW!sDE&JCgo *w(W࢒ntIŸʅoM$GqϾ+hݱBi!iCCPiccxWT۶g9/2s9KsI(* %( **(`( \<ژ_5gUoc 3koOsquapj>~q!ggYgϰ>3>K 0?ho=ЈAž~ WwG r /< @߿ 8 /_4}K$ˤ9GEH d%iQ$XIZB\'KJņąih22~>!~QQqQA2 !~Q>!2Q2rҲ2X@OHx4d_%>.PAGQa0Ӱs<Ñ9fBPoW/!6 P 0Z 2dLV8/4"@ 3.4*ET*P@hmt6huS`́%'8W<'g?~!x?a9 g#n?! $A8@6 G@(E8Ip Rp P A u hMV.v.NA}*p !0 np1px�<`</ '[{BA !Db3A'@| !aH$IC)Bʐ*i@6 Ad Bd@=!W 򀼠@AP0 CPA P" B!(ʆr|TAI*ʡ z:5A-"]:n hF;tB4 =As hZ@K;hZ>B/7. 
`F`L03 Ü0Â0,Ò4,+J*kڰl9l vn' p p4 'Ip \0\p |Znf|wW^o< ߃ < ?g+x~ /+o[6!` E$ `Gp!x4 BAGH"rE BA# 5pB"<_D"D ITD" G ňS2D%qрhD NDb1EG -!1n7{[}}a<O39x!</WīuFxs- $|>_/Ɨ&|߇ e"`DN% 2%A`B"8\ B(!HH' ń2B5,N& w)3<-aAC"D ҈"DImv;]]]]݌ݞ݃====({){-{3e~OWؿos 8|ty #k0xCG8NqTs4rtp\x+6'IT4tL OOOτϞσ///_ __ _7M{||V~Ұ4&/NSi4oZ(-E+hmD[GS9e5=c?R") , +)`"`/%"/)P$P. !/p[  b-]S l| Lo!PPP)ZV!BBKBBha00]XIXOJU8@8Z8]@Lpp)y{"6A o0$<"""=""EDEDYDEDDEEDCEEsEOֈ>}.N^ULPLFLCTQXXa3bŮ{%&I:]Hף[A8zLqvq!q9qmqsqgq ]7gŗſJ%$$$%,$\$$b%2%ITI4ItK I]&'GS3;().wTRYGnDnJnAܶT>Ei]C)t<B+V8Х000Q"FUQHQAQOFK1\1MPRYWk/;JD%N%RR :vJsJ+JHe&e~eYeeke0TB f^;3o*祖UxT$U4TT\TTUUTΫtRRYTYWQ%r:&橖SR:FTRPSW3SsQ RKR;vZQGڴo@Χ.n~TZz=YM FF)Na)E/dM^M)M-M+Mpt"͋55_h~Тkijhk%khUjh]պ5fV6vN>]ݢ}UFMGTGEDY'H'Y@RU_s5ߺ8]]q]u]s]704"Zvt?xdtlEe;׭w[oFoY>JE_D_YDY?X?YPZM@gAAAAA/C!aaIne #QQQQQ-i%(c6c1c5cscwp gGg7M0&&t KOH,S&LzLFM6śrJښƙ晖^377lg`&``fddljVdVgvllllmn.nanemcc^fl~K{ .!i--,n[| ޅE%%˥ԥˤ[ W+kkkkuI׷n87797#7Wp,2VnSnn | &Q݇gWxP<=T<,<z{xztz yx=<<\2r*jZ-m]}ֻ+4\|1VO!p[TAKHdH^HUѐ!_Cѡܡ򡦡q㡯C7Èaaaaaa'ÚnM3;W_ "k:;B!,;"!(!?b2b9b;1R4R+!242+<#vȯQ(((QQǢG]z-]}7z!g !F F5&&0&=4-f8f6f=+k{<1zؽ8888xxxx " Z a 9 U w6IBى&.$n$4B*:&-$m&5Òs'NBIMNqJLOKIy')Uyyyyw^mSco?r{a9|##G:Ruȃ#Gv d | N\*[`Pил0T; [G58y4艣?QD)+/r/J(*.j-)zYq|L1c Nk=6rlqqq<~㯎o3K{'.>tB i'N|;'Nʝ8p2dɞ'WOO)=v*TkfN`KJ4KKbJJKK^,K KJKKK;J-[V&WfYX]VSW6U44O=tzrrr*@WRmEXኆg*I%**w8lBV T=VM֯N.\=^pרD4 ռ٨e5=T[U[;US'PUZ_wbXۺ3g؞ ;s̹3g^٨g7Ϭ﫟_?;+|Veg;>8ҀhkPopjm(nhkkxӰs9s 5uչY˝:r>|矟hdlj4k hnխ}3_.//]pBͅ O.|k#eյ]omqEAs/x⋋[VGo/`P8qccsIK%._zxi2e^.W^>}vr]U|5jծ~'Ms_qMӵk%:M^|p~z__35>28pzは77nXQp荥MM7oܼvs ˠ `KC!ơ7`oX{c8mz-[n:u֣[#Б‘֑#on 6{;÷oṣyNڝ;eU=00Z6zetzX؉KccJ޵vݶgr/{Fサ/t߾vo<=} A=y3.<ܝО84qfbpbabwoRg{2s~rhrqr}tћ)hJw*gLjB <xi̴ttf3333g:f&g֟P>}I'?2?Uz4iϟn?|YƳ3φ-΂YYYټ٦ٱٕ9}|.l\ܗ ?{^^pP"řC/^^ 4|֗_~'K[G͟2+Wij_ Z\ ,./|Z$-,..-..~Z o~F7\~͏,oU޺M}[v%xIhx)xRerrw;wFnާy "bRұ2}yUeu5mjMl|-bZړ4?x}aGGv>q'O?~j̟U>}|ֱ1eW_|}1KG_e롯 _G~#|f-[巁o ߡBM}/_?~?dgφw~n6d6776nnDnnZlFmlm>2 :չ5/_7oߞ76öv;۫d8ISg__ɿVjcSs}gaݵ؍-ߝ{{f{{%{}{/c8`9@U5"AP<ky`bKGD pHYs  tIMEXu IDATx}|T$I 
@$@%شA?]Fm/ݭQv mCWm킭*vQ[ EPFd r;$sG&1( )|s sޜscRъVDDz{{eQoozzzݭv+<<\VU bQXXl6٩N ;z%M49x;%:::-::zzDDDl iFغ#zzzln;5<<<`Zaaa~ lN##|X(+ aaa_;vUYmmmi===D_rrrOOOFL2E&LfnWTTio}>^)^W]]]:v옎9Cu.Ka~ddqqq9@`ܬY:ϟo7o=++K7nE[UUUi𕗗*++cv{fx" \cMHHX޾0###lٲش4kVVM6bHUUv!_*"&&fcss˒ޔ =&&[ vߝwy|ńf~zswI}ӬW"""WZչe3lٲz#""SSS#i&]Βjy30g?q8ޔ ՚K/3:u朜zDyo꫏O:O20Ȍ3)&&&P^^N< eee&**K.zHr8,Y'2E7ыfկJ/1N>JR] 3a„;v/>$''W#O8%8_~N[nb\'OVlllÇ>zz* 䥤|kҥ^7^xK,񦦦>B/&͞=1p\r%jf 7$Rs'DFFO%%@JNN]ti:K.8qMT \/N0B mWkk\*.BVDD%55B !!*@]G!޽{:JKVZZJ;C%@[~=EB/"EF %8/QsUMM ոLݻwkl#{.Sbb6oެ$P [ƍ|q8 \SN޽{ukʕsXb.\(ۭɓ'Sp 0$&&ߖ1F6MO=E9?OdZ?񏊏(`t WAA:::بl=*//8{ܹs*߯ cb>MssyMvvIII1̌f[l1>III17! 'mrGkVex]޳Z /I#IX:#{TSC~#)-#yoקi=")^65lC20[|ivhs0WEEJq|LZ㻚6-U$IcRtUޣ7s`fS?{IK4@]gL #q)Si \FJL^G JJ biIFLIez=bɓrg: "1[2%Xd1OY߹k+VF4ŕ^sMfMiTrLIOGh04[:ιKc<;M\*7ߔ5xv 1SjrKc*sе33\4oi]ssuq,7eF_;\G1;Ν4x1c++MeefuhJeFrHr >>i:$u&dPQȲSTwͼanK`P3jƮ n;@ҿJ eаo*?iQz4oXXvբS4zxD,IfHty#)|NIP(azi?IgT \oHK:*逤s5xeG0d.U<;}IET٨ v ? `%#e’f38]C3zmYr[ج)k\K0t(`ZJRR.ԊmZΊ Uhy-u6R9kۦW?ϾvpO%}C6I/^6"~!CR53w^uݒv'Icb(>_%"%Mz]mhq볺)m{y$6~Di|YۦGnIOzR7ΒUoUvnҲ/)Q/>4O?;H&IY|]8l(-08 Ŧx]Y)L6x+]\pgr Si$)*)5%kI&x*I&syc*Iƹĸ .9RPjL-X+蛯cjKF_kJ6z\&S2NWp%} yDҏF^aq8ϻ\l\^-3kזcSk3NS)5Y?ՙLڔ9rLdl]82Ώt ˊmyLN0 F9E6ip9 &I/nFa2qPF֥<Ǟ/=Ҧ?~G,EŧޢR\"41y|zp}gah]ώlܡ5a+E9-{ntH>5kv+/޿P+%9K]fdxIRWfJ'ڵGU mY-1\X:l6KG^_Z'Hi@5/='_wKqJL^G JWQAxdv7 O8dcL)s ,1Rqy>ְ=b Y0R,Hc.onCiWlrJF4.ImsHd?TQWxMJI,WνO]㕞J(K. \^p %0p%p \.K@p \%K@f̘ C/##:KU__?B ;vl !+!! 
b;vL ۨ@YGuJ _S p ~ov:NJKPV<}X%jKVWW;vغvZq=sڳgϟjkkP `H2*1;i 0aBƍ/r>ϩB~GN mjY,OLs]dy mnXrSi_%m}ٌ/iW.O'RQ׫n6q-}%յU>=Ol-^tn.˿T;f̝W]Gyjmj]sꁳ?nٳYvRs,YnҲŧQ"%MKV׮o.=jXzy7Yz\MM5۷ovlllOA֭[ݽsnTHPZ`$q-6M"8SFNSmip94zr:KLdZS[4LnZS8$#W1k$\[b\F)(5er7LY;)FmaNSNݜ[o74W_}qñYR(/@HTm9kj$+?In˽[~'쒮tׄIk2.n*Pݣ#IZv~z8ϡC9r=Џ""".wcG}a{9ZѣGv]0͔kA_=@VlHI5ʿA:T?^.PAn䔔9hA&y7zdA͑-z*:͛P|*6 ,;&$飏>o{رco qf?}޼y,edd\=---UQQ+//UVVֈo-}>vPE}ņ[]/NRJsSWx$uEפ$C#X,!nJ2LJBXXnj3jf8q"ǖ4iROOOFL2E&LfnWTTio}>^)^W]]]:v옎9CuIUI٩\pn N %p \@.K%p `DK"irvJtttZtt􈈈@ 07cuwwGknwjxxxjvl-]]]흝՝nIG$NG$(?%+ aaa_;vUYmmmi===D_rrrOOOFL2E&LfnWTTn}>^5x[׫.;vLGѡC<諩驯677ۭVk 66POO϶JPR%0?22길[~@ 0n֬YϷϛ7Ϟ 7?x.~6F---֪Ko~IEEjvy<7H*F$c#X,!T0q|2 +o/,[,6--͚iӦ7RUU;vvw訪7%_%K±slpc~MIIZp %@ &M)h"ӧ~GBE.?G?"X^ ӊ+&gee0$&&nɃ0)B@hh|kҥ^,YMMM}$ p \Τٳg"K.VR8,>K)<<\PccO{ァ\sUkk~~(,SB4$RzOl~imRRRL~~)++3ٖ-[̣>jRRR3N@K IDAT^y%%%)""BWvveBC *//{ァruuuI_u=(%%6V- z @ ׫l tkֵ^I&)##CSL1@]]T__RkϞ= ,ФI}cp .%pyvo߮rh֭:p5}tedd(##C3gԤI%&&jC&[ZZԤ&577ڿt8p@III>},X áleee]U%@|lQ} 5ֻuСԤցI&[l<ĉu1~~l6>|x L655i'$$hԩ6mI722r6V aL&=~uzݏ_S|ڵj1AIz ~_h D> aXmr1kreoJ+k֔sI&1tT0+FF .$0ԸOr\sI)kC~fs= #wI'bbђ'_V$鱛ғVEOnS㮗5i}]':^ϒv{uד$6~Di,4-YlQ=Bnڼ͚߬uLEe_*֙8򜦤֔830E%lCd2.c\&'۷8Ȕd[`J+*Lqb#ecJ-Xg\ŅS`LŚ<#8זWqAz@$\rxKcڬ+XnrLq12xJMdoX(4 px΁ZnqU{zLu隁pw1%F4Ztj 4KaJm>hiUن"9^׽VK? 
H"^ T_HJH[{iDkuHM*P/̖=8_dؾ;M;F];pmnR\$%MKm>Iؤ]鄿/X/iށeM*зo׫-Y-).Sx\ʔGiiwCQl$)QǣaG`8Ke(MmGwY)_:!h;Y5S*^vIz^O4ogzF"zk)+'Oz=ɭ6z8PRٺ5Oz_^G'*)G6-]``/' \@hs~bw27e =)5&w|9c\8]gGVge-3ijMY22S54eűܔy pIHJz.ʝfJS]{jx<1ՕvsghzP{^ixMu:Se_z2#9M)5|FK@eNHyiBE+# LQb#u#\#\ |%EIZ9(dFMYSUƱ z_6ԆG8'VJB#fIʐ4CRΣI`ȜjŚu"vyc%5Pߡ%n7,85wxORA0 DUH ˷$8I +wxSg%۶b4Gumnzr8$EmۺJ5xz]t̙\u>z!@r/P{?U߹CIrhB;+*TQVr޶DoԝgCXHI[/\~ވQޣJy@ Rh-BQߡ?~랯#Yd$u}Ouo#g)쟹֬ 9̲ cq93#~4;1^*Zn]b[ag3xKŅ%Y\\8aj8}\kv` !F.#2\^9å8 Ŧx]Y)L6x+ fg8seSh<ƘL#9LQI)Y(\LYPfI#u*PKR\$)QǣaVMxiwC6ֹu]?o-.RhRKh[Mںi6mڨg_rn;LI1#]}oȹ%)Nj[siWOuWA Hr$'kZX*qm֭y>:1VI=itg:mVH}$v+SoҦMq&9.9t}|Hx1#J2 prkM1ԙi2 Sj9c)co:]M8dcL)s ,1RqyN~sN>K@pyV 4x6 S[]mjvTV4յ \q_b"\# C$J0iСc>\SJs^Y9%p \@.K%p \.Kgƌn02223@]ckkk)ѱcP p @!бcǔJKuQy/魯=!o7Q;4@emmm<>Jqw}U@iuuucǎk׮s=={v '+$A J!\0aB; >>$''*\|N )wqW_%|/{=J5{. @p86/Ysw7\\ 555l߾۱]>9 [nUtttΝ;wRpƞKmqOTԩS7䴽[<7|\}fIqY֛L---sϑ(1?ό𦤤TX\ p \YSN}5""лe˖Q(lbVZ=uW$ͤGEESBBGvww_~iii 0l֯_ovݗQTT?I_K+iqBBo###;fϞ3tkjD^{<]]vYnHHHIF %( HZIY e(l̷Z Ǝ{K <5kVͳgee)##Cƍh+Ң*m߾]r_eeeno".%p9^)^W]]]:v옎9Cu `Kι(p \.K `ę()Lݝ`DFvwwۺ#nwjZ0xx?,,gXVD{{~PRݠ \L$})***;..*;c|\\\`ĉSjڴiM8Q6MQQQgҽ^^o=:n_uuСC:zboz,p 0|K!66:~'.;wnoNNNܼy,JIIQDDDyyIե'UTT\ضm[X[[5..n^*JjF %(BRVRfo̰lw9sٲe/K_Ǝ;@[[?nw? KOR4B.\$WƮ?O %6m2ַ|&M:866v+/%p9EGGĉw~uVٻw kVZsM8qwddm@nƸz_~Ν;#{.>3f,2eʺC,ϓP]]]Ԅ ~u饗."`Tꪫ2?7Qoƍ 믻4.F6ol͛#۷JKQaʔ)Yr3fP K/ՓO>k.B>EFFo~!aH"#%\&&&ngSC_jLttR* ֦tVbɢ .ȿ:u?~|Θ1czߟNU3f?6f̘]z?GU㯽;8rq/bm%%mt. 
BZZڢ#GLvvmܹ;w.ҷmۦm۶9{_|RRRW)xw XPڸqc7ՙ3gqCh߾}jmm%gG}t`)I^=%`JKK[4mڴ{~ӟFPoN6nqvogq-ZH-]wE1S͝7oy-xWIjr9\j$4͛7O'N8;K,׾5(~~z|~M7XrURP׹1c.C7\=zb *!!A7trssO+::œv=zwFYYY!>ap?znIjs 8/O |ؗ%8p@sQRRV^F )OkҤI;w򗿄l$c.t{}IUTTׂ|ERP{uttiZp^x|>#^Ѝ7ި9s樧G'N7 #֞Bn[;$#\ wРoڲeƍ<~Tբ^%%%SBBl"өcǎ68.\=erss} z.Ѝ7ި^^W<~m͝;WWg}V~aH?Pk֬}ݧt͝;Woxuvv 7@Sgg/Y{}6r, 1#xBxBRVH}\F)FwwO~0\;pAmٲEeee*++Ӿ}dYV͜9S3f̙35sa9pKoo߯/SOO~_k֬Yꫵ`-X@iiié/rC|[]zyrE9Svl?ܳgONlpyߟxGӧiIߗOsW%-u6I_R̈́K%ˑzО={6}t]veJNNVRRx}󩱱QMMMjll<ÇUYY)}R9s.R͛7OQQQù/.7)d%''GssW\q >Kbbze?.E,ZR$Kw>#K')RRwaf>kF\s);p8CQn[)0!!A'NPddiL޽[~i:vikRRΝ<͘1CӧOC4qƍ$HGR9\%p^I#2?IJPߞUYR$>~=G?=y3/5I+3=tXV{46)C7g|u|>iãWOOOjfg3ex9ƅRrNJL̹W&#-RKqS]''B#υSI(WL!'UV.Vo PZ'OCi rw8Sv,gߨ6(}?;[bvoARoP_/'O}|.)A2IQWDv] B:E<^*P񊽰@l.+.{z):)_|RzybA[h6AEr0 HD))^gLfrgIqW<<ѫNoʓ)R;b ,Kr)9FXr-bTmVniN9 ,m RZS{vCi%8{6sCis屝-wGPF} ]n2=J.]X(Mg=4P4~$ 7y0J0{Q zMpth팩Ӂj( eknON&.g-Q\ CshR :j\c[u#˚ף50܇#:c6ף$ֽp`dN |f10|va͊}jK+uQ5@1Tlȵbj\,嘾 Q0wr48*\6n^akM Z( e@zܧ`  _fG,o&L\u~֬OP.#25|N0调\% Un_CDE5ݡn e<'(>>>n`rD=55uЕ zG1zH,ߦԦTL3=h (UL $ m {{;8j+|s+H@6E=4mW]R Q_k_cvX,+ JZigP|;t|o(bcYnr91,yFTqA&'yڄ1ĭpy=tȷ9Qt#/'"=3CoA:Y['flxw+|xtBiew8t^r{ΨϿm c Y|Gؽ{wmۆuֵ;Ýnu2qRVgjʖa&#G#k:—k,_]98KLzGɣ B߈m%xx&0&a}.,A7;&hY~ܜ\4&LX5egc.D1]jJ@rp7>W]@o~b t$@EU؟[ZCb:AŗR~CPm)gϵ1Y h8XّzM*A쾰;+V)O\,0KNCk= SȪ\3JI'Alʅb Nmד&ZȂ9mŢjoMq9}.#J3\>}Zɓٳ""䦛n#F{KtRIKK?Ons̙R^^>צ\C#fB r @\>M{>i˹VX(RT~{/](b:u[{+Ĥ;gGY~J9.ފ"1/g0ӫOc1HX mMj8#fcc:-NxZt9x@bo?std\ VG2gϞ~[Μ9#""䗿ϧ|+q8RRR"˖-+Ჳ:sU]M-lc0I&[`TIAgKC8"|bʖ %&r{eh[^'ٹou>/ os]{q*(`"vRb2[lL31貤4t#YsT[Á($"Yy}!\yp+Z\^o }&>q¶P l,PhG|+2]WD CVJEzkUC m=4`HWI på͢ [*|o m}8\^lpym%:t:х΢ekwEϲWܰoЏ%h獼v PSK> Z}='1""|G>P|0D~.,Y"eeeoKFF:tHEI0O?T^yRydΜ9""zj馛䥗^35QWxP[YtmΕRTCb/: 0ZtJQn:9˹v-lRhwH9];J͡f:+Ki~rF߾W:*R,s^kKۨ|;%[,.[03HB17{L)ji\sb+n)軺d8 6r"=*y,`=RrEzqC"V[B-RNi: B#l儞b{-BXk.wn }n^0\h.=@u<З,'&^Sr]|ϭ3`P&L n[nv9p@(\?r޺͓.eEΝ+EDG{ݿ[ZZdذar9~=zToʁd}^{Io^[]*,cљr]ΧNh\؉CGA TK^n|%^F@ C?9&ɵmS8\^[vq.(m*orVPU/[Cl5+"&:l.6HY:.}i&2巡L7.鞤/,=THʖ⊞vҕ60p֪]!R}N f]rn6/<|cqUhw/c{ey?Nܩ>olE#NV6kG*|d?f߂м{#6aPu@|k|4ez#=}uPFmp 
סItt4^G?c=mÇr1|K1w\|Err2pK.̙3i&[hjj‰'`XB۸q2M7ԓ[q_:c dhG}Wdr̳XQ2|DPEs,m(0.ӶiO>Y?kЛ,xo/m&@<_ y361@lKX:1 Ӭh޾Mt]I} V;BVIƤ֗:+:NI°0djRzVZԺ?@{)p ؙ??@G6OH@ "~E&$t2Q&\&#}lr؟W2c{^8ev}ׯX X9SoQG) ?0ۏf\4Vbժ__hW*<ǶQ^5sMެOk PF;M@yO>x;n:rTxueW\݃#F9@r|nw¯p=dz`/ǘ\<چWqh ~,]wؠ烻ځ9XeF{ t|>x<^x(*,S3?TlT᭗r#,%۸ K.H^<x؋ _Y"e0e'@Z=Qa+.S 7_:- E>'5kx4%\`Wj \%G]<2&r)+vfCMGA8Z«x9_}v-W_ y瞓QFIIIIۜ9s|ߕ … g[N^{Mrrro7|3k&}V塇 S/uU?|j?@\^xn*|D<%yvWwQ j_/-́@ge\5-np C>qٔjS)p$Wqҟ[x eC˩0l-8(7,@D6:P$~k3okf(kWCyy?%/GԾkW~ :~{.[ 1m-T^z{. b/pe|v_:_Xn`k.[ZZ:O?mwߗ_~)< OpNSCu։_>L#uuuuZa/w[oɗ_~)""pi]8'fMIS'/ğ~FuٶP68XPQd "^uŶrQg; ]Xa`(4ۖX0z ]-'XyI)N8v1(}𼀭åϑ9}!hΓ‚\go?F)`V[C*vGz-^"X؝ue^y@ /.^QQvm~nLthݷ𺥺B*CC JKn)2;>ǥL}3(مjk C  :o+ћ;Q7G7f^|NDStCE91XJVl1b6B x*$۠S3>gLf1鳤.$גPpߤ>.쉈;^;-~/ &[AbFđW"V>syvv:@FT&wbC-PkFFѺ147|[ ܫh?WAmm^\žLr(-MP +t~<^T[ <+j;(H.'zw:NX̉?6?Xˆ!(baA 7+1\Qo@FDuf_4kƠ ϩBTج]Kj}>/<^/\R\/4w7~۩DzLX^ <[דK.s<~e8i4LXz+5OιӘ9>wN3m0ǟ`x"oh:4{jj7`%%Q0{1)e hN9  T,<{!:% D֩ٯՋ`%%;ч[.C16!>[d-P?PGZMgͺ{pO1\Q_2E53@~!Ϣ֩AnVᒈ.6}6r`ywz5|Zq _rK"""""g&ˋƥ겳.4>nPV5Oۆmwً4 <@iV,(`-ϧ EPX2U#sLFLkIYvf%[."3̙1{r3{&R eK1C:T,u3EX_G6n[2a'ՖXYy`\zuW#s CؼIb,ʜn15*_S'gA sKY. }Yv&%%cƌa)%'Nۻ(++CII JJJh:Zi΃11ӂ1"T8K1`f6V{Zl؈Y+o#ɷ_?(Ɲ_ba1j뿄mU.v=Ң\rK IDAT3=FQi1Ҷ梄ۥ~BeH>#a%]jg].vp N'?uVIIIߋ~Ǯ>wDD}t{^[l"^*RN`_bn@'nuvyD@gحr+Tn m۷u3m Xlzz=o'[. 
,Ȓ<}Ye, ɋ_ #vs,f { t|>x<^x(*,S3Ceˋ~ DDtEԩS,r ]V~, H;d+X@=V>œiɝ`o]#2ӖCHN>Y/Bxr4_a ~Xjj#>;,t:]֗_~ t䯐 _azН fh*\LBs TͅiZi >ŔF1=QǯVji@%@ ̏h4J{-A3=V(gjH -,^ݭsz cǎ]u?|h~"~q_|.;01TL9f]rIDDWeϞ="үjއ~;`J/s <#G uNגΒrꫯX(9},/smBB> m#9cY,;zUrذa;4$~v}۰av=zADW/|̙3NJJ}ʹibL)Spz(Ӊ[> ]VKa2.=dȐÇoOO"r…Ç7 2@||BԥrPZ.g @zu~ 5\v˨c璈sH73׮]؉'?iiiCnv~HJJo9>s뭷%$$T8q@ PV%;8 Wd5LiPK"""R^7xA͌3455}sРAuuu/m6ٴiSޯ_|v?ɏ9\ $CiɌ0mᒈ(5f׿O?tj||@ 0ɓCccc[RRRFBzzzLzz655)))F\\\ԩS|N>C555~t8rHl 7pcqqq_8qbSGVAiPȕPZ*WNAg.eN5Xݝ/Xό%P:2Xb!"iJj8&666CE$ٳڳgƶĞ9s&%&--o߿0***ؿ@~k4_LLLoOƑ#GpA8p-jhhFEE te[ccgI~\DpIDDD3{w||@`B02vؓwuvʔ)I&!##C i WDB^ʢ-رcQ}qNl⯮jTz;s&[qQ$Z@nKX$DDEzNAllWNo>M+׿>=~xV=,}%1\v!KzWcǎI$ihhիWC=tBv /DDpIDD Whljjg-[vjӦMҗlڴIrss}gFBDD DDpy馛:o!$믋N󥦦:/DDpIDD K5j'w}QF} ᒈ.2̘1ce-[0A^!qqqǍSKޭkt wqwy' L6 N[_IOO_!"-DD'DOOOwgy劊|н l$""K""aavtn*)))_3\>,*L8^x!eĉ,.0e,YI&XD DDDD}Bjj'M4},.rˌ#G.di1\E~okY]vHfi1\E~ƒ>ַbccȒ b$"""X)))DZ$nXD DDDDرc >ѽǏ̒ b$"""Xњ#G NCTTW."LJٳэ ?~|Kᒈ(vB7Zn ᒈ(^Ѝy%QX`Ĉ(++cA1\EaÆaÆ HNNFKK 9sC ֭[1d[1\F_|o}[Xt) *,YgFMM n&%Q2l0|'_f\^z QQQ߿?>c$&&P.jɓԩS3`˖-,|g/~ɓ'/S""깖 QD eԩ*fYe6m~ZRSS宻W^yE?~u~eE@}$\Z`" F>WWWw}. 
SNԩSqJ0Ė-[ga˖-8}4=~ziMW^BpIpIDD W.:t#lقcԩ9s&nFddd`Ĉþ}p!vlٲwԩSC!z/`ᒈᒈᒈ./۱ebػw/p7###7oÆ CRR CvQb"ȸ=R`RY/,gYgљ,RtKE/NJ(D<61,s8nUNf RǮg(W_#7 Ȫ.%%QD"'\%1_&)Wr.ⵋbWhUgQǫˬPhyeKY'0O]]j@/6[,:5lV@g7%Q$Ȉ҈C|l/zp+-G=@  bt@ZFJDh1zBA=ŵ:ZuAдGwbC-Pk ]vkncY㈈ᒈ"RJļ=z$oS14)V11H0-t0=5#ExڠActܶ]u JA2S>.G) i')!"K"""D#$B6 Q~#jF`l*hL9z 8[x[s3\%p03@Nu2%RmfelY=1\sIDDDd mjxϠLSr%ɮ0@a!3: -u'y(~z&k] %a["" 1Bc`9@YW>( kԭMW G"T~lͺqR VELDԆu,V ~(Wx@j6ܸy949 XVVy\xwKA4n^f/PM $71\QW ]"lC͉ Ny9}euWn,XQEdV%8D DDDDPZ._͏1adL6OTI?`Řjxe{zuX(SmLÓJpnca/mGHSdZw/[ҔydYZrRCoHDL'lwWN!""(]Quxz *N,ER\T$Ey@oy@iCxEԬ@'yv)b{fыou}NxEęob)([UrjĠRhUux]jb4Hiq bt0IEy&FéHᒈ.a@N':]>*Kln)((n)2:v1]Xn+9&ɵn5tB)O,:b =jP"u[>b~5%ļ* > !e<_ggnDOXj\ch u#ڋ2l[ ܫh?WAmжԘ^#VaA{Q^Mh2T1\uz'MXiX׌Aq >TfM &@XF?J.u@ U k`qI d||>/<^/\R_@%(6 aG 1ot3-E-bGy1\ѵф]~}9V,~*`[<0cS5@<М0 s @syl|92yE.%f/AA lPw)x~\_U<&`+6XnJ +!CD ^8;/E$$"lϥב+Љ.>: -+[lGgXLar #"v^:NB%[<qxEDi[;PU<"<.hx^91\ WK䓣,nݺ@ Kᒈ(jll|G?E}ɓ'gi1\EܹssAA ؽ{n;A{h$FZ"ְS'"raÇ?T^^2qD~Wt;ÇBrIDDDt<O>X{UXv̛7u, "K"""eyM>hqN[, Y%QU[[;c>hРY `0`_d1\/bɓ'?g̘q⣏8McG`04eeem|CwK"(#ޖkODY2=EEE?bĈmiii7x"믋N󥦦:/D ppIDD nQދ>۴iS:شi,[TttQF /D ppIDD WN/IIIZ:zj9vXDW444ի塇:jIIDATII(rqKR*$".{>,KJJAss쌌`vvI&aѽ۷;wDMM͙UV5\744Y_.ᒈ.'$$< '!cǎ=y]wiL4i2220dȐ;}ap:-[j la}!b$KK""b_~ |8bbbj~?|>y|8}4}tTτ<;$%0 O^Pz޳ևiWh2<4c+kզ p=A`mj d&LH00ԩSohhmIoK:M@@_%͒ğ233 333祤XcXP1&}pp0uhh(m`` epp0(gO2%ܟ2eJbD"O%uJ0tJ_O2%W"%IR__---EQ* J%p \~|NˍzzʲaԽS_Xqg9ZJl߱IWʕ_ӣ-pyŊź%TiPcC|^Ueuzmm_5K:%EcgĞ&I )4׹A|;!]銚)gV.эKh-'I'NGsZ9 <{G3[bk*/%e'VhsThb)٫mxzVZ,ϔK m}q=TW_&mP} $2" ?^NXX숶\6]A" Iz?ġ:mܩQci]zze6H-Iڻi[䪮9׭S@Uӟ=[۳E%*-/ia酧7IUaLx\=\~([u{$`GD7'rcg%'-߳MrTj볏*]2)tnԡ-<_t<ͯ;$I%ՍzGڹ!-I_1$0/Nfd?f꧶s\{5~_)}dD$j*#rwKYweJh *USjmӹ.ǏeZfz{oDF.=qC琪l)xwӨ5_y| C Bjի)f J+-q{tdz啫GaܹS;wЦ'%WtWW1"u22x,H\쐺9[R~.zgbߣ~}QNIRSmߥH_ז`>py{.SS$|޵ګ-RY!s\QێzīgUnCI6CRG˷kn{AI;GeϪtSo9򞫶QMs]A6&% /iiiVuAJJJtcLP`P@@JQQQ/HNNONN%%%ELMJJX,prrG=-hMRǨ%H|$cDzHhD#_OUQA9jWJ)S]1Iku:2o?l_ʺ%/<}tnVVVꫯ͙3GsM+**J/((W_Teddw ]b=Hkkk:zhӧS-{%䧿Kr\ʖtj===}EOOϢ%K 9KZϟ|߯޽g޽IYYYG"=^IJꦿK. 
\~~zoaii颢ŋkŚ6ʿϦ[Ӿ}Ol޼*zԩS/b-Z/KߗVgddd]6ui+W0'7P]]]_ខ(p %K.[,{Ŋ׬Z*뮛'Y[nڳgϑ?OP(h4ZO \@ rssKNNniO>q뭷No詧 ߿{``O8L?pyř:uCVTIII/߿߬]6lZCSN}L \"pyEVXXN?oܸ1'??3tz'Cn_%.ϐ}4>k}G83hؓKp %6\.Z}>_fAAg3joo׊+ӧO_oK`Ifg͚UsaElȘ9s拋-ZKEK;# [淶{TN1cƌoqmh]Ѯ]R8JKIa֬Y/oܸq (eh"=/Q `X;Ư/2Es}С\N(,, /2 01r p222zTb|߼*33!*.&VZŽchժU)999wP p 0aBfΜI!P~~Q p 0aj֬Yb ͞=[))) .&'O洴P1t}{ߣ`[fVZxb\Fk׮ڵkzj.&G7 :t\}_׷mYE1P,xco>YFNS<23393W?_7`dK/k\XXx_V py{>8#͛7O_󕗗7̘1|HD]]]裏u?Pg jѢEZt222.ߗU%@ \^uᑐw8}t(--휋nWSSYK$Qnn;v:zΜ9Z`͛|Y%\K_)o_^?)3,,i4LxWm|3`ʝU6ɸ<l5[|]ø}#7דa]eeROeIFw._#\Kʝ82dAQRfjLCsPcL}CԻF㫫01&艇EɔW_ `\m HvS4&otOmE"9L}ИxpuUO;~\nx %}GQҲ.1VS.3.Mms2vkLk)kٴids`(.KGBxZCƄC[5.=6*N/7x%j0L`|˅F%\HIv*UlۢuST1)SH2,ڛu8*$lY25 .eO7hx4kѴ]RSSEENn)+mw#g9(imvkznoD4-@?aʖ4-w-dNsߊŷWSIk} L#B)KsKMޏMl#k$W WSN՝*p p.sțRIl]D߻E-][,EݬRWjء/+F{$IW^_$nvK6hNmgloҖ^LS^={Яr3Jz &ycvaΜRr L]ɟ%evJvThK[IHzlje7$J_vPMp p1(~|!)3tݿTC%%ieáǒ~,甽UQ~t]pY=^7VwgqҭOv89*ͼK?Q.Iy.RFI%ź%TiPcC|^Ueuzm6ǰi9qT^^GR4h4h4FHsO@ zFry=+$SW2ܒ6Go^9`ˋOQҊ:2x\vc+Unk1a,3%#StW+1ߥLIET#mNSRl3xcr8.mU]kjkMmM)2ƘpCpi7Ш7S,kLK*1:6%:TcnȺx0 ֏N~@(іwqd=o {8>yg6lQo&F2 x`٥^ܿ]VQsMRSuoі7;hTRF[[=/e+ORqOpHPHz<Ԍ&bёCJUz<1~jo|se t\Vy5^T<ն/KUi j73d*2rLc(lZ=)\?ЧDF*6PTVnƘϔJF5/td=X]l$ 5\zv#9L]sЄCf$,<\|g_*)1&P=XI4uNrmEKeX,|gK.ɂ%p @.K%p \@.K%%X`*PgKKٙR1tȑiT \LXA 19w@N<ݲeKJW^y%vש0>X(En{ZZZr)ؘ;wn/tjW>F..Ns4QUU5D)._C &~3k֬]%%XpRʕ+oᆿ&kv]wu1Kwwp CF..с^>r7g͚nkk СCk"Ǐ_+0i͚5kӻy ?_&///4{y"H;wғ@)?|r޼ykN>++~I[ O:DNN:z ;¯ڤN>az0X(KKK뜜,Y2[n_jUnwSSn:oݻwG'Oa4498u?JzKaN%4]R𒞞^xUW-HKKߟkJJLHL-,,lkkҔ)Sbɱ)SDIIINMM=DO>D$uZNPv}a?:?IENDB`buildbot-3.4.0/master/docs/developer/auth.rst000066400000000000000000000137201413250514000212230ustar00rootroot00000000000000Authentication ============== Buildbot's HTTP authentication subsystem supports a rich set of information about users: * User credentials: Username and proof of ownership of that username. * User information: Additional information about the user, including * email address * full name * group membership * Avatar information: a small image to represent the user. 
Buildbot's authentication subsystem is designed to support several authentication modes: * Simple username/password authentication. The Buildbot UI prompts for a username and password and the backend verifies them. * External authentication by an HTTP Proxy. An HTTP proxy in front of Buildbot performs the authentication and passes the verified username to Buildbot in an HTTP Header. * Authentication by a third-party website. Buildbot sends the user to another site such as GitHub to authenticate and receives a trustworthy assertion of the user's identity from that site. Implementation -------------- Authentication is implemented by an instance of :py:class:`~buildbot.www.auth.AuthBase`. This instance is supplied directly by the user in the configuration file. A reference to the instance is available at ``self.master.www.auth``. Username / Password Authentication ---------------------------------- In this mode, the Buildbot UI displays a form allowing the user to specify a username and password. When this form is submitted, the UI makes an AJAX call to ``/auth/login`` including HTTP Basic Authentication headers. The master verifies the contents of the header and updates the server-side session to indicate a successful login or to contain a failure message. Once the AJAX call is complete, the UI reloads the page, re-fetching ``/config.js``, which will include the username or failure message from the session. Subsequent access is authorized based on the information in the session; the authentication credentials are not sent again. External Authentication ----------------------- Buildbot's web service can be run behind an HTTP proxy. Many such proxies can be configured to perform authentication on HTTP connections before forwarding the request to Buildbot. In these cases, the results of the authentication are passed to Buildbot in an HTTP header. 
In this mode, authentication proceeds as follows: * The web browser connects to the proxy, requesting the Buildbot home page * The proxy negotiates authentication with the browser, as configured * Once the user is authenticated, the proxy forwards the request and the request goes to the Buildbot web service. The request includes a header, typically ``Remote-User``, containing the authenticated username. * Buildbot reads the header and optionally connects to another service to fetch additional user information about the user. * Buildbot stores all of the collected information in the server-side session. * The UI fetches ``/config.js``, which includes the user information from the server-side session. Note that in this mode, the HTTP proxy will send the header with every request, although it is only interpreted during the fetch of ``/config.js``. Kerberos Example ~~~~~~~~~~~~~~~~ Kerberos is an authentication system which allows passwordless authentication on corporate networks. Users authenticate once on their desktop environment, and the OS, browser, webserver, and corporate directory cooperate in a secure manner to share the authentication to a webserver. This mechanism only takes care of the authentication problem, and no user information is shared other than the username. The kerberos authentication is supported by an Apache front-end in ``mod_kerberos``. Third-Party Authentication -------------------------- Third-party authentication involves Buildbot redirecting a user's browser to another site to establish the user's identity. Once that is complete, that site redirects the user back to Buildbot, including a cryptographically signed assertion about the user's identity. The most common implementation of this sort of authentication is oAuth2. Many big internet service companies are providing oAuth2 services to identify their users. Most oAuth2 services provide authentication and user information in the same API. 
The following process is used for third-party authentication: * The web browser connects to the Buildbot UI * A session cookie is created, but the user is not yet authenticated. The UI adds a widget entitled ``Login via GitHub`` (or whatever third party is configured) * When the user clicks on the widget, the UI fetches ``/auth/login``, which returns a bare URL on ``github.com``. The UI loads that URL in the browser, with an effect similar to a redirect. * GitHub authenticates the user, if necessary, and requests permission for Buildbot to access the user's information. * On success, the GitHub web page redirects back to Buildbot's ``/auth/login?code=..``, with an authentication code. * Buildbot uses this code to request more information from GitHub, and stores the results in the server-side session. Finally, Buildbot returns a redirect response, sending the user's browser to the root of the Buildbot UI. The UI code will fetch ``/config.js``, which contains the login data from the session. Logout ------ A "logout" button is available in the simple and third-party modes. Such a button doesn't make sense for external authentication, since the proxy will immediately re-authenticate the user. This button fetches ``/auth/logout``, which destroys the server-side session. After this point, any stored authentication information is gone and the user is logged out. Future Additions ---------------- * Browserid/Persona: This method is very similar to oauth2, and should be implemented in a similar way (i.e. two stage redirect + token-verify) * Use the User table in db: This is a very similar to the UserPasswordAuth use cases (form + local db verification). Eventually, this method will require some work on the UI in order to populate the db, add a "register" button, verification email, etc. This has to be done in a ui plugin. 
buildbot-3.4.0/master/docs/developer/authz.rst000066400000000000000000000017241413250514000214160ustar00rootroot00000000000000Authorization ============= Buildbot authorization is designed to address the following requirements: - Most of the configuration is only data: We avoid to require from the user to write callbacks for most of the use cases. This is to allow to load the config from yaml or json, and eventually do a UI for authorization config. - Separation of concerns: * Mapping users to roles * Mapping roles to REST endpoints - Configuration should not need hardcoding endpoint paths - Easy to extend Use cases --------- - Members of admin group should have access to all resources and actions - Developers can run the "try" builders - Integrators can run the "merge" builders - Release team can run the "release" builders - There are separate teams for different branches or projects, but the roles are identical - Owners of builds can stop builds or buildrequests - Secret branch's builds are hidden from people except explicitly authorized buildbot-3.4.0/master/docs/developer/br-claiming.rst000066400000000000000000000057371413250514000224570ustar00rootroot00000000000000.. TODO: replace generic references here with refs to specific bb:msg's .. _Claiming-Build-Requests: Claiming Build Requests ======================= At Buildbot's core, there exists a distributed job (build) scheduling engine. Future builds are represented by build requests, which are created by schedulers. When a new build request is created, it is added to the ``buildrequests`` table and an appropriate message is sent. Distributing ------------ Each master distributes build requests among its builders by examining the list of available build requests and workers, and accounting for user configuration on build request priorities, worker priorities, and so on. This distribution process is re-run whenever an event occurs that may allow a new build to start. 
Such events can be signalled to master with: * :py:meth:`~buildbot.process.botmaster.BotMaster.maybeStartBuildsForBuilder` when a single builder is affected; * :py:meth:`~buildbot.process.botmaster.BotMaster.maybeStartBuildsForWorker` when a single worker is affected; or * :py:meth:`~buildbot.process.botmaster.BotMaster.maybeStartBuildsForAllBuilders` when all builders may be affected. In particular, when a master receives a new buildrequests message, it performs the equivalent of :py:meth:`~buildbot.process.botmaster.BotMaster.maybeStartBuildsForBuilder` for the affected builder. Claiming -------- If circumstances are right for a master to begin a build, then it attempts to "claim" the build request. In fact, if several build requests were merged, it attempts to claim them as a group, using the :py:meth:`~buildbot.db.buildrequests.BuildRequestDistributor.claimBuildRequests` DB method. This method uses transactions and an insert into the ``buildrequest_claims`` table to ensure that exactly one master succeeds in claiming any particular build request. If the claim fails, then another master has claimed the affected build requests, and the attempt is abandoned. If the claim succeeds, then the master sends a message indicating that it has claimed the request. This message can be used by other masters to abandon their attempts to claim this request, although this is not yet implemented. If the build request is later abandoned (as can happen if, for example, the worker has disappeared), then master will send a message indicating that the request is again unclaimed; like a new buildrequests message, this message indicates that other masters should try to distribute it once again. The One That Got Away --------------------- The claiming process is complex, and things can go wrong at just about any point. Through master failures or message/database race conditions, it's quite possible for a build request to be "missed", even when resources are available to process it. 
To account for this possibility, masters periodically poll the ``buildrequests`` table for unclaimed requests and try to distribute them. This resiliency avoids "lost" build requests, at the small cost of a polling delay before the requests are scheduled. buildbot-3.4.0/master/docs/developer/classes.rst000066400000000000000000000013101413250514000217070ustar00rootroot00000000000000Classes ======= The sections contained here document classes that can be used or subclassed. .. note:: Some of this information duplicates information available in the source code itself. Consider this information authoritative, and the source code a demonstration of the current implementation which is subject to change. .. toctree:: :maxdepth: 1 cls-build cls-worker cls-buildfactory cls-changesources cls-remotecommands cls-buildsteps cls-basescheduler cls-forcesched cls-irenderable cls-iproperties cls-iconfigurator cls-resultspec cls-protocols cls-workermanager cls-log cls-logobserver cls-auth cls-avatar cls-www buildbot-3.4.0/master/docs/developer/cls-auth.rst000066400000000000000000000105051413250514000220000ustar00rootroot00000000000000Authentication ============== .. py:module:: buildbot.www.auth .. py:class:: AuthBase This class is the base class for all authentication methods. All authentications are not done at the same level, so several optional methods are available. This class implements a default implementation. The login session is stored via twisted's ``request.getSession()``, and detailed used information is stored in ``request.getSession().user_info``. The session information is then sent to the UI via the ``config`` constant (in the ``user`` attribute of ``config``). .. py:attribute:: userInfoProvider Authentication modules are responsible for providing user information as detailed as possible. When there is a need to get additional information from another source, a userInfoProvider can optionally be specified. .. 
py:method:: reconfigAuth(master, new_config) :param master: the reference to the master :param new_config: the reference to the new configuration Reconfigure the authentication module. In the base class, this simply sets ``self.master``. .. py:method:: maybeAutoLogin(request) :param request: the request object :returns: Deferred This method is called when ``/config.js`` is fetched. If the authentication method supports automatic login, e.g., from a header provided by a frontend proxy, this method handles the login. If it succeeds, the method sets ``request.getSession().user_info``. If the login fails unexpectedly, it raises ``resource.Error``. The default implementation simply returns without setting ``user_info``. .. py:method:: getLoginResource() Return the resource representing ``/auth/login``. .. py:method:: getLogout() Return the resource representing ``/auth/logout``. .. py:method:: updateUserInfo(request) :param request: the request object Separate entrypoint for getting user information. This is a means to call self.userInfoProvider if provided. .. py:class:: UserInfoProviderBase Class that can be used, to get more info for the user, like groups, from a separate database. .. py:method:: getUserInfo(username) :returns: the user info for the username used for login, via a Deferred Returns a :py:class:`dict` with following keys: * ``email``: email address of the user * ``full_name``: Full name of the user, like "Homer Simpson" * ``groups``: groups the user belongs to, like ["duff fans", "dads"] .. py:module:: buildbot.www.oauth2 .. py:class:: OAuth2Auth OAuth2Auth implements oauth2 two-factor authentication. With this method, ``/auth/login`` is called twice. The first time (without argument), it should return the URL the browser has to redirect to in order to perform oauth2 authentication and authorization. Then the oauth2 provider will redirect to ``/auth/login?code=???`` and the Buildbot web server will verify the code using the oauth2 provider. 
Typical login process is: * UI calls the ``/auth/login`` API and redirects the browser to the returned oauth2 provider URL * oauth2 provider shows a web page with a form for the user to authenticate, and asks the user for permission for Buildbot to access their account * oauth2 provider redirects the browser to ``/auth/login?code=???`` * OAuth2Auth module verifies the code, and gets the user's additional information * Buildbot UI is reloaded, with the user authenticated This implementation uses requests_. Subclasses must override the following class attributes: * ``name``: Name of the oauth plugin * ``faIcon``: Font awesome class to use for login button logo * ``resourceEndpoint``: URI of the resource where the authentication token is used * ``authUri``: URI the browser is pointed to to let the user enter credentials * ``tokenUri``: URI to verify the browser code and get auth token * ``authUriAdditionalParams``: Additional parameters for the authUri * ``tokenUriAdditionalParams``: Additional parameters for the tokenUri .. py:method:: getUserInfoFromOAuthClient(self, c) This method is called after a successful authentication to get additional information about the user from the oauth2 provider. .. _requests: https://requests.readthedocs.io/en/master/ buildbot-3.4.0/master/docs/developer/cls-avatar.rst000066400000000000000000000013121413250514000223110ustar00rootroot00000000000000Avatars ======= Buildbot's avatar support associates a small image with each user. .. py:module:: buildbot.www.avatar .. py:class:: AvatarBase This class can be used to get the avatars for the users. It can be used for authenticated users, but also for the users referenced in changes. .. py:method:: getUserAvatar(self, email, size, defaultAvatarUrl) :returns: the user's avatar, from the user's email (via Deferred). If the data is directly available, this function returns a tuple ``(mime_type, picture_raw_data)``. 
If the data is available in another URL, this function can raise ``resource.Redirect(avatar_url)``, and the web server will redirect to the avatar_url. buildbot-3.4.0/master/docs/developer/cls-basescheduler.rst000066400000000000000000000212361413250514000236530ustar00rootroot00000000000000BaseScheduler ------------- .. py:module:: buildbot.schedulers.base .. py:class:: BaseScheduler This is the base class for all Buildbot schedulers. See :ref:`Writing-Schedulers` for information on writing new schedulers. .. py:method:: __init__(name, builderNames, properties={}, codebases={'':{}}) :param name: (positional) the scheduler name :param builderName: (positional) a list of builders, by name, for which this scheduler can queue builds :param properties: a dictionary of properties to be added to queued builds :param codebases: the codebase configuration for this scheduler (see user documentation) Initializes a new scheduler. The scheduler configuration parameters, and a few others, are available as attributes: .. py:attribute:: name This scheduler's name. .. py:attribute:: builderNames :type: list Builders for which this scheduler can queue builds. .. py:attribute:: codebases :type: dict The codebase configuration for this scheduler. .. py:attribute:: properties :type: Properties instance Properties that this scheduler will attach to queued builds. This attribute includes the ``scheduler`` property. .. py:attribute:: schedulerid :type: integer The ID of this scheduler in the ``schedulers`` table. Subclasses can consume changes by implementing :py:meth:`gotChange` and calling :py:meth:`startConsumingChanges` from :py:meth:`startActivity`. .. 
py:method:: startConsumingChanges(self, fileIsImportant=None, change_filter=None, onlyImportant=False) :param fileIsImportant: a callable provided by the user to distinguish important and unimportant changes :type fileIsImportant: callable :param change_filter: a filter to determine which changes are even considered by this scheduler, or ``None`` to consider all changes :type change_filter: :py:class:`buildbot.changes.filter.ChangeFilter` instance :param onlyImportant: If True, only important changes, as specified by fileIsImportant, will be added to the buildset :type onlyImportant: boolean :return: Deferred Subclasses should call this method when becoming active in order to receive changes. The parent class will take care of filtering the changes (using ``change_filter``) and (if ``fileIsImportant`` is not None) classifying them. .. py:method:: gotChange(change, important) :param buildbot.changes.changes.Change change: the new change :param boolean important: true if the change is important :return: Deferred This method is called when a change is received. Schedulers which consume changes should implement this method. If the ``fileIsImportant`` parameter to ``startConsumingChanges`` was None, then all changes are considered important. It is guaranteed that the ``codebase`` of the change is one of the scheduler's codebases. .. note:: The :py:class:`buildbot.changes.changes.Change` instance will instead be a change resource in later versions. The following methods are available for subclasses to queue new builds. Each creates a new buildset with a build request for each builder. .. 
py:method:: addBuildsetForSourceStamps(self, sourcestamps=[], waited_for=False, reason='', external_idstring=None, properties=None, builderNames=None) :param list sourcestamps: a list of full sourcestamp dictionaries or sourcestamp IDs :param boolean waited_for: if true, this buildset is being waited for (and thus should continue during a clean shutdown) :param string reason: reason for the build set :param string external_idstring: external identifier for the buildset :param properties: properties to include in the buildset, in addition to those in the scheduler configuration :type properties: :py:class:`~buildbot.process.properties.Properties` instance :param list builderNames: a list of builders for the buildset, or None to use the scheduler's configured ``builderNames`` :returns: (buildset ID, buildrequest IDs) via Deferred Add a buildset for the given source stamps. Each source stamp must be specified as a complete source stamp dictionary (with keys ``revision``, ``branch``, ``project``, ``repository``, and ``codebase``), or an integer ``sourcestampid``. The return value is a tuple. The first tuple element is the ID of the new buildset. The second tuple element is a dictionary mapping builder name to buildrequest ID. .. 
py:method:: addBuildsetForSourceStampsWithDefaults(reason, sourcestamps, waited_for=False, properties=None, builderNames=None) :param string reason: reason for the build set :param list sourcestamps: partial list of source stamps to build :param boolean waited_for: if true, this buildset is being waited for (and thus should continue during a clean shutdown) :param dict properties: properties to include in the buildset, in addition to those in the scheduler configuration :type properties: :py:class:`~buildbot.process.properties.Properties` instance :param list builderNames: a list of builders for the buildset, or None to use the scheduler's configured ``builderNames`` :returns: (buildset ID, buildrequest IDs) via Deferred, as for :py:meth:`addBuildsetForSourceStamps` Create a buildset based on the supplied sourcestamps, with defaults applied from the scheduler's configuration. The ``sourcestamps`` parameter is a list of source stamp dictionaries, giving the required parameters. Any unspecified values, including sourcestamps from unspecified codebases, will be filled in from the scheduler's configuration. If ``sourcestamps`` is None, then only the defaults will be used. If ``sourcestamps`` includes sourcestamps for codebases not configured on the scheduler, they will be included anyway, although this is probably a sign of an incorrect configuration. .. 
py:method:: addBuildsetForChanges(waited_for=False, reason='', external_idstring=None, changeids=[], builderNames=None, properties=None) :param boolean waited_for: if true, this buildset is being waited for (and thus should continue during a clean shutdown) :param string reason: reason for the build set :param string external_idstring: external identifier for the buildset :param list changeids: changes from which to construct the buildset :param list builderNames: a list of builders for the buildset, or None to use the scheduler's configured ``builderNames`` :param dict properties: properties to include in the buildset, in addition to those in the scheduler configuration :type properties: :py:class:`~buildbot.process.properties.Properties` instance :returns: (buildset ID, buildrequest IDs) via Deferred, as for :py:meth:`addBuildsetForSourceStamps` Add a buildset for the given changes (``changeids``). This will take sourcestamps from the latest of any changes with the same codebase, and will fill in sourcestamps for any codebases for which no changes are included. The active state of the scheduler is tracked by the following attribute and methods. .. py:attribute:: active True if this scheduler is active .. py:method:: activate() :returns: Deferred Subclasses should override this method to initiate any processing that occurs only on active schedulers. This is the method from which to call ``startConsumingChanges``, or to set up any timers or message subscriptions. .. py:method:: deactivate() :returns: Deferred Subclasses should override this method to stop any ongoing processing, or wait for it to complete. The method's returned Deferred should not fire until the processing is complete. The state-manipulation methods are provided by :py:class:`buildbot.util.state.StateMixin`. Note that no locking of any sort is performed between these two functions. They should *only* be called by an active scheduler. .. 
py:method:: getState(name[, default]) :param name: state key to fetch :param default: default value if the key is not present :returns: Deferred This calls through to :py:meth:`buildbot.db.state.StateConnectorComponent.getState`, using the scheduler's objectid. .. py:method:: setState(name, value) :param name: state key :param value: value to set for the key :returns: Deferred This calls through to :py:meth:`buildbot.db.state.StateConnectorComponent.setState`, using the scheduler's objectid. buildbot-3.4.0/master/docs/developer/cls-build.rst000066400000000000000000000020431413250514000221340ustar00rootroot00000000000000Builds ====== .. py:module:: buildbot.process.build The :py:class:`Build` class represents a running build, with associated steps. Build ----- .. py:class:: Build .. py:attribute:: buildid The ID of this build in the database. .. py:method:: getSummaryStatistic(name, summary_fn, initial_value=None) :param name: statistic name to summarize :param summary_fn: callable with two arguments that will combine two values :param initial_value: first value to pass to ``summary_fn`` :returns: summarized result This method summarizes the named step statistic over all steps in which it exists, using ``combination_fn`` and ``initial_value`` to combine multiple results into a single result. This translates to a call to Python's ``reduce``:: return reduce(summary_fn, step_stats_list, initial_value) .. py:method:: getUrl() :returns: URL as string Returns url of the build in the UI. Build must be started. This is useful for custom steps. buildbot-3.4.0/master/docs/developer/cls-buildfactory.rst000066400000000000000000000076341413250514000235370ustar00rootroot00000000000000BuildFactory ============ BuildFactory Implementation Note -------------------------------- The default :class:`BuildFactory`, provided in the :mod:`buildbot.process.factory` module, contains an internal list of `BuildStep specifications`: a list of ``(step_class, kwargs)`` tuples for each. 
These specification tuples are constructed when the config file is read, by asking the instances passed to :meth:`addStep` for their subclass and arguments. To support config files from Buildbot version 0.7.5 and earlier, :meth:`addStep` also accepts the ``f.addStep(shell.Compile, command=["make","build"])`` form, although its use is discouraged because then the ``Compile`` step doesn't get to validate or complain about its arguments until build time. The modern pass-by-instance approach allows this validation to occur while the config file is being loaded, where the admin has a better chance of noticing problems. When asked to create a :class:`Build`, the :class:`BuildFactory` puts a copy of the list of step specifications into the new :class:`Build` object. When the :class:`Build` is actually started, these step specifications are used to create the actual set of :class:`BuildStep`\s, which are then executed one at a time. This serves to give each Build an independent copy of each step. Each step can affect the build process in the following ways: * If the step's :attr:`haltOnFailure` attribute is ``True``, then a failure in the step (i.e. if it completes with a result of ``FAILURE``) will cause the whole build to be terminated immediately: no further steps will be executed, with the exception of steps with :attr:`alwaysRun` set to ``True``. :attr:`haltOnFailure` is useful for setup steps upon which the rest of the build depends: if the CVS checkout or :command:`./configure` process fails, there is no point in trying to compile or test the resulting tree. * If the step's :attr:`alwaysRun` attribute is ``True``, then it will always be run, regardless of if previous steps have failed. This is useful for cleanup steps that should always be run to return the build directory or worker into a good state. * If the :attr:`flunkOnFailure` or :attr:`flunkOnWarnings` flag is set, then a result of ``FAILURE`` or ``WARNINGS`` will mark the build as a whole as ``FAILED``. 
However, the remaining steps will still be executed. This is appropriate for things like multiple testing steps: a failure in any one of them will indicate that the build has failed, however it is still useful to run them all to completion. * Similarly, if the :attr:`warnOnFailure` or :attr:`warnOnWarnings` flag is set, then a result of ``FAILURE`` or ``WARNINGS`` will mark the build as having ``WARNINGS``, and the remaining steps will still be executed. This may be appropriate for certain kinds of optional build or test steps. For example, a failure experienced while building documentation files should be made visible with a ``WARNINGS`` result but not be serious enough to warrant marking the whole build with a ``FAILURE``. In addition, each :class:`Step` produces its own results, may create logfiles, etc. However only the flags described above have any effect on the build as a whole. The pre-defined :class:`BuildStep`\s like :class:`CVS` and :class:`Compile` have reasonably appropriate flags set on them already. For example, without a source tree there is no point in continuing a build, so the :class:`CVS` class has the :attr:`haltOnFailure` flag set to ``True``. Look in :file:`buildbot/steps/*.py` to see how the other :class:`Step`\s are marked. Each :class:`Step` is created with an additional ``workdir`` argument that indicates where its actions should take place. This is specified as a subdirectory of the worker's base directory, with a default value of :file:`build`. This is only implemented as a step argument (as opposed to simply being a part of the base directory) because the CVS/SVN steps need to perform their checkouts from the parent directory. buildbot-3.4.0/master/docs/developer/cls-buildsteps.rst000066400000000000000000000670231413250514000232240ustar00rootroot00000000000000BuildSteps ========== .. py:module:: buildbot.process.buildstep There are a few parent classes that are used as base classes for real buildsteps. 
This section describes the base classes. The "leaf" classes are described in :doc:`../manual/configuration/steps/index`. See :ref:`Writing-New-BuildSteps` for a guide to implementing new steps. BuildStep --------- .. py:class:: BuildStep(name, description, descriptionDone, descriptionSuffix, locks, haltOnFailure, flunkOnWarnings, flunkOnFailure, warnOnWarnings, warnOnFailure, alwaysRun, progressMetrics, useProgress, doStepIf, hideStepIf) All constructor arguments must be given as keyword arguments. Each constructor parameter is copied to the corresponding attribute. .. py:attribute:: name The name of the step. Note that this value may change when the step is started, if the existing name was not unique. .. py:attribute:: stepid The ID of this step in the database. This attribute is not set until the step starts. .. py:attribute:: description The description of the step. .. py:attribute:: descriptionDone The description of the step after it has finished. .. py:attribute:: descriptionSuffix Any extra information to append to the description. .. py:attribute:: locks List of locks for this step; see :ref:`Interlocks`. .. py:attribute:: progressMetrics List of names of metrics that should be used to track the progress of this build and build ETA's for users. .. py:attribute:: useProgress If true (the default), then ETAs will be calculated for this step using progress metrics. If the step is known to have unpredictable timing (e.g., an incremental build), then this should be set to false. .. py:attribute:: doStepIf A callable or bool to determine whether this step should be executed. See :ref:`Buildstep-Common-Parameters` for details. .. py:attribute:: hideStepIf A callable or bool to determine whether this step should be shown in the waterfall and build details pages. See :ref:`Buildstep-Common-Parameters` for details. The following attributes affect the behavior of the containing build: .. 
py:attribute:: haltOnFailure If true, the build will halt on a failure of this step, and not execute subsequent steps (except those with ``alwaysRun``). .. py:attribute:: flunkOnWarnings If true, the build will be marked as a failure if this step ends with warnings. .. py:attribute:: flunkOnFailure If true, the build will be marked as a failure if this step fails. .. py:attribute:: warnOnWarnings If true, the build will be marked as warnings, or worse, if this step ends with warnings. .. py:attribute:: warnOnFailure If true, the build will be marked as warnings, or worse, if this step fails. .. py:attribute:: alwaysRun If true, the step will run even if a previous step halts the build with ``haltOnFailure``. .. py:attribute:: logEncoding The log encoding to use for logs produced in this step, or None to use the global default. See :ref:`Log-Encodings`. .. py:attribute:: rendered At the beginning of the step, the renderable attributes are rendered against the properties. There is a slight delay however when those are not yet rendered, which leads to weird and difficult to reproduce bugs. To address this problem, a ``rendered`` attribute is available for methods that could be called early in the buildstep creation. .. py:attribute:: results This is the result (a code from :py:mod:`buildbot.process.results`) of the step. This attribute only exists after the step is finished, and should only be used in :py:meth:`getResultSummary`. A few important pieces of information are not available when a step is constructed and are added later. These are set by the following methods; the order in which these methods are called is not defined. .. py:method:: setBuild(build) :param build: the :class:`~buildbot.process.build.Build` instance controlling this step. This method is called during setup to set the build instance controlling this worker. Subclasses can override this to get access to the build object as soon as it is available. 
The default implementation sets the :attr:`build` attribute. .. py:attribute:: build The build object controlling this step. .. py:method:: setWorker(worker) :param worker: the :class:`~buildbot.worker.Worker` instance on which this step will run. Similarly, this method is called with the worker that will run this step. The default implementation sets the :attr:`worker` attribute. .. py:attribute:: worker The worker that will run this step. .. py:attribute:: workdir Directory where actions of the step will take place. The workdir is set by order of priority: * workdir of the step, if defined via constructor argument * workdir of the BuildFactory (itself defaults to 'build') BuildFactory workdir can also be a function of a sourcestamp (see :ref:`Factory-Workdir-Functions`). .. py:method:: setDefaultWorkdir(workdir) :param workdir: the default workdir, from the build .. note:: This method is deprecated and should not be used anymore, as workdir is calculated automatically via a property. .. py:method:: setupProgress() This method is called during build setup to give the step a chance to set up progress tracking. It is only called if the build has :attr:`useProgress` set. There is rarely any reason to override this method. Execution of the step itself is governed by the following methods and attributes. .. py:method:: startStep(remote) :param remote: a remote reference to the worker-side :class:`~buildbot_worker.pb.WorkerForBuilderPb` instance :returns: Deferred Begin the step. This is the build's interface to step execution. Subclasses should override :meth:`run` to implement custom behaviors. .. py:method:: run() :returns: result via Deferred Execute the step. When this method returns (or when the Deferred it returns fires), the step is complete. The method's return value must be an integer, giving the result of the step -- a constant from :mod:`buildbot.process.results`. 
If the method raises an exception or its Deferred fires with failure, then the step will be completed with an EXCEPTION result. Any other output from the step (logfiles, status strings, URLs, etc.) is the responsibility of the ``run`` method. Subclasses should override this method. Do *not* call :py:meth:`finished` or :py:meth:`failed` from this method. .. py:method:: start() :returns: ``None`` or :data:`~buildbot.process.results.SKIPPED`, optionally via a Deferred. Begin the step. BuildSteps written before Buildbot-0.9.0 often override this method instead of :py:meth:`run`, but this approach is deprecated. When the step is done, it should call :py:meth:`finished`, with a result -- a constant from :mod:`buildbot.process.results`. The result will be handed off to the :py:class:`~buildbot.process.build.Build`. If the step encounters an exception, it should call :meth:`failed` with a Failure object. If the step decides it does not need to be run, :meth:`start` can return the constant :data:`~buildbot.process.results.SKIPPED`. In this case, it is not necessary to call :meth:`finished` directly. .. py:method:: finished(results) :param results: a constant from :mod:`~buildbot.process.results` A call to this method indicates that the step is finished and the build should analyze the results and perhaps proceed to the next step. The step should not perform any additional processing after calling this method. This method must only be called from the (deprecated) :py:meth:`start` method. .. py:method:: failed(failure) :param failure: a :class:`~twisted.python.failure.Failure` instance Similar to :meth:`finished`, this method indicates that the step is finished, but handles exceptions with appropriate logging and diagnostics. This method handles :exc:`BuildStepFailed` specially, by calling ``finished(FAILURE)``. This provides subclasses with a shortcut to stop execution of a step by raising this failure in a context where :meth:`failed` will catch it. 
This method must only be called from the (deprecated) :py:meth:`start` method. .. py:method:: interrupt(reason) :param reason: why the build was interrupted :type reason: string or :class:`~twisted.python.failure.Failure` This method is used from various control interfaces to stop a running step. The step should be brought to a halt as quickly as possible, by cancelling a remote command, killing a local process, etc. The step must still finish with either :meth:`finished` or :meth:`failed`. The ``reason`` parameter can be a string or, when a worker is lost during step processing, a :exc:`~twisted.internet.error.ConnectionLost` failure. The parent method handles any pending lock operations, and should be called by implementations in subclasses. .. py:attribute:: stopped If false, then the step is running. If true, the step is not running, or has been interrupted. A step can indicate its up-to-the-moment status using a short summary string. These methods allow step subclasses to produce such summaries. .. py:method:: updateSummary() Update the summary, calling :py:meth:`getCurrentSummary` or :py:meth:`getResultSummary` as appropriate. New-style build steps should call this method any time the summary may have changed. This method is debounced, so even calling it for every log line is acceptable. .. py:method:: getCurrentSummary() :returns: dictionary, optionally via Deferred Returns a dictionary containing status information for a running step. The dictionary can have a ``step`` key with a unicode value giving a summary for display with the step. This method is only called while the step is running. New-style build steps should override this method to provide a more interesting summary than the default ``u"running"``. .. py:method:: getResultSummary() :returns: dictionary, optionally via Deferred Returns a dictionary containing status information for a completed step. The dictionary can have keys ``step`` and ``build``, each with unicode values. 
The ``step`` key gives a summary for display with the step, while the ``build`` key gives a summary for display with the entire build. The latter should be used sparingly, and include only information that the user would find relevant for the entire build, such as a number of test failures. Either or both keys can be omitted. This method is only called when the step is finished. The step's result is available in ``self.results`` at that time. New-style build steps should override this method to provide a more interesting summary than the default, or to provide any build summary information. .. py:method:: getBuildResultSummary() :returns: dictionary, optionally via Deferred Returns a dictionary containing status information for a completed step. This method calls :py:meth:`getResultSummary`, and automatically computes a ``build`` key from the ``step`` key according to the ``updateBuildSummaryPolicy``. .. py:method:: describe(done=False) :param done: If true, the step is finished. :returns: list of strings Describe the step succinctly. The return value should be a sequence of short strings suitable for display in a horizontally constrained space. .. note:: Be careful not to assume that the step has been started in this method. In relatively rare circumstances, steps are described before they have started. Ideally, unit tests should be used to ensure that this method is resilient. .. note:: This method is not called for new-style steps. Instead, override :py:meth:`getCurrentSummary` and :py:meth:`getResultSummary`. .. py:method:: addTestResultSets() The steps may override this to add any test result sets for this step via ``self.addTestResultSet()``. This function is called just before the step execution is started. The function is not called if the step is skipped or otherwise not run. .. 
py:method:: addTestResultSet(description, category, value_unit) :param description: Description of the test result set :param category: Category of the test result set :param value_unit: The value unit of the test result set :returns: The ID of the created test result set via a Deferred. Creates a new test result set to which test results can be associated. There are standard values of the ``category`` and ``value_unit`` parameters, see TODO. .. py:method:: addTestResult(setid, value, test_name=None, test_code_path=None, line=None, duration_ns=None) :param setid: The ID of a test result set returned by ``addTestResultSet`` :param value: The value of the result as a string :param test_name: The name of the test :param test_code_path: The path to the code file that resulted in this test result :param line: The line within ``test_code_path`` file that resulted in this test result :param duration_ns: The duration of the test itself, in nanoseconds Creates a test result. Either ``test_name`` or ``test_code_path`` must be specified. The function queues the test results and will submit them to the database when enough test results are added so that performance impact is minimized. .. py:method:: finishTestResultSets() The steps may override this to finish submission of any test results for the step. Build steps have statistics, a simple key-value store of data which can later be aggregated over all steps in a build. Note that statistics are not preserved after a build is complete. .. py:method:: setBuildData(self, name, value, source) :param unicode name: the name of the data :param bytestr value: the value of the data as ``bytes`` :param unicode source: the source of the data :returns: Deferred Builds can have transient data attached to them which allows steps to communicate to reporters and among themselves. The data is a byte string and its interpretation depends on the particular step or reporter. .. 
py:method:: hasStatistic(stat) :param string stat: name of the statistic :returns: True if the statistic exists on this step .. py:method:: getStatistic(stat, default=None) :param string stat: name of the statistic :param default: default value if the statistic does not exist :returns: value of the statistic, or the default value .. py:method:: getStatistics() :returns: a dictionary of all statistics for this step .. py:method:: setStatistic(stat, value) :param string stat: name of the statistic :param value: value to assign to the statistic :returns: value of the statistic Build steps support progress metrics - values that increase roughly linearly during the execution of the step, and can thus be used to calculate an expected completion time for a running step. A metric may be a count of lines logged, tests executed, or files compiled. The build mechanics will take care of translating this progress information into an ETA for the user. .. py:method:: setProgress(metric, value) :param metric: the metric to update :type metric: string :param value: the new value for the metric :type value: integer Update a progress metric. This should be called by subclasses that can provide useful progress-tracking information. The specified metric name must be included in :attr:`progressMetrics`. The following methods are provided as utilities to subclasses. These methods should only be invoked after the step has started. .. py:method:: workerVersion(command, oldversion=None) :param command: command to examine :type command: string :param oldversion: return value if the worker does not specify a version :returns: string Fetch the version of the named command, as specified on the worker. In practice, all commands on a worker have the same version, but passing ``command`` is still useful to ensure that the command is implemented on the worker. If the command is not implemented on the worker, :meth:`workerVersion` will return ``None``. 
Versions take the form ``x.y`` where ``x`` and ``y`` are integers, and are compared as expected for version numbers. Buildbot versions older than 0.5.0 did not support version queries; in this case, :meth:`workerVersion` will return ``oldVersion``. Since such ancient versions of Buildbot are no longer in use, this functionality is largely vestigial. .. py:method:: workerVersionIsOlderThan(command, minversion) :param command: command to examine :type command: string :param minversion: minimum version :returns: boolean This method returns true if ``command`` is not implemented on the worker, or if it is older than ``minversion``. .. py:method:: checkWorkerHasCommand(command) :param command: command to examine :type command: string This method raise :py:class:`~buildbot.interfaces.WorkerSetupError` if ``command`` is not implemented on the worker .. py:method:: getWorkerName() :returns: string Get the name of the worker assigned to this step. Most steps exist to run commands. While the details of exactly how those commands are constructed are left to subclasses, the execution of those commands comes down to this method: .. py:method:: runCommand(command) :param command: :py:class:`~buildbot.process.remotecommand.RemoteCommand` instance :returns: Deferred This method connects the given command to the step's worker and runs it, returning the Deferred from :meth:`~buildbot.process.remotecommand.RemoteCommand.run`. The :class:`BuildStep` class provides methods to add log data to the step. Subclasses provide a great deal of user-configurable functionality on top of these methods. These methods can be called while the step is running, but not before. .. 
py:method:: addLog(name, type="s", logEncoding=None) :param name: log name :param type: log type; see :bb:rtype:`logchunk` :param logEncoding: the log encoding, or None to use the step or global default (see :ref:`Log-Encodings`) :returns: :class:`~buildbot.process.log.Log` instance via Deferred Add a new logfile with the given name to the step, and return the log file instance. .. py:method:: getLog(name) :param name: log name :raises KeyError: if there is no such log :returns: :class:`~buildbot.process.log.Log` instance :raises KeyError: if no such log is defined Return an existing logfile, previously added with :py:meth:`addLog`. Note that this return value is synchronous, and only available after :py:meth:`addLog`'s deferred has fired. .. py:method:: addCompleteLog(name, text) :param name: log name :param text: content of the logfile :returns: Deferred This method adds a new log and sets ``text`` as its content. This is often useful to add a short logfile describing activities performed on the master. The logfile is immediately closed, and no further data can be added. If the logfile's content is a bytestring, it is decoded with the step's log encoding or the global default log encoding. To add a logfile with a different character encoding, perform the decode operation directly and pass the resulting unicode string to this method. .. py:method:: addHTMLLog(name, html) :param name: log name :param html: content of the logfile :returns: Deferred Similar to :meth:`addCompleteLog`, this adds a logfile containing pre-formatted HTML, allowing more expressiveness than the text format supported by :meth:`addCompleteLog`. .. py:method:: addLogObserver(logname, observer) :param logname: log name :param observer: log observer instance Add a log observer for the named log. The named log need not have been added already. The observer will be connected when the log is added. See :ref:`Adding-LogObservers` for more information on log observers. .. 
py:method:: addLogWithFailure(why, logprefix='') :param Failure why: the failure to log :param logprefix: prefix for the log name :returns: Deferred Add log files displaying the given failure, named ``err.text`` and ``err.html``. .. py:method:: addLogWithException(why, logprefix='') :param Exception why: the exception to log :param logprefix: prefix for the log name :returns: Deferred Similar to ``addLogWithFailure``, but for an Exception instead of a Failure. Along with logs, build steps have an associated set of links that can be used to provide additional information for developers. Those links are added during the build with this method: .. py:method:: addURL(name, url) :param name: URL name :param url: the URL Add a link to the given ``url``, with the given ``name`` to displays of this step. This allows a step to provide links to data that is not available in the log files. CommandMixin ------------ The :py:meth:`~buildbot.process.buildstep.BuildStep.runCommand` method can run a :py:class:`~buildbot.process.remotecommand.RemoteCommand` instance, but it's no help in building that object or interpreting the results afterward. This mixin class adds some useful methods for running commands. This class can only be used in new-style steps. .. py:class:: buildbot.process.buildstep.CommandMixin Some remote commands are simple enough that they can boil down to a method call. Most of these take an ``abandonOnFailure`` argument which, if true, will abandon the entire buildstep on command failure. This is accomplished by raising :py:exc:`~buildbot.process.buildstep.BuildStepFailed`. These methods all write to the ``stdio`` log (generally just for errors). They do not close the log when finished. .. py:method:: runRmdir(dir, abandonOnFailure=True) :param dir: directory to remove :param abndonOnFailure: if true, abandon step on failure :returns: Boolean via Deferred Remove the given directory, using the ``rmdir`` command. Returns False on failure. .. 
py:method:: runMkdir(dir, abandonOnFailure=True) :param dir: directory to create :param abndonOnFailure: if true, abandon step on failure :returns: Boolean via Deferred Create the given directory and any parent directories, using the ``mkdir`` command. Returns False on failure. .. py:method:: pathExists(path) :param path: path to test :returns: Boolean via Deferred Determine if the given path exists on the worker (in any form - file, directory, or otherwise). This uses the ``stat`` command. .. py:method:: runGlob(path) :param path: path to test :returns: list of filenames Get the list of files matching the given path pattern on the worker. This uses Python's ``glob`` module. If the ``runGlob`` method fails, it aborts the step. .. py:method:: getFileContentFromWorker(path, abandonOnFailure=False) :param path: path of the file to download from worker :returns: string via deferred (content of the file) Get the content of a file on the worker. ShellMixin ---------- Most Buildbot steps run shell commands on the worker, and Buildbot has an impressive array of configuration parameters to control that execution. The ``ShellMixin`` mixin provides the tools to make running shell commands easy and flexible. This class can only be used in new-style steps. .. py:class:: buildbot.process.buildstep.ShellMixin This mixin manages the following step configuration parameters, the contents of which are documented in the manual. Naturally, all of these are renderable. .. py:attribute:: command .. py:attribute:: workdir .. py:attribute:: env .. py:attribute:: want_stdout .. py:attribute:: want_stderr .. py:attribute:: usePTY .. py:attribute:: logfiles .. py:attribute:: lazylogfiles .. py:attribute:: timeout .. py:attribute:: maxTime .. py:attribute:: logEnviron .. py:attribute:: interruptSignal .. py:attribute:: sigtermTime .. py:attribute:: initialStdin .. py:attribute:: decodeRC .. 
py:method:: setupShellMixin(constructorArgs, prohibitArgs=[]) :param dict constructorArgs: constructor keyword arguments :param list prohibitArgs: list of recognized arguments to reject :returns: keyword arguments destined for :py:class:`BuildStep` This method is intended to be called from the shell constructor, and be passed any keyword arguments not otherwise used by the step. Any attributes set on the instance already (e.g., class-level attributes) are used as defaults. Attributes named in ``prohibitArgs`` are rejected with a configuration error. The return value should be passed to the :py:class:`BuildStep` constructor. .. py:method:: makeRemoteShellCommand(collectStdout=False, collectStderr=False, **overrides) :param collectStdout: if true, the command's stdout will be available in ``cmd.stdout`` on completion :param collectStderr: if true, the command's stderr will be available in ``cmd.stderr`` on completion :param overrides: overrides arguments that might have been passed to :py:meth:`setupShellMixin` :returns: :py:class:`~buildbot.process.remotecommand.RemoteShellCommand` instance via Deferred This method constructs a :py:class:`~buildbot.process.remotecommand.RemoteShellCommand` instance based on the instance attributes and any supplied overrides. It must be called while the step is running, as it examines the worker capabilities before creating the command. It takes care of just about everything: * Creating log files and associating them with the command * Merging environment configuration * Selecting the appropriate workdir configuration All that remains is to run the command with :py:meth:`~buildbot.process.buildstep.BuildStep.runCommand`. The :py:class:`ShellMixin` class implements :py:meth:`~buildbot.process.buildstep.BuildStep.getResultSummary`, returning a summary of the command. If no command was specified or run, it falls back to the default ``getResultSummary`` based on ``descriptionDone``. 
Subclasses can override this method to return a more appropriate status. Exceptions ---------- .. py:exception:: BuildStepFailed This exception indicates that the buildstep has failed. It is useful as a way to skip all subsequent processing when a step goes wrong. buildbot-3.4.0/master/docs/developer/cls-changesources.rst000066400000000000000000000022721413250514000236720ustar00rootroot00000000000000Change Sources ============== .. py:module:: buildbot.changes.base ChangeSource ------------ .. py:class:: ChangeSource This is the base class for change sources. Subclasses should override the inherited :py:meth:`~buildbot.util.service.ClusteredService.activate` and :py:meth:`~buildbot.util.service.ClusteredService.deactivate` methods if necessary to handle initialization and shutdown. Change sources which are active on every master should, instead, override ``startService`` and ``stopService``. ReconfigurablePollingChangeSource --------------------------------- .. py:class:: ReconfigurablePollingChangeSource This is a subclass of :py:class:`ChangeSource` which adds polling behavior. Its constructor accepts the ``pollInterval`` and ``pollAtLaunch`` arguments as documented for most built-in change sources. Subclasses should override the ``poll`` method. This method may return a Deferred. Calls to ``poll`` will not overlap. PollingChangeSource ------------------- .. py:class:: PollingChangeSource This is a legacy class for polling change sources not yet ported to the :py:class:`BuildbotService` component lifecycle. Do not use for new code. buildbot-3.4.0/master/docs/developer/cls-forcesched.rst000066400000000000000000000152411413250514000231460ustar00rootroot00000000000000.. -*- rst -*- .. _ForceScheduler: ForceScheduler -------------- The force scheduler has a symbiotic relationship with the web application, so it deserves some further description. Parameters ~~~~~~~~~~ The force scheduler comes with a set of parameter classes. 
This section contains information to help users or developers who are interested in adding new parameter types or hacking the existing types. .. py:module:: buildbot.schedulers.forceshed .. py:class:: BaseParameter(name, label, regex, **kwargs) This is the base implementation for most parameters, it will check validity, ensure the arg is present if the :py:attr:`~BaseParameter.required` attribute is set, and implement the default value. It will finally call :py:meth:`~BaseParameter.updateFromKwargs` to process the string(s) from the HTTP POST. The :py:class:`BaseParameter` constructor converts all keyword arguments into instance attributes, so it is generally not necessary for subclasses to implement a constructor. For custom parameters that set properties, one simple customization point is `getFromKwargs`: .. py:method:: getFromKwargs(kwargs) :param kwargs: a dictionary of the posted values Given the passed-in POST parameters, return the value of the property that should be set. For more control over parameter parsing, including modifying sourcestamps or changeids, override the ``updateFromKwargs`` function, which is the function that :py:class:`ForceScheduler` invokes for processing: .. py:method:: updateFromKwargs(master, properties, changes, sourcestamps, collector, kwargs) :param master: the :py:class:`~buildbot.master.BuildMaster` instance :param properties: a dictionary of properties :param changes: a list of changeids that will be used to build the SourceStamp for the forced builds :param sourcestamps: the SourceStamp dictionary that will be passed to the build; some parameters modify sourcestamps rather than properties :param collector: a :py:class:`buildbot.schedulers.forcesched.ValidationErrorCollector` object, which is used by nestedParameter to collect errors from its childs :param kwargs: a dictionary of the posted values This method updates ``properties``, ``changes``, and/or ``sourcestamps`` according to the request. 
The default implementation is good for many simple uses, but can be overridden for more complex purposes. When overriding this function, take all parameters by name (not by position), and include an ``**unused`` catch-all to guard against future changes. The remaining attributes and methods should be overridden by subclasses, although :py:class:`BaseParameter` provides appropriate defaults. .. py:attribute:: name The name of the parameter. This corresponds to the name of the property that your parameter will set. This name is also used internally as identifier for HTTP POST arguments. .. py:attribute:: label The label of the parameter, as displayed to the user. This value can contain raw HTML. .. py:method:: fullName A fully-qualified name that uniquely identifies the parameter in the scheduler. This name is used internally as the identifier for HTTP POST arguments. It is a mix of `name` and the parent's `name` (in the case of nested parameters). This field is not modifiable. .. py:attribute:: type A string identifying the type that the parameter conforms to. It is used by the angular application to find which angular directive to use for showing the form widget. The available values are visible in :src:`www/base/src/app/common/directives/forcefields/forcefields.directive.js`. Examples of how to create a custom parameter widgets are available in the Buildbot source code in directories: * :src:`www/codeparameter` * :src:`www/nestedexample` .. py:attribute:: default The default value to use if there is no user input. This is also used to fill in the form presented to the user. .. py:attribute:: required If true, an error will be shown to user if there is no input in this field. .. py:attribute:: multiple If true, this parameter represents a list of values (e.g. list of tests to run). .. py:attribute:: regex A string that will be compiled as a regex and used to validate the string value of this parameter. If None, then no validation will take place. .. 
py:method:: parse_from_args(l) Return the list of property values corresponding to the list of strings passed by the user. The default function will just call :py:func:`parse_from_arg` on every argument. .. py:method:: parse_from_arg(s) Return the property value corresponding to the string passed by the user. The default function will simply return the input argument. Nested Parameters ~~~~~~~~~~~~~~~~~ The :py:class:`NestedParameter` class is a container for parameters. The original motivating purpose for this feature is the multiple-codebase configuration, which needs to provide the user with a form to control the branch (et al) for each codebase independently. Each branch parameter is a string field with name 'branch' and these must be disambiguated. In Buildbot nine, this concept has been extended to allow grouping different parameters into UI containers. Details of the available layouts is described in :ref:`NestedParameter `. Each of the child parameters mixes in the parent's name to create the fully qualified ``fullName``. This allows, for example, each of the 'branch' fields to have a unique name in the POST request. The `NestedParameter` handles adding this extra bit to the name to each of the children. When the `kwarg` dictionary is posted back, this class also converts the flat POST dictionary into a richer structure that represents the nested structure. For example, if the nested parameter has the name 'foo', and has children 'bar1' and 'bar2', then the POST will have entries like "foo.bar1" and "foo.bar2". The nested parameter will translate this into a dictionary in the 'kwargs' structure, resulting in something like:: kwargs = { # ... 'foo': { 'bar1': '...', 'bar2': '...' } } Arbitrary nesting is allowed and results in a deeper dictionary structure. Nesting can also be used for presentation purposes. If the name of the :py:class:`NestedParameter` is empty, the nest is "anonymous" and does not mangle the child names. 
However, in the HTML layout, the nest will be presented as a logical group. buildbot-3.4.0/master/docs/developer/cls-iconfigurator.rst000066400000000000000000000010411413250514000237050ustar00rootroot00000000000000.. index:: single: Configurator; IConfigurator IConfigurator ============= .. class:: buildbot.interfaces.IConfigurator A configurator is an object which configures several components of Buildbot in a coherent manner. This can be used to implement higher level configuration tools. .. method:: configure(config_dict) Alter the Buildbot ``config_dict``, as defined in master.cfg. Like master.cfg, this is run out of the main reactor thread, so this can block, but it can't call most Buildbot facilities. buildbot-3.4.0/master/docs/developer/cls-iproperties.rst000066400000000000000000000014071413250514000234050ustar00rootroot00000000000000.. index:: single: Properties; IProperties IProperties =========== .. class:: buildbot.interfaces.IProperties Providers of this interface allow get and set access to a build's properties. .. method:: getProperty(propname, default=None) Get a named property, returning the default value if the property is not found. .. method:: hasProperty(propname) Determine whether the named property exists. .. method:: setProperty(propname, value, source, runtime=False) Set a property's value, also specifying the source for this value. .. method:: getProperties() Get a :class:`buildbot.process.properties.Properties` instance. The interface of this class is not finalized; where possible, use the other ``IProperties`` methods. buildbot-3.4.0/master/docs/developer/cls-irenderable.rst000066400000000000000000000007151413250514000233150ustar00rootroot00000000000000.. index:: single: Properties; IRenderable IRenderable =========== .. class:: buildbot.interfaces.IRenderable Providers of this class can be "rendered", based on available properties, when a build is started. .. 
method:: getRenderingFor(iprops) :param iprops: the :class:`~buildbot.interfaces.IProperties` provider supplying the properties of the build :returns: the interpretation of the given properties, optionally in a Deferred buildbot-3.4.0/master/docs/developer/cls-log.rst000066400000000000000000000065331413250514000216260ustar00rootroot00000000000000Logs ==== .. py:module:: buildbot.process.log .. py:class:: Log This class handles write-only access to log files from running build steps. It does not provide an interface for reading logs - such access should occur directly through the Data API. Instances of this class can only be created by the :py:meth:`~buildbot.process.buildstep.BuildStep.addLog` method of a build step. .. py:attribute:: name The name of the log. Note that if you have a build step which outputs multiple logs, naming one of the logs ``Summary`` will cause the Web UI to sort the summary log first in the list, and expand it so that the contents are immediately visible. .. py:attribute:: type The type of the log, represented as a single character. See :bb:rtype:`logchunk` for details. .. py:attribute:: logid The ID of the logfile. .. py:attribute:: decoder A callable used to decode bytestrings. See :bb:cfg:`logEncoding`. .. py:method:: subscribe(receiver) :param callable receiver: the function to call Register ``receiver`` to be called with line-delimited chunks of log data. The callable is invoked as ``receiver(stream, chunk)``, where the stream is indicated by a single character, or None for logs without streams. The chunk is a single string containing an arbitrary number of log lines, and terminated with a newline. When the logfile is finished, ``receiver`` will be invoked with ``None`` for both arguments. The callable cannot return a Deferred. If it must perform some asynchronous operation, it will need to handle its own Deferreds, and be aware that multiple overlapping calls may occur. 
Note that no "rewinding" takes place: only log content added after the call to ``subscribe`` will be supplied to ``receiver``. .. py:method:: finish() :returns: Deferred This method indicates that the logfile is finished. No further additions will be permitted. In use, callers will receive a subclass with methods appropriate for the log type: .. py:class:: TextLog .. py:method:: addContent(text): :param text: log content :returns: Deferred Add the given data to the log. The data need not end on a newline boundary. .. py:class:: HTMLLog .. py:method:: addContent(text): :param text: log content :returns: Deferred Same as :py:meth:`TextLog.addContent`. .. py:class:: StreamLog This class handles logs containing three interleaved streams: stdout, stderr, and header. The resulting log maintains data distinguishing these streams, so they can be filtered or displayed in different colors. This class is used to represent the stdio log in most steps. .. py:method:: addStdout(text) :param text: log content :returns: Deferred Add content to the stdout stream. The data need not end on a newline boundary. .. py:method:: addStderr(text) :param text: log content :returns: Deferred Add content to the stderr stream. The data need not end on a newline boundary. .. py:method:: addHeader(text) :param text: log content :returns: Deferred Add content to the header stream. The data need not end on a newline boundary. buildbot-3.4.0/master/docs/developer/cls-logobserver.rst000066400000000000000000000112241413250514000233670ustar00rootroot00000000000000LogObservers ============ .. py:module:: buildbot.process.logobserver .. py:class:: LogObserver This is a base class for objects which receive logs from worker commands as they are produced. It does not provide an interface for reading logs - such access should occur directly through the Data API. See :ref:`Adding-LogObservers` for help creating and using a custom log observer. The three methods that subclasses may override follow. 
None of these methods may return a Deferred. It is up to the callee to handle any asynchronous operations. Subclasses may also override the constructor, with no need to call :py:class:`LogObserver`'s constructor. .. py:method:: outReceived(data): :param unicode data: received data This method is invoked when a "chunk" of data arrives in the log. The chunk contains one or more newline-terminated unicode lines. For stream logs (e.g., ``stdio``), output to stderr generates a call to :py:meth:`errReceived`, instead. .. py:method:: errReceived(data): :param unicode data: received data This method is similar to :py:meth:`outReceived`, but is called for output to stderr. .. py:method:: headerReceived(data): :param unicode data: received data This method is similar to :py:meth:`outReceived`, but is called for header output. .. py:method:: finishReceived() This method is invoked when the observed log is finished. .. py:class:: LogLineObserver This subclass of :py:class:`LogObserver` calls its subclass methods once for each line, instead of once per chunk. .. py:method:: outLineReceived(line): :param unicode line: received line, without newline Like :py:meth:`~LogObserver.outReceived`, this is called once for each line of output received. The argument does not contain the trailing newline character. .. py:method:: errLineReceived(line): :param unicode line: received line, without newline Similar to :py:meth:`~LogLineObserver.outLineReceived`, but for stderr. .. py:method:: headerLineReceived(line): :param unicode line: received line, without newline Similar to :py:meth:`~LogLineObserver.outLineReceived`, but for header output. .. py:method:: finishReceived() This method, inherited from :py:class:`LogObserver`, is invoked when the observed log is finished. .. py:class:: LineConsumerLogObserver This subclass of :py:class:`LogObserver` takes a generator function and "sends" each line to that function. 
This allows consumers to be written as stateful Python functions, e.g., :: def logConsumer(self): while True: stream, line = yield if stream == 'o' and line.startswith('W'): self.warnings.append(line[1:]) def __init__(self): ... self.warnings = [] self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) Each ``yield`` expression evaluates to a tuple of (stream, line), where the stream is one of 'o', 'e', or 'h' for stdout, stderr, and header, respectively. As with any generator function, the ``yield`` expression will raise a ``GeneratorExit`` exception when the generator is complete. To do something after the log is finished, just catch this exception (but then re-raise it or return). :: def logConsumer(self): while True: try: stream, line = yield if stream == 'o' and line.startswith('W'): self.warnings.append(line[1:]) except GeneratorExit: self.warnings.sort() return .. warning:: This use of generator functions is a simple Python idiom first described in `PEP 342 `__. It is unrelated to the generators used in ``inlineCallbacks``. In fact, consumers of this type are incompatible with asynchronous programming, as each line must be processed immediately. .. py:class:: BufferLogObserver(wantStdout=True, wantStderr=False) :param boolean wantStdout: true if stdout should be buffered :param boolean wantStderr: true if stderr should be buffered This subclass of :py:class:`LogObserver` buffers stdout and/or stderr for analysis after the step is complete. This can cause excessive memory consumption if the output is large. .. py:method:: getStdout() :returns: unicode string Return the accumulated stdout. .. py:method:: getStderr() :returns: unicode string Return the accumulated stderr. buildbot-3.4.0/master/docs/developer/cls-protocols.rst000066400000000000000000000153751413250514000230750ustar00rootroot00000000000000Protocols ========= To exchange information over the network between master and worker, we need to use a protocol. 
:mod:`buildbot.worker.protocols.base` provide interfaces to implement wrappers around protocol specific calls, so other classes which use them do not need to know about protocol calls or handle protocol specific exceptions. .. py:module:: buildbot.worker.protocols.base .. py:class:: Listener(master) :param master: :py:class:`buildbot.master.BuildMaster` instance Responsible for spawning Connection instances and updating registrations. Protocol-specific subclasses are instantiated with protocol-specific parameters by the buildmaster during startup. .. py:class:: Connection(master, worker) Represents connection to single worker. .. py:attribute:: proxies Dictionary containing mapping between ``Impl`` classes and ``Proxy`` class for this protocol. This may be overridden by a subclass to declare its proxy implementations. .. py:method:: createArgsProxies(args) :returns: shallow copy of args dictionary with proxies instead of impls Helper method that will use :attr:`proxies`, and replace ``Impl`` objects by specific ``Proxy`` counterpart. .. py:method:: notifyOnDisconnect(cb) :param cb: callback :returns: :py:class:`buildbot.util.subscriptions.Subscription` Register a callback to be called if a worker gets disconnected. .. py:method:: loseConnection() Close connection. .. py:method:: remotePrint(message) :param message: message for worker :type message: string :returns: Deferred Print message to worker log file. .. py:method:: remoteGetWorkerInfo() :returns: Deferred Get worker information, commands and version, put them in dictionary, and then return back. .. py:method:: remoteSetBuilderList(builders) :param builders: list with wanted builders :type builders: List :returns: Deferred containing PB references XXX Take a list with wanted builders, send them to the worker, and return the list with created builders. .. 
py:method:: remoteStartCommand(remoteCommand, builderName, commandId, commandName, args) :param remoteCommand: :py:class:`~buildbot.worker.protocols.base.RemoteCommandImpl` instance :param builderName: self explanatory :type builderName: string :param commandId: command number :type commandId: string :param commandName: command which will be executed on worker :type commandName: string :param args: arguments for that command :type args: List :returns: Deferred Start command on the worker. .. py:method:: remoteShutdown() :returns: Deferred Shutdown the worker, causing its process to halt permanently. .. py:method:: remoteStartBuild(builderName) :param builderName: name of the builder for which the build is starting :returns: Deferred Start a build. .. py:method:: remoteInterruptCommand(builderName, commandId, why) :param builderName: self explanatory :type builderName: string :param commandId: command number :type commandId: string :param why: reason to interrupt :type why: string :returns: Deferred Interrupt the command executed on builderName with given commandId on worker, and print reason "why" to worker logs. The following classes describe the worker -> master part of the protocol. In order to support old workers, we must make sure we do not change the current pb protocol. This is why we implement a ``Impl vs Proxy`` method. All the objects that are referenced from the workers for remote calls have an ``Impl`` and a ``Proxy`` base class in this module. ``Impl`` classes are subclassed by Buildbot master, and implement the actual logic for the protocol API. ``Proxy`` classes are implemented by the worker/master protocols, and implement the demux and de-serialization of protocol calls. On worker sides, those proxy objects are replaced by a proxy object having a single method to call master side methods: .. py:class:: workerProxyObject() .. py:method:: callRemote(message, *args, **kw) Calls the method ``"remote_" + message`` on master side .. 
py:class:: RemoteCommandImpl() Represents a RemoteCommand status controller. .. py:method:: remote_update(updates) :param updates: dictionary of updates Called when the workers have updates to the current remote command. Possible keys for updates are: * ``stdout``: Some logs where captured in remote command's stdout. value: `` as string`` * ``stderr``: Some logs where captured in remote command's stderr. value: `` as string`` * ``header``: Remote command's header text. value: `` as string`` * ``log``: One of the watched logs has received some text. value: ``( as string, as string)`` * ``rc``: Remote command exited with a return code. value: `` as integer`` * ``elapsed``: Remote command has taken time. value: `` as float`` * ``stat``: Sent by the ``stat`` command with the result of the os.stat, converted to a tuple. value: `` as tuple`` * ``files``: Sent by the ``glob`` command with the result of the glob.glob. value: `` as list of string`` * ``got_revision``: Sent by the source commands with the revision checked out. value: `` as string`` * ``repo_downloaded``: sent by the ``repo`` command with the list of patches downloaded by repo. value: `` as list of string`` .. :py:method:: remote_complete(failure=None) :param failure: copy of the failure if any Called by the worker when the command is complete. .. py:class:: FileWriterImpl() Class used to implement data transfer between worker and master. .. :py:method:: remote_write(data) :param data: data to write data needs to be written on master side .. :py:method:: remote_utime(accessed_modified) :param accessed_modified: modification times called with value of the modification time to update on master side .. :py:method:: remote_unpack() Called when master should start to unpack the tarball sent via command ``uploadDirectory`` .. :py:method:: remote_close() Called when master should close the file .. py:class:: FileReaderImpl(object) .. 
py:method:: remote_read(maxLength) :param maxLength: maximum length of the data to send :returns: data read Called when worker needs more data. .. py:method:: remote_close() Called when master should close the file. buildbot-3.4.0/master/docs/developer/cls-remotecommands.rst000066400000000000000000000244041413250514000240570ustar00rootroot00000000000000RemoteCommands ============== .. py:currentmodule:: buildbot.process.remotecommand Most of the action in build steps consists of performing operations on the worker. This is accomplished via :class:`RemoteCommand` and its subclasses. Each represents a single operation on the worker. Most data is returned to a command via updates. These updates are described in detail in :ref:`master-worker-updates`. RemoteCommand ~~~~~~~~~~~~~ .. py:class:: RemoteCommand(remote_command, args, collectStdout=False, ignore_updates=False, decodeRC=dict(0), stdioLogName='stdio') :param remote_command: command to run on the worker :type remote_command: string :param args: arguments to pass to the command :type args: dictionary :param collectStdout: if True, collect the command's stdout :param ignore_updates: true to ignore remote updates :param decodeRC: dictionary associating ``rc`` values to buildstep results constants (e.g. ``SUCCESS``, ``FAILURE``, ``WARNINGS``) :param stdioLogName: name of the log to which to write the command's stdio This class handles running commands, consisting of a command name and a dictionary of arguments. If true, ``ignore_updates`` will suppress any updates sent from the worker. This class handles updates for ``stdout``, ``stderr``, and ``header`` by appending them to a stdio logfile named by the ``stdioLogName`` parameter. Steps that run multiple commands and want to separate those commands' stdio streams can use this parameter. It handles updates for ``rc`` by recording the value in its ``rc`` attribute. 
Most worker-side commands, even those which do not spawn a new process on the worker, generate logs and an ``rc``, requiring this class or one of its subclasses. See :ref:`master-worker-updates` for the updates that each command may send. .. py:attribute:: active True if the command is currently running .. py:method:: run(step, remote) :param step: the buildstep invoking this command :param remote: a reference to the remote :class:`WorkerForBuilder` instance :returns: Deferred Run the command. Call this method to initiate the command; the returned Deferred will fire when the command is complete. The Deferred fires with the :class:`RemoteCommand` instance as its value. .. py:method:: interrupt(why) :param why: reason for interrupt :type why: Twisted Failure :returns: Deferred This method attempts to stop the running command early. The Deferred it returns will fire when the interrupt request is received by the worker; this may be a long time before the command itself completes, at which time the Deferred returned from :meth:`run` will fire. .. py:method:: results() :returns: results constant This method checks the ``rc`` against the decodeRC dictionary, and returns a results constant. .. py:method:: didFail() :returns: bool This method returns True if the results() function returns FAILURE. The following methods are invoked from the worker. They should not be called directly. .. py:method:: remote_update(updates) :param updates: new information from the worker Handles updates from the worker on the running command. See :ref:`master-worker-updates` for the content of the updates. This class splits the updates out, and handles the ``ignore_updates`` option, then calls :meth:`remoteUpdate` to process the update. .. py:method:: remote_complete(failure=None) :param failure: the failure that caused the step to complete, or None for success Called by the worker to indicate that the command is complete. 
Normal completion (even with a nonzero ``rc``) will finish with no failure; if ``failure`` is set, then the step should finish with status :attr:`~buildbot.process.results.EXCEPTION`. These methods are hooks for subclasses to add functionality. .. py:method:: remoteUpdate(update) :param update: the update to handle Handle a single update. Subclasses must override this method. .. py:method:: remoteComplete(failure) :param failure: the failure that caused the step to complete, or None for success :returns: Deferred Handle command completion, performing any necessary cleanup. Subclasses should override this method. If ``failure`` is not None, it should be returned to ensure proper processing. .. py:attribute:: logs A dictionary of :class:`~buildbot.process.log.Log` instances representing active logs. Do not modify this directly -- use :meth:`useLog` instead. .. py:attribute:: rc Set to the return code of the command, after the command has completed. For compatibility with shell commands, 0 is taken to indicate success, while nonzero return codes indicate failure. .. py:attribute:: stdout If the ``collectStdout`` constructor argument is true, then this attribute will contain all data from stdout, as a single string. This is helpful when running informational commands (e.g., ``svnversion``), but is not appropriate for commands that will produce a large amount of output, as that output is held in memory. To set up logging, use :meth:`useLog` or :meth:`useLogDelayed` before starting the command: .. py:method:: useLog(log, closeWhenFinished=False, logfileName=None) :param log: the :class:`~buildbot.process.log.Log` instance to add to :param closeWhenFinished: if true, call :meth:`~buildbot.process.log.Log.finish` when the command is finished :param logfileName: the name of the logfile, as given to the worker. This is ``stdio`` for standard streams Route log-related updates to the given logfile. Note that ``stdio`` is not included by default, and must be added explicitly. 
The ``logfileName`` must match the name given by the worker in any ``log`` updates. .. py:method:: useLogDelayed(logfileName, activateCallback, closeWhenFinished=False) :param logfileName: the name of the logfile, as given to the worker. This is ``stdio`` for standard streams :param activateCallback: callback for when the log is added; see below :param closeWhenFinished: if true, call :meth:`~buildbot.process.log.Log.finish` when the command is finished Similar to :meth:`useLog`, but the logfile is only actually added when an update arrives for it. The callback, ``activateCallback``, will be called with the :class:`~buildbot.process.remotecommand.RemoteCommand` instance when the first update for the log is delivered. It should return the desired log instance, optionally via a Deferred. With that finished, run the command using the inherited :meth:`~buildbot.process.remotecommand.RemoteCommand.run` method. During the run, you can inject data into the logfiles with any of these methods: .. py:method:: addStdout(data) :param data: data to add to the logfile :returns: Deferred Add stdout data to the ``stdio`` log. .. py:method:: addStderr(data) :param data: data to add to the logfile :returns: Deferred Add stderr data to the ``stdio`` log. .. py:method:: addHeader(data) :param data: data to add to the logfile :returns: Deferred Add header data to the ``stdio`` log. .. py:method:: addToLog(logname, data) :param logname: the logfile to receive the data :param data: data to add to the logfile :returns: Deferred Add data to a logfile other than ``stdio``. .. 
py:class:: RemoteShellCommand(workdir, command, env=None, want_stdout=True, want_stderr=True, timeout=20*60, maxTime=None, sigtermTime=None, logfiles={}, usePTY=None, logEnviron=True, collectStdio=False, collectStderr=False, interruptSignal=None, initialStdin=None, decodeRC=None, stdioLogName='stdio') :param workdir: directory in which the command should be executed, relative to the builder's basedir :param command: shell command to run :type command: string or list :param want_stdout: If false, then no updates will be sent for stdout :param want_stderr: If false, then no updates will be sent for stderr :param timeout: Maximum time without output before the command is killed :param maxTime: Maximum overall time from the start before the command is killed :param sigtermTime: Try to kill the command with SIGTERM and wait for sigtermTime seconds before firing ``interruptSignal`` or SIGKILL if it's not defined. If None, SIGTERM will not be fired :param env: A dictionary of environment variables to augment or replace the existing environment on the worker :param logfiles: Additional logfiles to request from the worker :param usePTY: True to use a PTY, false to not use a PTY; the default value is False :param logEnviron: If false, do not log the environment on the worker :param collectStdout: If True, collect the command's stdout :param collectStderr: If True, collect the command's stderr :param interruptSignal: The signal to send to interrupt the command, e.g. ``KILL`` or ``TERM``. If None, SIGKILL is used :param initialStdin: The input to supply the command via stdin :param decodeRC: dictionary associating ``rc`` values to buildstep results constants (e.g. ``SUCCESS``, ``FAILURE``, ``WARNINGS``) :param stdioLogName: name of the log to which to write the command's stdio Most of the constructor arguments are sent directly to the worker; see :ref:`shell-command-args` for the details of the formats. 
The ``collectStdout``, ``decodeRC`` and ``stdioLogName`` parameters are as described for the parent class. If a shell command contains passwords, they can be hidden from log files by using :doc:`../manual/secretsmanagement`. This is the recommended procedure for new-style build steps. For legacy build steps passwords were hidden from the log file by passing them as tuples in command arguments. Eg. ``['print', ('obfuscated', 'password', 'dummytext')]`` is logged as ``['print', 'dummytext']``. This class is used by the :bb:step:`ShellCommand` step, and by steps that run multiple customized shell commands. buildbot-3.4.0/master/docs/developer/cls-resultspec.rst000066400000000000000000000120301413250514000232230ustar00rootroot00000000000000ResultSpecs ----------- .. py:module:: buildbot.data.resultspec Result specifications are used by the :ref:`Data_API` to describe the desired results of a :py:meth:`~buildbot.data.connector.DataConnector.get` call. They can be used to filter, sort and paginate the contents of collections, and to limit the fields returned for each item. Python calls to :py:meth:`~buildbot.data.connector.DataConnector.get` can pass a :py:class:`ResultSpec` instance directly. Requests to the HTTP REST API are converted into instances automatically. Implementers of Data API endpoints can ignore result specifications entirely, except where efficiency suffers. Any filters, sort keys, and so on still present after the endpoint returns its result are applied generically. :py:class:`ResultSpec` instances are mutable, so endpoints that do apply some of the specification can remove parts of the specification. Result specifications are applied in the following order: * Field Selection (fields) * Filters * Order * Pagination (limit/offset) * Properties Only fields & properties are applied to non-collection results. Endpoints processing a result specification should take care to replicate this behavior. .. 
py:class:: ResultSpec A result specification has the following attributes, which should be treated as read-only: .. py:attribute:: filters A list of :py:class:`Filter` instances to be applied. The result is a logical AND of all filters. .. py:attribute:: fields A list of field names that should be included, or ``None`` for no sorting. if the field names all begin with ``-``, then those fields will be omitted and all others included. .. py:attribute:: order A list of field names to sort on. if any field name begins with ``-``, then the ordering on that field will be in reverse. .. py:attribute:: limit The maximum number of collection items to return. .. py:attribute:: offset The 0-based index of the first collection item to return. .. py:attribute:: properties A list of :py:class:`Property` instances to be applied. The result is a logical AND of all properties. All of the attributes can be supplied as constructor keyword arguments. Endpoint implementations may call these methods to indicate that they have processed part of the result spec. A subsequent call to :py:meth:`apply` will then not waste time re-applying that part. .. py:method:: popProperties() If a property exists, return its values list and remove it from the result spec. .. py:method:: popFilter(field, op) If a filter exists for the given field and operator, return its values list and remove it from the result spec. .. py:method:: popBooleanFilter(field) If a filter exists for the field, remove it and return the expected value (True or False); otherwise return None. This method correctly handles odd cases like ``field__ne=false``. .. py:method:: popStringFilter(field) If one string filter exists for the field, remove it and return the expected value (as string); otherwise return None. .. py:method:: popIntegerFilter(field) If one integer filter exists for the field, remove it and return the expected value (as integer); otherwise return None. raises ValueError if the field is not convertible to integer. 
.. py:method:: removePagination() Remove the pagination attributes (:py:attr:`limit` and :py:attr:`offset`) from the result spec. And endpoint that calls this method should return a :py:class:`~buildbot.data.base.ListResult` instance with its pagination attributes set appropriately. .. py:method:: removeOrder() Remove the order attribute. .. py:method:: popField(field) Remove a single field from the :py:attr:`fields` attribute, returning True if it was present. Endpoints can use this in conditionals to avoid fetching particularly expensive fields from the DB API. The following method is used internally to apply any remaining parts of a result spec that are not handled by the endpoint. .. py:method:: apply(data) Apply the result specification to the data, returning a transformed copy of the data. If the data is a collection, then the result will be a :py:class:`~buildbot.data.base.ListResult` instance. .. py:class:: Filter(field, op, values) :param string field: the field to filter on :param string op: the comparison operator (e.g., "eq" or "gt") :param list values: the values on the right side of the operator A filter represents a limitation of the items from a collection that should be returned. Many operators, such as "gt", only accept one value. Others, such as "eq" or "ne", can accept multiple values. In either case, the values must be passed as a list. .. py:class:: Property(values) :param list values: the values on the right side of the operator (``eq``) A property represents an item of a foreign table. In either case, the values must be passed as a list. buildbot-3.4.0/master/docs/developer/cls-worker.rst000066400000000000000000000005321413250514000223470ustar00rootroot00000000000000Workers ======= .. py:module:: buildbot.worker The :py:class:`Worker` class represents a worker, which may or may not be connected to the master. Instances of this class are created directly in the Buildbot configuration file. Worker ------ .. py:class:: Worker .. 
py:attribute:: workerid The ID of this worker in the database. buildbot-3.4.0/master/docs/developer/cls-workermanager.rst000066400000000000000000000023211413250514000237000ustar00rootroot00000000000000WorkerManager ============= .. py:module:: buildbot.worker.manager WorkerRegistration ------------------ .. py:class:: WorkerRegistration(master, worker) Represents single worker registration. .. py:method:: unregister() Remove registration for `worker`. .. py:method:: update(worker_config, global_config) :param worker_config: new Worker instance :type worker_config: :class:`~buildbot.worker.Worker` :param global_config: Buildbot config :type global_config: :class:`~buildbot.config.MasterConfig` Update the registration in case the port or password has changed. .. note:: You should invoke this method after calling `WorkerManager.register(worker)`. WorkerManager ------------- .. py:class:: WorkerManager(master) Handle worker registrations for multiple protocols. .. py:method:: register(worker) :param worker: new Worker instance :type worker: :class:`~buildbot.worker.Worker` :returns: :class:`~buildbot.worker.manager.WorkerRegistration` Creates :class:`~buildbot.worker.manager.WorkerRegistration` instance. .. note:: You should invoke `.update()` on returned WorkerRegistration instance. buildbot-3.4.0/master/docs/developer/cls-www.rst000066400000000000000000000034201413250514000216610ustar00rootroot00000000000000Web Server Classes ================== Most of the source in :src:`master/buildbot/www` is self-explanatory. However, a few classes and methods deserve some special mention. Resources --------- .. py:module:: buildbot.www.resource .. py:class:: Redirect(url) This is a subclass of Twisted Web's ``Error``. If this is raised within :py:meth:`~Resource.asyncRenderHelper`, the user will be redirected to the given URL. .. py:class:: Resource This class specializes the usual Twisted Web ``Resource`` class. 
It adds support for resources getting notified when the master is reconfigured. .. py:attribute:: needsReconfig If True, :py:meth:`reconfigResource` will be called on reconfig. .. py:method:: reconfigResource(new_config) :param new_config: new :py:class:`~buildbot.config.MasterConfig` instance :returns: Deferred if desired Reconfigure this resource. It's surprisingly difficult to render a Twisted Web resource asynchronously. This next method makes it quite a bit easier. .. py:method:: asyncRenderHelper(request, callable, writeError=None) :param request: the request instance :param callable: the render function :param writeError: optional callable for rendering errors This method will call ``callable``, which can return a Deferred, with the given ``request``. The value returned from this callable will be converted to an HTTP response. Exceptions, including ``Error`` subclasses, are handled properly. If the callable raises :py:class:`Redirect`, the response will be a suitable HTTP 302 redirect. Use this method as follows:: def render_GET(self, request): return self.asyncRenderHelper(request, self.renderThing) buildbot-3.4.0/master/docs/developer/config.rst000066400000000000000000000460311413250514000215300ustar00rootroot00000000000000Configuration ============= .. py:module:: buildbot.config Wherever possible, Buildbot components should access configuration information as needed from the canonical source, ``master.config``, which is an instance of :py:class:`MasterConfig`. For example, components should not keep a copy of the ``buildbotURL`` locally, as this value may change throughout the lifetime of the master. Components which need to be notified of changes in the configuration should be implemented as services, subclassing :py:class:`ReconfigurableServiceMixin`, as described in :ref:`developer-Reconfiguration`. .. py:class:: MasterConfig The master object makes much of the configuration available from an object named ``master.config``. 
Configuration is stored as attributes of this object. Where possible, other Buildbot components should access this configuration directly and not cache the configuration values anywhere else. This avoids the need to ensure that update-from-configuration methods are called on a reconfig. Aside from validating the configuration, this class handles any backward-compatibility issues - renamed parameters, type changes, and so on - removing those concerns from other parts of Buildbot. This class may be instantiated directly, creating an entirely default configuration, or via :py:meth:`FileLoader.loadConfig`, which will load the configuration from a config file. The following attributes are available from this class, representing the current configuration. This includes a number of global parameters: .. py:attribute:: title The title of this buildmaster, from :bb:cfg:`title`. .. py:attribute:: titleURL The URL corresponding to the title, from :bb:cfg:`titleURL`. .. py:attribute:: buildbotURL The URL of this buildmaster, for use in constructing WebStatus URLs; from :bb:cfg:`buildbotURL`. .. py:attribute:: logCompressionLimit The current log compression limit, from :bb:cfg:`logCompressionLimit`. .. py:attribute:: logCompressionMethod The current log compression method, from :bb:cfg:`logCompressionMethod`. .. py:attribute:: logMaxSize The current log maximum size, from :bb:cfg:`logMaxSize`. .. py:attribute:: logMaxTailSize The current log tail maximum size, from :bb:cfg:`logMaxTailSize`. .. py:attribute:: logEncoding The encoding to expect when logs are provided as bytestrings, from :bb:cfg:`logEncoding`. .. py:attribute:: properties A :py:class:`~buildbot.process.properties.Properties` instance containing global properties, from :bb:cfg:`properties`. .. py:attribute:: collapseRequests A callable, or True or False, describing how to collapse requests; from :bb:cfg:`collapseRequests`. .. 
py:attribute:: prioritizeBuilders A callable, or None, used to prioritize builders; from :bb:cfg:`prioritizeBuilders`. .. py:attribute:: codebaseGenerator A callable, or None, used to determine the codebase from an incoming :py:class:`~buildbot.changes.changes.Change`, from :bb:cfg:`codebaseGenerator`. .. py:attribute:: protocols The per-protocol port specification for worker connections; based on :bb:cfg:`protocols`. .. py:attribute:: multiMaster If true, then this master is part of a cluster; based on :bb:cfg:`multiMaster`. .. py:attribute:: manhole The manhole instance to use, or None; from :bb:cfg:`manhole`. The remaining attributes contain compound configuration structures, usually as dictionaries: .. py:attribute:: validation Validation regular expressions, a dictionary from :bb:cfg:`validation`. It is safe to assume that all expected keys are present. .. py:attribute:: db Database specification, a dictionary with key :bb:cfg:`db_url`. It is safe to assume that this key is present. .. py:attribute:: metrics The metrics configuration from :bb:cfg:`metrics`, or an empty dictionary by default. .. py:attribute:: caches The cache configuration, from :bb:cfg:`caches` as well as the deprecated :bb:cfg:`buildCacheSize` and :bb:cfg:`changeCacheSize` parameters. The keys ``Builds`` and ``Caches`` are always available; other keys should use ``config.caches.get(cachename, 1)``. .. py:attribute:: schedulers The dictionary of scheduler instances, by name, from :bb:cfg:`schedulers`. .. py:attribute:: builders The list of :py:class:`BuilderConfig` instances from :bb:cfg:`builders`. Builders specified as dictionaries in the configuration file are converted to instances. .. py:attribute:: workers The list of :py:class:`Worker` instances from :bb:cfg:`workers`. .. py:attribute:: change_sources The list of :py:class:`IChangeSource` providers from :bb:cfg:`change_source`. .. py:attribute:: user_managers The list of user managers providers from :bb:cfg:`user_managers`. .. 
py:attribute:: www The web server configuration from :bb:cfg:`www`. The keys ``port`` and ``url`` are always available. .. py:attribute:: services The list of additional plugin services. .. py:classmethod:: loadFromDict(config_dict, filename) :param dict config_dict: The dictionary containing the configuration to load :param string filename: The filename to use when reporting errors :returns: new :py:class:`MasterConfig` instance Load the configuration from the given dictionary. Loading of the configuration file is generally triggered by the master, using the following class: .. py:class:: FileLoader .. py:method:: __init__(basedir, filename) :param string basedir: directory to which config is relative :param string filename: the configuration file to load The filename is treated as relative to basedir if it is not absolute. .. py:method:: loadConfig(basedir, filename) :returns: new :py:class:`MasterConfig` instance Load the configuration in the given file. Aside from syntax errors, this will also detect a number of semantic errors such as multiple schedulers with the same name. .. py:function:: loadConfigDict(basedir, filename) :param string basedir: directory to which config is relative :param string filename: the configuration file to load :raises: :py:exc:`ConfigErrors` if any errors occur :returns dict: The ``BuildmasterConfig`` dictionary. Load the configuration dictionary in the given file. The filename is treated as relative to basedir if it is not absolute. Builder Configuration --------------------- .. py:class:: BuilderConfig([keyword args]) This class parameterizes configuration of builders; see :ref:`Builder-Configuration` for its arguments. The constructor checks for errors, applies defaults, and sets the properties described here. Most are simply copied from the constructor argument of the same name. Users may subclass this class to add defaults, for example. .. py:attribute:: name The builder's name. .. py:attribute:: factory The builder's factory. 
.. py:attribute:: workernames The builder's worker names (a list, regardless of whether the names were specified with ``workername`` or ``workernames``). .. py:attribute:: builddir The builder's builddir. .. py:attribute:: workerbuilddir The builder's worker-side builddir. .. py:attribute:: category The builder's category. .. py:attribute:: nextWorker The builder's nextWorker callable. .. py:attribute:: nextBuild The builder's nextBuild callable. .. py:attribute:: canStartBuild The builder's canStartBuild callable. .. py:attribute:: locks The builder's locks. .. py:attribute:: env The builder's environment variables. .. py:attribute:: properties The builder's properties, as a dictionary. .. py:attribute:: collapseRequests The builder's collapseRequests callable. .. py:attribute:: description The builder's description, displayed in the web status. Error Handling -------------- If any errors are encountered while loading the configuration, :py:func:`buildbot.config.error` should be called. This can occur both in the configuration-loading code, and in the constructors of any objects that are instantiated in the configuration - change sources, workers, schedulers, build steps, and so on. .. py:function:: error(error) :param error: error to report :raises: :py:exc:`ConfigErrors` if called at build-time This function reports a configuration error. If a config file is being loaded, then the function merely records the error, and allows the rest of the configuration to be loaded. At any other time, it raises :py:exc:`ConfigErrors`. This is done so that all config errors can be reported, rather than just the first one. .. py:exception:: ConfigErrors([errors]) :param list errors: errors to report This exception represents errors in the configuration. It supports reporting multiple errors to the user simultaneously, e.g., when several consistency checks fail. .. py:attribute:: errors A list of detected errors, each given as a string. .. 
py:method:: addError(msg) :param string msg: the message to add Add another error message to the (presumably not-yet-raised) exception. Configuration in AngularJS ========================== The AngularJS frontend often needs access to the local master configuration. This is accomplished automatically by converting various pieces of the master configuration to a dictionary. The :py:class:`~buildbot.interfaces.IConfigured` interface represents a way to convert any object into a JSON-able dictionary. .. py:class:: buildbot.interfaces.IConfigured Providers of this interface provide a method to get their configuration as a dictionary: .. py:method:: getConfigDict() :returns: object Return the configuration of this object. Note that despite the name, the return value may not be a dictionary. Any object can be "cast" to an :py:class:`~buildbot.interfaces.IConfigured` provider. The ``getConfigDict`` method for basic Python objects simply returns the value. :: IConfigured(someObject).getConfigDict() .. py:class:: buildbot.util.ConfiguredMixin This class is a basic implementation of :py:class:`~buildbot.interfaces.IConfigured`. Its :py:meth:`getConfigDict` method simply returns the instance's ``name`` attribute (all objects configured must have the ``name`` attribute). .. py:method:: getConfigDict() :returns: object Return a config dictionary representing this object. All of this is used by to serve ``/config.js`` to the JavaScript frontend. .. _developer-Reconfiguration: Reconfiguration --------------- When the buildmaster receives a signal to begin a reconfig, it re-reads the configuration file, generating a new :py:class:`MasterConfig` instance, and then notifies all of its child services via the reconfig mechanism described below. The master ensures that at most one reconfiguration is taking place at any time. See :ref:`master-service-hierarchy` for the structure of the Buildbot service tree. 
To simplify initialization, a reconfiguration is performed immediately on master startup. As a result, services only need to implement their configuration handling once, and can use ``startService`` for initialization. See below for instructions on implementing configuration of common types of components in Buildbot. .. note:: Because Buildbot uses a pure-Python configuration file, it is not possible to support all forms of reconfiguration. In particular, when the configuration includes custom subclasses or modules, reconfiguration can turn up some surprising behaviors due to the dynamic nature of Python. The reconfig support in Buildbot is intended for "intermediate" uses of the software, where there are fewer surprises. .. index:: Service Mixins; ReconfigurableServiceMixin Reconfigurable Services ....................... Instances which need to be notified of a change in configuration should be implemented as Twisted services and mix in the :py:class:`ReconfigurableServiceMixin` class, overriding the :py:meth:`~ReconfigurableServiceMixin.reconfigServiceWithBuildbotConfig` method. The services implementing ``ReconfigurableServiceMixin`` operate on whole master configuration. In some cases they are effectively singletons that handle configuration identified by a specific configuration key. Such singletons often manage non-singleton services as children and pass bits of its own configuration when reconfiguring these children. ``BuildbotServiceManager`` is one internal implementation of ``ReconfigurableServiceMixin`` which accepts a list of child service configurations as its configuration and then intelligently reconfigures child services on changes. Non-singleton ``ReconfigurableServiceMixin`` services are harder to write as they must manually pick its configuration from whole master configuration. The parent service also needs explicit support for this kind of setup to work correctly. .. py:class:: ReconfigurableServiceMixin .. 
py:method:: reconfigServiceWithBuildbotConfig(new_config) :param new_config: new master configuration :type new_config: :py:class:`MasterConfig` :returns: Deferred This method notifies the service that it should make any changes necessary to adapt to the new configuration values given. This method will be called automatically after a service is started. It is generally too late at this point to roll back the reconfiguration, so if possible, any errors should be detected in the :py:class:`MasterConfig` implementation. Errors are handled as best as possible and communicated back to the top level invocation, but such errors may leave the master in an inconsistent state. :py:exc:`ConfigErrors` exceptions will be displayed appropriately to the user on startup. Subclasses should always call the parent class's implementation. For :py:class:`MultiService` instances, this will call any child services' :py:meth:`reconfigService` methods, as appropriate. This will be done sequentially, such that the Deferred from one service must fire before the next service is reconfigured. .. py:attribute:: priority Child services are reconfigured in order of decreasing priority. The default priority is 128, so a service that must be reconfigured before others should be given a higher priority. Change Sources .............. When reconfiguring, there is no method by which Buildbot can determine that a new :py:class:`~buildbot.changes.base.ChangeSource` represents the same source as an existing :py:class:`~buildbot.changes.base.ChangeSource`, but with different configuration parameters. As a result, the change source manager compares the lists of existing and new change sources using equality, stops any existing sources that are not in the new list, and starts any new change sources that do not already exist. 
:py:class:`~buildbot.changes.base.ChangeSource` inherits :py:class:`~buildbot.util.ComparableMixin`, so change sources are compared based on the attributes described in their ``compare_attrs``. If a change source does not make reference to any global configuration parameters, then there is no need to inherit :py:class:`ReconfigurableServiceMixin`, as a simple comparison and ``startService`` and ``stopService`` will be sufficient. If the change source does make reference to global values, e.g., as default values for its parameters, then it must inherit :py:class:`ReconfigurableServiceMixin` to support the case where the global values change. Schedulers .......... Schedulers have names, so Buildbot can determine whether a scheduler has been added, removed, or changed during a reconfig. Old schedulers will be stopped, new schedulers will be started, and both new and existing schedulers will see a call to :py:meth:`~ReconfigurableServiceMixin.reconfigService`, if such a method exists. For backward compatibility, schedulers that do not support reconfiguration will be stopped, and a new scheduler will be started when their configuration changes. During a reconfiguration, if a new and old scheduler's fully qualified class names differ, then the old class will be stopped, and the new class will be started. This supports the case when a user changes, for example, a :bb:sched:`Nightly` scheduler to a :bb:sched:`Periodic` scheduler without changing the name. Because Buildbot uses :py:class:`~buildbot.schedulers.base.BaseScheduler` instances directly in the configuration file, a reconfigured scheduler must extract its new configuration information from another instance of itself. Custom Subclasses ~~~~~~~~~~~~~~~~~ Custom subclasses are most often defined directly in the configuration file, or in a Python module that is reloaded with ``reload`` every time the configuration is loaded. 
Because of the dynamic nature of Python, this creates a new object representing the subclass every time the configuration is loaded -- even if the class definition has not changed. Note that if a scheduler's class changes in a reconfig, but the scheduler's name does not, it will still be treated as a reconfiguration of the existing scheduler. This means that implementation changes in custom scheduler subclasses will not be activated with a reconfig. This behavior avoids stopping and starting such schedulers on every reconfig, but can make development difficult. One workaround for this is to change the name of the scheduler before each reconfig - this will cause the old scheduler to be stopped, and the new scheduler (with the new name and class) to be started. Workers ....... Similar to schedulers, workers are specified by name, so new and old configurations are first compared by name, and any workers to be added or removed are noted. Workers for which the fully-qualified class name has changed are also added and removed. All workers have their :py:meth:`~ReconfigurableServiceMixin.reconfigService` method called. This method takes care of the basic worker attributes, including changing the PB registration if necessary. Any subclasses that add configuration parameters should override :py:meth:`~ReconfigurableServiceMixin.reconfigService` and update those parameters. As with schedulers, because the :py:class:`~buildbot.worker.AbstractWorker` instance is given directly in the configuration, a reconfigured worker instance must extract its new configuration from another instance of itself. User Managers ............. Since user managers are rarely used, and their purpose is unclear, they are always stopped and re-started on every reconfig. This may change in future versions. Status Receivers ................ At every reconfig, all status listeners are stopped, and new versions are started. 
buildbot-3.4.0/master/docs/developer/data.rst000066400000000000000000000573321413250514000212020ustar00rootroot00000000000000.. _Data_API: Data API ======== The data API is an interface against which various internal and external components can be written. It is a lower-level interface compared to the REST API that exposes more functionality. It combines access to stored state and messages, ensuring consistency between them. The callers can receive a dump of the current state plus changes to that state, without missing or duplicating messages. Sections -------- The data API is divided into four sections: * getters - fetching data from the db API * subscriptions - subscribing to messages from the mq layer * control - allows state to be changed in specific ways by sending appropriate messages (e.g., stopping a build) * updates - direct updates to state appropriate messages. The getters and subscriptions are exposed everywhere. Access to the control section must be authenticated at higher levels as the data layer does no authentication. The updates section is for use only by the process layer. The interfaces for all sections, but the updates section, are intended to be language-agnostic. That is, they should be callable from JavaScript via HTTP, or via some other interface added to Buildbot after the fact. Getters +++++++ The getters section can get either a single resource, or a list of resources. Getting a single resource requires a resource identifier (a tuple of strings) and a set of options to support automatic expansion of links to other resources (thus saving round-trips). Lists are requested with a partial resource identifier (a tuple of strings) and an optional set of filter options. In some cases, certain filters are implicit in the path, e.g., the list of buildsteps for a particular build. Subscriptions +++++++++++++ Message subscriptions can be made to anything that can be listed or gotten from the getters section, using the same resource identifiers. 
Options and explicit filters are not supported here. A message contains only the most basic information about a resource and a list of subscription results for every new resource of the desired type. Implicit filters are supported. Control +++++++ The control section defines a set of actions that cause Buildbot to behave in a certain way, e.g., rebuilding a build or shutting down a worker. Actions correspond to a particular resource, although sometimes that resource is the root resource (an empty tuple). Updates +++++++ The updates section defines a free-form set of methods that Buildbot's process implementation calls to update data. Most update methods both modify state via the db API and send a message via the mq API. Some are simple wrappers for these APIs, while others contain more complex logic, e.g., building a source stamp set for a collection of changes. This section is the proper place to put common functionality, e.g., rebuilding builds or assembling buildsets. Concrete Interfaces ------------------- Python Interface ++++++++++++++++ .. py:module:: buildbot.data.connector Within the buildmaster process, the root of the data API is available at ``self.master.data``, which is a :py:class:`DataConnector` instance. .. py:class:: DataConnector This class implements the root of the data API. Within the buildmaster process, the data connector is available at ``self.master.data``. The first three sections are implemented through the :py:meth:`get` and :py:meth:`control` methods, while the updates section is implemented using the :py:attr:`updates` attribute. The ``path`` argument to these methods should always be a tuple. Integer arguments can be presented as either integers or strings that can be parsed by ``int``; all other arguments must be strings. .. py:method:: get(path, filters=None, fields=None, order=None, limit=None, offset=None) :param tuple path: A tuple of path elements representing the API path to fetch. 
Numbers can be passed as strings or integers :param filters: result spec filters :param fields: result spec fields :param order: result spec order :param limit: result spec limit :param offset: result spec offset :raises: :py:exc:`~buildbot.data.exceptions.InvalidPathError` :returns: a resource or list via Deferred, or None This method implements the getters section. Depending on the path, it will return a single resource or a list of resources. If a single resource is not specified, it returns ``None``. The ``filters``, ``fields``, ``order``, ``limit``, and ``offset`` are passed to the :py:class:`~buildbot.data.resultspec.ResultSpec`, which will then be forwarded to the endpoint. The return value is composed of simple Python objects - lists, dicts, strings, numbers, and None. For example, the following will query the buildrequests endpoint, filter for all non-completed buildrequests that were submitted after 1/5/2021, and return the buildrequest and buildset ids for the last 2 buildrequests in the collection: .. code-block:: python from datetime import datetime from buildbot.data.resultspec import Filter submitted_at = datetime(2021, 5, 1).timestamp() buildrequests = yield self.master.data.get( ("buildrequests",), filters=[ Filter("submitted_at", "gt", [submitted_at]), Filter("complete", "eq", [False]), ], fields=["buildrequestid", "buildsetid"], order=("-buildrequestid",), limit=2 ) .. py:method:: getEndpoint(path) :param tuple path: A tuple of path elements representing the API path. Numbers can be passed as strings or integers. :raises: :py:exc:`~buildbot.data.exceptions.InvalidPathError` :returns: tuple of endpoint and a dictionary of keyword arguments from the path Get the endpoint responsible for the given path, along with any arguments extracted from the path. This can be used by callers that need access to information from the endpoint beyond that returned by ``get``. .. 
py:method:: produceEvent(rtype, msg, event) :param rtype: the name identifying a resource type :param msg: a dictionary describing the msg to send :param event: the event to produce This method implements the production of an event, for the rtype identified by its name string. Usually, this is the role of the data layer to produce the events inside the update methods. For the potential use cases where it would make sense to solely produce an event, and not update data, please use this API, rather than directly calling mq. It ensures the event is sent to all the routingkeys specified by eventPathPatterns. .. py:method:: control(action, args, path) :param action: a short string naming the action to perform :param args: dictionary containing arguments for the action :param tuple path: A tuple of path elements representing the API path. Numbers can be passed as strings or integers. :raises: :py:exc:`~buildbot.data.exceptions.InvalidPathError` :returns: a resource or list via Deferred, or None This method implements the control section. Depending on the path, it may return a newly created resource. For example, the following will cancel a buildrequest (and the associated build, if one has already started): .. code-block:: python buildrequestid = 10 yield self.master.data.control( "cancel", {"reason": "User requested cancellation"}, ("buildrequests", buildrequestid), ) .. py:method:: allEndpoints() :returns: list of endpoint specifications This method returns the deprecated API spec. Please use :ref:`REST_API_specs` instead. .. py:attribute:: rtypes This object has an attribute for each resource type, named after the singular form (e.g., `self.master.data.builder`). These attributes allow resource types to access one another for purposes of coordination. They are *not* intended for external access -- all external access to the data API should be via the methods above or update methods. Updates ....... 
The updates section is available at ``self.master.data.updates``, and contains a number of ad-hoc methods needed by the process modules. .. note:: The update methods are implemented in resource type classes, but through some initialization-time magic, all appear as attributes of ``self.master.data.updates``. The update methods are found in the resource type pages. Exceptions .......... .. py:module:: buildbot.data.exceptions .. py:exception:: DataException This is a base class for all other Data API exceptions. .. py:exception:: InvalidPathError The path argument was invalid or unknown. .. py:exception:: InvalidOptionError A value in the ``options`` argument was invalid or ill-formed. .. py:exception:: SchedulerAlreadyClaimedError Identical to :py:exc:`~buildbot.db.schedulers.SchedulerAlreadyClaimedError`. Web Interface +++++++++++++ The HTTP interface is implemented by the :py:mod:`buildbot.www` package, as configured by the user. Part of that configuration is a base URL, which is considered a prefix for all paths mentioned here. See :ref:`WWW-base-app` for more information. .. _Data Model: Extending the Data API ---------------------- .. py:currentmodule:: buildbot.data.base The data API may be extended in various ways: adding new endpoints, new fields to resource types, new update methods, or entirely new resource types. In any case, you should only extend the API if you plan to submit the extensions to be merged into Buildbot itself. Private API extensions are strongly discouraged. Adding Resource Types +++++++++++++++++++++ You'll need to use both plural and singular forms of the resource type; in this example, we'll use 'pub' and 'pubs'. You can also examine an existing file, like :src:`master/buildbot/data/changes.py`, to see when to use which form. 
In ``master/buildbot/data/pubs.py``, create a subclass of :py:class:`ResourceType`:: from buildbot.data import base class Pub(base.ResourceType): name = "pub" endpoints = [] keyFields = ['pubid'] class EntityType(types.Entity): pubid = types.Integer() name = types.String() num_taps = types.Integer() closes_at = types.Integer() entityType = EntityType(name, 'Pub') .. py:class:: ResourceType .. py:attribute:: name :type: string The singular, lower-cased name of the resource type. This becomes the first component in message routing keys. .. py:attribute:: plural :type: string The plural, lower-cased name of the resource type. This becomes the key containing the data in REST responses. .. py:attribute:: endpoints :type: list Subclasses should set this to a list of endpoint classes for this resource type. .. py:attribute:: eventPathPatterns :type: str This attribute should list the message routes where events should be sent, encoded as a REST like endpoint: ``pub/:pubid`` In the example above, a call to ``produceEvent({'pubid': 10, 'name': 'Winchester'}, 'opened')`` would result in a message with routing key ``('pub', '10', 'opened')``. Several paths can be specified in order to be consistent with REST endpoints. .. py:attribute:: entityType :type: :py:class:`buildbot.data.types.Entity` The entity type describes the types of all of the fields in this particular resource type. See :py:class:`buildbot.data.types.Entity` and :ref:`Adding-Fields-To-Resource-Types`. The parent class provides the following methods .. py:method:: getEndpoints() :returns: a list of :py:class:`~Endpoint` instances This method returns a list of the endpoint instances associated with the resource type. The base method instantiates each class in the :py:attr:`~ResourceType.endpoints` attribute. Most subclasses can simply list :py:class:`~Endpoint` subclasses in ``endpoints``. .. 
py:method:: produceEvent(msg, event) :param dict msg: the message body :param string event: the name of the event that has occurred This is a convenience method to produce an event message for this resource type. It formats the routing key correctly and sends the message, thereby ensuring consistent routing-key structure. Like all Buildbot source files, every resource type module must have corresponding tests. These should thoroughly exercise all update methods. All resource types must be documented in the Buildbot documentation and linked from the bottom of this file (:src:`master/docs/developer/data.rst`). Adding Endpoints ++++++++++++++++ Each resource path is implemented as an :py:class:`~Endpoint` instance. In most cases, each instance is of a different class, but this is not required. The data connector's :py:meth:`~buildbot.data.connector.DataConnector.get` and :py:meth:`~buildbot.data.connector.DataConnector.control` methods both take a ``path`` argument that is used to look up the corresponding endpoint. The path matching is performed by :py:mod:`buildbot.util.pathmatch`, and supports automatically extracting variable fields from the path. See that module's description for details. .. py:class:: Endpoint .. py:attribute:: pathPatterns :type: string This attribute defines the path patterns which incoming paths must match to select this endpoint. Paths are specified as URIs, and can contain variables as parsed by :py:class:`buildbot.util.pathmatch.Matcher`. Multiple paths are separated by whitespace. For example, the following specifies two paths with the second having a single variable:: pathPatterns = """ /bugs /component/i:component_name/bugs """ .. py:attribute:: rootLinkName :type: string If set, then the first path pattern for this endpoint will be included as a link in the root of the API. This should be set for any endpoints that begin an explorable tree. .. 
py:attribute:: isCollection :type: boolean If true, then this endpoint returns collections of resources. .. py:attribute:: isRaw :type: boolean If true, then this endpoint returns a raw resource. Raw resources are used to get the data not encoded in JSON via the REST API. In the REST principles, this should be done via another endpoint, and not via a query parameter. The ``get()`` method of the endpoint should return the following data structure:: { "raw": u"raw data to be sent to the http client", "mime-type": u"", "filename": u"filename_to_be_used_in_content_disposition_attachment_header" } .. py:method:: get(options, resultSpec, kwargs) :param dict options: model-specific options :param resultSpec: a :py:class:`~buildbot.data.resultspec.ResultSpec` instance describing the desired results :param dict kwargs: fields extracted from the path :returns: data via Deferred Get data from the endpoint. This should return either a list of dictionaries (for list endpoints), a dictionary, or None (both for details endpoints). The endpoint is free to handle any part of the result spec. When doing so, it should remove the relevant configuration from the spec. See below. Any result spec configuration that remains on return will be applied automatically. .. py:method:: control(action, args, kwargs) :param action: a short string naming the action to perform :param args: dictionary containing arguments for the action :param kwargs: fields extracted from the path Continuing the pub example, a simple endpoint would look like this:: class PubEndpoint(base.Endpoint): pathPattern = ('pub', 'i:pubid') def get(self, resultSpec, kwargs): return self.master.db.pubs.getPub(kwargs['pubid']) Endpoint implementations must have unit tests. An endpoint's path should be documented in the ``.rst`` file for its resource type. The initial pass at implementing any endpoint should just ignore the ``resultSpec`` argument to ``get``. 
After that initial pass, the argument can be used to optimize certain types of queries. For example, if the resource type has many resources, but most real-life queries use the result spec to filter out all but a few resources from that group, then it makes sense for the endpoint to examine the result spec and allow the underlying DB API to do that filtering. When an endpoint handles parts of the result spec, it must remove those parts from the spec before it returns. See the documentation for :py:class:`~buildbot.data.resultspec.ResultSpec` for methods to do so. Note that endpoints must be careful not to alter the order of the filtering applied for a result spec. For example, if an endpoint implements pagination, then it must also completely implement filtering and ordering, since those operations precede pagination in the result spec application. Adding Messages +++++++++++++++ Message types are defined in :src:`master/buildbot/test/util/validation.py`, via the ``message`` module-level value. This is a dictionary of ``MessageValidator`` objects, one for each message type. The message type is determined from the first atom of its routing key. The ``events`` dictionary lists the possible last atoms of the routing key. It should be identical to the attribute of the ResourceType with the same name. Adding Update Methods +++++++++++++++++++++ Update methods are for use by the Buildbot process code, and as such are generally designed to suit the needs of that code. They generally encapsulate logic common to multiple users (e.g., creating buildsets), and they finish by performing modifications in the database and sending a corresponding message. In general, Buildbot does not depend on timing of either the database or message broker, so the order in which these operations are initiated is not important. Update methods are considered part of Buildbot's user-visible interface, and as such, incompatible changes should be avoided wherever possible. 
Instead, either add a new method (and potentially re-implement existing methods in terms of the new method) or add new, optional parameters to an existing method. If an incompatible change is unavoidable, it should be described clearly in the release notes. Update methods are implemented as methods of :py:class:`~buildbot.data.base.ResourceType` subclasses, decorated with ``@base.updateMethod``: .. py:function:: updateMethod(f) A decorator for :py:class:`~buildbot.data.base.ResourceType` subclass methods, indicating that the method should be copied to ``master.data.updates``. Returning to the pub example:: class PubResourceType(base.ResourceType): # ... @base.updateMethod @defer.inlineCallbacks def setPubTapList(self, pubid, beers): pub = yield self.master.db.pubs.getPub(pubid) # ... self.produceMessage(pub, 'taps-updated') Update methods should be documented in :src:`master/docs/developer/data.rst`. They should be thoroughly tested with unit tests. They should have a fake implementation in :src:`master/buildbot/test/fake/fakedata.py`. That fake implementation should be tested to match the real implementation in :src:`master/buildbot/test/unit/test_data_connector.py`. .. _Adding-Fields-to-Resource-Types: Adding Fields to Resource Types +++++++++++++++++++++++++++++++ .. py:module:: buildbot.data.types The details of the fields of a resource type are rigorously enforced at several points in the Buildbot tests. The enforcement is performed by the :py:mod:`buildbot.data.types` module. The module provides a number of type classes for basic and compound types. Each resource type class defines its entity type in its :py:attr:`~buildbot.data.base.ResourceType.entityType` class attribute. Other resource types may refer to this class attribute if they embed an entity of that type. The types are used both for tests and by the REST interface to properly decode user-supplied query parameters. Basic Types ........... .. py:class:: Integer() An integer. 
:: myid = types.Integer() .. py:class:: String() A string. Strings must always be Unicode. :: name = types.String() .. py:class:: Binary() A binary bytestring. :: data = types.Binary() .. py:class:: Boolean() A boolean value. :: complete = types.Boolean() .. py:class:: Identifier(length) An identifier; see :ref:`Identifier `. The constructor argument specifies the maximum length. :: ident = types.Identifier(25) Compound Types .............. .. py:class:: NoneOk(nestedType) Either the nested type, or None. :: category = types.NoneOk(types.String()) .. py:class:: List(of) A list of objects. The named constructor argument ``of`` specifies the type of the list elements. :: tags = types.List(of=types.String()) .. py:class:: SourcedProperties() A data structure representing properties with their sources, in the form ``{name: (value, source)}``. The property name and source must be Unicode, and the value must be JSON-able. :: props = types.SourcedProperties() Entity Type ........... .. py:class:: Entity(name) A data resource is represented by a dictionary with well-known keys. To define those keys and their values, subclass the :py:class:`Entity` class within your ResourceType class and include each field as an attribute:: class MyStuff(base.ResourceType): name = "mystuff" # ... class EntityType(types.Entity): myid = types.Integer() name = types.String() data = types.Binary() complete = types.Boolean() ident = types.Identifier(25) category = types.NoneOk(types.String()) tags = types.List(of=types.String()) props = types.SourcedProperties() Then instantiate the class with the resource type name. The second argument is used for GraphQL endpoints:: entityType = EntityType(name, 'MyStuff') To embed another entity type, reference its entityType class attribute:: class EntityType(types.Entity): # ... master = masters.Master.entityType Data Model ---------- The data API enforces a strong and well-defined model on Buildbot's data. 
This model is influenced by REST, in the sense that it defines resources, representations for resources, and identifiers for resources. For each resource type, the API specifies: * the attributes of the resource and their types (e.g., changes have a string specifying their project) * the format of links to other resources (e.g., buildsets to sourcestamp sets) * the paths relating to the resource type * the format of routing keys for messages relating to the resource type * the events that can occur on that resource (e.g., a buildrequest can be claimed) * options and filters for getting resources Some resource type attributes only appear in certain formats, as noted in the documentation for the resource types. In general, messages do not include any optional attributes, nor links. Paths are given here separated by slashes, with key names prefixed by ``:`` and described below. Similarly, message routing keys given here are separated by dots, with key names prefixed by ``$``. The translation to tuples and other formats should be obvious. All strings in the data model are unicode strings. buildbot-3.4.0/master/docs/developer/database.rst000066400000000000000000000475551413250514000220430ustar00rootroot00000000000000.. _developer-database: Database ======== Buildbot stores most of its state in a database. This section describes the database connector classes, which allow other parts of Buildbot to access the database. It also describes how to modify the database schema and the connector classes themselves. Database Overview ----------------- All access to the Buildbot database is mediated by database connector classes. These classes provide a functional, asynchronous interface to other parts of Buildbot, and encapsulate the database-specific details in a single location in the codebase. The connector API, defined below, is a stable API in Buildbot, and can be called from any other component. 
Given a master ``master``, the root of the database connectors is available at ``master.db``, so, for example, the state connector's ``getState`` method is ``master.db.state.getState``. All the connectors use `SQLAlchemy Core `_ to achieve (almost) database-independent operation. Note that the SQLAlchemy ORM is not used in Buildbot. Database queries are carried out in threads, and report their results back to the main thread via Twisted Deferreds. Schema ------ Changes to the schema are accomplished through migration scripts, supported by `Alembic `_. The schema itself is considered an implementation detail, and may change significantly from version to version. Users should rely on the API (below), rather than performing queries against the database itself. Identifier ---------- .. _type-identifier: Restrictions on many string fields in the database are referred to as the Identifier concept. An "identifier" is a nonempty unicode string of limited length, containing only UTF-8 alphanumeric characters along with ``-`` (dash) and ``_`` (underscore), and not beginning with a digit. Wherever an identifier is used, the documentation will give the maximum length in characters. The function :py:func:`buildbot.util.identifiers.isIdentifier` is useful to verify a well-formed identifier. Writing Database Connector Methods ---------------------------------- The information above is intended for developers working on the rest of Buildbot, and treating the database layer as an abstraction. The remainder of this section describes the internals of the database implementation, and is intended for developers modifying the schema or adding new methods to the database layer. .. warning:: It's difficult to change the database schema, especially after it has been released. Changing the database API is disruptive to users. Consider very carefully the future-proofing of any changes here! The DB Connector and Components ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. 
py:module:: buildbot.db.connector .. py:class:: DBConnector The root of the database connectors, ``master.db``, is a :class:`~buildbot.db.connector.DBConnector` instance. Its main purpose is to hold a reference to each of the connector components, but it also handles timed cleanup tasks. If you are adding a new connector component, import its module and create an instance of it in this class's constructor. .. py:module:: buildbot.db.base .. py:class:: DBConnectorComponent This is the base class for connector components. There should be no need to override the constructor defined by this base class. .. py:attribute:: db A reference to the :class:`~buildbot.db.connector.DBConnector`, so that connector components can use e.g., ``self.db.pool`` or ``self.db.model``. In the unusual case that a connector component needs access to the master, the easiest path is ``self.db.master``. .. py:method:: checkLength(col, value) For use by subclasses to check that 'value' will fit in 'col', where 'col' is a table column from the model. Ignore this check for database engines that either provide this error themselves (postgres) or that do not enforce maximum-length restrictions (sqlite). .. py:method:: findSomethingId(self, tbl, whereclause, insert_values, _race_hook=None, autoCreate=True) Find (using ``whereclause``) or add (using ``insert_values``) a row to ``table``, and return the resulting ID. If ``autoCreate`` == False, we will not automatically insert the row. .. py:method:: hashColumns(*args) Hash the given values in a consistent manner: None is represented as \xf5, an invalid unicode byte; strings are converted to utf8; and integers are represented by their decimal expansion. The values are then joined by '\0' and hashed with sha1. .. py:method:: doBatch(batch, batch_n=500) Returns an iterator that batches items so as not to push too many things in a single request. SQLite in particular has a limit of 999 variables that it can accept in a single request. 
Direct Database Access ~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.pool The connectors all use `SQLAlchemy Core `_ as a wrapper around database client drivers. Unfortunately, SQLAlchemy is a synchronous library, so some extra work is required to use it in an asynchronous context, like in Buildbot. This is accomplished by deferring all database operations to threads, and returning a Deferred. The :class:`~buildbot.db.pool.Pool` class takes care of the details. A connector method should look like this:: def myMethod(self, arg1, arg2): def thd(conn): q = ... # construct a query for row in conn.execute(q): ... # do something with the results return ... # return an interesting value return self.db.pool.do(thd) Picking that apart, the body of the method defines a function named ``thd`` taking one argument, a :class:`Connection ` object. It then calls ``self.db.pool.do``, passing the ``thd`` function. This function is called in a thread, and can make blocking calls to SQLAlchemy as desired. The ``do`` method will return a Deferred that will fire with the return value of ``thd``, or with a failure representing any exception raised by ``thd``. The return value of ``thd`` must not be an SQLAlchemy object - in particular, any :class:`ResultProxy ` objects must be parsed into lists or other data structures before they are returned. .. warning:: As the name ``thd`` indicates, the function runs in a thread. It should not interact with any other part of Buildbot, nor with any of the Twisted components that expect to be accessed from the main thread -- the reactor, Deferreds, etc. Queries can be constructed using any of the SQLAlchemy core methods, using tables from :class:`~buildbot.db.model.Model`, and executed with the connection object, ``conn``. .. note:: SQLAlchemy requires the use of a syntax that is forbidden by pep8. If in where clauses you need to select rows where a value is NULL, you need to write (`tbl.c.value == None`). 
This form is forbidden by pep8 which requires the use of `is None` instead of `== None`. As sqlalchemy is using operator overloading to implement pythonic SQL statements, and the `is` operator is not overloadable, we need to keep the `==` operators. In order to solve this issue, Buildbot uses `buildbot.db.NULL` constant, which is `None`. So instead of writing `tbl.c.value == None`, please write `tbl.c.value == NULL`). .. py:class:: DBThreadPool .. py:method:: do(callable, ...) :returns: Deferred Call ``callable`` in a thread, with a :class:`Connection ` object as first argument. Returns a deferred that will fire with the results of the callable, or with a failure representing any exception raised during its execution. Any additional positional or keyword arguments are passed to ``callable``. .. py:method:: do_with_engine(callable, ...) :returns: Deferred Similar to :meth:`do`, call ``callable`` in a thread, but with an :class:`Engine ` object as first argument. This method is only used for schema manipulation, and should not be used in a running master. Database Schema ~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.model Database connector methods access the database through SQLAlchemy, which requires access to Python objects representing the database tables. That is handled through the model. .. py:class:: Model This class contains the canonical description of the Buildbot schema. It is represented in the form of SQLAlchemy :class:`Table ` instances, as class variables. At runtime, the model is available at ``master.db.model``. So, for example, the ``buildrequests`` table can be referred to as ``master.db.model.buildrequests``, and columns are available in its ``c`` attribute. The source file, :src:`master/buildbot/db/model.py`, contains comments describing each table; that information is not replicated in this documentation. Note that the model is not used for new installations or upgrades of the Buildbot database. 
See :ref:`Modifying-the-Database-Schema` for more information. .. py:attribute:: metadata The model object also has a ``metadata`` attribute containing a :class:`MetaData ` instance. Connector methods should not need to access this object. The metadata is not bound to an engine. The :py:class:`Model` class also defines some migration-related methods: .. py:method:: is_current() :returns: boolean via Deferred Returns true if the current database's version is current. .. py:method:: upgrade() :returns: Deferred Upgrades the database to the most recent schema version. Caching ~~~~~~~ .. py:currentmodule:: buildbot.db.base Connector component methods that get an object based on an ID are good candidates for caching. The :func:`~buildbot.db.base.cached` decorator makes this automatic: .. py:function:: cached(cachename) :param cache_name: name of the cache to use A decorator for "getter" functions that fetch an object from the database based on a single key. The wrapped method will only be called if the named cache does not contain the key. The wrapped function must take one argument (the key); the wrapper will take a key plus an optional ``no_cache`` argument which, if true, will cause it to invoke the underlying method even if the key is in the cache. The resulting method will have a ``cache`` attribute which can be used to access the underlying cache. In most cases, getter methods return a well-defined dictionary. Unfortunately, Python does not handle weak references to bare dictionaries, so components must instantiate a subclass of ``dict``. The whole assembly looks something like this:: class ThDict(dict): pass class ThingConnectorComponent(base.DBConnectorComponent): @base.cached('thdicts') def getThing(self, thid): def thd(conn): ... thdict = ThDict(thid=thid, attr=row.attr, ...) return thdict return self.db.pool.do(thd) Tests ~~~~~ It goes without saying that any new connector methods must be fully tested! 
You will also want to add an in-memory implementation of the methods to the fake classes in ``master/buildbot/test/fake/fakedb.py``. Non-DB Buildbot code is tested using these fake implementations in order to isolate that code from the database code, and to speed-up tests. The keys and types used in the return value from a connector's ``get`` methods are described in :src:`master/buildbot/test/util/validation.py`, via the ``dbdict`` module-level value. This is a dictionary of ``DictValidator`` objects, one for each return value. These values are used within test methods like this:: rv = yield self.db.masters.getMaster(7) validation.verifyDbDict(self, 'masterdict', rv) .. _Modifying-the-Database-Schema: Modifying the Database Schema ----------------------------- Changes to the schema are accomplished through migration scripts, supported by `Alembic `_. The schema is tracked by a revision number, stored in the ``alembic_version`` table. It can be anything, but by convention Buildbot uses revision numbers that are numbers incremented by one for each revision. The master will refuse to run with an outdated database. To make a change to the schema, first consider how to handle any existing data. When adding new columns, this may not be necessary, but table refactorings can be complex and require caution so as not to lose information. Refer to the documentation of Alembic for details of how database migration scripts should be written. The database schema itself is stored in :src:`master/buildbot/db/model.py` which should be updated to represent the new schema. Buildbot's automated tests perform a rudimentary comparison of an upgraded database with the model, but it is important to check the details - key length, nullability, and so on can sometimes be missed by the checks. If the schema and the upgrade scripts get out of sync, bizarre behavior can result. 
Changes to database schema should be reflected in corresponding fake database table definitions in :src:`master/buildbot/test/fakedb` The upgrade scripts should have unit tests. The classes in :src:`master/buildbot/test/util/migration.py` make this straightforward. Unit test scripts should be named e.g., :file:`test_db_migrate_versions_015_remove_bad_master_objectid.py`. The :src:`master/buildbot/test/integration/test_upgrade.py ` also tests upgrades, and will confirm that the resulting database matches the model. If you encounter implicit indexes on MySQL, that do not appear on SQLite or Postgres, add them to ``implied_indexes`` in :file:`master/buidlbot/db/model.py`. Foreign key checking -------------------- PostgreSQL and SQlite db backends check the foreign keys consistency. :bug:`2248` needs to be fixed so that we can support foreign key checking for MySQL. To maintain consistency with real db, fakedb can check the foreign key consistency of your test data. For this, just enable it with:: self.db = fakedb.FakeDBConnector(self.master, self) self.db.checkForeignKeys = True Note that tests that only use fakedb do not really need foreign key consistency, even if this is a good practice to enable it in new code. .. note: Since version `3.6.19 `_, sqlite can do `foreignkey checks `_, which help a lot for testing foreign keys constraint in a developer friendly environment. For compat reason, they decided to disable foreign key checks by default. Since 0.9.0b8, buildbot now enforces by default the foreign key checking, and is now dependent on sqlite3 >3.6.19, which was released in 2009. Database Compatibility Notes ---------------------------- Or: "If you thought any database worked right, think again" Because Buildbot works over a wide range of databases, it is generally limited to database features present in all supported backends. This section highlights a few things to watch out for. In general, Buildbot should be functional on all supported database backends. 
If use of a backend adds minor usage restrictions, or cannot implement some kinds of error checking, that is acceptable if the restrictions are well-documented in the manual. The metabuildbot tests Buildbot against all supported databases, so most compatibility errors will be caught before a release. Index Length in MySQL ~~~~~~~~~~~~~~~~~~~~~ .. index:: single: MySQL; limitations MySQL only supports about 330-character indexes. The actual index length is 1000 bytes, but MySQL uses 3-byte encoding for UTF8 strings. This is a longstanding bug in MySQL - see `"Specified key was too long; max key length is 1000 bytes" with utf8 `_. While this makes sense for indexes used for record lookup, it limits the ability to use unique indexes to prevent duplicate rows. InnoDB only supports indexes up to 255 unicode characters, which is why all indexed columns are limited to 255 characters in Buildbot. Transactions in MySQL ~~~~~~~~~~~~~~~~~~~~~ .. index:: single: MySQL; limitations Unfortunately, use of the MyISAM storage engine precludes real transactions in MySQL. ``transaction.commit()`` and ``transaction.rollback()`` are essentially no-ops: modifications to data in the database are visible to other users immediately, and are not reverted in a rollback. Referential Integrity in SQLite and MySQL ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. index:: single: SQLite; limitations .. index:: single: MySQL; limitations Neither MySQL nor SQLite enforce referential integrity based on foreign keys. Postgres does enforce it, however. If possible, test your changes on Postgres before committing, to check that tables are added and removed in the proper order. Subqueries in MySQL ~~~~~~~~~~~~~~~~~~~ .. index:: single: MySQL; limitations MySQL's query planner is easily confused by subqueries. For example, a DELETE query specifying id's that are IN a subquery will not work. The workaround is to run the subquery directly, and then execute a DELETE query for each returned id. 
If this weakness has a significant performance impact, it would be acceptable to conditionalize use of the subquery on the database dialect. Too Many Variables in SQLite ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. index:: single: SQLite; limitations Sqlite has a limitation on the number of variables it can use. This limitation is usually `SQLITE_LIMIT_VARIABLE_NUMBER=999 `_. There is currently no way with pysqlite to query the value of this limit. The C-api ``sqlite_limit`` is just not bound to the python. When you hit this problem, you will get error like the following: .. code-block:: none sqlalchemy.exc.OperationalError: (OperationalError) too many SQL variables u'DELETE FROM scheduler_changes WHERE scheduler_changes.changeid IN (?, ?, ?, ..., ?) You can use the method :py:meth:`doBatch` in order to write batching code in a consistent manner. Testing migrations with real databases -------------------------------------- By default Buildbot test suite uses SQLite database for testing database migrations. To use other database set ``BUILDBOT_TEST_DB_URL`` environment variable to value in `SQLAlchemy database URL specification `_. For example, to run tests with file-based SQLite database you can start tests in the following way: .. code-block:: bash BUILDBOT_TEST_DB_URL=sqlite:////tmp/test_db.sqlite trial buildbot.test Run databases in Docker ~~~~~~~~~~~~~~~~~~~~~~~ `Docker `_ allows to easily install and configure different databases locally in containers. To run tests with PostgreSQL: .. code-block:: bash # Install psycopg pip install psycopg2 # Start container with PostgreSQL 9.5 # It will listen on port 15432 on localhost sudo docker run --name bb-test-postgres -e POSTGRES_PASSWORD=password \ -p 127.0.0.1:15432:5432 -d postgres:9.5 # Start interesting tests BUILDBOT_TEST_DB_URL=postgresql://postgres:password@localhost:15432/postgres \ trial buildbot.test To run tests with MySQL: .. 
code-block:: bash # Install mysqlclient pip install mysqlclient # Start container with MySQL 5.5 # It will listen on port 13306 on localhost sudo docker run --name bb-test-mysql -e MYSQL_ROOT_PASSWORD=password \ -p 127.0.0.1:13306:3306 -d mysql:5.5 # Start interesting tests BUILDBOT_TEST_DB_URL=mysql+mysqldb://root:password@127.0.0.1:13306/mysql \ trial buildbot.test buildbot-3.4.0/master/docs/developer/database/000077500000000000000000000000001413250514000212715ustar00rootroot00000000000000buildbot-3.4.0/master/docs/developer/database/build_data.rst000066400000000000000000000061601413250514000241160ustar00rootroot00000000000000Build data connector ~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.build_data .. py:class:: BuildDataConnectorComponent This class handles build data. Build data is potentially large transient text data attached to the build that the steps can use for their operations. One of the use cases is to carry large amount of data from one step to another where storing that data on the worker is not feasible. This effectively forms a key-value store for each build. It is valid only until the build finishes and all reporters are done reporting the build result. After that the data may be removed from the database. An instance of this class is available at ``master.db.build_data``. Builds are indexed by *build_dataid* and their contents represented as *build_datadicts* (build data dictionaries), with the following keys: * ``id`` (the build data ID, globally unique) * ``buildid`` (the ID of the build that the data is attached to) * ``name`` (the name of the data) * ``value`` (the value of the data. It must be an instance of ``bytes``) * ``source`` (an string identifying the source of this value) .. py:method:: setBuildData(buildid, name, value, source) :param integer buildid: build id to attach data to :param unicode name: the name of the data :param bytestr value: the value of the data as ``bytes``. 
:param unicode source: the source of the data
py:class:: BuildersConnectorComponent This class handles the relationship between builder names and their IDs, as well as tracking which masters are configured for this builder. Builders are represented by master dictionaries with the following keys: * ``id`` -- the ID of this builder * ``name`` -- the builder name, a 20-character :ref:`identifier ` * ``masterids`` -- the IDs of the masters where this builder is configured (sorted by id) .. py:method:: findBuilderId(name, autoCreate=True) :param name: name of this builder :type name: 20-character :ref:`identifier ` :param autoCreate: automatically create the builder if name not found :type autoCreate: bool :returns: builder id via Deferred Return the builder ID for the builder with this builder name. If such a builder is already in the database, this returns the ID. If not and ``autoCreate`` is True, the builder is added to the database. .. py:method:: addBuilderMaster(builderid=None, masterid=None) :param integer builderid: the builder :param integer masterid: the master :returns: Deferred Add the given master to the list of masters on which the builder is configured. This will do nothing if the master and builder are already associated. .. py:method:: removeBuilderMaster(builderid=None, masterid=None) :param integer builderid: the builder :param integer masterid: the master :returns: Deferred Remove the given master from the list of masters on which the builder is configured. .. py:method:: getBuilder(builderid) :param integer builderid: the builder to check in :returns: Builder dict or None via Deferred Get the indicated builder. .. py:method:: getBuilders(masterid=None) :param integer masterid: ID of the master to which the results should be limited :returns: list of Builder dicts via Deferred Get all builders (in unspecified order). If ``masterid`` is given, then only builders configured on that master are returned. 
buildbot-3.4.0/master/docs/developer/database/buildrequests.rst000066400000000000000000000136701413250514000247250ustar00rootroot00000000000000Buildrequests connector ~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.buildrequests .. index:: double: BuildRequests; DB Connector Component .. py:exception:: AlreadyClaimedError Raised when a build request is already claimed, usually by another master. .. py:exception:: NotClaimedError Raised when a build request is not claimed by this master. .. py:class:: BuildRequestsConnectorComponent This class handles the complex process of claiming and unclaiming build requests, based on a polling model: callers poll for unclaimed requests with :py:meth:`getBuildRequests`, and then they attempt to claim the requests with :py:meth:`claimBuildRequests`. The claim can fail if another master has claimed the request in the interim. An instance of this class is available at ``master.db.buildrequests``. .. index:: brdict, brid Build requests are indexed by an ID referred to as a *brid*. The contents of a request are represented as build request dictionaries (brdicts) with keys * ``buildrequestid`` * ``buildsetid`` * ``builderid`` * ``buildername`` * ``priority`` * ``claimed`` (boolean, true if the request is claimed) * ``claimed_at`` (datetime object, time this request was last claimed) * ``claimed_by_masterid`` (integer, the id of the master that claimed this buildrequest) * ``complete`` (boolean, true if the request is complete) * ``complete_at`` (datetime object, time this request was completed) * ``submitted_at`` (datetime object, time this request was completed) * ``results`` (integer result code) * ``waited_for`` (boolean) .. py:method:: getBuildRequest(brid) :param brid: build request id to look up :returns: brdict or ``None``, via Deferred Get a single BuildRequest, in the format described above. This method returns ``None`` if there is no such buildrequest. 
Note that build requests are not cached, as the values in the database are not fixed. .. py:method:: getBuildRequests(buildername=None, complete=None, claimed=None, bsid=None, branch=None, repository=None, resultSpec=None) :param buildername: limit results to buildrequests for this builder :type buildername: string :param complete: if true, limit to completed buildrequests; if false, limit to incomplete buildrequests; if ``None``, do not limit based on completion. :param claimed: see below :param bsid: see below :param repository: the repository associated with the sourcestamps originating the requests :param branch: the branch associated with the sourcestamps originating the requests :param resultSpec: resultSpec containing filters sorting and paging request from data/REST API. If possible, the db layer can optimize the SQL query using this information. :returns: list of brdicts, via Deferred Get a list of build requests matching the given characteristics. Pass all parameters as keyword parameters to allow future expansion. The ``claimed`` parameter can be ``None`` (the default) to ignore the claimed status of requests; ``True`` to return only claimed builds, ``False`` to return only unclaimed builds, or a ``master ID`` to return only builds claimed by a particular master instance. A request is considered unclaimed if its ``claimed_at`` column is either NULL or 0, and it is not complete. If ``bsid`` is specified, then only build requests for that buildset will be returned. A build is considered completed if its ``complete`` column is 1; the ``complete_at`` column is not consulted. .. py:method:: claimBuildRequests(brids[, claimed_at=XX]) :param brids: ids of buildrequests to claim :type brids: list :param datetime claimed_at: time at which the builds are claimed :returns: Deferred :raises: :py:exc:`AlreadyClaimedError` Try to "claim" the indicated build requests for this buildmaster instance. 
The resulting deferred will fire normally on success, or fail with :py:exc:`AlreadyClaimedError` if *any* of the build requests are already claimed by another master instance. In this case, none of the claims will take effect. If ``claimed_at`` is not given, then the current time will be used. .. index:: single: MySQL; limitations .. index:: single: SQLite; limitations .. note:: On database backends that do not enforce referential integrity (e.g., SQLite), this method will not prevent claims for nonexistent build requests. On database backends that do not support transactions (MySQL), this method will not properly roll back any partial claims made before an :py:exc:`AlreadyClaimedError` is generated. .. py:method:: unclaimBuildRequests(brids) :param brids: ids of buildrequests to unclaim :type brids: list :returns: Deferred Release this master's claim on all of the given build requests. This will not unclaim requests that are claimed by another master, but will not fail in this case. The method does not check whether a request is completed. .. py:method:: completeBuildRequests(brids, results[, complete_at=XX]) :param brids: build request ids to complete :type brids: integer :param results: integer result code :type results: integer :param datetime complete_at: time at which the buildset was completed :returns: Deferred :raises: :py:exc:`NotClaimedError` Complete a set of build requests, all of which are owned by this master instance. This will fail with :py:exc:`NotClaimedError` if the build request is already completed or does not exist. If ``complete_at`` is not given, the current time will be used. buildbot-3.4.0/master/docs/developer/database/builds.rst000066400000000000000000000124521413250514000233110ustar00rootroot00000000000000Builds connector ~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.builds .. index:: double: Builds; DB Connector Component .. py:class:: BuildsConnectorComponent This class handles builds. 
One build record is created for each build performed by a master. This record contains information on the status of the build, as well as links to the resources used in the build: builder, master, worker, etc. An instance of this class is available at ``master.db.builds``. .. index:: bdict, buildid Builds are indexed by *buildid* and their contents represented as *builddicts* (build dictionaries), with the following keys: * ``id`` (the build ID, globally unique) * ``number`` (the build number, unique only within the builder) * ``builderid`` (the ID of the builder that performed this build) * ``buildrequestid`` (the ID of the build request that caused this build) * ``workerid`` (the ID of the worker on which this build was performed) * ``masterid`` (the ID of the master on which this build was performed) * ``started_at`` (datetime at which this build began) * ``complete_at`` (datetime at which this build finished, or None if it is ongoing) * ``state_string`` (short string describing the build's state) * ``results`` (results of this build; see :ref:`Build-Result-Codes`) .. py:method:: getBuild(buildid) :param integer buildid: build id :returns: Build dictionary as above or ``None``, via Deferred Get a single build, in the format described above. Returns ``None`` if there is no such build. .. py:method:: getBuildByNumber(builderid, number) :param integer builder: builder id :param integer number: build number within that builder :returns: Build dictionary as above or ``None``, via Deferred Get a single build, in the format described above, specified by builder and number, rather than build id. Returns ``None`` if there is no such build. .. py:method:: getPrevSuccessfulBuild(builderid, number, ssBuild) :param integer builderid: builder to get builds for :param integer number: the current build number. 
Previous build will be taken from this number :param list ssBuild: the list of sourcestamps for the current build number :returns: None or a build dictionary Returns the last successful build from the current build number with the same repository, branch, or codebase. .. py:method:: getBuilds(builderid=None, buildrequestid=None, complete=None, resultSpec=None) :param integer builderid: builder to get builds for :param integer buildrequestid: buildrequest to get builds for :param boolean complete: if not None, filters results based on completeness :param resultSpec: result spec containing filters sorting and paging requests from data/REST API. If possible, the db layer can optimize the SQL query using this information. :returns: list of build dictionaries as above, via Deferred Get a list of builds, in the format described above. Each of the parameters limits the resulting set of builds. .. py:method:: addBuild(builderid, buildrequestid, workerid, masterid, state_string) :param integer builderid: builder to get builds for :param integer buildrequestid: build request id :param integer workerid: worker performing the build :param integer masterid: master performing the build :param unicode state_string: initial state of the build :returns: tuple of build ID and build number, via Deferred Add a new build to the db, recorded as having started at the current time. This will invent a new number for the build, unique within the context of the builder. .. py:method:: setBuildStateString(buildid, state_string): :param integer buildid: build id :param unicode state_string: updated state of the build :returns: Deferred Update the state strings for the given build. .. py:method:: finishBuild(buildid, results) :param integer buildid: build id :param integer results: build result :returns: Deferred Mark the given build as finished, with ``complete_at`` set to the current time. .. note:: This update is done unconditionally, even if the build is already finished. .. 
py:method:: getBuildProperties(buildid, resultSpec=None) :param buildid: build ID :param resultSpec: resultSpec :returns: dictionary mapping property name to ``value, source``, via Deferred Return the properties for a build, in the same format they were given to :py:meth:`addBuild`. Optional filtering via resultSpec is available and optimized in the db layer. Note that this method does not distinguish a non-existent build from a build with no properties, and returns ``{}`` in either case. .. py:method:: setBuildProperty(buildid, name, value, source) :param integer buildid: build ID :param string name: Name of the property to set :param value: Value of the property :param string source: Source of the Property to set :returns: Deferred Set a build property. If no property with that name existed in that build, a new property will be created. buildbot-3.4.0/master/docs/developer/database/buildsets.rst000066400000000000000000000130501413250514000240200ustar00rootroot00000000000000Buildsets connector ~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.buildsets .. index:: double: Buildsets; DB Connector Component .. py:class:: BuildsetsConnectorComponent This class handles getting buildsets into and out of the database. Buildsets combine multiple build requests that were triggered together. An instance of this class is available at ``master.db.buildsets``. .. 
index:: bsdict, bsid Buildsets are indexed by *bsid* and their contents are represented as *bsdicts* (buildset dictionaries), with keys * ``bsid`` * ``external_idstring`` (arbitrary string for mapping builds externally) * ``reason`` (string; reason these builds were triggered) * ``sourcestamps`` (list of sourcestamps for this buildset, by ID) * ``submitted_at`` (datetime object; time this buildset was created) * ``complete`` (boolean; true if all of the builds for this buildset are complete) * ``complete_at`` (datetime object; time this buildset was completed) * ``results`` (aggregate result of this buildset; see :ref:`Build-Result-Codes`) .. py:method:: addBuildset(sourcestamps, reason, properties, builderids, external_idstring=None, parent_buildid=None, parent_relationship=None) :param sourcestamps: sourcestamps for the new buildset; see below :type sourcestamps: list :param reason: reason for this buildset :type reason: short unicode string :param properties: properties for this buildset :type properties: dictionary, where values are tuples of (value, source) :param builderids: builderids specified by this buildset :type builderids: list of int :param external_idstring: external key to identify this buildset; defaults to None :type external_idstring: unicode string :param datetime submitted_at: time this buildset was created; defaults to the current time :param int parent_buildid: optional build id that is the parent for this buildset :param unicode parent_relationship: relationship identifier for the parent. This is the configured relationship between the parent build and the child buildsets :returns: buildset ID and buildrequest IDs, via a Deferred Add a new buildset to the database, along with build requests for each builder, returning the resulting bsid via a Deferred. Arguments should be specified by keyword. 
Each sourcestamp in the list of sourcestamps can be given either as an integer, assumed to be a sourcestamp ID, or a dictionary of keyword arguments to be passed to :py:meth:`~buildbot.db.sourcestamps.SourceStampsConnectorComponent.findSourceStampId`. The return value is a tuple ``(bsid, brids)`` where ``bsid`` is the inserted buildset ID and ``brids`` is a dictionary mapping builderids to build request IDs. .. py:method:: completeBuildset(bsid, results[, complete_at=XX]) :param bsid: buildset ID to complete :type bsid: integer :param results: integer result code :type results: integer :param datetime complete_at: time the buildset was completed :returns: Deferred :raises: :py:exc:`KeyError` if the buildset does not exist or is already complete Complete a buildset, marking it with the given ``results`` and setting its ``completed_at`` to the current time, if the ``complete_at`` argument is omitted. .. py:method:: getBuildset(bsid) :param bsid: buildset ID :returns: bsdict, or ``None``, via Deferred Get a bsdict representing the given buildset, or ``None`` if no such buildset exists. Note that buildsets are not cached, as the values in the database are not fixed. .. py:method:: getBuildsets(complete=None, resultSpec=None) :param complete: if true, return only complete buildsets; if false, return only incomplete buildsets; if ``None`` or omitted, return all buildsets :param resultSpec: result spec containing filters sorting and paging requests from data/REST API. If possible, the db layer can optimize the SQL query using this information. :returns: list of bsdicts, via Deferred Get a list of bsdicts matching the given criteria. .. py:method:: getRecentBuildsets(count=None, branch=None, repository=None, complete=None): :param count: maximum number of buildsets to retrieve (required) :type count: integer :param branch: optional branch name. 
If specified, only buildsets affecting such branch will be returned :type branch: string :param repository: optional repository name. If specified, only buildsets affecting such repository will be returned :type repository: string :param complete: if true, return only complete buildsets; if false, return only incomplete buildsets; if ``None`` or omitted, return all buildsets :type complete: Boolean :returns: list of bsdicts, via Deferred Get "recent" buildsets, as defined by their ``submitted_at`` times. .. py:method:: getBuildsetProperties(buildsetid) :param bsid: buildset ID :returns: dictionary mapping property name to ``value, source``, via Deferred Return the properties for a buildset, in the same format they were given to :py:meth:`addBuildset`. Note that this method does not distinguish a nonexistent buildset from a buildset with no properties, and returns ``{}`` in either case. buildbot-3.4.0/master/docs/developer/database/changes.rst000066400000000000000000000146521413250514000234430ustar00rootroot00000000000000Changes connector ~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.changes .. index:: double: Changes; DB Connector Component .. py:class:: ChangesConnectorComponent This class handles changes in the Buildbot database, including pulling information from the changes sub-tables. An instance of this class is available at ``master.db.changes``. .. 
index:: chdict, changeid Changes are indexed by *changeid*, and are represented by a *chdict*, which has the following keys: * ``changeid`` (the ID of this change) * ``parent_changeids`` (list of ID; change's parents) * ``author`` (unicode; the author of the change) * ``committer`` (unicode; the committer of the change) * ``files`` (list of unicode; source-code filenames changed) * ``comments`` (unicode; user comments) * ``is_dir`` (deprecated) * ``links`` (list of unicode; links for this change, e.g., to web views, review) * ``revision`` (unicode string; revision for this change, or ``None`` if unknown) * ``when_timestamp`` (datetime instance; time of the change) * ``branch`` (unicode string; branch on which the change took place, or ``None`` for the "default branch", whatever that might mean) * ``category`` (unicode string; user-defined category of this change, or ``None``) * ``revlink`` (unicode string; link to a web view of this change) * ``properties`` (user-specified properties for this change, represented as a dictionary mapping keys to (value, source)) * ``repository`` (unicode string; repository where this change occurred) * ``project`` (unicode string; user-defined project to which this change corresponds) .. py:method:: getParentChangeIds(branch, repository, project, codebase) :param branch: the branch of the change :type branch: unicode string :param repository: the repository in which this change took place :type repository: unicode string :param project: the project this change is a part of :type project: unicode string :param codebase: :type codebase: unicode string :returns: the last changeID that matches the branch, repository, project, or codebase .. 
py:method:: addChange(author=None, committer=None, files=None, comments=None, is_dir=0, links=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties={}, repository='', project='', uid=None) :param author: the author of this change :type author: unicode string :param committer: the committer of this change :type committer: unicode string :param files: a list of filenames that were changed :type branch: list of unicode strings :param comments: user comments on the change :type branch: unicode string :param is_dir: deprecated :param links: a list of links related to this change, e.g., to web viewers or review pages :type links: list of unicode strings :param revision: the revision identifier for this change :type revision: unicode string :param when_timestamp: when this change occurred, or the current time if None :type when_timestamp: datetime instance or None :param branch: the branch on which this change took place :type branch: unicode string :param category: category for this change (arbitrary use by Buildbot users) :type category: unicode string :param revlink: link to a web view of this revision :type revlink: unicode string :param properties: properties to set on this change, where values are tuples of (value, source). At the moment, the source must be ``'Change'``, although this may be relaxed in later versions :type properties: dictionary :param repository: the repository in which this change took place :type repository: unicode string :param project: the project this change is a part of :type project: unicode string :param uid: uid generated for the change author :type uid: integer :returns: new change's ID via Deferred Add a Change with the given attributes to the database, returning the changeid via a Deferred. All arguments should be given as keyword arguments. The ``project`` and ``repository`` arguments must be strings; ``None`` is not allowed. .. 
py:method:: getChange(changeid, no_cache=False) :param changeid: the id of the change instance to fetch :param no_cache: bypass cache and always fetch from database :type no_cache: boolean :returns: chdict via Deferred Get a change dictionary for the given changeid, or ``None`` if no such change exists. .. py:method:: getChangeUids(changeid) :param changeid: the id of the change instance to fetch :returns: list of uids via Deferred Get the userids associated with the given changeid. .. py:method:: getChanges(resultSpec=None) :param resultSpec: result spec containing filters sorting and paging requests from data/REST API. If possible, the db layer can optimize the SQL query using this information. :returns: list of dictionaries via Deferred Get a list of the changes, represented as dictionaries, matching the given criteria. if ``resultSpec`` is not provided, changes are sorted, and paged using generic data query options. .. py:method:: getChangesCount() :returns: list of dictionaries via Deferred Get the number of changes that the query option would return if no paging option was set. .. py:method:: getLatestChangeid() :returns: changeid via Deferred Get the most-recently-assigned changeid, or ``None`` if there are no changes at all. .. py:method:: getChangesForBuild(buildid) :param buildid: ID of the build :returns: list of dictionaries via Deferred Get the "blame" list of changes for a build. .. py:method:: getBuildsForChange(changeid) :param changeid: ID of the change :returns: list of buildDict via Deferred Get builds related to a change. .. py:method:: getChangeFromSSid(sourcestampid) :param sourcestampid: ID of the sourcestampid :returns: chdict via Deferred Returns the change dictionary related to the sourcestamp ID. buildbot-3.4.0/master/docs/developer/database/changesources.rst000066400000000000000000000056161413250514000246640ustar00rootroot00000000000000Change sources connector ~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.changesources .. 
index:: double: ChangeSources; DB Connector Component .. py:exception:: ChangeSourceAlreadyClaimedError Raised when a changesource request is already claimed by another master. .. py:class:: ChangeSourcesConnectorComponent This class manages the state of the Buildbot changesources. An instance of this class is available at ``master.db.changesources``. Changesources are identified by their changesourceid, which can be obtained from :py:meth:`findChangeSourceId`. Changesources are represented by dictionaries with the following keys: * ``id`` - changesource's ID * ``name`` - changesource's name * ``masterid`` - ID of the master currently running this changesource, or None if it is inactive Note that this class is conservative in determining what changesources are inactive: a changesource linked to an inactive master is still considered active. This situation should never occur, however; links to a master should be deleted when it is marked inactive. .. py:method:: findChangeSourceId(name) :param name: changesource name :returns: changesource ID via Deferred Return the changesource ID for the changesource with this name. If such a changesource is already in the database, this returns the ID. If not, the changesource is added to the database and its ID returned. .. py:method:: setChangeSourceMaster(changesourceid, masterid) :param changesourceid: changesource to set the master for :param masterid: new master for this changesource, or None :returns: Deferred Set, or unset if ``masterid`` is None, the active master for this changesource. If no master is currently set, or the current master is not active, this method will complete without error. If the current master is active, this method will raise :py:exc:`~buildbot.db.exceptions.ChangeSourceAlreadyClaimedError`. .. py:method:: getChangeSource(changesourceid) :param changesourceid: changesource ID :returns: changesource dictionary or None, via Deferred Get the changesource dictionary for the given changesource. .. 
py:method:: getChangeSources(active=None, masterid=None) :param boolean active: if specified, filter for active or inactive changesources :param integer masterid: if specified, only return changesources attached associated with this master :returns: list of changesource dictionaries in unspecified order Get a list of changesources. If ``active`` is given, changesources are filtered according to whether they are active (true) or inactive (false). An active changesource is one that is claimed by an active master. If ``masterid`` is given, the list is restricted to schedulers associated with that master. buildbot-3.4.0/master/docs/developer/database/index.rst000066400000000000000000000005261413250514000231350ustar00rootroot00000000000000Database connectors API ----------------------- This section documents the available database connector classes. .. toctree:: :maxdepth: 1 buildsets buildrequests builders builds build_data steps logs changes changesources schedulers sourcestamps state users masters workers buildbot-3.4.0/master/docs/developer/database/logs.rst000066400000000000000000000113011413250514000227630ustar00rootroot00000000000000Logs connector ~~~~~~~~~~~~~~ .. py:module:: buildbot.db.logs .. index:: double: Logs; DB Connector Component .. py:class:: LogsConnectorComponent This class handles log data. Build steps can have zero or more logs. Logs are uniquely identified by name within a step. Information about a log, apart from its contents, is represented as a dictionary with the following keys, referred to as a *logdict*: * ``id`` (log ID, globally unique) * ``stepid`` (step ID, indicating the containing step) * ``name`` free-form name of this log * ``slug`` (50-identifier for the log, unique within the step) * ``complete`` (true if the log is complete and will not receive more lines) * ``num_lines`` (number of lines in the log) * ``type`` (log type; see below) Each log has a type that describes how to interpret its contents. 
See the :bb:rtype:`logchunk` resource type for details. A log contains a sequence of newline-separated lines of unicode. Log line numbering is zero-based. Each line must be less than 64k when encoded in UTF-8. Longer lines will be truncated, and a warning will be logged. Lines are stored internally in "chunks", and optionally compressed, but the implementation hides these details from callers. .. py:method:: getLog(logid) :param integer logid: ID of the requested log :returns: logdict via Deferred Get a log, identified by logid. .. py:method:: getLogBySlug(stepid, slug) :param integer stepid: ID of the step containing this log :param slug: slug of the logfile to retrieve :type name: 50-character identifier :returns: logdict via Deferred Get a log, identified by name within the given step. .. py:method:: getLogs(stepid) :param integer stepid: ID of the step containing the desired logs :returns: list of logdicts via Deferred Get all logs within the given step. .. py:method:: getLogLines(logid, first_line, last_line) :param integer logid: ID of the log :param first_line: first line to return :param last_line: last line to return :returns: see below Get a subset of lines for a logfile. The return value, via Deferred, is a concatenation of newline-terminated strings. If the requested last line is beyond the end of the logfile, only existing lines will be included. If the log does not exist, or has no associated lines, this method returns an empty string. .. py:method:: addLog(stepid, name, type) :param integer stepid: ID of the step containing this log :param string name: name of the logfile :param slug: slug (unique identifier) of the logfile :type slug: 50-character identifier :param string type: log type (see above) :raises KeyError: if a log with the given slug already exists in the step :returns: ID of the new log, via Deferred Add a new log file to the given step. .. 
py:method:: appendLog(logid, content) :param integer logid: ID of the requested log :param string content: new content to be appended to the log :returns: tuple of first and last line numbers in the new chunk, via Deferred Append content to an existing log. The content must end with a newline. If the given log does not exist, the method will silently do nothing. It is not safe to call this method more than once simultaneously for the same ``logid``. .. py:method:: finishLog(logid) :param integer logid: ID of the log to mark complete :returns: Deferred Mark a log as complete. Note that no checking for completeness is performed when appending to a log. It is up to the caller to avoid further calls to ``appendLog`` after ``finishLog``. .. py:method:: compressLog(logid) :param integer logid: ID of the log to compress :returns: Deferred Compress the given log. This method performs internal optimizations on a log's chunks to reduce the space used and make read operations more efficient. It should only be called for finished logs. This method may take some time to complete. .. py:method:: deleteOldLogChunks(older_than_timestamp) :param integer older_than_timestamp: the logs whose step's ``started_at`` is older than ``older_than_timestamp`` will be deleted. :returns: Deferred Delete old logchunks (helper for the ``logHorizon`` policy). Old logs have their logchunks deleted from the database, but they keep their ``num_lines`` metadata. They have their types changed to 'd', so that the UI can display something meaningful. buildbot-3.4.0/master/docs/developer/database/masters.rst000066400000000000000000000045011413250514000235010ustar00rootroot00000000000000Masters connector ~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.masters .. index:: double: Masters; DB Connector Component .. py:class:: MastersConnectorComponent This class handles tracking the buildmasters in a multi-master configuration. Masters "check in" periodically. 
Other masters monitor the last activity times, and mark masters that have not recently checked in as inactive. Masters are represented by master dictionaries with the following keys: * ``id`` -- the ID of this master * ``name`` -- the name of the master (generally of the form ``hostname:basedir``) * ``active`` -- true if this master is running * ``last_active`` -- time that this master last checked in (a datetime object) .. py:method:: findMasterId(name) :param unicode name: name of this master :returns: master id via Deferred Return the master ID for the master with this master name (generally ``hostname:basedir``). If such a master is already in the database, this returns the ID. If not, the master is added to the database, with ``active=False``, and its ID returned. .. py:method:: setMasterState(masterid, active) :param integer masterid: the master to check in :param boolean active: whether to mark this master as active or inactive :returns: boolean via Deferred Mark the given master as active or inactive, returning true if the state actually changed. If ``active`` is true, the ``last_active`` time is updated to the current time. If ``active`` is false, then any links to this master, such as schedulers, will be deleted. .. py:method:: getMaster(masterid) :param integer masterid: the master to check in :returns: Master dict or None via Deferred Get the indicated master. .. py:method:: getMasters() :returns: list of Master dicts via Deferred Get a list of the masters, represented as dictionaries; masters are sorted and paged using generic data query options .. py:method:: setAllMastersActiveLongTimeAgo() :returns: None via Deferred This method is intended to be called by upgrade-master, and will effectively force housekeeping on all masters at next startup. This method is not intended to be called outside of housekeeping scripts. 
buildbot-3.4.0/master/docs/developer/database/schedulers.rst000066400000000000000000000113761413250514000241740ustar00rootroot00000000000000Schedulers connector ~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.schedulers .. index:: double: Schedulers; DB Connector Component .. py:exception:: SchedulerAlreadyClaimedError Raised when a scheduler request is already claimed by another master. .. py:class:: SchedulersConnectorComponent This class manages the state of the Buildbot schedulers. This state includes classifications of as-yet un-built changes. An instance of this class is available at ``master.db.schedulers``. Schedulers are identified by their schedulerid, which can be obtained from :py:meth:`findSchedulerId`. Schedulers are represented by dictionaries with the following keys: * ``id`` - scheduler's ID * ``name`` - scheduler's name * ``masterid`` - ID of the master currently running this scheduler, or None if it is inactive Note that this class is conservative in determining what schedulers are inactive: a scheduler linked to an inactive master is still considered active. This situation should never occur, however; links to a master should be deleted when it is marked inactive. .. py:method:: classifyChanges(objectid, classifications) :param schedulerid: ID of the scheduler classifying the changes :param classifications: mapping of changeid to boolean, where the boolean is true if the change is important, and false if it is unimportant :type classifications: dictionary :returns: Deferred Record the given classifications. This method allows a scheduler to record which changes were important and which were not immediately, even if the build based on those changes will not occur for some time (e.g., a tree stable timer). Schedulers should be careful to flush classifications once they are no longer needed, using :py:meth:`flushChangeClassifications`. .. 
py:method:: flushChangeClassifications(objectid, less_than=None) :param schedulerid: ID of the scheduler owning the flushed changes :param less_than: (optional) lowest changeid that should *not* be flushed :returns: Deferred Flush all scheduler_changes for the given scheduler, limiting to those with changeid less than ``less_than`` if the parameter is supplied. .. py:method:: getChangeClassifications(objectid[, branch]) :param schedulerid: ID of scheduler to look up changes for :type schedulerid: integer :param branch: (optional) limit to changes with this branch :type branch: string or None (for default branch) :returns: dictionary via Deferred Return the classifications made by this scheduler, in the form of a dictionary mapping changeid to a boolean, just as supplied to :py:meth:`classifyChanges`. If ``branch`` is specified, then only changes on that branch will be given. Note that specifying ``branch=None`` requests changes for the default branch, and is not the same as omitting the ``branch`` argument altogether. .. py:method:: findSchedulerId(name) :param name: scheduler name :returns: scheduler ID via Deferred Return the scheduler ID for the scheduler with this name. If such a scheduler is already in the database, this returns the ID. If not, the scheduler is added to the database and its ID is returned. .. py:method:: setSchedulerMaster(schedulerid, masterid) :param schedulerid: scheduler to set the master for :param masterid: new master for this scheduler, or None :returns: Deferred Set, or unset if ``masterid`` is None, the active master for this scheduler. If no master is currently set, or the current master is not active, this method will complete without error. If the current master is active, this method will raise :py:exc:`~buildbot.db.exceptions.SchedulerAlreadyClaimedError`. .. 
py:method:: getScheduler(schedulerid) :param schedulerid: scheduler ID :returns: scheduler dictionary or None via Deferred Get the scheduler dictionary for the given scheduler. .. py:method:: getSchedulers(active=None, masterid=None) :param boolean active: if specified, filter for active or inactive schedulers :param integer masterid: if specified, only return schedulers attached associated with this master :returns: list of scheduler dictionaries in unspecified order Get a list of schedulers. If ``active`` is given, schedulers are filtered according to whether they are active (true) or inactive (false). An active scheduler is one that is claimed by an active master. If ``masterid`` is given, the list is restricted to schedulers associated with that master. buildbot-3.4.0/master/docs/developer/database/sourcestamps.rst000066400000000000000000000101121413250514000245460ustar00rootroot00000000000000Source stamps connector ~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.sourcestamps .. index:: double: SourceStamps; DB Connector Component .. py:class:: SourceStampsConnectorComponent This class manages source stamps, as stored in the database. A source stamp uniquely identifies a particular version of a single codebase. Source stamps are identified by their ID. It is safe to use sourcestamp ID equality as a proxy for source stamp equality. For example, all builds of a particular version of a codebase will share the same sourcestamp ID. This equality does not extend to patches: two sourcestamps generated with exactly the same patch will have different IDs. Relative source stamps have a ``revision`` of None, meaning "whatever the latest is when this sourcestamp is interpreted". While such source stamps may correspond to a wide array of revisions over the lifetime of a Buildbot installation, they will only ever have one ID. An instance of this class is available at ``master.db.sourcestamps``. Sourcestamps are represented by dictionaries with the following keys: .. 
index:: ssid, ssdict * ``ssid`` * ``branch`` (branch, or ``None`` for default branch) * ``revision`` (revision, or ``None`` to indicate the latest revision, in which case this is a relative source stamp) * ``patchid`` (ID of the patch) * ``patch_body`` (body of the patch, or ``None``) * ``patch_level`` (directory stripping level of the patch, or ``None``) * ``patch_subdir`` (subdirectory in which to apply the patch, or ``None``) * ``patch_author`` (author of the patch, or ``None``) * ``patch_comment`` (comment for the patch, or ``None``) * ``repository`` (repository containing the source; never ``None``) * ``project`` (project this source is for; never ``None``) * ``codebase`` (codebase this stamp is in; never ``None``) * ``created_at`` (timestamp when this stamp was first created) Note that the patch body is a bytestring, not a unicode string. .. py:method:: findSourceStampId(branch=None, revision=Node, repository=None, project=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None) :param branch: :type branch: unicode string or None :param revision: :type revision: unicode string or None :param repository: :type repository: unicode string or None :param project: :type project: unicode string or None :param codebase: :type codebase: unicode string (required) :param patch_body: patch body :type patch_body: bytes or unicode string or None :param patch_level: patch level :type patch_level: integer or None :param patch_author: patch author :type patch_author: unicode string or None :param patch_comment: patch comment :type patch_comment: unicode string or None :param patch_subdir: patch subdir :type patch_subdir: unicode string or None :returns: ssid, via Deferred Create a new SourceStamp instance with the given attributes, or find an existing one. In either case, return its ssid. The arguments all have the same meaning as in an ssdict. If a new SourceStamp is created, its ``created_at`` is set to the current time. .. 
py:method:: getSourceStamp(ssid) :param ssid: sourcestamp to get :param no_cache: bypass cache and always fetch from database :type no_cache: boolean :returns: ssdict, or ``None``, via Deferred Get an ssdict representing the given source stamp, or ``None`` if no such source stamp exists. .. py:method:: getSourceStamps() :returns: list of ssdict, via Deferred Get all sourcestamps in the database. You probably don't want to do this! This method will be extended to allow appropriate filtering. .. py:method:: getSourceStampsForBuild(buildid) :param buildid: build ID :returns: list of ssdict, via Deferred Get sourcestamps related to a build. buildbot-3.4.0/master/docs/developer/database/state.rst000066400000000000000000000064511413250514000231510ustar00rootroot00000000000000State connector ~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.state .. index:: double: State; DB Connector Component .. py:class:: StateConnectorComponent This class handles maintaining arbitrary key-value state for Buildbot objects. Each object can store arbitrary key-value pairs, where the values are any JSON-encodable value. Each pair can be set and retrieved atomically. Objects are identified by their (user-visible) name and their class. This allows, for example, a ``nightly_smoketest`` object of class ``NightlyScheduler`` to maintain its state even if it moves between masters, but avoids cross-contaminating state between different classes of objects with the same name. Note that "class" is not interpreted literally, and can be any string that will uniquely identify the class for the object; if classes are renamed, they can continue to use the old names. An instance of this class is available at ``master.db.state``. .. index:: objectid, objdict Objects are identified by *objectid*. .. py:method:: getObjectId(name, class_name) :param name: name of the object :param class_name: object class name :returns: the objectid, via a Deferred. Get the object ID for this combination of name and class. 
This will add a row to the 'objects' table if none exists already. .. py:method:: getState(objectid, name[, default]) :param objectid: objectid on which the state should be checked :param name: name of the value to retrieve :param default: (optional) value to return if ``name`` is not present :returns: state value via a Deferred :raises KeyError: if ``name`` is not present and no default is given :raises: TypeError if JSON parsing fails Get the state value for key ``name`` for the object with id ``objectid``. .. py:method:: setState(objectid, name, value) :param objectid: the objectid for which the state should be changed :param name: the name of the value to change :param value: the value to set :type value: JSON-able value :param returns: value actually written via Deferred :raises: TypeError if JSONification fails Set the state value for ``name`` for the object with id ``objectid``, overwriting any existing value. In case of two racing writes, the first (as per db rule) one wins, the seconds returns the value from the first. .. py:method:: atomicCreateState(objectid, name, thd_create_callback) :param objectid: the objectid for which the state should be created :param name: the name of the value to create :param thd_create_callback: the function to call from thread to create the value if non-existent. (returns JSON-able value) :param returns: Deferred :raises: TypeError if JSONification fails Atomically creates the state value for ``name`` for the object with id ``objectid``. If there is an existing value, returns that instead. This implementation ensures the state is created only once for the whole cluster. Those 3 methods have their threaded equivalent, ``thdGetObjectId``, ``thdGetState``, ``thdSetState`` that is intended to run in synchronous code, (e.g master.cfg environment). buildbot-3.4.0/master/docs/developer/database/steps.rst000066400000000000000000000071501413250514000231640ustar00rootroot00000000000000Steps connector ~~~~~~~~~~~~~~~ .. 
py:module:: buildbot.db.steps .. index:: double: Steps; DB Connector Component .. py:class:: StepsConnectorComponent This class handles the steps performed within the context of a build. Within a build, each step has a unique name and a unique 0-based number. An instance of this class is available at ``master.db.steps``. .. index:: stepdict, stepid Builds are indexed by *stepid* and their contents are represented as *stepdicts* (step dictionaries), with the following keys: * ``id`` (the step ID, globally unique) * ``number`` (the step number, unique only within the build) * ``name`` (the step name, an 50-character :ref:`identifier ` unique only within the build) * ``buildid`` (the ID of the build containing this step) * ``started_at`` (datetime at which this step began) * ``complete_at`` (datetime at which this step finished, or None if it is ongoing) * ``state_string`` (short string describing the step's state) * ``results`` (results of this step; see :ref:`Build-Result-Codes`) * ``urls`` (list of URLs produced by this step. Each urls is stored as a dictionary with keys `name` and `url`) * ``hidden`` (true if the step should be hidden in status displays) .. py:method:: getStep(stepid=None, buildid=None, number=None, name=None) :param integer stepid: the step id to retrieve :param integer buildid: the build from which to get the step :param integer number: the step number :param name: the step name :type name: 50-character :ref:`identifier ` :returns: stepdict via Deferred Get a single step. The step can be specified by: * ``stepid`` alone * ``buildid`` and ``number``, the step number within that build * ``buildid`` and ``name``, the unique step name within that build .. py:method:: getSteps(buildid) :param integer buildid: the build from which to get the step :returns: list of stepdicts, sorted by number, via Deferred Get all steps in the given build, ordered by number. .. 
py:method:: addStep(self, buildid, name, state_string) :param integer buildid: the build to which to add the step :param name: the step name :type name: 50-character :ref:`identifier ` :param unicode state_string: the initial state of the step :returns: tuple of step ID, step number, and step name, via Deferred Add a new step to a build. The given name will be used if it is unique; otherwise, a unique numerical suffix will be appended. .. py:method:: setStepStateString(stepid, state_string): :param integer stepid: step ID :param unicode state_string: updated state of the step :returns: Deferred Update the state string for the given step. .. py:method:: finishStep(stepid, results, hidden) :param integer stepid: step ID :param integer results: step result :param bool hidden: true if the step should be hidden :returns: Deferred Mark the given step as finished, with ``complete_at`` set to the current time. .. note:: This update is done unconditionally, even if the steps are already finished. .. py:method:: addURL(self, stepid, name, url) :param integer stepid: the stepid to add the url. :param string name: the url name :param string url: the actual url :returns: None via deferred Add a new url to a step. The new url is added to the list of urls. buildbot-3.4.0/master/docs/developer/database/users.rst000066400000000000000000000104431413250514000231660ustar00rootroot00000000000000Users connector ~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.users .. index:: double: Users; DB Connector Component .. py:class:: UsersConnectorComponent This class handles Buildbot's notion of users. Buildbot tracks the usual information about users -- username and password, plus a display name. The more complicated task is to recognize each user across multiple interfaces with Buildbot. For example, a user may be identified as 'djmitche' in Subversion, 'dustin@v.igoro.us' in Git, and 'dustin' on IRC. To support this functionality, each user has a set of attributes, keyed by type. 
The :py:meth:`findUserByAttr` method uses these attributes to match users, adding a new user if no matching user is found. Users are identified canonically by *uid*, and are represented by *usdicts* (user dictionaries) with the following keys: * ``uid`` * ``identifier`` (display name for the user) * ``bb_username`` (buildbot login username) * ``bb_password`` (hashed login password) All attributes are also included in the dictionary, keyed by type. Types colliding with the keys above are ignored. .. py:method:: findUserByAttr(identifier, attr_type, attr_data) :param identifier: identifier to use for a new user :param attr_type: attribute type to search for and/or add :param attr_data: attribute data to add :returns: userid via Deferred Get an existing user, or add a new one, based on the given attribute. This method is intended for use by other components of Buildbot to search for a user with the given attributes. Note that ``identifier`` is *not* used in the search for an existing user. It is only used when creating a new user. The identifier should be based deterministically on the attributes supplied, in some fashion that will seem natural to users. For future compatibility, always use keyword parameters to call this method. .. py:method:: getUser(uid) :param uid: user id to look up :type uid: int :param no_cache: bypass cache and always fetch from database :type no_cache: boolean :returns: usdict via Deferred Get a usdict for the given user, or ``None`` if no matching user is found. .. py:method:: getUserByUsername(username) :param username: username portion of user credentials :type username: string :returns: usdict or None via deferred Looks up the user with the bb_username, returning the usdict or ``None`` if no matching user is found. .. py:method:: getUsers() :returns: list of partial usdicts via Deferred Get the entire list of users. User attributes are not included, so the results are not full usdicts. .. 
py:method:: updateUser(uid=None, identifier=None, bb_username=None, bb_password=None, attr_type=None, attr_data=None) :param uid: the user to change :type uid: int :param identifier: (optional) new identifier for this user :type identifier: string :param bb_username: (optional) new buildbot username :type bb_username: string :param bb_password: (optional) new hashed buildbot password :type bb_password: string :param attr_type: (optional) attribute type to update :type attr_type: string :param attr_data: (optional) value for ``attr_type`` :type attr_data: string :returns: Deferred Update information about the given user. Only the specified attributes are updated. If no user with the given uid exists, the method will return silently. Note that ``bb_password`` must be given if ``bb_username`` appears; similarly, ``attr_type`` requires ``attr_data``. .. py:method:: removeUser(uid) :param uid: the user to remove :type uid: int :returns: Deferred Remove the user with the given uid from the database. This will remove the user from any associated tables as well. .. py:method:: identifierToUid(identifier) :param identifier: identifier to search for :type identifier: string :returns: uid or ``None``, via Deferred Fetch a uid for the given identifier, if one exists. buildbot-3.4.0/master/docs/developer/database/workers.rst000066400000000000000000000112361413250514000235220ustar00rootroot00000000000000Workers connector ~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.db.workers .. index:: double: Workers; DB Connector Component .. py:class:: WorkersConnectorComponent This class handles Buildbot's notion of workers. 
The worker information is returned as a dictionary with the following keys: * ``id`` * ``name`` - the name of the worker * ``workerinfo`` - worker information as dictionary * ``paused`` - boolean indicating worker is paused and shall not take new builds * ``graceful`` - boolean indicating worker will be shutdown as soon as build finished * ``connected_to`` - a list of masters, by ID, to which this worker is currently connected. This list will typically contain only one master, but in unusual circumstances the same worker may appear to be connected to multiple masters simultaneously * ``configured_on`` - a list of master-builder pairs, on which this worker is configured. Each pair is represented by a dictionary with keys ``builderid`` and ``masterid`` The worker information can be any JSON-able object. See :bb:rtype:`worker` for more detail. .. py:method:: findWorkerId(name=name) :param name: worker name :type name: 50-character identifier :returns: worker ID via Deferred Get the ID for a worker, adding a new worker to the database if necessary. The worker information for a new worker is initialized to an empty dictionary. .. py:method:: getWorkers(masterid=None, builderid=None) :param integer masterid: limit to workers configured on this master :param integer builderid: limit to workers configured on this builder :returns: list of worker dictionaries, via Deferred Get a list of workers. If either or both of the filtering parameters are specified, then the result is limited to workers configured to run on that master or builder. The ``configured_on`` results are limited by the filtering parameters as well. The ``connected_to`` results are limited by the ``masterid`` parameter. .. 
py:method:: getWorker(workerid=None, name=None, masterid=None, builderid=None) :param string name: the name of the worker to retrieve :param integer workerid: the ID of the worker to retrieve :param integer masterid: limit to workers configured on this master :param integer builderid: limit to workers configured on this builder :returns: info dictionary or None, via Deferred Looks up the worker with the given name or ID, returning ``None`` if no matching worker is found. The ``masterid`` and ``builderid`` arguments function as they do for :py:meth:`getWorkers`. .. py:method:: workerConnected(workerid, masterid, workerinfo) :param integer workerid: the ID of the worker :param integer masterid: the ID of the master to which it connected :param workerinfo: the new worker information dictionary :type workerinfo: dict :returns: Deferred Record the given worker as attached to the given master, and update its cached worker information. The supplied information completely replaces any existing information. .. py:method:: workerDisconnected(workerid, masterid) :param integer workerid: the ID of the worker :param integer masterid: the ID of the master to which it connected :returns: Deferred Record the given worker as no longer attached to the given master. .. py:method:: workerConfigured(workerid, masterid, builderids) :param integer workerid: the ID of the worker :param integer masterid: the ID of the master to which it configured :param list of integer builderids: the ID of the builders to which it is configured :returns: Deferred Record the given worker as being configured on the given master and for given builders. This method will also remove any other builder that were configured previously for same (worker, master) combination. .. py:method:: deconfigureAllWorkersForMaster(masterid) :param integer masterid: the ID of the master to which it configured :returns: Deferred Unregister all the workers configured to a master for given builders. 
This shall happen when master is disabled or before reconfiguration. .. py:method:: setWorkerState(workerid, paused, graceful) :param integer workerid: the ID of the worker whose state is being changed :param integer paused: the paused state :param integer graceful: the graceful state :returns: Deferred Change the state of a worker (see definition of states above in worker dict description). buildbot-3.4.0/master/docs/developer/encodings.rst000066400000000000000000000024301413250514000222270ustar00rootroot00000000000000String Encodings ~~~~~~~~~~~~~~~~ Buildbot expects all strings used internally to be valid Unicode strings - not bytestrings. Note that Buildbot rarely feeds strings back into external tools in such a way that those strings must match. For example, Buildbot does not attempt to access the filenames specified in a Change. So it is more important to store strings in a manner that will be most useful to a human reader (e.g., in logfiles, web status, etc.) than to store them in a lossless format. Inputs ++++++ On input, strings should be decoded, if their encoding is known. Where necessary, the assumed input encoding should be configurable. In some cases, such as filenames, this encoding is not known or not well-defined (e.g., a utf-8 encoded filename in a latin-1 directory). In these cases, the input mechanisms should make a best effort at decoding, and use e.g., the ``errors='replace'`` option to fail gracefully on un-decodable characters. Outputs +++++++ At most points where Buildbot outputs a string, the target encoding is known. For example, the web status can encode to utf-8. In cases where it is not known, it should be configurable, with a safe fallback (e.g., ascii with ``errors='replace'``). For HTML/XML outputs, consider using ``errors='xmlcharrefreplace'`` instead. 
buildbot-3.4.0/master/docs/developer/general.rst000066400000000000000000000006321413250514000216750ustar00rootroot00000000000000General Documents ================= This section gives some general information about Buildbot development. .. toctree:: :maxdepth: 2 master-overview style tests config schedulers utils results www-server www-data-module www-base-app auth authz master-worker br-claiming encodings metrics secrets stats-service plugins-publish buildbot-3.4.0/master/docs/developer/index.rst000066400000000000000000000014211413250514000213640ustar00rootroot00000000000000.. _Buildbot Development: .. _Public-API: Buildbot Development ==================== This chapter is the official repository for the collected wisdom of the Buildbot hackers. It is intended both for developers writing patches that will be included in Buildbot itself and for advanced users who wish to customize Buildbot. .. note:: **Public API** Any API that is not documented in the official Buildbot documentation is considered internal and subject to change. If you would like it to be officially exposed, open a bug report on the `Buildbot Github project `_. .. toctree:: :maxdepth: 2 quickstart pull-request general rest raml/index data database database/index mq classes buildbot-3.4.0/master/docs/developer/master-overview.rst000066400000000000000000000061721413250514000234240ustar00rootroot00000000000000.. _master-service-hierarchy: Master Organization =================== Buildbot makes heavy use of Twisted Python's support for services - software modules that can be started and stopped dynamically. Buildbot adds the ability to reconfigure such services, too - see :ref:`developer-reconfiguration`. Twisted arranges services into trees; the following section describes the service tree on a running master. BuildMaster Object ------------------ The hierarchy begins with the master, a :py:class:`buildbot.master.BuildMaster` instance. 
Most other services contain a reference to this object in their ``master`` attribute, and in general the appropriate way to access other objects or services is to begin with ``self.master`` and navigate from there. The master has a number of useful attributes: ``master.metrics`` A :py:class:`buildbot.process.metrics.MetricLogObserver` instance that handles tracking and reporting on master metrics. ``master.caches`` A :py:class:`buildbot.process.caches.CacheManager` instance that provides access to object caches. ``master.pbmanager`` A :py:class:`buildbot.pbmanager.PBManager` instance that handles incoming PB connections, potentially on multiple ports, and dispatching those connections to appropriate components based on the supplied username. ``master.workers`` A :py:class:`buildbot.worker.manager.WorkerManager` instance that provides wrappers around multiple master-worker protocols (e.g. PB) to unify calls for them from higher level code. ``master.change_svc`` A :py:class:`buildbot.changes.manager.ChangeManager` instance that manages the active change sources, as well as the stream of changes received from those sources. All active change sources are child services of this instance. ``master.botmaster`` A :py:class:`buildbot.process.botmaster.BotMaster` instance that manages all of the workers and builders as child services. The botmaster acts as the parent service for a :py:class:`buildbot.process.botmaster.BuildRequestDistributor` instance (at ``master.botmaster.brd``), as well as all active workers (:py:class:`buildbot.worker.AbstractWorker` instances) and builders (:py:class:`buildbot.process.builder.Builder` instances). ``master.scheduler_manager`` A :py:class:`buildbot.schedulers.manager.SchedulerManager` instance that manages the active schedulers. All active schedulers are child services of this instance. ``master.user_manager`` A :py:class:`buildbot.process.users.manager.UserManagerManager` instance that manages access to users. 
All active user managers are child services of this instance. ``master.db`` A :py:class:`buildbot.db.connector.DBConnector` instance that manages access to the buildbot database. See :ref:`developer-database` for more information. ``master.debug`` A :py:class:`buildbot.process.debug.DebugServices` instance that manages debugging-related access -- the manhole, in particular. ``master.masterid`` This is the ID for this master, from the ``masters`` table. It is used in the database and messages to uniquely identify this master. buildbot-3.4.0/master/docs/developer/master-worker.rst000066400000000000000000000371401413250514000230660ustar00rootroot00000000000000Master-Worker API ================= This section describes the master-worker interface. It covers the communication protocol of the "classic" remote Worker. Notice there are other types of workers which behave a bit differently, such as :ref:`Local Worker ` and :ref:`Latent Worker `. Connection ---------- The interface is based on Twisted's Perspective Broker, which operates over TCP connections. The worker connects to the master, using the parameters supplied to :command:`buildbot-worker create-worker`. It uses a reconnecting process with an exponential backoff, and will automatically reconnect on disconnection. Once connected, the worker authenticates with the Twisted Cred (newcred) mechanism, using the username and password supplied to :command:`buildbot-worker create-worker`. The *mind* behind the worker is the worker bot instance (class :class:`buildbot_worker.pb.BotPb`). On the master side, the realm is implemented by :class:`buildbot.pbmanager.Dispatcher`, which examines the username of incoming avatar requests. There are special cases for ``change`` and ``debug``, which are not discussed here. For all other usernames, the botmaster is consulted, and if a worker with that name is configured, its :class:`buildbot.worker.Worker` instance is returned as the perspective. 
Workers ------- At this point, the master-side Worker object has a pointer to the remote worker-side Bot object in its ``self.worker``, and the worker-side Bot object has a reference to the master-side Worker object in its ``self.perspective``. Bot methods ~~~~~~~~~~~ The worker-side Bot object has the following remote methods: :meth:`~buildbot_worker.pb.BotPb.remote_getCommands` Returns a dictionary for all commands the worker recognizes: the key of the dictionary is the command name and the command version is the value. :meth:`~buildbot_worker.pb.BotPb.remote_setBuilderList` Given a list of builders and their build directories, ensures that those builders, and only those builders, are running. This can be called after the initial connection is established, with a new list, to add or remove builders. This method returns a dictionary of :class:`WorkerForBuilder` objects - see below. :meth:`~buildbot_worker.pb.BotPb.remote_print` Adds a message to the worker logfile. :meth:`~buildbot_worker.pb.BotPb.remote_getWorkerInfo` Returns a dictionary with the contents of the worker's :file:`info/` directory (i.e. file name is used as key and file contents as the value). This dictionary also contains the following keys: ``environ`` copy of the workers environment ``system`` OS the worker is running (extracted from Python's ``os.name``) ``basedir`` base directory where the worker is running ``numcpus`` number of CPUs on the worker, either as configured or as detected (since ``buildbot-worker`` version 0.9.0) ``version`` worker's version (same as the result of :meth:`~buildbot_worker.pb.BotPb.remote_getVersion` call) ``worker_commands`` worker supported commands (same as the result of :meth:`~buildbot_worker.pb.BotPb.remote_getCommands` call) :meth:`~buildbot_worker.pb.BotPb.remote_getVersion` Returns the worker's version. :meth:`~buildbot_worker.pb.BotPb.remote_shutdown` Shuts down the worker cleanly. 
Worker methods ~~~~~~~~~~~~~~ The master-side object has the following method: :meth:`~buildbot.protocols.pb.Connection.perspective_keepalive` Does nothing - used to keep traffic flowing over the TCP connection Setup ----- After the initial connection and trading of a mind (:class:`buildbot_worker.pb.BotPb`) for an avatar (Worker), the master calls the Bot's :meth:`setBuilderList` method to set up the proper builders on the worker side. This method returns a reference to each of the new worker-side :class:`~buildbot_worker.pb.WorkerForBuilderPb` objects, described below. Each of these is handed to the corresponding master-side :class:`~buildbot.process.workerforbuilder.WorkerForBuilder` object. This immediately calls the remote :meth:`setMaster` method, and then the :meth:`print` method. Pinging ------- To ping a remote Worker, the master calls its :meth:`print` method. Building -------- When a build starts, the master calls the worker's :meth:`startBuild` method. Each BuildStep instance will subsequently call the :meth:`startCommand` method, passing a reference to itself as the ``stepRef`` parameter. The :meth:`startCommand` method returns immediately, and the end of the command is signalled with a call to a method on the master-side BuildStep object. .. _worker-for-builders: Worker For Builders ------------------- Each worker has a set of builders which can run on it. These are represented by distinct classes on the master and worker, just like the Worker and Bot objects described above. On the worker side, builders are represented as instances of the :class:`buildbot_worker.pb.WorkerForBuilderPb` class. On the master side, they are represented by the :class:`buildbot.process.workerforbuilder.WorkerForBuilder` class. The identical names are a source of confusion. The following will refer to these as the worker-side and master-side Worker For Builder classes. Each object keeps a reference to its opposite in ``self.remote``. 
Worker-Side :class:`~buildbot_worker.pb.WorkerForBuilderPb` Methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :meth:`~buildbot_worker.pb.WorkerForBuilderPb.remote_setMaster` Provides a reference to the master-side Worker For Builder :meth:`~buildbot_worker.pb.WorkerForBuilderPb.remote_print` Adds a message to the worker logfile; used to check round-trip connectivity :meth:`~buildbot_worker.pb.WorkerForBuilderPb.remote_startBuild` Indicates that a build is about to start, and that any subsequent commands are part of that build :meth:`~buildbot_worker.pb.WorkerForBuilderPb.remote_startCommand` Invokes a command on the worker side :meth:`~buildbot_worker.pb.WorkerForBuilderPb.remote_interruptCommand` Interrupts the currently-running command Master-side :class:`~buildbot.process.workerforbuilder.WorkerForBuilder` Methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The master side does not have any remotely-callable methods. Commands -------- The actual work done by the worker is represented on the master side by a :class:`buildbot.process.remotecommand.RemoteCommand` instance. The command instance keeps a reference to the worker-side :class:`buildbot_worker.pb.WorkerForBuilderPb`, and calls methods like :meth:`~buildbot_worker.pb.WorkerForBuilderPb.remote_startCommand` to start new commands. Once that method is called, the :class:`~buildbot_worker.pb.WorkerForBuilderPb` instance keeps a reference to the command, and calls the following methods on it: Master-Side RemoteCommand Methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :meth:`~buildbot.process.remotecommand.RemoteCommand.remote_update` Update information about the running command. See below for the format. :meth:`~buildbot.process.remotecommand.RemoteCommand.remote_complete` Signal that the command is complete, either successfully or with a Twisted failure. .. 
_master-worker-updates: Updates ------- Updates from the worker, sent via :meth:`~buildbot.process.remotecommand.RemoteCommand.remote_update`, are a list of individual update elements. Each update element is, in turn, a list of the form ``[data, 0]``, where the 0 is present for historical reasons. The data is a dictionary, with keys describing the contents. The updates are handled by :meth:`~buildbot.process.remotecommand.RemoteCommand.remote_update`. Updates with different keys can be combined into a single dictionary or delivered sequentially as list elements, at the worker's option. To summarize, an ``updates`` parameter to :meth:`~buildbot.process.remotecommand.RemoteCommand.remote_update` might look like this:: [ [ { 'header' : 'running command..' }, 0 ], [ { 'stdout' : 'abcd', 'stderr' : 'local modifications' }, 0 ], [ { 'log' : ( 'cmd.log', 'cmd invoked at 12:33 pm\n' ) }, 0 ], [ { 'rc' : 0 }, 0 ], ] Defined Commands ~~~~~~~~~~~~~~~~ The following commands are defined on the workers. .. _shell-command-args: shell ..... Runs a shell command on the worker. This command takes the following arguments: ``command`` The command to run. If this is a string, it will be passed to the system shell as a string. Otherwise, it must be a list, which will be executed directly. ``workdir`` The directory in which to run the command, relative to the builder dir. ``env`` A dictionary of environment variables to augment or replace the existing environment on the worker. In this dictionary, ``PYTHONPATH`` is treated specially: it should be a list of path components, rather than a string, and will be prepended to the existing Python path. ``initial_stdin`` A string which will be written to the command's standard input before it is closed. ``want_stdout`` If false, then no updates will be sent for stdout. ``want_stderr`` If false, then no updates will be sent for stderr. ``usePTY`` If true, the command should be run with a PTY (POSIX only). This defaults to False. 
``not_really`` If true, skip execution and return an update with rc=0. ``timeout`` Maximum time without output before the command is killed. ``maxTime`` Maximum overall time from the start before the command is killed. ``logfiles`` A dictionary specifying logfiles other than stdio. Keys are the logfile names, and values give the workdir-relative filename of the logfile. Alternately, a value can be a dictionary; in this case, the dictionary must have a ``filename`` key specifying the filename, and can also have the following keys: ``follow`` Only follow the file from its current end-of-file, rather that starting from the beginning. ``logEnviron`` If false, the command's environment will not be logged. The ``shell`` command sends the following updates: ``stdout`` The data is a bytestring which represents a continuation of the stdout stream. Note that the bytestring boundaries are not necessarily aligned with newlines. ``stderr`` Similar to ``stdout``, but for the error stream. ``header`` Similar to ``stdout``, but containing data for a stream of Buildbot-specific metadata. ``rc`` The exit status of the command, where -- in keeping with UNIX tradition -- 0 indicates success and any nonzero value is considered a failure. No further updates should be sent after an ``rc``. ``log`` This update contains data for a logfile other than stdio. The data associated with the update is a tuple of the log name and the data for that log. Note that non-stdio logs do not distinguish output, error, and header streams. uploadFile .......... Upload a file from the worker to the master. The arguments are ``workdir`` Base directory for the filename, relative to the builder's basedir. ``workersrc`` Name of the filename to read from, relative to the workdir. ``writer`` A remote reference to a writer object, described below. ``maxsize`` Maximum size, in bytes, of the file to write. The operation will fail if the file exceeds this size. 
``blocksize`` The block size with which to transfer the file. ``keepstamp`` If true, preserve the file modified and accessed times. The worker calls a few remote methods on the writer object. First, the ``write`` method is called with a bytestring containing data, until all of the data has been transmitted. Then, the worker calls the writer's ``close``, followed (if ``keepstamp`` is true) by a call to ``upload(atime, mtime)``. This command sends ``rc`` and ``stderr`` updates, as defined for the ``shell`` command. uploadDirectory ............... Similar to ``uploadFile``, this command will upload an entire directory to the master, in the form of a tarball. It takes the following arguments: ``workdir`` ``workersrc`` ``writer`` ``maxsize`` ``blocksize`` See ``uploadFile`` for these arguments. ``compress`` Compression algorithm to use -- one of ``None``, ``'bz2'``, or ``'gz'``. The writer object is treated similarly to the ``uploadFile`` command, but after the file is closed, the worker calls the master's ``unpack`` method with no arguments to extract the tarball. This command sends ``rc`` and ``stderr`` updates, as defined for the ``shell`` command. downloadFile ............ This command will download a file from the master to the worker. It takes the following arguments: ``workdir`` Base directory for the destination filename, relative to the builder basedir. ``workerdest`` Filename to write to, relative to the workdir. ``reader`` A remote reference to a reader object, described below. ``maxsize`` Maximum size of the file. ``blocksize`` The block size with which to transfer the file. ``mode`` Access mode for the new file. The reader object's ``read(maxsize)`` method will be called with a maximum size, which will return no more than that number of bytes as a bytestring. At EOF, it will return an empty string. Once EOF is received, the worker will call the remote ``close`` method. This command sends ``rc`` and ``stderr`` updates, as defined for the ``shell`` command. 
mkdir ..... This command will create a directory on the worker. It will also create any intervening directories required. It takes the following argument: ``dir`` Directory to create. The ``mkdir`` command produces the same updates as ``shell``. rmdir ..... This command will remove a directory or file on the worker. It takes the following arguments: ``dir`` Directory to remove. ``timeout`` ``maxTime`` See ``shell`` above. The ``rmdir`` command produces the same updates as ``shell``. cpdir ..... This command will copy a directory from one place to another place on the worker. It takes the following arguments: ``fromdir`` Source directory for the copy operation, relative to the builder's basedir. ``todir`` Destination directory for the copy operation, relative to the builder's basedir. ``timeout`` ``maxTime`` See ``shell`` above. The ``cpdir`` command produces the same updates as ``shell``. stat .... This command returns status information about a file or directory. It takes a single parameter, ``file``, specifying the filename relative to the builder's basedir. It produces two status updates: ``stat`` The return value from Python's ``os.stat``. ``rc`` 0 if the file is found, otherwise 1. glob .... This command finds all pathnames matching a specified pattern that uses shell-style wildcards. It takes a single parameter, ``path``, specifying the pattern to pass to Python's ``glob.glob`` function. It produces two status updates: ``files`` The list of matching files returned from ``glob.glob`` ``rc`` 0 if the ``glob.glob`` does not raise exception, otherwise 1. listdir ....... This command reads the directory and returns the list with directory contents. It takes a single parameter, ``dir``, specifying the directory relative to the builder's basedir. It produces two status updates: ``files`` The list of files in the directory returned from ``os.listdir`` ``rc`` 0 if the ``os.listdir`` does not raise exception, otherwise 1. rmfile ...... 
This command removes the file in the worker base directory. It takes a single parameter, ``path``, specifying the file path relative to the builder's basedir. It produces one status update: ``rc`` 0 if the ``os.remove`` does not raise exception, otherwise the corresponding errno. buildbot-3.4.0/master/docs/developer/metrics.rst000066400000000000000000000073501413250514000217320ustar00rootroot00000000000000.. _Metrics: Metrics ======= New in Buildbot 0.8.4 is support for tracking various performance metrics inside the buildbot master process. Currently, these are logged periodically according to the ``log_interval`` configuration setting of the :bb:cfg:`metrics` configuration. The metrics subsystem is implemented in :mod:`buildbot.process.metrics`. It makes use of twisted's logging system to pass metrics data from all over Buildbot's code to a central :class:`MetricsLogObserver` object, which is available at ``BuildMaster.metrics`` or via ``Status.getMetrics()``. Metric Events ------------- :class:`MetricEvent` objects represent individual items to monitor. There are three sub-classes implemented: :class:`MetricCountEvent` Records incremental increase or decrease of some value, or an absolute measure of some value. :: from buildbot.process.metrics import MetricCountEvent # We got a new widget! MetricCountEvent.log('num_widgets', 1) # We have exactly 10 widgets MetricCountEvent.log('num_widgets', 10, absolute=True) :class:`MetricTimeEvent` Measures how long things take. By default the average of the last 10 times will be reported. :: from buildbot.process.metrics import MetricTimeEvent # function took 0.001s MetricTimeEvent.log('time_function', 0.001) :class:`MetricAlarmEvent` Indicates the health of various metrics. 
:: from buildbot.process.metrics import MetricAlarmEvent, ALARM_OK # num_workers looks ok MetricAlarmEvent.log('num_workers', level=ALARM_OK) Metric Handlers --------------- :class:`MetricsHandler` objects are responsible for collecting :class:`MetricEvent`\s of a specific type and keeping track of their values for future reporting. There are :class:`MetricsHandler` classes corresponding to each of the :class:`MetricEvent` types. Metric Watchers --------------- Watcher objects can be added to :class:`MetricsHandlers` to be called when metric events of a certain type are received. Watchers are generally used to record alarm events in response to count or time events. Metric Helpers -------------- :func:`countMethod(name)` A function decorator that counts how many times the function is called. :: from buildbot.process.metrics import countMethod @countMethod('foo_called') def foo(): return "foo!" :func:`Timer(name)` :class:`Timer` objects can be used to make timing events easier. When ``Timer.stop()`` is called, a :class:`MetricTimeEvent` is logged with the elapsed time since ``timer.start()`` was called. :: from buildbot.process.metrics import Timer def foo(): t = Timer('time_foo') t.start() try: for i in range(1000): calc(i) return "foo!" finally: t.stop() :class:`Timer` objects also provide a pair of decorators, :func:`startTimer`/\ :func:`stopTimer` to decorate other functions. :: from buildbot.process.metrics import Timer t = Timer('time_thing') @t.startTimer def foo(): return "foo!" @t.stopTimer def bar(): return "bar!" foo() bar() :func:`timeMethod(name)` A function decorator that measures how long a function takes to execute. Note that many functions in Buildbot return deferreds, so may return before all the work they set up has completed. Using an explicit :class:`Timer` is better in this case. :: from buildbot.process.metrics import timeMethod @timeMethod('time_foo') def foo(): for i in range(1000): calc(i) return "foo!" 
buildbot-3.4.0/master/docs/developer/mq.rst000066400000000000000000000261371413250514000207050ustar00rootroot00000000000000.. _Messaging_and_Queues: Messaging and Queues ==================== Buildbot uses a message-queueing structure to handle asynchronous notifications in a distributed fashion. This avoids, for the most part, the need for each master to poll the database, allowing masters to react to events as they happen. Overview -------- Buildbot is structured as a hybrid state- and event-based application, which will probably offend adherents of either pattern. In particular, the most current state is stored in the :doc:`Database `, while any changes to the state are announced in the form of a message. The content of the messages is sufficient to reconstruct the updated state, allowing external processes to represent "live" state without polling the database. This split nature immediately brings to light the problem of synchronizing the two interfaces. Queueing systems can introduce queueing delays as messages propagate. Likewise, database systems may introduce a delay between committed modifications and the modified data appearing in queries; for example, with MySQL master/slave replication, there can be several seconds' delay before a slave is updated. Buildbot's MQ connector simply relays messages, and makes no attempt to coordinate the timing of those messages with the corresponding database updates. It is up to higher layers to apply such coordination. Connector API ------------- All access to the queueing infrastructure is mediated by an MQ connector. The connector's API is defined below. The connector itself is always available as ``master.mq``, where ``master`` is the current :py:class:`~buildbot.master.BuildMaster` instance. .. py:module:: buildbot.mq.base The connector API is quite simple. It is loosely based on AMQP, although simplified because there is only one exchange (see :ref:`queue-schema`). 
All messages include a "routing key", which is a tuple of *7-bit ascii* strings describing the content of the message. By convention, the first element of the tuple gives the type of the data in the message. The last element of the tuple describes the event represented by the message. The remaining elements of the tuple describe attributes of the data in the message that may be useful for filtering; for example, buildsets may usefully be filtered on buildsetids. The topics and associated message types are described below in :ref:`message-schema`. Filters are also specified with tuples. For a filter to match a routing key, it must have the same length, and each element of the filter that is not None must match the corresponding routing key element exactly. .. py:class:: MQConnector This is an abstract parent class for MQ connectors, and defines the interface. It should not be instantiated directly. It is a subclass of :py:class:`buildbot.util.service.AsyncService`, and subclasses can override service methods to start and stop the connector. .. py:method:: produce(routing_key, data) :param tuple routing_key: the routing key for this message :param data: JSON-serializable body of the message This method produces a new message and queues it for delivery to any associated consumers. The routing key and data should match one of the formats given in :ref:`message-schema`. The method returns immediately; the caller will not receive any indication of a failure to transmit the message, although errors will be displayed in ``twistd.log``. .. py:method:: startConsuming(callback, filter[, persistent_name=name]) :param callback: callable to invoke for matching messages :param tuple filter: filter for routing keys of interest :param persistent_name: persistent name for this consumer :returns: a :py:class:`QueueRef` instance via Deferred This method will begin consuming messages matching the filter, invoking ``callback`` for each message. See above for the format of the filter. 
The callback will be invoked with two arguments: the message's routing key and the message body, as a Python data structure. It may return a Deferred, but no special processing other than error handling will be applied to that Deferred. In particular, note that the callback may be invoked a second time before the Deferred from the first invocation fires. A message is considered delivered as soon as the callback is invoked - there is no support for acknowledgements or re-queueing unhandled messages. Note that the timing of messages is implementation-dependent. It is not guaranteed that messages sent before the :py:meth:`startConsuming` method completes will be received. In fact, because the registration process may not be immediate, even messages sent after the method completes may not be received. If ``persistent_name`` is given, then the consumer is assumed to be persistent, and consumption can be resumed with the given name. Messages that arrive when no consumer is active are queued and will be delivered when a consumer becomes active. .. py:method:: waitUntilEvent(filter, check_callback) :param tuple filter: filter for routing keys of interest :param function check_callback: a callback which check if the event has already happened :returns: a Deferred that fires when the event has been received and contains a (routing_key, value) tuple representing the event This method is a helper which returns a deferred that fires when a certain event has occurred. This is useful for waiting the end of a build or disconnection of a worker. You shall make sure when using this method that this event will happen in the future, and take care of race conditions. For this reason, the caller must provide a check_callback that will check if the event has already occurred. The whole race-condition-free process is: * Register to event * Check if it has already happened * If not, wait for the event * Unregister from event .. 
py:class:: QueueRef The :py:class:`QueueRef` returned (via Deferred) from :py:meth:`~MQConnector.startConsuming` can be used to stop consuming messages when they are no longer needed. Users should be *very* careful to ensure that consumption is terminated in all cases. .. py:method:: stopConsuming() Stop invoking the ``callback`` passed to :py:meth:`~MQConnector.startConsuming`. This method can be called multiple times for the same :py:class:`QueueRef` instance without harm. This method potentially returns a Deferred. After the first call to this method has returned, the callback will not be invoked. Implementations ~~~~~~~~~~~~~~~ Several concrete implementations of the MQ connector exist. The simplest is intended for cases where only one master exists, similar to the SQLite database support. The remainder use various existing queueing applications to support distributed communications. Simple ...... .. py:module:: buildbot.mq.simple .. py:class:: SimpleMQ The :py:class:`SimpleMQ` class implements a local equivalent of a message-queueing server. It is intended for Buildbot installations with only one master. Wamp .... .. py:module:: buildbot.mq.wamp .. py:class:: WampMQ The :py:class:`WampMQ` class implements message-queueing using a wamp router. This class translates the semantics of the Buildbot MQ API to the semantics of the wamp messaging system. The message route is translated to a wamp topic by joining with dot and prefixing with the Buildbot namespace. Here is an example message that is sent via wamp: .. code-block:: python topic = "org.buildbot.mq.builds.1.new" data = { 'builderid': 10, 'buildid': 1, 'buildrequestid': 13, 'workerid': 20, 'complete': False, 'complete_at': None, 'masterid': 824, 'number': 1, 'results': None, 'started_at': 1, 'state_string': u'created' } .. py:module:: buildbot.wamp.connector .. py:class:: WampConnector The :py:class:`WampConnector` class implements a Buildbot service for wamp. 
It is managed outside of the mq module as this protocol can also be reused as a worker protocol. The connector supports queueing of requests until the wamp connection is created but does not support disconnection and reconnection. Reconnection will be supported as part of a next release of AutobahnPython (https://github.com/crossbario/autobahn-python/issues/295). There is a chicken and egg problem at the Buildbot initialization phases, so the produce messages are actually not sent with deferred. .. _queue-schema: Queue Schema ------------ Buildbot uses a particularly simple architecture: in AMQP terms, all messages are sent to a single topic exchange, and consumers define anonymous queues bound to that exchange. In future versions of Buildbot, some components (e.g., schedulers) may use durable queues to ensure that messages are not lost when one or more masters are disconnected. .. _message-schema: Message Schema -------------- This section describes the general structure messages. The specific routing keys and content of each message are described in the relevant sub-sections of :ref:`Data_API`. Routing Keys ~~~~~~~~~~~~ Routing keys are a sequence of strings, usually written with dot separators. Routing keys are represented with variables when one or more of the words in the key are defined by the content of the message. For example, ``buildset.$bsid`` describes routing keys such as ``buildset.1984``, where 1984 is the ID of the buildset described by the message body. Internally, keys are represented as tuples of strings. Body Format ~~~~~~~~~~~ Message bodies are encoded in JSON. The top level of each message is an object (a dictionary). Most simple Python types - strings, numbers, lists, and dictionaries - are mapped directly to the corresponding JSON types. Timestamps are represented as seconds since the UNIX epoch in message bodies. Cautions ~~~~~~~~ Message ordering is generally maintained by the backend implementations, but this should not be depended on. 
That is, messages originating from the same master are *usually* delivered to consumers in the order they were produced. Thus, for example, a consumer can expect to see a build request claimed before it is completed. That said, consumers should be resilient to messages delivered out of order, at the very least by scheduling a "reload" from state stored in the database when messages arrive in an invalid order. Unit tests should be used to ensure this resiliency. Some related messages are sent at approximately the same time. Due to the non-blocking nature of message delivery, consumers should *not* assume that subsequent messages in a sequence remain queued. For example, upon receipt of a ``buildset.$bsid.new`` message, it is already too late to try to subscribe to the associated build requests messages, as they may already have been consumed. Schema Changes ~~~~~~~~~~~~~~ Future versions of Buildbot may add keys to messages, or add new messages. Consumers should expect unknown keys and, if using wildcard topics, unknown messages. buildbot-3.4.0/master/docs/developer/plugins-publish.rst000066400000000000000000000066221413250514000234120ustar00rootroot00000000000000=============================== How to package Buildbot plugins =============================== If you customized an existing component (see :doc:`../manual/customization`) or created a new component that you believe might be useful for others, you have two options: * submit the change to the Buildbot main tree, however you need to adhere to certain requirements (see :doc:`style`) * prepare a Python package that contains the functionality you created Here we cover the second option. Package the source ================== To begin with, you must package your changes. 
If you do not know what a Python package is, these two tutorials will get you going: * `Python Packaging User Guide `__ * `The Hitchhiker’s Guide to Packaging `__ The former is more recent and, while it addresses everything that you need to know about Python packages, it's still work in progress. The latter is a bit dated, though it was the most complete guide for quite some time available for Python developers looking to package their software. You may also want to check the `sample project `_, which exemplifies the best Python packaging practices. Making the plugin package ========================= Buildbot supports several kinds of pluggable components: * ``worker`` * ``changes`` * ``schedulers`` * ``steps`` * ``reporters`` * ``util`` (these are described in :doc:`../manual/plugins`), and * ``www`` which is described in :doc:`web server configuration <../manual/configuration/www>`. Once you have your component packaged, it's quite straightforward: you just need to add a few lines to the ``entry_points`` parameter of your call of ``setup`` function in :file:`setup.py` file: .. code-block:: python setup( ... entry_points = { ..., 'buildbot.{kind}': [ 'PluginName = PluginModule:PluginClass' ] }, ... ) (You might have seen different ways to specify the value for ``entry_points``, however they all do the same thing. Full description of possible ways is available in `setuptools documentation `_.) After the :src:`setup.py ` file is updated, you can build and install it: .. code-block:: none $ python setup.py build $ sudo python setup.py install (depending on your particular setup, you might not need to use :command:`sudo`). After that, the plugin should be available for Buildbot and you can use it in your :file:`master.cfg` as: .. code-block:: python from buildbot.plugins import {kind} ... {kind}.PluginName ... Publish the package =================== This is the last step before the plugin becomes available to others. 
Once again, there is a number of options available for you: * just put a link to your version control system * prepare a source tarball with the plugin (``python setup.py sdist``) * or publish it on `PyPI `_ The last option is probably the best one since it will make your plugin available pretty much to all Python developers. Once you have published the package, please send a link to `buildbot-devel `_ mailing list, so we can include a link to your plugin to :doc:`../manual/plugins`. buildbot-3.4.0/master/docs/developer/pull-request.rst000066400000000000000000000206201413250514000227210ustar00rootroot00000000000000Submitting Pull Requests ======================== As Buildbot is used by software developers, it tends to receive a significant number of patches. The most effective way to make sure your patch gets noticed and merged is to submit it via GitHub. This assumes some familiarity with git, but not too much. Note that GitHub has some great `Git guides `_ to get you started. Guidelines ---------- * Pull requests should be based on the latest development code, not on the most recent release. That is, you should check out the `master` branch and develop on top of it. * Final pull requests should include code changes, relevant documentation changes, and relevant unit tests. Any patch longer than a few lines which does not have documentation or tests is unlikely to be merged as is. The developers will most likely ask to add documentation or tests. * Individual commits should, to the extent possible, be single-purpose. Please do not lump all of the changes you made to get Buildbot working the way you like into a single commit. * Pull requests must pass all tests that run against the GitHub pull requests. See :ref:`LocalTestingCheatSheet` for instructions of how to launch various tests locally. * Python code in Buildbot uses four-space indentations, with no tabs. Lines should be wrapped before the 100th column. * Pull requests must reliably pass all tests. 
Buildbot does not tolerate "flaky" tests. If you have trouble with tests that fail without any of your changes applied, get in touch with the developers for help. * Pull requests that add features or change existing behavior should include a brief description in the release notes. See the `newsfragments` directory and read the `README.txt `_ file therein. * Git commit messages form the "ChangeLog" for Buildbot, and as such should be as descriptive as possible. * Backward and forward compatibility is important to Buildbot. Try to minimize the effect of your patch on existing users. Additional suggestions ~~~~~~~~~~~~~~~~~~~~~~ The Buildbot developers are quite busy, and it can take a while to review a patch. While the following are not required, they will make things easier for you and the developers: * Make a distinct pull request, on a distinct branch in your repository, for each unrelated change. Some pull request may get merged immediately, while others will require revision, and this can get very confusing in a single branch. * Smaller, incremental commits are better than one large commit, as they can be considered on their own merits. It's OK for a commit to add code that is unused (except for tests, of course) until a subsequent commit is applied. * If an individual change is complex or large, it makes sense to create an unpolished PR at first to gather feedback. When the Buildbot developers confirm that the presented pull request is the way to go, it can be polished as a second step. * Git history is the primary means by which Buildbot establishes authorship. Be careful to credit others for their work, if you include it in your code. How to create a pull request ---------------------------- .. note:: See `this github guide `_ which offers a more generic description of this process. * Sign up for a free account at http://github.com, if you don't already have one. * Go to http://github.com/buildbot/buildbot and click “fork”. 
This will create your own public copy of the latest Buildbot source. * Clone your forked repository on your local machine, so you can do your changes. GitHub will display a link titled "Your Clone URL". Click this link to see instructions for cloning your URL. It's something like: .. code-block:: bash git clone git@github.com:myusername/buildbot.git cd buildbot * Locally, create a new branch based on the `master` branch: .. code-block:: bash git checkout -b myfixes origin/master * Hack mercilessly. If you're a git aficionado, you can make a neat and pretty commit sequence; otherwise, just get it done. Don't forget to add new test cases and any necessary documentation. * Test your changes. See :ref:`LocalTestingCheatSheet` for instructions of how to launch various tests locally. * Commit. For this step it's best to use a GUI for Git. See this `list `_ of known Git GUIs. If you only want to use the shell, do the following: .. code-block:: bash git add $files_that_matter git commit * When you're confident that everything is as it should be, push your changes back to your repository on GitHub, effectively making them public. .. code-block:: bash git push origin myfixes * Now all that's left is to let the Buildbot developers know that you have patches awaiting their attention. In your web browser, go to your repository (you may have to hit "reload") and choose your new branch from the "all branches" menu. * Double-check that you're on your branch, and not on a particular commit. The current URL should end in the name of your patch, not in a SHA1 hash. * Click “Pull Request” * Double-check that the base branch is "buildbot/buildbot@master". If your repository is a fork of the buildbot/buildbot repository, this should already be the case. * Fill out the details and send away! .. _LocalTestingCheatSheet: Local testing cheat sheet ------------------------- This section details how to locally run the test suites that are run by Buildbot during each PR. 
Not all test suites have been documented so far, only these that fail most often. Before each of the commands detailed below, a virtualenv must be setup as described in :ref:`PythonDevQuickStart`: .. code-block:: bash make virtualenv . .venv/bin/activate If you see weird test results after changing branches of the repository, remove the `.venv` directory and repeat the above again. Note that `pip install -r .txt` only needs to be run once at the beginning of your testing session. Master unit tests ~~~~~~~~~~~~~~~~~ Tests in this category run the Python unit tests for the master. These tests are represented by **bb/trial/** test names in the Buildbot CI. To run locally, execute the following: .. code-block:: bash pip install -r requirements-ci.txt trial -j8 buildbot # change -j parameter to fit the number of cores you have Worker unit tests ~~~~~~~~~~~~~~~~~ Tests in this category run the Python unit tests for the worker. These tests are represented by **bb/trial_worker/** test names in the Buildbot CI. To run locally, execute the following: .. code-block:: bash pip install -r requirements-ciworker.txt trial buildbot_worker Linter checks ~~~~~~~~~~~~~ Tests in this category run simple syntax and style checks on the Python code. These tests are represented by **bb/pylint/** and **bb/flake8/** test names in the Buildbot CI. To run locally, execute the following: .. code-block:: bash pip install -r requirements-ci.txt make pylint make flake8 If you see spell check errors, but your words are perfectly correct, then you may need to add these words to a whitelist at `common/code_spelling_ignore_words.txt`. isort ~~~~~ Tests in this category sort the imports in the Python code. These tests are represented by **bb/isort/** test names in the Buildbot CI. To run locally, execute the following: .. code-block:: bash pip install -r requirements-ci.txt isort Documentation ~~~~~~~~~~~~~ This test builds the documentation. 
It is represented by **bb/docs/** test names in the Buildbot CI. To run locally, execute the following: .. code-block:: bash pip install -r requirements-ci.txt pip install -r requirements-cidocs.txt make docs If you see spell check errors, but your words are perfectly correct, then you may need to add these words to a whitelist at `master/docs/spelling_wordlist.txt`. End-to-end tests ~~~~~~~~~~~~~~~~ Tests in this category run the end-to-end tests by launching a full Buildbot instance, clicking on buttons on the web UI and testing the results. It is represented by **bb/smokes/** test names in the Buildbot CI. The tests are sometimes unstable: if you didn't change the front end code and see a failure then it's most likely an instability. To run locally, install a Chrome-compatible browser and execute the following: .. code-block:: bash pip install -r requirements-ci.txt make tarballs ./common/smokedist.sh whl buildbot-3.4.0/master/docs/developer/quickstart.rst000066400000000000000000000134471413250514000224620ustar00rootroot00000000000000 Development Quick-start ======================= Buildbot is a python based application. It tries very hard to follow the python best practices and make it easy to dive into the code. In order to develop on Buildbot you need just a python environment and possibly some native packages in stripped-down setups. The most up to date list is in the docker file we use to manage our CI (MetaBBotDockerFile_). If you are completely new to python, it's best to first follow the tutorials you get when you type "python virtualenv for dummies" in your favorite search engine. .. _MetaBBotDockerFile: https://github.com/buildbot/metabbotcfg/blob/nine/docker/metaworker/Dockerfile .. _PythonDevQuickStart: Create a Buildbot Python Environment ------------------------------------ Buildbot uses Twisted `trial `_ to run its test suite. Windows users also need GNU make on their machines. 
The easiest way is to install it via the choco package manager, ``choco install make``. But WSL or MSYS2 is an even better option because of the integrated bash. Following is a quick shell session to put you on the right track, including running the test suite. .. code-block:: bash # the usual buildbot development bootstrap with git and virtualenv git clone https://github.com/buildbot/buildbot cd buildbot # run a helper script which creates the virtualenv for development. # Virtualenv allows to install python packages without affecting # other parts of the system make virtualenv # activate the virtualenv (you should now see (.venv) in your shell prompt) . .venv/bin/activate # now run the test suite trial buildbot # using all CPU cores within the system helps to speed everything up trial -j16 buildbot # find all tests that talk about mail trial -n --reporter=bwverbose buildbot | grep mail # run only one test module trial buildbot.test.unit.test_reporters_mail # you can also skip the virtualenv activation and # run the test suite in one step with make make trial # you can pass options to make using TRIALOPTS make trial TRIALOPTS='-j16 buildbot' # or test with a specific Python version make trial VENV_PY_VERSION=/usr/local/bin/python3 Create a JavaScript Frontend Environment ---------------------------------------- This section describes how to get set up quickly to hack on the JavaScript UI. It does not assume familiarity with Python, although a Python installation is required, as well as ``virtualenv``. You will also need ``NodeJS``, and ``yarn`` installed. Prerequisites ~~~~~~~~~~~~~ .. note:: Buildbot UI requires at least node 4 or newer and yarn. * Install LTS release of node.js. http://nodejs.org/ is a good start for Windows and OSX. For modern Linux distributions, you can often just install the distribution-provided node version if it's recent enough. You can use yarn from the same source. 
The below method has been tested on Ubuntu 18.04 and should work on recent enough Debian. .. code-block:: none sudo apt install nodejs yarn In other cases, use https://deb.nodesource.com. .. _JSDevQuickStart: Hacking the Buildbot JavaScript ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To effectively develop Buildbot JavaScript, you'll need a running Buildmaster configured to operate out of the source directory. As a prerequisite, follow :ref:`PythonDevQuickStart`. With that, you should have created and enabled a virtualenv Python environment. Next, you need to install the ``buildbot`` and ``buildbot-www`` python packages in ``--editable`` mode, which means their source directories will be directly used. .. code-block:: none make frontend This will fetch a number of python dependencies from pypi, the Python package repository, and also a number of node.js dependencies that are used for building the web application. Then the actual frontend code will be built with artifacts stored in the source directory, e.g. ``www/base/buildbot_www/static``. Finally, the built python packages will be installed to virtualenv environment as ``--editable`` packages. This means that the webserver will load resources from ``www/base/buildbot_www/static``. Now you need to create a master instance. For more details, see the Buildbot :ref:`first-run-label` tutorial. .. code-block:: none mkdir test-master buildbot create-master test-master mv test-master/master.cfg.sample test-master/master.cfg buildbot start test-master If all goes well, the master will start up and run in the background. During ``make frontend``, the www frontend was built using production mode, so everything is minified and hard to debug. However, the frontend was installed as an editable python package, so all changes in the artifacts (e.g. ``www/base/buildbot_www/static``) in the source directories will be observed in the browser. 
Thus, we can manually rebuild the JavaScript resources using development settings, so they are not minified and easier to debug. This can be done by running the following in e.g. ``www/base`` directory: .. code-block:: none yarn run build-dev The above rebuilds the resources only once. After each change you need to refresh the built resources. The actual commands that are run are stored in the ``package.json`` file under the ``scripts`` key. To avoid the need to type the above command after each change, you can use the following: .. code-block:: none yarn run dev This will watch files for changes and reload automatically. To run unit tests, do the following: .. code-block:: none yarn run test To run unit tests within all frontend packages within Buildbot, do the following at the root of the project: .. code-block:: none make frontend_tests .. note:: You need to have Chrome-based browser installed in order to run unit tests in the default configuration. buildbot-3.4.0/master/docs/developer/raml/000077500000000000000000000000001413250514000204605ustar00rootroot00000000000000buildbot-3.4.0/master/docs/developer/raml/build.rst000066400000000000000000000000721413250514000223100ustar00rootroot00000000000000.. jinja:: data_api_build :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/build_data.rst000066400000000000000000000000771413250514000233060ustar00rootroot00000000000000.. jinja:: data_api_build_data :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/builder.rst000066400000000000000000000000741413250514000226410ustar00rootroot00000000000000.. jinja:: data_api_builder :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/buildrequest.rst000066400000000000000000000001011413250514000237120ustar00rootroot00000000000000.. jinja:: data_api_buildrequest :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/buildset.rst000066400000000000000000000000751413250514000230270ustar00rootroot00000000000000.. 
jinja:: data_api_buildset :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/change.rst000066400000000000000000000000731413250514000224370ustar00rootroot00000000000000.. jinja:: data_api_change :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/changesource.rst000066400000000000000000000001011413250514000236500ustar00rootroot00000000000000.. jinja:: data_api_changesource :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/forcescheduler.rst000066400000000000000000000001031413250514000242010ustar00rootroot00000000000000.. jinja:: data_api_forcescheduler :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/identifier.rst000066400000000000000000000000771413250514000233400ustar00rootroot00000000000000.. jinja:: data_api_identifier :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/index.rst000066400000000000000000000007671413250514000223330ustar00rootroot00000000000000.. _REST_API_specs: REST API Specification ====================== This section documents the available REST APIs according to the RAML specification. .. toctree:: :maxdepth: 1 builder buildrequest build buildset build_data change changesource forcescheduler identifier logchunk log master patch rootlink scheduler sourcedproperties sourcestamp spec step worker test_result test_result_set raw-endpoints buildbot-3.4.0/master/docs/developer/raml/log.rst000066400000000000000000000000701413250514000217700ustar00rootroot00000000000000.. jinja:: data_api_log :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/logchunk.rst000066400000000000000000000000751413250514000230260ustar00rootroot00000000000000.. jinja:: data_api_logchunk :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/master.rst000066400000000000000000000000731413250514000225050ustar00rootroot00000000000000.. 
jinja:: data_api_master :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/patch.rst000066400000000000000000000000721413250514000223100ustar00rootroot00000000000000.. jinja:: data_api_patch :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/raw-endpoints.rst000066400000000000000000000010711413250514000240030ustar00rootroot00000000000000Raw endpoints ............. .. jinja:: data_api Raw endpoints allow to download content in their raw format (i.e. not within a json glue). The ``content-disposition`` http header is set, so that the browser knows which file to store the content to. {% for ep, config in raml.rawendpoints.items()|sort %} .. bb:rpath:: {{ep}} {% for key, value in config.uriParameters.items() -%} :pathkey {{value.type}} {{key}}: {{raml.reindent(value.description, 4*2)}} {% endfor %} {{config['get'].description}} {% endfor %} buildbot-3.4.0/master/docs/developer/raml/rootlink.rst000066400000000000000000000000751413250514000230550ustar00rootroot00000000000000.. jinja:: data_api_rootlink :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/scheduler.rst000066400000000000000000000000761413250514000231730ustar00rootroot00000000000000.. jinja:: data_api_scheduler :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/sourcedproperties.rst000066400000000000000000000001061413250514000247700ustar00rootroot00000000000000.. jinja:: data_api_sourcedproperties :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/sourcestamp.rst000066400000000000000000000001001413250514000235460ustar00rootroot00000000000000.. jinja:: data_api_sourcestamp :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/spec.rst000066400000000000000000000000711413250514000221420ustar00rootroot00000000000000.. jinja:: data_api_spec :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/step.rst000066400000000000000000000000711413250514000221630ustar00rootroot00000000000000.. 
jinja:: data_api_step :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/test_result.rst000066400000000000000000000001001413250514000235560ustar00rootroot00000000000000.. jinja:: data_api_test_result :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/test_result_set.rst000066400000000000000000000001041413250514000244350ustar00rootroot00000000000000.. jinja:: data_api_test_result_set :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/raml/worker.rst000066400000000000000000000000731413250514000225230ustar00rootroot00000000000000.. jinja:: data_api_worker :file: templates/raml.jinja buildbot-3.4.0/master/docs/developer/rest.rst000066400000000000000000000165051413250514000212430ustar00rootroot00000000000000.. This is a partially generated document. You can modify it in incremental manner using following command: pip install watchdog # install watchmedo make html # to do once watchmedo shell-command -p '*.rst' -c 'time sphinx-build -b html -d _build/doctrees -q . _build/html developer/rest.rst' -wR # will re-run each time you modify rst file .. _REST_API: REST API ======== The REST API is a public interface which can be used by external code to control Buildbot. Internally, the REST API is a thin wrapper around the data API's "Getter" and "Control" sections. It is also designed, in keeping with REST principles, to be discoverable. As such, the details of the paths and resources are not documented here. Begin at the root URL, and see the :ref:`Data_API` documentation for more information. The precise specifications in RAML format are described in :ref:`REST_API_specs` documentation. .. contents:: :local: Versions ~~~~~~~~ The API described here is version 2. The ad-hoc API from Buildbot-0.8.x, version 1, is no longer supported [#apiv1]_. The policy for incrementing the version is when there is an incompatible change added. Removing a field or endpoint is considered incompatible change. 
Adding a field or endpoint is not considered incompatible, and thus will only be described as a change in release notes. The policy is that we will avoid as much as possible incrementing the version. .. [#apiv1] The JSON API defined by ``status_json.py`` in Buildbot-0.8.x is considered version 1, although its root path was ``json``, not ``api/v1``. Getting ~~~~~~~ To get data, issue a GET request to the appropriate path. For example, with a base URL of ``http://build.example.org/buildbot``, the list of masters for builder 9 is available at ``http://build.example.org/buildbot/api/v2/builders/9/masters``. .. bb:rtype:: collection Collections ~~~~~~~~~~~ Results are formatted in keeping with the `JSON API `_ specification. The top level of every response is an object. Its keys are the plural names of the resource types, and the values are lists of objects, even for a single-resource request. For example: .. code-block:: json { "meta": { "total": 2 }, "schedulers": [ { "master": null, "name": "smoketest", "schedulerid": 1 }, { "master": { "active": true, "last_active": 1369604067, "link": "http://build.example.org/api/v2/master/1", "masterid": 1, "name": "master3:/BB/master" }, "name": "goaheadtryme", "schedulerid": 2 } ] } A response may optionally contain extra, related resources beyond those requested. The ``meta`` key contains metadata about the response, including the total count of resources in a collection. Several query parameters may be used to affect the results of a request. These parameters are applied in the order described (so, it is not possible to sort on a field that is not selected, for example). Field Selection ............... If only certain fields of each resource are required, the ``field`` query parameter can be used to select them. 
For example, the following will select just the names and id's of all schedulers: * ``http://build.example.org/api/v2/scheduler?field=name&field=schedulerid`` Field selection can be used for either detail (single-entity) or collection (multi-entity) requests. The remaining options only apply to collection requests. Filtering ......... Collection responses may be filtered on any simple top-level field. To select records with a specific value use the query parameter ``{field}={value}``. For example, ``http://build.example.org/api/v2/scheduler?name=smoketest`` selects the scheduler named "smoketest". Filters can use any of the operators listed below, with query parameters of the form ``{field}__{operator}={value}``. ``eq`` equality, with the same parameter appearing one or multiple times, is equality with one of the given values (so `foo__eq=x&foo__eq=y` would match resources where foo is `x` or `y`) ``ne`` inequality, or set exclusion ``lt`` select resources where the field's value is less than ``{value}`` ``le`` select resources where the field's value is less than or equal to ``{value}`` ``gt`` select resources where the field's value is greater than ``{value}`` ``ge`` select resources where the field's value is greater than or equal to ``{value}`` ``contains`` Select resources where the field's value contains ``{value}``. If the parameter is provided multiple times, results containing at least one of the values are returned (so `foo__contains=x&foo__contains=y` would match resources where foo contains `x`, `y` or both). For example: * ``http://build.example.org/api/v2/builder?name__lt=cccc`` * ``http://build.example.org/api/v2/buildsets?complete__eq=false`` Boolean values can be given as ``on``/``off``, ``true``/``false``, ``yes``/``no``, or ``1``/``0``. Sorting ....... Collection responses may be ordered with the ``order`` query parameter. This parameter takes a field name to sort on, optionally prefixed with ``-`` to reverse the sort. 
The parameter can appear multiple times, and will be sorted lexicographically with the fields arranged in the given order. For example: * ``http://build.example.org/api/v2/buildrequests?order=builderid&order=buildrequestid`` Pagination .......... Collection responses may be paginated with the ``offset`` and ``limit`` query parameters. The offset is the 0-based index of the first result to include, after filtering and sorting. The limit is the maximum number of results to return. Some resource types may impose a maximum on the limit parameter; be sure to check the resulting links to determine whether further data is available. For example: * ``http://build.example.org/api/v2/buildrequests?order=builderid&limit=10`` * ``http://build.example.org/api/v2/buildrequests?order=builderid&offset=20&limit=10`` Controlling ~~~~~~~~~~~ Data API control operations are handled by POST requests using a simplified form of `JSONRPC 2.0 `_. The JSONRPC "method" is mapped to the data API "action", and the parameters are passed to that application. The following parts of the protocol are not supported: * positional parameters * batch requests Requests are sent as an HTTP POST, containing the request JSON in the body. The content-type header must be ``application/json``. A simple example: .. code-block:: none POST http://build.example.org/api/v2/scheduler/4 --> {"jsonrpc": "2.0", "method": "force", "params": {"revision": "abcd", "branch": "dev"}, "id": 843} <-- {"jsonrpc": "2.0", "result": {"buildsetid": 44}, "id": 843} Authentication ~~~~~~~~~~~~~~ Authentication to the REST API is performed in the same manner as authentication to the main web interface. Once credentials have been established, a cookie will be set, which must be sent to the Buildbot REST API with every request thereafter. .. 
code-block:: python import requests s = requests.Session() s.get("https:///auth/login", auth=('user', 'passwd')) builders = s.get("https:///api/v2/builders").json() For those Buildbot instances using OAuth2 authentication providers, it is at the moment not possible to access the authenticated API . buildbot-3.4.0/master/docs/developer/results.rst000066400000000000000000000047231413250514000217660ustar00rootroot00000000000000.. _Build-Result-Codes: Build Result Codes ================== .. py:module:: buildbot.process.results Buildbot represents the status of a step, build, or buildset using a set of numeric constants. From Python, these constants are available in the module ``buildbot.process.results``, but the values also appear in the database and in external tools, so the values are fixed. .. py:data:: SUCCESS Value: 0; color: green; a successful run. .. py:data:: WARNINGS Value: 1; color: orange; a successful run, with some warnings. .. py:data:: FAILURE Value: 2; color: red; a failed run, due to problems in the build itself, as opposed to a Buildbot misconfiguration or bug. .. py:data:: SKIPPED Value: 3; color: white; a run that was skipped -- usually a step skipped by ``doStepIf`` (see :ref:`Buildstep-Common-Parameters`) .. py:data:: EXCEPTION Value: 4; color: purple; a run that failed due to a problem in Buildbot itself. .. py:data:: RETRY Value: 5; color: purple; a run that should be retried, usually due to a worker disconnection. .. py:data:: CANCELLED Value: 6; color: pink; a run that was cancelled by the user. .. py:data:: Results A dictionary mapping result codes to their lowercase names. .. py:function:: worst_status(a, b) This function takes two status values, and returns the "worst" status of the two. This is used to aggregate step statuses into build statuses, and build statuses into buildset statuses. .. 
py:function:: computeResultAndTermination(obj, result, previousResult) :param obj: an object with the attributes of :py:class:`ResultComputingConfigMixin` :param result: the new result :param previousResult: the previous aggregated result Building on :py:func:`worst_status`, this function determines what the aggregated overall status is, as well as whether the attempt should be terminated, based on the configuration in ``obj``. .. py:class:: ResultComputingConfigMixin This simple mixin is intended to help implement classes that will use :py:meth:`computeResultAndTermination`. The class has, as class attributes, the result computing configuration parameters with default values: .. py:attribute:: haltOnFailure .. py:attribute:: flunkOnWarnings .. py:attribute:: flunkOnFailure .. py:attribute:: warnOnWarnings .. py:attribute:: warnOnFailure The names of these attributes are available in the following attribute: .. py:attribute:: resultConfig buildbot-3.4.0/master/docs/developer/schedulers.rst000066400000000000000000000123431413250514000224230ustar00rootroot00000000000000.. _Writing-Schedulers: Writing Schedulers ================== Buildbot schedulers are the process objects responsible for requesting builds. Schedulers are free to decide when to request builds, and to define the parameters of the builds. Many schedulers (e.g., :bb:sched:`SingleBranchScheduler`) request builds in response to changes from change sources. Others, such as :bb:sched:`Nightly`, request builds at specific times. Still others, like :bb:sched:`ForceScheduler`, :bb:sched:`Try_Jobdir`, or :bb:sched:`Triggerable`, respond to external inputs. Each scheduler has a unique name, and within a Buildbot cluster, can be active on at most one master. If a scheduler is configured on multiple masters, it will be inactive on all but one master. 
This provides a form of non-revertive failover for schedulers: if an active scheduler's master fails, an inactive instance of that scheduler on another master will become active. API Stability ------------- Until Buildbot reaches version 1.0.0, API stability is not guaranteed. The instructions in this document may change incompatibly until that time. Implementing A Scheduler ------------------------ A scheduler is a subclass of :py:class:`~buildbot.schedulers.base.BaseScheduler`. The constructor's arguments form the scheduler's configuration. The first two arguments, ``name`` and ``builderNames``, are positional. The remaining arguments are keyword arguments, and the subclass's constructor should accept ``**kwargs`` to pass them to the parent class, along with the positional arguments. :: class MyScheduler(base.BaseScheduler): def __init__(self, name, builderNames, arg1=None, arg2=None, **kwargs): super().__init__(name, builderNames, **kwargs) self.arg1 = arg1 self.arg2 = arg2 Schedulers are Twisted services, so they can implement ``startService`` and ``stopService``. However, it is more common for scheduler subclasses to override ``startActivity`` and ``stopActivity`` instead. See below. Consuming Changes ----------------- A scheduler that needs to be notified of new changes should call :py:meth:`~buildbot.schedulers.base.BaseScheduler.startConsumingChanges` when it becomes active. Change consumption will automatically stop when the scheduler becomes inactive. Once consumption has started, the :py:meth:`~buildbot.schedulers.base.BaseScheduler.gotChange` method is invoked for each new change. The scheduler is free to do whatever it likes in this method. Adding Buildsets ---------------- To add a new buildset, subclasses should call one of the parent-class methods with the prefix ``addBuildsetFor``. These methods call :py:meth:`~buildbot.db.buildsets.BuildsetConnector.addBuildset` after applying behaviors common to all schedulers. 
Any of these methods can be called at any time. Handling Reconfiguration ------------------------ When the configuration for a scheduler changes, Buildbot deactivates, stops and removes the old scheduler, then adds, starts, and maybe activates the new scheduler. Buildbot determines whether a scheduler has changed by subclassing :py:class:`~buildbot.util.ComparableMixin`. See the documentation for class for an explanation of the ``compare_attrs`` attribute. .. note:: In a future version, schedulers will be converted to handle reconfiguration as reconfigurable services, and will no longer require ``compare_attrs`` to be set. Becoming Active and Inactive ---------------------------- An inactive scheduler should not do anything that might interfere with an active scheduler of the same name. Simple schedulers can consult the :py:attr:`~buildbot.schedulers.base.BaseScheduler.active` attribute to determine whether the scheduler is active. Most schedulers, however, will implement the ``activate`` method to begin any processing expected of an active scheduler. That may involve calling :py:meth:`~buildbot.schedulers.base.BaseScheduler.startConsumingChanges`, beginning a ``LoopingCall``, or subscribing to messages. Any processing begun by the ``activate`` method, or by an active scheduler, should be stopped by the ``deactivate`` method. The ``deactivate`` method's Deferred should not fire until such processing has completely stopped. Schedulers must up-call the parent class's ``activate`` and ``deactivate`` methods! Keeping State ------------- The :py:class:`~buildbot.schedulers.base.BaseScheduler` class provides :py:meth:`~buildbot.schedulers.base.BaseScheduler.getState` and :py:meth:`~buildbot.schedulers.base.BaseScheduler.setState` methods to get and set state values for the scheduler. Active scheduler instances should use these functions to store persistent scheduler state, such that if they fail or become inactive, other instances can pick up where they left off. 
A scheduler can cache its state locally, only calling ``getState`` when it first becomes active. However, it is best to keep the state as up-to-date as possible, by calling ``setState`` any time the state changes. This prevents loss of state from an unexpected master failure. Note that the state-related methods do not use locks of any sort. It is up to the caller to ensure that no race conditions exist between getting and setting state. Generally, it is sufficient to rely on there being only one running instance of a scheduler, and cache state in memory. buildbot-3.4.0/master/docs/developer/secrets.rst000066400000000000000000000114021413250514000217250ustar00rootroot00000000000000Secrets ------- A Secret is defined by a key associated with a value, returned from a provider. Secrets returned by providers are stored in a ``SecretDetails`` object. A ``SecretDetails`` object is initialized with a provider name, a key and a value. Each parameter is an object property. .. code-block:: python secret = SecretDetails("SourceProvider", "myKey", "myValue") print(secret.source) "SourceProvider" print(secret.key) "myKey" print(secret.value) "myValue" Secrets manager --------------- The secrets manager is a Buildbot service manager. .. code-block:: python secretsService = self.master.namedServices['secrets'] secretDetailsList = secretsService.get(self.secrets) The service executes a get method. Depending on the kind of storage chosen and declared in the configuration, the manager gets the selected provider and returns a list of ``secretDetails``. Secrets providers ----------------- The secrets providers are implementing the specific getters, related to the storage chosen. File provider ````````````` .. code-block:: python c['secretsProviders'] = [secrets.SecretInAFile(dirname="/path/toSecretsFiles")] In the master configuration the provider is instantiated through a Buildbot service secret manager with the file directory path. 
File secrets provider reads the file named by the key wanted by Buildbot and returns the contained text value (removing trailing newlines if present). SecretInAFile provider allows Buildbot to read secrets in the secret directory. Vault provider `````````````` .. code-block:: python c['secretsProviders'] = [secrets.HashiCorpVaultKvSecretProvider(authenticator=secrets.VaultAuthenticatorApprole(roleId="xxx", secretId="yyy"), vault_server="http://localhost:8200")] In the master configuration, the provider is instantiated through a Buildbot service secret manager with the Vault authenticator and the Vault server address. Vault secrets provider accesses the Vault backend asking the key wanted by Buildbot and returns the contained text value. SecretInVaultKv provider allows Buildbot to read secrets only in the Vault KV store, other secret engines are not supported by this provider. Currently v1 and v2 of the Key-Value secret engines are supported, v2 being the default version. Interpolate secret `````````````````` .. code-block:: python text = Interpolate("some text and %(secret:foo)s") Secret keys are replaced in a string by the secret value using the class Interpolate and the keyword secret. The secret is searched across the providers defined in the master configuration. Secret Obfuscation `````````````````` .. code-block:: python text = Interpolate("some text and %(secret:foo)s") # some text rendered rendered = yield self.build.render(text) cleantext = self.build.properties.cleanupTextFromSecrets(rendered) Secrets don't have to be visible to the normal user via logs and thus are transmitted directly to the workers. Secrets are rendered and can arrive anywhere in the logs. The function ``cleanupTextFromSecrets`` defined in the class Properties helps to replace the secret value by the key value. .. 
code-block:: python print("the example value is:%s" % (cleantext)) >> the example value is: The secret is rendered and is recorded in a dictionary, named ``_used_secrets``, where the key is the secret value and the value the secret key. Therefore anywhere logs are written having content with secrets, the secrets are replaced by the value from ``_used_secrets``. How to use a secret in a BuildbotService ```````````````````````````````````````` Service configurations are loaded during a Buildbot start or modified during a Buildbot restart. Secrets are used like renderables in a service and are rendered during the configuration load. .. code-block:: python class MyService(BuildbotService): secrets = ['foo', 'other'] ``secrets`` is a list containing all the secret keys that can be used as class attributes. When the service is loaded during the Buildbot reconfigService function, secrets are rendered and the values are updated. Everywhere the variable with the secret name (`foo` or `other` in the example) is used, the class attribute value is replaced by the secret value. This is similar to the "renderable" annotation, but will only work for BuildbotServices, and will only interpolate secrets. Other renderables can still be held in the service as attributes and rendered dynamically at a later time. .. code-block:: python class MyService(object): secrets = ['foo', 'other'] myService = MyService() After a Buildbot reconfigService: .. code-block:: python print("myService returns secret value:", myService.foo)) >> myService returns secret value bar buildbot-3.4.0/master/docs/developer/stats-service.rst000066400000000000000000000526641413250514000230700ustar00rootroot00000000000000.. _stats-service: Statistics Service ================== The statistics service (or stats service) is implemented in :mod:`buildbot.statistics.stats_service`. Please see :bb:cfg:`stats-service` for more information. Here is a diagram demonstrating the workings of the stats service: .. 
image:: _images/stats-service.png Stats Service ------------- .. py:class:: buildbot.statistics.stats_service.StatsService An instance of this class functions as a :class:`BuildbotService`. The instance of the running service is initialized in the master configuration file (see :bb:cfg:`stats-service` for more information). The running service is accessible everywhere in Buildbot via the :class:`BuildMaster`. The service is available at ``self.master.namedServices['']``. It takes the following initialization arguments: ``storage_backends`` A list of storage backends. These are instance of subclasses of :class:`StatsStorageBase`. ``name`` (str) The name of this service. This name can be used to access the running instance of this service using ``self.master.namedServices[name]``. Please see :bb:cfg:`stats-service` for examples. .. py:method:: checkConfig(self, storage_backends) ``storage_backends`` A list of storage backends. This method is called automatically to verify that the list of storage backends contains instances of subclasses of :class:`StatsStorageBase`. .. py:method:: reconfigService(self, storage_backends) ``storage_backends`` A list of storage backends. This method is called automatically to reconfigure the running service. .. py:method:: registerConsumers(self) Internal method for this class called to register all consumers (methods from Capture classes) to the MQ layer. .. py:method:: stopService(self) Internal method for this class to stop the stats service and clean up. .. py:method:: removeConsumers(self) Internal method for this class to stop and remove consumers from the MQ layer. .. py:method:: yieldMetricsValue(self, data_name, post_data, buildid) ``data_name`` (str) The name of the data being sent for storage. ``post_data`` A dictionary of key-value pairs that is sent for storage. ``buildid`` The integer build id of the current build. Obtainable in all ``BuildSteps``. 
This method should be called to post data that is not generated and stored as build-data in the database. This method generates the ``stats-yield-data`` event to the mq layer which is then consumed in :py:class:`postData`. .. _storage-backend: Storage backends ---------------- Storage backends are responsible for storing any statistics/data sent to them. A storage backend will generally be some sort of a database-server running on a machine. .. note:: This machine may be different from the one running :class:`BuildMaster`. Data is captured according to the master config file and then is sent to each of the storage backends provided by the master configuration (see :bb:cfg:`stats-service`). Each storage backend has a Python client defined as part of :mod:`buildbot.statistics.storage_backends` to aid in posting data by :class:`StatsService`. Currently, only `InfluxDB`_ is supported as a storage backend. .. py:class:: buildbot.statistis.storage_backends.base.StatsStorageBase An abstract class for all storage services. It cannot be directly initialized - it would raise a ``TypeError`` otherwise. .. py:method:: thd_postStatsValue(self, post_data, series_name, context) ``post_data`` A dict of key-value pairs that is sent for storage. The keys of this dict can be thought of as columns in a database and the value is the data stored for that column. ``series_name`` (str) The name of the time-series for this statistic. ``context`` (Optional) Any other contextual information about the data. It is a dict of key-value pairs. An abstract method that needs to be implemented by every child class of this class. Not doing so will result in a ``TypeError`` when starting Buildbot. .. py:class:: buildbot.statistics.storage_backends.influxdb_client.InfluxStorageService `InfluxDB`_ is a distributed time series database that employs a key-value pair storage system. This class is a Buildbot client to the InfluxDB storage backend. 
It is available in the configuration as ``statistics.InfluxStorageService``. It takes the following initialization arguments: ``url`` (str) The URL where the service is running. ``port`` (int) The port on which the service is listening. ``user`` (str) Username of an InfluxDB user. ``password`` (str) Password for ``user``. ``db`` (str) The name of database to be used. ``captures`` A list of instances of subclasses of :py:class:`Capture`. This tells which stats are to be stored in this storage backend. ``name=None`` (Optional) (str) The name of this storage backend. .. py:method:: thd_postStatsValue(self, post_data, series_name, context={}) ``post_data`` A dict of key-value pairs that is sent for storage. The keys of this dict can be thought of as columns in a database and the value is the data stored for that column. ``series_name`` (str) The name of the time-series for this statistic. ``context`` (Optional) Any other contextual information about the data. It is a dict of key-value pairs. This method constructs a dictionary of data to be sent to InfluxDB in the proper format and then sends the data to the InfluxDB instance. .. _InfluxDB: https://influxdata.com/time-series-platform/influxdb/ Capture Classes --------------- Capture classes are used for declaring which data needs to captured and sent to storage backends for storage. .. py:class:: buildbot.statistics.capture.Capture This is the abstract base class for all capture classes. Not to be used directly. It's initialized with the following parameters: .. py:attribute:: routingKey :noindex: (tuple) The routing key to be used by :class:`StatsService` to register consumers to the MQ layer for the subclass of this class. .. py:attribute:: callback The callback registered with the MQ layer for the consumer of a subclass of this class. Each subclass must provide a default callback for this purpose. .. py:method:: _defaultContext(self, msg) A method for providing default context to the storage backends. .. 
py:method:: consume(self, routingKey, msg) This is an abstract method - each subclass of this class should implement its own consume method. If not, then the subclass can't be instantiated. The consume method, when called (from the mq layer), receives the following arguments: .. py:attribute:: routingKey The routing key which was registered to the MQ layer. Same as the ``routingKey`` provided to instantiate this class. .. py:attribute:: msg The message that was sent by the producer. .. py:method:: _store(self, post_data, series_name, context) This is an abstract method of this class. It must be implemented by all subclasses of this class. It takes the following arguments: .. py:attribute:: post_data (dict) The key-value pair being sent to the storage backend. .. py:attribute:: series_name (str) The name of the series to which this data is stored. .. py:attribute:: context (dict) Any additional information pertaining to the data being sent. .. py:class:: buildbot.statistics.capture.CapturePropertyBase This is a base class for both :class:`CaptureProperty` and :class:`CapturePropertyAllBuilders` and abstracts away much of the common functionality between the two classes. It cannot be initialized directly as it contains an abstract method and raises ``TypeError`` if tried. It is initialized with the following arguments: .. py:attribute:: property_name :noindex: (str) The name of property needed to be recorded as a statistic. This can be a regular expression if ``regex=True`` (see below). .. py:attribute:: callback=None The callback function that is used by ``CaptureProperty.consumer`` to post-process data before formatting it and sending it to the appropriate storage backends. A default callback is provided for this: .. py:function:: default_callback(props, property_name) It returns property value for ``property_name``. It receives the following arguments: .. py:attribute:: props A dictionary of all build properties. .. 
py:attribute:: property_name (str) Name of the build property to return. .. py:attribute:: regex=False If this is set to ``True``, then the property name can be a regular expression. All properties matching this regular expression will be sent for storage. .. py:method:: consume(self, routingKey, msg) The consumer for all CaptureProperty classes described below. This method filters out the correct properties as per the configuration file and sends those properties for storage. The subclasses of this method do not need to implement this method as it takes care of all the functionality itself. See :class:`Capture` for more information. .. py:method:: _builder_name_matches(self, builder_info) This is an abstract method and needs to be implemented by all subclasses of this class. This is a helper method to the ``consume`` method mentioned above. It checks whether a builder is allowed to send properties to the storage backend according to the configuration file. It takes one argument: .. py:attribute:: builder_info (dict) The dictionary returned by the data API containing the builder information. .. py:class:: buildbot.statistics.capture.CaptureProperty The capture class for capturing build properties. It is available in the configuration as ``statistics.CaptureProperty``. It takes the following arguments: .. py:attribute:: builder_name (str) The name of builder in which the property is recorded. .. py:attribute:: property_name (str) The name of property needed to be recorded as a statistic. .. py:attribute:: callback=None The callback function that is used by ``CaptureProperty.consumer`` to post-process data before formatting it and sending it to the appropriate storage backends. A default callback is provided for this (see :class:`CapturePropertyBase` for more information). .. py:attribute:: regex=False If this is set to ``True``, then the property name can be a regular expression. All properties matching this regular expression will be sent for storage. .. 
py:method:: _builder_name_matches(self, builder_info) See :class:`CapturePropertyBase` for more information on this method. .. py:class:: buildbot.statistics.capture.CapturePropertyAllBuilders The capture class to use for capturing build properties on all builders. It is available in the configuration as ``statistics.CapturePropertyAllBuilders``. It takes the following arguments: .. py:attribute:: property_name (str) The name of property needed to be recorded as a statistic. .. py:attribute:: callback=None The callback function that is used by ``CaptureProperty.consumer`` to post-process data before formatting it and sending it to the appropriate storage backends. A default callback is provided for this (see :class:`CapturePropertyBase` for more information). .. py:attribute:: regex=False If this is set to ``True``, then the property name can be a regular expression. All properties matching this regular expression will be sent for storage. .. py:method:: _builder_name_matches(self, builder_info) See :class:`CapturePropertyBase` for more information on this method. .. py:class:: buildbot.statistics.capture.CaptureBuildTimes A base class for all Capture classes that deal with build times (start/end/duration). Not to be used directly. It's initialized with: .. py:attribute:: builder_name (str) The name of builder whose times are to be recorded. .. py:attribute:: callback The callback function that is used by a subclass of this class to post-process data before formatting it and sending it to the appropriate storage backends. A default callback is provided for this. Each subclass must provide a default callback that is used in initialization of this class should the user not provide a callback. .. py:method:: consume(self, routingKey, msg) The consumer for all subclasses of this class. See :class:`Capture` for more information. .. note:: This consumer requires all subclasses to implement: .. 
py:attribute:: self._time_type A string used as a key in ``post_data`` sent to storage services. .. py:method:: self._retValParams(msg) A method that takes in the ``msg`` this consumer gets and returns a list of arguments for the capture callback. .. py:method:: _retValParams(self, msg) This is an abstract method which needs to be implemented by subclasses. This method needs to return a list of parameters that will be passed to the ``callback`` function. See individual build ``CaptureBuild*`` classes for more information. .. py:method:: _err_msg(self, build_data, builder_name) A helper method that returns an error message for the ``consume`` method. .. py:method:: _builder_name_matches(self, builder_info) This is an abstract method and needs to be implemented by all subclasses of this class. This is a helper method to the ``consume`` method mentioned above. It checks whether a builder is allowed to send build times to the storage backend according to the configuration file. It takes one argument: .. py:attribute:: builder_info (dict) The dictionary returned by the data API containing the builder information. .. py:class:: buildbot.statistics.capture.CaptureBuildStartTime A capture class for capturing build start times. It takes the following arguments: .. py:attribute:: builder_name (str) The name of builder whose times are to be recorded. .. py:attribute:: callback=None The callback function for this class. See :class:`CaptureBuildTimes` for more information. The default callback: .. py:function:: default_callback(start_time) It returns the start time in ISO format. It takes one argument: .. py:attribute:: start_time A python datetime object that denotes the build start time. .. py:method:: _retValParams(self, msg) Returns a list containing one Python datetime object (start time) from ``msg`` dictionary. .. py:method:: _builder_name_matches(self, builder_info) See :class:`CaptureBuildTimes` for more information on this method. .. 
py:class:: buildbot.statistics.capture.CaptureBuildStartTimeAllBuilders A capture class for capturing build start times from all builders. It is a subclass of :class:`CaptureBuildStartTime`. It takes the following arguments: .. py:attribute:: callback=None The callback function for this class. See :class:`CaptureBuildTimes` for more information. The default callback: See ``CaptureBuildStartTime.__init__`` for the definition. .. py:method:: _builder_name_matches(self, builder_info) See :class:`CaptureBuildTimes` for more information on this method. .. py:class:: buildbot.statistics.capture.CaptureBuildEndTime A capture class for capturing build end times. Takes the following arguments: .. py:attribute:: builder_name (str) The name of builder whose times are to be recorded. .. py:attribute:: callback=None The callback function for this class. See :class:`CaptureBuildTimes` for more information. The default callback: .. py:function:: default_callback(end_time) It returns the end time in ISO format. It takes one argument: .. py:attribute:: end_time A python datetime object that denotes the build end time. .. py:method:: _retValParams(self, msg) Returns a list containing two Python datetime object (start time and end time) from ``msg`` dictionary. .. py:method:: _builder_name_matches(self, builder_info) See :class:`CaptureBuildTimes` for more information on this method. .. py:class:: buildbot.statistics.capture.CaptureBuildEndTimeAllBuilders A capture class for capturing build end times from all builders. It is a subclass of :class:`CaptureBuildEndTime`. It takes the following arguments: .. py:attribute:: callback=None The callback function for this class. See :class:`CaptureBuildTimes` for more information. The default callback: See ``CaptureBuildEndTime.__init__`` for the definition. .. py:method:: _builder_name_matches(self, builder_info) See :class:`CaptureBuildTimes` for more information on this method. .. 
py:class:: buildbot.statistics.capture.CaptureBuildDuration A capture class for capturing build duration. Takes the following arguments: .. py:attribute:: builder_name (str) The name of builder whose times are to be recorded. .. py:attribute:: report_in='seconds' Can be one of three: ``'seconds'``, ``'minutes'``, or ``'hours'``. This is the units in which the build time will be reported. .. py:attribute:: callback=None The callback function for this class. See :class:`CaptureBuildTimes` for more information. The default callback: .. py:function:: default_callback(start_time, end_time) It returns the duration of the build as per the ``report_in`` argument. It receives the following arguments: .. py:attribute:: start_time A python datetime object that denotes the build start time. .. py:attribute:: end_time A python datetime object that denotes the build end time. .. py:method:: _retValParams(self, msg) Returns a list containing one Python datetime object (end time) from ``msg`` dictionary. .. py:method:: _builder_name_matches(self, builder_info) See :class:`CaptureBuildTimes` for more information on this method. .. py:class:: buildbot.statistics.capture.CaptureBuildDurationAllBuilders A capture class for capturing build durations from all builders. It is a subclass of :class:`CaptureBuildDuration`. It takes the following arguments: .. py:attribute:: callback=None The callback function for this class. See :class:`CaptureBuildTimes` for more. The default callback: See ``CaptureBuildDuration.__init__`` for the definition. .. py:method:: _builder_name_matches(self, builder_info) See :class:`CaptureBuildTimes` for more information on this method. .. py:class:: buildbot.statistics.capture.CaptureDataBase This is a base class for both :class:`CaptureData` and :class:`CaptureDataAllBuilders` and abstracts away much of the common functionality between the two classes. Cannot be initialized directly as it contains an abstract method and raises ``TypeError`` if tried. 
It is initialized with the following arguments: .. py:attribute:: data_name (str) The name of data to be captured. Same as in :meth:`yieldMetricsValue`. .. py:attribute:: callback=None The callback function for this class. The default callback: The default callback takes a value ``x`` and return it without changing. As such, ``x`` itself acts as the ``post_data`` sent to the storage backends. .. py:method:: consume(self, routingKey, msg) The consumer for this class. See :class:`Capture` for more. .. py:method:: _builder_name_matches(self, builder_info) This is an abstract method and needs to be implemented by all subclasses of this class. This is a helper method to the ``consume`` method mentioned above. It checks whether a builder is allowed to send properties to the storage backend according to the configuration file. It takes one argument: .. py:attribute:: builder_info (dict) The dictionary returned by the data API containing the builder information. .. py:class:: buildbot.statistics.capture.CaptureData A capture class for capturing arbitrary data that is not stored as build-data. See :meth:`yieldMetricsValue` for more. Takes the following arguments for initialization: .. py:attribute:: data_name (str) The name of data to be captured. Same as in :meth:`yieldMetricsValue`. .. py:attribute:: builder_name (str) The name of the builder on which the data is captured. .. py:attribute:: callback=None The callback function for this class. The default callback: See :class:`CaptureDataBase` of definition. .. py:method:: _builder_name_matches(self, builder_info) See :class:`CaptureDataBase` for more information on this method. .. py:class:: buildbot.statistics.capture.CaptureDataAllBuilders A capture class to capture arbitrary data on all builders. See :meth:`yieldMetricsValue` for more. It takes the following arguments: .. py:attribute:: data_name (str) The name of data to be captured. Same as in :meth:`yieldMetricsValue`. .. 
py:attribute:: callback=None The callback function for this class. .. py:method:: _builder_name_matches(self, builder_info) See :class:`CaptureDataBase` for more information on this method. buildbot-3.4.0/master/docs/developer/style.rst000066400000000000000000000261521413250514000214250ustar00rootroot00000000000000Buildbot Coding Style ===================== Documentation ------------- Buildbot strongly encourages developers to document the methods, behavior, and usage of classes that users might interact with. However, this documentation should be in ``.rst`` files under ``master/docs/developer``, rather than in docstrings within the code. For private methods or where code deserves some kind of explanatory preface, use comments instead of a docstring. While some docstrings remain within the code, these should be migrated to documentation files and removed as the code is modified. Within the reStructuredText files, write each English sentence on its own line. While this does not affect the generated output, it makes git diffs between versions of the documentation easier to read, as they are not obscured by changes due to re-wrapping. This convention is not followed everywhere, but we are slowly migrating documentation from the old (wrapped) style as we update it. Symbol Names ------------ Buildbot follows `PEP8 `_ regarding the formatting of symbol names. Due to historical reasons, most of the public API uses interCaps naming style To preserve backwards compatibility, the public API should continue using interCaps naming style. That is, you should spell public API methods and functions with the first character in lower-case, and the first letter of subsequent words capitalized, e.g., ``compareToOther`` or ``getChangesGreaterThan``. The public API refers to the documented API that external developers can rely on. See section on the definition of the public API in :ref:`Public-API`. Everything else should use the style recommended by PEP8. 
In summary: =========================================== ============ Symbol Type Format =========================================== ============ Methods and functions under_scores Method and function arguments under_scores Public API methods and functions interCaps Public API method and function arguments interCaps Classes InitialCaps Variables under_scores Constants ALL_CAPS =========================================== ============ Twisted Idioms -------------- Programming with Twisted Python can be daunting. But sticking to a few well-defined patterns can help avoid surprises. Prefer to Return Deferreds ~~~~~~~~~~~~~~~~~~~~~~~~~~ If you're writing a method that doesn't currently block, but could conceivably block sometime in the future, return a Deferred and document that it does so. Just about anything might block - even getters and setters! Helpful Twisted Classes ~~~~~~~~~~~~~~~~~~~~~~~ Twisted has some useful, but little-known classes. Brief descriptions follow, but you should consult the API documentation or source code for the full details. :class:`twisted.internet.task.LoopingCall` Calls an asynchronous function repeatedly at set intervals. Note that this will stop looping if the function fails. In general, you will want to wrap the function to capture and log errors. :class:`twisted.application.internet.TimerService` Similar to ``t.i.t.LoopingCall``, but implemented as a service that will automatically start and stop the function calls when the service starts and stops. See the warning about failing functions for ``t.i.t.LoopingCall``. Sequences of Operations ~~~~~~~~~~~~~~~~~~~~~~~ Especially in Buildbot, we're often faced with executing a sequence of operations, many of which may block. In all cases where this occurs, there is a danger of pre-emption, so exercise the same caution you would if writing a threaded application. For simple cases, you can use nested callback functions. For more complex cases, inlineCallbacks is appropriate. 
In all cases, please prefer maintainability and readability over performance. Nested Callbacks ................ First, an admonition: do not create extra class methods that represent the continuations of the first: .. code-block:: python def myMethod(self): d = ... d.addCallback(self._myMethod_2) # BAD! def _myMethod_2(self, res): # BAD! ... Invariably, this extra method gets separated from its parent as the code evolves, and the result is completely unreadable. Instead, include all of the code for a particular function or method within the same indented block, using nested functions: .. code-block:: python def getRevInfo(revname): # for demonstration only! see below for a better implementation with inlineCallbacks results = {} d = defer.succeed(None) def rev_parse(_): # note use of '_' to quietly indicate an ignored parameter return utils.getProcessOutput(git, [ 'rev-parse', revname ]) d.addCallback(rev_parse) def parse_rev_parse(res): results['rev'] = res.strip() return utils.getProcessOutput(git, [ 'log', '-1', '--format=%s%n%b', results['rev'] ]) d.addCallback(parse_rev_parse) def parse_log(res): results['comments'] = res.strip() d.addCallback(parse_log) def set_results(_): return results d.addCallback(set_results) return d It is usually best to make the first operation occur within a callback, as the deferred machinery will then handle any exceptions as a failure in the outer Deferred. As a shortcut, ``d.addCallback`` can work as a decorator: .. code-block:: python d = defer.succeed(None) @d.addCallback def rev_parse(_): # note use of '_' to quietly indicate an ignored parameter return utils.getProcessOutput(git, [ 'rev-parse', revname ]) .. note:: ``d.addCallback`` is not really a decorator as it does not return a modified function. As a result, in the previous code, ``rev_parse`` value is actually the Deferred. In general, the :class:`inlineCallbacks` method is preferred inside new code as it keeps the code easier to read. 
As a general rule of thumb, when you need more than 2 callbacks in the same method, it's time to switch to :class:`inlineCallbacks`. This would be for example the case for the :py:func:`getRevInfo` example. See this `discussion `_ with Twisted experts for more information. Be careful with local variables. For example, if ``parse_rev_parse``, above, merely assigned ``rev = res.strip()``, then that variable would be local to ``parse_rev_parse`` and not available in ``set_results``. Mutable variables (dicts and lists) at the outer function level are appropriate for this purpose. .. note:: Do not try to build a loop in this style by chaining multiple Deferreds! Unbounded chaining can result in stack overflows, at least on older versions of Twisted. Use ``inlineCallbacks`` instead. In most of the cases, if you need more than two callbacks in a method, it is more readable and maintainable to use inlineCallbacks. inlineCallbacks ............... :class:`twisted.internet.defer.inlineCallbacks` is a great help to writing code that makes a lot of asynchronous calls, particularly if those calls are made in loop or conditionals. Refer to the Twisted documentation for the details, but the style within Buildbot is as follows: .. code-block:: python from twisted.internet import defer @defer.inlineCallbacks def mymethod(self, x, y): xval = yield getSomething(x) for z in (yield getZValues()): y += z if xval > 10: return xval + y self.someOtherMethod() The key points to notice here: * Always import ``defer`` as a module, not the names within it. * Use the decorator form of ``inlineCallbacks``. * In most cases, the result of a ``yield`` expression should be assigned to a variable. It can be used in a larger expression, but remember that Python requires that you enclose the expression in its own set of parentheses. * Python does not permit returning a value from a generator, so statements like ``return xval + y`` are invalid. Instead, yield the result of ``defer.returnValue``. 
For clarity, follow it with a bare ``return``, unless it is the last statement in the function. The great advantage of ``inlineCallbacks`` is that it allows you to use all of the usual Pythonic control structures in their natural form. In particular, it is easy to represent a loop or even nested loops in this style without losing any readability. Note that code using ``deferredGenerator`` is no longer acceptable in Buildbot. The previous :py:func:`getRevInfo` example implementation should rather be written as: .. code-block:: python @defer.inlineCallbacks def getRevInfo(revname): results = {} res = yield utils.getProcessOutput(git, [ 'rev-parse', revname ]) results['rev'] = res.strip() res = yield utils.getProcessOutput(git, [ 'log', '-1', '--format=%s%n%b', results['rev'] ]) results['comments'] = res.strip() return results Locking ....... Remember that asynchronous programming does not free you from the need to worry about concurrency issues. In particular, if you are executing a sequence of operations, and each time you wait for a Deferred, other arbitrary actions can take place. In general, you should try to perform actions atomically, but for the rare situations that require synchronization, the following might be useful: * :py:class:`twisted.internet.defer.DeferredLock` * :py:func:`buildbot.util.misc.deferredLocked` Joining Sequences ~~~~~~~~~~~~~~~~~ It's often the case that you want to perform multiple operations in parallel and rejoin the results at the end. For this purpose, you may use a `DeferredList `_: .. 
code-block:: python def getRevInfo(revname): results = {} finished = dict(rev_parse=False, log=False) rev_parse_d = utils.getProcessOutput(git, [ 'rev-parse', revname ]) def parse_rev_parse(res): return res.strip() rev_parse_d.addCallback(parse_rev_parse) log_d = utils.getProcessOutput(git, [ 'log', '-1', '--format=%s%n%b', results['rev'] ]) def parse_log(res): return res.strip() log_d.addCallback(parse_log) d = defer.DeferredList([rev_parse_d, log_d], consumeErrors=1, fireOnFirstErrback=1) def handle_results(results): return dict(rev=results[0][1], log=results[1][1]) d.addCallback(handle_results) return d Here, the deferred list will wait for both ``rev_parse_d`` and ``log_d`` to fire, or for one of them to fail. You may attach callbacks and errbacks to a ``DeferredList`` just as you would with a deferred. Functions running outside of the main thread ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ It is very important in Twisted to be able to distinguish functions that runs in the main thread and functions that don't, as reactors and deferreds can only be used in the main thread. To make this distinction clearer, every function meant to be run in a secondary thread must be prefixed with ``thd_``. buildbot-3.4.0/master/docs/developer/tests.rst000066400000000000000000000467531413250514000214400ustar00rootroot00000000000000Buildbot's Test Suite ===================== Buildbot's master tests are under ``buildbot.test`` and ``buildbot-worker`` package tests are under ``buildbot_worker.test``. Tests for the workers are similar to the master, although in some cases helpful functionality on the master is not re-implemented on the worker. Quick-Start ----------- Buildbot uses Twisted `trial `_ to run its test suite. Following is a quick shell session to put you on the right track. .. 
code-block:: bash # the usual buildbot development bootstrap with git and virtualenv git clone https://github.com/buildbot/buildbot cd buildbot # helper script which creates the virtualenv for development make virtualenv . .venv/bin/activate # now run the test suite trial buildbot # find all tests that talk about mail trial -n --reporter=bwverbose buildbot | grep mail # run only one test module trial buildbot.test.unit.test_reporters_mail Suites ------ Tests are divided into a few suites: * Unit tests (``buildbot.test.unit``) - these follow unit-testing practices and attempt to maximally isolate the system under test. Unit tests are the main mechanism of achieving test coverage, and all new code should be well-covered by corresponding unit tests. * Interface tests are a special type of unit tests, and are found in the same directory and often the same file. In many cases, Buildbot has multiple implementations of the same interface -- at least one "real" implementation and a fake implementation used in unit testing. The interface tests ensure that these implementations all meet the same standards. This ensures consistency between implementations, and also ensures that the unit tests are tested against realistic fakes. * Integration tests (``buildbot.test.integration``) - these test combinations of multiple units. Of necessity, integration tests are incomplete - they cannot test every condition; difficult to maintain - they tend to be complex and touch a lot of code; and slow - they usually require considerable setup and execute a lot of code. As such, use of integration tests is limited to a few broad tests that act as a failsafe for the unit and interface tests. * Regression tests (``buildbot.test.regressions``) - these tests are used to prevent re-occurrence of historical bugs. In most cases, a regression is better tested by a test in the other suites, or is unlikely to recur, so this suite tends to be small. 
* Fuzz tests (``buildbot.test.fuzz``) - these tests run for a long time and apply randomization to try to reproduce rare or unusual failures. The Buildbot project does not currently have a framework to run fuzz tests regularly. Unit Tests ~~~~~~~~~~ Every code module should have corresponding unit tests. This is not currently true of Buildbot, due to a large body of legacy code, but is a goal of the project. All new code must meet this requirement. Unit test modules follow the source file hierarchy (omitting the root ``buildbot`` directory) and are named after the package or class they test (replacing ``.`` with ``_``). For example, :src:`test_timed_Periodic.py ` tests the :class:`Periodic` class in :src:`master/buildbot/schedulers/timed.py`. Modules with only one class, or a few trivial classes, can be tested in a single test module. For more complex situations, prefer to use multiple test modules. Unit tests using renderables require special handling. The following example shows how the same test would be written with the 'param' parameter as a plain argument and with the same parameter as a renderable:: def test_param(self): f = self.ConcreteClass(param='val') self.assertEqual(f.param, 'val') When the parameter is renderable, you need to instantiate the class before you can test the renderables:: def setUp(self): self.build = Properties(param='val') @defer.inlineCallbacks def test_param_renderable(self): f = self.ConcreteClass( param=Interpolate('%(kw:rendered_val)s', rendered_val=Property('param'))) yield f.start_instance(self.build) self.assertEqual(f.param, 'val') Interface Tests ~~~~~~~~~~~~~~~ Interface tests exist to verify that multiple implementations of an interface meet the same requirements. Note that the name 'interface' should not be confused with the sparse use of Zope Interfaces in the Buildbot code -- in this context, an interface is any boundary between testable units. Ideally, all interfaces, both public and private, should be tested. 
Certainly, any *public* interfaces need interface tests. Interface tests are most often found in files named for the "real" implementation, e.g., :src:`test_changes.py `. When there is ambiguity, test modules should be named after the interface they are testing. Interface tests have the following form:: from buildbot.test.util import interfaces from twistd.trial import unittest class Tests(interfaces.InterfaceTests): # define methods that must be overridden per implementation def someSetupMethod(self): raise NotImplementedError # method signature tests def test_signature_someMethod(self): @self.assertArgSpecMatches(self.systemUnderTest.someMethod) def someMethod(self, arg1, arg2): pass # tests that all implementations must pass def test_something(self): pass # ... class RealTests(Tests): # tests that only *real* implementations must pass def test_something_else(self): pass # ... All of the test methods are defined here, segregated into tests that all implementations must pass, and tests that the fake implementation is not expected to pass. The ``test_signature_someMethod`` test above illustrates the :py:func:`buildbot.test.util.interfaces.assertArgSpecMatches` decorator, which can be used to compare the argument specification of a callable with a reference signature conveniently written as a nested function. Wherever possible, prefer to add tests to the ``Tests`` class, even if this means testing one method (e.g,. ``setFoo``) in terms of another (e.g., ``getFoo``). The ``assertArgSpecMatches`` method can take multiple methods to test; it will check each one in turn. At the bottom of the test module, a subclass is created for each implementation, implementing the setup methods that were stubbed out in the parent classes:: class TestFakeThing(unittest.TestCase, Tests): def someSetupMethod(self): pass # ... class TestRealThing(unittest.TestCase, RealTests): def someSetupMethod(self): pass # ... 
For implementations which require optional software, such as an AMQP server, this is the appropriate place to signal that tests should be skipped when their prerequisites are not available:: from twisted.trial import unittest class TestRealThing(unittest.TestCase, RealTests): def someSetupMethod(self): try: import foo except ImportError: raise unittest.SkipTest("foo not found") Integration Tests ~~~~~~~~~~~~~~~~~ Integration test modules test several units at once, including their interactions. In general, they serve as a catch-all for failures and bugs that were not detected by the unit and interface tests. As such, they should not aim to be exhaustive, but merely representative. Integration tests are very difficult to maintain if they reach into the internals of any part of Buildbot. Where possible, try to use the same means as a user would to set up, run, and check the results of an integration test. That may mean writing a :file:`master.cfg` to be parsed, and checking the results by examining the database (or fake DB API) afterward. Regression Tests ~~~~~~~~~~~~~~~~ Regression tests are even more rare in Buildbot than integration tests. In many cases, a regression test is not necessary -- either the test is better-suited as a unit or interface test, or the failure is so specific that a test will never fail again. Regression tests tend to be closely tied to the code in which the error occurred. When that code is refactored, the regression test generally becomes obsolete, and is deleted. Fuzz Tests ~~~~~~~~~~ Fuzz tests generally run for a fixed amount of time, running randomized tests against a system. They do not run at all during normal runs of the Buildbot tests, unless ``BUILDBOT_FUZZ`` is defined. This is accomplished with something like the following at the end of each test module:: if 'BUILDBOT_FUZZ' not in os.environ: del LRUCacheFuzzer Mixins ------ Buildbot provides a number of purpose-specific mixin classes in :src:`master/buildbot/util`. 
These generally define a set of utility functions as well as ``setUpXxx`` and ``tearDownXxx`` methods. These methods should be called explicitly from your subclass's ``setUp`` and ``tearDown`` methods. Note that some of these methods return Deferreds, which should be handled properly by the caller. .. _Fakes: Fakes ----- Buildbot provides a number of pre-defined fake implementations of internal interfaces, in :src:`master/buildbot/test/fake`. These are designed to be used in unit tests to limit the scope of the test. For example, the fake DB API eliminates the need to create a real database when testing code that uses the DB API, and isolates bugs in the system under test from bugs in the real DB implementation. The danger of using fakes is that the fake interface and the real interface can differ. The interface tests exist to solve this problem. All fakes should be fully tested in an integration test, so that the fakes pass the same tests as the "real" thing. It is particularly important that the method signatures be compared. Type Validation --------------- The :src:`master/buildbot/test/util/validation.py` provides a set of classes and definitions for validating Buildbot data types. It supports four types of data: * DB API dictionaries, as returned from the ``getXxx`` methods, * Data API dictionaries, as returned from ``get``, * Data API messages, and * Simple data types. These are validated from elsewhere in the codebase with calls to * ``verifyDbDict(testcase, type, value)``, * ``verifyData(testcase, type, options, value)``, * ``verifyMessage(testcase, routingKey, message)``, and * ``verifyType(testcase, name, value, validator)``, respectively. The ``testcase`` argument is used to fail the test case if the validation does not succeed. For DB dictionaries and data dictionaries, the ``type`` identifies the expected data type. For messages, the type is determined from the first element of the routing key. 
All messages sent with the fake MQ implementation are automatically validated using ``verifyMessage``. The ``verifyType`` method is used to validate simple types, e.g., :: validation.verifyType(self, 'param1', param1, validation.StringValidator()) In any case, if ``testcase`` is None, then the functions will raise an :py:exc:`AssertionError` on failure. Validator Classes ~~~~~~~~~~~~~~~~~ A validator is an instance of the ``Validator`` class. Its ``validate`` method is a generator function that takes a name and an object to validate. It yields error messages describing any deviations of ``object`` from the designated data type. The ``name`` argument is used to make such messages more helpful. A number of validators are supplied for basic types. A few classes deserve special mention: * ``NoneOk`` wraps another validator, allowing the object to be None. * ``Any`` will match any object without error. * ``IdentifierValidator`` will match identifiers; see :ref:`identifier `. * ``DictValidator`` takes key names as keyword arguments, with the values giving validators for each key. The ``optionalNames`` argument is a list of keys which may be omitted without error. * ``SourcedPropertiesValidator`` matches dictionaries with (value, source) keys, the representation used for properties in the data API. * ``MessageValidator`` validates messages. It checks that the routing key is a tuple of strings. The first tuple element gives the message type. The last tuple element is the event, and must be a member of the ``events`` set. The remaining "middle" tuple elements must match the message values identified by ``keyFields``. The ``messageValidator`` should be a ``DictValidator`` configured to check the message body. This validator's ``validate`` method is called with a tuple ``(routingKey, message)``. * ``Selector`` allows different validators to be selected based on matching functions. 
Its ``add`` method takes a matching function, which should return a boolean, and a validator to use if the matching function returns true. If the matching function is None, it is used as a default. This class is used for message and data validation. Defining Validators ~~~~~~~~~~~~~~~~~~~ DB validators are defined in the ``dbdict`` dictionary, e.g., :: dbdict['foodict'] = DictValidator( id=IntValidator(), name=StringValidator(), ... ) Data validators are ``Selector`` validators, where the selector is the ``options`` passed to ``verifyData``. :: data['foo'] = Selector() data['foo'].add(lambda opts : opt.get('fanciness') > 10, DictValidator( fooid=IntValidator(), name=StringValidator(), ... )) Similarly, message validators are ``Selector`` validators, where the selector is the routing key. The underlying validator should be a ``MessageValidator``. :: message['foo'] = Selector() message['foo'].add(lambda rk : rk[-1] == 'new', MessageValidator( keyFields=['fooid'], events=['new', 'complete'], messageValidator=DictValidator( fooid=IntValidator(), name=StringValidator(), ... ))) Good Tests ---------- Bad tests are worse than no tests at all. Since they waste developers' time wondering "was that a spurious failure?" or "what the heck is this test trying to do?", Buildbot needs good tests. So what makes a test good? .. _Tests-Independent-of-Time: Independent of Time ~~~~~~~~~~~~~~~~~~~ Tests that depend on wall time will fail. As a bonus, they run very slowly. Do not use :meth:`reactor.callLater` to wait "long enough" for something to happen. For testing things that themselves depend on time, consider using :class:`twisted.internet.tasks.Clock`. This may mean passing a clock instance to the code under test, and propagating that instance as necessary to ensure that all of the code using :meth:`callLater` uses it. Refactoring code for testability is difficult, but worthwhile. 
For testing things that do not depend on time, but for which you cannot detect the "end" of an operation: add a way to detect the end of the operation! Clean Code ~~~~~~~~~~ Make your tests readable. This is no place to skimp on comments! Others will attempt to learn about the expected behavior of your class by reading the tests. As a side note, if you use a :class:`Deferred` chain in your test, write the callbacks as nested functions, rather than using methods with funny names:: def testSomething(self): d = doThisFirst() def andThisNext(res): pass # ... d.addCallback(andThisNext) return d This isolates the entire test into one indented block. It is OK to add methods for common functionality, but give them real names and explain in detail what they do. Good Name ~~~~~~~~~ Test method names should follow the pattern :samp:`test_{METHOD}_{CONDITION}` where *METHOD* is the method being tested, and *CONDITION* is the condition under which it's tested. Since we can't always test a single method, this is not a hard-and-fast rule. Assert Only One Thing ~~~~~~~~~~~~~~~~~~~~~ Where practical, each test should have a single assertion. This may require a little bit of work to get several related pieces of information into a single Python object for comparison. The problem with multiple assertions is that, if the first assertion fails, the remainder are not tested. The test results then do not tell the entire story. Prefer Fakes to Mocks ~~~~~~~~~~~~~~~~~~~~~ Mock objects are too "compliant", and this often masks errors in the system under test. For example, a mis-spelled method name on a mock object will not raise an exception. Where possible, use one of the pre-written fake objects (see :ref:`Fakes`) instead of a mock object. Fakes themselves should be well-tested using interface tests. Where they are appropriate, Mock objects can be constructed easily using the aptly-named `mock `_ module, which is a requirement for Buildbot's tests. 
Small Tests ~~~~~~~~~~~ The shorter a test is, the better. Test as little code as possible in each test. It is fine, and in fact encouraged, to write the code under test in such a way as to facilitate this. As an illustrative example, if you are testing a new Step subclass, but your tests require instantiating a BuildMaster, you're probably doing something wrong! This also applies to test modules. Several short, easily-digested test modules are preferred over a 1000-line monster. Isolation ~~~~~~~~~ Each test should be maximally independent of other tests. Do not leave files laying around after your test has finished, and do not assume that some other test has run beforehand. It's fine to use caching techniques to avoid repeated, lengthy setup times. Be Correct ~~~~~~~~~~ Tests should be as robust as possible, which at a basic level means using the available frameworks correctly. All Deferreds should have callbacks and be chained properly. Error conditions should be checked properly. Race conditions should not exist (see :ref:`Tests-Independent-of-Time`, above). Be Helpful ~~~~~~~~~~ Note that tests will pass most of the time, but the moment when they are most useful is when they fail. When the test fails, it should produce output that is helpful to the person chasing it down. This is particularly important when the tests are run remotely, in which case the person chasing down the bug does not have access to the system on which the test fails. A test which fails sporadically with no more information than "AssertionFailed" is a prime candidate for deletion if the error isn't obvious. Making the error obvious also includes adding comments describing the ways a test might fail. Keeping State ~~~~~~~~~~~~~ Python does not allow assignment to anything but the innermost local scope or the global scope with the ``global`` keyword. 
This presents a problem when creating nested functions:: def test_localVariable(self): cb_called = False def cb(): cb_called = True cb() self.assertTrue(cb_called) # will fail! The ``cb_called = True`` assigns to a *different variable* than ``cb_called = False``. In production code, it's usually best to work around such problems, but in tests this is often the clearest way to express the behavior under test. The solution is to change something in a common mutable object. While a simple list can serve as such a mutable object, this leads to code that is hard to read. Instead, use :class:`State`:: from buildbot.test.state import State def test_localVariable(self): state = State(cb_called=False) def cb(): state.cb_called = True cb() self.assertTrue(state.cb_called) # passes This is almost as readable as the first example, but it actually works. buildbot-3.4.0/master/docs/developer/utils.rst000066400000000000000000001674471413250514000214420ustar00rootroot00000000000000Utilities ========= .. py:module:: buildbot.util Several small utilities are available at the top-level :mod:`buildbot.util` package. .. py:function:: naturalSort(list) :param list: list of strings :returns: sorted strings This function sorts strings "naturally", with embedded numbers sorted numerically. This ordering is good for objects which might have a numeric suffix, e.g., ``winworker1``, ``winworker2`` .. py:function:: formatInterval(interval) :param interval: duration in seconds :returns: human-readable (English) equivalent This function will return a human-readable string describing a length of time, given a number of seconds. .. py:class:: ComparableMixin This mixin class adds comparability to a subclass. Use it like this: .. code-block:: python class Widget(FactoryProduct, ComparableMixin): compare_attrs = ( 'radius', 'thickness' ) # ... Any attributes not in ``compare_attrs`` will not be considered when comparing objects. 
This is used to implement Buildbot's reconfig logic, where a comparison between the new and existing objects is used to determine whether the new object should replace the existing object. If the comparison shows the objects to be equivalent, then the old object is left in place. If they differ, the old object is removed from the buildmaster, and the new object is added. For use in configuration objects (schedulers, changesources, etc.), include any attributes which are set in the constructor based on the user's configuration. Be sure to also include the superclass's list, e.g.: .. code-block:: python class MyScheduler(base.BaseScheduler): compare_attrs = base.BaseScheduler.compare_attrs + ('arg1', 'arg2') A point to note is that the compare_attrs list is cumulative; that is, when a subclass also has a compare_attrs and the parent class has a compare_attrs, the subclass' compare_attrs also includes the parent class' compare_attrs. This class also implements the :py:class:`buildbot.interfaces.IConfigured` interface. The configuration is automatically generated, being the dict of all ``compare_attrs``. .. py:function:: safeTranslate(str) :param str: input string :returns: safe version of the input This function will filter out some inappropriate characters for filenames; it is suitable for adapting strings from the configuration for use as filenames. It is not suitable for use with strings from untrusted sources. .. py:function:: epoch2datetime(epoch) :param epoch: an epoch time (integer) :returns: equivalent datetime object Convert a UNIX epoch timestamp to a Python datetime object, in the UTC timezone. Note that timestamps specify UTC time (modulo leap seconds and a few other minor details). If the argument is None, returns None. .. py:function:: datetime2epoch(datetime) :param datetime: a datetime object :returns: equivalent epoch time (integer) Convert an arbitrary Python datetime object into a UNIX epoch timestamp. If the argument is None, returns None. .. 
py:data:: UTC A ``datetime.tzinfo`` subclass representing UTC time. A similar class has finally been added to Python in version 3.2, but the implementation is simple enough to include here. This is mostly used in tests to create timezone-aware datetime objects in UTC: .. code-block:: python dt = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC) .. py:function:: diffSets(old, new) :param old: old set :type old: set or iterable :param new: new set :type new: set or iterable :returns: a (removed, added) tuple This function compares two sets of objects, returning elements that were added and elements that were removed. This is largely a convenience function for reconfiguring services. .. py:function:: makeList(input) :param input: a thing :returns: a list of zero or more things This function is intended to support the many places in Buildbot where the user can specify either a string or a list of strings, but the implementation wishes to always consider lists. It converts any string to a single-element list, ``None`` to an empty list, and any iterable to a list. Input lists are copied, avoiding aliasing issues. .. py:function:: now() :returns: epoch time (integer) Return the current time, using either ``reactor.seconds`` or ``time.time()``. .. py:function:: flatten(list, [types]) :param list: potentially nested list :param types: An optional iterable of the types to flatten. By default, if unspecified, this flattens both lists and tuples :returns: flat list Flatten nested lists into a list containing no other lists. For example: .. code-block:: python >>> flatten([ [ 1, 2 ], 3, [ [ 4 ], 5 ] ]) [ 1, 2, 3, 4, 5 ] Both lists and tuples are looked at by default. .. py:function:: flattened_iterator(list, [types]) :param list: potentially nested list :param types: An optional iterable of the types to flatten. By default, if unspecified, this flattens both lists and tuples. 
:returns: iterator over every element whose type isn't in types Returns a generator that doesn't yield any lists/tuples. For example: .. code-block:: none >>> for x in flattened_iterator([ [ 1, 2 ], 3, [ [ 4 ] ] ]): >>> print x 1 2 3 4 Use this for extremely large lists to keep memory-usage down and improve performance when you only need to iterate once. .. py:function:: none_or_str(obj) :param obj: input value :returns: string or ``None`` If ``obj`` is not None, return its string representation. .. py:function:: bytes2unicode(bytestr, encoding='utf-8', errors='strict') :param bytestr: bytes :param encoding: unicode encoding to pass to :py:func:`str.encode`, default ``utf-8``. :param errors: error handler to pass to :py:func:`str.encode`, default ``strict``. :returns: string as unicode This function is intended to convert bytes to unicode for user convenience. If given a bytestring, it returns the string decoded using ``encoding``. If given a unicode string, it returns it directly. .. py:function:: string2boolean(str) :param str: string :raises KeyError: :returns: boolean This function converts a string to a boolean. It is intended to be liberal in what it accepts: case-insensitive "true", "on", "yes", "1", etc. It raises :py:exc:`KeyError` if the value is not recognized. .. py:function:: toJson(obj) :param obj: object :returns: UNIX epoch timestamp This function is a helper for json.dump, that allows to convert non-json able objects to json. For now it supports converting datetime.datetime objects to unix timestamp. .. py:data:: NotABranch This is a sentinel value used to indicate that no branch is specified. It is necessary since schedulers and change sources consider ``None`` a valid name for a branch. This is generally used as a default value in a method signature, and then tested against with ``is``: .. code-block:: python if branch is NotABranch: pass # ... .. 
py:function:: in_reactor(fn) This decorator will cause the wrapped function to be run in the Twisted reactor, with the reactor stopped when the function completes. It returns the result of the wrapped function. If the wrapped function fails, its traceback will be printed, the reactor halted, and ``None`` returned. .. py:function:: asyncSleep(secs, reactor=None) Yield a deferred that will fire with no result after ``secs`` seconds. This is the asynchronous equivalent to ``time.sleep``, and can be useful in tests. In case a custom reactor is used, the ``reactor`` parameter may be set. By default, ``twisted.internet.reactor`` is used. .. py:function:: stripUrlPassword(url) :param url: a URL :returns: URL with any password component replaced with ``xxxx`` Sanitize a URL; use this before logging or displaying a DB URL. .. py:function:: join_list(maybe_list) :param maybe_list: list, tuple, byte string, or unicode :returns: unicode string If ``maybe_list`` is a list or tuple, join it with spaces, casting any strings into unicode using :py:func:`bytes2unicode`. This is useful for configuration parameters that may be strings or lists of strings. .. py:class:: Notifier() This is a helper for firing multiple deferreds with the same result. .. py:method:: wait() Return a deferred that will fire when when the notifier is notified. .. py:method:: notify(value) Fire all the outstanding deferreds with the given value. .. py:function:: giturlparse(url) :param url: a git url :returns: a :py:class:`GitUrl` with results of parsed url This function is intended to help various components to parse git urls. It helps to find the ``/`` of a git repository url coming from a change, in order to call urls. ``owner`` and ``repo`` is a common scheme for identifying a git repository between various git hosting services, like GitHub, GitLab, BitBucket, etc. Each service has their own naming for similar things, but we choose to use the GitHub naming as a de-facto standard. 
To simplify implementation, the parser is accepting invalid urls, but it should always parse valid urls correctly. The unit tests in ``test_util_giturlparse.py`` are the references on what the parser accepts. Please feel free to update the parser and the unit tests. Example use: .. code-block:: python from buildbot.util import giturlparse repourl = giturlparse(sourcestamp['repository']) repoOwner = repourl.owner repoName = repourl.repo .. py:class:: GitUrl() .. py:attribute:: proto The protocol of the url .. py:attribute:: user The user of the url (as in ``user@domain``) .. py:attribute:: domain The domain part of the url .. py:attribute:: port The optional port of the url .. py:attribute:: owner The owner of the repository (in case of GitLab might be a nested group, i.e contain ``/``, e.g ``repo/subrepo/subsubrepo``) .. py:attribute:: repo The name of the repository (in case of GitLab might be a nested group, i.e contain ``/``) :py:mod:`buildbot.util.lru` ~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.lru .. py:class:: LRUCache(miss_fn, max_size=50) :param miss_fn: function to call, with key as parameter, for cache misses. The function should return the value associated with the key argument, or None if there is no value associated with the key. :param max_size: maximum number of objects in the cache. This is a simple least-recently-used cache. When the cache grows beyond the maximum size, the least-recently used items will be automatically removed from the cache. This cache is designed to control memory usage by minimizing duplication of objects, while avoiding unnecessary re-fetching of the same rows from the database. All values are also stored in a weak valued dictionary, even after they have expired from the cache. This allows values that are used elsewhere in Buildbot to "stick" in the cache in case they are needed by another component. Weak references cannot be used for some types, so these types are not compatible with this class. 
Note that dictionaries can be weakly referenced if they are an instance of a subclass of ``dict``. If the result of the ``miss_fn`` is ``None``, then the value is not cached; this is intended to avoid caching negative results. This is based on `Raymond Hettinger's implementation `_, licensed under the PSF license, which is GPL-compatible. .. py:attribute:: hits cache hits so far .. py:attribute:: refhits cache misses found in the weak ref dictionary, so far .. py:attribute:: misses cache misses leading to re-fetches, so far .. py:attribute:: max_size maximum allowed size of the cache .. py:method:: get(key, **miss_fn_kwargs) :param key: cache key :param miss_fn_kwargs: keyword arguments to the ``miss_fn`` :returns: value via Deferred Fetch a value from the cache by key, invoking ``miss_fn(key, **miss_fn_kwargs)`` if the key is not in the cache. Any additional keyword arguments are passed to the ``miss_fn`` as keyword arguments; these can supply additional information relating to the key. It is up to the caller to ensure that this information is functionally identical for each key value: if the key is already in the cache, the ``miss_fn`` will not be invoked, even if the keyword arguments differ. .. py:method:: put(key, value) :param key: key at which to place the value :param value: value to place there Add the given key and value into the cache. The purpose of this method is to insert a new value into the cache *without* invoking the miss_fn (e.g., to avoid unnecessary overhead). .. py:method set_max_size(max_size) :param max_size: new maximum cache size Change the cache's maximum size. If the size is reduced, cached elements will be evicted. This method exists to support dynamic reconfiguration of cache sizes in a running process. .. py:method:: inv() Check invariants on the cache. This is intended for debugging purposes. .. 
py:class:: AsyncLRUCache(miss_fn, max_size=50) :param miss_fn: This is the same as the miss_fn for class LRUCache, with the difference that this function *must* return a Deferred. :param max_size: maximum number of objects in the cache. This class has the same functional interface as LRUCache, but asynchronous locking is used to ensure that in the common case of multiple concurrent requests for the same key, only one fetch is performed. :py:mod:`buildbot.util.bbcollections` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.bbcollections This package provides a few useful collection objects. .. note:: This module used to be named ``collections``, but without absolute imports (:pep:`328`), this precluded using the standard library's ``collections`` module. .. py:class:: defaultdict This is a clone of the Python :class:`collections.defaultdict` for use in Python-2.4. In later versions, this is simply a reference to the built-in :class:`defaultdict`, so Buildbot code can simply use :class:`buildbot.util.collections.defaultdict` everywhere. .. py:class:: KeyedSets This is a collection of named sets. In principle, it contains an empty set for every name, and you can add things to sets, discard things from sets, and so on. .. code-block:: python >>> ks = KeyedSets() >>> ks['tim'] # get a named set set([]) >>> ks.add('tim', 'friendly') # add an element to a set >>> ks.add('tim', 'dexterous') >>> ks['tim'] set(['friendly', 'dexterous']) >>> 'tim' in ks # membership testing True >>> 'ron' in ks False >>> ks.discard('tim', 'friendly')# discard set element >>> ks.pop('tim') # return set and reset to empty set(['dexterous']) >>> ks['tim'] set([]) This class is careful to conserve memory space - empty sets do not occupy any space. :py:mod:`buildbot.util.eventual` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.eventual This function provides a simple way to say "please do this later". For example .. 
code-block:: python from buildbot.util.eventual import eventually def do_what_I_say(what, where): # ... return d eventually(do_what_I_say, "clean up", "your bedroom") The package defines "later" as "next time the reactor has control", so this is a good way to avoid long loops that block another activity in the reactor. .. py:function:: eventually(cb, *args, **kwargs) :param cb: callable to invoke later :param args: args to pass to ``cb`` :param kwargs: kwargs to pass to ``cb`` Invoke the callable ``cb`` in a later reactor turn. Callables given to :func:`eventually` are guaranteed to be called in the same order as the calls to :func:`eventually` -- writing ``eventually(a); eventually(b)`` guarantees that ``a`` will be called before ``b``. Any exceptions that occur in the callable will be logged with ``log.err()``. If you really want to ignore them, provide a callable that catches those exceptions. This function returns None. If you care to know when the callable was run, be sure to provide a callable that notifies somebody. .. py:function:: fireEventually(value=None) :param value: value with which the Deferred should fire :returns: Deferred This function returns a Deferred which will fire in a later reactor turn, after the current call stack has been completed, and after all other Deferreds previously scheduled with :py:func:`eventually`. The returned Deferred will never fail. .. py:function:: flushEventualQueue() :returns: Deferred This returns a Deferred which fires when the eventual-send queue is finally empty. This is useful for tests and other circumstances where it is useful to know that "later" has arrived. :py:mod:`buildbot.util.debounce` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.debounce It's often necessary to perform some action in response to a particular type of event. For example, steps need to update their status after updates arrive from the worker. 
However, when many events arrive in quick succession, it's more efficient to only perform the action once, after the last event has occurred. The ``debounce.method(wait)`` decorator is the tool for the job. .. py:function:: method(wait, get_reactor) :param wait: time to wait before invoking, in seconds :param get_reactor: A callable that takes the underlying instance and returns the reactor to use. Defaults to ``instance.master.reactor``. Returns a decorator that debounces the underlying method. The underlying method must take no arguments (except ``self``). For each call to the decorated method, the underlying method will be invoked at least once within *wait* seconds (plus the time the method takes to execute). Calls are "debounced" during that time, meaning that multiple calls to the decorated method will result in a single invocation. .. note:: This functionality is similar to Underscore's ``debounce``, except that the Underscore method resets its timer on every call. The decorated method is an instance of :py:class:`Debouncer`, allowing it to be started and stopped. This is useful when the method is a part of a Buildbot service: call ``method.start()`` from ``startService`` and ``method.stop()`` from ``stopService``, handling its Deferred appropriately. .. py:class:: Debouncer .. py:method:: stop() :returns: Deferred Stop the debouncer. While the debouncer is stopped, calls to the decorated method will be ignored. If a call is pending when ``stop`` is called, that call will occur immediately. When the Deferred that ``stop`` returns fires, the underlying method is not executing. .. py:method:: start() Start the debouncer. This reverses the effects of ``stop``. This method can be called on a started debouncer without issues. :py:mod:`buildbot.util.poll` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.poll Many Buildbot services perform some periodic, asynchronous operation. 
Change sources, for example, contact the repositories they monitor on a regular basis. The tricky bit is, the periodic operation must complete before the service stops. The ``@poll.method`` decorator makes this behavior easy and reliable. .. py:function:: method This decorator replaces the decorated method with a :py:class:`Poller` instance configured to call the decorated method periodically. The poller is initially stopped, so periodic calls will not begin until its ``start`` method is called. The start polling interval is specified when the poller is started. A random delay may optionally be supplied. This allows to avoid the situation of multiple services with the same interval are executing at exactly the same time. If the decorated method fails or raises an exception, the Poller logs the error and re-schedules the call for the next interval. If a previous invocation of the method has not completed when the interval expires, then the next invocation is skipped and the interval timer starts again. A common idiom is to call ``start`` and ``stop`` from ``startService`` and ``stopService``: .. code-block:: python class WatchThings(object): @poll.method def watch(self): d = self.beginCheckingSomething() return d def startService(self): self.watch.start(interval=self.pollingInterval, now=False) def stopService(self): return self.watch.stop() .. py:class:: Poller .. py:method:: start(interval=N, now=False, random_delay_min=0, random_delay_max=0) :param interval: time, in seconds, between invocations :param now: if true, call the decorated method immediately on startup. :param random_delay_min: Minimum random delay to apply to the start time of the decorated method. :param random_delay_min: Maximum random delay to apply to the start time of the decorated method. Start the poller. .. py:method:: stop() :returns: Deferred Stop the poller. The returned Deferred fires when the decorated method is complete. .. 
py:method:: __call__() Force a call to the decorated method now. If the decorated method is currently running, another call will begin as soon as it completes unless the poller is currently stopping. :py:mod:`buildbot.util.maildir` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.maildir Several Buildbot components make use of `maildirs `_ to hand off messages between components. On the receiving end, there's a need to watch a maildir for incoming messages and trigger some action when one arrives. .. py:class:: MaildirService(basedir) :param basedir: (optional) base directory of the maildir A :py:class:`MaildirService` instance watches a maildir for new messages. It should be a child service of some :py:class:`~twisted.application.service.MultiService` instance. When running, this class uses the linux dirwatcher API (if available) or polls for new files in the 'new' maildir subdirectory. When it discovers a new message, it invokes its :py:meth:`messageReceived` method. To use this class, subclass it and implement a more interesting :py:meth:`messageReceived` function. .. py:method:: setBasedir(basedir) :param basedir: base directory of the maildir If no ``basedir`` is provided to the constructor, this method must be used to set the basedir before the service starts. .. py:method:: messageReceived(filename) :param filename: unqualified filename of the new message This method is called with the short filename of the new message. The full name of the new file can be obtained with ``os.path.join(maildir, 'new', filename)``. The method is un-implemented in the :py:class:`MaildirService` class, and must be implemented in subclasses. .. py:method:: moveToCurDir(filename) :param filename: unqualified filename of the new message :returns: open file object Call this from :py:meth:`messageReceived` to start processing the message; this moves the message file to the 'cur' directory and returns an open file handle for it. 
:py:mod:`buildbot.util.misc` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.misc .. py:function:: deferredLocked(lock) :param lock: a :py:class:`twisted.internet.defer.DeferredLock` instance or a string naming an instance attribute containing one This is a decorator to wrap an event-driven method (one returning a ``Deferred``) in an acquire/release pair of a designated :py:class:`~twisted.internet.defer.DeferredLock`. For simple functions with a static lock, this is as easy as: .. code-block:: python someLock = defer.DeferredLock() @util.deferredLocked(someLock) def someLockedFunction(): # .. return d For class methods which must access a lock that is an instance attribute, the lock can be specified by a string, which will be dynamically resolved to the specific instance at runtime: .. code-block:: python def __init__(self): self.someLock = defer.DeferredLock() @util.deferredLocked('someLock') def someLockedFunction(): # .. return d .. py:function:: cancelAfter(seconds, deferred) :param seconds: timeout in seconds :param deferred: deferred to cancel after timeout expires :returns: the deferred passed to the function Cancel the given deferred after the given time has elapsed, if it has not already been fired. When this occurs, the deferred's errback will be fired with a :py:class:`twisted.internet.defer.CancelledError` failure. :py:mod:`buildbot.util.netstrings` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.netstrings Similar to maildirs, `netstrings `_ are used occasionally in Buildbot to encode data for interchange. While Twisted supports a basic netstring receiver protocol, it does not have a simple way to apply that to a non-network situation. .. py:class:: NetstringParser This class parses strings piece by piece, either collecting the accumulated strings or invoking a callback for each one. .. 
py:method:: feed(data) :param data: a portion of netstring-formatted data :raises: :py:exc:`twisted.protocols.basic.NetstringParseError` Add arbitrarily-sized ``data`` to the incoming-data buffer. Any complete netstrings will trigger a call to the :py:meth:`stringReceived` method. Note that this method (like the Twisted class it is based on) cannot detect a trailing partial netstring at EOF - the data will be silently ignored. .. py:method:: stringReceived(string): :param string: the decoded string This method is called for each decoded string as soon as it is read completely. The default implementation appends the string to the :py:attr:`strings` attribute, but subclasses can do anything. .. py:attribute:: strings The strings decoded so far, if :py:meth:`stringReceived` is not overridden. :py:mod:`buildbot.util.sautils` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.sautils This module contains a few utilities that are not included with SQLAlchemy. .. py:class:: InsertFromSelect(table, select) :param table: table into which insert should be performed :param select: select query from which data should be drawn This class is taken directly from SQLAlchemy's `compiler.html `_, and allows a Pythonic representation of ``INSERT INTO .. SELECT ..`` queries. .. py:function:: sa_version() Return a 3-tuple representing the SQLAlchemy version. Note that older versions that did not have a ``__version__`` attribute are represented by ``(0,0,0)``. :py:mod:`buildbot.util.pathmatch` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.pathmatch .. py:class:: Matcher This class implements the path-matching algorithm used by the data API. Patterns are tuples of strings, with strings beginning with a colon (``:``) denoting variables. A character can precede the colon to indicate the variable type: * ``i`` specifies an identifier (:ref:`identifier `). * ``n`` specifies a number (parseable by ``int``). 
A tuple of strings matches a pattern if the lengths are identical, every variable matches and has the correct type, and every non-variable pattern element matches exactly. A matcher object takes patterns using dictionary-assignment syntax: .. code-block:: python ep = ChangeEndpoint() matcher[('change', 'n:changeid')] = ep and performs matching using the dictionary-lookup syntax: .. code-block:: python changeEndpoint, kwargs = matcher[('change', '13')] # -> (ep, {'changeid': 13}) where the result is a tuple of the original assigned object (the ``Change`` instance in this case) and the values of any variables in the path. .. py:method:: iterPatterns() Returns an iterator which yields all patterns in the matcher as tuples of (pattern, endpoint). :py:mod:`buildbot.util.topicmatch` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.topicmatch .. py:class:: TopicMatcher(topics) :param list topics: topics to match This class implements the AMQP-defined syntax: routing keys are treated as dot-separated sequences of words and matched against topics. A star (``*``) in the topic will match any single word, while an octothorpe (``#``) will match zero or more words. .. py:method:: matches(routingKey) :param string routingKey: routing key to examine :returns: True if the routing key matches a topic :py:mod:`buildbot.util.subscription` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The classes in the :py:mod:`buildbot.util.subscription` module are used for master-local subscriptions. In the near future, all uses of this module will be replaced with message-queueing implementations that allow subscriptions and subscribers to span multiple masters. :py:mod:`buildbot.util.croniter` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This module is a copy of https://github.com/taichino/croniter, and provides support for converting cron-like time specifications to actual times. :py:mod:`buildbot.util.state` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. 
py:module:: buildbot.util.state The classes in the :py:mod:`buildbot.util.state` module are used for dealing with object state stored in the database. .. py:class:: StateMixin This class provides helper methods for accessing the object state stored in the database. .. py:attribute:: name This must be set to the name to be used to identify this object in the database. .. py:attribute:: master This must point to the :py:class:`BuildMaster` object. .. py:method:: getState(name, default) :param name: name of the value to retrieve :param default: (optional) value to return if `name` is not present :returns: state value via a Deferred :raises KeyError: if `name` is not present and no default is given :raises TypeError: if JSON parsing fails Get a named state value from the object's state. .. py:method:: setState(name, value) :param name: the name of the value to change :param value: the value to set - must be a JSONable object :param returns: Deferred :raises TypeError: if JSONification fails Set a named state value in the object's persistent state. Note that value must be json-able. :py:mod:`buildbot.util.identifiers` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.identifiers This module makes it easy to manipulate identifiers. .. py:function:: isIdentifier(maxLength, object) :param maxLength: maximum length of the identifier :param object: object to test for identifier-ness :returns: boolean Is object a :ref:`identifier `? .. py:function:: forceIdentifier(maxLength, str) :param maxLength: maximum length of the identifier :param str: string to coerce to an identifier :returns: identifier of maximum length ``maxLength`` Coerce a string (assuming UTF-8 for bytestrings) into an identifier. This method will replace any invalid characters with ``_`` and truncate to the given length. .. 
py:function:: incrementIdentifier(maxLength, str) :param maxLength: maximum length of the identifier :param str: identifier to increment :returns: identifier of maximum length ``maxLength`` :raises: ValueError if no suitable identifier can be constructed "Increment" an identifier by adding a numeric suffix, while keeping the total length limited. This is useful when selecting a unique identifier for an object. Maximum-length identifiers like ``_999999`` cannot be incremented and will raise :py:exc:`ValueError`. :py:mod:`buildbot.util.lineboundaries` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.lineboundaries .. py:class:: LineBoundaryFinder This class accepts a sequence of arbitrary strings and invokes a callback only with complete (newline-terminated) substrings. It buffers any partial lines until a subsequent newline is seen. It considers any of ``\r``, ``\n``, and ``\r\n`` to be newlines. Because of the ambiguity of an append operation ending in the character ``\r`` (it may be a bare ``\r`` or half of ``\r\n``), the last line of such an append operation will be buffered until the next append or flush. :param callback: asynchronous function to call with newline-terminated strings .. py:method:: append(text) :param text: text to append to the boundary finder :returns: Deferred Add additional text to the boundary finder. If the addition of this text completes at least one line, the callback will be invoked with as many complete lines as possible. .. py:method:: flush() :returns: Deferred Flush any remaining partial line by adding a newline and invoking the callback. :py:mod:`buildbot.util.service` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.service This module implements some useful subclasses of Twisted services. The first two classes are more robust implementations of two Twisted classes, and should be used universally in Buildbot code. .. 
class:: AsyncMultiService This class is similar to :py:class:`twisted.application.service.MultiService`, except that it handles Deferreds returned from child services ``startService`` and ``stopService`` methods. Twisted's service implementation does not support asynchronous ``startService`` methods. The reasoning is that all services should start at process startup, with no need to coordinate between them. For Buildbot, this is not sufficient. The framework needs to know when startup has completed, so it can begin scheduling builds. This class implements the desired functionality, with a parent service's ``startService`` returning a Deferred which will only fire when all child services ``startService`` methods have completed. This class also fixes a bug with Twisted's implementation of ``stopService`` which ignores failures in the ``stopService`` process. With :py:class:`AsyncMultiService`, any errors in a child's ``stopService`` will be propagated to the parent's ``stopService`` method. .. py:class:: AsyncService This class is similar to :py:class:`twisted.application.service.Service`, except that its ``setServiceParent`` method will return a Deferred. That Deferred will fire after the ``startService`` method has completed, if the service was started because the new parent was already running. .. index:: Service utilities; ClusteredService Some services in buildbot must have only one "active" instance at any given time. In a single-master configuration, this requirement is trivial to maintain. In a multiple-master configuration, some arbitration is required to ensure that the service is always active on exactly one master in the cluster. For example, a particular daily scheduler could be configured on multiple masters, but only one of them should actually trigger the required builds. .. py:class:: ClusteredService A base class for a service that must have only one "active" instance in a buildbot configuration. 
Each instance of the service is started and stopped via the usual twisted ``startService`` and ``stopService`` methods. This utility class hooks into those methods in order to run an arbitration strategy to pick the one instance that should actually be "active". The arbitration strategy is implemented via a polling loop. When each service instance starts, it immediately offers to take over as the active instance (via ``_claimService``). If successful, the ``activate`` method is called. Once active, the instance remains active until it is explicitly stopped (eg, via ``stopService``) or otherwise fails. When this happens, the ``deactivate`` method is invoked and the "active" status is given back to the cluster (via ``_unclaimService``). If another instance is already active, this offer fails, and the instance will poll periodically to try again. The polling strategy helps guard against active instances that might silently disappear and leave the service without any active instance running. Subclasses should use these methods to hook into this activation scheme: .. method:: activate() When a particular instance of the service is chosen to be the one "active" instance, this method is invoked. It is the corollary to twisted's ``startService``. .. method:: deactivate() When the one "active" instance must be deactivated, this method is invoked. It is the corollary to twisted's ``stopService``. .. method:: isActive() Returns whether this particular instance is the active one. The arbitration strategy is implemented via the following required methods: .. method:: _getServiceId() The "service id" uniquely represents this service in the cluster. Each instance of this service must have this same id, which will be used in the arbitration to identify candidates for activation. This method may return a Deferred. .. method:: _claimService() An instance is attempting to become the one active instance in the cluster. 
This method must return `True` or `False` (optionally via a Deferred) to represent whether this instance's offer to be the active one was accepted. If this returns `True`, the ``activate`` method will be called for this instance. .. method:: _unclaimService() Surrender the "active" status back to the cluster and make it available for another instance. This will only be called on an instance that successfully claimed the service and has been activated and after its ``deactivate`` has been called. Therefore, in this method it is safe to reassign the "active" status to another instance. This method may return a Deferred. .. py:class:: SharedService This class implements a generic Service that needs to be instantiated only once according to its parameters. It is a common use case to need this for accessing remote services. Having a shared service allows to limit the number of simultaneous access to the same remote service. Thus, several completely independent Buildbot services can use that :py:class:`SharedService` to access the remote service, and automatically synchronize themselves to not overwhelm it. .. py:method:: __init__(self, *args, **kwargs) Constructor of the service. Note that unlike :py:class:`BuildbotService`, :py:class:`SharedService` is not reconfigurable and uses the classical constructor method. Reconfigurability would mean to add some kind of reference counting of the users, which will make the design much more complicated to use. This means that the SharedService will not be destroyed when there is no more users, it will be destroyed at the master's stopService It is important that those :py:class:`SharedService` life cycles are properly handled. Twisted will indeed wait for any thread pool to finish at master stop, which will not happen if the thread pools are not properly closed. The lifecycle of the SharedService is the same as a service, it must implement startService and stopService in order to allocate and free its resources. .. 
py:method:: getName(cls, *args, **kwargs) Class method. Takes same arguments as the constructor of the service. Get a unique name for that instance of a service. This returned name is the key inside the parent's service dictionary that is used to decide if the instance has already been created before or if there is a need to create a new object. Default implementation will hash args and kwargs and use ``_`` as the name. .. py:method:: getService(cls, parentService, *args, **kwargs) :param parentService: an :py:class:`AsyncMultiService` where to lookup and register the :py:class:`SharedService` (usually the root service, the master) :returns: instance of the service via Deferred Class method. Takes same arguments as the constructor of the service (plus the `parentService` at the beginning of the list). Construct an instance of the service if needed, and place it at the beginning of the `parentService` service list. Placing it at the beginning will guarantee that the :py:class:`SharedService` will be stopped after the other services. .. py:class:: BuildbotService This class is the combinations of all `Service` classes implemented in buildbot. It is Async, MultiService, and Reconfigurable, and designed to be eventually the base class for all buildbot services. This class makes it easy to manage (re)configured services. The design separates the check of the config and the actual configuration/start. A service sibling is a configured object that has the same name of a previously started service. The sibling configuration will be used to configure the running service. Service lifecycle is as follow: * Buildbot master start * Buildbot is evaluating the configuration file. BuildbotServices are created, and checkConfig() are called by the generic constructor. * If everything is fine, all services are started. BuildbotServices startService() is called, and call reconfigService() for the first time. * User reconfigures buildbot. 
* Buildbot is evaluating the configuration file. BuildbotServices siblings are created, and checkConfig() are called by the generic constructor. * BuildbotServiceManager is figuring out added services, removed services, unchanged services * BuildbotServiceManager calls stopService() for services that disappeared from the configuration. * BuildbotServiceManager calls startService() like in buildbot start phase for services that appeared from the configuration. * BuildbotServiceManager calls reconfigService() for the second time for services that have their configuration changed. .. py:method:: __init__(self, *args, **kwargs) Constructor of the service. The constructor initializes the service, calls checkConfig() and stores the config arguments in private attributes. This should *not* be overridden by subclasses, as they should rather override checkConfig. .. py:method:: canReconfigWithSibling(self, sibling) This method is used to check if we are able to call :py:func:`reconfigServiceWithSibling` with the given sibling. If it returns `False`, we stop the old service and start a new one, instead of attempting a reconfig. .. py:method:: checkConfig(self, *args, **kwargs) Please override this method to check the parameters of your config. Please use :py:func:`buildbot.config.error` for error reporting. You can replace them ``*args, **kwargs`` by actual constructor like arguments with default args, and it have to match self.reconfigService This method is synchronous, and executed in the context of the master.cfg. Please don't block, or use deferreds in this method. Remember that the object that runs checkConfig is not always the object that is actually started. The checked configuration can be passed to another sibling service. Any actual resource creation shall be handled in reconfigService() or startService() .. py:method:: reconfigService(self, *args, **kwargs) This method is called at buildbot startup, and buildbot reconfig. 
`*args` and `**kwargs` are the configuration arguments passed to the constructor in master.cfg. You can replace ``them *args, **kwargs`` by actual constructor like arguments with default args, and it have to match self.checkConfig Returns a deferred that should fire when the service is ready. Builds are not started until all services are configured. BuildbotServices must be aware that during reconfiguration, their methods can still be called by running builds. So they should atomically switch old configuration and new configuration, so that the service is always available. If this method raises :py:class:`NotImplementedError`, it means the service is legacy, and do not support reconfiguration. The :py:class:`BuildbotServiceManager` parent, will detect this, and swap old service with new service. This behaviour allow smooth transition of old code to new reconfigurable service lifecycle but shall not be used for new code. .. py:method:: reconfigServiceWithSibling(self, sibling) Internal method that finds the configuration bits in a sibling, an object with same class that is supposed to replace it from a new configuration. We want to reuse the service started at master startup and just reconfigure it. This method handles necessary steps to detect if the config has changed, and eventually call self.reconfigService() .. py:method:: renderSecrets(self, *args) Utility method which renders a list of parameters which can be interpolated as a secret. This is meant for services which have their secrets parameter configurable as positional arguments. If there are several argument, the secrets are interpolated in parallel, and a list of result is returned via deferred. If there is one argument, the result is directly returned. .. note:: For keyword arguments, a simpler method is to use the ``secrets`` class variable, whose items will be automatically interpolated just before reconfiguration. .. code-block:: python def reconfigService(self, user, password, ...) 
user, password = yield self.renderSecrets(user, password) .. code-block:: python def reconfigService(self, token, ...) token = yield self.renderSecrets(token) .. code-block:: python secrets = ("user", "password") def reconfigService(self, user=None, password=None, ...): # nothing to do; user and password will be automatically interpolated Advanced users can derive this class to make their own services that run inside buildbot, and follow the application lifecycle of buildbot master. Such services are singletons accessible to nearly every object in Buildbot (buildsteps, status, changesources, etc) using self.master.namedServices['']. As such, they can be used to factorize access to external services, available e.g using a REST api. Having a single service will help with caching, and rate-limiting access of those APIs. Here is an example on how you would integrate and configure a simple service in your `master.cfg`: .. code-block:: python class MyShellCommand(ShellCommand): def getResultSummary(self): # access the service attribute service = self.master.namedServices['myService'] return dict(step=u"arg value: %d" % (service.arg1,)) class MyService(BuildbotService): name = "myService" def checkConfig(self, arg1): if not isinstance(arg1, int): config.error("arg1 must be an integer while it is %r" % (arg1,)) return if arg1 < 0: config.error("arg1 must be positive while it is %d" % (arg1,)) def reconfigService(self, arg1): self.arg1 = arg1 return defer.succeed(None) c['schedulers'] = [ ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(MyShellCommand(command='echo hei')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] c['services'] = [ MyService(arg1=1) ] :py:mod:`buildbot.util.httpclientservice` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.httpclientservice .. py:class:: HTTPClientService This class implements a SharedService for doing http client access. 
The module automatically chooses from `txrequests`_ and `treq`_ and uses whichever is installed. It provides minimalistic API similar to the one from `txrequests`_ and `treq`_. Having a SharedService for this allows to limits the number of simultaneous connection for the same host. While twisted application can managed thousands of connections at the same time, this is often not the case for the services buildbot controls. Both `txrequests`_ and `treq`_ use keep-alive connection polling. Lots of HTTP REST API will however force a connection close in the end of a transaction. .. note:: The API described here is voluntary minimalistic, and reflects what is tested. As most of this module is implemented as a pass-through to the underlying libraries, other options can work but have not been tested to work in both backends. If there is a need for more functionality, please add new tests before using them. .. py:staticmethod:: getService(master, base_url, auth=None, headers=None, debug=None, verify=None) :param master: the instance of the master service (available in self.master for all the :py:class:`BuildbotService` instances) :param base_url: The base http url of the service to access. e.g. ``http://github.com/`` :param auth: Authentication information. If auth is a tuple then ``BasicAuth`` will be used. e.g ``('user', 'passwd')`` It can also be a :mod:`requests.auth` authentication plugin. In this case `txrequests`_ will be forced, and `treq`_ cannot be used. :param headers: The headers to pass to every requests for this url :param debug: log every requests and every response. :param verify: disable the SSL verification. :returns: instance of :`HTTPClientService` Get an instance of the SharedService. There is one instance per base_url and auth. The constructor initialize the service, and store the config arguments in private attributes. This should *not* be overridden by subclasses, as they should rather override checkConfig. .. 
py:method:: get(endpoint, params=None) :param endpoint: endpoint relative to the base_url (starts with ``/``) :param params: optional dictionary that will be encoded in the query part of the url (e.g. ``?param1=foo``) :returns: implementation of :`IHTTPResponse` via deferred Performs a HTTP ``GET`` .. py:method:: delete(endpoint, params=None) :param endpoint: endpoint relative to the base_url (starts with ``/``) :param params: optional dictionary that will be encoded in the query part of the url (e.g. ``?param1=foo``) :returns: implementation of :`IHTTPResponse` via deferred Performs a HTTP ``DELETE`` .. py:method:: post(endpoint, data=None, json=None, params=None) :param endpoint: endpoint relative to the base_url (starts with ``/``) :param data: optional dictionary that will be encoded in the body of the http requests as ``application/x-www-form-urlencoded`` :param json: optional dictionary that will be encoded in the body of the http requests as ``application/json`` :param params: optional dictionary that will be encoded in the query part of the url (e.g. ``?param1=foo``) :returns: implementation of :`IHTTPResponse` via deferred Performs a HTTP ``POST`` .. note:: json and data cannot be used at the same time. .. py:method:: put(endpoint, data=None, json=None, params=None) :param endpoint: endpoint relative to the base_url (starts with ``/``) :param data: optional dictionary that will be encoded in the body of the http requests as ``application/x-www-form-urlencoded`` :param json: optional dictionary that will be encoded in the body of the http requests as ``application/json`` :param params: optional dictionary that will be encoded in the query part of the url (e.g. ``?param1=foo``) :returns: implementation of :`IHTTPResponse` via deferred Performs a HTTP ``PUT`` .. note:: json and data cannot be used at the same time. .. py:class:: IHTTPResponse .. note:: :class:`IHTTPResponse` is a subset of `treq`_ :py:class:`Response` API described `here `_. 
The API it is voluntarily minimalistic and reflects what is tested and reliable to use with the three backends (including fake). The API is a subset of the `treq`_ API, which is itself a superset of `twisted IResponse API`_. `treq`_ is thus implemented as passthrough. Notably: * There is no API to automatically decode content, as this is not implemented the same in both backends. * There is no API to stream content as the two libraries have very different way for doing it, and we do not see use-case where buildbot would need to transfer large content to the master. .. py:method:: content() :returns: raw (``bytes``) content of the response via deferred .. py:method:: json() :returns: json decoded content of the response via deferred .. py:attribute:: code :returns: http status code of the request's response (e.g 200) .. py:attribute:: url :returns: request's url (e.g https://api.github.com/endpoint') .. _txrequests: https://pypi.python.org/pypi/txrequests .. _treq: https://pypi.python.org/pypi/treq .. _twisted IResponse API: https://twistedmatrix.com/documents/current/api/twisted.web.iweb.IResponse.html :py:mod:`buildbot.test.fake.httpclientservice` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.test.fake.httpclientservice .. py:class:: HTTPClientService This class implements a fake version of the :class:`buildbot.util.httpclientservice.HTTPClientService` that needs to be used for testing services which need http client access. It implements the same APIs as :class:`buildbot.util.httpclientservice.HTTPClientService`, plus one that should be used to register the expectations. It should be registered by the test case before the tested service actually requests an HTTPClientService instance, with the same parameters. It will then replace the original implementation automatically (no need to patch anything). The testing methodology is based on `AngularJS ngMock`_. .. 
py:method:: getService(cls, master, case, *args, **kwargs) :param master: the instance of a fake master service :param case: a :py:class:`twisted.python.unittest.TestCase` instance :py:meth:`getService` returns a fake :py:class:`HTTPClientService`, and should be used just like the regular :py:meth:`getService`. It will make sure the original :py:class:`HTTPClientService` is not called, and assert that all expected http requests have been described in the test case. .. py:method:: expect(self, method, ep, params=None, data=None, json=None, code=200, content=None, content_json=None) :param method: expected HTTP method :param ep: expected endpoint :param params: optional expected query parameters :param data: optional expected non-json data (bytes) :param json: optional expected json data (dictionary or list or string) :param code: optional http code that will be received :param content: optional content that will be received :param content_json: optional content encoded in json that will be received Records an expectation of HTTP requests that will happen during the test. The order of the requests is important. All the request expectation must be defined in the test. For example: .. 
code-block:: python from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.util import httpclientservice from buildbot.util import service class myTestedService(service.BuildbotService): name = 'myTestedService' @defer.inlineCallbacks def reconfigService(self, baseurl): self._http = yield httpclientservice.HTTPClientService.getService( self.master, baseurl) @defer.inlineCallbacks def doGetRoot(self): res = yield self._http.get("/") # note that at this point, only the http response headers are received if res.code != 200: raise Exception("%d: server did not succeed" % (res.code)) res_json = yield res.json() # res.json() returns a deferred to account for the time needed to fetch the # entire body return res_json class Test(unittest.SynchronousTestCase): def setUp(self): baseurl = 'http://127.0.0.1:8080' self.parent = service.MasterService() self._http = self.successResultOf( fakehttpclientservice.HTTPClientService.getService(self.parent, self, baseurl)) self.tested = myTestedService(baseurl) self.successResultOf(self.tested.setServiceParent(self.parent)) self.successResultOf(self.parent.startService()) def test_root(self): self._http.expect("get", "/", content_json={'foo': 'bar'}) response = self.successResultOf(self.tested.doGetRoot()) self.assertEqual(response, {'foo': 'bar'}) def test_root_error(self): self._http.expect("get", "/", content_json={'foo': 'bar'}, code=404) response = self.failureResultOf(self.tested.doGetRoot()) self.assertEqual(response.getErrorMessage(), '404: server did not succeed') .. _AngularJS ngMock: https://docs.angularjs.org/api/ngMock/service/$httpBackend :py:mod:`buildbot.util.ssl` ~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:module:: buildbot.util.ssl This module is a copy of :py:mod:`twisted.internet.ssl` except it won't crash with :py:class:`ImportError` if :py:mod:`pyopenssl` is not installed. 
If you need to use :py:mod:`twisted.internet.ssl`, please instead use :py:mod:`buildbot.util.ssl`, and call :py:func:`ssl.ensureHasSSL` in :py:meth:`checkConfig` to provide helpful message to the user, only if they enabled SSL for your plugin. .. py:function:: ensureHasSSL(plugin_name) :param plugin_name: name of the plugin. Usually ``self.__class__.__name__`` Call this function to provide helpful config error to the user in case of ``OpenSSL`` not installed. .. py:function:: skipUnless(f) :param f: decorated test Test decorator which will skip the test if ``OpenSSL`` is not installed. buildbot-3.4.0/master/docs/developer/www-base-app.rst000066400000000000000000000216011413250514000225710ustar00rootroot00000000000000.. _WWW-base-app: Base web application ==================== JavaScript Application ---------------------- The client side of the web UI is written in JavaScript and based on the AngularJS framework and concepts. This is a `Single Page Application `_. All Buildbot pages are loaded from the same path, at the master's base URL. The actual content of the page is dictated by the fragment in the URL (the portion following the ``#`` character). Using the fragment is a common JS technique to avoid reloading the whole page over HTTP when the user changes the URI or clicks on a link. AngularJS ~~~~~~~~~ The best place to learn about AngularJS is `its own documentation `_. AngularJS strong points are: * A very powerful `MVC system `_ allowing automatic update of the UI when data changes * A `Testing Framework and philosophy `_ * A `deferred system `_ similar to the one from Twisted * A `fast growing community and ecosystem `_ On top of Angular, we use nodeJS tools to ease development: * webpack build system, seamlessly build the app, watch files for modification, rebuild and reload browser in dev mode. 
In production mode, the build system minifies html, css and js, so that the final app is only 3 files to download (+img) * `pug template language (aka jade) `_, adds syntax sugar and readability to angular html templates * `Bootstrap `_ is a CSS library providing known good basis for our styles * `Font Awesome `_ is a coherent and large icon library Additionally the following npm modules are loaded by webpack and are available to plugins: * `@uirouter/angularjs `_ * `angular-animate `_ * `angular-ui-boostrap `_ * `d3 `_ * `jQuery `_ For the exact versions of these dependencies, check :src:`www/base/package.json`. Extensibility ~~~~~~~~~~~~~ The Buildbot UI is designed for extensibility. The base application should be pretty minimal and only include very basic status pages. The base application cannot be disabled, so any page that's not absolutely necessary should be put in plugins. You can also completely replace the default application by another application more suitable to your needs. Some Web plugins are maintained inside Buildbot's git repository, but this is not required in order for a plugin to work. Unofficial plugins are possible and encouraged. Typical plugin source code layout is: ``setup.py`` Standard setup script. Most plugins should use the same boilerplate, which implements building the BuildBot plugin app as part of the package setup. Minimal adaptation is needed. ``/__init__.py`` The python entrypoint. Must contain an "ep" variable of type buildbot.www.plugin.Application. Minimal adaptation is needed ``webpack.config.js`` Configuration for Webpack. Few changes are usually needed here. Please see webpack docs for details. ``src/...`` Source code for the AngularJS application. ``package.json`` Declares npm dependencies and development scripts. ``MANIFEST.in`` Needed by setup.py for sdist generation. You need to adapt this file to match the name of your plugin. Plugins are packaged as python entry-points for the ``buildbot.www`` namespace. 
The python part is defined in the ``buildbot.www.plugin`` module. The entrypoint must contain a ``twisted.web`` Resource, that is populated in the web server in ``//``. The plugin may only add an http endpoint, or it could add a full JavaScript UI. This is controlled by the ``ui`` argument of the ``Application`` endpoint object. If ``ui==True``, then it will automatically load ``//scripts.js`` and ``//styles.css`` into the angular.js application. Additionally, an angular.js module with the name ```` will be registered as a dependency of the main ``app`` module. The ``scripts.js`` file may register some new states to ``$stateProvider`` or add new menu items via ``glMenuProvider`` for example. The plugin writers may add more REST APIs to ``//api``. For that, a reference to the master singleton is provided in ``master`` attribute of the Application entrypoint. The plugins are not restricted to Twisted, and could even `load a wsgi application using flask, django, or some other framework `_. Check out the official BuildBot www plugins for examples. The :src:`www/grid_view` and :src:`www/badges` are good examples of plugins with and without a JavaScript UI respectively. .. _Routing: Routing ~~~~~~~ AngularJS uses a router to match URLs and choose which page to display. The router we use is ``ui.router``. Menu is managed by guanlecoja-ui's glMenuProvider. Please look at ``ui.router`` and guanlecoja-ui documentation for details. Typically, a route registration will look like following example: .. 
code-block:: javascript class MyState { // Dependency injection: we inject $stateProvider and glMenuServiceProvider constructor($stateProvider, glMenuServiceProvider) { // Name of the state const name = 'myname'; const caption = 'My Name Plugin'; // Configuration glMenuServiceProvider.addGroup({ name: name, caption: caption, // text of the menu icon: 'exclamation-circle', // icon, from Font-Awesome // Order in the menu, as menu are declared in several places, // we need this to control menu order order: 5 }); const cfg = { group: name, caption: caption }; // Register new state const state = { controller: "myStateController", template: require('./myname.tpl.jade'), name: name, url: `/${name}`, data: cfg }; $stateProvider.state(state); } } angular.module('mymodule') .config(['$stateProvider', 'glMenuServiceProvider', MyState]); Directives ~~~~~~~~~~ We use angular directives as much as possible to implement reusable UI components. Linking with Buildbot ~~~~~~~~~~~~~~~~~~~~~ A running buildmaster needs to be able to find the JavaScript source code it needs to serve the UI. This needs to work in a variety of contexts - Python development, JavaScript development, and end-user installations. To accomplish this, the www build process finishes by bundling all of the static data into a Python distribution tarball, along with a little bit of Python glue. The Python glue implements the interface described below, with some care taken to handle multiple contexts. See :ref:`JSDevQuickStart` for a more extensive explanation and tutorial. Testing Setup ------------- buildbot_www uses `Karma `_ to run the JavaScript test suite. This is the official test framework made for angular.js. We don't run the front-end testsuite inside the python 'trial' test suite, because testing python and JS is technically very different. Karma needs a browser to run the unit test in. It supports all the major browsers. 
Given our current experience, we did not see any bugs yet that would only happen on a particular browser. This is the reason why only Chrome is used for testing at the moment. Debug with karma ~~~~~~~~~~~~~~~~ ``console.log`` is available via karma. In order to debug the unit tests, you can also use the global variable ``dump``, which dumps any object for inspection in the console. This can be handy to be sure that you don't let debug logs in your code to always use ``dump``. Testing with real data ~~~~~~~~~~~~~~~~~~~~~~ It is possible to run only the frontend and proxy the requests to another BuildBot instance. This allows to make front-end work on realistic data without bothering to reproduce the setup locally. This is implemented as the ``master/buildbot/scripts/devproxy.py`` aiohttp server. To run it, set up and enable a virtualenv like the one described in :ref:`PythonDevQuickStart`. Then execute the script as follows: .. code-block:: bash buildbot dev-proxy There are many options which are documented as usual with ``--help``. Note that ``dev-proxy`` does not work with most of authentication except basic password. You can steal a ``document.cookie`` string from your real Buildbot and then pass to ``dev-proxy`` using the ``--auth_cookie`` option. buildbot-3.4.0/master/docs/developer/www-data-module.rst000066400000000000000000000217401413250514000233010ustar00rootroot00000000000000.. _WWW-data-module: Javascript Data Module ====================== The Data module is a reusable AngularJS module used to access Buildbot's data API from the browser. Its main purpose is to handle the 3 way binding. 2 way binding is the angular MVVM concept, which seamlessly synchronise the view and the model. Here, we introduce an additional way of synchronisation, which is from the server to the model. .. image:: ../_images/js-data-module-mvvm.svg :width: 100% We use the message queue and the websocket interfaces to maintain synchronisation between the server and the client. 
The client application just needs to query the needed data using a highlevel API, and the data module uses the best approach to make the data always up to date. Once the binding is set up by the controller, everything is automatically up to date. Base Concepts ------------- Collections ~~~~~~~~~~~ All the data you can get are Collections. Even a query to a single resource returns a collection. A collection is an Array subclass which has extra capabilities: - It listens to the event stream and is able to maintain itself up-to-date - It implements client side queries in order to guarantee up-to-date filtering, ordering and limiting queries. - It has a fast access to each item it contains via its id. - It has its own event handlers so that the client code can react when the Collection is changing Wrappers ~~~~~~~~ Each data type contained in a collection is wrapped in a javascript object. This allows to create some custom enhancements to the data model. For example, the Change wrapper decodes the author name and email from the "author" field. Each wrapper class also has specific access methods, which allow to access more data from the REST hierarchy. .. image:: ../_images/js-data-module-wrappers.svg :width: 100% Installation ~~~~~~~~~~~~ The Data module is available as a standalone AngularJS module. Installation via yarn: .. code-block:: sh yarn add buildbot-data Inject the ``bbData`` module to your application: .. code-block:: javascript angular.module('myApp', ['bbData']) Service API ~~~~~~~~~~~ .. js:class:: DataService DataService is the service used for accessing the Buildbot data API. It has a modern interface for accessing data in such a way that the updating of the data via web socket is transparent. .. js:method:: open() :returns: a DataAccessor which handles 3 way data binding Open a new accessor every time you need to update the data in a controller. 
It registers on $destroy event on the scope, and thus automatically unsubscribes from updates when the data is not used anymore. .. code-block:: javascript // open a new accessor every time you need updating data in a controller class DemoController { constructor($scope, dataService) { // automatically closes all the bindings when the $scope is destroyed const data = dataService.open().closeOnDestroy($scope); // request new data, it updates automatically this.builders = data.getBuilders({limit: 10, order: '-started_at'}); } } .. js:method:: getXs([id], [query]) ``Xs`` can be the following: ``Builds``, ``Builders``, ``Buildrequests``, ``Buildsets``, ``Workers``, ``Changes``, ``Changesources``, ``Forceschedulers``, ``Masters``, ``Schedulers``, ``Sourcestamps``. It's highly advised to use these methods instead of the lower level ``get('string')``. :returns: collection which will eventually contain all the requested data The collections returned without using an accessor are not automatically updated. So use those methods only when you know the data are not changing. .. code-block:: javascript // assign builds to $scope.builds and then load the steps when the builds are discovered // onNew is called at initial load $scope.builds = dataService.getBuilds({builderid: 1}); $scope.builds.onNew = build => build.loadSteps(); .. js:method:: get(endpoint, [id], [query]) :returns: a collection; when the promise is resolved, the collection contains all the requested data .. code-block:: javascript // assign builds to $scope.builds once the Collection is filled const builderid = 1; $scope.builds = dataService.get(`builders/${builderid}/builds`, {limit: 1}); $scope.builds.onNew = build => build.loadSteps(); .. code-block:: javascript // assign builds to $scope.builds before the Collection is filled using the // getArray() method $scope.builds = dataService.get('builds', {builderid: 1}); .. 
js:method:: control(url, method, [params]) :returns: a promise; sends a JSON RPC2 POST request to the server .. code-block:: javascript // open a new accessor every time you need to update the data in a controller dataService.control('forceschedulers/force', 'force') .then(response => $log.debug(response), reason => $log.error(reason)); .. js:class:: DataAccessor DataAccessor object is returned by the ``dataService.open()`` method. .. js:method:: closeOnDestroy($scope) Registers scope destruction as waterfall destruction for all collection accessed via this accessor. .. js:method:: close() Destructs all collections previously accessed via this accessor. Destroying a collection means it will unsubscribe from any events necessary to maintain it up-to-date. .. js:method:: getXs([id], [query]) Same methods as in DataService, except here the data will be maintained up-to-date. :returns: a collection which will eventually contain all the requested data .. js:class:: Collections .. js:method:: get(id) This method does not do any network access, and thus only knows about data already fetched. :returns: one element of the collection by id, or undefined, if this id is unknown to the collection. .. js:method:: hasOwnProperty(id) :returns: true if this id is known by this collection. .. js:method:: close() Forcefully unsubscribes this connection from auto-update. Normally, this is done automatically on scope destruction, but sometimes, when you got enough data, you want to save bandwidth and disconnect the collection. .. js:method:: put(object) Inserts one plain object to the collection. As an external API, this method is only useful for unit tests to simulate new data coming asynchronously. .. js:method:: from(object_list) Inserts several plain objects to the collection. This method is only useful for unit tests to simulate new data coming asynchronously. .. js:method:: onNew = (object) -> Callback method which is called when a new object arrives in the collection. 
This can be called either when initial data is coming via REST API, or when data is coming via the event stream. The affected object is given in parameter. `this` context is the collection. .. js:method:: onUpdate = (object) -> Callback method which is called when an object is modified. This is called when data is coming via the event stream. The affected object is given in parameter. `this` context is the collection. .. js:method:: onChange = (collection) -> Callback method which is called when an object is modified. This is called when data is coming via the event stream. `this` context is the collection. The full collection is given in parameter (in case you override ``this`` via fat arrow). .. js:attribute:: $ready Attribute similar to what ``ngResource`` provides. True after first server interaction is completed, false before that. Knowing if the Collection has been resolved is useful in data-binding (for example to display a loading graphic). .. js:class:: Wrapper Wrapper objects are objects stored in the collection. These objects have specific methods, depending on their types. .. js:method:: getXs([id], [query]) Same as ``DataService.getXs``, but with a relative endpoint. :returns: a collection; when the promise is resolved, the collection contains all the requested data .. code-block:: javascript // assign builds to $scope.builds once the Collection is filled $scope.builds = dataService.getBuilds({builderid: 1}); $scope.builds.onNew = function(b) { b.complete_steps = b.getSteps({complete:true}); b.running_steps = b.getSteps({complete:false}); }; .. js:method:: loadXs([id], [query]) ``o.loadXs()`` is equivalent to ``o.xs = o.getXs()``. :returns: a collection; the collection contains all the requested data, which is also assigned to ``o.Xs`` .. 
code-block:: javascript // get builder with id = 1 dataService.getBuilders(1).onNew = builder => { // load all builds in builder.builds builder.loadBuilds().onNew(build => { // load all buildsteps in build.steps build.loadSteps(); }); }; .. js:method:: control(method, params) :returns: a promise; sends a JSON RPC2 POST request to the server buildbot-3.4.0/master/docs/developer/www-server.rst000066400000000000000000000175401413250514000224160ustar00rootroot00000000000000.. _WWW: .. _WWW-server: WWW Server ========== History and Motivation ---------------------- One of the goals of the 'nine' project is to rework Buildbot's web services to use a more modern, consistent design and implement UI features in client-side JavaScript instead of server-side Python. The rationale behind this is that a client side UI relieves pressure on the server while being more responsive for the user. The web server only concentrates on serving data via a REST interface wrapping the :ref:`Data_API`. This removes a lot of sources of latency where, in previous versions, long synchronous calculations were made on the server to generate complex pages. Another big advantage is live updates of status pages, without having to poll or reload. The new system uses Comet techniques in order to relay Data API events to connected clients. Finally, making web services an integral part of Buildbot, rather than a status plugin, allows tighter integration with the rest of the application. Design Overview --------------- The ``www`` service exposes three pieces via HTTP: * A REST interface wrapping :ref:`Data_API`; * HTTP-based messaging protocols wrapping the :ref:`Messaging_and_Queues` interface; and * Static resources implementing the client-side UI. The REST interface is a very thin wrapper: URLs are translated directly into Data API paths, and results are returned directly, in JSON format. It is based on `JSON API `_. Control calls are handled with a simplified form of `JSONRPC 2.0 `_. 
The message interface is also a thin wrapper around Buildbot's MQ mechanism. Clients can subscribe to messages, and receive copies of the messages, in JSON, as they are received by the buildmaster. The client-side UI is an AngularJS application. Buildbot uses the Python setuptools entry-point mechanism to allow multiple packages to be combined into a single client-side experience. This allows frontend developers and users to build custom components for the web UI without hacking Buildbot itself. Python development and AngularJS development are very different processes, requiring different environment requirements and skillsets. To maximize hackability, Buildbot separates the two cleanly. An experienced AngularJS hacker should be quite comfortable in the :src:`www/` directory, with a few exceptions described below. Similarly, an experienced Python hacker can simply download the pre-built web UI (from pypi!) and never venture near the :src:`www/` directory. URLs ~~~~ The Buildbot web interface is rooted at its base URL, as configured by the user. It is entirely possible for this base URL to contain path components, e.g., ``http://build.example.org/buildbot/``, if hosted behind an HTTP proxy. To accomplish this, all URLs are generated relative to the base URL. Overall, the space under the base URL looks like this: * ``/`` -- The HTML document that loads the UI * ``/api/v{version}`` -- The root of the REST APIs, each versioned numerically. Users should, in general, use the latest version. * ``/ws`` -- The WebSocket endpoint to subscribe to messages from the mq system. * ``/sse`` -- The `server sent event `_ endpoint where clients can subscribe to messages from the mq system. REST API -------- Rest API is described in its own section. Server-Side Session ------------------- The web server keeps a session state for each user, keyed on a session cookie. This session is available from ``request.getSession()``, and data is stored as attributes. 
The following attributes may be available: ``user_info`` A dictionary maintained by the :doc:`authentication subsystem `. It may have the following information about the logged-in user: * ``username`` * ``email`` * ``full_name`` * ``groups`` (a list of group names) As well as additional fields specific to the user info implementation. The contents of the ``user_info`` dictionary are made available to the UI as ``config.user``. Message API ----------- Currently, messages are implemented with two protocols, WebSockets and `server sent events `_. WebSocket ~~~~~~~~~ WebSocket is a protocol for arbitrary messaging to and from a browser. As an HTTP extension, the protocol is not yet well supported by all HTTP proxy technologies. Although, it has been reported to work well used behind the https protocol. Only one WebSocket connection is needed per browser. The client can connect using the url ``ws[s]:///ws``. The protocol used is a simple in-house protocol based on json. The structure of a command from the client is as follows: .. code-block:: javascript { "cmd": "", '_id': , "arg1": arg1, "arg2": arg2 } * ``cmd`` is used to reference a command name * ``_id`` is used to track the response, can be any unique number or string, generated by the client. It needs to be unique per websocket session. Response is sent asynchronously, reusing ``_id`` to track which command is responded. Success answer example would be: .. code-block:: javascript { "msg": "OK", "_id": 1, "code": 200 } Error answer example would be: .. code-block:: javascript { "_id": 1, "code": 404, "error": "no such command 'poing'" } The client can send several commands without waiting for a response. Responses are not guaranteed to be sent in order. Several commands are implemented: ``ping`` .. code-block:: javascript { "_id": 1, "cmd": "ping" } The server will respond with a "pong" message: .. 
code-block:: javascript { "_id": 1, "msg": "pong", "code": 200 } ``startConsuming`` Start consuming events that match ``path``. ``path``\s are described in the :ref:`Messaging_and_Queues` section. For size optimization reasons, paths are joined with "/", and with the None wildcard replaced by "*". .. code-block:: javascript { "_id": 1, "cmd": "startConsuming", "path": "change/*/*" } Success answer example will be: .. code-block:: javascript { "msg": "OK", "_id": 1, "code": 200 } ``stopConsuming`` Stop consuming events that were previously registered with ``path``. .. code-block:: javascript { "_id": 1, "cmd": "stopConsuming", "path": "change/*/*" } Success answer example will be: .. code-block:: javascript { "msg": "OK", "_id": 1, "code": 200 } The client will receive events as websocket frames encoded in json with the following format: .. code-block:: javascript { "k": key, "m": message } .. _SSE: Server Sent Events ~~~~~~~~~~~~~~~~~~ SSE is a simpler protocol than WebSockets and is more REST compliant. It uses the chunk-encoding HTTP feature to stream the events. SSE also does not work well behind an enterprise proxy, unless you use the https protocol. The client can connect using following endpoints: * ``http[s]:///sse/listen/``: Start listening to events on the http connection. Optionally, setup a first event filter on ````. The first message send is a handshake, giving a uuid that can be used to add or remove event filters. * ``http[s]:///sse/add//``: Configure a sse session to add an event filter * ``http[s]:///sse/remove//``: Configure a sse session to remove an event filter Note that if a load balancer is setup as a front end to buildbot web masters, the load balancer must be configured to always use the same master given a client IP address for /sse endpoint. The client will receive events as sse events, encoded with the following format: .. 
code-block:: none event: event data: { "key": , "message": } The first event received is a handshake, and is used to inform the client about the uuid to use for configuring additional event filters .. code-block:: none event: handshake data: buildbot-3.4.0/master/docs/examples/000077500000000000000000000000001413250514000173565ustar00rootroot00000000000000buildbot-3.4.0/master/docs/examples/git_gerrit.cfg000066400000000000000000000167341413250514000222110ustar00rootroot00000000000000# -*- python -*- # ex: set syntax=python: from buildbot.plugins import * # This is a sample buildmaster config file. It must be installed as # 'master.cfg' in your buildmaster's base directory. # This is the dictionary that the buildmaster pays attention to. We also use # a shorter alias to save typing. c = BuildmasterConfig = {} ####### BUILDSLAVES # The 'slaves' list defines the set of recognized buildslaves. Each element is # a BuildSlave object, specifying a unique slave name and password. The same # slave name and password must be configured on the slave. c['slaves'] = [buildslave.BuildSlave("example-slave", "pass")] # 'protocols' contains information about protocols which master will use for # communicating with slaves. You must define at least 'port' option that slaves # could connect to your master with this protocol. # 'port' must match the value configured into the buildslaves (with their # --master option) c['protocols'] = {'pb': {'port': 9989}} ####### CHANGESOURCES # the 'change_source' setting tells the buildmaster how it should find out # about source code changes. Here we point to the buildbot clone of pyflakes. #Gerrit Configuration gerrit_url = "gerrit.example.com" gerrit_user = "gerrit" gerrit_port = "29418" gerrit_project = "mygerritproject" gerrit_repo = "ssh://%s@%s:%s/%s" % (gerrit_user, gerrit_url, gerrit_port, gerrit_project) #Add comment-added to handled_events to have approvals information (Code-Review...) 
c['change_source'] = [] c['change_source'].append(changes.GerritChangeSource(gerrit_url, gerrit_user, handled_events=["patchset-created", "comment-added"])) ####### SCHEDULERS # Configure the Schedulers, which decide how to react to incoming changes. In this # case, just kick off a 'runtests' build #Check there is Code-Review=+2 in Approvals (of comment-added) def change_code_review_plus_2(change): if "event.approvals" in change.properties: for a in change.properties["event.approvals"]: if "Code-Review" in a["type"] and int(a["value"]) == 2: return True return False c['schedulers'] = [] c['schedulers'].append(schedulers.SingleBranchScheduler( name="all", change_filter=util.ChangeFilter(branch_re="master/*", filter_fn=change_code_review_plus_2), treeStableTimer=None, builderNames=["runtests-gcc","runtests-clang"])) c['schedulers'].append(schedulers.ForceScheduler( name="force", builderNames=["runtests-gcc","runtests-clang"])) ####### BUILDERS # The 'builders' list defines the Builders, which tell Buildbot how to perform a build: # what steps, and which slaves can execute them. Note that any particular build will # only take place on one slave. 
#Build with GCC f_gcc = util.BuildFactory([ steps.Gerrit(repourl=gerrit_repo, mode="full",retry=[60,60],timeout=3600), steps.ShellCommand(command=["bash","./autogen.sh"],timeout=3600), steps.Configure(command=["./configure"]), steps.Compile(command=["make", "-j", "4"]), steps.Compile(command=["make", "test"]) ]) #Build with Clang f_clang = BuildFactory([ steps.Gerrit(repourl=gerrit_repo, mode="full",retry=[60,60],timeout=3600), steps.ShellCommand(command=["bash","./autogen.sh"],timeout=3600), steps.Configure(command=["./configure"],env={ "CC":"clang", "CXX":"clang++"}), steps.Compile(command=["make", "-j", "4"]), steps.Compile(command=["make", "test"]) ]) c['builders'] = [] c['builders'].append( util.BuilderConfig(name="runtests-gcc", slavenames=["example-slave"], factory=f_gcc)) c['builders'].append( util.BuilderConfig(name="runtests-clang", slavenames=["example-slave"], factory=f_clang)) ####### STATUS TARGETS # 'status' is a list of Status Targets. The results of each build will be # pushed to these targets. buildbot/status/*.py has a variety to choose from, # like IRC bots. 
c['status'] = [] authz_cfg=authz.Authz( # change any of these to True to enable; see the manual for more # options auth=auth.BasicAuth([("pyflakes","pyflakes")]), gracefulShutdown = False, forceBuild = 'auth', # use this to test your slave once it is set up forceAllBuilds = False, pingBuilder = False, stopBuild = False, stopAllBuilds = False, cancelPendingBuild = False, ) c['status'].append(html.WebStatus(http_port=8010, authz=authz_cfg)) def gerritReviewCB(builderName, build, result, master, arg): if result == util.RETRY: return dict() message = "Buildbot finished compiling your patchset\n" message += "on configuration: %s\n" % builderName message += "The result is: %s\n" % util.Results[result].upper() if arg: message += "\nFor more details visit:\n" message += build['url'] + "\n" if result == util.SUCCESS: verified = 1 else: verified = -1 return dict(message=message, labels={'Verified': verified}) def gerritStartCB(builderName, build, arg): message = "Buildbot started compiling your patchset\n" message += "on configuration: %s\n" % builderName message += "See your build here: %s" % build['url'] return dict(message=message) def gerritSummaryCB(buildInfoList, results, status, arg): success = False failure = False msgs = [] for buildInfo in buildInfoList: msg = "Builder %(name)s %(resultText)s (%(text)s)" % buildInfo link = buildInfo.get('url', None) if link: msg += " - " + link else: msg += "." 
msgs.append(msg) if buildInfo['result'] == util.SUCCESS: success = True else: failure = True if success and not failure: verified = 1 else: verified = -1 return dict(message='\n\n'.join(msgs), labels={ 'Verified': verified }) c['buildbotURL'] = 'http://buildbot.example.com/' c['status'].append(status.GerritStatusPush(gerrit_url, gerrit_user, reviewCB=gerritReviewCB, reviewArg=c['buildbotURL'], startCB=gerritStartCB, startArg=c['buildbotURL'], summaryCB=gerritSummaryCB, summaryArg=c['buildbotURL'])) ####### PROJECT IDENTITY # the 'title' string will appear at the top of this buildbot installation's # home pages (linked to the 'titleURL'). c['title'] = "Buildbot with Gerrit" c['titleURL'] = "https://" + gerrit_url # the 'buildbotURL' string should point to the location where the buildbot's # internal web server is visible. This typically uses the port number set in # the 'www' entry below, but with an externally-visible host name which the # buildbot cannot figure out without some help. c['buildbotURL'] = "http://localhost:8010/" # minimalistic config to activate new web UI c['www'] = dict(port=8010, plugins=dict(waterfall_view={}, console_view={}, grid_view={})) ####### DB URL c['db'] = { # This specifies what database buildbot uses to store its state. You can leave # this at its default for all but the largest installations. 'db_url' : "sqlite:///state.sqlite", } buildbot-3.4.0/master/docs/examples/gitlab.cfg000066400000000000000000000271051413250514000213060ustar00rootroot00000000000000# -*- python -*- # ex: set filetype=python: # This is a sample buildmaster config file. It must be installed as # 'master.cfg' in your buildmaster's base directory. # # Are all your projects built the same way? # Do you yearn for a way to do simple static configuration? # If so, try writing a function! 
# # Here's an example that # - uses a function to make adding new projects easy # - provides a regular builder and a smoke test builder per project # - stores secrets in separate files # - integrates with GitLab, and does smoke builds on merge requests # - demonstrates access control using GitLab authentication # # To use this example with your own local instance of GitLab: # # 0. Set up local mirrors of the gnu hello and time projects, e.g. # for proj in hello time # do # git clone --mirror git@gitlab.com:GNU/$proj.git # cd $proj # git push --mirror git@gitlab.example.com:build/gnu-$proj.git # cd .. # done # # 1. Edit this file to replace example.com with your own domain, # and adjust worker name and password in c['workers']. # # 2. Create secrets.dir next to master.cfg: # mkdir secrets.dir # # 3. Tell GitLab to use webhooks to request builds. # Pick a random password for our webhook and save it as a secret, e.g. # echo "" > secrets.dir/my-webhook-token # chmod 600 secrets.dir/* # (where is just a placeholder for a value). # For each project to build, create a webhook in the GitLab UI at # project / Settings / Integrations / Add Webhook # with a URL of e.g. # http://buildbot.example.com:8010/change_hook/gitlab # the secret chosen above, # and with push and merge request triggers checked. # # Then start the build master and worker. # Test the webhook by visiting # project / Settings / Integrations / Webhooks # and clicking 'test' on your webhook. # If something goes wrong, GitLab will show a red banner with the reason. # GitLab merge requests should now trigger buildbot builds. # # 4. Tell buildbot to report build status to GitLab. # Uncomment sections below marked # "CONFIGME: uncomment for gitlab status reporting" # Create a GitLab access token (so buildbot can send status to GitLab). # Pick a display name for your buildbot and save it as a secret, e.g. 
# echo "" > secrets.dir/my-buildbot-name # chmod 600 secrets.dir/* # Create an access token in the GitLab UI at # "User Settings / Access Tokens / Add a personal access token" # using that display name as the context, and save it as a secret, e.g. # echo "" > secrets.dir/my-gitlab-token # chmod 600 secrets.dir/* # # Then restart the master. # GitLab merge requests should now show status of buildbot's builds. # # 5. Tell GitLab to accept authentication requests from buildbot. # Enter the URL of your buildbot gitlab hook, e.g. # http://buildbot.example.com:8010/change_hook/gitlab # into the GitLab UI at # "User Settings / Applications / Add New Application", # with scopes 'api' and 'openid' ticked, # and save the appid and secret it produces: # echo "" > secrets.dir/my-gitlab-appid # echo "" > secrets.dir/my-gitlab-appsecret # chmod 600 secrets.dir/* # 6. Restrict buildbot web UI access to logged in GitLab users. # Uncomment sections below marked # "CONFIGME: uncomment for buildbot authentication" # and replace with a valid GitLab group. # # Then restart the master. # Buildbot's web ui should now require you to be logged in to # that GitLab group before it shows you much or lets you force builds. from buildbot.plugins import * import os import re def makeFactoryNormal(repourl, branch): ''' A Factory that builds, tests, and uploads incoming changesets. The branch argument is a default in case the changeset lacks one. ''' # Adjust this factory to match your site's build steps. # This example uses the canonical gnu configure/make steps. # Adjust to match your site's build system. 
f = util.BuildFactory() f.addStep(steps.GitLab(repourl=repourl, branch=branch)) f.addStep(steps.ShellCommand(haltOnFailure=True, command=["if test -x ./bootstrap; then ./bootstrap; fi"])) f.addStep(steps.ShellCommand(haltOnFailure=True, command=["./configure"])) f.addStep(steps.ShellCommand(haltOnFailure=True, command=["make"])) f.addStep(steps.ShellCommand(haltOnFailure=True, command=["make check"])) f.addStep(steps.ShellCommand(haltOnFailure=True, command=[": insert upload step here"])) return f def makeFactorySmoke(repourl, branch): ''' A Factory that just builds and tests incoming changesets. The branch argument is a default in case the changeset lacks one. ''' f = util.BuildFactory() f.addStep(steps.GitLab(repourl=repourl, branch=branch)) f.addStep(steps.ShellCommand(haltOnFailure=True, command=["if test -x ./bootstrap; then ./bootstrap; fi"])) f.addStep(steps.ShellCommand(haltOnFailure=True, command=["./configure"])) f.addStep(steps.ShellCommand(haltOnFailure=True, command=["make"])) f.addStep(steps.ShellCommand(haltOnFailure=True, command=["make check"])) return f def repoUrlToName(repourl): ''' Gets project name from the repourl, ignoring namespace. ''' # Strip off everything before project name # FIXME: parse this more artfully to allow projects in folders name = re.sub(r'^.*/', '', repourl) # Strip off .git suffix, if present return re.sub(r'\.git$', '', name) def addBuilder(repourl, branch, flavor, workernames): ''' Add a builder for the given project and branch on the given workers. Give each a Force button. flavor must be 'smoke' or 'normal'. 
''' factory = None changehook_category = None if flavor is "normal": # Respond to push events with a normal build changehook_category = "push" factory = makeFactoryNormal(repourl, branch) elif flavor is "smoke": # Respond to merge request events with a smoke build changehook_category = "merge_request" factory = makeFactorySmoke(repourl, branch) else: raise ValueError("wanted 'normal' or 'smoke', got '%s'" % flavor) name = repoUrlToName(repourl) id = name + "-" + branch + "-" + flavor builder = util.BuilderConfig(name=id, workernames=workernames, factory=factory) c['builders'].append(builder) c['schedulers'].append(schedulers.SingleBranchScheduler( name=id, change_filter=util.ChangeFilter( project=name, branch=branch, category=changehook_category), treeStableTimer=None, builderNames=[builder.name])) c['schedulers'].append(schedulers.ForceScheduler( name=id + '-force', builderNames=[builder.name])) # For parts of buildbot that don't support Secret interpolation yet. # Once https://github.com/buildbot/buildbot/issues/4118 is fixed, # use util.Secret(s) instead. def dumbSecret(s): with open(os.path.join(secrets_dir, s), 'r') as myfile: return myfile.read().replace('\n', '') # This is the dictionary that the buildmaster pays attention to. We also use # a shorter alias to save typing. c = BuildmasterConfig = {} ####### SECRETS # Checking secrets into your master.cfg is insecure; # best practice is to keep them elsewhere else. # Place the secrets directory next to master.cfg: this_dir = os.path.dirname(os.path.abspath(__file__)) secrets_dir = os.path.join(this_dir, 'secrets.dir') c['secretsProviders'] = [secrets.SecretInAFile(dirname=secrets_dir)] ####### WORKERS # The 'workers' list defines the set of recognized workers. # Each element is a Worker object, with a unique worker name and password. # The same worker name and password must be configured on the worker. 
# CONFIGME c['workers'] = [ worker.Worker("buildbot-worker", "buildbot-pass"), ] workernames = [x.name for x in c['workers']] # 'protocols' contains information about protocols which master will use for # communicating with workers. You must define at least a 'port' option; # the master will listen on that port for connections from workers. # 'port' must match the value configured into the workers (with their # --master option) c['protocols'] = {'pb': {'port': 9989}} ####### CHANGESOURCES # the 'change_source' setting tells the buildmaster how it should find out # about source code changes. c['change_source'] = [] ####### SCHEDULERS AND BUILDERS # The Schedulers decide how to react to incoming changes. c['schedulers'] = [] # The 'builders' list defines the Builders, which tell Buildbot how to # perform a build: what steps, and which workers can execute them. # Note that any particular build will only take place on one worker. c['builders'] = [] # Call addBuilder for each similar project you want to build. # It adds a builder with both normal and force schedulers. # Note: urls must start with git@ and end with .git addBuilder('git@gitlab.example.com:build/gnu-hello.git', branch='master', flavor='normal', workernames=workernames) addBuilder('git@gitlab.example.com:build/gnu-hello.git', branch='master', flavor='smoke', workernames=workernames) addBuilder('git@gitlab.example.com:build/gnu-time.git', branch='master', flavor='normal', workernames=workernames) addBuilder('git@gitlab.example.com:build/gnu-time.git', branch='master', flavor='smoke', workernames=workernames) ####### BUILDBOT SERVICES # 'services' is a list of BuildbotService items like reporter targets. The # status of each build will be pushed to these targets. buildbot/reporters/*.py # has a variety to choose from, like IRC bots. 
c['services'] = [] ## CONFIGME: uncomment for gitlab status reporting ## Report build status back to GitLab UI #c['services'].append(reporters.GitLabStatusPush( # token=util.Secret('my-gitlab-token'), # context=util.Secret('my-buildbot-name'), # baseURL='https://gitlab.example.com', # verbose=True)) ####### PROJECT IDENTITY # the 'title' string will appear at the top of this buildbot installation's # home pages (linked to the 'titleURL'). c['title'] = "Gnu Hello GitLab" c['titleURL'] = "https://gitlab.example.com/build/" # the 'buildbotURL' string should point to the location where the buildbot's # internal web server is visible. This typically uses the port number set in # the 'www' entry below, but with an externally-visible host name which the # buildbot cannot figure out without some help. c['buildbotURL'] = "http://buildbot.example.com:8010/" # CONFIGME: uncomment for buildbot authentication ## This example tries to show nothing to anonymous users. #authz = util.Authz( # allowRules=[ # util.AnyEndpointMatcher(role="platform"), # util.AnyEndpointMatcher(role="xxend-of-listxx", defaultDeny=True), # ], # roleMatchers=[ # util.RolesFromGroups() # ] #) # minimalistic config to activate new web UI c['www'] = dict( port=8010, ## CONFIGME: uncomment for buildbot authentication #auth=util.GitLabAuth("https://gitlab.example.com", # dumbSecret('my-gitlab-appid'), # dumbSecret('my-gitlab-appsecret')), #authz=authz, change_hook_dialects=dict( gitlab={ 'secret': dumbSecret('my-webhook-token') }, ), plugins=dict(waterfall_view={}, console_view={}, grid_view={})) # Let buildbot developers know you're using gitlab support :-) c['buildbotNetUsageData'] = 'basic' ####### DB URL c['db'] = { # This specifies what database buildbot uses to store its state. # You can leave this at its default for all but the largest installations. 
'db_url': "sqlite:///state.sqlite", } buildbot-3.4.0/master/docs/examples/hello.cfg000066400000000000000000000037631413250514000211530ustar00rootroot00000000000000# -*- python -*- # ex: set syntax=python: from buildbot.plugins import * BuildmasterConfig = c = {} c['slaves'] = [buildslave.BuildSlave("bot1", "sekrit")] c['change_source'] = changes.PBChangeSource(prefix="trunk") c['builders'] = [] if True: f = util.BuildFactory() f.addStep(steps.CVS(cvsroot="/usr/home/warner/stuff/Projects/BuildBot/demo/Repository", cvsmodule="hello", mode="clobber", checkoutDelay=6, alwaysUseLatest=True)) f.addStep(steps.Configure()) f.addStep(steps.Compile()) f.addStep(steps.Test(command=["make", "check"])) b1 = { "name": "cvs-hello", "slavename": "bot1", "builddir": "cvs-hello", "factory": f } c['builders'].append(b1) if True: svnrep="file:///usr/home/warner/stuff/Projects/BuildBot/demo/SVN-Repository" f = util.BuildFactory() f.addStep(steps.SVN(repourl=svnrep+"/hello", mode="update")) f.addStep(steps.Configure()) f.addStep(steps.Compile()), f.addStep(steps.Test(command=["make", "check"])) b1 = { "name": "svn-hello", "slavename": "bot1", "builddir": "svn-hello", "factory": f } c['builders'].append(b1) if True: f = util.BuildFactory() f.addStep(steps.Darcs(repourl="http://localhost/~warner/hello-darcs", mode="copy")) f.addStep(steps.Configure(command=["/bin/sh", "./configure"])) f.addStep(steps.Compile()) f.addStep(steps.Test(command=["make", "check"])) b1 = { "name": "darcs-hello", "slavename": "bot1", "builddir": "darcs-hello", "factory": f } c['builders'].append(b1) c['title'] = "Hello" c['titleURL'] = "http://www.hello.example.com/" c['buildbotURL'] = "http://localhost:8080" c['slavePortnum'] = 8007 c['manhole'] = util.PasswordManhole(9900, "username", "password", ssh_hostkey_dir="/data/ssh_host_keys/") c['www'] = { 'port': 8080 } # vim:ft=python buildbot-3.4.0/master/docs/examples/repo_gerrit.cfg000066400000000000000000000133501413250514000223620ustar00rootroot00000000000000# 
-*- python -*- # ex: set syntax=python: from buildbot.plugins import * manifest_url = "git://github.com/CyanogenMod/android.git" manifest_branch = "froyo" slaves = ["slave%02d"%(i) for i in range(1, 2)] repotarball = "/local/android/cyanogen/cyanogen_bootstrap.tgz" gerrit_server = "review.cyanogenmod.com" gerrit_user = "yourid" build_branches = [] # for i in "passion inc hero heroc sholes dream_sapphire bravo bravoc espresso # supersonic liberty vibrant legend vision".split(" "): for i in "passion hero dream_sapphire".split(" "): build_branches.append([i, "default.xml", "froyo"]) # This is the dictionary that the buildmaster pays attention to. We also use # a shorter alias to save typing. c = BuildmasterConfig = {} ## DB URL # This specifies what database buildbot uses to store change and scheduler # state c['db_url'] = "sqlite:///state.sqlite" ## BUILDSLAVES c['slaves'] = [buildslave.BuildSlave(i, i + "pw", max_builds=1) for i in slaves] c['slavePortnum'] = 9989 ## CHANGESOURCES c['change_source'] = changes.GerritChangeSource(gerrit_server, gerrit_user) ## SCHEDULERS ## configure the Schedulers buildernames = ["%s_%s" % (board, manifest) for board, manifest, gerrit_branch in build_branches] c['schedulers'] = [] c['schedulers'].append( schedulers.SingleBranchScheduler(name="all", branch=None, treeStableTimer=2 * 60, builderNames=buildernames)) branches = {} for board, manifest, gerrit_branch in build_branches: if not gerrit_branch in branches: branches[gerrit_branch] = [] branches[gerrit_branch].append("%s_%s" % (board, manifest)) for branch in branches.keys(): print branch, branches[branch] c['schedulers'].append( schedulers.SingleBranchScheduler(name=branch, branch=branch, treeStableTimer=None, builderNames=branches[branch])) ## BUILDERS proprietary_url = "http://where.to.find.com/proprietaries/%(device)s.tgz" getOutputDir = Interpolate("/var/www/builds/build-%(prop:buildername)s-%(prop:changenumber)s") getWebDir = 
Interpolate("http://buildmaster.mysite.com/builds/build-%(prop:buildername)s-%(prop:changenumber)s") builders = [] for board, manifest_file, gerrit_branch in build_branches: f1 = util.BuildFactory() f1.workdir = "system" f1.addStep(steps.Repo(manifest_url=manifest_url, manifest_branch=manifest_branch, manifest_file=manifest_file, tarball=repotarball)) f1.addStep(steps.Compile(name="clobber old output", command="rm -rf out")) f1.addStep(steps.Compile(name="download proprietaries", command="curl \""+proprietary_url+"\" > props.tgz;" % ({'device': board}) + "tar zxvf props.tgz;" + "rm props.tgz;")) f1.addStep(steps.Compile(name="get rommanager", command="./vendor/cyanogen/get-rommanager")) buildcommand = """ set -e export LANG=C . build/envsetup.sh lunch cyanogen_%s-eng make -j4 make bacon -j4 repo manifest -o out/target/product/%s/manifest.xml """ % (board, board) f1.addStep(steps.Compile(name="compile everything", command=["/bin/bash", "-c", buildcommand])) # todo should upload result of compilation somewhere else builddir = "%s_%s" % (board, manifest_file) b1 = util.BuilderConfig(name=builddir, slavenames=slaves, builddir=builddir, factory=f1) builders.append(b1) c['builders'] = builders ## STATUS TARGETS # 'status' is a list of Status Targets. The results of each build will be # pushed to these targets. buildbot/status/*.py has a variety to choose from, # including web pages, email senders, and IRC bots. 
c['status'] = [] # The code below is valid for 0.8.x versions, and is not valid for 0.9+ if 0: authz_cfg = util.Authz( # change any of these to True to enable; see the manual for more # options gracefulShutdown=True, forceBuild=True, forceAllBuilds=True, pingBuilder=True, stopBuild=True, stopAllBuilds=True, cancelPendingBuild=True, ) c['status'].append(html.WebStatus(http_port=8010, authz=authz_cfg)) def gerritMessageCB(buildername, build, results): sep = "-------------------------------\n" message = "buildbot finished compiling your patchset\n" message += sep message += "on configuration %s\n"%(buildername) message += sep message += "the result is %s\n"%(Results[results]) message += sep message += "more details %s/builders/%s/builds/%d\n" % (c['buildbotURL'], buildername, build.getNumber()) return dict(message=message) c['status'].append(status.GerritStatusPush(gerrit_server, gerrit_user, gerritMessageCB)) ## PROJECT IDENTITY # the 'title' string will appear at the top of this buildbot # installation's html.WebStatus home page (linked to the 'titleURL') # and is embedded in the title of the waterfall HTML page. c['title'] = "froyo" c['titleURL'] = "http://review.android.com" # the 'buildbotURL' string should point to the location where the buildbot's # internal web server (usually the html.WebStatus page) is visible. This # typically uses the port number set in the Waterfall 'status' entry, but # with an externally-visible host name which the buildbot cannot figure out # without some help. c['buildbotURL'] = "http://buildbot.cyanogenmod.com" buildbot-3.4.0/master/docs/examples/twisted_master.cfg000066400000000000000000000272001413250514000230760ustar00rootroot00000000000000#! /usr/bin/python # NOTE: this configuration file is from the buildbot-0.7.5 era or earlier. It # has not been brought up-to-date with the standards of buildbot-0.7.6 . 
For # examples of modern usage, please see hello.cfg, or the sample.cfg which is # installed when you run 'buildbot create-master'. # This configuration file is described in $BUILDBOT/docs/config.xhtml # This is used (with online=True) to run the Twisted Buildbot at # http://www.twistedmatrix.com/buildbot/ . Passwords and other secret # information are loaded from a neighboring file called 'private.py'. from __future__ import absolute_import from __future__ import print_function import os.path import sys import extra_factory import private # holds passwords from buildbot.changes.pb import PBChangeSource from buildbot.process.factory import s from buildbot.process.process_twisted import FullTwistedBuildFactory from buildbot.process.process_twisted import QuickTwistedBuildFactory from buildbot.process.process_twisted import TwistedReactorsBuildFactory from buildbot.scheduler import Scheduler from buildbot.scheduler import Try_Userpass from buildbot.steps.source import SVN from extra_factory import GoodTwistedBuildFactory sys.path.append('/home/buildbot/BuildBot/support-master') reload(extra_factory) reload(private) # make it possible to change the contents without a restart BuildmasterConfig = c = {} # I set really=False when testing this configuration at home really = True usePBChangeSource = True c['slaves'] = [] for slave in private.bot_passwords.keys(): c['slaves'].append(BuildSlave(slave, private.bot_passwords[slave])) c['sources'] = [] # the Twisted buildbot currently uses the contrib/svn_buildbot.py script. # This makes a TCP connection to the ChangeMaster service to push Changes # into the build master. The script is invoked by # /svn/Twisted/hooks/post-commit, so it will only be run for things inside # the Twisted repository. However, the standard SVN practice is to put the # actual trunk in a subdirectory named "trunk/" (to leave room for # "branches/" and "tags/"). 
We want to only pay attention to the trunk, so # we use "trunk" as a prefix for the ChangeSource. This also strips off that # prefix, so that the Builders all see sensible pathnames (which means they # can do things like ignore the sandbox properly). source = PBChangeSource(prefix="trunk/") c['sources'].append(source) ## configure the builders if 0: # always build on trunk svnurl = "svn://svn.twistedmatrix.com/svn/Twisted/trunk" source_update = s(SVN, repourl=svnurl, mode="update") source_copy = s(SVN, repourl=svnurl, mode="copy") source_export = s(SVN, repourl=svnurl, mode="export") else: # for build-on-branch, we use these instead baseURL = "svn://svn.twistedmatrix.com/svn/Twisted/" defaultBranch = "trunk" source_update = s(SVN, baseURL=baseURL, defaultBranch=defaultBranch, mode="update") source_copy = s(SVN, baseURL=baseURL, defaultBranch=defaultBranch, mode="copy") source_export = s(SVN, baseURL=baseURL, defaultBranch=defaultBranch, mode="export") builders = [] b24compile_opts = [ "-Wignore::PendingDeprecationWarning:distutils.command.build_py", "-Wignore::PendingDeprecationWarning:distutils.command.build_ext", ] b25compile_opts = b24compile_opts # FIXME b1 = {'name': "quick", 'slavename': "bot1", 'builddir': "quick", 'factory': QuickTwistedBuildFactory(source_update, python=["python2.3", "python2.4"]), } builders.append(b1) b23compile_opts = [ "-Wignore::PendingDeprecationWarning:distutils.command.build_py", "-Wignore::PendingDeprecationWarning:distutils.command.build_ext", ] b23 = {'name': "debian-py2.3-select", 'slavename': "bot-exarkun", 'builddir': "full2.3", 'factory': FullTwistedBuildFactory(source_copy, python=["python2.3", "-Wall"], # use -Werror soon compileOpts=b23compile_opts, processDocs=1, runTestsRandomly=1), } builders.append(b23) b24 = {'name': "debian-py2.4-select", 'slavenames': ["bot-exarkun"], 'builddir': "full2.4", 'factory': FullTwistedBuildFactory(source_copy, python=["python2.4", "-Wall"], # use -Werror soon compileOpts=b24compile_opts, 
runTestsRandomly=1), } builders.append(b24) b24debian64 = { 'name': 'debian64-py2.4-select', 'slavenames': ['bot-idnar-debian64'], 'builddir': 'full2.4-debian64', 'factory': FullTwistedBuildFactory(source_copy, python=["python2.4", "-Wall"], compileOpts=b24compile_opts), } builders.append(b24debian64) b25debian = { 'name': 'debian-py2.5-select', 'slavenames': ['bot-idnar-debian'], 'builddir': 'full2.5-debian', 'factory': FullTwistedBuildFactory(source_copy, python=["python2.5", "-Wall"], compileOpts=b24compile_opts)} builders.append(b25debian) b25suse = { 'name': 'suse-py2.5-select', 'slavenames': ['bot-scmikes-2.5'], 'builddir': 'bot-scmikes-2.5', 'factory': FullTwistedBuildFactory(source_copy, python=["python2.5", "-Wall"], compileOpts=b24compile_opts), } builders.append(b25suse) reactors = ['poll', 'epoll', 'gtk', 'gtk2'] b4 = {'name': "debian-py2.4-reactors", 'slavename': "bot2", 'builddir': "reactors", 'factory': TwistedReactorsBuildFactory(source_copy, python="python2.4", reactors=reactors), } builders.append(b4) bosx24 = { 'name': 'osx-py2.4-select', 'slavenames': ['bot-exarkun-osx'], 'builddir': 'full2.4-exarkun-osx', 'factory': FullTwistedBuildFactory(source_copy, python=["python2.4", "-Wall"], compileOpts=b24compile_opts, runTestsRandomly=1)} builders.append(bosx24) forcegc = { 'name': 'osx-py2.4-select-gc', 'slavenames': ['bot-exarkun-osx'], 'builddir': 'full2.4-force-gc-exarkun-osx', 'factory': GoodTwistedBuildFactory(source_copy, python="python2.4")} builders.append(forcegc) # debuild is offline while we figure out how to build 2.0 .debs from SVN # b3 = {'name': "debuild", # 'slavename': "bot2", # 'builddir': "debuild", # 'factory': TwistedDebsBuildFactory(source_export, # python="python2.4"), # } # builders.append(b3) b24w32_scmikes_select = { 'name': "win32-py2.4-select", 'slavename': "bot-scmikes-win32", 'builddir': "W32-full2.4-scmikes-select", 'factory': TwistedReactorsBuildFactory(source_copy, python="python", compileOpts2=["-c","mingw32"], 
reactors=["default"]), } builders.append(b24w32_scmikes_select) b25w32_scmikes_select = { 'name': "win32-py2.5-select", 'slavename': "bot-scmikes-win32-2.5", 'builddir': "W32-full2.5-scmikes-select", 'factory': TwistedReactorsBuildFactory(source_copy, python="python", compileOpts2=["-c","mingw32"], reactors=["default"]), } builders.append(b25w32_scmikes_select) b24w32_win32er = { 'name': "win32-py2.4-er", 'slavename': "bot-win32-win32er", 'builddir': "W32-full2.4-win32er", 'factory': TwistedReactorsBuildFactory(source_copy, python="python", compileOpts2=["-c","mingw32"], reactors=["win32"]), } builders.append(b24w32_win32er) b24w32_iocp = { 'name': "win32-py2.4-iocp", 'slavename': "bot-win32-iocp", 'builddir': "W32-full2.4-iocp", 'factory': TwistedReactorsBuildFactory(source_copy, python="python", compileOpts2=[], reactors=["iocp"]), } builders.append(b24w32_iocp) b24freebsd = {'name': "freebsd-py2.4-select-kq", 'slavename': "bot-landonf", 'builddir': "freebsd-full2.4", 'factory': TwistedReactorsBuildFactory(source_copy, python="python2.4", reactors=["default", "kqueue", ]), } builders.append(b24freebsd) osxtsr = {'name': "osx-py2.4-tsr", 'slavename': "bot-exarkun-osx", 'builddir': "osx-tsr", 'factory': TwistedReactorsBuildFactory( source_copy, python="python2.4", reactors=["tsr"])} builders.append(osxtsr) bpypyc = {'name': 'osx-pypyc-select', 'slavename': 'bot-jerub-pypy', 'builddir': 'pypy-c', 'factory': TwistedReactorsBuildFactory(source_copy, python="pypy-c", reactors=["default"])} builders.append(bpypyc) c['builders'] = builders # now set up the schedulers. We do this after setting up c['builders'] so we # can auto-generate a list of all of them. 
all_builders = [b['name'] for b in c['builders']] all_builders.sort() all_builders.remove("quick") ## configure the schedulers s_quick = Scheduler(name="quick", branch=None, treeStableTimer=30, builderNames=["quick"]) s_try = Try_Userpass("try", all_builders, port=9989, userpass=private.try_users) s_all = [] for i, builderName in enumerate(all_builders): s_all.append(Scheduler(name="all-" + builderName, branch=None, builderNames=[builderName], treeStableTimer=(5 * 60 + i * 30))) c['schedulers'] = [s_quick, s_try] + s_all # configure other status things c['slavePortnum'] = 9987 c['status'] = [] if really: p = os.path.expanduser("~/.twistd-web-pb") c['status'].append(html.Waterfall(distrib_port=p)) else: c['status'].append(html.Waterfall(http_port=9988)) if really: c['status'].append(words.IRC(host="irc.freenode.net", nick='buildbot', channels=["twisted"])) #c['interlocks'] = [("do-deb", ["full-2.2"], ["debuild"])] if hasattr(private, "manhole"): from buildbot import manhole c['manhole'] = manhole.PasswordManhole(*private.manhole) m = mail.MailNotifier(fromaddr="buildbot@twistedmatrix.com", builders=["quick", "debian-py2.3-select"], sendToInterestedUsers=True, extraRecipients=["warner@lothar.com"], mode="problem", ) c['status'].append(m) c['title'] = "Twisted" c['titleURL'] = "http://twistedmatrix.com/" c['buildbotURL'] = "http://twistedmatrix.com/buildbot/" buildbot-3.4.0/master/docs/index.rst000066400000000000000000000025621413250514000174060ustar00rootroot00000000000000.. ====================================== Buildbot Documentation - |version| ====================================== This is the Buildbot documentation for Buildbot version |version|. If you are evaluating Buildbot and would like to get started quickly, start with the :doc:`Tutorial `. Regular users of Buildbot should consult the :doc:`Manual `, and those wishing to modify Buildbot directly will want to be familiar with the :doc:`Developer's Documentation `. Table Of Contents ----------------- .. 
toctree:: :maxdepth: 2 :numbered: 4 tutorial/index manual/index developer/index relnotes/index indices Copyright ========= This documentation is part of Buildbot. Buildbot is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Copyright Buildbot Team Members buildbot-3.4.0/master/docs/indices.rst000066400000000000000000000004631413250514000177130ustar00rootroot00000000000000API Indices =========== * :ref:`apiindex` * :bb:index:`cfg` * :bb:index:`sched` * :bb:index:`chsrc` * :bb:index:`step` * :bb:index:`reportgen` * :bb:index:`reporter` * :bb:index:`cmdline` * :bb:index:`msg` * :bb:index:`event` * :bb:index:`rtype` * :bb:index:`rpath` * :bb:index:`raction` * :ref:`genindex` buildbot-3.4.0/master/docs/manual/000077500000000000000000000000001413250514000170155ustar00rootroot00000000000000buildbot-3.4.0/master/docs/manual/cmdline.rst000066400000000000000000001001251413250514000211610ustar00rootroot00000000000000.. _Command-line-Tool: Command-line Tool ================= This section describes command-line tools available after buildbot installation. The two main command-line tools are :command:`buildbot` and :command:`buildbot-worker`. The former handles a Buildbot master and the former handles a Buildbot worker. Every command-line tool has a list of global options and a set of commands which have their own options. One can run these tools in the following way: .. 
code-block:: none buildbot [global options] command [command options] buildbot-worker [global options] command [command options] The ``buildbot`` command is used on the master, while ``buildbot-worker`` is used on the worker. Global options are the same for both tools which perform the following actions: --help Print general help about available commands and global options and exit. All subsequent arguments are ignored. --verbose Set verbose output. --version Print current buildbot version and exit. All subsequent arguments are ignored. You can get help on any command by specifying ``--help`` as a command option: .. code-block:: none buildbot command --help You can also use manual pages for :command:`buildbot` and :command:`buildbot-worker` for quick reference on command-line options. The remainder of this section describes each buildbot command. See :bb:index:`cmdline` for a full list. buildbot -------- The :command:`buildbot` command-line tool can be used to start or stop a buildmaster or buildbot, and to interact with a running buildmaster. Some of its subcommands are intended for buildmaster admins, while some are for developers who are editing the code that the buildbot is monitoring. Administrator Tools ~~~~~~~~~~~~~~~~~~~ The following :command:`buildbot` sub-commands are intended for buildmaster administrators: .. bb:cmdline:: create-master create-master +++++++++++++ .. code-block:: none buildbot create-master -r {BASEDIR} This creates a new directory and populates it with files that allow it to be used as a buildmaster's base directory. You will usually want to use the option `-r` option to create a relocatable :file:`buildbot.tac`. This allows you to move the master directory without editing this file. .. bb:cmdline:: upgrade-master upgrade-master ++++++++++++++ .. code-block:: none buildbot upgrade-master {BASEDIR} This upgrades a previously created buildmaster's base directory for a new version of buildbot master source code. 
This will copy the web server static files, and potentially upgrade the db. .. bb:cmdline:: start (buildbot) start +++++ .. code-block:: none buildbot start [--nodaemon] {BASEDIR} This starts a buildmaster which was already created in the given base directory. The daemon is launched in the background, with events logged to a file named :file:`twistd.log`. The option `--nodaemon` option instructs Buildbot to skip daemonizing. The process will start in the foreground. It will only return to the command-line when it is stopped. .. bb:cmdline:: restart (buildbot) restart +++++++ .. code-block:: none buildbot restart [--nodaemon] {BASEDIR} Restart the buildmaster. This is equivalent to ``stop`` followed by ``start`` The option `--nodaemon` option has the same meaning as for ``start``. .. bb:cmdline:: stop (buildbot) stop ++++ .. code-block:: none buildbot stop {BASEDIR} This terminates the daemon (either buildmaster or worker) running in the given directory. The ``--clean`` option shuts down the buildmaster cleanly. With ``--no-wait`` option ``buildbot stop`` command will send buildmaster shutdown signal and will immediately exit, not waiting for complete buildmaster shutdown. .. bb:cmdline:: sighup sighup ++++++ .. code-block:: none buildbot sighup {BASEDIR} This sends a SIGHUP to the buildmaster running in the given directory, which causes it to re-read its :file:`master.cfg` file. .. bb:cmdline:: checkconfig checkconfig +++++++++++ .. code-block:: none buildbot checkconfig {BASEDIR|CONFIG_FILE} This checks if the buildmaster configuration is well-formed and contains no deprecated or invalid elements. If no arguments are used or the base directory is passed as the argument the config file specified in :file:`buildbot.tac` is checked. If the argument is the path to a config file then it will be checked without using the :file:`buildbot.tac` file. .. bb:cmdline:: cleanupdb cleanupdb +++++++++ .. 
code-block:: none buildbot cleanupdb {BASEDIR|CONFIG_FILE} [-q] This command is frontend for various database maintenance jobs: - optimiselogs: This optimization groups logs into bigger chunks to apply higher level of compression. Developer Tools ~~~~~~~~~~~~~~~ These tools are provided for use by the developers who are working on the code that the buildbot is monitoring. .. bb:cmdline:: try try +++ This lets a developer to ask the question ``What would happen if I committed this patch right now?``. It runs the unit test suite (across multiple build platforms) on the developer's current code, allowing them to make sure they will not break the tree when they finally commit their changes. The ``buildbot try`` command is meant to be run from within a developer's local tree, and starts by figuring out the base revision of that tree (what revision was current the last time the tree was updated), and a patch that can be applied to that revision of the tree to make it match the developer's copy. This ``(revision, patch)`` pair is then sent to the buildmaster, which runs a build with that :class:`SourceStamp`. If you want, the tool will emit status messages as the builds run, and will not terminate until the first failure has been detected (or the last success). There is an alternate form which accepts a pre-made patch file (typically the output of a command like :command:`svn diff`). This ``--diff`` form does not require a local tree to run from. See :ref:`try--diff` concerning the ``--diff`` command option. For this command to work, several pieces must be in place: the :bb:sched:`Try_Jobdir` or ::bb:sched:`Try_Userpass`, as well as some client-side configuration. Locating the master ################### The :command:`try` command needs to be told how to connect to the try scheduler, and must know which of the authentication approaches described above is in use by the buildmaster. 
You specify the approach by using ``--connect=ssh`` or ``--connect=pb`` (or ``try_connect = 'ssh'`` or ``try_connect = 'pb'`` in :file:`.buildbot/options`). For the PB approach, the command must be given a option `--master` argument (in the form :samp:`{HOST}:{PORT}`) that points to TCP port that you picked in the :class:`Try_Userpass` scheduler. It also takes a option `--username` and option `--passwd` pair of arguments that match one of the entries in the buildmaster's ``userpass`` list. These arguments can also be provided as ``try_master``, ``try_username``, and ``try_password`` entries in the :file:`.buildbot/options` file. For the SSH approach, the command must be given option `--host` and option `--username`, to get to the buildmaster host. It must also be given option `--jobdir`, which points to the inlet directory configured above. The jobdir can be relative to the user's home directory, but most of the time you will use an explicit path like :file:`~buildbot/project/trydir`. These arguments can be provided in :file:`.buildbot/options` as ``try_host``, ``try_username``, ``try_password``, and ``try_jobdir``. If you need to use something different from the default ``ssh`` command for connecting to the remote system, you can use `--ssh` command line option or ``try_ssh`` in the configuration file. The SSH approach also provides a option `--buildbotbin` argument to allow specification of the buildbot binary to run on the buildmaster. This is useful in the case where buildbot is installed in a :ref:`virtualenv ` on the buildmaster host, or in other circumstances where the buildbot command is not on the path of the user given by option `--username`. 
The option `--buildbotbin` argument can be provided in :file:`.buildbot/options` as ``try_buildbotbin`` The following command line arguments are deprecated, but retained for backward compatibility: --tryhost is replaced by option `--host` --trydir is replaced by option `--jobdir` --master is replaced by option `--masterstatus` Likewise, the following :file:`.buildbot/options` file entries are deprecated, but retained for backward compatibility: * ``try_dir`` is replaced by ``try_jobdir`` * ``masterstatus`` is replaced by ``try_masterstatus`` Waiting for results ################### If you provide the option `--wait` option (or ``try_wait = True`` in :file:`.buildbot/options`), the ``buildbot try`` command will wait until your changes have either been proven good or bad before exiting. Unless you use the option `--quiet` option (or ``try_quiet=True``), it will emit a progress message every 60 seconds until the builds have completed. The SSH connection method does not support waiting for results. Choosing the Builders ##################### A trial build is performed on multiple Builders at the same time, and the developer gets to choose which Builders are used (limited to a set selected by the buildmaster admin with the :class:`TryScheduler`'s ``builderNames=`` argument). The set you choose will depend upon what your goals are: if you are concerned about cross-platform compatibility, you should use multiple Builders, one from each platform of interest. You might use just one builder if that platform has libraries or other facilities that allow better test coverage than what you can accomplish on your own machine, or faster test runs. The set of Builders to use can be specified with multiple option `--builder` arguments on the command line. It can also be specified with a single ``try_builders`` option in :file:`.buildbot/options` that uses a list of strings to specify all the Builder names: .. 
code-block:: python try_builders = ["full-OSX", "full-win32", "full-linux"] If you are using the PB approach, you can get the names of the builders that are configured for the try scheduler using the ``get-builder-names`` argument: .. code-block:: bash buildbot try --get-builder-names --connect=pb --master=... --username=... --passwd=... Specifying the VC system ######################## The :command:`try` command also needs to know how to take the developer's current tree and extract the (revision, patch) source-stamp pair. Each VC system uses a different process, so you start by telling the :command:`try` command which VC system you are using, with an argument like option `--vc=cvs` or option `--vc=git`. This can also be provided as ``try_vc`` in :file:`.buildbot/options`. .. The order of this list comes from the end of scripts/tryclient.py The following names are recognized: ``bzr`` ``cvs`` ``darcs`` ``hg`` ``git`` ``mtn`` ``p4`` ``svn`` Finding the top of the tree ########################### Some VC systems (notably CVS and SVN) track each directory more-or-less independently, which means the :command:`try` command needs to move up to the top of the project tree before it will be able to construct a proper full-tree patch. To accomplish this, the :command:`try` command will crawl up through the parent directories until it finds a marker file. The default name for this marker file is :file:`.buildbot-top`, so when you are using CVS or SVN you should ``touch .buildbot-top`` from the top of your tree before running :command:`buildbot try`. Alternatively, you can use a filename like :file:`ChangeLog` or :file:`README`, since many projects put one of these files in their top-most directory (and nowhere else). To set this filename, use ``--topfile=ChangeLog``, or set it in the options file with ``try_topfile = 'ChangeLog'``. You can also manually set the top of the tree with ``--topdir=~/trees/mytree``, or ``try_topdir = '~/trees/mytree'``. 
If you use ``try_topdir``, in a :file:`.buildbot/options` file, you will need a separate options file for each tree you use, so it may be more convenient to use the ``try_topfile`` approach instead. Other VC systems which work on full projects instead of individual directories (Darcs, Mercurial, Git, Monotone) do not require :command:`try` to know the top directory, so the option `--try-topfile` and option `--try-topdir` arguments will be ignored. If the :command:`try` command cannot find the top directory, it will abort with an error message. The following command line arguments are deprecated, but retained for backward compatibility: * ``--try-topdir`` is replaced by option `--topdir` * ``--try-topfile`` is replaced by option `--topfile` Determining the branch name ########################### Some VC systems record the branch information in a way that ``try`` can locate it. For the others, if you are using something other than the default branch, you will have to tell the buildbot which branch your tree is using. You can do this with either the option `--branch` argument, or a ``try_branch`` entry in the :file:`.buildbot/options` file. Determining the revision and patch ################################## Each VC system has a separate approach for determining the tree's base revision and computing a patch. CVS :command:`try` pretends that the tree is up to date. It converts the current time into a option `-D` time specification, uses it as the base revision, and computes the diff between the upstream tree as of that point in time versus the current contents. This works, more or less, but requires that the local clock be in reasonably good sync with the repository. SVN :command:`try` does a :command:`svn status -u` to find the latest repository revision number (emitted on the last line in the :samp:`Status against revision: {NN}` message). 
It then performs an :samp:`svn diff -r{NN}` to find out how your tree differs from the repository version, and sends the resulting patch to the buildmaster. If your tree is not up to date, this will result in the ``try`` tree being created with the latest revision, then *backwards* patches applied to bring it ``back`` to the version you actually checked out (plus your actual code changes), but this will still result in the correct tree being used for the build. bzr :command:`try` does a ``bzr revision-info`` to find the base revision, then a ``bzr diff -r$base..`` to obtain the patch. Mercurial ``hg parents --template '{node}\n'`` emits the full revision id (as opposed to the common 12-char truncated) which is a SHA1 hash of the current revision's contents. This is used as the base revision. ``hg diff`` then provides the patch relative to that revision. For :command:`try` to work, your working directory must only have patches that are available from the same remotely-available repository that the build process' ``source.Mercurial`` will use. Perforce :command:`try` does a ``p4 changes -m1 ...`` to determine the latest changelist and implicitly assumes that the local tree is synced to this revision. This is followed by a ``p4 diff -du`` to obtain the patch. A p4 patch differs slightly from a normal diff. It contains full depot paths and must be converted to paths relative to the branch top. To convert the following restriction is imposed. The p4base (see :bb:chsrc:`P4Source`) is assumed to be ``//depot`` Darcs :command:`try` does a ``darcs changes --context`` to find the list of all patches back to and including the last tag that was made. This text file (plus the location of a repository that contains all these patches) is sufficient to re-create the tree. Therefore the contents of this ``context`` file *are* the revision stamp for a Darcs-controlled source tree. It then does a ``darcs diff -u`` to compute the patch relative to that revision. 
Git ``git branch -v`` lists all the branches available in the local repository along with the revision ID it points to and a short summary of the last commit. The line containing the currently checked out branch begins with "\* " (star and space) while all the others start with " " (two spaces). :command:`try` scans for this line and extracts the branch name and revision from it. Then it generates a diff against the base revision. .. The spaces in the previous 2 literals are non-breakable spaces   .. todo:: I'm not sure if this actually works the way it's intended since the extracted base revision might not actually exist in the upstream repository. Perhaps we need to add a --remote option to specify the remote tracking branch to generate a diff against. Monotone :command:`mtn automate get_base_revision_id` emits the full revision id which is a SHA1 hash of the current revision's contents. This is used as the base revision. :command:`mtn diff` then provides the patch relative to that revision. For :command:`try` to work, your working directory must only have patches that are available from the same remotely-available repository that the build process' :class:`source.Monotone` will use. patch information ################# You can provide the option `--who=dev` to designate who is running the try build. This will add the ``dev`` to the Reason field on the try build's status web page. You can also set ``try_who = dev`` in the :file:`.buildbot/options` file. Note that option `--who=dev` will not work on version 0.8.3 or earlier masters. Similarly, option `--comment=COMMENT` will specify the comment for the patch, which is also displayed in the patch information. The corresponding config-file option is ``try_comment``. 
Sending properties ################## You can set properties to send with your change using either the option `--property=key=value` option, which sets a single property, or the option `--properties=key1=value1,key2=value2...` option, which sets multiple comma-separated properties. Either of these can be specified multiple times. Note that the option `--properties` option uses commas to split on properties, so if your property value itself contains a comma, you'll need to use the option `--property` option to set it. .. _try--diff: try --diff ++++++++++ Sometimes you might have a patch from someone else that you want to submit to the buildbot. For example, a user may have created a patch to fix some specific bug and sent it to you by email. You've inspected the patch and suspect that it might do the job (and have at least confirmed that it doesn't do anything evil). Now you want to test it out. One approach would be to check out a new local tree, apply the patch, run your local tests, then use ``buildbot try`` to run the tests on other platforms. An alternate approach is to use the ``buildbot try --diff`` form to have the buildbot test the patch without using a local tree. This form takes a option `--diff` argument which points to a file that contains the patch you want to apply. By default this patch will be applied to the TRUNK revision, but if you give the optional option `--baserev` argument, a tree of the given revision will be used as a starting point instead of TRUNK. You can also use ``buildbot try --diff=-`` to read the patch from :file:`stdin`. Each patch has a ``patchlevel`` associated with it. This indicates the number of slashes (and preceding pathnames) that should be stripped before applying the diff. This exactly corresponds to the option `-p` or option `--strip` argument to the :command:`patch` utility. By default ``buildbot try --diff`` uses a patchlevel of 0, but you can override this with the option `-p` argument. 
When you use option `--diff`, you do not need to use any of the other options that relate to a local tree, specifically option `--vc`, option `--try-topfile`, or option `--try-topdir`. These options will be ignored. Of course you must still specify how to get to the buildmaster (with option `--connect`, option `--tryhost`, etc). Other Tools ~~~~~~~~~~~ These tools are generally used by buildmaster administrators. .. bb:cmdline:: sendchange sendchange ++++++++++ This command is used to tell the buildmaster about source changes. It is intended to be used from within a commit script, installed on the VC server. It requires that you have a :class:`PBChangeSource` (:bb:chsrc:`PBChangeSource`) running in the buildmaster (by being set in ``c['change_source']``). .. code-block:: none buildbot sendchange --master {MASTERHOST}:{PORT} --auth {USER}:{PASS} --who {USER} {FILENAMES..} The option `--auth` option specifies the credentials to use to connect to the master, in the form ``user:pass``. If the password is omitted, then sendchange will prompt for it. If both are omitted, the old default (username "change" and password "changepw") will be used. Note that this password is well-known, and should not be used on an internet-accessible port. The option `--master` and option `--username` arguments can also be given in the options file (see :ref:`buildbot-config-directory`). There are other (optional) arguments which can influence the ``Change`` that gets submitted: --branch (or option ``branch``) This provides the (string) branch specifier. If omitted, it defaults to ``None``, indicating the ``default branch``. All files included in this Change must be on the same branch. --category (or option ``category``) This provides the (string) category specifier. If omitted, it defaults to ``None``, indicating ``no category``. The category property can be used by schedulers to filter what changes they listen to. 
--project (or option ``project``) This provides the (string) project to which this change applies, and defaults to ''. The project can be used by schedulers to decide which builders should respond to a particular change. --repository (or option ``repository``) This provides the repository from which this change came, and defaults to ``''``. --revision This provides a revision specifier, appropriate to the VC system in use. --revision_file This provides a filename which will be opened and the contents used as the revision specifier. This is specifically for Darcs, which uses the output of ``darcs changes --context`` as a revision specifier. This context file can be a couple of kilobytes long, spanning a couple lines per patch, and would be a hassle to pass as a command-line argument. --property This parameter is used to set a property on the :class:`Change` generated by ``sendchange``. Properties are specified as a :samp:`{name}:{value}` pair, separated by a colon. You may specify many properties by passing this parameter multiple times. --comments This provides the change comments as a single argument. You may want to use option `--logfile` instead. --logfile This instructs the tool to read the change comments from the given file. If you use ``-`` as the filename, the tool will read the change comments from stdin. --encoding Specifies the character encoding for all other parameters, defaulting to ``'utf8'``. --vc Specifies which VC system the Change is coming from, one of: ``cvs``, ``svn``, ``darcs``, ``hg``, ``bzr``, ``git``, ``mtn``, or ``p4``. Defaults to ``None``. .. bb:cmdline:: user user ++++ Note that in order to use this command, you need to configure a `CommandlineUserManager` instance in your `master.cfg` file, which is explained in :ref:`Users-Options`. This command allows you to manage users in buildbot's database. No extra requirements are needed to use this command, aside from the Buildmaster running. 
For details on how Buildbot manages users, see :ref:`Concepts-Users`. --master The :command:`user` command can be run virtually anywhere provided a location of the running buildmaster. The option `--master` argument is of the form :samp:`{MASTERHOST}:{PORT}`. --username PB connection authentication that should match the arguments to `CommandlineUserManager`. --passwd PB connection authentication that should match the arguments to `CommandlineUserManager`. --op There are four supported values for the option `--op` argument: ``add``, ``update``, ``remove``, and ``get``. Each are described in full in the following sections. --bb_username Used with the option `--op=update` option, this sets the user's username for web authentication in the database. It requires option `--bb_password` to be set along with it. --bb_password Also used with the option `--op=update` option, this sets the password portion of a user's web authentication credentials into the database. The password is first encrypted prior to storage for security reasons. --ids When working with users, you need to be able to refer to them by unique identifiers to find particular users in the database. The option `--ids` option lets you specify a comma separated list of these identifiers for use with the :command:`user` command. The option `--ids` option is used only when using option `--op=remove` or option `--op=get`. --info Users are known in buildbot as a collection of attributes tied together by some unique identifier (see :ref:`Concepts-Users`). These attributes are specified in the form ``{TYPE}={VALUE}`` when using the option `--info` option. These ``{TYPE}={VALUE}`` pairs are specified in a comma separated list, so for example: .. code-block:: none --info=svn=jdoe,git='John Doe ' The option `--info` option can be specified multiple times in the :command:`user` command, as each specified option will be interpreted as a new user. 
Note that option `--info` is only used with option `--op=add` or with option `--op=update`, and whenever you use option `--op=update` you need to specify the identifier of the user you want to update. This is done by prepending the option `--info` arguments with ``{ID:}``. If we were to update ``'jschmo'`` from the previous example, it would look like this: .. code-block:: none --info=jdoe:git='Joe Doe ' Note that option `--master`, option `--username`, option `--passwd`, and option `--op` are always required to issue the :command:`user` command. The option `--master`, option `--username`, and option `--passwd` options can be specified in the option file with keywords ``user_master``, ``user_username``, and ``user_passwd``, respectively. If ``user_master`` is not specified, then option `--master` from the options file will be used instead. Below are examples of how each command should look. Whenever a :command:`user` command is successful, results will be shown to whoever issued the command. For option `--op=add`: .. code-block:: none buildbot user --master={MASTERHOST} --op=add \ --username={USER} --passwd={USERPW} \ --info={TYPE}={VALUE},... For option `--op=update`: .. code-block:: none buildbot user --master={MASTERHOST} --op=update \ --username={USER} --passwd={USERPW} \ --info={ID}:{TYPE}={VALUE},... For option `--op=remove`: .. code-block:: none buildbot user --master={MASTERHOST} --op=remove \ --username={USER} --passwd={USERPW} \ --ids={ID1},{ID2},... For option `--op=get`: .. code-block:: none buildbot user --master={MASTERHOST} --op=get \ --username={USER} --passwd={USERPW} \ --ids={ID1},{ID2},... A note on option `--op=update`: when updating the option `--bb_username` and option `--bb_password`, the option `--info` doesn't need to have additional ``{TYPE}={VALUE}`` pairs to update and can just take the ``{ID}`` portion. .. 
_buildbot-config-directory: :file:`.buildbot` config directory ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Many of the :command:`buildbot` tools must be told how to contact the buildmaster that they interact with. This specification can be provided as a command-line argument, but most of the time it will be easier to set them in an ``options`` file. The :command:`buildbot` command will look for a special directory named :file:`.buildbot`, starting from the current directory (where the command was run) and crawling upwards, eventually looking in the user's home directory. It will look for a file named :file:`options` in this directory, and will evaluate it as a Python script, looking for certain names to be set. You can just put simple ``name = 'value'`` pairs in this file to set the options. For a description of the names used in this file, please see the documentation for the individual :command:`buildbot` sub-commands. The following is a brief sample of what this file's contents could be. .. code-block:: none # for status-reading tools masterstatus = 'buildbot.example.org:12345' # for 'sendchange' or the debug port master = 'buildbot.example.org:18990' Note carefully that the names in the :file:`options` file usually do not match the command-line option name. ``master`` Equivalent to option `--master` for :bb:cmdline:`sendchange`. It is the location of the :class:`pb.PBChangeSource` for ``sendchange``. ``username`` Equivalent to option `--username` for the :bb:cmdline:`sendchange` command. ``branch`` Equivalent to option `--branch` for the :bb:cmdline:`sendchange` command. ``category`` Equivalent to option `--category` for the :bb:cmdline:`sendchange` command. ``try_connect`` Equivalent to option `--connect`, this specifies how the :bb:cmdline:`try` command should deliver its request to the buildmaster. The currently accepted values are ``ssh`` and ``pb``. 
``try_builders`` Equivalent to option `--builders`, specifies which builders should be used for the :bb:cmdline:`try` build. ``try_vc`` Equivalent to option `--vc` for :bb:cmdline:`try`, this specifies the version control system being used. ``try_branch`` Equivalent to option `--branch`, this indicates that the current tree is on a non-trunk branch. ``try_topdir`` ``try_topfile`` Use ``try_topdir``, equivalent to option `--try-topdir`, to explicitly indicate the top of your working tree, or ``try_topfile``, equivalent to option `--try-topfile` to name a file that will only be found in that top-most directory. ``try_host`` ``try_username`` ``try_dir`` When ``try_connect`` is ``ssh``, the command will use ``try_host`` for option `--tryhost`, ``try_username`` for option `--username`, and ``try_dir`` for option `--trydir`. Apologies for the confusing presence and absence of 'try'. ``try_username`` ``try_password`` ``try_master`` Similarly, when ``try_connect`` is ``pb``, the command will pay attention to ``try_username`` for option `--username`, ``try_password`` for option `--passwd`, and ``try_master`` for option `--master`. ``try_wait`` ``masterstatus`` ``try_wait`` and ``masterstatus`` (equivalent to option `--wait` and ``master``, respectively) are used to ask the :bb:cmdline:`try` command to wait for the requested build to complete. buildbot-worker --------------- :command:`buildbot-worker` command-line tool is used for worker management only and does not provide any additional functionality. One can create, start, stop and restart the worker. .. bb:cmdline:: create-worker create-worker ~~~~~~~~~~~~~ This creates a new directory and populates it with files that let it be used as a worker's base directory. You must provide several arguments, which are used to create the initial :file:`buildbot.tac` file. The option `-r` option is advisable here, just like for ``create-master``. .. 
code-block:: none buildbot-worker create-worker -r {BASEDIR} {MASTERHOST}:{PORT} {WORKERNAME} {PASSWORD} The create-worker options are described in :ref:`Worker-Options`. .. bb:cmdline:: start (worker) start ~~~~~ This starts a worker which was already created in the given base directory. The daemon is launched in the background, with events logged to a file named :file:`twistd.log`. .. code-block:: none buildbot-worker start [--nodaemon] BASEDIR The option `--nodaemon` option instructs Buildbot to skip daemonizing. The process will start in the foreground. It will only return to the command-line when it is stopped. .. bb:cmdline:: restart (worker) restart ~~~~~~~ .. code-block:: none buildbot-worker restart [--nodaemon] BASEDIR This restarts a worker which is already running. It is equivalent to a ``stop`` followed by a ``start``. The option `--nodaemon` option has the same meaning as for ``start``. .. bb:cmdline:: stop (worker) stop ~~~~ This terminates the daemon worker running in the given directory. .. code-block:: none buildbot-worker stop BASEDIR buildbot-3.4.0/master/docs/manual/concepts.rst000066400000000000000000000442721413250514000213740ustar00rootroot00000000000000.. _Concepts: Concepts ======== This chapter defines some of the basic concepts that Buildbot uses. You'll need to understand how Buildbot sees the world to configure it properly. .. index: repository .. index: codebase .. index: project .. index: revision .. index: branch .. index: source stamp .. _Source-Stamps: Source identification --------------------- The following concepts are used within Buildbot to describe source code that is being built: Repository A repository is a location where files tracked by a version control system reside. Usually, it is identified by a URL or a location on a disk. It contains a subset of the history of a codebase. Codebase A codebase is a collection of related files and their history tracked as a unit by version control systems. 
The files and their history are stored in one or more repositories. For example, the primary repository for the Buildbot codebase is at ``https://github.com/buildbot/buildbot/``. There are also more than a thousand forks of Buildbot. These repositories, while storing potentially very old versions of Buildbot code, still contain the same codebase. Project A project is a set of one or more codebases that together may be built and produce some end artifact. For example, an application may be composed of two codebases - one for the code and one for the test data, the latter of which occupies a lot of space. Building and testing such an application requires acquiring code from both codebases. Revision: A revision is a textual identifier used by most version control systems to uniquely specify a particular version of the source code in a particular codebase. Source stamp: A source stamp is a collection of information needed to identify a particular version of code on a certain codebase. In most version control systems, source stamps only store a revision. On other version control systems, a branch is also required. Source stamp set: A source stamp set is a set of source stamps to identify a particular version of code on a certain project. Like a project is a collection of codebases, a source stamp set is a collection of source stamps, one for each codebase within a project. In order to build a project, Buildbot only needs to know a source stamp set corresponding to that project. This source stamp set has a source stamp for each codebase comprising the project. In turn, each source stamp has enough information to identify a particular version of the code within the codebase. .. image:: ../_images/changes.* :alt: Source Stamp Sets .. _Concepts-Change-Source: Change sources -------------- Change sources are user-configurable components that interact with external version control systems and retrieve new code. 
Internally, new code is represented as :ref:`Changes ` which roughly correspond to a single commit or changeset. The changes are sent to the schedulers which then decide whether new builds should be created for these new code changes. The design of Buildbot requires the workers to have their own copies of the source code, thus a change source is an optional component as long as there are no schedulers that create new builds based on new code commit events. .. index: change .. _Concept-Change: Changes ------- A :ref:`Change` is an abstract way Buildbot uses to represent a single change to the source files, performed by a developer. In version control systems that support the notion of atomic check-ins, a change represents a changeset or commit. Changes are used for the :ref:`Change sources` to communicate with :ref:`Schedulers `. A :class:`Change` comprises the following information: - the developer who is responsible for the change - the list of files that the change added, removed or modified - the message of the commit - the repository, the codebase and the project that the change corresponds to - the revision and the branch of the commit .. _Concepts-Scheduler: Schedulers ---------- A scheduler is a component that decides when to start a build. The decision could be based on time, on new code being committed or on similar events. Schedulers are responsible for creating :ref:`Build Requests` which identify a request to start a build on a specific version of the source code. Each Buildmaster has a set of scheduler objects, each of which gets a copy of every incoming :class:`Change`. The Schedulers are responsible for deciding when :class:`Build`\s should be run. Some Buildbot installations might have a single scheduler, while others may have several, each for a different purpose. .. _Concepts-Build-Request: BuildRequests ------------- A :class:`BuildRequest` is a request to start a specific build. 
A :class:`BuildRequest` consists of the following information: - the name of the :class:`Builder` (see below) that will perform the build. - the set of :class:`SourceStamp`\s (see above) that specify the version of the source tree to build and/or test. Two build requests representing the same version of the source code and the same builder may be merged. The user may configure additional restrictions for determining mergeability of build requests. .. _Concepts-Builder: .. _Concepts-Build-Factories: Builders and Build Factories ---------------------------- A :class:`Builder` is responsible for creating new builds from :class:`BuildRequest`\s. Creating a new build is essentially determining the following properties of the subsequent build: - the exact :ref:`steps ` a build will execute - the :ref:`workers ` that the build may run on The sequence of steps to run is performed by user-configurable :class:`BuildFactory` that is attached to each :class:`Builder` by the user. A :class:`Builder` will attempt to create a :class:`Build` from a :class:`BuildRequest` as soon as it is possible, that is, as soon as the associated worker becomes free. When a worker becomes free, the build master will select the oldest :class:`BuildRequest` that can run on that worker and notify the corresponding :class:`Builder` to maybe start a build out of it. Each :class:`Builder` by default runs completely independently. This means, that a worker that has N builders attached to it, may potentially attempt to run N builds concurrently. This level of concurrency may be controlled by various kinds of :ref:`Interlocks`. At a low level, each builder has its own exclusive directory on the build master and one exclusive directory on each of the workers it is attached to. The directory on the master is used for keeping status information. The directories on the workers are used as a location where the actual checkout, compilation and testing steps happen. .. _Concepts-Build: .. 
_Concepts-Step: Builds ------ A :class:`Build` represents a single compile or test run of a particular version of a source code. A build is comprised of a series of steps. The steps may be arbitrary. For example, for compiled software a build generally consists of the checkout, configure, make, and make check sequence. For interpreted projects like Python modules, a build is generally a checkout followed by an invocation of the bundled test suite. Builds are created by instances of :class:`Builder` (see above). .. _Concepts-BuildSet: BuildSets --------- A :class:`BuildSet` represents a set of potentially not yet created :class:`Build`\s that all compile and/or test the same version of the source tree. It tracks whether this set of builds as a whole succeeded or not. The information that is stored in a BuildSet is a set of :class:`SourceStamp`\s which define the version of the code to test and a set of :class:`Builder`\s which define what builds to create. .. _Concepts-Worker: Workers ------- A :class:`Worker` corresponds to an environment where builds are executed. A single physical machine must run at least one :class:`Worker` in order for Buildbot to be able to utilize it for running builds. Multiple :class:`Worker`\s may run on a single machine to provide different environments that can reuse the same hardware by means of containers or virtual machines. Each builder is associated with one or more :class:`Worker`\s. For example, a builder which is used to perform macOS builds (as opposed to Linux or Windows builds) should naturally be associated with a Mac worker. If multiple workers are available for any given builder, you will have some measure of redundancy: in case one worker goes offline, the others can still keep the :class:`Builder` working. In addition, multiple workers will allow multiple simultaneous builds for the same :class:`Builder`, which might be useful if you have a lot of forced or ``try`` builds taking place. 
Ideally, each :class:`Worker` that is configured for a builder should be identical. Otherwise build or test failures will be dependent on which worker the build is run and this will complicate investigations of failures. .. _Concepts-Users: Users ----- Buildbot has a somewhat limited awareness of *users*. It assumes the world consists of a set of developers, each of whom can be described by a couple of simple attributes. These developers make changes to the source code, causing builds which may succeed or fail. Users also may have different levels of authorization when issuing Buildbot commands, such as forcing a build from the web interface or from an IRC channel. Each developer is primarily known through the source control system. Each :class:`Change` object that arrives is tagged with a :attr:`who` field that typically gives the account name (on the repository machine) of the user responsible for that change. This string is displayed on the HTML status pages and in each :class:`Build`\'s *blamelist*. To do more with the User than just refer to them, this username needs to be mapped into an address of some sort. The responsibility for this mapping is left up to the status module which needs the address. In the future, the responsibility for managing users will be transferred to User Objects. The ``who`` fields in ``git`` Changes are used to create :ref:`User-Objects`, which allows for more control and flexibility in how Buildbot manages users. .. _User-Objects: User Objects ~~~~~~~~~~~~ User Objects allow Buildbot to better manage users throughout its various interactions with users (see :ref:`Change-Sources` and :ref:`Reporters`). The User Objects are stored in the Buildbot database and correlate the various attributes that a user might have: irc, Git, etc. Changes +++++++ Incoming Changes all have a ``who`` attribute attached to them that specifies which developer is responsible for that Change. 
When a Change is first rendered, the ``who`` attribute is parsed and added to the database, if it doesn't exist, or checked against an existing user. The ``who`` attribute is formatted in different ways depending on the version control system that the Change came from. ``git`` ``who`` attributes take the form ``Full Name ``. ``svn`` ``who`` attributes are of the form ``Username``. ``hg`` ``who`` attributes are free-form strings, but usually adhere to similar conventions as ``git`` attributes (``Full Name ``). ``cvs`` ``who`` attributes are of the form ``Username``. ``darcs`` ``who`` attributes contain an ``Email`` and may also include a ``Full Name`` like ``git`` attributes. ``bzr`` ``who`` attributes are free-form strings like ``hg``, and can include a ``Username``, ``Email``, and/or ``Full Name``. Tools +++++ For managing users manually, use the ``buildbot user`` command, which allows you to add, remove, update, and show various attributes of users in the Buildbot database (see :ref:`Command-line-Tool`). Uses ++++ Correlating the various bits and pieces that Buildbot views as users also means that one attribute of a user can be translated into another. This provides a more complete view of users throughout Buildbot. One such use is being able to find email addresses based on a set of Builds to notify users through the ``MailNotifier``. This process is explained more clearly in :ref:`Email-Addresses`. Another way to utilize `User Objects` is through `UsersAuth` for web authentication. To use `UsersAuth`, you need to set a `bb_username` and `bb_password` via the ``buildbot user`` command line tool to check against. The password will be encrypted before it gets stored in the database along with other user attributes. .. _Doing-Things-With-Users: Doing Things With Users ~~~~~~~~~~~~~~~~~~~~~~~ Each change has a single user who is responsible for it. 
Most builds have a set of changes: the build generally represents the first time these changes have been built and tested by the Buildbot. The build has a *blamelist* that is the union of the users responsible for all of the build's changes. If the build was created by a :ref:`Try-Schedulers` this list will include the submitter of the try job if known. The build provides a list of users who are interested in the build -- the *interested users*. Usually this is equal to the blamelist, but may also be expanded, e.g., to include the current build sherrif or a module's maintainer. If desired, buildbot can notify the interested users until the problem is resolved. .. _Email-Addresses: Email Addresses ~~~~~~~~~~~~~~~ The :bb:reporter:`MailNotifier` is a status target which can send emails about the results of each build. It accepts a static list of email addresses to which each message should be delivered, but it can also be configured to send emails to a :class:`Build`\'s Interested Users. To do this, it needs a way to convert User names into email addresses. For many VCSs, the User name is actually an account name on the system which hosts the repository. As such, turning the name into an email address is simply a matter of appending ``@repositoryhost.com``. Some projects use other kinds of mappings (for example the preferred email address may be at ``project.org``, despite the repository host being named ``cvs.project.org``), and some VCSs have full separation between the concept of a user and that of an account on the repository host (like Perforce). Some systems (like Git) put a full contact email address in every change. To convert these names to addresses, the :class:`MailNotifier` uses an :class:`EmailLookup` object. This provides a :meth:`getAddress` method which accepts a name and (eventually) returns an address. 
The default :class:`MailNotifier` module provides an :class:`EmailLookup` which simply appends a static string, configurable when the notifier is created. To create more complex behaviors (perhaps using an LDAP lookup, or using ``finger`` on a central host to determine a preferred address for the developer), provide a different object as the ``lookup`` argument. If an EmailLookup object isn't given to the MailNotifier, the MailNotifier will try to find emails through :ref:`User-Objects`. If every user in the Build's Interested Users list has an email in the database for them, this will work the same as if an EmailLookup object was used. If a user whose change led to a Build doesn't have an email attribute, that user will not receive an email. If ``extraRecipients`` is given, those users still get an email when the EmailLookup object is not specified. In the future, when the Problem mechanism has been set up, Buildbot will need to send emails to arbitrary Users. It will do this by locating a :class:`MailNotifier`\-like object among all the buildmaster's status targets, and asking it to send messages to various Users. This means the User-to-address mapping only has to be set up once, in your :class:`MailNotifier`, and every email message buildbot emits will take advantage of it. .. _IRC-Nicknames: IRC Nicknames ~~~~~~~~~~~~~ Like :class:`MailNotifier`, the :class:`buildbot.reporters.irc.IRC` class provides a status target which can announce the results of each build. It also provides an interactive interface by responding to online queries posted in the channel or sent as private messages. In the future, buildbot can be configured to map User names to IRC nicknames, to watch for the recent presence of these nicknames, and to deliver build status messages to the interested parties. Like :class:`MailNotifier` does for email addresses, the :class:`IRC` object will have an :class:`IRCLookup` which is responsible for nicknames. 
The mapping can be set up statically, or it can be updated by online users themselves (by claiming a username with some kind of ``buildbot: i am user warner`` commands). Once the mapping is established, buildbot can then ask the :class:`IRC` object to send messages to various users. It can report on the likelihood that the user saw the given message (based upon how long the user has been inactive on the channel), which might prompt the Problem Hassler logic to send them an email message instead. These operations and authentication of commands issued by particular nicknames will be implemented in :ref:`User-Objects`. .. index:: Properties .. _Build-Properties: Build Properties ---------------- Each build has a set of *Build Properties*, which can be used by its build steps to modify their actions. The properties are represented as a set of key-value pairs. Effectively, a single property is a variable that, once set, can be used by subsequent steps in a build to modify their behaviour. The value of a property can be a number, a string, a list or a dictionary. Lists and dictionaries can contain other lists or dictionaries. Thus, the value of a property could be any arbitrarily complex structure. Properties work pretty much like variables, so they can be used to implement all manner of functionality. The following are a couple of examples: - By default, the name of the worker that runs the build is set to the ``workername`` property. If there are multiple different workers and the actions of the build depend on the exact worker, some users may decide that it's more convenient to vary the actions depending on the ``workername`` property instead of creating separate builders for each worker. - In most cases, the build does not know the exact code revision that will be tested until it checks out the code. This information is only known after a :ref:`source step ` runs. 
To give this information to the subsequent steps, the source step records the checked out revision into the ``got_revision`` property. buildbot-3.4.0/master/docs/manual/configuration/000077500000000000000000000000001413250514000216645ustar00rootroot00000000000000buildbot-3.4.0/master/docs/manual/configuration/builders.rst000066400000000000000000000264571413250514000242450ustar00rootroot00000000000000.. -*- rst -*- .. bb:cfg:: builders .. _Builder-Configuration: Builder Configuration --------------------- .. contents:: :depth: 1 :local: The :bb:cfg:`builders` configuration key is a list of objects holding the configuration of the Builders. For more information on the Builders' function in Buildbot, see :ref:`the Concepts chapter `. The class definition for the builder configuration is in :file:`buildbot.config`. However, there is a simpler way to use it and it looks like this: .. code-block:: python from buildbot.plugins import util c['builders'] = [ util.BuilderConfig(name='quick', workernames=['bot1', 'bot2'], factory=f_quick), util.BuilderConfig(name='thorough', workername='bot1', factory=f_thorough), ] ``BuilderConfig`` takes the following keyword arguments: ``name`` The name of the Builder, which is used in status reports. ``workername`` ``workernames`` These arguments specify the worker or workers that will be used by this Builder. All worker names must appear in the :bb:cfg:`workers` configuration parameter. Each worker can accommodate multiple builders. The ``workernames`` parameter can be a list of names, while ``workername`` can specify only one worker. ``factory`` This is a :class:`buildbot.process.factory.BuildFactory` instance which controls how the build is performed by defining the steps in the build. Full details appear in their own section, :ref:`Build-Factories`. 
Other optional keys may be set on each ``BuilderConfig``: ``builddir`` Specifies the name of a subdirectory of the master's basedir in which everything related to this builder will be stored. This holds build status information. If not set, this parameter defaults to the builder name, with some characters escaped. Each builder must have a unique build directory. ``workerbuilddir`` Specifies the name of a subdirectory (under the worker's configured base directory) in which everything related to this builder will be placed on the worker. This is where checkouts, compilations, and tests are run. If not set, defaults to ``builddir``. If a worker is connected to multiple builders that share the same ``workerbuilddir``, make sure the worker is set to run one build at a time or ensure this is fine to run multiple builds from the same directory simultaneously. ``tags`` If provided, this is a list of strings that identifies tags for the builder. Status clients can limit themselves to a subset of the available tags. A common use for this is to add new builders to your setup (for a new module or a new worker) that do not work correctly yet and allow you to integrate them with the active builders. You can tag these new builders with a ``test`` tag, make your main status clients ignore them, and have only private status clients pick them up. As soon as they work, you can move them over to the active tag. ``nextWorker`` If provided, this is a function that controls which worker will be assigned future jobs. The function is passed three arguments, the :class:`Builder` object which is assigning a new job, a list of :class:`WorkerForBuilder` objects and the :class:`BuildRequest`. The function should return one of the :class:`WorkerForBuilder` objects, or ``None`` if none of the available workers should be used. As an example, for each ``worker`` in the list, ``worker.worker`` will be a :class:`Worker` object, and ``worker.worker.workername`` is the worker's name. 
The function can optionally return a Deferred, which should fire with the same results. ``nextBuild`` If provided, this is a function that controls which build request will be handled next. The function is passed two arguments, the :class:`Builder` object which is assigning a new job, and a list of :class:`BuildRequest` objects of pending builds. The function should return one of the :class:`BuildRequest` objects, or ``None`` if none of the pending builds should be started. This function can optionally return a Deferred which should fire with the same results. ``canStartBuild`` If provided, this is a function that can veto whether a particular worker should be used for a given build request. The function is passed three arguments: the :class:`Builder`, a :class:`Worker`, and a :class:`BuildRequest`. The function should return ``True`` if the combination is acceptable, or ``False`` otherwise. This function can optionally return a Deferred which should fire with the same results. See :ref:`canStartBuild-Functions` for a concrete example. ``locks`` A list of ``Locks`` (instances of :class:`buildbot.locks.WorkerLock` or :class:`buildbot.locks.MasterLock`) that should be acquired before starting a :class:`Build` from this :class:`Builder`. Alternatively, this could be a renderable that returns this list depending on properties related to the build that is just about to be created. This lets you defer picking the locks to acquire until it is known which :class:`Worker` a build would get assigned to. The properties available to the renderable include all properties that are set to the build before its first step excluding the properties that come from the build itself and the ``builddir`` property that comes from worker. The ``Locks`` will be released when the build is complete. Note that this is a list of actual :class:`Lock` instances, not names. Also note that all Locks must have unique names. See :ref:`Interlocks`. 
``env`` A Builder may be given a dictionary of environment variables in this parameter. The variables are used in :bb:step:`ShellCommand` steps in builds created by this builder. The environment variables will override anything in the worker's environment. Variables passed directly to a :class:`ShellCommand` will override variables of the same name passed to the Builder. For example, if you have a pool of identical workers it is often easier to manage variables like :envvar:`PATH` from Buildbot rather than manually editing them in the workers' environment. .. code-block:: python f = factory.BuildFactory f.addStep(ShellCommand( command=['bash', './configure'])) f.addStep(Compile()) c['builders'] = [ BuilderConfig(name='test', factory=f, workernames=['worker1', 'worker2', 'worker3', 'worker4'], env={'PATH': '/opt/local/bin:/opt/app/bin:/usr/local/bin:/usr/bin'}), ] Unlike most builder configuration arguments, this argument can contain renderables. .. index:: Builds; merging ``collapseRequests`` Specifies how build requests for this builder should be collapsed. See :ref:`Collapsing-Build-Requests`, below. .. index:: Properties; builder ``properties`` A builder may be given a dictionary of :ref:`Build-Properties` specific for this builder in this parameter. Those values can be used later on like other properties. :ref:`Interpolate`. ``defaultProperties`` Similar to the ``properties`` parameter. But ``defaultProperties`` will only be added to :ref:`Build-Properties` if they are not already set by :ref:`another source `. ``description`` A builder may be given an arbitrary description, which will show up in the web status on the builder's page. .. index:: Builds; merging .. _Collapsing-Build-Requests: Collapsing Build Requests ~~~~~~~~~~~~~~~~~~~~~~~~~ When more than one build request is available for a builder, Buildbot can "collapse" the requests into a single build. 
This is desirable when build requests arrive more quickly than the available workers can satisfy them, but has the drawback that separate results for each build are not available. Requests are only candidated for a merge if both requests have exactly the same :ref:`codebases`. This behavior can be controlled globally, using the :bb:cfg:`collapseRequests` parameter, and on a per-:class:`Builder` basis, using the ``collapseRequests`` argument to the :class:`Builder` configuration. If ``collapseRequests`` is given, it completely overrides the global configuration. Possible values for both ``collapseRequests`` configurations are: ``True`` Requests will be collapsed if their sourcestamp are compatible (see below for definition of compatible). ``False`` Requests will never be collapsed. ``callable(builder, req1, req2)`` Requests will be collapsed if the callable returns true. See :ref:`Collapse-Request-Functions` for detailed example. Sourcestamps are compatible if all of the below conditions are met: * Their codebase, branch, project, and repository attributes match exactly * Neither source stamp has a patch (e.g., from a try scheduler) * Either both source stamps are associated with changes, or neither is associated with changes but they have matching revisions. .. index:: Builds; priority .. _Prioritizing-Builds: Prioritizing Builds ~~~~~~~~~~~~~~~~~~~ The :class:`BuilderConfig` parameter ``nextBuild`` can be used to prioritize build requests within a builder. Note that this is orthogonal to :ref:`Prioritizing-Builders`, which controls the order in which builders are called on to start their builds. The details of writing such a function are in :ref:`Build-Priority-Functions`. Such a function can be provided to the BuilderConfig as follows: .. code-block:: python def pickNextBuild(builder, requests): ... c['builders'] = [ BuilderConfig(name='test', factory=f, nextBuild=pickNextBuild, workernames=['worker1', 'worker2', 'worker3', 'worker4']), ] .. 
_Virtual-Builders: Virtual Builders ~~~~~~~~~~~~~~~~ :ref:`Dynamic-Trigger` is a method which allows to trigger the same builder, with different parameters. This method is used by frameworks which store the build config along side the source code like Buildbot_travis_. The drawback of this method is that it is difficult to extract statistics for similar builds. The standard dashboards are not working well due to the fact that all the builds are on the same builder. In order to overcome these drawbacks, Buildbot has the concept of virtual builder. If a build has the property ``virtual_builder_name``, it will automatically attach to that builder instead of the original builder. That created virtual builder is not attached to any master and is only used for better sorting in the UI and better statistics. The original builder and worker configuration is still used for all other build behaviors. The virtual builder metadata is configured with the following properties: * ``virtual_builder_name``: The name of the virtual builder. * ``virtual_builder_description``: The description of the virtual builder. * ``virtual_builder_tags``: The tags for the virtual builder. You can also use virtual builders with :bb:sched:`SingleBranchScheduler`. For example if you want to automatically build all branches in your project without having to manually create a new builder each time one is added: .. code-block:: python c['schedulers'].append(schedulers.SingleBranchScheduler( name='myproject-epics', change_filter=util.ChangeFilter(branch_re='epics/.*'), builderNames=['myproject-epics'], properties={ 'virtual_builder_name': util.Interpolate("myproject-%(ss::branch)s") } )) .. _Buildbot_travis: https://github.com/buildbot/buildbot_travis buildbot-3.4.0/master/docs/manual/configuration/buildfactories.rst000066400000000000000000000416221413250514000254220ustar00rootroot00000000000000.. 
_Build-Factories: Build Factories =============== Each Builder is equipped with a ``build factory``, which defines the steps used to perform a particular type of build. This factory is created in the configuration file, and attached to a Builder through the ``factory`` element of its dictionary. The steps used by these builds are defined in the next section, :ref:`Build-Steps`. .. note:: Build factories are used with builders, and are not added directly to the buildmaster configuration dictionary. .. contents:: :depth: 1 :local: .. _BuildFactory: .. index:: Build Factory Defining a Build Factory ------------------------ A :class:`BuildFactory` defines the steps that every build will follow. Think of it as a glorified script. For example, a build factory which consists of an SVN checkout followed by a ``make build`` would be configured as follows: .. code-block:: python from buildbot.plugins import util, steps f = util.BuildFactory() f.addStep(steps.SVN(repourl="http://..", mode="incremental")) f.addStep(steps.Compile(command=["make", "build"])) This factory would then be attached to one builder (or several, if desired): .. code-block:: python c['builders'].append( BuilderConfig(name='quick', workernames=['bot1', 'bot2'], factory=f)) It is also possible to pass a list of steps into the :class:`BuildFactory` when it is created. Using :meth:`addStep` is usually simpler, but there are cases where it is more convenient to create the list of steps ahead of time, perhaps using some Python tricks to generate the steps. .. code-block:: python from buildbot.plugins import steps, util all_steps = [ steps.CVS(cvsroot=CVSROOT, cvsmodule="project", mode="update"), steps.Compile(command=["make", "build"]), ] f = util.BuildFactory(all_steps) Finally, you can also add a sequence of steps all at once: .. code-block:: python f.addSteps(all_steps) Attributes ~~~~~~~~~~ The following attributes can be set on a build factory after it is created, e.g., .. 
code-block:: python f = util.BuildFactory() f.useProgress = False :attr:`useProgress` (defaults to ``True``): if ``True``, the buildmaster keeps track of how long each step takes, so it can provide estimates of how long future builds will take. If builds are not expected to take a consistent amount of time (such as incremental builds in which a random set of files are recompiled or tested each time), this should be set to ``False`` to inhibit progress-tracking. :attr:`workdir` (defaults to 'build'): workdir given to every build step created by this factory as default. The workdir can be overridden in a build step definition. If this attribute is set to a string, that string will be used for constructing the workdir (worker base + builder builddir + workdir). The attribute can also be a Python callable, for more complex cases, as described in :ref:`Factory-Workdir-Functions`. .. _DynamicBuildFactories: Dynamic Build Factories ------------------------ In some cases you may not know what commands to run until after you checkout the source tree. For those cases, you can dynamically add steps during a build from other steps. The :class:`Build` object provides 2 functions to do this: ``addStepsAfterCurrentStep(self, step_factories)`` This adds the steps after the step that is currently executing. ``addStepsAfterLastStep(self, step_factories)`` This adds the steps onto the end of the build. Both functions only accept as an argument a list of steps to add to the build. For example, let's say you have a script checked in into your source tree called build.sh. When this script is called with the argument ``--list-stages`` it outputs a newline separated list of stage names. This can be used to generate at runtime a step for each stage in the build. Each stage is then run in this example using ``./build.sh --run-stage ``. .. 
code-block:: python from buildbot.plugins import util, steps from buildbot.process import buildstep, logobserver from twisted.internet import defer class GenerateStagesCommand(buildstep.ShellMixin, steps.BuildStep): def __init__(self, **kwargs): kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) self.observer = logobserver.BufferLogObserver() self.addLogObserver('stdio', self.observer) def extract_stages(self, stdout): stages = [] for line in stdout.split('\n'): stage = str(line.strip()) if stage: stages.append(stage) return stages @defer.inlineCallbacks def run(self): # run './build.sh --list-stages' to generate the list of stages cmd = yield self.makeRemoteShellCommand() yield self.runCommand(cmd) # if the command passes extract the list of stages result = cmd.results() if result == util.SUCCESS: # create a ShellCommand for each stage and add them to the build self.build.addStepsAfterCurrentStep([ steps.ShellCommand(name=stage, command=["./build.sh", "--run-stage", stage]) for stage in self.extract_stages(self.observer.getStdout()) ]) return result f = util.BuildFactory() f.addStep(steps.Git(repourl=repourl)) f.addStep(GenerateStagesCommand( name="Generate build stages", command=["./build.sh", "--list-stages"], haltOnFailure=True)) Predefined Build Factories -------------------------- Buildbot includes a few predefined build factories that perform common build sequences. In practice, these are rarely used, as every site has slightly different requirements, but the source for these factories may provide examples for implementation of those requirements. .. _GNUAutoconf: .. index:: GNUAutoconf Build Factory; GNUAutoconf GNUAutoconf ~~~~~~~~~~~ .. py:class:: buildbot.process.factory.GNUAutoconf `GNU Autoconf `_ is a software portability tool, intended to make it possible to write programs in C (and other languages) which will run on a variety of UNIX-like systems. Most GNU software is built using autoconf. 
It is frequently used in combination with GNU automake. These tools both encourage a build process which usually looks like this: .. code-block:: bash % CONFIG_ENV=foo ./configure --with-flags % make all % make check # make install (except, of course, from Buildbot, which always skips the ``make install`` part). The Buildbot's :class:`buildbot.process.factory.GNUAutoconf` factory is designed to build projects which use GNU autoconf and/or automake. The configuration environment variables, the configure flags, and command lines used for the compile and test are all configurable, in general the default values will be suitable. Example: .. code-block:: python f = util.GNUAutoconf(source=source.SVN(repourl=URL, mode="copy"), flags=["--disable-nls"]) Required Arguments: ``source`` This argument must be a step specification tuple that provides a BuildStep to generate the source tree. Optional Arguments: ``configure`` The command used to configure the tree. Defaults to :command:`./configure`. Accepts either a string or a list of shell argv elements. ``configureEnv`` The environment used for the initial configuration step. This accepts a dictionary which will be merged into the worker's normal environment. This is commonly used to provide things like ``CFLAGS="-O2 -g"`` (to turn off debug symbols during the compile). Defaults to an empty dictionary. ``configureFlags`` A list of flags to be appended to the argument list of the configure command. This is commonly used to enable or disable specific features of the autoconf-controlled package, like ``["--without-x"]`` to disable windowing support. Defaults to an empty list. ``reconf`` use autoreconf to generate the ./configure file, set to True to use a buildbot default autoreconf command, or define the command for the ShellCommand. ``compile`` this is a shell command or list of argv values which is used to actually compile the tree. It defaults to ``make all``. If set to ``None``, the compile step is skipped. 
``test`` this is a shell command or list of argv values which is used to run the tree's self-tests. It defaults to ``make check``. If set to None, the test step is skipped. ``distcheck`` this is a shell command or list of argv values which is used to run the packaging test. It defaults to ``make distcheck``. If set to None, the test step is skipped. .. _BasicBuildFactory: .. index:: BasicBuildFactory Build Factory; BasicBuildFactory BasicBuildFactory ~~~~~~~~~~~~~~~~~ .. py:class:: buildbot.process.factory.BasicBuildFactory This is a subclass of :class:`GNUAutoconf` which assumes the source is in CVS, and uses ``mode='full'`` and ``method='clobber'`` to always build from a clean working copy. .. _QuickBuildFactory: .. index:: QuickBuildFactory Build Factory; QuickBuildFactory QuickBuildFactory ~~~~~~~~~~~~~~~~~ .. py:class:: buildbot.process.factory.QuickBuildFactory The :class:`QuickBuildFactory` class is a subclass of :class:`GNUAutoconf` which assumes the source is in CVS, and uses ``mode='incremental'`` to get incremental updates. The difference between a `full build` and a `quick build` is that quick builds are generally done incrementally, starting with the tree where the previous build was performed. That simply means that the source-checkout step should be given a ``mode='incremental'`` flag, to do the source update in-place. In addition to that, this class sets the :attr:`useProgress` flag to ``False``. Incremental builds will (or at least the ought to) compile as few files as necessary, so they will take an unpredictable amount of time to run. Therefore it would be misleading to claim to predict how long the build will take. This class is probably not of use to new projects. .. _BasicSVN: .. index:: BasicSVN Build Factory; BasicSVN BasicSVN ~~~~~~~~ .. py:class:: buildbot.process.factory.BasicSVN This class is similar to :class:`QuickBuildFactory`, but uses SVN instead of CVS. .. _Factory-CPAN: .. index:: CPAN Build Factory; CPAN CPAN ~~~~ .. 
py:class:: buildbot.process.factory.CPAN Most Perl modules available from the `CPAN `_ archive use the ``MakeMaker`` module to provide configuration, build, and test services. The standard build routine for these modules looks like: .. code-block:: bash % perl Makefile.PL % make % make test # make install (except again Buildbot skips the install step) Buildbot provides a :class:`CPAN` factory to compile and test these projects. Arguments: ``source`` (required): A step specification tuple, like that used by :class:`GNUAutoconf`. ``perl`` A string which specifies the :command:`perl` executable to use. Defaults to just :command:`perl`. .. _Distutils: .. index:: Distutils, Build Factory; Distutils Distutils ~~~~~~~~~ .. py:class:: buildbot.process.factory.Distutils Most Python modules use the ``distutils`` package to provide configuration and build services. The standard build process looks like: .. code-block:: bash % python ./setup.py build % python ./setup.py install Unfortunately, although Python provides a standard unit-test framework named ``unittest``, to the best of my knowledge, ``distutils`` does not provide a standardized target to run such unit tests. (Please let me know if I'm wrong, and I will update this factory.) The :class:`Distutils` factory provides support for running the build part of this process. It accepts the same ``source=`` parameter as the other build factories. Arguments: ``source`` (required): A step specification tuple, like that used by :class:`GNUAutoconf`. ``python`` A string which specifies the :command:`python` executable to use. Defaults to just :command:`python`. ``test`` Provides a shell command which runs unit tests. This accepts either a string or a list. The default value is ``None``, which disables the test step (since there is no common default command to run unit tests in distutils modules). .. _Trial: .. index:: Trial Build Factory; Trial Trial ~~~~~ .. 
py:class:: buildbot.process.factory.Trial Twisted provides a unit test tool named :command:`trial` which provides a few improvements over Python's built-in :mod:`unittest` module. Many Python projects which use Twisted for their networking or application services also use trial for their unit tests. These modules are usually built and tested with something like the following: .. code-block:: bash % python ./setup.py build % PYTHONPATH=build/lib.linux-i686-2.3 trial -v PROJECTNAME.test % python ./setup.py install Unfortunately, the :file:`build/lib` directory into which the built/copied ``.py`` files are placed is actually architecture-dependent, and I do not yet know of a simple way to calculate its value. For many projects it is sufficient to import their libraries `in place` from the tree's base directory (``PYTHONPATH=.``). In addition, the :samp:`{PROJECTNAME}` value where the test files are located is project-dependent: it is usually just the project's top-level library directory, as common practice suggests the unit test files are put in the :mod:`test` sub-module. This value cannot be guessed, the :class:`Trial` class must be told where to find the test files. The :class:`Trial` class provides support for building and testing projects which use distutils and trial. If the test module name is specified, trial will be invoked. The library path used for testing can also be set. One advantage of trial is that the Buildbot happens to know how to parse trial output, letting it identify which tests passed and which ones failed. The Buildbot can then provide fine-grained reports about how many tests have failed, when individual tests fail when they had been passing previously, etc. Another feature of trial is that you can give it a series of source ``.py`` files, and it will search them for special ``test-case-name`` tags that indicate which test cases provide coverage for that file. Trial can then run just the appropriate tests. 
This is useful for quick builds, where you want to only run the test cases that cover the changed functionality. Arguments: ``testpath`` Provides a directory to add to :envvar:`PYTHONPATH` when running the unit tests, if tests are being run. Defaults to ``.`` to include the project files in-place. The generated build library is frequently architecture-dependent, but may simply be :file:`build/lib` for pure-Python modules. ``python`` Which Python executable to use. This list will form the start of the `argv` array that will launch trial. If you use this, you should set ``trial`` to an explicit path (like :file:`/usr/bin/trial` or :file:`./bin/trial`). The parameter defaults to ``None``, which leaves it out entirely (running ``trial args`` instead of ``python ./bin/trial args``). Likely values are ``['python']``, ``['python2.2']``, or ``['python', '-Wall']``. ``trial`` Provides the name of the :command:`trial` command. It is occasionally useful to use an alternate executable, such as :command:`trial2.2` which might run the tests under an older version of Python. Defaults to :command:`trial`. ``trialMode`` A list of arguments to pass to trial, specifically to set the reporting mode. This defaults to ``['--reporter=bwverbose']``, which only works for Twisted-2.1.0 and later. ``trialArgs`` A list of arguments to pass to trial, available to turn on any extra flags you like. Defaults to ``[]``. ``tests`` Provides a module name or names which contain the unit tests for this project. Accepts a string, typically :samp:`{PROJECTNAME}.test`, or a list of strings. Defaults to ``None``, indicating that no tests should be run. You must either set this or ``testChanges``. ``testChanges`` If ``True``, ignore the ``tests`` parameter and instead ask the Build for all the files that make up the Changes going into this build. Pass these filenames to trial and ask it to look for test-case-name tags, running just the tests necessary to cover the changes. 
``recurse`` If ``True``, tells Trial (with the ``--recurse`` argument) to look in all subdirectories for additional test cases. ``reactor`` which reactor to use, like 'gtk' or 'java'. If not provided, the Twisted's usual platform-dependent default is used. ``randomly`` If ``True``, tells Trial (with the ``--random=0`` argument) to run the test cases in random order, which sometimes catches subtle inter-test dependency bugs. Defaults to ``False``. The step can also take any of the :class:`ShellCommand` arguments, e.g., :attr:`haltOnFailure`. Unless one of ``tests`` or ``testChanges`` are set, the step will generate an exception. buildbot-3.4.0/master/docs/manual/configuration/buildsets.rst000066400000000000000000000055131413250514000244200ustar00rootroot00000000000000.. _BuildSet: Build Sets ========== A :class:`BuildSet` represents a set of :class:`Build`\s that all compile and/or test the same version of the source tree. Usually, these builds are created by multiple :class:`Builder`\s and will thus execute different steps. The :class:`BuildSet` is tracked as a single unit, which fails if any of the component :class:`Build`\s have failed, and therefore can succeed only if *all* of the component :class:`Build`\s have succeeded. There are two kinds of status notification messages that can be emitted for a :class:`BuildSet`: the ``firstFailure`` type (which fires as soon as we know the :class:`BuildSet` will fail), and the ``Finished`` type (which fires once the :class:`BuildSet` has completely finished, regardless of whether the overall set passed or failed). A :class:`BuildSet` is created with a set of one or more *source stamp* tuples of ``(branch, revision, changes, patch)``, some of which may be ``None``, and a list of :class:`Builder`\s on which it is to be run. They are then given to the BuildMaster, which is responsible for creating a separate :class:`BuildRequest` for each :class:`Builder`. 
There are a couple of different likely values for the ``SourceStamp``: :samp:`(revision=None, changes={CHANGES}, patch=None)` This is a :class:`SourceStamp` used when a series of :class:`Change`\s have triggered a build. The VC step will attempt to check out a tree that contains *CHANGES* (and any changes that occurred before *CHANGES*, but not any that occurred after them.) :samp:`(revision=None, changes=None, patch=None)` This builds the most recent code on the default branch. This is the sort of :class:`SourceStamp` that would be used on a :class:`Build` that was triggered by a user request, or a :bb:sched:`Periodic` scheduler. It is also possible to configure the VC Source Step to always check out the latest sources rather than paying attention to the :class:`Change`\s in the :class:`SourceStamp`, which will result in the same behavior as this. :samp:`(branch={BRANCH}, revision=None, changes=None, patch=None)` This builds the most recent code on the given *BRANCH*. Again, this is generally triggered by a user request or a :bb:sched:`Periodic` scheduler. :samp:`(revision={REV}, changes=None, patch=({LEVEL}, {DIFF}, {SUBDIR_ROOT}))` This checks out the tree at the given revision *REV*, then applies a patch (using ``patch -pLEVEL `_ combines the repository, module, and branch into a single *Subversion URL* parameter. Within that scope, source checkouts can be specified by a numeric *revision number* (a repository-wide monotonically-increasing marker, such that each transaction that changes the repository is indexed by a different revision number), or a revision timestamp. When branches are used, the repository and module form a static ``baseURL``, while each build has a *revision number* and a *branch* (which defaults to a statically-specified ``defaultBranch``). The ``baseURL`` and ``branch`` are simply concatenated together to derive the ``repourl`` to use for the checkout. `Perforce `_ is similar. The server is specified through a ``P4PORT`` parameter. 
Module and branch are specified in a single depot path, and revisions are depot-wide. When branches are used, the ``p4base`` and ``defaultBranch`` are concatenated together to produce the depot path. `Bzr `_ (which is a descendant of Arch/Bazaar, and is frequently referred to as "Bazaar") has the same sort of repository-vs-workspace model as Arch, but the repository data can either be stored inside the working directory or kept elsewhere (either on the same machine or on an entirely different machine). For the purposes of Buildbot (which never commits changes), the repository is specified with a URL and a revision number. The most common way to obtain read-only access to a bzr tree is via HTTP, simply by making the repository visible through a web server like Apache. Bzr can also use FTP and SFTP servers, if the worker process has sufficient privileges to access them. Higher performance can be obtained by running a special Bazaar-specific server. None of these matter to the buildbot: the repository URL just has to match the kind of server being used. The ``repoURL`` argument provides the location of the repository. Branches are expressed as subdirectories of the main central repository, which means that if branches are being used, the BZR step is given a ``baseURL`` and ``defaultBranch`` instead of getting the ``repoURL`` argument. `Darcs `_ doesn't really have the notion of a single master repository. Nor does it really have branches. In Darcs, each working directory is also a repository, and there are operations to push and pull patches from one of these ``repositories`` to another. For the Buildbot's purposes, all you need to do is specify the URL of a repository that you want to build from. The worker will then pull the latest patches from that repository and build them. Multiple branches are implemented by using multiple repositories (possibly living on the same server). 
Builders which use Darcs therefore have a static ``repourl`` which specifies the location of the repository. If branches are being used, the source Step is instead configured with a ``baseURL`` and a ``defaultBranch``, and the two strings are simply concatenated together to obtain the repository's URL. Each build then has a specific branch which replaces ``defaultBranch``, or just uses the default one. Instead of a revision number, each build can have a ``context``, which is a string that records all the patches that are present in a given tree (this is the output of ``darcs changes --context``, and is considerably less concise than, e.g. Subversion's revision number, but the patch-reordering flexibility of Darcs makes it impossible to provide a shorter useful specification). `Mercurial `_ follows a decentralized model, and each repository can have several branches and tags. The source Step is configured with a static ``repourl`` which specifies the location of the repository. Branches are configured with the ``defaultBranch`` argument. The *revision* is the hash identifier returned by ``hg identify``. `Git `_ also follows a decentralized model, and each repository can have several branches and tags. The source Step is configured with a static ``repourl`` which specifies the location of the repository. In addition, an optional ``branch`` parameter can be specified to check out code from a specific branch instead of the default *master* branch. The *revision* is specified as a SHA1 hash as returned by e.g. ``git rev-parse``. No attempt is made to ensure that the specified revision is actually a subset of the specified branch. `Monotone `_ is another that follows a decentralized model where each repository can have several branches and tags. The source Step is configured with static ``repourl`` and ``branch`` parameters, which specifies the location of the repository and the branch to use. The *revision* is specified as a SHA1 hash as returned by e.g. 
``mtn automate select w:``. No attempt is made to ensure that the specified revision is actually a subset of the specified branch. Comparison ++++++++++ =========== =========== =========== =================== Name Change Revision Branches =========== =========== =========== =================== CVS patch [1] timestamp unnamed Subversion revision integer directories Git commit sha1 hash named refs Mercurial changeset sha1 hash different repos or (permanently) named commits Darcs ? none [2] different repos Bazaar ? ? ? Perforce ? ? ? BitKeeper changeset ? different repos =========== =========== =========== =================== * [1] note that CVS only tracks patches to individual files. Buildbot tries to recognize coordinated changes to multiple files by correlating change times. * [2] Darcs does not have a concise way of representing a particular revision of the source. Tree Stability ++++++++++++++ Changes tend to arrive at a buildmaster in bursts. In many cases, these bursts of changes are meant to be taken together. For example, a developer may have pushed multiple commits to a DVCS that comprise the same new feature or bugfix. To avoid trying to build every change, Buildbot supports the notion of *tree stability*, by waiting for a burst of changes to finish before starting to schedule builds. This is implemented as a timer, with builds not scheduled until no changes have occurred for the duration of the timer. .. _Choosing-a-Change-Source: Choosing a Change Source ~~~~~~~~~~~~~~~~~~~~~~~~ There are a variety of :class:`ChangeSource` classes available, some of which are meant to be used in conjunction with other tools to deliver :class:`Change` events from the VC repository to the buildmaster. As a quick guide, here is a list of VC systems and the :class:`ChangeSource`\s that might be useful with them. 
Note that some of these modules are in Buildbot's :contrib-src:`master/contrib` directory, meaning that they have been offered by other users in hopes they may be useful, and might require some additional work to make them functional. CVS * :bb:chsrc:`CVSMaildirSource` (watching mail sent by :contrib-src:`master/contrib/buildbot_cvs_mail.py` script) * :bb:chsrc:`PBChangeSource` (listening for connections from ``buildbot sendchange`` run in a loginfo script) * :bb:chsrc:`PBChangeSource` (listening for connections from a long-running :contrib-src:`master/contrib/viewcvspoll.py` polling process which examines the ViewCVS database directly) * :bb:chsrc:`Change Hooks` in WebStatus SVN * :bb:chsrc:`PBChangeSource` (listening for connections from :contrib-src:`master/contrib/svn_buildbot.py` run in a postcommit script) * :bb:chsrc:`PBChangeSource` (listening for connections from a long-running :contrib-src:`master/contrib/svn_watcher.py` or :contrib-src:`master/contrib/svnpoller.py` polling process * :bb:chsrc:`SVNCommitEmailMaildirSource` (watching for email sent by :file:`commit-email.pl`) * :bb:chsrc:`SVNPoller` (polling the SVN repository) * :bb:chsrc:`Change Hooks` in WebStatus Darcs * :bb:chsrc:`PBChangeSource` (listening for connections from :contrib-src:`master/contrib/darcs_buildbot.py` in a commit script) * :bb:chsrc:`Change Hooks` in WebStatus Mercurial * :bb:chsrc:`Change Hooks` in WebStatus (including :contrib-src:`master/contrib/hgbuildbot.py`, configurable in a ``changegroup`` hook) * `BitBucket change hook `_ (specifically designed for BitBucket notifications, but requiring a publicly-accessible WebStatus) * :bb:chsrc:`HgPoller` (polling a remote Mercurial repository) * :bb:chsrc:`BitbucketPullrequestPoller` (polling Bitbucket for pull requests) * :ref:`Mail-parsing-ChangeSources`, though there are no ready-to-use recipes Bzr (the newer Bazaar) * :bb:chsrc:`PBChangeSource` (listening for connections from :contrib-src:`master/contrib/bzr_buildbot.py` run in 
a post-change-branch-tip or commit hook) * :bb:chsrc:`BzrPoller` (polling the Bzr repository) * :bb:chsrc:`Change Hooks` in WebStatus Git * :bb:chsrc:`PBChangeSource` (listening for connections from :contrib-src:`master/contrib/git_buildbot.py` run in the post-receive hook) * :bb:chsrc:`PBChangeSource` (listening for connections from :contrib-src:`master/contrib/github_buildbot.py`, which listens for notifications from GitHub) * :bb:chsrc:`Change Hooks` in WebStatus * :bb:chsrc:`GitHub` change hook (specifically designed for GitHub notifications, but requiring a publicly-accessible WebStatus) * :bb:chsrc:`BitBucket` change hook (specifically designed for BitBucket notifications, but requiring a publicly-accessible WebStatus) * :bb:chsrc:`GitPoller` (polling a remote Git repository) * :bb:chsrc:`GitHubPullrequestPoller` (polling GitHub API for pull requests) * :bb:chsrc:`BitbucketPullrequestPoller` (polling Bitbucket for pull requests) Repo/Gerrit * :bb:chsrc:`GerritChangeSource` connects to Gerrit via SSH to get a live stream of changes * :bb:chsrc:`GerritEventLogPoller` connects to Gerrit via HTTP with the help of the plugin events-log_ Monotone * :bb:chsrc:`PBChangeSource` (listening for connections from :file:`monotone-buildbot.lua`, which is available with Monotone) All VC systems can be driven by a :bb:chsrc:`PBChangeSource` and the ``buildbot sendchange`` tool run from some form of commit script. If you write an email parsing function, they can also all be driven by a suitable :ref:`mail-parsing source `. Additionally, handlers for web-based notification (i.e. from GitHub) can be used with WebStatus' change_hook module. The interface is simple, so adding your own handlers (and sharing!) should be a breeze. See :bb:index:`chsrc` for a full list of change sources. .. index:: Change Sources .. 
bb:cfg:: change_source Configuring Change Sources ~~~~~~~~~~~~~~~~~~~~~~~~~~ The :bb:cfg:`change_source` configuration key holds all active change sources for the configuration. Most configurations have a single :class:`ChangeSource`, watching only a single tree, e.g., .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.PBChangeSource() For more advanced configurations, the parameter can be a list of change sources: .. code-block:: python source1 = ... source2 = ... c['change_source'] = [ source1, source2 ] Repository and Project ++++++++++++++++++++++ :class:`ChangeSource`\s will, in general, automatically provide the proper :attr:`repository` attribute for any changes they produce. For systems which operate on URL-like specifiers, this is a repository URL. Other :class:`ChangeSource`\s adapt the concept as necessary. Many :class:`ChangeSource`\s allow you to specify a project, as well. This attribute is useful when building from several distinct codebases in the same buildmaster: the project string can serve to differentiate the different codebases. Schedulers can filter on project, so you can configure different builders to run for each project. .. _Mail-parsing-ChangeSources: Mail-parsing ChangeSources ~~~~~~~~~~~~~~~~~~~~~~~~~~ Many projects publish information about changes to their source tree by sending an email message out to a mailing list, frequently named :samp:`{PROJECT}-commits` or :samp:`{PROJECT}-changes`. Each message usually contains a description of the change (who made the change, which files were affected) and sometimes a copy of the diff. Humans can subscribe to this list to stay informed about what's happening to the source tree. Buildbot can also subscribe to a `-commits` mailing list, and can trigger builds in response to Changes that it hears about. 
The buildmaster admin needs to arrange for these email messages to arrive in a place where the buildmaster can find them, and configure the buildmaster to parse the messages correctly. Once that is in place, the email parser will create Change objects and deliver them to the schedulers (see :ref:`Schedulers`) just like any other ChangeSource. There are two components to setting up an email-based ChangeSource. The first is to route the email messages to the buildmaster, which is done by dropping them into a `maildir`. The second is to actually parse the messages, which is highly dependent upon the tool that was used to create them. Each VC system has a collection of favorite change-emailing tools with a slightly different format and its own parsing function. Buildbot has a separate ChangeSource variant for each of these parsing functions. Once you've chosen a maildir location and a parsing function, create the change source and put it in :bb:cfg:`change_source`: .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.CVSMaildirSource("~/maildir-buildbot", prefix="/trunk/") .. _Subscribing-the-Buildmaster: Subscribing the Buildmaster +++++++++++++++++++++++++++ The recommended way to install Buildbot is to create a dedicated account for the buildmaster. If you do this, the account will probably have a distinct email address (perhaps `buildmaster@example.org`). Then just arrange for this account's email to be delivered to a suitable maildir (described in the next section). If Buildbot does not have its own account, `extension addresses` can be used to distinguish between emails intended for the buildmaster and emails intended for the rest of the account. In most modern MTAs, the e.g. `foo@example.org` account has control over every email address at example.org which begins with "foo", such that emails addressed to `account-foo@example.org` can be delivered to a different destination than `account-bar@example.org`. 
qmail does this by using separate :file:`.qmail` files for the two destinations (:file:`.qmail-foo` and :file:`.qmail-bar`, with :file:`.qmail` controlling the base address and :file:`.qmail-default` controlling all other extensions). Other MTAs have similar mechanisms. Thus you can assign an extension address like `foo-buildmaster@example.org` to the buildmaster and retain `foo@example.org` for your own use. .. _Using-Maildirs: Using Maildirs ++++++++++++++ A `maildir` is a simple directory structure originally developed for qmail that allows safe atomic update without locking. Create a base directory with three subdirectories: :file:`new`, :file:`tmp`, and :file:`cur`. When messages arrive, they are put into a uniquely-named file (using pids, timestamps, and random numbers) in :file:`tmp`. When the file is complete, it is atomically renamed into :file:`new`. Eventually the buildmaster notices the file in :file:`new`, reads and parses the contents, then moves it into :file:`cur`. A cronjob can be used to delete files in :file:`cur` at leisure. Maildirs are frequently created with the :command:`maildirmake` tool, but a simple :samp:`mkdir -p ~/{MAILDIR}/\{cur,new,tmp\}` is pretty much equivalent. Many modern MTAs can deliver directly to maildirs. The usual :file:`.forward` or :file:`.procmailrc` syntax is to name the base directory with a trailing slash, so something like :samp:`~/{MAILDIR}/`\. qmail and postfix are maildir-capable MTAs, and procmail is a maildir-capable MDA (Mail Delivery Agent). Here is an example procmail config, located in :file:`~/.procmailrc`: .. code-block:: none # .procmailrc # routes incoming mail to appropriate mailboxes PATH=/usr/bin:/usr/local/bin MAILDIR=$HOME/Mail LOGFILE=.procmail_log SHELL=/bin/sh :0 * new If procmail is not setup on a system wide basis, then the following one-line :file:`.forward` file will invoke it. .. 
code-block:: none !/usr/bin/procmail For MTAs which cannot put files into maildirs directly, the `safecat` tool can be executed from a :file:`.forward` file to accomplish the same thing. The Buildmaster uses the linux DNotify facility to receive immediate notification when the maildir's :file:`new` directory has changed. When this facility is not available, it polls the directory for new messages, every 10 seconds by default. .. _Parsing-Email-Change-Messages: Parsing Email Change Messages +++++++++++++++++++++++++++++ The second component to setting up an email-based :class:`ChangeSource` is to parse the actual notices. This is highly dependent upon the VC system and commit script in use. A couple of common tools used to create these change emails, along with the Buildbot tools to parse them, are: CVS Buildbot CVS MailNotifier :bb:chsrc:`CVSMaildirSource` SVN svnmailer http://opensource.perlig.de/en/svnmailer/ :file:`commit-email.pl` :bb:chsrc:`SVNCommitEmailMaildirSource` Bzr Launchpad :bb:chsrc:`BzrLaunchpadEmailMaildirSource` Mercurial NotifyExtension https://www.mercurial-scm.org/wiki/NotifyExtension Git post-receive-email http://git.kernel.org/?p=git/git.git;a=blob;f=contrib/hooks/post-receive-email;hb=HEAD The following sections describe the parsers available for each of these tools. Most of these parsers accept a ``prefix=`` argument, which is used to limit the set of files that the buildmaster pays attention to. This is most useful for systems like CVS and SVN which put multiple projects in a single repository (or use repository names to indicate branches). Each filename that appears in the email is tested against the prefix: if the filename does not start with the prefix, the file is ignored. If the filename *does* start with the prefix, that prefix is stripped from the filename before any further processing is done. Thus the prefix usually ends with a slash. .. bb:chsrc:: CVSMaildirSource .. _CVSMaildirSource: CVSMaildirSource ++++++++++++++++ .. 
py:class:: buildbot.changes.mail.CVSMaildirSource This parser works with the :contrib-src:`master/contrib/buildbot_cvs_mail.py` script. The script sends an email containing all the files submitted in one directory. It is invoked by using the :file:`CVSROOT/loginfo` facility. The Buildbot's :bb:chsrc:`CVSMaildirSource` knows how to parse these messages and turn them into Change objects. It takes the directory name of the maildir root. For example: .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.CVSMaildirSource("/home/buildbot/Mail") Configuration of CVS and :contrib-src:`buildbot_cvs_mail.py ` :::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: CVS must be configured to invoke the :contrib-src:`buildbot_cvs_mail.py ` script when files are checked in. This is done via the CVS loginfo configuration file. To update this, first do: .. code-block:: bash cvs checkout CVSROOT cd to the CVSROOT directory and edit the file loginfo, adding a line like: .. code-block:: none SomeModule /cvsroot/CVSROOT/buildbot_cvs_mail.py --cvsroot :ext:example.com:/cvsroot -e buildbot -P SomeModule %@{sVv@} .. note:: For cvs version 1.12.x, the ``--path %p`` option is required. Version 1.11.x and 1.12.x report the directory path differently. The above example you put the :contrib-src:`buildbot_cvs_mail.py ` script under /cvsroot/CVSROOT. It can be anywhere. Run the script with ``--help`` to see all the options. At the very least, the options ``-e`` (email) and ``-P`` (project) should be specified. The line must end with ``%{sVv}``. This is expanded to the files that were modified. Additional entries can be added to support more modules. See :command:`buildbot_cvs_mail.py --help` for more information on the available options. .. bb:chsrc:: SVNCommitEmailMaildirSource .. _SVNCommitEmailMaildirSource: SVNCommitEmailMaildirSource ++++++++++++++++++++++++++++ .. 
py:class:: buildbot.changes.mail.SVNCommitEmailMaildirSource :bb:chsrc:`SVNCommitEmailMaildirSource` parses message sent out by the :file:`commit-email.pl` script, which is included in the Subversion distribution. It does not currently handle branches: all of the Change objects that it creates will be associated with the default (i.e. trunk) branch. .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.SVNCommitEmailMaildirSource("~/maildir-buildbot") .. bb:chsrc:: BzrLaunchpadEmailMaildirSource .. _BzrLaunchpadEmailMaildirSource: BzrLaunchpadEmailMaildirSource +++++++++++++++++++++++++++++++ .. py:class:: buildbot.changes.mail.BzrLaunchpadEmailMaildirSource :bb:chsrc:`BzrLaunchpadEmailMaildirSource` parses the mails that are sent to addresses that subscribe to branch revision notifications for a bzr branch hosted on Launchpad. The branch name defaults to :samp:`lp:{Launchpad path}`. For example ``lp:~maria-captains/maria/5.1``. If only a single branch is used, the default branch name can be changed by setting ``defaultBranch``. For multiple branches, pass a dictionary as the value of the ``branchMap`` option to map specific repository paths to specific branch names (see example below). The leading ``lp:`` prefix of the path is optional. The ``prefix`` option is not supported (it is silently ignored). Use the ``branchMap`` and ``defaultBranch`` instead to assign changes to branches (and just do not subscribe the Buildbot to branches that are not of interest). The revision number is obtained from the email text. The bzr revision id is not available in the mails sent by Launchpad. However, it is possible to set the bzr `append_revisions_only` option for public shared repositories to avoid new pushes of merges changing the meaning of old revision numbers. .. 
code-block:: python from buildbot.plugins import changes bm = { 'lp:~maria-captains/maria/5.1': '5.1', 'lp:~maria-captains/maria/6.0': '6.0' } c['change_source'] = changes.BzrLaunchpadEmailMaildirSource("~/maildir-buildbot", branchMap=bm) .. bb:chsrc:: PBChangeSource .. _PBChangeSource: PBChangeSource ~~~~~~~~~~~~~~ .. py:class:: buildbot.changes.pb.PBChangeSource :bb:chsrc:`PBChangeSource` actually listens on a TCP port for clients to connect and push change notices *into* the Buildmaster. This is used by the built-in ``buildbot sendchange`` notification tool, as well as several version-control hook scripts. This change is also useful for creating new kinds of change sources that work on a `push` model instead of some kind of subscription scheme, for example a script which is run out of an email :file:`.forward` file. This ChangeSource always runs on the same TCP port as the workers. It shares the same protocol, and in fact shares the same space of "usernames", so you cannot configure a :bb:chsrc:`PBChangeSource` with the same name as a worker. If you have a publicly accessible worker port and are using :bb:chsrc:`PBChangeSource`, *you must establish a secure username and password for the change source*. If your sendchange credentials are known (e.g., the defaults), then your buildmaster is susceptible to injection of arbitrary changes, which (depending on the build factories) could lead to arbitrary code execution on workers. The :bb:chsrc:`PBChangeSource` is created with the following arguments. ``port`` Which port to listen on. If ``None`` (which is the default), it shares the port used for worker connections. ``user`` The user account that the client program must use to connect. Defaults to ``change`` ``passwd`` The password for the connection - defaults to ``changepw``. Can be a :ref:`Secret`. Do not use this default on a publicly exposed port! ``prefix`` The prefix to be found and stripped from filenames delivered over the connection, defaulting to ``None``. 
Any filenames which do not start with this prefix will be removed. If all the filenames in a given Change are removed, then that whole Change will be dropped. This string should probably end with a directory separator. This is useful for changes coming from version control systems that represent branches as parent directories within the repository (like SVN and Perforce). Use a prefix of ``trunk/`` or ``project/branches/foobranch/`` to only follow one branch and to get correct tree-relative filenames. Without a prefix, the :bb:chsrc:`PBChangeSource` will probably deliver Changes with filenames like :file:`trunk/foo.c` instead of just :file:`foo.c`. Of course this also depends upon the tool sending the Changes in (like :bb:cmdline:`buildbot sendchange `) and what filenames it is delivering: that tool may be filtering and stripping prefixes at the sending end. For example: .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.PBChangeSource(port=9999, user='laura', passwd='fpga') The following hooks are useful for sending changes to a :bb:chsrc:`PBChangeSource`\: .. _Bzr-Hook: Bzr Hook ++++++++ Bzr is also written in Python, and the Bzr hook depends on Twisted to send the changes. To install, put :contrib-src:`master/contrib/bzr_buildbot.py` in one of your plugins locations a bzr plugins directory (e.g., :file:`~/.bazaar/plugins`). Then, in one of your bazaar conf files (e.g., :file:`~/.bazaar/locations.conf`), set the location you want to connect with Buildbot with these keys: * ``buildbot_on`` one of 'commit', 'push, or 'change'. Turns the plugin on to report changes via commit, changes via push, or any changes to the trunk. 'change' is recommended. * ``buildbot_server`` (required to send to a Buildbot master) the URL of the Buildbot master to which you will connect (as of this writing, the same server and port to which workers connect). 
* ``buildbot_port`` (optional, defaults to 9989) the port of the Buildbot master to which you will connect (as of this writing, the same server and port to which workers connect) * ``buildbot_pqm`` (optional, defaults to not pqm) Normally, the user that commits the revision is the user that is responsible for the change. When run in a pqm (Patch Queue Manager, see https://launchpad.net/pqm) environment, the user that commits is the Patch Queue Manager, and the user that committed the *parent* revision is responsible for the change. To turn on the pqm mode, set this value to any of (case-insensitive) "Yes", "Y", "True", or "T". * ``buildbot_dry_run`` (optional, defaults to not a dry run) Normally, the post-commit hook will attempt to communicate with the configured Buildbot server and port. If this parameter is included and any of (case-insensitive) "Yes", "Y", "True", or "T", then the hook will simply print what it would have sent, but not attempt to contact the Buildbot master. * ``buildbot_send_branch_name`` (optional, defaults to not sending the branch name) If your Buildbot's bzr source build step uses a repourl, do *not* turn this on. If your buildbot's bzr build step uses a baseURL, then you may set this value to any of (case-insensitive) "Yes", "Y", "True", or "T" to have the Buildbot master append the branch name to the baseURL. .. note:: The bzr smart server (as of version 2.2.2) doesn't know how to resolve ``bzr://`` urls into absolute paths so any paths in ``locations.conf`` won't match, hence no change notifications will be sent to Buildbot. Setting configuration parameters globally or in-branch might still work. When Buildbot no longer has a hardcoded password, it will be a configuration option here as well. Here's a simple example that you might have in your :file:`~/.bazaar/locations.conf`\. .. code-block:: ini [chroot-*:///var/local/myrepo/mybranch] buildbot_on = change buildbot_server = localhost .. bb:chsrc:: P4Source .. 
_P4Source: P4Source ~~~~~~~~ The :bb:chsrc:`P4Source` periodically polls a `Perforce `_ depot for changes. It accepts the following arguments: ``p4port`` The Perforce server to connect to (as :samp:`{host}:{port}`). ``p4user`` The Perforce user. ``p4passwd`` The Perforce password. ``p4base`` The base depot path to watch, without the trailing '/...'. ``p4bin`` An optional string parameter. Specify the location of the perforce command line binary (p4). You only need to do this if the perforce binary is not in the path of the Buildbot user. Defaults to `p4`. ``split_file`` A function that maps a pathname, without the leading ``p4base``, to a (branch, filename) tuple. The default just returns ``(None, branchfile)``, which effectively disables branch support. You should supply a function which understands your repository structure. ``pollInterval`` How often to poll, in seconds. Defaults to 600 (10 minutes). ``pollRandomDelayMin`` Minimum delay in seconds to wait before each poll, default is 0. This is useful in case you have a lot of pollers and you want to spread the polling load over a period of time. Setting it equal to the maximum delay will effectively delay all polls by a fixed amount of time. Must be less than or equal to the maximum delay. ``pollRandomDelayMax`` Maximum delay in seconds to wait before each poll, default is 0. This is useful in case you have a lot of pollers and you want to spread the polling load over a period of time. Must be less than the poll interval. ``project`` Set the name of the project to be used for the :bb:chsrc:`P4Source`. This will then be set in any changes generated by the ``P4Source``, and can be used in a Change Filter for triggering particular builders. ``pollAtLaunch`` Determines when the first poll occurs. True = immediately on launch, False = wait for one pollInterval (default). ``histmax`` The maximum number of changes to inspect at a time. 
If more than this number occur since the last poll, older changes will be silently ignored. ``encoding`` The character encoding of ``p4``\'s output. This defaults to "utf8", but if your commit messages are in another encoding, specify that here. For example, if you're using Perforce on Windows, you may need to use "cp437" as the encoding if "utf8" generates errors in your master log. ``server_tz`` The timezone of the Perforce server, using the usual timezone format (e.g: ``"Europe/Stockholm"``) in case it's not in UTC. ``use_tickets`` Set to ``True`` to use ticket-based authentication, instead of passwords (but you still need to specify ``p4passwd``). ``ticket_login_interval`` How often to get a new ticket, in seconds, when ``use_tickets`` is enabled. Defaults to 86400 (24 hours). ``revlink`` A function that maps branch and revision to a valid url (e.g. p4web), stored along with the change. This function must be a callable which takes two arguments, the branch and the revision. Defaults to lambda branch, revision: (u'') ``resolvewho`` A function that resolves the Perforce 'user@workspace' into a more verbose form, stored as the author of the change. Useful when usernames do not match email addresses and external, client-side lookup is required. This function must be a callable which takes one argument. Defaults to lambda who: (who) Example #1 ++++++++++ This configuration uses the :envvar:`P4PORT`, :envvar:`P4USER`, and :envvar:`P4PASSWD` specified in the buildmaster's environment. It watches a project in which the branch name is simply the next path component, and the file is all path components after. .. code-block:: python from buildbot.plugins import changes s = changes.P4Source(p4base='//depot/project/', split_file=lambda branchfile: branchfile.split('/',1)) c['change_source'] = s Example #2 ++++++++++ Similar to the previous example but also resolves the branch and revision into a valid revlink. .. 
code-block:: python from buildbot.plugins import changes s = changes.P4Source( p4base='//depot/project/', split_file=lambda branchfile: branchfile.split('/',1), revlink=lambda branch, revision: 'http://p4web:8080/@md=d&@/{}?ac=10'.format(revision)) c['change_source'] = s .. bb:chsrc:: SVNPoller .. _SVNPoller: SVNPoller ~~~~~~~~~ .. py:class:: buildbot.changes.svnpoller.SVNPoller The :bb:chsrc:`SVNPoller` is a ChangeSource which periodically polls a `Subversion <https://subversion.apache.org/>`_ repository for new revisions, by running the ``svn log`` command in a subshell. It can watch a single branch or multiple branches. :bb:chsrc:`SVNPoller` accepts the following arguments: ``repourl`` The base URL path to watch, like ``svn://svn.twistedmatrix.com/svn/Twisted/trunk``, or ``http://divmod.org/svn/Divmo/``, or even ``file:///home/svn/Repository/ProjectA/branches/1.5/``. This must include the access scheme, the location of the repository (both the hostname for remote ones, and any additional directory names necessary to get to the repository), and the sub-path within the repository's virtual filesystem for the project and branch of interest. The :bb:chsrc:`SVNPoller` will only pay attention to files inside the subdirectory specified by the complete repourl. ``split_file`` A function to convert pathnames into ``(branch, relative_pathname)`` tuples. Use this to explain your repository's branch-naming policy to :bb:chsrc:`SVNPoller`. This function must accept a single string (the pathname relative to the repository) and return a two-entry tuple. Directory pathnames always end with a right slash to distinguish them from files, like ``trunk/src/``, or ``src/``. There are a few utility functions in :mod:`buildbot.changes.svnpoller` that can be used as a :meth:`split_file` function; see below for details. 
For directories, the relative pathname returned by :meth:`split_file` should end with a right slash but an empty string is also accepted for the root, like ``("branches/1.5.x", "")`` being converted from ``"branches/1.5.x/"``. The default value always returns ``(None, path)``, which indicates that all files are on the trunk. Subclasses of :bb:chsrc:`SVNPoller` can override the :meth:`split_file` method instead of using the ``split_file=`` argument. ``project`` Set the name of the project to be used for the :bb:chsrc:`SVNPoller`. This will then be set in any changes generated by the :bb:chsrc:`SVNPoller`, and can be used in a :ref:`Change Filter ` for triggering particular builders. ``svnuser`` An optional string parameter. If set, the option `--user` argument will be added to all :command:`svn` commands. Use this if you have to authenticate to the svn server before you can do :command:`svn info` or :command:`svn log` commands. Can be a :ref:`Secret`. ``svnpasswd`` Like ``svnuser``, this will cause a option `--password` argument to be passed to all :command:`svn` commands. Can be a :ref:`Secret`. ``pollInterval`` How often to poll, in seconds. Defaults to 600 (checking once every 10 minutes). Lower this if you want the Buildbot to notice changes faster, raise it if you want to reduce the network and CPU load on your svn server. Please be considerate of public SVN repositories by using a large interval when polling them. ``pollRandomDelayMin`` Minimum delay in seconds to wait before each poll, default is 0. This is useful in case you have a lot of pollers and you want to spread the polling load over a period of time. Setting it equal to the maximum delay will effectively delay all polls by a fixed amount of time. Must be less than or equal to the maximum delay. ``pollRandomDelayMax`` Maximum delay in seconds to wait before each poll, default is 0. This is useful in case you have a lot of pollers and you want to spread the polling load over a period of time. 
Must be less than the poll interval. ``pollAtLaunch`` Determines when the first poll occurs. True = immediately on launch, False = wait for one pollInterval (default). ``histmax`` The maximum number of changes to inspect at a time. Every ``pollInterval`` seconds, the :bb:chsrc:`SVNPoller` asks for the last ``histmax`` changes and looks through them for any revisions it does not already know about. If more than ``histmax`` revisions have been committed since the last poll, older changes will be silently ignored. Larger values of ``histmax`` will cause more time and memory to be consumed on each poll attempt. ``histmax`` defaults to 100. ``svnbin`` This controls the :command:`svn` executable to use. If subversion is installed in a weird place on your system (outside of the buildmaster's :envvar:`PATH`), use this to tell :bb:chsrc:`SVNPoller` where to find it. The default value of `svn` will almost always be sufficient. ``revlinktmpl`` This parameter is deprecated in favour of specifying a global revlink option. This parameter allows a link to be provided for each revision (for example, to websvn or viewvc). These links appear anywhere changes are shown, such as on build or change pages. The proper form for this parameter is an URL with the portion that will substitute for a revision number replaced by ''%s''. For example, ``'http://myserver/websvn/revision.php?rev=%s'`` could be used to cause revision links to be created to a websvn repository viewer. ``cachepath`` If specified, this is a pathname of a cache file that :bb:chsrc:`SVNPoller` will use to store its state between restarts of the master. ``extra_args`` If specified, the extra arguments will be added to the svn command args. Several split file functions are available for common SVN repository layouts. For a poller that is only monitoring trunk, the default split file function is available explicitly as ``split_file_alwaystrunk``: .. 
code-block:: python from buildbot.plugins import changes, util c['change_source'] = changes.SVNPoller( repourl="svn://svn.twistedmatrix.com/svn/Twisted/trunk", split_file=util.svn.split_file_alwaystrunk) For repositories with the ``/trunk`` and :samp:`/branches/{BRANCH}` layout, ``split_file_branches`` will do the job: .. code-block:: python from buildbot.plugins import changes, util c['change_source'] = changes.SVNPoller( repourl="https://amanda.svn.sourceforge.net/svnroot/amanda/amanda", split_file=util.svn.split_file_branches) When using this splitter the poller will set the ``project`` attribute of any changes to the ``project`` attribute of the poller. For repositories with the :samp:`{PROJECT}/trunk` and :samp:`{PROJECT}/branches/{BRANCH}` layout, ``split_file_projects_branches`` will do the job: .. code-block:: python from buildbot.plugins import changes, util c['change_source'] = changes.SVNPoller( repourl="https://amanda.svn.sourceforge.net/svnroot/amanda/", split_file=util.svn.split_file_projects_branches) When using this splitter the poller will set the ``project`` attribute of any changes to the project determined by the splitter. The :bb:chsrc:`SVNPoller` is highly adaptable to various Subversion layouts. See :ref:`Customizing-SVNPoller` for details and some common scenarios. .. bb:chsrc:: BzrPoller .. _Bzr-Poller: Bzr Poller ~~~~~~~~~~ If you cannot insert a Bzr hook in the server, you can use the :bb:chsrc:`BzrPoller`. To use it, put :contrib-src:`master/contrib/bzr_buildbot.py` somewhere that your Buildbot configuration can import it. Even putting it in the same directory as the :file:`master.cfg` should work. Install the poller in the Buildbot configuration as with any other change source. Minimally, provide a URL that you want to poll (``bzr://``, ``bzr+ssh://``, or ``lp:``), making sure the Buildbot user has necessary privileges. .. 
code-block:: python # put bzr_buildbot.py file to the same directory as master.cfg from bzr_buildbot import BzrPoller c['change_source'] = BzrPoller( url='bzr://hostname/my_project', poll_interval=300) The ``BzrPoller`` parameters are: ``url`` The URL to poll. ``poll_interval`` The number of seconds to wait between polls. Defaults to 10 minutes. ``branch_name`` Any value to be used as the branch name. Defaults to None, or specify a string, or specify the constants from :contrib-src:`bzr_buildbot.py ` ``SHORT`` or ``FULL`` to get the short branch name or full branch address. ``blame_merge_author`` Normally, the user that commits the revision is the user that is responsible for the change. When run in a pqm (Patch Queue Manager, see https://launchpad.net/pqm) environment, the user that commits is the Patch Queue Manager, and the user that committed the merged, *parent* revision is responsible for the change. Set this value to ``True`` if this is pointed against a PQM-managed branch. .. bb:chsrc:: GitPoller .. _GitPoller: GitPoller ~~~~~~~~~ If you cannot take advantage of post-receive hooks as provided by :contrib-src:`master/contrib/git_buildbot.py` for example, then you can use the :bb:chsrc:`GitPoller`. The :bb:chsrc:`GitPoller` periodically fetches from a remote Git repository and processes any changes. It requires its own working directory for operation. The default should be adequate, but it can be overridden via the ``workdir`` property. .. note:: There can only be a single `GitPoller` pointed at any given repository. The :bb:chsrc:`GitPoller` requires Git-1.7 and later. It accepts the following arguments: ``repourl`` The git-url that describes the remote repository, e.g. ``git@example.com:foobaz/myrepo.git`` (see the :command:`git fetch` help for more info on git-url formats) ``branches`` One of the following: * a list of the branches to fetch. Non-existing branches are ignored. 
* ``True`` indicating that all branches should be fetched * a callable which takes a single argument. It should take a remote refspec (such as ``'refs/heads/master'``), and return a boolean indicating whether that branch should be fetched. ``branch`` Accepts a single branch name to fetch. Exists for backwards compatibility with old configurations. ``pollInterval`` Interval in seconds between polls, default is 10 minutes ``pollRandomDelayMin`` Minimum delay in seconds to wait before each poll, default is 0. This is useful in case you have a lot of pollers and you want to spread the polling load over a period of time. Setting it equal to the maximum delay will effectively delay all polls by a fixed amount of time. Must be less than or equal to the maximum delay. ``pollRandomDelayMax`` Maximum delay in seconds to wait before each poll, default is 0. This is useful in case you have a lot of pollers and you want to spread the polling load over a period of time. Must be less than the poll interval. ``pollAtLaunch`` Determines when the first poll occurs. True = immediately on launch, False = wait for one pollInterval (default). ``buildPushesWithNoCommits`` Determines if a push on a new branch or update of an already known branch with already known commits should trigger a build. This is useful in case you have build steps depending on the name of the branch and you use topic branches for development. When you merge your topic branch into "master" (for instance), a new build will be triggered. (defaults to False). ``gitbin`` Path to the Git binary, defaults to just ``'git'`` ``category`` Set the category to be used for the changes produced by the :bb:chsrc:`GitPoller`. This will then be set in any changes generated by the :bb:chsrc:`GitPoller`, and can be used in a Change Filter for triggering particular builders. ``project`` Set the name of the project to be used for the :bb:chsrc:`GitPoller`. 
This will then be set in any changes generated by the ``GitPoller``, and can be used in a Change Filter for triggering particular builders. ``usetimestamps`` Parse each revision's commit timestamp (default is ``True``), or ignore it in favor of the current time, so that recently processed commits appear together in the waterfall page. ``encoding`` Set the encoding to be used to parse author's name and commit message. Default encoding is ``'utf-8'``. This will not be applied to file names since Git will translate non-ascii file names to unreadable escape sequences. ``workdir`` The directory where the poller should keep its local repository. The default is :samp:`gitpoller_work`. If this is a relative path, it will be interpreted relative to the master's basedir. Multiple Git pollers can share the same directory. ``only_tags`` Determines if the GitPoller should poll for new tags in the git repository. ``sshPrivateKey`` (optional) Specifies private SSH key for git to use. This may be either a :ref:`Secret` or just a string. This option requires Git-2.3 or later. The master must either have the host in the known hosts file or the host key must be specified via the `sshHostKey` option. ``sshHostKey`` (optional) Specifies public host key to match when authenticating with SSH public key authentication. This may be either a :ref:`Secret` or just a string. `sshPrivateKey` must be specified in order to use this option. The host key must be in the form of `<key type> <base64-encoded string>`, e.g. `ssh-rsa AAAAB3N<...>FAaQ==`. ``sshKnownHosts`` (optional) Specifies the contents of the SSH known_hosts file to match when authenticating with SSH public key authentication. This may be either a :ref:`Secret` or just a string. `sshPrivateKey` must be specified in order to use this option. `sshHostKey` must not be specified in order to use this option. A configuration for the Git poller might look like this: .. 
code-block:: python from buildbot.plugins import changes c['change_source'] = changes.GitPoller(repourl='git@example.com:foobaz/myrepo.git', branches=['master', 'great_new_feature']) .. bb:chsrc:: HgPoller .. _HgPoller: HgPoller ~~~~~~~~ The :bb:chsrc:`HgPoller` periodically pulls a named branch from a remote Mercurial repository and processes any changes. It requires its own working directory for operation, which must be specified via the ``workdir`` property. The :bb:chsrc:`HgPoller` requires a working ``hg`` executable, and at least a read-only access to the repository it polls (possibly through ssh keys or by tweaking the ``hgrc`` of the system user Buildbot runs as). The :bb:chsrc:`HgPoller` will not transmit any change if there are several heads on the watched named branch. This is similar (although not identical) to the Mercurial executable behaviour. This exceptional condition is usually the result of a developer mistake, and usually does not last for long. It is reported in logs. If fixed by a later merge, the buildmaster administrator does not have anything to do: that merge will be transmitted, together with the intermediate ones. The :bb:chsrc:`HgPoller` accepts the following arguments: ``name`` The name of the poller. This must be unique, and defaults to the ``repourl``. ``repourl`` The url that describes the remote repository, e.g. ``http://hg.example.com/projects/myrepo``. Any url suitable for ``hg pull`` can be specified. ``bookmarks`` A list of the bookmarks to monitor. ``branches`` A list of the branches to monitor; defaults to ``['default']``. ``branch`` The desired branch to pull. Exists for backwards compatibility with old configurations. ``workdir`` The directory where the poller should keep its local repository. It is mandatory for now, although later releases may provide a meaningful default. It also serves to identify the poller in the buildmaster internal database. Changing it may result in re-processing all changes so far. 
Several :bb:chsrc:`HgPoller` instances may share the same ``workdir`` for mutualisation of the common history between two different branches, thus easing on local and remote system resources and bandwidth. If relative, the ``workdir`` will be interpreted from the master directory. ``pollInterval`` Interval in seconds between polls, default is 10 minutes ``pollRandomDelayMin`` Minimum delay in seconds to wait before each poll, default is 0. This is useful in case you have a lot of pollers and you want to spread the polling load over a period of time. Setting it equal to the maximum delay will effectively delay all polls by a fixed amount of time. Must be less than or equal to the maximum delay. ``pollRandomDelayMax`` Maximum delay in seconds to wait before each poll, default is 0. This is useful in case you have a lot of pollers and you want to spread the polling load over a period of time. Must be less than the poll interval. ``pollAtLaunch`` Determines when the first poll occurs. True = immediately on launch, False = wait for one pollInterval (default). ``hgbin`` Path to the Mercurial binary, defaults to just ``'hg'``. ``category`` Set the category to be used for the changes produced by the :bb:chsrc:`HgPoller`. This will then be set in any changes generated by the :bb:chsrc:`HgPoller`, and can be used in a Change Filter for triggering particular builders. ``project`` Set the name of the project to be used for the :bb:chsrc:`HgPoller`. This will then be set in any changes generated by the ``HgPoller``, and can be used in a Change Filter for triggering particular builders. ``usetimestamps`` Parse each revision's commit timestamp (default is ``True``), or ignore it in favor of the current time, so that recently processed commits appear together in the waterfall page. ``encoding`` Set encoding will be used to parse author's name and commit message. Default encoding is ``'utf-8'``. ``revlink`` A function that maps branch and revision to a valid url (e.g. 
hgweb), stored along with the change. This function must be a callable which takes two arguments, the branch and the revision. Defaults to lambda branch, revision: (u'') A configuration for the Mercurial poller might look like this: .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.HgPoller(repourl='http://hg.example.org/projects/myrepo', branch='great_new_feature', workdir='hg-myrepo') .. bb:chsrc:: GitHubPullrequestPoller .. _GitHubPullrequestPoller: GitHubPullrequestPoller ~~~~~~~~~~~~~~~~~~~~~~~ .. py:class:: buildbot.changes.github.GitHubPullrequestPoller This :bb:chsrc:`GitHubPullrequestPoller` periodically polls the GitHub API for new or updated pull requests. The `author`, `revision`, `revlink`, `branch` and `files` fields in the recorded changes are populated with information extracted from the pull request. This allows to filter for certain changes in files and create a blamelist based on the authors in the GitHub pull request. The :bb:chsrc:`GitHubPullrequestPoller` accepts the following arguments: ``owner`` The owner of the GitHub repository. This argument is required. ``repo`` The name of the GitHub repository. This argument is required. ``branches`` List of branches to accept as base branch (e.g. master). Defaults to `None` and accepts all branches as base. ``pollInterval`` Poll interval between polls in seconds. Default is 10 minutes. ``pollAtLaunch`` Whether to poll on startup of the buildbot master. Default is `False` and first poll will occur `pollInterval` seconds after the master start. ``category`` Set the category to be used for the changes produced by the :bb:chsrc:`GitHubPullrequestPoller`. This will then be set in any changes generated by the :bb:chsrc:`GitHubPullrequestPoller`, and can be used in a Change Filter for triggering particular builders. ``baseURL`` GitHub API endpoint. Default is ``https://api.github.com``. 
``pullrequest_filter`` A callable which takes a `dict` which contains the decoded `JSON` object of the GitHub pull request as argument. All fields specified by the GitHub API are accessible. If the callable returns `False` the pull request is ignored. Default is `True` which does not filter any pull requests. ``token`` A GitHub API token to execute all requests to the API authenticated. It is strongly recommended to use an API token since it increases GitHub API rate limits significantly. ``repository_type`` Set which type of repository link will be in the `repository` property. Possible values ``https``, ``svn``, ``git`` or ``ssh``. This link can then be used in a Source Step to checkout the source. ``magic_link`` Set to `True` if the changes should contain ``refs/pulls/<PR #>/merge`` in the `branch` property and a link to the base `repository` in the repository property. These properties can be used by the :bb:step:`GitHub` source to pull from the special branch in the base repository. Default is `False`. ``github_property_whitelist`` A list of ``fnmatch`` expressions which match against the flattened pull request information JSON prefixed with ``github``. For example ``github.number`` represents the pull request number. Available entries can be looked up in the GitHub API Documentation or by examining the data returned for a pull request by the API. .. bb:chsrc:: BitbucketPullrequestPoller .. _BitbucketPullrequestPoller: BitbucketPullrequestPoller ~~~~~~~~~~~~~~~~~~~~~~~~~~ .. py:class:: buildbot.changes.bitbucket.BitbucketPullrequestPoller This :bb:chsrc:`BitbucketPullrequestPoller` periodically polls Bitbucket for new or updated pull requests. It uses Bitbucket's powerful `Pull Request REST API`_ to gather the information needed. The :bb:chsrc:`BitbucketPullrequestPoller` accepts the following arguments: ``owner`` The owner of the Bitbucket repository. All Bitbucket URLs are of the form ``https://bitbucket.org/owner/slug/``. 
``slug`` The name of the Bitbucket repository. ``auth`` Authorization data tuple ``(username, password)`` (optional). If set, it will be used as authorization headers at the Bitbucket API. ``branch`` A single branch or a list of branches which should be processed. If it is ``None`` (the default) all pull requests are used. ``pollInterval`` Interval in seconds between polls, default is 10 minutes. ``pollAtLaunch`` Determines when the first poll occurs. ``True`` = immediately on launch, ``False`` = wait for one ``pollInterval`` (default). ``category`` Set the category to be used by the :bb:chsrc:`BitbucketPullrequestPoller`. This will then be set in any changes generated by the :bb:chsrc:`BitbucketPullrequestPoller`, and can be used in a Change Filter for triggering particular builders. ``project`` Set the name of the project to be used by the :bb:chsrc:`BitbucketPullrequestPoller`. This will then be set in any changes generated by the ``BitbucketPullrequestPoller``, and can be used in a Change Filter for triggering particular builders. ``pullrequest_filter`` A callable which takes one parameter, the decoded Python object of the pull request JSON. If it returns ``False``, the pull request is ignored. It can be used to define custom filters based on the content of the pull request. See the Bitbucket documentation for more information about the format of the response. By default, the filter always returns ``True``. ``usetimestamps`` Parse each revision's commit timestamp (default is ``True``), or ignore it in favor of the current time, so that recently processed commits appear together in the waterfall page. ``bitbucket_property_whitelist`` A list of ``fnmatch`` expressions which match against the flattened pull request information JSON prefixed with ``bitbucket``. For example ``bitbucket.id`` represents the pull request ID. Available entries can be looked up in the BitBucket API Documentation or by examining the data returned for a pull request by the API. 
``encoding`` This parameter is deprecated and has no effects. Author's name and commit message are always parsed in ``'utf-8'``. A minimal configuration for the Bitbucket pull request poller might look like this: .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.BitbucketPullrequestPoller( owner='myname', slug='myrepo', ) Here is a more complex configuration using a ``pullrequest_filter``. The pull request is only processed if at least 3 people have already approved it: .. code-block:: python def approve_filter(pr, threshold): approves = 0 for participant in pr['participants']: if participant['approved']: approves = approves + 1 if approves < threshold: return False return True from buildbot.plugins import changes c['change_source'] = changes.BitbucketPullrequestPoller( owner='myname', slug='myrepo', branch='mybranch', project='myproject', pullrequest_filter=lambda pr : approve_filter(pr,3), pollInterval=600, ) .. warning:: Anyone who can create pull requests for the Bitbucket repository can initiate a change, potentially causing the buildmaster to run arbitrary code. .. _Pull Request REST API: https://confluence.atlassian.com/display/BITBUCKET/pullrequests+Resource .. bb:chsrc:: GerritChangeSource .. _GerritChangeSource: GerritChangeSource ~~~~~~~~~~~~~~~~~~ .. py:class:: buildbot.changes.gerritchangesource.GerritChangeSource The :bb:chsrc:`GerritChangeSource` class connects to a Gerrit server by its SSH interface and uses its event source mechanism, `gerrit stream-events `_. Note that the Gerrit event stream is stateless and any events that occur while buildbot is not connected to Gerrit will be lost. See :bb:chsrc:`GerritEventLogPoller` for a stateful change source. The ``patchset-created`` and ``ref-updated`` events will be deduplicated, that is, if multiple events related to the same revision are received, only the first will be acted upon. 
This allows ``GerritChangeSource`` to be used together with :bb:chsrc:`GerritEventLogPoller`. The :bb:chsrc:`GerritChangeSource` accepts the following arguments: ``gerritserver`` The dns or ip that host the Gerrit ssh server ``gerritport`` The port of the Gerrit ssh server ``username`` The username to use to connect to Gerrit ``identity_file`` Ssh identity file to for authentication (optional). Pay attention to the `ssh passphrase` ``handled_events`` Event to be handled (optional). By default processes `patchset-created` and `ref-updated` ``get_files`` Populate the `files` attribute of emitted changes (default `False`). Buildbot will run an extra query command for each handled event to determine the changed files. ``debug`` Print Gerrit event in the log (default `False`). This allows to debug event content, but will eventually fill your logs with useless Gerrit event logs. By default this class adds a change to the Buildbot system for each of the following events: ``patchset-created`` A change is proposed for review. Automatic checks like :file:`checkpatch.pl` can be automatically triggered. Beware of what kind of automatic task you trigger. At this point, no trusted human has reviewed the code, and a patch could be specially crafted by an attacker to compromise your workers. ``ref-updated`` A change has been merged into the repository. Typically, this kind of event can lead to a complete rebuild of the project, and upload binaries to an incremental build results server. But you can specify how to handle events: * Any event with change and patchSet will be processed by universal collector by default. * In case you've specified processing function for the given kind of events, all events of this kind will be processed only by this function, bypassing universal collector. An example: .. 
code-block:: python from buildbot.plugins import changes class MyGerritChangeSource(changes.GerritChangeSource): """Custom GerritChangeSource """ def eventReceived_patchset_created(self, properties, event): """Handler events without properties """ properties = {} self.addChangeFromEvent(properties, event) This class will populate the property list of the triggered build with the info received from Gerrit server in JSON format. .. warning:: If you selected :class:`GerritChangeSource`, you **must** use :bb:step:`Gerrit` source step: the ``branch`` property of the change will be :samp:`{target_branch}/{change_id}` and such a ref cannot be resolved, so the :bb:step:`Git` source step would fail. .. index:: Properties; from GerritChangeSource In case of ``patchset-created`` event, these properties will be: ``event.change.branch`` Branch of the Change ``event.change.id`` Change's ID in the Gerrit system (the ChangeId: in commit comments) ``event.change.number`` Change's number in Gerrit system ``event.change.owner.email`` Change's owner email (owner is first uploader) ``event.change.owner.name`` Change's owner name ``event.change.project`` Project of the Change ``event.change.subject`` Change's subject ``event.change.url`` URL of the Change in the Gerrit's web interface ``event.patchSet.number`` Patchset's version number ``event.patchSet.ref`` Patchset's Gerrit "virtual branch" ``event.patchSet.revision`` Patchset's Git commit ID ``event.patchSet.uploader.email`` Patchset uploader's email (owner is first uploader) ``event.patchSet.uploader.name`` Patchset uploader's name (owner is first uploader) ``event.type`` Event type (``patchset-created``) ``event.uploader.email`` Patchset uploader's email ``event.uploader.name`` Patchset uploader's name In case of ``ref-updated`` event, these properties will be: ``event.refUpdate.newRev`` New Git commit ID (after merger) ``event.refUpdate.oldRev`` Previous Git commit ID (before merger) ``event.refUpdate.project`` Project that was 
updated ``event.refUpdate.refName`` Branch that was updated ``event.submitter.email`` Submitter's email (merger responsible) ``event.submitter.name`` Submitter's name (merger responsible) ``event.type`` Event type (``ref-updated``) A configuration for this source might look like: .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.GerritChangeSource( "gerrit.example.com", "gerrit_user", handled_events=["patchset-created", "change-merged"]) See :file:`master/docs/examples/git_gerrit.cfg` or :file:`master/docs/examples/repo_gerrit.cfg` in the Buildbot distribution for a full example setup of Git+Gerrit or Repo+Gerrit of :bb:chsrc:`GerritChangeSource`. .. bb:chsrc:: GerritEventLogPoller .. _GerritEventLogPoller: GerritEventLogPoller ~~~~~~~~~~~~~~~~~~~~~ .. py:class:: buildbot.changes.gerritchangesource.GerritEventLogPoller The :bb:chsrc:`GerritEventLogPoller` class is similar to :bb:chsrc:`GerritChangeSource` but connects to the Gerrit server by its HTTP interface and uses the events-log_ plugin. Note that the decision of whether to use :bb:chsrc:`GerritEventLogPoller` and :bb:chsrc:`GerritChangeSource` will depend on your needs. The trade off is: 1. :bb:chsrc:`GerritChangeSource` is low-overhead and reacts instantaneously to events, but a broken connection to Gerrit will lead to missed changes 2. :bb:chsrc:`GerritEventLogPoller` is subject to polling overhead and reacts only at its polling rate, but is robust to a broken connection to Gerrit and missed changes will be discovered when a connection is restored. You can use both at the same time to get the advantages of each. They will coordinate through the database to avoid duplicate changes generated for buildbot. .. note:: The :bb:chsrc:`GerritEventLogPoller` requires either the ``txrequest`` or the ``treq`` package. 
The :bb:chsrc:`GerritEventLogPoller` accepts the following arguments: ``baseURL`` The HTTP url where to find Gerrit. If the URL of the events-log endpoint for your server is ``https://example.com/a/plugins/events-log/events/`` then the ``baseURL`` is ``https://example.com/a``. Ensure that ``/a`` is included. ``auth`` A request's authentication configuration. If Gerrit is configured with ``BasicAuth``, then it shall be ``('login', 'password')``. If Gerrit is configured with ``DigestAuth``, then it shall be ``requests.auth.HTTPDigestAuth('login', 'password')`` from the requests module. However, note that usage of ``requests.auth.HTTPDigestAuth`` is incompatible with ``treq``. ``handled_events`` Event to be handled (optional). By default processes `patchset-created` and `ref-updated`. ``pollInterval`` Interval in seconds between polls (default is 30 sec). ``pollAtLaunch`` Determines when the first poll occurs. True = immediately on launch (default), False = wait for one pollInterval. ``gitBaseURL`` The git URL where Gerrit is accessible via git+ssh protocol. ``get_files`` Populate the `files` attribute of emitted changes (default `False`). Buildbot will run an extra query command for each handled event to determine the changed files. ``debug`` Print Gerrit event in the log (default `False`). This allows to debug event content, but will eventually fill your logs with useless Gerrit event logs. The same customization can be done as :bb:chsrc:`GerritChangeSource` for handling special events. .. _events-log: https://gerrit.googlesource.com/plugins/events-log/ GerritChangeFilter ~~~~~~~~~~~~~~~~~~ .. py:class:: buildbot.changes.gerritchangesource.GerritChangeFilter :class:`GerritChangeFilter` is a ready to use :class:`ChangeFilter` you can pass to :bb:sched:`AnyBranchScheduler` in order to filter changes, to create pre-commit builders or post-commit schedulers. 
It has the same API as :ref:`Change Filter <Change-Filters>`, except that it adds an additional ``eventtype`` filter, which (like the other filters) can be specified as a value, list, regular expression, or callable. An example follows:
Each :class:`StatusNotifier` will map the :attr:`who` attribute into something appropriate for their particular means of communication: an email address, an IRC handle, etc. This ``who`` attribute is also parsed and stored into Buildbot's database (see :ref:`User-Objects`). Currently, only ``who`` attributes in Changes from ``git`` repositories are translated into user objects, but in the future all incoming Changes will have their ``who`` parsed and stored. .. _Change-Attr-Files: Files ~~~~~ It also has a list of :attr:`files`, which are just the tree-relative filenames of any files that were added, deleted, or modified for this :class:`Change`. These filenames are checked by the :func:`fileIsImportant` function of a scheduler to decide whether it should trigger a new build or not. For example, the scheduler could use the following function to only run a build if a C file was checked in: .. code-block:: python def has_C_files(change): for name in change.files: if name.endswith(".c"): return True return False Certain :class:`BuildStep`\s can also use the list of changed files to run a more targeted series of tests, e.g. the ``python_twisted.Trial`` step can run just the unit tests that provide coverage for the modified .py files instead of running the full test suite. .. _Change-Attr-Comments: Comments ~~~~~~~~ The Change also has a :attr:`comments` attribute, which is a string containing any checkin comments. .. _Change-Attr-Project: Project ~~~~~~~ The :attr:`project` attribute of a change or source stamp describes the project to which it corresponds, as a short human-readable string. This is useful in cases where multiple independent projects are built on the same buildmaster. In such cases, it can be used to control which builds are scheduled for a given commit, and to limit status displays to only one project. .. _Change-Attr-Repository: Repository ~~~~~~~~~~ This attribute specifies the repository in which this change occurred. 
As described in the :ref:`source stamps <Source-Stamps>` section, multiple repositories may contain the same codebase.
(That is, the branch name and the filename inhabit unrelated namespaces.) For SVN, branches are expressed as subdirectories of the repository, so the file's ``repourl`` is a combination of some base URL, the branch name, and the filename within the branch. (In a sense, the branch name and the filename inhabit the same namespace.) Darcs branches are subdirectories of a base URL just like SVN. `CVS` branch='warner-newfeature', files=['src/foo.c'] `SVN` branch='branches/warner-newfeature', files=['src/foo.c'] `Darcs` branch='warner-newfeature', files=['src/foo.c'] `Mercurial` branch='warner-newfeature', files=['src/foo.c'] `Git` branch='warner-newfeature', files=['src/foo.c'] `Monotone` branch='warner-newfeature', files=['src/foo.c'] Change Properties ~~~~~~~~~~~~~~~~~ A Change may have one or more properties attached to it, usually specified through the Force Build form or :bb:cmdline:`sendchange`. Properties are discussed in detail in the :ref:`Build-Properties` section. buildbot-3.4.0/master/docs/manual/configuration/configurators.rst000066400000000000000000000036601413250514000253100ustar00rootroot00000000000000.. bb:cfg:: configurators Configurators ------------- For advanced users or plugin writers, the ``configurators`` key is available and holds a list of :py:class:`buildbot.interfaces.IConfigurator`. The configurators will run after the ``master.cfg`` has been processed, and will modify the config dictionary. Configurator implementers should make sure that they are interoperable with each other, which means carefully modifying the config to avoid overriding a setting already made by the user or another configurator. Configurators are run (thus prioritized) in the order of the ``configurators`` list. .. bb:configurator:: JanitorConfigurator JanitorConfigurator ~~~~~~~~~~~~~~~~~~~ Buildbot stores historical information in its database. 
In a large installation, these can quickly consume disk space, yet developers never consult this historical information in many cases. :bb:configurator:`JanitorConfigurator` creates a builder and :bb:sched:`Nightly` scheduler which will regularly remove old information. At the moment, it only supports cleaning of logs, but it will contain more features as we implement them. :: from buildbot.plugins import util from datetime import timedelta # configure a janitor which will delete all logs older than one month, # and will run on sundays at noon c['configurators'] = [util.JanitorConfigurator( logHorizon=timedelta(weeks=4), hour=12, dayOfWeek=6 )] Parameters for :bb:configurator:`JanitorConfigurator` are: ``logHorizon`` A ``timedelta`` object describing the minimum time for which the log data should be maintained. ``hour``, ``dayOfWeek``, ... Arguments given to the :bb:sched:`Nightly` scheduler which is backing the :bb:configurator:`JanitorConfigurator`. Determines when the cleanup will be done. With this, you can configure it daily, weekly or even hourly if you wish. You probably want to schedule it when Buildbot is less loaded. buildbot-3.4.0/master/docs/manual/configuration/dbconfig.rst000066400000000000000000000031271413250514000241740ustar00rootroot00000000000000.. bb:cfg:: dbconfig DbConfig -------- DbConfig is a utility for ``master.cfg`` to get easy-to-use key-value storage in the Buildbot database. DbConfig can get and store any ``json``-able object to the db for use by other masters or separate UI plugins to edit them. The design is intentionally simplistic, as the focus is on ease of use rather than efficiency. A separate db connection is created each time ``get()`` or ``set()`` is called. Example: .. 
c['workers'] = [ worker.Worker(worker['name'], worker['passwd'], properties=worker.get('properties')) for worker in workers ]
See the :ref:`Database-Server` section for more details. Override this configuration with the :bb:cfg:`db_url` parameter. Buildbot accepts a database configuration in a dictionary named ``db``. All keys are optional: .. code-block:: python c['db'] = { 'db_url' : 'sqlite:///state.sqlite', } The ``db_url`` key indicates the database engine to use. The format of this parameter is completely documented at http://www.sqlalchemy.org/docs/dialects/, but is generally of the form: .. code-block:: python "driver://[username:password@]host:port/database[?args]" This parameter can be specified directly in the configuration dictionary, as ``c['db_url']``, although this method is deprecated. The following sections give additional information for particular database backends: .. index:: SQLite SQLite ++++++ For sqlite databases, since there is no host and port, relative paths are specified with ``sqlite:///`` and absolute paths with ``sqlite:////``. For example: .. code-block:: python c['db_url'] = "sqlite:///state.sqlite" SQLite requires no special configuration. .. index:: MySQL MySQL +++++ .. code-block:: python c['db_url'] = "mysql://username:password@example.com/database_name?max_idle=300" The ``max_idle`` argument for MySQL connections is unique to Buildbot and should be set to something less than the ``wait_timeout`` configured for your server. This controls the SQLAlchemy ``pool_recycle`` parameter, which defaults to no timeout. Setting this parameter ensures that connections are closed and re-opened after the configured amount of idle time. If you see errors such as ``_mysql_exceptions.OperationalError: (2006, 'MySQL server has gone away')``, this means your ``max_idle`` setting is probably too high. ``show global variables like 'wait_timeout';`` will show what the currently configured ``wait_timeout`` is on your MySQL server. Buildbot requires ``use_unique=True`` and ``charset=utf8``, and will add them automatically, so they do not need to be specified in ``db_url``. 
PostgreSQL requires no special configuration.
code-block:: python c['mq'] = { 'type' : 'wamp', 'router_url': 'ws://localhost:8080/ws', 'realm': 'realm1', # valid are: none, critical, error, warn, info, debug, trace 'wamp_debug_level' : 'error' } This is a MQ implementation using the `wamp `_ protocol. This implementation uses `Python Autobahn `_ wamp client library, and is fully asynchronous (no use of threads). To use this implementation, you need a wamp router like `Crossbar `_. Please refer to Crossbar documentation for more details, but the default Crossbar setup will just work with Buildbot, provided you use the example ``mq`` configuration above, and start Crossbar with: .. code-block:: bash # of course, you should work in a virtualenv... pip install crossbar crossbar init crossbar start The implementation does not yet support wamp authentication. This MQ allows buildbot to run in multi-master mode. Note that this implementation also does not support message persistence across a restart of the master. For example, if a change is received, but the master shuts down before the schedulers can create build requests for it, then those schedulers will not be notified of the change when the master starts again. ``router_url`` (mandatory): points to your router websocket url. Buildbot is only supporting wamp over websocket, which is a sub-protocol of http. SSL is supported using ``wss://`` instead of ``ws://``. ``realm`` (optional, defaults to ``buildbot``): defines the wamp realm to use for your buildbot messages. ``wamp_debug_level`` (optional, defaults to ``error``): defines the log level of autobahn. You must use a router with very reliable connection to the master. If for some reason, the wamp connection is lost, then the master will stop, and should be restarted via a process manager. .. bb:cfg:: multiMaster .. _Multi-master-mode: Multi-master mode ~~~~~~~~~~~~~~~~~ See :ref:`Multimaster` for details on the multi-master mode in Buildbot Nine. 
By default, Buildbot makes coherency checks that prevent typos in your ``master.cfg``. It makes sure schedulers are not referencing unknown builders, and enforces there is at least one builder. In the case of an asymmetric multimaster, those coherency checks can be harmful and prevent you to implement what you want. For example, you might want to have one master dedicated to the UI, so that a big load generated by builds will not impact page load times. To enable multi-master mode in this configuration, you will need to set the :bb:cfg:`multiMaster` option so that buildbot doesn't warn about missing schedulers or builders. .. code-block:: python # Enable multiMaster mode; disables warnings about unknown builders and # schedulers c['multiMaster'] = True c['db'] = { 'db_url' : 'mysql://...', } c['mq'] = { # Need to enable multimaster aware mq. Wamp is the only option for now. 'type' : 'wamp', 'router_url': 'ws://localhost:8080', 'realm': 'realm1', # valid are: none, critical, error, warn, info, debug, trace 'wamp_debug_level' : 'error' } .. bb:cfg:: buildbotURL .. bb:cfg:: titleURL .. bb:cfg:: title Site Definitions ~~~~~~~~~~~~~~~~ Three basic settings describe the buildmaster in status reports: .. code-block:: python c['title'] = "Buildbot" c['titleURL'] = "http://buildbot.sourceforge.net/" :bb:cfg:`title` is a short string that will appear at the top of this buildbot installation's home page (linked to the :bb:cfg:`titleURL`). :bb:cfg:`titleURL` is a URL string that must end with a slash (``/``). HTML status displays will show ``title`` as a link to :bb:cfg:`titleURL`. This URL is often used to provide a link from buildbot HTML pages to your project's home page. The :bb:cfg:`buildbotURL` string should point to the location where the buildbot's internal web server is visible. This URL must end with a slash (``/``). 
The default is 'gz'; the other valid options are 'raw' (no compression) and 'lz4' (which requires the lz4 package).
Don't set this value too high, as the tail of the log is kept in memory.
This number should generally be similar to the value of ``BuildRequests``.
In this version, object IDs are not looked up often during runtime, so a relatively low value such as 10 is fine. ``usdicts`` The number of rows from the ``users`` table to cache in memory. Note that for a given user there will be a row for each attribute that user has. c['buildCacheSize'] = 15 .. bb:cfg:: collapseRequests .. index:: Builds; merging Merging Build Requests ~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: python c['collapseRequests'] = True This is a global default value for builders' :bb:cfg:`collapseRequests` parameter, and controls the merging of build requests. This parameter can be overridden on a per-builder basis. See :ref:`Collapsing-Build-Requests` for the allowed values for this parameter. .. index:: Builders; priority .. bb:cfg:: prioritizeBuilders .. _Prioritizing-Builders: Prioritizing Builders ~~~~~~~~~~~~~~~~~~~~~ .. code-block:: python def prioritizeBuilders(buildmaster, builders): ... c['prioritizeBuilders'] = prioritizeBuilders By default, buildbot will attempt to start builds on builders in order, beginning with the builder with the oldest pending request. Customize this behavior with the :bb:cfg:`prioritizeBuilders` configuration key, which takes a callable. See :ref:`Builder-Priority-Functions` for details on this callable. This parameter controls the order that the buildmaster can start builds, and is useful in situations where there is resource contention between builders, e.g., for a test database. It does not affect the order in which a builder processes the build requests in its queue. For that purpose, see :ref:`Prioritizing-Builds`. .. bb:cfg:: protocols .. _Setting-the-PB-Port-for-Workers: Setting the PB Port for Workers ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: python c['protocols'] = {"pb": {"port": 10000}} The buildmaster will listen on a TCP port of your choosing for connections from workers. It can also use this port for connections from remote Change Sources, status clients, and debug tools. 
Please note that ``:`` characters in IPv6 addresses must be escaped with ``\``, as must ``:`` and ``\`` characters appearing in paths.
It is probably only useful for buildbot developers. See :ref:`documentation on Manhole implementations ` for available authentication and connection methods. The ``manhole`` configuration key accepts a single instance of a Manhole class. For example: .. code-block:: python from buildbot import manhole c['manhole'] = manhole.PasswordManhole("tcp:1234:interface=127.0.0.1", "admin", "passwd", ssh_hostkey_dir="data/ssh_host_keys") .. bb:cfg:: metrics Metrics Options ~~~~~~~~~~~~~~~ .. code-block:: python c['metrics'] = dict(log_interval=10, periodic_interval=10) :bb:cfg:`metrics` can be a dictionary that configures various aspects of the metrics subsystem. If :bb:cfg:`metrics` is ``None``, then metrics collection, logging and reporting will be disabled. ``log_interval`` determines how often metrics should be logged to twistd.log. It defaults to 60s. If set to 0 or ``None``, then logging of metrics will be disabled. This value can be changed via a reconfig. ``periodic_interval`` determines how often various non-event based metrics are collected, such as memory usage, uncollectable garbage, reactor delay. This defaults to 10s. If set to 0 or ``None``, then periodic collection of this data is disabled. This value can also be changed via a reconfig. Read more about metrics in the :ref:`Metrics` section in the developer documentation. .. bb:cfg:: stats-service Statistics Service ~~~~~~~~~~~~~~~~~~ The Statistics Service (stats service for short) supports the collection of arbitrary data from within a running Buildbot instance and the export to a number of storage backends. Currently, only `InfluxDB`_ is supported as a storage backend. Also, InfluxDB (or any other storage backend) is not a mandatory dependency. Buildbot can run without it, although :class:`StatsService` will be of no use in such a case. At present, :class:`StatsService` can keep track of build properties, build times (start, end, duration) and arbitrary data produced inside Buildbot (more on this later). 
Example usage: .. code-block:: python captures = [stats.CaptureProperty('Builder1', 'tree-size-KiB'), stats.CaptureBuildDuration('Builder2')] c['services'] = [] c['services'].append(stats.StatsService( storage_backends=[ stats.InfluxStorageService('localhost', 8086, 'root', 'root', 'test', captures) ], name="StatsService")) The ``services`` configuration value should be initialized as a list and a :class:`StatsService` instance should be appended to it as shown in the example above. Statistics Service ++++++++++++++++++ .. py:class:: buildbot.statistics.stats_service.StatsService :noindex: This is the main class for statistics services. It is initialized in the master configuration as shown in the example above. It takes two arguments: ``storage_backends`` A list of storage backends (see :ref:`storage-backends`). In the example above, ``stats.InfluxStorageService`` is an instance of a storage backend. Each storage backend is an instance of subclasses of :py:class:`statsStorageBase`. ``name`` The name of this service. :py:meth:`yieldMetricsValue`: This method can be used to send arbitrary data for storage. (See :ref:`yieldMetricsValue` for more information.) .. _capture-classes: Capture Classes +++++++++++++++ .. py:class:: buildbot.statistics.capture.CaptureProperty :noindex: Instance of this class declares which properties must be captured and sent to the :ref:`storage-backends`. It takes the following arguments: ``builder_name`` The name of builder in which the property is recorded. ``property_name`` The name of property needed to be recorded as a statistic. ``callback=None`` (Optional) A custom callback function for this class. This callback function should take in two arguments - `build_properties` (dict) and `property_name` (str) and return a string that will be sent for storage in the storage backends. ``regex=False`` If this is set to ``True``, then the property name can be a regular expression. 
All properties matching this regular expression will be sent for storage. .. py:class:: buildbot.statistics.capture.CapturePropertyAllBuilders :noindex: Instance of this class declares which properties must be captured on all builders and sent to the :ref:`storage-backends`. It takes the following arguments: ``property_name`` The name of property needed to be recorded as a statistic. ``callback=None`` (Optional) A custom callback function for this class. This callback function should take in two arguments - `build_properties` (dict) and `property_name` (str) and return a string that will be sent for storage in the storage backends. ``regex=False`` If this is set to ``True``, then the property name can be a regular expression. All properties matching this regular expression will be sent for storage. .. py:class:: buildbot.statistics.capture.CaptureBuildStartTime :noindex: Instance of this class declares which builders' start times are to be captured and sent to :ref:`storage-backends`. It takes the following arguments: ``builder_name`` The name of builder whose times are to be recorded. ``callback=None`` (Optional) A custom callback function for this class. This callback function should take in a Python datetime object and return a string that will be sent for storage in the storage backends. .. py:class:: buildbot.statistics.capture.CaptureBuildStartTimeAllBuilders :noindex: Instance of this class declares start times of all builders to be captured and sent to :ref:`storage-backends`. It takes the following arguments: ``callback=None`` (Optional) A custom callback function for this class. This callback function should take in a Python datetime object and return a string that will be sent for storage in the storage backends. .. py:class:: buildbot.statistics.capture.CaptureBuildEndTime :noindex: Exactly like :py:class:`CaptureBuildStartTime` except it declares the builders whose end time is to be recorded. The arguments are same as :py:class:`CaptureBuildStartTime`. 
.. py:class:: buildbot.statistics.capture.CaptureBuildEndTimeAllBuilders :noindex: Exactly like :py:class:`CaptureBuildStartTimeAllBuilders` except it declares all builders' end time to be recorded. The arguments are same as :py:class:`CaptureBuildStartTimeAllBuilders`. .. py:class:: buildbot.statistics.capture.CaptureBuildDuration :noindex: Instance of this class declares the builders whose build durations are to be recorded. It takes the following arguments: ``builder_name`` The name of builder whose times are to be recorded. ``report_in='seconds'`` Can be one of three: ``'seconds'``, ``'minutes'``, or ``'hours'``. This is the units in which the build time will be reported. ``callback=None`` (Optional) A custom callback function for this class. This callback function should take in two Python datetime objects - a ``start_time`` and an ``end_time`` and return a string that will be sent for storage in the storage backends. .. py:class:: buildbot.statistics.capture.CaptureBuildDurationAllBuilders :noindex: Instance of this class declares build durations to be recorded for all builders. It takes the following arguments: ``report_in='seconds'`` Can be one of three: ``'seconds'``, ``'minutes'``, or ``'hours'``. This is the units in which the build time will be reported. ``callback=None`` (Optional) A custom callback function for this class. This callback function should take in two Python datetime objects - a ``start_time`` and an ``end_time`` and return a string that will be sent for storage in the storage backends. .. py:class:: buildbot.statistics.capture.CaptureData :noindex: Instance of this capture class is for capturing arbitrary data that is not stored as build-data. Needs to be used in combination with ``yieldMetricsValue`` (see :ref:`yieldMetricsValue`). Takes the following arguments: ``data_name`` The name of data to be captured. Same as in ``yieldMetricsValue``. ``builder_name`` The name of builder whose times are to be recorded. 
``callback=None`` The callback function for this class. This callback receives the data sent to ``yieldMetricsValue`` as ``post_data`` (see :ref:`yieldMetricsValue`). It must return a string that is to be sent to the storage backends for storage. .. py:class:: buildbot.statistics.capture.CaptureDataAllBuilders :noindex: Instance of this capture class for capturing arbitrary data that is not stored as build-data on all builders. Needs to be used in combination with ``yieldMetricsValue`` (see :ref:`yieldMetricsValue`). Takes the following arguments: ``data_name`` The name of data to be captured. Same as in ``yieldMetricsValue``. ``callback=None`` The callback function for this class. This callback receives the data sent to ``yieldMetricsValue`` as ``post_data`` (see :ref:`yieldMetricsValue`). It must return a string that is to be sent to the storage backends for storage. .. _yieldMetricsValue: Using ``StatsService.yieldMetricsValue`` ++++++++++++++++++++++++++++++++++++++++ Advanced users can modify ``BuildSteps`` to use ``StatsService.yieldMetricsValue`` which will send arbitrary data for storage to the ``StatsService``. It takes the following arguments: ``data_name`` The name of the data being sent for storage. ``post_data`` A dictionary of key-value pairs that is sent for storage. The keys will act as columns in a database and the value is stored under that column. ``buildid`` The integer build id of the current build. Obtainable in all ``BuildSteps``. Along with using ``yieldMetricsValue``, the user will also need to use the ``CaptureData`` capture class. As an example, we can add the following to a build step: .. code-block:: python yieldMetricsValue('test_data_name', {'some_data': 'some_value'}, buildid) Then, we can add in the master configuration a capture class like this: .. 
code-block:: python captures = [CaptureBuildData('test_data_name', 'Builder1')] Pass this ``captures`` list to a storage backend (as shown in the example at the top of this section) for capturing this data. .. _storage-backends: Storage Backends ++++++++++++++++ Storage backends are responsible for storing any statistics data sent to them. A storage backend will generally be some sort of a database-server running on a machine. (*Note*: This machine may be different from the one running :class:`BuildMaster`) Currently, only `InfluxDB`_ is supported as a storage backend. .. py:class:: buildbot.statistics.storage_backends.influxdb_client.InfluxStorageService :noindex: This class is a Buildbot client to the InfluxDB storage backend. `InfluxDB`_ is a distributed, time series database that employs a key-value pair storage system. It requires the following arguments: ``url`` The URL where the service is running. ``port`` The port on which the service is listening. ``user`` Username of an InfluxDB user. ``password`` Password for ``user``. ``db`` The name of database to be used. ``captures`` A list of objects of :ref:`capture-classes`. This tells which statistics are to be stored in this storage backend. ``name=None`` (Optional) The name of this storage backend. .. bb:cfg:: secretsProviders ``secretsProviders`` ~~~~~~~~~~~~~~~~~~~~ See :ref:`secretManagement` for details on secret concepts. Example usage: .. code-block:: python c['secretsProviders'] = [ .. ] ``secretsProviders`` is a list of secret storage providers. See :ref:`secretManagement` to configure a secret storage provider. .. bb:cfg:: buildbotNetUsageData BuildbotNetUsageData ~~~~~~~~~~~~~~~~~~~~ Since buildbot 0.9.0, buildbot has a simple feature which sends usage analysis info to buildbot.net. This is very important for buildbot developers to understand how the community is using the tools. This allows developers to better prioritize issues, and understand what plugins are actually being used. 
This will also be a tool to decide whether to keep support for very old tools. For example buildbot contains support for the venerable CVS, but we have no information whether it actually works beyond the unit tests. We rely on the community to test and report issues with the old features. With BuildbotNetUsageData, we can know exactly what combination of plugins are working together, how much people are customizing plugins, what versions of the main dependencies people run. We take your privacy very seriously. BuildbotNetUsageData will never send information specific to your Code or Intellectual Property. No repository url, shell command values, host names, ip address or custom class names. If it does, then this is a bug, please report. We still need to track unique number for installation. This is done via doing a sha1 hash of master's hostname, installation path and fqdn. Using a secure hash means there is no way of knowing hostname, path and fqdn given the hash, but still there is a different hash for each master. You can see exactly what is sent in the master's twisted.log. Usage data is sent every time the master is started. BuildbotNetUsageData can be configured with 4 values: * ``c['buildbotNetUsageData'] = None`` disables the feature * ``c['buildbotNetUsageData'] = 'basic'`` sends the basic information to buildbot including: * versions of buildbot, python and twisted * platform information (CPU, OS, distribution, python flavor (i.e CPython vs PyPy)) * mq and database type (mysql or sqlite?) * www plugins usage * Plugins usages: This counts the number of time each class of buildbot is used in your configuration. This counts workers, builders, steps, schedulers, change sources. If the plugin is subclassed, then it will be prefixed with a `>` example of basic report (for the metabuildbot): .. 
code-block:: javascript { 'versions': { 'Python': '2.7.6', 'Twisted': '15.5.0', 'Buildbot': '0.9.0rc2-176-g5fa9dbf' }, 'platform': { 'machine': 'x86_64', 'python_implementation': 'CPython', 'version': '#140-Ubuntu SMP Mon Jul', 'processor': 'x86_64', 'distro:': ('Ubuntu', '14.04', 'trusty') }, 'db': 'sqlite', 'mq': 'simple', 'plugins': { 'buildbot.schedulers.forcesched.ForceScheduler': 2, 'buildbot.schedulers.triggerable.Triggerable': 1, 'buildbot.config.BuilderConfig': 4, 'buildbot.schedulers.basic.AnyBranchScheduler': 2, 'buildbot.steps.source.git.Git': 4, '>>buildbot.steps.trigger.Trigger': 2, '>>>buildbot.worker.base.Worker': 4, 'buildbot.reporters.irc.IRC': 1}, 'www_plugins': ['buildbot_travis', 'waterfall_view'] } * ``c['buildbotNetUsageData'] = 'full'`` sends the basic information plus additional information: * configuration of each builders: how the steps are arranged together. for example: .. code-block:: javascript { 'builders': [ ['buildbot.steps.source.git.Git', '>>>buildbot.process.buildstep.BuildStep'], ['buildbot.steps.source.git.Git', '>>buildbot.steps.trigger.Trigger'], ['buildbot.steps.source.git.Git', '>>>buildbot.process.buildstep.BuildStep'], ['buildbot.steps.source.git.Git', '>>buildbot.steps.trigger.Trigger'] ] } * ``c['buildbotNetUsageData'] = myCustomFunction`` declares a callback to use to specify exactly what to send. This custom function takes the generated data from full report in the form of a dictionary, and returns a customized report as a jsonable dictionary. You can use this to filter any information you don't want to disclose. You can also use a custom http_proxy environment variable in order to not send any data while developing your callback. .. bb:cfg:: user_managers .. _Users-Options: Users Options ~~~~~~~~~~~~~ .. 
code-block:: python from buildbot.plugins import util c['user_managers'] = [] c['user_managers'].append(util.CommandlineUserManager(username="user", passwd="userpw", port=9990)) :bb:cfg:`user_managers` contains a list of ways to manually manage User Objects within Buildbot (see :ref:`User-Objects`). Currently implemented is a commandline tool `buildbot user`, described at length in :bb:cmdline:`user`. In the future, a web client will also be able to manage User Objects and their attributes. As shown above, to enable the `buildbot user` tool, you must initialize a `CommandlineUserManager` instance in your `master.cfg`. `CommandlineUserManager` instances require the following arguments: ``username`` This is the `username` that will be registered on the PB connection and need to be used when calling `buildbot user`. ``passwd`` This is the `passwd` that will be registered on the PB connection and need to be used when calling `buildbot user`. ``port`` The PB connection `port` must be different than `c['protocols']['pb']['port']` and be specified when calling `buildbot user` .. bb:cfg:: validation .. _Input-Validation: Input Validation ~~~~~~~~~~~~~~~~ .. code-block:: python import re c['validation'] = { 'branch' : re.compile(r'^[\w.+/~-]*$'), 'revision' : re.compile(r'^[ \w\.\-\/]*$'), 'property_name' : re.compile(r'^[\w\.\-\/\~:]*$'), 'property_value' : re.compile(r'^[\w\.\-\/\~:]*$'), } This option configures the validation applied to user inputs of various types. This validation is important since these values are often included in command-line arguments executed on workers. Allowing arbitrary input from untrusted users may raise security concerns. The keys describe the type of input validated; the values are compiled regular expressions against which the input will be matched. The defaults for each type of input are those given in the example, above. .. 
bb:cfg:: revlink Revision Links ~~~~~~~~~~~~~~ The :bb:cfg:`revlink` parameter is used to create links from revision IDs in the web status to a web-view of your source control system. The parameter's value must be a callable. By default, Buildbot is configured to generate revlinks for a number of open source hosting platforms (https://github.com, https://sourceforge.net and https://bitbucket.org). The callable takes the revision id and repository argument, and should return a URL to the revision. Note that the revision id may not always be in the form you expect, so code defensively. In particular, a revision of "??" may be supplied when no other information is available. Note that :class:`SourceStamp`\s that are not created from version-control changes (e.g., those created by a :bb:sched:`Nightly` or :bb:sched:`Periodic` scheduler) may have an empty repository string if the repository is not known to the scheduler. Revision Link Helpers +++++++++++++++++++++ Buildbot provides two helpers for generating revision links. :class:`buildbot.revlinks.RevlinkMatcher` takes a list of regular expressions and a replacement text. The regular expressions should all have the same number of capture groups. The replacement text should have sed-style references to that capture groups (i.e. '\1' for the first capture group), and a single '%s' reference for the revision ID. The repository given is tried against each regular expression in turn. The results are then substituted into the replacement text, along with the revision ID, to obtain the revision link. .. code-block:: python from buildbot.plugins import util c['revlink'] = util.RevlinkMatch([r'git://notmuchmail.org/git/(.*)'], r'http://git.notmuchmail.org/git/\1/commit/%s') :class:`buildbot.revlinks.RevlinkMultiplexer` takes a list of revision link callables, and tries each in turn, returning the first successful match. .. bb:cfg:: codebaseGenerator Codebase Generator ~~~~~~~~~~~~~~~~~~ .. 
code-block:: python all_repositories = { r'https://hg/hg/mailsuite/mailclient': 'mailexe', r'https://hg/hg/mailsuite/mapilib': 'mapilib', r'https://hg/hg/mailsuite/imaplib': 'imaplib', r'https://github.com/mailinc/mailsuite/mailclient': 'mailexe', r'https://github.com/mailinc/mailsuite/mapilib': 'mapilib', r'https://github.com/mailinc/mailsuite/imaplib': 'imaplib', } def codebaseGenerator(chdict): return all_repositories[chdict['repository']] c['codebaseGenerator'] = codebaseGenerator For any incoming change, the :ref:`codebase` is set to ''. This codebase value is sufficient if all changes come from the same repository (or clones). If changes come from different repositories, extra processing will be needed to determine the codebase for the incoming change. This codebase will then be a logical name for the combination of repository and or branch etc. The `codebaseGenerator` accepts a change dictionary as produced by the :py:class:`buildbot.db.changes.ChangesConnectorComponent `, with a changeid equal to `None`. .. _TwistedConch: http://twistedmatrix.com/trac/wiki/TwistedConch .. _InfluxDB: https://influxdata.com/time-series-platform/influxdb/ buildbot-3.4.0/master/docs/manual/configuration/index.rst000066400000000000000000000016311413250514000235260ustar00rootroot00000000000000.. _Configuration: Configuration ============= The following sections describe the configuration of the various Buildbot components. The information available here is sufficient to create basic build and test configurations, and does not assume great familiarity with Python. In more advanced Buildbot configurations, Buildbot acts as a framework for a continuous-integration application. The next section, :doc:`../customization`, describes this approach, with frequent references into the :ref:`development documentation `. .. 
toctree:: :maxdepth: 1 intro global changesources schedulers workers builders buildfactories buildsets properties steps/index interlocks report_generators/index reporters/index www wwwhooks services/index dbconfig configurators manhole multimaster multicodebase misc/index buildbot-3.4.0/master/docs/manual/configuration/interlocks.rst000066400000000000000000000174161413250514000246040ustar00rootroot00000000000000.. -*- rst -*- .. _Interlocks: Interlocks ---------- .. contents:: :depth: 1 :local: Until now, we assumed that a master can run builds at any worker whenever needed or desired. Some times, you want to enforce additional constraints on builds. For reasons like limited network bandwidth, old worker machines, or a self-willed data base server, you may want to limit the number of builds (or build steps) that can access a resource. .. _Access-Modes: Access Modes ~~~~~~~~~~~~ The mechanism used by Buildbot is known as the read/write lock [#]_. It allows either many readers or a single writer but not a combination of readers and writers. The general lock has been modified and extended for use in Buildbot. Firstly, the general lock allows an infinite number of readers. In Buildbot, we often want to put an upper limit on the number of readers, for example allowing two out of five possible builds at the same time. To do this, the lock counts the number of active readers. Secondly, the terms *read mode* and *write mode* are confusing in the context of Buildbot. They have been replaced by *counting mode* (since the lock counts them) and *exclusive mode*. As a result of these changes, locks in Buildbot allow a number of builds (up to some fixed number) in counting mode, or they allow one build in exclusive mode. .. note:: Access modes are specified when a lock is used. That is, it is possible to have a single lock that is used by several workers in counting mode, and several workers in exclusive mode. 
In fact, this is the strength of the modes: accessing a lock in exclusive mode will prevent all counting-mode accesses. Count ~~~~~ Often, not all workers are equal. To address this situation, Buildbot allows you to have a separate upper limit on the count for each worker. In this way, for example, you can have at most 3 concurrent builds at a fast worker, 2 at a slightly older worker, and 1 at all other workers. You can also specify the count during an access request. This specifies how many units an access consumes from the lock (in other words, as how many builds a build will count). This way, you can balance a shared resource that builders consume unevenly, for example, the amount of memory or the number of CPU cores. Scope ~~~~~ The final thing you can specify when you introduce a new lock is its scope. Some constraints are global and must be enforced on all workers. Other constraints are local to each worker. A *master lock* is used for the global constraints. You can ensure for example that at most one build (of all builds running at all workers) accesses the database server. With a *worker lock* you can add a limit local to each worker. With such a lock, you can for example enforce an upper limit to the number of active builds at a worker, like above. Examples ~~~~~~~~ Time for a few examples. A master lock is defined below to protect a database, and a worker lock is created to limit the number of builds at each worker. .. code-block:: python from buildbot.plugins import util db_lock = util.MasterLock("database") build_lock = util.WorkerLock("worker_builds", maxCount=1, maxCountForWorker={'fast': 3, 'new': 2}) :data:`db_lock` is defined to be a master lock. The ``database`` string is used for uniquely identifying the lock. At the next line, a worker lock called :data:`build_lock` is created with the name ``worker_builds``. Since the requirements of the worker lock are a bit more complicated, two optional arguments are also specified. 
The ``maxCount`` parameter sets the default limit for builds in counting mode to ``1``. For the worker called ``'fast'`` however, we want to have at most three builds, and for the worker called ``'new'``, the upper limit is two builds running at the same time. The next step is accessing the locks in builds. Buildbot allows a lock to be used during an entire build (from beginning to end) or only during a single build step. In the latter case, the lock is claimed for use just before the step starts and released again when the step ends. To prevent deadlocks [#]_, it is not possible to claim or release locks at other times. To use locks, you add them with a ``locks`` argument to a build or a step. Each use of a lock is either in counting mode (that is, possibly shared with other builds) or in exclusive mode, and this is indicated with the syntax ``lock.access(mode, count)``, where :data:`mode` is one of ``"counting"`` or ``"exclusive"``. The optional argument :data:`count` is a non-negative integer (for counting locks) or 1 (for exclusive locks). If unspecified, it defaults to 1. If 0, the access always succeeds. This argument allows to use locks for balancing a shared resource that is utilized unevenly. A build or build step proceeds only when it has acquired all locks. If a build or step needs many locks, it may be starved [#]_ by other builds requiring fewer locks. To illustrate the use of locks, here are a few examples. .. 
code-block:: python from buildbot.plugins import util, steps db_lock = util.MasterLock("database") build_lock = util.WorkerLock("worker_builds", maxCount=1, maxCountForWorker={'fast': 3, 'new': 2}) f = util.BuildFactory() f.addStep(steps.SVN(repourl="http://example.org/svn/Trunk")) f.addStep(steps.ShellCommand(command="make all")) f.addStep(steps.ShellCommand(command="make test", locks=[db_lock.access('exclusive')])) b1 = {'name': 'full1', 'workername': 'fast', 'builddir': 'f1', 'factory': f, 'locks': [build_lock.access('counting')] } b2 = {'name': 'full2', 'workername': 'new', 'builddir': 'f2', 'factory': f, 'locks': [build_lock.access('counting')] } b3 = {'name': 'full3', 'workername': 'old', 'builddir': 'f3', 'factory': f, 'locks': [build_lock.access('counting')] } b4 = {'name': 'full4', 'workername': 'other', 'builddir': 'f4', 'factory': f, 'locks': [build_lock.access('counting')] } c['builders'] = [b1, b2, b3, b4] Here we have four workers :data:`fast`, :data:`new`, :data:`old`, and :data:`other`. Each worker performs the same checkout, make, and test build step sequence. We want to enforce that at most one test step is executed between all workers due to restrictions with the database server. This is done by adding the ``locks=`` parameter to the third step. It takes a list of locks with their access mode. Alternatively, this can take a renderable that returns a list of locks with their access mode. In this case, only the :data:`db_lock` is needed. The exclusive access mode is used to ensure there is at most one worker that executes the test step. In addition to exclusive access to the database, we also want workers to stay responsive even under the load of a large number of builds being triggered. For this purpose, the worker lock called :data:`build_lock` is defined. Since the restraint holds for entire builds, the lock is specified in the builder with ``'locks': [build_lock.access('counting')]``. 
Note that you will occasionally see ``lock.access(mode)`` written as ``LockAccess(lock, mode)``. The two are equivalent, but the former is preferred. .. [#] See http://en.wikipedia.org/wiki/Read/write_lock_pattern for more information. .. [#] Deadlock is the situation where two or more workers each hold a lock in exclusive mode, and in addition, they want to claim the lock held by the other worker exclusively as well. Since locks allow at most one exclusive user, both workers would wait forever. .. [#] Starving is the situation where only a few locks are available, and they are immediately grabbed by another build. As a result, it may take a long time before all locks needed by the starved build are free at the same time. buildbot-3.4.0/master/docs/manual/configuration/intro.rst000066400000000000000000000220741413250514000235560ustar00rootroot00000000000000Configuring Buildbot ==================== Buildbot's behavior is defined by the *config file*, which normally lives in the :file:`master.cfg` file in the buildmaster's base directory (but this can be changed with an option to the :command:`buildbot create-master` command). This file completely specifies which :class:`Builder`\s are to be run, which workers they should use, how :class:`Change`\s should be tracked, and where the status information is to be sent. The buildmaster's :file:`buildbot.tac` file names the base directory; everything else comes from the config file. A sample config file was installed for you when you created the buildmaster, but you will need to edit it before your Buildbot will do anything useful. This chapter gives an overview of the format of this file and the various sections in it. You will need to read the later chapters to understand how to fill in each section properly. .. 
_Config-File-Format: Config File Format ------------------ The config file is, fundamentally, just a piece of Python code which defines a dictionary named ``BuildmasterConfig``, with a number of keys that are treated specially. You don't need to know Python to do the basic configuration, though; you can just copy the sample file's syntax. If you *are* comfortable writing Python code, however, you can use all the power of a full programming language to build more complicated configurations. .. index: BuildMaster Config The ``BuildmasterConfig`` name is the only one which matters: all other names defined during the execution of the file are discarded. When parsing the config file, the Buildmaster generally compares the old configuration with the new one and performs the minimum set of actions necessary to bring Buildbot up to date: :class:`Builder`\s which are not changed are left untouched, and :class:`Builder`\s which are modified get to keep their old event history. The beginning of the :file:`master.cfg` file typically starts with something like: .. code-block:: python BuildmasterConfig = c = {} Therefore a config key like :bb:cfg:`change_source` will usually appear in :file:`master.cfg` as ``c['change_source']``. See :bb:index:`cfg` for a full list of ``BuildMasterConfig`` keys. Basic Python Syntax ~~~~~~~~~~~~~~~~~~~ The master configuration file is interpreted as Python, allowing the full flexibility of the language. For the configurations described in this section, a detailed knowledge of Python is not required, but the basic syntax is easily described. Python comments start with a hash character ``#``, tuples are defined with ``(parenthesis, pairs)``, and lists (arrays) are defined with ``[square, brackets]``. Tuples and lists are mostly interchangeable. Dictionaries (data structures which map *keys* to *values*) are defined with curly braces: ``{'key1': value1, 'key2': value2}``. 
Function calls (and object instantiations) can use named parameters, like ``steps.ShellCommand(command=["trial", "hello"])``. The config file starts with a series of ``import`` statements, which make various kinds of :class:`Step`\s and :class:`Status` targets available for later use. The main ``BuildmasterConfig`` dictionary is created, and then it is populated with a variety of keys, described section-by-section in the subsequent chapters. .. _Predefined-Config-File-Symbols: Predefined Config File Symbols ------------------------------ The following symbols are automatically available for use in the configuration file. ``basedir`` the base directory for the buildmaster. This string has not been expanded, so it may start with a tilde. It needs to be expanded before use. The config file is located in: .. code-block:: python os.path.expanduser(os.path.join(basedir, 'master.cfg')) ``__file__`` the absolute path of the config file. The config file's directory is located in ``os.path.dirname(__file__)``. .. _Testing-the-Config-File: Testing the Config File ----------------------- To verify that the config file is well-formed and contains no deprecated or invalid elements, use the ``checkconfig`` command, passing it either a master directory or a config file. .. code-block:: bash % buildbot checkconfig master.cfg Config file is good! # or % buildbot checkconfig /tmp/masterdir Config file is good! If the config file has deprecated features (perhaps because you've upgraded the buildmaster and need to update the config file to match), they will be announced by checkconfig. In this case, the config file will work, but you should really remove the deprecated items and use the recommended replacements instead: .. code-block:: none % buildbot checkconfig master.cfg /usr/lib/python2.4/site-packages/buildbot/master.py:559: DeprecationWarning: c['sources'] is deprecated as of 0.7.6 and will be removed by 0.8.0 . Please use c['change_source'] instead. Config file is good! 
If you have errors in your configuration file, checkconfig will let you know: .. code-block:: none % buildbot checkconfig master.cfg Configuration Errors: c['workers'] must be a list of Worker instances no workers are configured builder 'smoketest' uses unknown workers 'linux-002' If the config file is simply broken, that will be caught too: .. code-block:: none % buildbot checkconfig master.cfg error while parsing config file: Traceback (most recent call last): File "/home/buildbot/master/bin/buildbot", line 4, in runner.run() File "/home/buildbot/master/buildbot/scripts/runner.py", line 1358, in run if not doCheckConfig(so): File "/home/buildbot/master/buildbot/scripts/runner.py", line 1079, in doCheckConfig return cl.load(quiet=quiet) File "/home/buildbot/master/buildbot/scripts/checkconfig.py", line 29, in load self.basedir, self.configFileName) --- --- File "/home/buildbot/master/buildbot/config.py", line 147, in loadConfig exec f in localDict exceptions.SyntaxError: invalid syntax (master.cfg, line 52) Configuration Errors: error while parsing config file: invalid syntax (master.cfg, line 52) (traceback in logfile) Loading the Config File ----------------------- The config file is only read at specific points in time. It is first read when the buildmaster is launched. .. note:: If the configuration is invalid, the master will display the errors in the console output, but will not exit. Reloading the Config File (reconfig) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If you are on the system hosting the buildmaster, you can send a ``SIGHUP`` signal to it: the :command:`buildbot` tool has a shortcut for this: .. code-block:: none buildbot reconfig BASEDIR This command will show you all of the lines from :file:`twistd.log` that relate to the reconfiguration. If there are any problems during the config-file reload, they will be displayed in the output. When reloading the config file, the buildmaster will endeavor to change as little as possible about the running system. 
For example, although old status targets may be shut down and new ones started up, any status targets that were not changed since the last time the config file was read will be left running and untouched. Likewise any :class:`Builder`\s which have not been changed will be left running. If a :class:`Builder` is modified (say, the build command is changed), this change will apply only for new :class:`Build`\s. Any existing build that is currently running or was already queued will be allowed to finish using the old configuration. Note that if any lock is renamed, old and new instances of the lock will be completely unrelated in the eyes of the buildmaster. This means that buildmaster will be able to start new builds that would otherwise have waited for the old lock to be released. .. warning:: Buildbot's reconfiguration system is fragile for a few difficult-to-fix reasons: * Any modules imported by the configuration file are not automatically reloaded. Python modules such as https://docs.python.org/3/library/importlib.html and `importlib.reload()` may help here, but reloading modules is fraught with subtleties and difficult-to-decipher failure cases. * During the reconfiguration, active internal objects are divorced from the service hierarchy, leading to tracebacks in the web interface and other components. These are ordinarily transient, but with HTTP connection caching (either by the browser or an intervening proxy) they can last for a long time. * If the new configuration file is invalid, it is possible for Buildbot's internal state to be corrupted, leading to undefined results. When this occurs, it is best to restart the master. * For more advanced configurations, it is impossible for Buildbot to tell if the configuration for a :class:`Builder` or :class:`Scheduler` has changed, and thus the :class:`Builder` or :class:`Scheduler` will always be reloaded. This occurs most commonly when a callable is passed as a configuration parameter. 
The bbproto project (at https://github.com/dabrahams/bbproto) may help to construct large (multi-file) configurations which can be effectively reloaded and reconfigured. buildbot-3.4.0/master/docs/manual/configuration/manhole.rst000066400000000000000000000136761413250514000240560ustar00rootroot00000000000000.. _Manhole: .. py:module:: buildbot.plugins.util Manhole ------- Manhole is an interactive Python shell that gives full access to the Buildbot master instance. It is probably only useful for Buildbot developers. Using Manhole requires the ``cryptography`` and ``pyasn1`` python packages to be installed. These are not part of the normal Buildbot dependencies. There are several implementations of Manhole available, which differ by the authentication mechanisms and the security of the connection. .. note:: Manhole exposes full access to the buildmaster's account (including the ability to modify and delete files). It's recommended not to expose the manhole to the Internet and to use a strong password. .. py:class:: AuthorizedKeysManhole(port, keyfile, ssh_hostkey_dir) A manhole implementation that accepts encrypted ssh connections and authenticates by ssh keys. The prospective client must have an ssh private key that matches one of the public keys in manhole's authorized keys file. :type port: string or int :param port: The port to listen on. This is a `strports `__ specification string, like ``tcp:12345`` or ``tcp:12345:interface=127.0.0.1``. Bare integers are treated as a simple tcp port. :type keyfile: string :param keyfile: The path to the file containing public parts of the authorized SSH keys. The path is interpreted relative to the buildmaster's basedir. The file should contain one public SSH key per line. This is the exact same format as used by sshd in ``~/.ssh/authorized_keys``. :type ssh_hostkey_dir: string :param ssh_hostkey_dir: The path to the directory which contains ssh host keys for this server. .. 
py:class:: PasswordManhole(port, username, password, ssh_hostkey_dir) A manhole implementation that accepts encrypted ssh connections and authenticates by username and password. :type port: string or int :param port: The port to listen on. This is a `strports `__ specification string, like ``tcp:12345`` or ``tcp:12345:interface=127.0.0.1``. Bare integers are treated as a simple tcp port. :type username: string :param username: The username to authenticate. :type password: string :param password: The password of the user to authenticate. :type ssh_hostkey_dir: string :param ssh_hostkey_dir: The path to the directory which contains ssh host keys for this server. .. py:class:: TelnetManhole(port, username, password) A manhole implementation that accepts unencrypted telnet connections and authenticates by username and password. .. note:: This connection method is not secure and should not be used anywhere where the port is exposed to the Internet. :type port: string or int :param port: The port to listen on. This is a `strports `__ specification string, like ``tcp:12345`` or ``tcp:12345:interface=127.0.0.1``. Bare integers are treated as a simple tcp port. :type username: string :param username: The username to authenticate. :type password: string :param password: The password of the user to authenticate. Using manhole ~~~~~~~~~~~~~ The interactive Python shell can be entered by simply connecting to the host in question. For instance, in the case of ssh password-based manhole, the configuration may look like this: .. code-block:: python from buildbot import manhole c['manhole'] = manhole.PasswordManhole("tcp:1234:interface=127.0.0.1", "admin", "passwd", ssh_hostkey_dir="data/ssh_host_keys") The above `ssh_hostkey_dir` declares a path relative to the buildmaster's basedir to look for ssh keys. To create an ssh key, navigate to the buildmaster's basedir and run: .. 
code-block:: bash mkdir -p data/ssh_host_keys ckeygen3 -t rsa -f "data/ssh_host_keys/ssh_host_rsa_key" Restart Buildbot and then try to connect to the running buildmaster like this: .. code-block:: bash ssh -p1234 admin@127.0.0.1 # enter passwd at prompt After connection has been established, objects can be explored in more depth using `dir(x)` or the helper function `show(x)`. For example: .. code-block:: python >>> master.workers.workers {'example-worker': } >>> show(master) data attributes of basedir : '/home/dustin/code/buildbot/t/buildbot/'... botmaster : buildCacheSize : None buildHorizon : None buildbotURL : http://localhost:8010/ changeCacheSize : None change_svc : configFileName : master.cfg db : db_url : sqlite:///state.sqlite ... >>> show(master.botmaster.builders['win32']) data attributes of The buildmaster's SSH server will use a different host key than the normal sshd running on a typical unix host. This will cause the ssh client to complain about a `host key mismatch`, because it does not realize there are two separate servers running on the same host. To avoid this, use a clause like the following in your :file:`.ssh/config` file: .. code-block:: none Host remotehost-buildbot HostName remotehost HostKeyAlias remotehost-buildbot Port 1234 # use 'user' if you use PasswordManhole and your name is not 'admin'. # if you use AuthorizedKeysManhole, this probably doesn't matter. User admin buildbot-3.4.0/master/docs/manual/configuration/misc/000077500000000000000000000000001413250514000226175ustar00rootroot00000000000000buildbot-3.4.0/master/docs/manual/configuration/misc/index.rst000066400000000000000000000004631413250514000244630ustar00rootroot00000000000000.. _Miscellaneous_Configuration: Miscellaneous Configuration =========================== .. toctree:: :hidden: :maxdepth: 2 source_stamp_filter This section outlines miscellaneous functionality that is useful for configuration but does not fit any other section. 
* :ref:`SourceStampFilter` buildbot-3.4.0/master/docs/manual/configuration/misc/source_stamp_filter.rst000066400000000000000000000037331413250514000274300ustar00rootroot00000000000000.. _SourceStampFilter: SourceStampFilter +++++++++++++++++ .. py:class:: buildbot.util.SourceStampFilter This class is used to filter source stamps. It is conceptually very similar to ``ChangeFilter`` except that it operates on source stamps. It accepts a set of conditions. A source stamp is considered *accepted* if all conditions are satisfied. The conditions are specified via the constructor arguments. The following parameters are supported by the :py:class:`SourceStampFilter`: ``project_eq``, ``codebase_eq``, ``repository_eq``, ``branch_eq`` (optional, a string or a list of strings) The corresponding property of the source stamp must match exactly to at least one string from the value supplied by the argument. ``project_not_eq``, ``codebase_not_eq``, ``repository_not_eq``, ``branch_not_eq`` (optional, a string or a list of strings) The corresponding property of the source stamp must not match exactly to any string from the value supplied by the argument. ``project_re``, ``codebase_re``, ``repository_re``, ``branch_re`` (optional, a string or a list of strings or regex pattern objects) The corresponding property of the source stamp must match to at least one regex from the value supplied by the argument. Any strings passed via this parameter are converted to a regex via ``re.compile``. ``project_not_re``, ``codebase_not_re``, ``repository_not_re``, ``branch_not_re`` (optional, a string or a list of strings or regex pattern objects) The corresponding property of the source stamp must not match to any regex from the value supplied by the argument. Any strings passed via this parameter are converted to a regex via ``re.compile``. ``filter_fn`` (optional, a callable accepting a dictionary and returning a boolean) The given function will be passed the source stamp. 
It is expected to return ``True`` if the source stamp is matched, ``False`` otherwise. In case of a match, all other conditions will still be evaluated. buildbot-3.4.0/master/docs/manual/configuration/multicodebase.rst000066400000000000000000000052171413250514000252430ustar00rootroot00000000000000.. _Multiple-Codebase-Builds: Multiple-Codebase Builds ------------------------ What if an end-product is composed of code from several codebases? Changes may arrive from different repositories within the tree-stable-timer period. Buildbot will not only use the source-trees that contain changes but also needs the remaining source-trees to build the complete product. For this reason, a :ref:`Scheduler` can be configured to base a build on a set of several source-trees that can (partly) be overridden by the information from incoming :class:`Change`\s. As described in :ref:`Source-Stamps `, the source for each codebase is identified by a source stamp, containing its repository, branch and revision. A full build set will specify a source stamp set describing the source to use for each codebase. Configuring all of this takes a coordinated approach. A complete multiple repository configuration consists of: a *codebase generator* Every relevant change arriving from a VC must contain a codebase. This is done by a :bb:cfg:`codebaseGenerator` that is defined in the configuration. Most generators examine the repository of a change to determine its codebase, using project-specific rules. some *schedulers* Each :bb:cfg:`scheduler` has to be configured with a set of all required ``codebases`` to build a product. These codebases indicate the set of required source-trees. In order for the scheduler to be able to produce a complete set for each build, the configuration can give a default repository, branch, and revision for each codebase. When a scheduler must generate a source stamp for a codebase that has received no changes, it applies these default values. 
multiple *source steps* - one for each codebase A :ref:`Builder`'s build factory must include a :ref:`source step` for each codebase. Each of the source steps has a ``codebase`` attribute which is used to select an appropriate source stamp from the source stamp set for a build. This information comes from the arrived changes or from the scheduler's configured default values. .. note:: Each :ref:`source step` has to have its own ``workdir`` set in order for the checkout to be done for each codebase in its own directory. .. note:: Ensure you specify the codebase within your source step's Interpolate() calls (e.g. ``http://.../svn/%(src:codebase:branch)s``). See :ref:`Interpolate` for details. .. warning:: Defining a :bb:cfg:`codebaseGenerator` that returns non-empty (not ``''``) codebases will change the behavior of all the schedulers. buildbot-3.4.0/master/docs/manual/configuration/multimaster.rst000066400000000000000000000057101413250514000247670ustar00rootroot00000000000000.. _Multimaster: Multimaster ----------- .. Warning:: Buildbot Multimaster is considered experimental. There are still some companies using it in production. Don't hesitate to use the mailing lists to share your experience. .. image:: ../../_images/multimaster.* :alt: Multi Master Buildbot supports interconnection of several masters. This has to be done through a multi-master enabled message queue backend. As of now the only one supported is wamp and crossbar.io. see :ref:`wamp ` There are then several strategy for introducing multimaster in your buildbot infra. A simple way to say it is by adding the concept of symmetrics and asymmetrics multimaster (like there is SMP and AMP for multi core CPUs) Symmetric multimaster is when each master share the exact same configuration. They run the same builders, same schedulers, same everything, the only difference is that workers are connected evenly between the masters (by any means (e.g. 
DNS load balancing, etc)) Symmetric multimaster is good to use to scale buildbot horizontally. Asymmetric multimaster is when each master have different configuration. Each master may have a specific responsibility (e.g schedulers, set of builder, UI). This was more how you did in 0.8, also because of its own technical limitations. A nice feature of asymmetric multimaster is that you can have the UI only handled by some masters. Separating the UI from the controlling will greatly help in the performance of the UI, because badly written BuildSteps?? can stall the reactor for several seconds. The fanciest configuration would probably be a symmetric configuration for everything but the UI. You would scale the number of UI master according to your number of UI users, and scale the number of engine masters to the number of workers. Depending on your workload and size of master host, it is probably a good idea to start thinking of multimaster starting from a hundred workers connected. Multimaster can also be used for high availability, and seamless upgrade of configuration code. Complex configuration indeed requires sometimes to restart the master to reload custom steps or code, or just to upgrade the upstream buildbot version. In this case, you will implement following procedure: * Start new master(s) with new code and configuration. * Send a graceful shutdown to the old master(s). * New master(s) will start taking the new jobs, while old master(s) will just finish managing the running builds. * As an old master is finishing the running builds, it will drop the connections from the workers, who will then reconnect automatically, and by the mean of load balancer will get connected to a new master to run new jobs. As buildbot nine has been designed to allow such procedure, it has not been implemented in production yet as we know. 
There is probably a new REST API needed in order to gracefully shutdown a master, and the details of gracefully dropping the connection to the workers to be sorted out. buildbot-3.4.0/master/docs/manual/configuration/properties.rst000066400000000000000000000515611413250514000246220ustar00rootroot00000000000000.. index:: Properties .. _Properties: Properties ========== Build properties are a generalized way to provide configuration information to build steps; see :ref:`Build-Properties` for the conceptual overview of properties. .. contents:: :depth: 1 :local: Some build properties come from external sources and are set before the build begins; others are set during the build and are available for later steps. The sources for properties are: :bb:cfg:`global configuration ` These properties apply to all builds. :ref:`schedulers ` A scheduler can specify properties that become available to all builds it starts. :ref:`changes ` A change can have properties attached to it, supplying extra information gathered by the change source. This is most commonly used with the :bb:cmdline:`sendchange` command. forced builds The "Force Build" form allows users to specify properties :bb:cfg:`workers ` A worker can pass properties on to the builds it performs. :ref:`builds ` A build automatically sets a number of properties on itself. :bb:cfg:`builders ` A builder can set properties on all the builds it runs. :ref:`steps ` The steps of a build can set properties that are available to subsequent steps. In particular, source steps set the `got_revision` property. If the same property is supplied in multiple places, the final appearance takes precedence. For example, a property set in a builder configuration will override the one supplied by the scheduler. Properties are stored internally in JSON format, so they are limited to basic types of data: numbers, strings, lists, and dictionaries. .. index:: single: Properties; Common Properties .. 
_Common-Build-Properties: Common Build Properties ----------------------- The following build properties are set when the build is started, and are available to all steps. .. index:: single: Properties; got_revision ``got_revision`` This property is set when a :class:`Source` step checks out the source tree, and provides the revision that was actually obtained from the VC system. In general this should be the same as ``revision``, except for non-absolute sourcestamps, where ``got_revision`` indicates what revision was current when the checkout was performed. This can be used to rebuild the same source code later. .. note:: For some VC systems (Darcs in particular), the revision is a large string containing newlines, and is not suitable for interpolation into a filename. For multi-codebase builds (where codebase is not the default `''`), this property is a dictionary, keyed by codebase. .. index:: single: Properties; buildername ``buildername`` This is a string that indicates which :class:`Builder` the build was a part of. The combination of buildername and buildnumber uniquely identify a build. .. index:: single: Properties; buildnumber ``buildnumber`` Each build gets a number, scoped to the :class:`Builder` (so the first build performed on any given :class:`Builder` will have a build number of 0). This integer property contains the build's number. .. index:: single: Properties; workername ``workername`` This is a string which identifies which worker the build is running on. .. index:: single: Properties; scheduler ``scheduler`` If the build was started from a scheduler, then this property will contain the name of that scheduler. ``builddir`` The absolute path of the base working directory on the worker of the current builder. .. index:: single: Properties; builddir For single codebase builds, where the codebase is `''`, the following :ref:`Source-Stamp-Attributes` are also available as properties: ``branch``, ``revision``, ``repository``, and ``project`` . .. 
_Source-Stamp-Attributes: Source Stamp Attributes ----------------------- .. index:: single: Properties; branch ``branch`` ``revision`` ``repository`` ``project`` ``codebase`` For details of these attributes see :doc:`/manual/concepts`. ``changes`` This attribute is a list of dictionaries representing the changes that make up this sourcestamp. Using Properties in Steps ------------------------- For the most part, properties are used to alter the behavior of build steps during a build. This is done by using :index:`renderables ` (objects implementing the :class:`~buildbot.interfaces.IRenderable` interface) as step parameters. When the step is started, each such object is rendered using the current values of the build properties, and the resultant rendering is substituted as the actual value of the step parameter. Buildbot offers several renderable object types covering common cases. It's also possible to :ref:`create custom renderables `. .. note:: Properties are defined while a build is in progress; their values are not available when the configuration file is parsed. This can sometimes confuse newcomers to Buildbot! In particular, the following is a common error: .. code-block:: python if Property('release_train') == 'alpha': f.addStep(...) This does not work because the value of the property is not available when the ``if`` statement is executed. However, Python will not detect this as an error - you will just never see the step added to the factory. You can use renderables in most step parameters. Please file bugs for any parameters which do not accept renderables. .. index:: single: Properties; Property .. _Property: Property ++++++++ The simplest renderable is :class:`Property`, which renders to the value of the property named by its argument: .. 
code-block:: python from buildbot.plugins import steps, util f.addStep(steps.ShellCommand(command=['echo', 'buildername:', util.Property('buildername')])) You can specify a default value by passing a ``default`` keyword argument: .. code-block:: python f.addStep(steps.ShellCommand(command=['echo', 'warnings:', util.Property('warnings', default='none')])) The default value is used when the property doesn't exist, or when the value is something Python regards as ``False``. The ``defaultWhenFalse`` argument can be set to ``False`` to force Buildbot to use the default argument only if the parameter is not set: .. code-block:: python f.addStep(steps.ShellCommand(command=['echo', 'warnings:', util.Property('warnings', default='none', defaultWhenFalse=False)])) The default value can be a renderable itself, e.g., .. code-block:: python command=util.Property('command', default=util.Property('default-command')) .. index:: single: Properties; Interpolate .. _Interpolate: Interpolate +++++++++++ :class:`Property` can only be used to replace an entire argument: in the example above, it replaces an argument to ``echo``. Often, properties need to be interpolated into strings, instead. The tool for that job is :ref:`Interpolate`. The more common pattern is to use Python dictionary-style string interpolation by using the ``%(prop:)s`` syntax. In this form, the property name goes in the parentheses, as above. A common mistake is to omit the trailing "s", leading to a rather obscure error from Python ("ValueError: unsupported format character"). .. code-block:: python from buildbot.plugins import steps, util f.addStep(steps.ShellCommand( command=['make', util.Interpolate('REVISION=%(prop:got_revision)s'), 'dist'])) This example will result in a ``make`` command with an argument like ``REVISION=12098``. .. 
_Interpolate-DictStyle: The syntax of dictionary-style interpolation is a selector, followed by a colon, followed by a selector specific key, optionally followed by a colon and a string indicating how to interpret the value produced by the key. The following selectors are supported. ``prop`` The key is the name of a property. ``src`` The key is a codebase and source stamp attribute, separated by a colon. Note, the syntax is ``%(src::)s``, which differs from other selectors. ``kw`` The key refers to a keyword argument passed to ``Interpolate``. Those keyword arguments may be ordinary values or renderables. ``secret`` The key refers to a secret provided by a provider declared in :bb:cfg:`secretsProviders` . ``worker`` The key refers to an info item provided by :bb:cfg:`workers`. The following ways of interpreting the value are available. ``-replacement`` If the key exists, substitute its value; otherwise, substitute ``replacement``. ``replacement`` may be empty (default), ``%(prop:propname:-)s``. ``~replacement`` Like ``-replacement``, but only substitutes the value of the key if it is something Python regards as ``True``. Python considers ``None``, 0, empty lists, and the empty string to be false, so such values will be replaced by ``replacement``. ``+replacement`` If the key exists, substitute ``replacement``; otherwise, substitute an empty string. ``?|sub_if_exists|sub_if_missing`` ``#?|sub_if_true|sub_if_false`` Ternary substitution, depending on either the key being present (with ``?``, similar to ``+``) or being ``True`` (with ``#?``, like ``~``). Notice that there is a pipe immediately following the question mark *and* between the two substitution alternatives. The character that follows the question mark is used as the delimiter between the two alternatives. In the above examples, it is a pipe, but any character other than ``(`` can be used. .. note:: Although these are similar to shell substitutions, no other substitutions are currently supported. 
Example: .. code-block:: python from buildbot.plugins import steps, util f.addStep(steps.ShellCommand( command=[ 'save-build-artifacts-script.sh', util.Interpolate('-r %(prop:repository)s'), util.Interpolate('-b %(src::branch)s'), util.Interpolate('-d %(kw:data)s', data="some extra needed data") ])) .. note:: We use ``%(src::branch)s`` in most examples, because ``codebase`` is empty by default. Example: .. code-block:: python from buildbot.plugins import steps, util f.addStep(steps.ShellCommand( command=[ 'make', util.Interpolate('REVISION=%(prop:got_revision:-%(src::revision:-unknown)s)s'), 'dist' ])) In addition, ``Interpolate`` supports using positional string interpolation. Here, ``%s`` is used as a placeholder, and the substitutions (which may be renderables) are given as subsequent arguments: .. code-block:: python f.addStep(steps.ShellCommand( command=[ 'echo', util.Interpolate('%d warnings and %d errors', util.Property('warnings'), util.Property('errors')) ])) .. note:: Like Python, you can use either positional interpolation *or* dictionary-style interpolation, but not both. Thus you cannot use a string like ``Interpolate("foo-%(src::revision)s-%s", "branch")``. .. index:: single: Properties; Renderer .. _Renderer: Renderer ++++++++ While Interpolate can handle many simple cases, and even some common conditionals, more complex cases are best handled with Python code. The ``renderer`` decorator creates a renderable object whose rendering is obtained by calling the decorated function when the step to which it's passed begins. The function receives an :class:`~buildbot.interfaces.IProperties` object, which it can use to examine the values of any and all properties. For example: .. 
code-block:: python from buildbot.plugins import steps, util @util.renderer def makeCommand(props): command = ['make'] cpus = props.getProperty('CPUs') if cpus: command.extend(['-j', str(cpus+1)]) else: command.extend(['-j', '2']) command.extend([util.Interpolate('%(prop:MAKETARGET)s')]) return command f.addStep(steps.ShellCommand(command=makeCommand)) You can think of ``renderer`` as saying "call this function when the step starts". .. note:: Since 0.9.3, renderer can itself return :class:`~buildbot.interfaces.IRenderable` objects or containers containing :class:`~buildbot.interfaces.IRenderable`. Optionally, extra arguments may be passed to the rendered function at any time by calling ``withArgs`` on the renderable object. The ``withArgs`` method accepts ``*args`` and ``**kwargs`` arguments which are stored in a new renderable object which is returned. The original renderable object is not modified. Multiple ``withArgs`` calls may be chained. The passed ``*args`` and ``**kwargs`` parameters are rendered and the results are passed to the rendered function at the time it is itself rendered. For example: .. code-block:: python from buildbot.plugins import steps, util @util.renderer def makeCommand(props, target): command = ['make'] cpus = props.getProperty('CPUs') if cpus: command.extend(['-j', str(cpus+1)]) else: command.extend(['-j', '2']) command.extend([target]) return command f.addStep(steps.ShellCommand(command=makeCommand.withArgs('mytarget'))) .. note:: The rendering of the renderable object may happen at unexpected times, so it is best to ensure that the passed extra arguments are not changed. .. note:: Config errors with Renderables may not always be caught via checkconfig. .. index:: single: Properties; Transform .. _Transform: Transform +++++++++ ``Transform`` is an alternative to ``renderer``. 
While ``renderer`` is useful for creating new renderables, ``Transform`` is easier to use when you want to transform or combine the renderings of preexisting renderables. ``Transform`` takes a function and any number of positional and keyword arguments. The function must either be a callable object or a renderable producing one. When rendered, a ``Transform`` first replaces all of its arguments that are renderables with their renderings, then calls the function, passing it the positional and keyword arguments, and returns the result as its own rendering. For example, suppose ``my_path`` is a path on the worker, and you want to get it relative to the build directory. You can do it like this: .. code-block:: python import os.path from buildbot.plugins import util my_path_rel = util.Transform(os.path.relpath, my_path, start=util.Property('builddir')) This works whether ``my_path`` is an ordinary string or a renderable. ``my_path_rel`` will be a renderable in either case, however. .. index:: single: Properties; WithProperties .. _WithProperties: FlattenList +++++++++++ If a nested list should be flattened for some renderables, FlattenList can be used. For example: .. code-block:: python from buildbot.plugins import steps, util f.addStep(steps.ShellCommand( command=[ 'make' ], descriptionDone=util.FlattenList([ 'make ', [ 'done' ]]) )) ``descriptionDone`` will be set to ``[ 'make', 'done' ]`` when the ``ShellCommand`` executes. This is useful when a list-returning property is used in renderables. .. note:: ShellCommand automatically flattens nested lists in its ``command`` argument, so there is no need to use ``FlattenList`` for it. WithProperties ++++++++++++++ .. warning:: This class is deprecated. It is an older version of :ref:`Interpolate`. It exists for compatibility with older configs. The simplest use of this class is with positional string interpolation. Here, ``%s`` is used as a placeholder, and property names are given as subsequent arguments: .. 
code-block:: python from buildbot.plugins import steps, util f.addStep(steps.ShellCommand( command=["tar", "czf", util.WithProperties("build-%s-%s.tar.gz", "branch", "revision"), "source"])) If this :class:`BuildStep` were used in a tree obtained from Git, it would create a tarball with a name like :file:`build-master-a7d3a333db708e786edb34b6af646edd8d4d3ad9.tar.gz`. .. index:: unsupported format character The more common pattern is to use Python dictionary-style string interpolation by using the ``%(propname)s`` syntax. In this form, the property name goes in the parentheses, as above. A common mistake is to omit the trailing "s", leading to a rather obscure error from Python ("ValueError: unsupported format character"). .. code-block:: python from buildbot.plugins import steps, util f.addStep(steps.ShellCommand( command=['make', util.WithProperties('REVISION=%(got_revision)s'), 'dist'])) This example will result in a ``make`` command with an argument like ``REVISION=12098``. .. _WithProperties-DictStyle: The dictionary-style interpolation supports a number of more advanced syntaxes in the parentheses. ``propname:-replacement`` If ``propname`` exists, substitute its value; otherwise, substitute ``replacement``. ``replacement`` may be empty (``%(propname:-)s``) ``propname:~replacement`` Like ``propname:-replacement``, but only substitutes the value of property ``propname`` if it is something Python regards as ``True``. Python considers ``None``, 0, empty lists, and the empty string to be false, so such values will be replaced by ``replacement``. ``propname:+replacement`` If ``propname`` exists, substitute ``replacement``; otherwise, substitute an empty string. Although these are similar to shell substitutions, no other substitutions are currently supported, and ``replacement`` in the above cannot contain more substitutions. Note: like Python, you can use either positional interpolation *or* dictionary-style interpolation, not both. 
Thus you cannot use a string like ``WithProperties("foo-%(revision)s-%s", "branch")``. .. _Custom-Renderables: Custom Renderables ++++++++++++++++++ If the options described above are not sufficient, more complex substitutions can be achieved by writing custom renderables. The :class:`~buildbot.interfaces.IRenderable` interface is simple - objects must provide a `getRenderingFor` method. The method should take one argument - an :class:`~buildbot.interfaces.IProperties` provider - and should return the rendered value or a deferred firing with one. You can pass instances of the class anywhere other renderables are accepted. For example: .. code-block:: python import time from buildbot.interfaces import IRenderable from zope.interface import implementer @implementer(IRenderable) class DetermineFoo(object): def getRenderingFor(self, props): if props.hasProperty('bar'): return props['bar'] elif props.hasProperty('baz'): return props['baz'] return 'qux' ShellCommand(command=['echo', DetermineFoo()]) or, more practically, .. code-block:: python from buildbot.interfaces import IRenderable from zope.interface import implementer from buildbot.plugins import util @implementer(IRenderable) class Now(object): def getRenderingFor(self, props): return time.clock() ShellCommand(command=['make', util.Interpolate('TIME=%(kw:now)s', now=Now())]) This is equivalent to: .. code-block:: python from buildbot.plugins import util @util.renderer def now(props): return time.clock() ShellCommand(command=['make', util.Interpolate('TIME=%(kw:now)s', now=now)]) Note that a custom renderable must be instantiated (and its constructor can take whatever arguments you like), whereas a function decorated with :func:`renderer` can be used directly. .. _URLForBuild: URL for build +++++++++++++ Its common to need to use the URL for the build in a step. For this, you can use a special custom renderer as following: .. 
code-block:: python from buildbot.plugins import * ShellCommand(command=['make', util.Interpolate('BUILDURL=%(kw:url)s', url=util.URLForBuild)]) .. _RenderableComparison: Renderable Comparison +++++++++++++++++++++ Its common to need to make basic comparison or calculation with properties. The :class:`Property` and :class:`Interpolate` objects contain necessary operator overloads to make this possible. .. code-block:: python from buildbot.plugins import * ShellCommand(command=['make'], doStepIf=Interpolate("worker:os_id") == 'ubuntu') In previous code, the value of the comparison can only be computed at runtime, so the result of the comparison is actually a renderable which will be computed at the start of the step. .. code-block:: python from buildbot.plugins import * ShellCommand(command=['make'], doStepIf=Interpolate("worker:os_id").in_(['debian', 'ubuntu'])) 'in' operator cannot be overloaded, so we add a simple ``in_`` method to :class:`Property` and :class:`Interpolate`. Currently supported operators are ``in_``, ``==``, ``!=``, ``<``, ``<=``, ``>``, ``>=``, ``+``, ``-``, ``*``, ``/``, ``//``, ``%``. buildbot-3.4.0/master/docs/manual/configuration/report_generators/000077500000000000000000000000001413250514000254305ustar00rootroot00000000000000buildbot-3.4.0/master/docs/manual/configuration/report_generators/build.rst000066400000000000000000000065351413250514000272720ustar00rootroot00000000000000.. bb:reportgen:: BuildStatusGenerator .. _Reportgen-BuildStatusGenerator: BuildStatusGenerator ++++++++++++++++++++ .. py:class:: buildbot.reporters.BuildStatusGenerator This report generator sends a message when a build completes. In case a reporter is used to provide a live status notification for both build start and completion, :ref:`Reportgen-BuildStartEndStatusGenerator` is a better option. The following parameters are supported: ``subject`` (string, optional). A string to be used as the subject line of the message. 
``%(builder)s`` will be replaced with the name of the builder which provoked the message. ``%(result)s`` will be replaced with the name of the result of the build. ``%(title)s`` and ``%(projectName)s`` will be replaced with the title of the Buildbot instance. ``mode`` (list of strings or a string, optional). Defines the cases when a message should be sent. There are two strings which can be used as shortcuts instead of the full lists. The possible shortcuts are: ``all`` Send message for all cases. Equivalent to ``('change', 'failing', 'passing', 'problem', 'warnings', 'exception')``. ``warnings`` Equivalent to ``('warnings', 'failing')``. If the argument is list of strings, it must be a combination of: ``cancelled`` Send message about builds which were cancelled. ``change`` Send message about builds which change status. ``failing`` Send message about builds which fail. ``passing`` Send message about builds which succeed. ``problem`` Send message about a build which failed when the previous build has passed. ``warnings`` Send message about builds which generate warnings. ``exception`` Send message about builds which generate exceptions. Defaults to ``('failing', 'passing', 'warnings')``. ``builders`` (list of strings, optional). A list of builder names to serve build status information for. Defaults to ``None`` (all builds). Use either builders or tags, but not both. ``tags`` (list of strings, optional). A list of tag names to serve build status information for. Defaults to ``None`` (all tags). Use either builders or tags, but not both. ``schedulers`` (list of strings, optional). A list of scheduler names to serve build status information for. Defaults to ``None`` (all schedulers). ``branches`` (list of strings, optional). A list of branch names to serve build status information for. Defaults to ``None`` (all branches). ``add_logs`` (boolean or a list of strings, optional). If ``True``, include all build logs as attachments to the messages. These can be quite large. 
This can also be set to a list of log names to send a subset of the logs. Defaults to ``False``. ``add_patch`` (boolean, optional). If ``True``, include the patch content if a patch was present. Patches are usually used on a :class:`Try` server. Defaults to ``False``. ``report_new`` (boolean, optional) Whether new builds will be reported in addition to finished builds. Defaults to ``False``. ``message_formatter`` (optional, instance of ``reporters.MessageFormatter``) This is an optional instance of the ``reporters.MessageFormatter`` class that can be used to generate a custom message. buildbot-3.4.0/master/docs/manual/configuration/report_generators/build_start_end.rst000066400000000000000000000036751413250514000313370ustar00rootroot00000000000000.. bb:reportgen:: BuildStartEndStatusGenerator .. _Reportgen-BuildStartEndStatusGenerator: BuildStartEndStatusGenerator ++++++++++++++++++++++++++++ .. py:class:: buildbot.plugins.reporters.BuildStartEndStatusGenerator This report generator that sends a message both when a build starts and finishes. The following parameters are supported: ``builders`` (list of strings, optional). A list of builder names to serve build status information for. Defaults to ``None`` (all builds). Use either builders or tags, but not both. ``tags`` (list of strings, optional). A list of tag names to serve build status information for. Defaults to ``None`` (all tags). Use either builders or tags, but not both. ``schedulers`` (list of strings, optional). A list of scheduler names to serve build status information for. Defaults to ``None`` (all schedulers). ``branches`` (list of strings, optional). A list of branch names to serve build status information for. Defaults to ``None`` (all branches). ``add_logs`` (boolean or a list of strings, optional). If ``True``, include all build logs as attachments to the messages. These can be quite large. This can also be set to a list of log names to send a subset of the logs. Defaults to ``False``. 
``add_patch`` (boolean, optional). If ``True``, include the patch content if a patch was present. Patches are usually used on a :class:`Try` server. Defaults to ``False``. ``start_formatter`` (optional, instance of ``reporters.MessageFormatter`` or ``reporters.MessageFormatterRenderable``) This is an optional message formatter that can be used to generate a custom message at the start of the build. ``end_formatter`` (optional, instance of ``reporters.MessageFormatter`` or ``reporters.MessageFormatterRenderable``) This is an optional message formatter that can be used to generate a custom message at the end of the build. buildbot-3.4.0/master/docs/manual/configuration/report_generators/buildset.rst000066400000000000000000000063111413250514000277760ustar00rootroot00000000000000.. bb:reportgen:: BuildSetStatusGenerator .. _Reportgen-BuildSetStatusGenerator: BuildSetStatusGenerator +++++++++++++++++++++++ .. py:class:: buildbot.reporters.BuildSetStatusGenerator This report generator sends a message about builds in a buildset. It is very similar to :bb:reportgen:`BuildStatusGenerator` but sends single message about all builds in a buildset, not individual builds. The following parameters are supported: ``subject`` (string, optional). A string to be used as the subject line of the message. ``%(builder)s`` will be replaced with the name of the builder which provoked the message. ``%(result)s`` will be replaced with the name of the result of the build. ``%(title)s`` and ``%(projectName)s`` will be replaced with the title of the Buildbot instance. ``mode`` (list of strings or a string, optional). Defines the cases when a message should be sent. Only information about builds that matched the ``mode`` will be included. There are two strings which can be used as shortcuts instead of the full lists. The possible shortcuts are: ``all`` Send message for all cases. Equivalent to ``('change', 'failing', 'passing', 'problem', 'warnings', 'exception')``. 
``warnings`` Equivalent to ``('warnings', 'failing')``. If the argument is list of strings, it must be a combination of: ``cancelled`` Include builds which were cancelled. ``change`` Include builds which change status. ``failing`` Include builds which fail. ``passing`` Include builds which succeed. ``problem`` Include a build which failed when the previous build has passed. ``warnings`` Include builds which generate warnings. ``exception`` Include builds which generate exceptions. Defaults to ``('failing', 'passing', 'warnings')``. ``builders`` (list of strings, optional). A list of builder names to serve build status information for. Defaults to ``None`` (all builds). Use either builders or tags, but not both. ``tags`` (list of strings, optional). A list of tag names to serve build status information for. Defaults to ``None`` (all tags). Use either builders or tags, but not both. ``schedulers`` (list of strings, optional). A list of scheduler names to serve build status information for. Defaults to ``None`` (all schedulers). ``branches`` (list of strings, optional). A list of branch names to serve build status information for. Defaults to ``None`` (all branches). ``add_logs`` (boolean or a list of strings, optional). If ``True``, include all build logs as attachments to the messages. These can be quite large. This can also be set to a list of log names to send a subset of the logs. Defaults to ``False``. ``add_patch`` (boolean, optional). If ``True``, include the patch content if a patch was present. Patches are usually used on a :class:`Try` server. Defaults to ``False``. ``message_formatter`` (optional, instance of ``reporters.MessageFormatter``) This is an optional instance of the ``reporters.MessageFormatter`` class that can be used to generate a custom message. buildbot-3.4.0/master/docs/manual/configuration/report_generators/formatter.rst000066400000000000000000000152401413250514000301670ustar00rootroot00000000000000.. 
_MessageFormatter: MessageFormatter ++++++++++++++++ .. py:currentmodule:: buildbot.reporters.message This formatter is used to format messages in :ref:`Reportgen-BuildStatusGenerator` and :ref:`Reportgen-BuildSetStatusGenerator`. It formats a message using the Jinja2_ templating language and picks the template either from a string or from a file. The constructor of the class takes the following arguments: ``template`` If set, this parameter indicates the content of the template used to generate the body of the mail as string. ``template_type`` This indicates the type of the generated template. Use either 'plain' (the default) or 'html'. ``subject`` The content of the subject of the mail as string. ``ctx`` This is an extension of the standard context that will be given to the templates. Use this to add content to the templates that is otherwise not available. Alternatively, you can subclass MessageFormatter and override the :py:meth:`buildAdditionalContext` in order to grab more context from the data API. .. py:method:: buildAdditionalContext(master, ctx) :noindex: :param master: the master object :param ctx: the context dictionary to enhance :returns: optionally deferred default implementation will add ``self.ctx`` into the current template context ``want_properties`` This parameter (defaults to True) will extend the content of the given ``build`` object with the Properties from the build. ``wantProperties`` Deprecated, use ``want_properties`` set to the same value. ``want_steps`` This parameter (defaults to False) will extend the content of the given ``build`` object with information about the steps of the build. Use it only when necessary as this increases the overhead in terms of CPU and memory on the master. ``wantSteps`` Deprecated, use ``want_steps`` set to the same value. ``wantLogs`` Deprecated, use ``want_logs`` and ``want_logs_content`` set to the same value. 
``want_logs`` This parameter (defaults to False) will extend the content of the steps of the given ``build`` object with the log metadata of each steps from the build. This implies ``wantSteps`` to be `True`. Use it only when mandatory, as this greatly increases the overhead in terms of CPU and memory on the master. ``want_logs_content`` This parameter (defaults to False) will extend the content of the logs with the log contents of each steps from the build. This implies ``want_logs`` and ``wantSteps`` to be `True`. Use it only when mandatory, as this greatly increases the overhead in terms of CPU and memory on the master. Context ~~~~~~~ The context that is given to the template consists of the following data: ``results`` The results of the build. Equivalent to ``build['results']``. ``buildername`` The name of the builder. Equivalent to ``build['builder']['name']`` ``mode`` The mode argument that has been passed to the report generator. ``workername`` The name of the worker. Equivalent to the ``workername`` property of the build or ```` if it's not available. ``buildset`` The :bb:rtype:`buildset` dictionary from data API. ``build`` The :bb:rtype:`build` dictionary from data API. The ``properties`` attribute is populated only if ``want_properties`` is set to ``True``. It has the following extra properties: ``builder`` The :bb:rtype:`builder` dictionary from the data API that describes the builder of the build. ``buildrequest`` The :bb:rtype:`buildrequest` dictionary from the data API that describes the build request that the build was built for. ``buildset`` The :bb:rtype:`buildset` dictionary from the data API that describes the buildset that the build was built for. ``parentbuild`` The :bb:rtype:`build` dictionary from the data API that describes the parent build. This build is identified by the ``parent_buildid`` attribute of the buildset. ``parentbuilder`` The :bb:rtype:`builder` dictionary from the data API that describes the builder of the parent build. 
``url`` URL to the build in the Buildbot UI. ``prev_build`` The :bb:rtype:`build` dictionary from the data API that describes previous build, if any. This attribute is populated only if ``wantPreviousBuild`` is set to ``True``. ``steps`` A list of :bb:rtype:`step` dictionaries from the data API that describe steps in the build, if any. This attribute is populated only if ``wantSteps`` is set to ``True``. Additionally, if ``want_logs`` is set to ``True`` then the step dictionaries will contain ``logs`` attribute with a list of :bb:rtype:`log` dictionaries from the data API that describe the logs of the step. The log dictionaries will additionally contain ``url`` key with URL to the log in the web UI as the value. Additionally, if ``want_logs_content`` is set to ``True`` then the log dictionaries will contain ``contents`` key with full contents of the log. ``projects`` A string identifying the projects that the build was built for. ``previous_results`` Results of the previous build, if available, otherwise ``None``. ``status_detected`` String that describes the build in terms of current build results, previous build results and ``mode``. ``build_url`` URL to the build in the Buildbot UI. ``buildbot_url`` URL to the Buildbot instance. ``blamelist`` The list of users responsible for the build. ``summary`` A string that summarizes the build result. ``sourcestamps`` A string identifying the source stamps for which the build was made. Examples ~~~~~~~~ The following examples describe how to get some useful pieces of information from the various data objects: Name of the builder that generated this event ``{{ buildername }}`` Title of the BuildMaster ``{{ projects }}`` MailNotifier mode ``{{ mode }}`` (a combination of ``change``, ``failing``, ``passing``, ``problem``, ``warnings``, ``exception``, ``all``) URL to build page ``{{ build_url }}`` URL to Buildbot main page ``{{ buildbot_url }}`` Status of the build as string. 
This require extending the context of the Formatter via the ``ctx`` parameter with: ``ctx=dict(statuses=util.Results)``. ``{{ statuses[results] }}`` Build text ``{{ build['state_string'] }}`` Mapping of property names to (values, source) ``{{ build['properties'] }}`` For instance the build reason (from a forced build) ``{{ build['properties']['reason'][0] }}`` Worker name ``{{ workername }}`` List of responsible users ``{{ blamelist | join(', ') }}`` .. _Jinja2: http://jinja.pocoo.org/docs/dev/templates/ buildbot-3.4.0/master/docs/manual/configuration/report_generators/formatter_function.rst000066400000000000000000000033001413250514000320660ustar00rootroot00000000000000.. _MessageFormatterFunction: MessageFormatterFunction ++++++++++++++++++++++++ .. py:currentmodule:: buildbot.reporters.message This formatter can be used to generate arbitrary messages according to arbitrary calculations. As opposed to :ref:`MessageFormatterRenderable`, more information is made available to this reporter. .. py:class:: MessageFormatterFunction(function, template_type, want_properties=True, wantProperties=None, want_steps=False, wantSteps=None, wantLogs=None, want_logs=False, want_logs_content=False) :param callable function: A callable that will be called with a dictionary that contains ``build`` key with the value that contains the build dictionary as received from the data API. :param string template_type: either ``plain``, ``html`` or ``json`` depending on the output of the formatter. JSON output must not be encoded. :param boolean want_properties: include 'properties' in the build dictionary :param boolean wantProperties: deprecated, use ``want_properties`` instead :param boolean want_steps: include 'steps' in the build dictionary :param boolean wantSteps: deprecated, use ``want_steps`` instead :param boolean wantLogs: deprecated, use ``want_logs`` and ``want_logs_content`` set to the same value. :param boolean want_logs: include 'logs' in the steps dictionaries. 
This implies `wantSteps=True`. This includes only log metadata, for content use ``want_logs_content``. :param boolean want_logs_content: include logs content in the logs dictionaries. This implies `want_logs=True` and `wantSteps=True`. This dumps the *full* content of logs and may consume lots of memory and CPU depending on the log size. buildbot-3.4.0/master/docs/manual/configuration/report_generators/formatter_missing_worker.rst000066400000000000000000000037571413250514000333230ustar00rootroot00000000000000.. _MessageFormatterMissingWorkers: MessageFormatterMissingWorkers ++++++++++++++++++++++++++++++ .. py:currentmodule:: buildbot.reporters.message This formatter is used to format messages in :ref:`Reportgen-WorkerMissingGenerator`. It formats a message using the Jinja2_ templating language and picks the template either from a string or from a file. The constructor to that class takes the same arguments as MessageFormatter, minus ``wantLogs``, ``want_logs``, ``want_logs_content``, ``wantProperties``, ``want_properties``, ``wantSteps``, ``want_steps``. ``template`` The content of the template used to generate the body of the mail as string. ``template_type`` This indicates the type of the generated template. Use either 'plain' (the default) or 'html'. ``subject`` The content of the subject of the mail as string. ``ctx`` This is an extension of the standard context that will be given to the templates. Use this to add content to the templates that is otherwise not available. Alternatively, you can subclass MessageFormatter and override the :py:meth:`buildAdditionalContext` in order to grab more context from the data API. .. 
py:method:: buildAdditionalContext(master, ctx) :noindex: :param master: the master object :param ctx: the context dictionary to enhance :returns: optionally deferred The default implementation will add ``self.ctx`` into the current template context The default ``ctx`` for the missing worker email is made of: ``buildbot_title`` The Buildbot title as per ``c['title']`` from the ``master.cfg`` ``buildbot_url`` The Buildbot title as per ``c['title']`` from the ``master.cfg`` ``worker`` The worker object as defined in the REST api plus two attributes: ``notify`` List of emails to be notified for this worker. ``last_connection`` String describing the approximate time of last connection for this worker. .. _Jinja2: http://jinja.pocoo.org/docs/dev/templates/ buildbot-3.4.0/master/docs/manual/configuration/report_generators/formatter_renderable.rst000066400000000000000000000010571413250514000323530ustar00rootroot00000000000000.. _MessageFormatterRenderable: MessageFormatterRenderable ++++++++++++++++++++++++++ .. py:currentmodule:: buildbot.reporters.message This formatter is used to format messages in :ref:`Reportgen-BuildStatusGenerator`. It renders any renderable using the properties of the build that was passed by the status generator. The constructor of the class takes the following arguments: ``template`` A renderable that is used to generate the body of the build report. ``subject`` A renderable that is used to generate the subject of the build report. buildbot-3.4.0/master/docs/manual/configuration/report_generators/index.rst000066400000000000000000000033301413250514000272700ustar00rootroot00000000000000.. _Report-Generators: Report Generators ================= .. toctree:: :hidden: :maxdepth: 2 build build_start_end buildset worker formatter formatter_function formatter_renderable formatter_missing_worker Report generators abstract the conditions of when a message is sent by a :ref:`Reporter ` and the content of the message. 
Multiple report generators can be registered to a reporter. At this moment, only the following reporters support report generators: * :bb:reporter:`BitbucketServerPRCommentPush` * :bb:reporter:`BitbucketStatusPush` * :bb:reporter:`GitHubStatusPush` * :bb:reporter:`GitHubCommentPush` * :bb:reporter:`GitLabStatusPush` * :bb:reporter:`HttpStatusPush` * :bb:reporter:`MailNotifier` * :bb:reporter:`PushjetNotifier` * :bb:reporter:`PushoverNotifier` Eventually, report generator support will be added to the rest of the reporters as well. .. contents:: :depth: 2 :local: The following report generators are available: * :ref:`Reportgen-BuildStatusGenerator` * :ref:`Reportgen-BuildStartEndStatusGenerator` * :ref:`Reportgen-BuildSetStatusGenerator` * :ref:`Reportgen-WorkerMissingGenerator` The report generators may customize the reports using message formatters. The following message formatter classes are provided: * :ref:`MessageFormatter` (used in ``BuildStatusGenerator``, ``BuildStartEndStatusGenerator`` and ``BuildSetStatusGenerator``) * :ref:`MessageFormatterRenderable` (used in ``BuildStatusGenerator`` and ``BuildStartEndStatusGenerator``) * :ref:`MessageFormatterFunction` (used in ``BuildStatusGenerator`` and ``BuildStartEndStatusGenerator``) * :ref:`MessageFormatterMissingWorkers` (used in ``WorkerMissingGenerator``) buildbot-3.4.0/master/docs/manual/configuration/report_generators/worker.rst000066400000000000000000000014501413250514000274730ustar00rootroot00000000000000.. bb:reportgen:: WorkerMissingGenerator .. _Reportgen-WorkerMissingGenerator: WorkerMissingGenerator ++++++++++++++++++++++ .. py:class:: buildbot.reporters.WorkerMissingGenerator This report generator sends a message when a worker goes missing. The following parameters are supported: ``workers`` (``"all"`` or a list of strings, optional). Identifies the workers for which to send a message. ``"all"`` (the default) means that a message will be sent when any worker goes missing. 
The list version of the parameter specifies the names of the workers. ``message_formatter`` (optional, instance of ``reporters.MessageFormatterMissingWorker``) This is an optional instance of the ``reporters.MessageFormatterMissingWorker`` class that can be used to generate a custom message. buildbot-3.4.0/master/docs/manual/configuration/reporters/000077500000000000000000000000001413250514000237115ustar00rootroot00000000000000buildbot-3.4.0/master/docs/manual/configuration/reporters/bitbucket_server_core_api_status.rst000066400000000000000000000115431413250514000332550ustar00rootroot00000000000000.. bb:reporter:: BitbucketServerCoreAPIStatusPush BitbucketServerCoreAPIStatusPush ++++++++++++++++++++++++++++++++ .. code-block:: python from buildbot.plugins import reporters ss = reporters.BitbucketServerCoreAPIStatusPush('https://bitbucketserver.example.com:8080/', auth=('bitbucketserver_username', 'secret_password')) c['services'].append(ss) Or using `Bitbucket personal access token `_ .. code-block:: python from buildbot.plugins import reporters ss = reporters.BitbucketServerCoreAPIStatusPush('https://bitbucketserver.example.com:8080/', token='MDM0MjM5NDc2MDxxxxxxxxxxxxxxxxxxxxx') c['services'].append(ss) :class:`BitbucketServerCoreAPIStatusPush` publishes build status using `BitbucketServer Core REST API `_ into which it was integrated in `Bitbucket Server 7.4 `_. The build status is published to a specific commit SHA in specific repository in Bitbucket Server with some additional information about reference name, build duration, parent relationship and also possibly test results. It requires `txrequests`_ package to allow interaction with Bitbucket Server REST API. .. 
py:class:: BitbucketServerCoreAPIStatusPush(base_url, token=None, auth=None, name=None, statusSuffix=None, generators=None, key=None, parentName=None, buildNumber=None, ref=None, duration=None, testResults=None, verbose=False, debug=None, verify=None) :param string base_url: The base url of the Bitbucket Server host. :param string token: Bitbucket personal access token (mutually exclusive with `auth`) (can be a :ref:`Secret`) :param tuple auth: A tuple of Bitbucket Server username and password (mutually exclusive with `token`) (can be a :ref:`Secret`) :param renderable string statusName: The name that is displayed for this status. If not defined it is constructed to look like `"%(prop:buildername)s #%(prop:buildnumber)s"`. Or if the plan has a parent plan the default is constructed to look like `" # >> %(prop:buildername)s #%(prop:buildnumber)s"`. Note: Parent information is not accessible as properties for user defined renderer. :param renderable string statusSuffix: Additional string that is appended to `statusName`. Empty by default. It is useful when the same plan is launched multiple times for a single parent plan instance. This way every instance of the child plan can have unique suffix and thus be more recognizable (than it would be just by the buildnumber). :type generators: list of IReportGenerator instances :param generators: A list of report generators that will be used to generate reports to be sent by this reporter. Currently the reporter will consider only the report generated by the first generator. :param renderable string key: Passed to Bitbucket Server to differentiate between statuses. A static string can be passed or :class:`Interpolate` for dynamic substitution. The default key is `%(prop:buildername)s`. :param renderable string parentName: Defaults to parent's buildername if plan has a parent plan. Otherwise plan's own buildername is used as default. :param renderable string buildNumber: The default build number is `%(prop:buildername)s`. 
:param renderable string ref: By default branch name from :class:`SourceStamp` is used. If branch doesn't start with string `refs/` prefix `refs/heads/` is added to it's beginning. :param renderable int duration: Computed for finished builds. Otherwise None. (value in milliseconds) :param renderable dict testResults: Test results can be reported via this parameter. Resulting dictionary must contain keys `failed`, `skipped`, `successful`. By default these keys are filled with values from build properties (`tests_failed`, `tests_skipped`, `tests_successful`) if at least one of the properties is found (missing values will default to `0`). Otherwise None. Note: If you want to suppress the default behavior pass renderable that always interpolates to None. :param boolean verbose: If True, logs a message for each successful status push. :param boolean verify: Disable ssl verification for the case you use temporary self signed certificates. :param boolean debug: Logs every requests and their response. .. _txrequests: https://pypi.python.org/pypi/txrequests buildbot-3.4.0/master/docs/manual/configuration/reporters/bitbucket_server_pr_comment_push.rst000066400000000000000000000036021413250514000332700ustar00rootroot00000000000000.. bb:reporter:: BitbucketServerPRCommentPush BitbucketServerPRCommentPush ++++++++++++++++++++++++++++ .. py:currentmodule:: buildbot.reporters.bitbucketserver .. code-block:: python from buildbot.plugins import reporters ss = reporters.BitbucketServerPRCommentPush('https://bitbucket-server.example.com:8080/', 'bitbucket_server__username', 'secret_password') c['services'].append(ss) :class:`BitbucketServerPRCommentPush` publishes a comment on a PR using `Bitbucket Server REST API `_. .. 
py:class:: BitbucketServerPRCommentPush(base_url, user, password, verbose=False, debug=None, verify=None, mode=('failing', 'passing', 'warnings'), tags=None, generators=None) The following parameters are accepted by this reporter: ``base_url`` (string) The base url of the Bitbucket server host. ``user`` (string) The Bitbucket server user to post as. (can be a :ref:`Secret`) ``password`` (string) The Bitbucket server user's password. (can be a :ref:`Secret`) ``generators`` (list) A list of instances of ``IReportGenerator`` which defines the conditions of when the messages will be sent and contents of them. See :ref:`Report-Generators` for more information. ``verbose`` (boolean, defaults to ``False``) If ``True``, logs a message for each successful status push. ``debug`` (boolean, defaults to ``False``) If ``True``, logs every requests and their response ``verify`` (boolean, defaults to ``None``) If ``False``, disables SSL verification for the case you use temporary self signed certificates. Default enables SSL verification. .. Note:: This reporter depends on the Bitbucket server hook to get the pull request url. buildbot-3.4.0/master/docs/manual/configuration/reporters/bitbucket_server_status.rst000066400000000000000000000050301413250514000314060ustar00rootroot00000000000000.. bb:reporter:: BitbucketServerStatusPush BitbucketServerStatusPush +++++++++++++++++++++++++ .. code-block:: python from buildbot.plugins import reporters ss = reporters.BitbucketServerStatusPush('https://bitbucketserver.example.com:8080/', 'bitbucketserver_username', 'secret_password') c['services'].append(ss) :class:`BitbucketServerStatusPush` publishes build status using `BitbucketServer Build Integration REST API `_. The build status is published to a specific commit SHA in Bitbucket Server. It tracks the last build for each builderName for each commit built. Specifically, it follows the `Updating build status for commits `_ document. 
It requires `txrequests`_ package to allow interaction with Bitbucket Server REST API. It uses HTTP Basic AUTH. As a result, we recommend you use https in your base_url rather than http. .. py:class:: BitbucketServerStatusPush(base_url, user, password, key=None, statusName=None, generators=None, verbose=False) :param string base_url: The base url of the Bitbucket Server host, up to and optionally including the first `/` of the path. :param string user: The Bitbucket Server user to post as. (can be a :ref:`Secret`) :param string password: The Bitbucket Server user's password. (can be a :ref:`Secret`) :param renderable string key: Passed to Bitbucket Server to differentiate between statuses. A static string can be passed or :class:`Interpolate` for dynamic substitution. The default key is `%(prop:buildername)s`. :param renderable string statusName: The name that is displayed for this status. The default name is nothing, so Bitbucket Server will use the ``key`` parameter. :type generators: list of IReportGenerator instances :param generators: A list of report generators that will be used to generate reports to be sent by this reporter. Currently the reporter will consider only the report generated by the first generator. :param boolean verbose: If True, logs a message for each successful status push. :param boolean verify: Disable ssl verification for the case you use temporary self signed certificates :param boolean debug: Logs every requests and their response .. _txrequests: https://pypi.python.org/pypi/txrequests buildbot-3.4.0/master/docs/manual/configuration/reporters/bitbucket_status.rst000066400000000000000000000067771413250514000300430ustar00rootroot00000000000000.. bb:reporter:: BitbucketStatusPush BitbucketStatusPush +++++++++++++++++++ .. py:currentmodule:: buildbot.reporters.bitbucket .. 
code-block:: python from buildbot.plugins import reporters c['services'].append(reporters.BitbucketStatusPush('oauth_key', 'oauth_secret')) :class:`BitbucketStatusPush` publishes build status using the `Bitbucket Build Status API `_. The build status is published to a specific commit SHA in Bitbucket. By default, it tracks the last build for each builder and each commit built. It requires `txrequests`_ package to allow interaction with the Bitbucket REST and OAuth APIs. It uses OAuth 2.x to authenticate with Bitbucket. To enable this, you need to go to your Bitbucket Settings -> OAuth page. Click "Add consumer". Give the new consumer a name, e.g. buildbot, and put in any URL as the callback (this is needed for Oauth 2.x, but it's not used by this reporter), e.g. http://localhost:8010/callback. Give the consumer `Repositories:Write` access. After creating the consumer, you will then be able to see the OAuth key and secret. .. py:class:: BitbucketStatusPush(oauth_key, oauth_secret, base_url='https://api.bitbucket.org/2.0/repositories', oauth_url='https://bitbucket.org/site/oauth2/access_token', status_key=None, status_name=None, generators=None) :param string oauth_key: The OAuth consumer key. (can be a :ref:`Secret`) :param string oauth_secret: The OAuth consumer secret. (can be a :ref:`Secret`) :param string base_url: Bitbucket's Build Status API URL :param string oauth_url: Bitbucket's OAuth API URL :param string status_key: Key that identifies a build status. Setting the key to a unique value per build allows to push multiple build statuses to a given commit. A static string can be passed or :class:`Interpolate` for dynamic substitution. The default key is ``%(prop:buildername)s`` :param string status_name: Name of a build status. It shows up next to the status icon in Bitbucket. A static string can be passed or :class:`Interpolate` for dynamic substitution. 
The default name is ``%(prop:buildername)s`` :param generators: A list of report generators that will be used to generate reports to be sent by this reporter. Currently the reporter will consider only the report generated by the first generator. The subject of the report will be used to set the description of the build status. The default subject is an empty string :param boolean verify: Disable ssl verification for the case you use temporary self signed certificates :param boolean debug: Logs every request and its response For example, the following reporter .. code-block:: python from buildbot.plugins import reporters ss = reporters.BitbucketStatusPush( 'oauth_key', 'oauth_secret', status_key=Interpolate("%(prop:buildername)s/%(prop:buildnumber)s"), status_name=Interpolate("%(prop:buildername)s/%(prop:buildnumber)s"), generators=[ reporters.BuildStartEndStatusGenerator( start_formatter=reporters.MessageFormatter(subject="{{ status_detected }}"), end_formatter=reporters.MessageFormatter(subject="{{ status_detected }}")) ]) c['services'].append(ss) produces the build statuses below when a build starts and ends, respectively. .. image:: ../../../_images/bitbucket-status-push.png .. _txrequests: https://pypi.python.org/pypi/txrequests buildbot-3.4.0/master/docs/manual/configuration/reporters/gerrit_status.rst000066400000000000000000000121661413250514000273500ustar00rootroot00000000000000.. bb:reporter:: GerritStatusPush GerritStatusPush ++++++++++++++++ .. py:currentmodule:: buildbot.reporters.status_gerrit :class:`GerritStatusPush` sends review of the :class:`Change` back to the Gerrit server, optionally also sending a message when a build is started. GerritStatusPush can send a separate review for each build that completes, or a single review summarizing the results for all of the builds. .. py:class:: GerritStatusPush(server, username, reviewCB, startCB, port, reviewArg, startArg, summaryCB, summaryArg, identity_file, builders, notify...)
:param string server: Gerrit SSH server's address to use for push event notifications. :param string username: Gerrit SSH server's username. :param identity_file: (optional) Gerrit SSH identity file. :param int port: (optional) Gerrit SSH server's port (default: 29418) :param reviewCB: (optional) Called each time a build finishes. Build properties are available. Can be a deferred. :param reviewArg: (optional) Argument passed to the review callback. :: If :py:func:`reviewCB` callback is specified, it must return a message and optionally labels. If no message is specified, nothing will be sent to Gerrit. It should return a dictionary: .. code-block:: python {'message': message, 'labels': {label-name: label-score, ...} } For example: .. literalinclude:: /examples/git_gerrit.cfg :pyobject: gerritReviewCB :language: python Which require an extra import in the config: .. code-block:: python from buildbot.plugins import util :param startCB: (optional) Called each time a build is started. Build properties are available. Can be a deferred. :param startArg: (optional) Argument passed to the start callback. If :py:func:`startCB` is specified, it must return a message and optionally labels. If no message is specified, nothing will be sent to Gerrit. It should return a dictionary: .. code-block:: python {'message': message, 'labels': {label-name: label-score, ...} } For example: .. literalinclude:: /examples/git_gerrit.cfg :pyobject: gerritStartCB :language: python :param summaryCB: (optional) Called each time a buildset finishes. Each build in the buildset has properties available. Can be a deferred. :param summaryArg: (optional) Argument passed to the summary callback. If :py:func:`summaryCB` callback is specified, it must return a message and optionally labels. If no message is specified, nothing will be sent to Gerrit. The message and labels should be a summary of all the builds within the buildset. It should return a dictionary: .. 
code-block:: python {'message': message, 'labels': {label-name: label-score, ...} } For example: .. literalinclude:: /examples/git_gerrit.cfg :pyobject: gerritSummaryCB :language: python :param builders: (optional) List of builders to send results for. This parameter allows filtering results for a specific set of builders. By default, or if builders is None, then no filtering is performed. :param notify: (optional) Control who gets notified by Gerrit once the status is posted. The possible values for `notify` can be found in your version of the Gerrit documentation for the `gerrit review` command. :param wantSteps: (optional, defaults to False) Extends the given ``build`` object with information about steps of the build. Use it only when necessary as this increases the overhead in terms of CPU and memory on the master. :param wantLogs: (optional, defaults to False) Extends the steps of the given ``build`` object with the full logs of the build. This requires ``wantSteps`` to be True. Use it only when mandatory as this increases the overhead in terms of CPU and memory on the master greatly. .. note:: By default, a single summary review is sent; that is, a default :py:func:`summaryCB` is provided, but no :py:func:`reviewCB` or :py:func:`startCB`. .. note:: If :py:func:`reviewCB` or :py:func:`summaryCB` do not return any labels, only a message will be pushed to the Gerrit server. .. seealso:: :src:`master/docs/examples/git_gerrit.cfg` and :src:`master/docs/examples/repo_gerrit.cfg` in the Buildbot distribution provide a full example setup of Git+Gerrit or Repo+Gerrit of :bb:reporter:`GerritStatusPush`. buildbot-3.4.0/master/docs/manual/configuration/reporters/gerrit_verify_status.rst000066400000000000000000000052741413250514000307360ustar00rootroot00000000000000.. bb:reporter:: GerritVerifyStatusPush GerritVerifyStatusPush ++++++++++++++++++++++ .. py:currentmodule:: buildbot.reporters.status_gerrit_verify_status ..
py:class:: GerritVerifyStatusPush :class:`GerritVerifyStatusPush` sends a verify status to Gerrit using the verify-status_ Gerrit plugin. It is an alternate method to :bb:reporter:`GerritStatusPush`, which uses the SSH API to send reviews. The verify-status_ plugin allows several CI statuses to be sent for the same change, and display them separately in the Gerrit UI. Most parameters are :index:`renderables `. .. py:class:: GerritVerifyStatusPush( \ baseURL, auth, \ verification_name=Interpolate("%(prop:buildername)s"), abstain=False, category=None, reporter=None, \ verbose=False, generators=None, **kwargs) :noindex: :param string baseURL: Gerrit HTTP base URL :param string auth: A requests authentication configuration. (can be a :ref:`Secret`). If Gerrit is configured with ``BasicAuth``, then it shall be ``('login', 'password')``. If Gerrit is configured with ``DigestAuth``, then it shall be ``requests.auth.HTTPDigestAuth('login', 'password')`` from the requests module. :type generators: list of IReportGenerator instances :param generators: A list of report generators that will be used to generate reports to be sent by this reporter. Currently the reporter will consider only the report generated by the first generator. :param renderable string verification_name: The name of the job displayed in the Gerrit UI :param renderable boolean abstain: Whether this results should be counted as voting :param renderable boolean category: Category of the build :param renderable boolean reporter: The user that verified this build :param boolean verbose: Whether to log every requests :param boolean verify: Disable ssl verification for the case you use temporary self signed certificates :param boolean debug: Logs every requests and their response This reporter is integrated with :class:`GerritChangeSource`, and will update changes detected by this change source. 
This reporter can also send reports for changes triggered manually provided that there is a property in the build named ``gerrit_changes``, containing the list of changes that were tested. This property must be a list of dictionaries, containing ``change_id`` and ``revision_id`` keys, as defined in the revision endpoints of the `Gerrit documentation`_. .. _txrequests: https://pypi.python.org/pypi/txrequests .. _verify-status: https://gerrit.googlesource.com/plugins/verify-status .. _Gerrit documentation: https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#revision-endpoints buildbot-3.4.0/master/docs/manual/configuration/reporters/github_comment.rst000066400000000000000000000063231413250514000274530ustar00rootroot00000000000000.. bb:reporter:: GitHubCommentPush GitHubCommentPush +++++++++++++++++ .. py:currentmodule:: buildbot.plugins.reporters .. code-block:: python from buildbot.plugins import reporters, util context = Interpolate("bb/%(prop:buildername)s") c['services'].append(reporters.GitHubCommentPush(token='githubAPIToken', context=context)) :class:`GitHubCommentPush` publishes a comment on a GitHub PR using `GitHub Review Comments API `_. It requires `txrequests`_ package to allow interaction with GitHub REST API. It requires a GitHub API token in order to operate. By default, the reporter will only comment at the end of a build unless a custom build report generator is supplied. You can create a token from your own `GitHub - Profile - Applications - Register new application `_ or use an external tool to generate one. .. py:class:: GitHubCommentPush(token, context=None, generators=None, baseURL=None, verbose=False) :param string token: Token used for authentication. (can be a :ref:`Secret`) :type context: renderable string :param context: Passed to GitHub to differentiate between statuses. A static string can be passed or :class:`Interpolate` for dynamic substitution. The default context is ``buildbot/%(prop:buildername)s``. 
:type generators: list of IReportGenerator instances :param generators: A list of report generators that will be used to generate reports to be sent by this reporter. Currently the reporter will consider only the report generated by the first generator. :param string baseURL: Specify the github API endpoint if you work with GitHub Enterprise :param boolean verbose: If True, logs a message for each successful status push Here's a complete example of posting build results as a github comment: .. code-block:: python @util.renderer @defer.inlineCallbacks def getresults(props): all_logs=[] master = props.master steps = yield props.master.data.get( ('builders', props.getProperty('buildername'), 'builds', props.getProperty('buildnumber'), 'steps')) for step in steps: if step['results'] == util.Results.index('failure'): logs = yield master.data.get(("steps", step['stepid'], 'logs')) for l in logs: all_logs.append('Step : {0} Result : {1}'.format( step['name'], util.Results[step['results']])) all_logs.append('```') l['stepname'] = step['name'] l['content'] = yield master.data.get(("logs", l['logid'], 'contents')) step_logs = l['content']['content'].split('\n') include = False for i, sl in enumerate(step_logs): all_logs.append(sl[1:]) all_logs.append('```') return '\n'.join(all_logs) generator = BuildStatusGenerator(message_formatter=MessageFormatterRenderable(getresults)) c['services'].append(GitHubCommentPush(token='githubAPIToken', generators=[generator])) .. _txrequests: https://pypi.python.org/pypi/txrequests buildbot-3.4.0/master/docs/manual/configuration/reporters/github_status.rst000066400000000000000000000033151413250514000273320ustar00rootroot00000000000000.. bb:reporter:: GitHubStatusPush GitHubStatusPush ++++++++++++++++ .. py:currentmodule:: buildbot.plugins.reporters .. 
code-block:: python from buildbot.plugins import reporters, util context = Interpolate("bb/%(prop:buildername)s") c['services'].append(reporters.GitHubStatusPush(token='githubAPIToken', context=context)) :class:`GitHubStatusPush` publishes a build status using the `GitHub Status API `_. It requires `txrequests`_ package to allow interaction with the GitHub REST API. It requires a GitHub API token in order to operate. You can create a token from your own `GitHub - Profile - Applications - Register new application `_ or use an external tool to generate one. .. py:class:: GitHubStatusPush(token, context=None, generators=None, baseURL=None, verbose=False) :param string token: Token used for authentication. (can be a :ref:`Secret`) :type context: renderable string :param context: Passed to GitHub to differentiate between statuses. A static string can be passed or :class:`Interpolate` for dynamic substitution. The default context is ``buildbot/%(prop:buildername)s``. :type generators: list of IReportGenerator instances :param generators: A list of report generators that will be used to generate reports to be sent by this reporter. Currently the reporter will consider only the report generated by the first generator. :param string baseURL: Specify the github api endpoint if you work with GitHub Enterprise :param boolean verbose: If True, logs a message for each successful status push .. _txrequests: https://pypi.python.org/pypi/txrequests buildbot-3.4.0/master/docs/manual/configuration/reporters/gitlab_status.rst000066400000000000000000000035071413250514000273150ustar00rootroot00000000000000.. bb:reporter:: GitLabStatusPush GitLabStatusPush ++++++++++++++++ .. py:currentmodule:: buildbot.reporters.gitlab .. 
code-block:: python from buildbot.plugins import reporters gl = reporters.GitLabStatusPush('private-token', context='continuous-integration/buildbot', baseURL='https://git.yourcompany.com') c['services'].append(gl) :class:`GitLabStatusPush` publishes build status using `GitLab Commit Status API `_. The build status is published to a specific commit SHA in GitLab. It requires `txrequests`_ package to allow interaction with GitLab Commit Status API. It uses private token auth, and the token owner is required to have at least developer access to each repository. As a result, we recommend you use https in your base_url rather than http. .. py:class:: GitLabStatusPush(token, context=None, baseURL=None, generators=None, verbose=False) :param string token: Private token of user permitted to update status for commits. (can be a :ref:`Secret`) :param string context: Name of your build system, e.g. continuous-integration/buildbot :type generators: list of IReportGenerator instances :param generators: A list of report generators that will be used to generate reports to be sent by this reporter. Currently the reporter will consider only the report generated by the first generator. :param string baseURL: The base url of the GitLab host, up to and optionally including the first `/` of the path. Do not include /api/ :param string verbose: Be more verbose :param boolean verify: Disable ssl verification for the case you use temporary self signed certificates :param boolean debug: Logs every requests and their response .. _txrequests: https://pypi.python.org/pypi/txrequests buildbot-3.4.0/master/docs/manual/configuration/reporters/http_status.rst000066400000000000000000000046161413250514000270340ustar00rootroot00000000000000.. bb:reporter:: HttpStatusPush HttpStatusPush ++++++++++++++ .. py:currentmodule:: buildbot.reporters .. 
code-block:: python from buildbot.plugins import reporters sp = reporters.HttpStatusPush(serverUrl="http://example.com/submit") c['services'].append(sp) :class:`HttpStatusPush` sends HTTP POST requests to ``serverUrl``. The body of request contains json-encoded data of the build as returned by the data API. It is useful to create a status front end outside of Buildbot for better scalability. It requires either `txrequests`_ or `treq`_ to be installed to allow interaction with HTTP server. .. note:: The json data object sent is completely different from the one that was generated by 0.8.x buildbot. It is indeed generated using data api. .. py:class:: HttpStatusPush(serverUrl, auth=None, headers=None, generators=None, debug=None, verify=None) :param string serverUrl: The url where to do the HTTP POST request :param auth: The authentication method to use. Refer to the documentation of the requests library for more information. :param dict headers: Pass custom headers to HTTP request. :type generators: list of IReportGenerator instances :param generators: A list of report generators that will be used to generate reports to be sent by this reporter. Currently the reporter will consider only the report generated by the first generator. :param boolean debug: Logs every requests and their response :param boolean verify: Disable ssl verification for the case you use temporary self signed certificates :param boolean skipEncoding: Disables encoding of json data to bytes before pushing to server Json object spec ~~~~~~~~~~~~~~~~ The default json object sent is a build object augmented with some more data as follow. .. code-block:: json { "url": "http://yourbot/path/to/build", "": "[...]", "buildset": "", "builder": "", "buildrequest": "" } If you want another format, don't hesitate to use the ``format_fn`` parameter to customize the payload. 
The ``build`` parameter given to that function is of type :bb:rtype:`build`, optionally enhanced with properties, steps, and logs information. .. _txrequests: https://pypi.python.org/pypi/txrequests .. _treq: https://pypi.python.org/pypi/treq buildbot-3.4.0/master/docs/manual/configuration/reporters/index.rst000066400000000000000000000045661413250514000255650ustar00rootroot00000000000000.. bb:cfg:: reporter .. _Reporters: Reporters ========= .. toctree:: :hidden: :maxdepth: 2 reporter_base bitbucket_server_core_api_status bitbucket_server_pr_comment_push bitbucket_server_status bitbucket_status gerrit_status gerrit_verify_status github_comment github_status gitlab_status http_status irc mail_notifier pushjet_notifier pushover_notifier telegram zulip_status The Buildmaster has a variety of ways to present build status to various users. Each such delivery method is a `Reporter Target` object in the configuration's ``services`` list. To add reporter targets, you just append more objects to this list: .. code-block:: python c['services'] = [] m = reporters.MailNotifier(fromaddr="buildbot@localhost", extraRecipients=["builds@lists.example.com"], sendToInterestedUsers=False) c['services'].append(m) c['services'].append(reporters.irc.IRC(host="irc.example.com", nick="bb", channels=[{"channel": "#example1"}, {"channel": "#example2", "password": "somesecretpassword"}])) Most reporter objects take a ``tags=`` argument, which can contain a list of tag names. In this case, the reporters will only show status for Builders that contain the named tags. .. note:: Implementation Note Each of these objects should be a :class:`service.BuildbotService` which will be attached to the BuildMaster object when the configuration is processed. 
The following reporters are available: * :bb:reporter:`BitbucketServerCoreAPIStatusPush` * :bb:reporter:`BitbucketServerPRCommentPush` * :bb:reporter:`BitbucketServerStatusPush` * :bb:reporter:`BitbucketStatusPush` * :bb:reporter:`GerritStatusPush` * :bb:reporter:`GerritVerifyStatusPush` * :bb:reporter:`GitHubCommentPush` * :bb:reporter:`GitHubStatusPush` * :bb:reporter:`GitLabStatusPush` * :bb:reporter:`HttpStatusPush` * :bb:reporter:`IRC` * :bb:reporter:`MailNotifier` * :bb:reporter:`PushjetNotifier` * :bb:reporter:`PushoverNotifier` * :bb:reporter:`TelegramBot` * :bb:reporter:`ZulipStatusPush` Most of the report generators derive from :class:`ReporterBase` which implements basic reporter management functionality. buildbot-3.4.0/master/docs/manual/configuration/reporters/irc.rst000066400000000000000000000262561413250514000252330ustar00rootroot00000000000000.. bb:reporter:: IRC IRC Bot +++++++ .. py:currentmodule:: buildbot.reporters.irc .. py:class:: IRC The :bb:reporter:`IRC` reporter creates an IRC bot which will attach to certain channels and be available for status queries. It can also be asked to announce builds as they occur, or be told to shut up. The IRC Bot in buildbot nine, is mostly a rewrite, and not all functionality has been ported yet. Patches are very welcome for restoring the full functionality. .. code-block:: python from buildbot.plugins import reporters irc = reporters.IRC("irc.example.org", "botnickname", useColors=False, channels=[{"channel": "#example1"}, {"channel": "#example2", "password": "somesecretpassword"}], password="mysecretnickservpassword", authz={('force', 'stop'): "authorizednick"} notify_events=[ 'exception', 'problem', 'recovery', 'worker' ]) c['services'].append(irc) The following parameters are accepted by this class: ``host`` (mandatory) The IRC server address to connect to. ``nick`` (mandatory) The name this bot will use on the IRC server. ``channels`` (mandatory) This is a list of channels to join on the IRC server. 
Each channel can be a string (e.g. ``#buildbot``), or a dictionary ``{'channel': '#buildbot', 'password': 'secret'}`` if each channel requires a different password. A global password can be set with the ``password`` parameter. ``pm_to_nicks`` (optional) This is a list of person to contact on the IRC server. ``authz`` (optional) Authentication list for commands. It must be a dictionary with command names or tuples of command names as keys. There are two special command names: ``''`` (empty string) meaning any harmless command and ``'!'`` for dangerous commands (currently ``force``, ``stop``, and ``shutdown``). The dictionary values are either ``True`` of ``False`` (which allows or deny commands for everybody) or a list of nicknames authorized to issue specified commands. By default, harmless commands are allowed for everybody and the dangerous ones are prohibited. A sample ``authz`` parameter may look as follows: .. code-block:: python authz={ 'version': True, '': ['alice', 'bob'], ('force', 'stop'): ['alice'], } Anybody will be able to run the ``version`` command, *alice* and *bob* will be allowed to run any safe command and *alice* will also have the right to force and stop builds. This parameter replaces older ``allowForce`` and ``allowShutdown``, which are deprecated as they were considered a security risk. .. note:: The authorization is purely nick-based, so it only makes sense if the specified nicks are registered to the IRC server. ``port`` (optional, default to 6667) The port to connect to on the IRC server. ``tags`` (optional) When set, this bot will only communicate about builders containing those tags. (tags functionality is not yet ported) ``password`` (optional) The global password used to register the bot to the IRC server. If provided, it will be sent to Nickserv to claim the nickname: some IRC servers will not allow clients to send private messages until they have logged in with a password. Can be a :ref:`Secret`. 
``notify_events`` (optional) A list or set of events to be notified on the IRC channels. At the moment, irc bot can listen to build 'start' and 'finish' events. It can also notify about missing workers and their return. This parameter can be changed during run-time by sending the ``notify`` command to the bot. Note however, that at the buildbot restart or reconfig the notifications listed here will be turned on for the specified channel and nicks. On the other hand, removing events from this parameters will not automatically stop notifications for them (you need to turn them off for every channel with the ``notify`` command). ``noticeOnChannel`` (optional, disabled by default) Whether to send notices rather than messages when communicating with a channel. ``showBlameList`` (optional, disabled by default) Whether or not to display the blame list for failed builds. (blame list functionality is not ported yet) ``useRevisions`` (optional, disabled by default) Whether or not to display the revision leading to the build the messages are about. (useRevisions functionality is not ported yet) ``useSSL`` (optional, disabled by default) Whether or not to use SSL when connecting to the IRC server. Note that this option requires `PyOpenSSL`_. ``lostDelay`` (optional) Delay to wait before reconnecting to the server when the connection has been lost. ``failedDelay`` (optional) Delay to wait before reconnecting to the IRC server when the connection failed. ``useColors`` (optional, enabled by default) The bot can add color to some of its messages. You might turn it off by setting this parameter to ``False``. The following parameters are deprecated. You must not use them if you use the new ``authz`` parameter. .. note:: Security Note Please note that any user having access to your irc channel or can PM the bot will be able to create or stop builds :bug:`3377`. Use ``authz`` to give explicit list of nicks who are allowed to do this. 
``allowForce`` (deprecated, disabled by default) This allows all users to force and stop builds via this bot. ``allowShutdown`` (deprecated, disabled by default) This allows all users to shut down the master. To use the service, you address messages at the Buildbot, either normally (``botnickname: status``) or with private messages (``/msg botnickname status``). The Buildbot will respond in kind. If you issue a command that is currently not available, the Buildbot will respond with an error message. If the ``noticeOnChannel=True`` option was used, error messages will be sent as channel notices instead of messaging. Some of the commands currently available: ``list builders`` Emit a list of all configured builders :samp:`status {BUILDER}` Announce the status of a specific Builder: what it is doing right now. ``status all`` Announce the status of all Builders :samp:`watch {BUILDER}` If the given :class:`Builder` is currently running, wait until the :class:`Build` is finished and then announce the results. :samp:`last {BUILDER}` Return the results of the last build to run on the given :class:`Builder`. :samp:`notify on|off|list {EVENT}` Report events relating to builds. If the command is issued as a private message, then the report will be sent back as a private message to the user who issued the command. Otherwise, the report will be sent to the channel. Available events to be notified are: ``started`` A build has started. ``finished`` A build has finished. ``success`` A build finished successfully. ``failure`` A build failed. ``exception`` A build generated an exception. ``cancelled`` A build was cancelled. ``problem`` The previous build result was success or warnings, but this one ended with failure or exception. ``recovery`` This is the opposite of ``problem``: the previous build result was failure or exception and this one ended with success or warnings. ``worse`` A build state was worse than the previous one (so e.g.
it ended with warnings and the previous one was successful). ``better`` A build state was better than the previous one. ``worker`` A worker is missing. A notification is also sent when the previously reported missing worker connects again. By default, this command can be executed by anybody. However, consider limiting it with ``authz``, as enabling notifications in huge number of channels or private chats can cause some problems with your buildbot efficiency. :samp:`help {COMMAND}` Describe a command. Use :command:`help commands` to get a list of known commands. ``source`` Announce the URL of the Buildbot's home page. ``version`` Announce the version of this Buildbot. Additionally, the config file may specify default notification options as shown in the example earlier. If explicitly allowed in the ``authz`` config, some additional commands will be available: :samp:`join {CHANNEL}` Join the given IRC channel :samp:`leave {CHANNEL}` Leave the given IRC channel .. index:: Properties; from forced build :samp:`force build [--codebase={CODEBASE}] [--branch={BRANCH}] [--revision={REVISION}] [--props=PROP1=VAL1,PROP2=VAL2...] {BUILDER} {REASON}` Tell the given :class:`Builder` to start a build of the latest code. The user requesting the build and *REASON* are recorded in the :class:`Build` status. The Buildbot will announce the build's status when it finishes. The user can specify a branch and/or revision with the optional parameters :samp:`--branch={BRANCH}` and :samp:`--revision={REVISION}`. The user can also give a list of properties with :samp:`--props={PROP1=VAL1,PROP2=VAL2..}`. :samp:`stop build {BUILDER} {REASON}` Terminate any running build in the given :class:`Builder`. *REASON* will be added to the build status to explain why it was stopped. You might use this if you committed a bug, corrected it right away, and don't want to wait for the first build (which is destined to fail) to complete before starting the second (hopefully fixed) build.
:samp:`shutdown {ARG}` Control the shutdown process of the Buildbot master. Available arguments are: ``check`` Check if the Buildbot master is running or shutting down ``start`` Start clean shutdown ``stop`` Stop clean shutdown ``now`` Shutdown immediately without waiting for the builders to finish If the `tags` is set (see the tags option in :ref:`Builder-Configuration`) changes related to only builders belonging to those tags of builders will be sent to the channel. If the `useRevisions` option is set to `True`, the IRC bot will send status messages that replace the build number with a list of revisions that are contained in that build. So instead of seeing `build #253 of ...`, you would see something like `build containing revisions [a87b2c4]`. Revisions that are stored as hashes are shortened to 7 characters in length, as multiple revisions can be contained in one build and may exceed the IRC message length limit. Two additional arguments can be set to control how fast the IRC bot tries to reconnect when it encounters connection issues. ``lostDelay`` is the number of seconds the bot will wait to reconnect when the connection is lost, where as ``failedDelay`` is the number of seconds until the bot tries to reconnect when the connection failed. ``lostDelay`` defaults to a random number between 1 and 5, while ``failedDelay`` defaults to a random one between 45 and 60. Setting random defaults like this means multiple IRC bots are less likely to deny each other by flooding the server. .. _PyOpenSSL: http://pyopenssl.sourceforge.net/ buildbot-3.4.0/master/docs/manual/configuration/reporters/mail_notifier.rst000066400000000000000000000242171413250514000272720ustar00rootroot00000000000000.. bb:reporter:: MailNotifier MailNotifier ++++++++++++ .. py:currentmodule:: buildbot.reporters.mail .. py:class:: MailNotifier Buildbot can send emails when builds finish. The most common use of this is to tell developers when their change has caused the build to fail. 
It is also quite common to send a message to a mailing list (usually named `builds` or similar) about every build. The :class:`MailNotifier` reporter is used to accomplish this. You configure it by specifying who should receive mail, under what circumstances mail should be sent, and how to deliver the mail. It can be configured to only send out mail for certain builders, and only send them when a build fails or when the builder transitions from success to failure. It can also be configured to include various build logs in each message. If a proper lookup function is configured, the message will be sent to the "interested users" list (:ref:`Doing-Things-With-Users`), which includes all developers who made changes in the build. By default, however, Buildbot does not know how to construct an email address based on the information from the version control system. See the ``lookup`` argument, below, for more information. You can add additional, statically-configured, recipients with the ``extraRecipients`` argument. You can also add interested users by setting the ``owners`` build property to a list of users in the scheduler constructor (:ref:`Configuring-Schedulers`). Each :class:`MailNotifier` sends mail to a single set of recipients. To send different kinds of mail to different recipients, use multiple :class:`MailNotifier`\s. TODO: or subclass MailNotifier and override getRecipients() The following simple example will send an email upon the completion of each build, to just those developers whose :class:`Change`\s were included in the build. The email contains a description of the :class:`Build`, its results, and URLs where more information can be obtained. .. code-block:: python from buildbot.plugins import reporters mn = reporters.MailNotifier(fromaddr="buildbot@example.org", lookup="example.org") c['services'].append(mn) To get a simple one-message-per-build (say, for a mailing list), use the following form instead. 
This form does not send mail to individual developers (and thus does not need the ``lookup=`` argument, explained below); instead it only ever sends mail to the `extra recipients` named in the arguments: .. code-block:: python mn = reporters.MailNotifier(fromaddr="buildbot@example.org", sendToInterestedUsers=False, extraRecipients=['listaddr@example.org']) If your SMTP host requires authentication before it allows you to send emails, this can also be done by specifying ``smtpUser`` and ``smtpPassword``: .. code-block:: python mn = reporters.MailNotifier(fromaddr="myuser@example.com", sendToInterestedUsers=False, extraRecipients=["listaddr@example.org"], relayhost="smtp.example.com", smtpPort=587, smtpUser="myuser@example.com", smtpPassword="mypassword") .. note:: If for some reasons you are not able to send a notification with TLS enabled and specified user name and password, you might want to use :contrib-src:`master/contrib/check_smtp.py` to see if it works at all. If you want to require Transport Layer Security (TLS), then you can also set ``useTls``: .. code-block:: python mn = reporters.MailNotifier(fromaddr="myuser@example.com", sendToInterestedUsers=False, extraRecipients=["listaddr@example.org"], useTls=True, relayhost="smtp.example.com", smtpPort=587, smtpUser="myuser@example.com", smtpPassword="mypassword") .. note:: If you see ``twisted.mail.smtp.TLSRequiredError`` exceptions in the log while using TLS, this can be due *either* to the server not supporting TLS or a missing `PyOpenSSL`_ package on the BuildMaster system. In some cases, it is desirable to have different information than what is provided in a standard MailNotifier message. For this purpose, MailNotifier provides the argument ``messageFormatter`` (an instance of ``MessageFormatter``), which allows for creating messages with unique content. For example, if only short emails are desired (e.g., for delivery to phones): .. 
code-block:: python from buildbot.plugins import reporters generator = reporters.BuildStatusGenerator( mode=('problem',), message_formatter=reporters.MessageFormatter(template="STATUS: {{ summary }}")) mn = reporters.MailNotifier(fromaddr="buildbot@example.org", sendToInterestedUsers=False, extraRecipients=['listaddr@example.org'], generators=[generator]) Another example of a function delivering a customized HTML email is given below: .. code-block:: python from buildbot.plugins import reporters template=u'''\

Build status: {{ summary }}

Worker used: {{ workername }}

{% for step in build['steps'] %}

{{ step['name'] }}: {{ step['results'] }}

{% endfor %}

-- Buildbot

''' generator = reporters.BuildStatusGenerator( mode=('failing',), message_formatter=reporters.MessageFormatter( template=template, template_type='html', want_properties=True, want_steps=True)) mn = reporters.MailNotifier(fromaddr="buildbot@example.org", sendToInterestedUsers=False, mode=('failing',), extraRecipients=['listaddr@example.org'], generators=[generator]) .. _PyOpenSSL: http://pyopenssl.sourceforge.net/ MailNotifier arguments ~~~~~~~~~~~~~~~~~~~~~~ ``fromaddr`` The email address to be used in the 'From' header. ``sendToInterestedUsers`` (boolean). If ``True`` (the default), send mail to all of the Interested Users. Interested Users are authors of changes and users from the ``owners`` build property. Override ``MailNotifier`` ``getResponsibleUsersForBuild`` method to change that. If ``False``, only send mail to the ``extraRecipients`` list. ``extraRecipients`` (list of strings). A list of email addresses to which messages should be sent (in addition to the InterestedUsers list, which includes any developers who made :class:`Change`\s that went into this build). It is a good idea to create a small mailing list and deliver to that, then let subscribers come and go as they please. ``generators`` (list). A list of instances of ``IReportGenerator`` which defines the conditions of when the messages will be sent and contents of them. See :ref:`Report-Generators` for more information. ``relayhost`` (string, deprecated). The host to which the outbound SMTP connection should be made. Defaults to 'localhost' ``smtpPort`` (int). The port that will be used on outbound SMTP connections. Defaults to 25. ``useTls`` (boolean). When this argument is ``True`` (default is ``False``), ``MailNotifier`` requires that STARTTLS encryption is used for the connection with the ``relayhost``. Authentication is required for STARTTLS so the arguments ``smtpUser`` and ``smtpPassword`` must also be specified. ``useSmtps`` (boolean). 
When this argument is ``True`` (default is ``False``), ``MailNotifier`` connects to ``relayhost`` over an encrypted SSL/TLS connection. This configuration is typically used over port 465. ``smtpUser`` (string). The user name to use when authenticating with the ``relayhost``. Can be a :ref:`Secret`. ``smtpPassword`` (string). The password that will be used when authenticating with the ``relayhost``. Can be a :ref:`Secret`. ``lookup`` (implementer of :class:`IEmailLookup`). Object which provides :class:`IEmailLookup`, which is responsible for mapping User names (which come from the VC system) into valid email addresses. If the argument is not provided, the ``MailNotifier`` will attempt to build the ``sendToInterestedUsers`` from the authors of the Changes that led to the Build via :ref:`User-Objects`. If the author of one of the Build's Changes has an email address stored, it will added to the recipients list. With this method, ``owners`` are still added to the recipients. Note that, in the current implementation of user objects, email addresses are not stored; as a result, unless you have specifically added email addresses to the user database, this functionality is unlikely to actually send any emails. Most of the time you can use a simple Domain instance. As a shortcut, you can pass as string: this will be treated as if you had provided ``Domain(str)``. For example, ``lookup='example.com'`` will allow mail to be sent to all developers whose SVN usernames match their ``example.com`` account names. See :src:`master/buildbot/reporters/mail.py` for more details. Regardless of the setting of ``lookup``, ``MailNotifier`` will also send mail to addresses in the ``extraRecipients`` list. ``extraHeaders`` (dictionary). A dictionary containing key/value pairs of extra headers to add to sent e-mails. Both the keys and the values may be an `Interpolate` instance. ``watchedWorkers`` This is a list of names of workers, which should be watched. 
In case a worker goes missing, a notification is sent. The value of ``watchedWorkers`` can also be set to *all* (default) or ``None``. You also need to specify an email address to which the notification is sent in the worker configuration. ``dumpMailsToLog`` If set to ``True``, all completely formatted mails will be dumped to the log before being sent. This can be useful to debug problems with your mail provider. Be sure to only turn this on if you really need it, especially if you attach logs to emails. This can dump sensitive information to logs and make them very large. buildbot-3.4.0/master/docs/manual/configuration/reporters/pushjet_notifier.rst000066400000000000000000000024071413250514000300270ustar00rootroot00000000000000.. bb:reporter:: PushjetNotifier .. _Pushjet: https://pushjet.io/ PushjetNotifier +++++++++++++++ .. py:class:: buildbot.reporters.pushover.PushjetNotifier Pushjet_ is another instant notification service, similar to :bb:reporter:`PushoverNotifier`. To use this reporter, you need to generate a Pushjet service and provide its secret. The following parameters are accepted by this class: ``generators`` (list) A list of instances of ``IReportGenerator`` which defines the conditions of when the messages will be sent and contents of them. See :ref:`Report-Generators` for more information. ``secret`` This is a secret token for your Pushjet service. See http://docs.pushjet.io/docs/creating-a-new-service to learn how to create a new Pushjet service and get its secret token. Can be a :ref:`Secret`. ``levels`` Dictionary of Pushjet notification levels. The keys of the dictionary can be ``change``, ``failing``, ``passing``, ``warnings``, ``exception`` and are equivalent to the ``mode`` strings. The values are integers between 0...5, specifying notification priority. In case a mode is missing from this dictionary, the default value set by Pushover is used. ``base_url`` Base URL for custom Pushjet instances. Defaults to https://api.pushjet.io. 
buildbot-3.4.0/master/docs/manual/configuration/reporters/pushover_notifier.rst000066400000000000000000000043631413250514000302230ustar00rootroot00000000000000.. _Pushover: https://pushover.net/ .. bb:reporter:: PushoverNotifier PushoverNotifier ++++++++++++++++ .. py:currentmodule:: buildbot.reporters.pushover .. py:class:: buildbot.reporters.pushover.PushoverNotifier Apart of sending mail, Buildbot can send Pushover_ notifications. It can be used by administrators to receive an instant message to an iPhone or an Android device if a build fails. The :class:`PushoverNotifier` reporter is used to accomplish this. Its configuration is very similar to the mail notifications, however—due to the notification size constrains—the logs and patches cannot be attached. To use this reporter, you need to generate an application on the Pushover website https://pushover.net/apps/ and provide your user key and the API token. The following simple example will send a Pushover notification upon the completion of each build. The notification contains a description of the :class:`Build`, its results, and URLs where more information can be obtained. The ``user_key`` and ``api_token`` values should be replaced with proper ones obtained from the Pushover website for your application. .. code-block:: python from buildbot.plugins import reporters pn = reporters.PushoverNotifier(user_key="1234", api_token='abcd') c['services'].append(pn) The following parameters are accepted by this class: ``generators`` (list) A list of instances of ``IReportGenerator`` which defines the conditions of when the messages will be sent and contents of them. See :ref:`Report-Generators` for more information. ``user_key`` The user key from the Pushover website. It is used to identify the notification recipient. Can be a :ref:`Secret`. ``api_token`` API token for a custom application from the Pushover website. Can be a :ref:`Secret`. ``priorities`` Dictionary of Pushover notification priorities. 
The keys of the dictionary can be ``change``, ``failing``, ``passing``, ``warnings``, ``exception`` and are equivalent to the ``mode`` strings. The values are integers between -2...2, specifying notification priority. In case a mode is missing from this dictionary, the default value of 0 is used. ``otherParams`` Other parameters send to Pushover API. Check https://pushover.net/api/ for their list. buildbot-3.4.0/master/docs/manual/configuration/reporters/reporter_base.rst000066400000000000000000000052071413250514000273030ustar00rootroot00000000000000ReporterBase ++++++++++++ .. py:currentmodule:: buildbot.reporters.base .. py:class:: ReporterBase(generators) :class:`ReporterBase` is a base class used to implement various reporters. It accepts a list of :ref:`report generators` which define what messages to issue on what events. If generators decide that an event needs a report, then the ``sendMessage`` function is called. The ``sendMessage`` function should be implemented by deriving classes. :param generators: (a list of report generator instances) A list of report generators to manage. .. py:method:: sendMessage(self, reports) Sends the reports via the mechanism implemented by the specific implementation of the reporter. The reporter is expected to interpret all reports, figure out the best mechanism for reporting and report the given information. .. note:: The API provided by the sendMessage function is not yet stable and is subject to change. :param reports: A list of dictionaries, one for each generator that provided a report. Frequently used report keys ~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. note:: The list of report keys and their meanings are currently subject to change. This documents frequently used keys within the dictionaries that are passed to the ``sendMessage`` function. - ``body``: (string) The body of the report to be sent, usually sent as the body of e.g. email. 
- ``subject``: (string or ``None``) The subject of the report to be sent or ``None`` if nothing was supplied. - ``type``: (string) The type of the body of the report. The following are currently supported: ``plain`` and ``html``. - ``builder_name``: (string) The name of the builder corresponding to the build or buildset that the report describes. - ``results``: (an instance of a result value from ``buildbot.process.results``) The current result of the build. - ``builds`` (a list of build dictionaries as reported by the data API) A list of builds that the report describes. - ``users`` (a list of strings) A list of users to send the report to. - ``patches`` (a list of patch dictionaries corresponding to sourcestamp's ``patch`` values) A list of patches applied to the build or buildset that is being built. - ``logs`` (a list of dictionaries corresponding to logs as reported by the data API) A list of logs produced by the build(s) so far. The following two keys are provided in addition to what the data API provides: - ``stepname`` (string) The name of the step that produced the log. - ``content`` (string) The content of the log. buildbot-3.4.0/master/docs/manual/configuration/reporters/telegram.rst000066400000000000000000000242311413250514000262450ustar00rootroot00000000000000.. bb:reporter:: TelegramBot Telegram Bot ++++++++++++ Buildbot offers a bot, similar to the :bb:reporter:`IRC` for Telegram mobile and desktop messaging app. The bot can notify users and groups about build events, respond to status queries, or force and stop builds on request (if allowed to). In order to use this reporter, you must first speak to BotFather_ and create a `new telegram bot `_. A quick step-by-step procedure is as follows: 1. Start a chat with BotFather_. 2. Type ``/newbot``. 3. Enter a display name for your bot. It can be any string. 4. Enter a unique username for your bot. 
Usernames are 5-32 characters long and are case insensitive, but may only include Latin characters, numbers, and underscores. Your bot's username must end in `bot`, e.g. `MyBuildBot` or `MyBuildbotBot`. 5. You will be presented with a token for your bot. Save it, as you will need it for :bb:reporter:`TelegramBot` configuration. 6. Optionally, you may type ``/setcommands``, select the username of your new bot and paste the following text: .. jinja:: telegram .. code-block:: text {% for line in commands|sort %} {{ line -}} {% endfor %} If you do this, Telegram will provide hints about your bot commands. 7. If you want, you can set a custom picture and description for your bot. .. _BotFather: https://telegram.me/botfather After setting up the bot in Telegram, you should configure it in Buildbot. .. code-block:: python from buildbot.plugins import reporters telegram = reporters.TelegramBot( bot_token='bot_token_given_by_botfather', bot_username'username_set_in_botfather_bot', chat_ids=[-1234567], authz={('force', 'stop'): "authorizednick"} notify_events=[ 'exception', 'problem', 'recovery', 'worker' ], usePolling=True) c['services'].append(telegram) The following parameters are accepted by this class: ``bot_token`` (mandatory) Bot token given by BotFather. ``bot_username`` (optional) This should be set to the the bot unique username defined in BotFather. If this parameter is missing, it will be retrieved from the Telegram server. However, in case of the connection problems, configuration of the Buildbot will be interrupted. For this reason it is advised to set this parameter to the correct value. ``chat_ids`` (optional) List of chats IDs to send notifications specified in the ``notify_events`` parameter. For channels it should have form ``@channelusername`` and for private chats and groups it should be a numeric ID. To get it, talk to your bot or add it to a Telegram group and issue ``/getid`` command. .. 
note:: In order to receive notification from the bot, you need to talk to it first (and hit the ``/start`` button) or add it to the group chat. ``authz`` (optional) Authentication list for commands. It must be a dictionary with command names (without slashes) or tuples of command names as keys. There are two special command names: ``''`` (empty string) meaning any harmless command and ``'!'`` for dangerous commands (currently ``/force``, ``/stop``, and ``/shutdown``). The dictionary values are either ``True`` of ``False`` (which allows or deny commands for everybody) or a list of numeric IDs authorized to issue specified commands. By default, harmless commands are allowed for everybody and the dangerous ones are prohibited. A sample ``authz`` parameter may look as follows: .. code-block:: python authz={ 'getid': True, '': [123456, 789012], ('force', 'stop'): [123456], } Anybody will be able to run the ``getid`` command, users with IDs 123456 and 789012 will be allowed to run any safe command and the user with ID 123456 will also have the right to force and stop builds. ``tags`` (optional) When set, this bot will only communicate about builders containing those tags. (tags functionality is not yet implemented) ``notify_events`` (optional) A list or set of events to be notified on the Telegram chats. Telegram bot can listen to build 'start' and 'finish' events. It can also notify about missing workers and their return. This parameter can be changed during run-time by sending the ``/notify`` command to the bot. Note however, that at the buildbot restart or reconfig the notifications listed here will be turned on for the specified chats. On the other hand, removing events from this parameters will not automatically stop notifications for them (you need to turn them off for every channel with the ``/notify`` command). ``showBlameList`` (optional, disabled by default) Whether or not to display the blame list for failed builds. 
(blame list functionality is not yet implemented) ``useRevisions`` (optional, disabled by default) Whether or not to display the revision leading to the build the messages are about. (useRevisions functionality is not yet implemented) ``useWebhook`` (optional, disabled by default) By default this bot receives messages from Telegram through polling. You can configure it to use a web-hook, which may be more efficient. However, this requires the web frontend of the Buildbot to be configured and accessible through HTTPS (not HTTP) on a public IP and port number 443, 80, 88, or 8443. Furthermore, the Buildbot configuration option :bb:cfg:`buildbotURL` must be correctly set. If you are using HTTP authentication, please ensure that the location *buildbotURL*\ ``/telegram``\ *bot_token* (e.g. ``https://buildbot.example.com/telegram123456:secret``) is accessible by everybody. ``certificate`` (optional) A content of your server SSL certificate. This is necessary if the access to the Buildbot web interface is through HTTPS protocol with self-signed certificate and ``userWebhook`` is set to ``True``. ``pollTimeout`` (optional) The time the bot should wait for Telegram to respond to polling using `long polling `_. ``retryDelay`` (optional) The delay the bot should wait before attempting to retry communication in case of no connection. To use the service, you sent Telegram commands (messages starting with a slash) to the bot. In most cases you do not need to add any parameters; the bot will ask you about the details. Some of the commands currently available: ``/getid`` Get ID of the user and group. This is useful to find the numeric IDs, which should be put in ``authz`` and ``chat_ids`` configuration parameters. ``/list`` Emit a list of all configured builders, workers or recent changes. ``/status`` Announce the status of all builders. ``/watch`` You will be presented with a list of builders that are currently running. 
You can select any of them to be notified when the build finishes.
The bot will read configuration from every configured :bb:sched:`ForceScheduler` and present you with the build parameters you can change. If you set all the required parameters, you will be given an option to start the build. ``/stop`` Stop a build. If there are any active builds, you will be presented with options to stop them. ``/shutdown`` Control the shutdown process of the Buildbot master. You will be presented with options to start a graceful shutdown, stop it or to shutdown immediately. If you are in the middle of the conversation with the bot (e.g. it has just asked you a question), you can always stop the current command with a command ``/nay``. If the `tags` is set (see the tags option in :ref:`Builder-Configuration`) changes related to only builders belonging to those tags of builders will be sent to the channel. If the `useRevisions` option is set to `True`, the IRC bot will send status messages that replace the build number with a list of revisions that are contained in that build. So instead of seeing `build #253 of ...`, you would see something like `build containing revisions a87b2c4`. Revisions that are stored as hashes are shortened to 7 characters in length, as multiple revisions can be contained in one build and may result in too long messages. buildbot-3.4.0/master/docs/manual/configuration/reporters/zulip_status.rst000066400000000000000000000023411413250514000272110ustar00rootroot00000000000000.. bb:reporter:: ZulipStatusPush ZulipStatusPush +++++++++++++++ .. py:currentmodule:: buildbot.reporters.zulip .. code-block:: python from buildbot.plugins import reporters zs = reporters.ZulipStatusPush(endpoint='your-organization@zulipchat.com', token='private-token', stream='stream_to_post_in') c['services'].append(zs) :class:`ZulipStatusPush` sends build status using `The Zulip API `_. The build status is sent to a user as a private message or in a stream in Zulip. .. 
py:class:: ZulipStatusPush(endpoint, token, stream=None) :param string endpoint: URL of your Zulip server :param string token: Private API token :param string stream: The stream in which the build status is to be sent. Defaults to None .. note:: A private message is sent if stream is set to None. Json object spec ~~~~~~~~~~~~~~~~ The json object sent contains the following build status values. .. code-block:: json { "event": "new/finished", "buildid": "", "buildername": "", "url": "", "project": "name of the project", "timestamp": "" } buildbot-3.4.0/master/docs/manual/configuration/schedulers.rst000066400000000000000000001756621413250514000246000ustar00rootroot00000000000000.. -*- rst -*- .. _Schedulers: Schedulers ---------- .. contents:: :depth: 2 :local: Schedulers are responsible for initiating builds on builders. Some schedulers listen for changes from ChangeSources and generate build sets in response to these changes. Others generate build sets without changes, based on other events in the buildmaster. .. _Configuring-Schedulers: Configuring Schedulers ~~~~~~~~~~~~~~~~~~~~~~ .. bb:cfg:: schedulers The :bb:cfg:`schedulers` configuration parameter gives a list of scheduler instances, each of which causes builds to be started on a particular set of Builders. The two basic scheduler classes you are likely to start with are :bb:sched:`SingleBranchScheduler` and :bb:sched:`Periodic`, but you can write a customized subclass to implement more complicated build scheduling. Scheduler arguments should always be specified by name (as keyword arguments), to allow for future expansion: .. code-block:: python sched = SingleBranchScheduler(name="quick", builderNames=['lin', 'win']) There are several common arguments for schedulers, although not all are available with all schedulers. .. _Scheduler-Attr-Name: ``name`` Each Scheduler must have a unique name. This is used in status displays, and is also available in the build property ``scheduler``. .. 
_Scheduler-Attr-BuilderNames: ``builderNames`` This is the set of builders which this scheduler should trigger, specified as a list of names (strings). This can also be an :class:`~IRenderable` object which will render to a list of builder names (or a list of :class:`~IRenderable` that will render to builder names). .. note:: When ``builderNames`` is rendered, these additional :class:`~Properties` attributes are available: ``master`` A reference to the :class:`~BuildMaster` object that owns this scheduler. This can be used to access the data API. ``sourcestamps`` The list of sourcestamps that triggered the scheduler. ``changes`` The list of changes associated with the sourcestamps. ``files`` The list of modified files associated with the changes. Any property attached to the change(s) that triggered the scheduler will be combined and available when rendering `builderNames`. Here is a simple example: .. code-block:: python from buildbot.plugins import util, schedulers @util.renderer def builderNames(props): builders = set() for f in props.files: if f.endswith('.rst'): builders.add('check_docs') if f.endswith('.c'): builders.add('check_code') return list(builders) c['schedulers'] = [ schedulers.AnyBranchScheduler( name='all', builderNames=builderNames, ) ] And a more complex one: .. code-block:: python import fnmatch from twisted.internet import defer from buildbot.plugins import util, schedulers @util.renderer @defer.inlineCallbacks def builderNames(props): # If "buildername_pattern" is defined with "buildbot sendchange", # check if the builder name matches it. pattern = props.getProperty('buildername_pattern') # If "builder_tags" is defined with "buildbot sendchange", # only schedule builders that have the specified tags. 
tags = props.getProperty('builder_tags') builders = [] for b in (yield props.master.data.get(('builders',))): if pattern and not fnmatch.fnmatchcase(b['name'], pattern): continue if tags and not set(tags.split()).issubset(set(b['tags'])): continue builders.append(b['name']) return builders c['schedulers'] = [ schedulers.AnyBranchScheduler( name='matrix', builderNames=builderNames, ) ] .. index:: Properties; from scheduler .. _Scheduler-Attr-Properties: ``properties`` (optional) This is a dictionary specifying properties that will be transmitted to all builds started by this scheduler. The ``owner`` property may be of particular interest, as its contents (list) will be added to the list of "interested users" (:ref:`Doing-Things-With-Users`) for each triggered build. For example: .. code-block:: python sched = Scheduler(..., properties = { 'owner': ['zorro@example.com', 'silver@example.com'] }) .. _Scheduler-Attr-Codebases: ``codebases`` (optional) Specifies codebase definitions that are used when the scheduler processes data from more than one repository at the same time. The ``codebases`` parameter is only used to fill in missing details about a codebase when scheduling a build. For example, when a change to codebase ``A`` occurs, a scheduler must invent a sourcestamp for codebase ``B``. Source steps that specify codebase ``B`` as their codebase will use the invented timestamp. The parameter does not act as a filter on incoming changes -- use a change filter for that purpose. This parameter can be specified in two forms: - as a list of strings. This is the simplest form; use it if no special overrides are needed. In this form, just the names of the codebases are listed. - as a dictionary of dictionaries. In this form, the per-codebase overrides of repository, branch and revision can be specified. Each codebase definition dictionary is a dictionary with any of the keys: ``repository``, ``branch``, ``revision``. 
The codebase definitions are combined in a dictionary keyed by the name of the codebase. .. code-block:: python codebases = {'codebase1': {'repository':'....', 'branch':'default', 'revision': None}, 'codebase2': {'repository':'....'} } .. _Scheduler-Attr-FileIsImportant: ``fileIsImportant`` (optional) A callable which takes as argument a Change instance and returns ``True`` if the change is worth building, and ``False`` if it is not. Unimportant Changes are accumulated until the build is triggered by an important change. The default value of ``None`` means that all Changes are important. .. _Scheduler-Attr-ChangeFilter: ``change_filter`` (optional) The change filter that will determine which changes are recognized by this scheduler (see :ref:`Change-Filters`). Note that this is different from ``fileIsImportant``; if the change filter filters out a change, the change is completely ignored by the scheduler. If a change is allowed by the change filter but is deemed unimportant, it will not cause builds to start but will be remembered and shown in status displays. The default value of ``None`` does not filter any changes at all. .. _Scheduler-Attr-OnlyImportant: ``onlyImportant`` (optional) A boolean that, when ``True``, only adds important changes to the buildset as specified in the ``fileIsImportant`` callable. This means that unimportant changes are ignored the same way a ``change_filter`` filters changes. The default value is ``False`` and only applies when ``fileIsImportant`` is given. .. _Scheduler-Attr-Reason: ``reason`` (optional) A string that will be used as the reason for the triggered build. By default it lists the type and name of the scheduler triggering the build. The remaining subsections represent a catalog of the available scheduler types. All these schedulers are defined in modules under :mod:`buildbot.schedulers`, and their docstrings are the best source of documentation on the arguments each one takes. 
Scheduler Resiliency ~~~~~~~~~~~~~~~~~~~~ In a multi-master configuration, schedulers with the same name can be configured on multiple masters. Only one instance of the scheduler will be active. If that instance becomes inactive, due to its master being shut down or failing, then another instance will become active after a short delay. This provides resiliency in scheduler configurations, so that schedulers are not a single point of failure in a Buildbot infrastructure. The Data API and web UI display the master on which each scheduler is running. There is currently no mechanism to control which master's scheduler instance becomes active. The behavior is nondeterministic, based on the timing of polling by inactive schedulers. The failover is non-revertive. .. _Change-Filters: Change Filters ~~~~~~~~~~~~~~ Several schedulers perform filtering on an incoming set of changes. The filter can most generically be specified as a :class:`ChangeFilter`. Set up a :class:`ChangeFilter` like this: .. code-block:: python from buildbot.plugins import util my_filter = util.ChangeFilter(project_re="^baseproduct/.*", branch="devel") and then assign it to a scheduler with the ``change_filter`` parameter: .. code-block:: python sch = SomeSchedulerClass(..., change_filter=my_filter) There are five attributes of changes on which you can filter: ``project`` The project string, as defined by the ChangeSource. ``repository`` The repository in which the change occurred. ``branch`` The branch on which the change occurred. Note that 'trunk' or 'master' is often denoted by ``None``. ``category`` The category, again as defined by the ChangeSource. ``codebase`` The change's codebase. For each attribute, the filter can look for one specific value: .. code-block:: python my_filter = util.ChangeFilter(project='myproject') or accept a set of values: .. 
code-block:: python my_filter = util.ChangeFilter(project=['myproject', 'jimsproject']) or apply a regular expression, using the attribute name with a "``_re``" suffix: .. code-block:: python my_filter = util.ChangeFilter(category_re='.*deve.*') # or, to use regular expression flags: import re my_filter = util.ChangeFilter(category_re=re.compile('.*deve.*', re.I)) :class:`buildbot.www.hooks.github.GitHubEventHandler` has a special ``github_distinct`` property that can be used to specify whether or not non-distinct changes should be considered. For example, if a commit is pushed to a branch that is not being watched and then later pushed to a watched branch, by default, this will be recorded as two separate changes. In order to record a change only the first time the commit appears, you can use a custom :class:`ChangeFilter` like this: .. code-block:: python ChangeFilter(filter_fn=lambda c: c.properties.getProperty('github_distinct')) For anything more complicated, define a Python function to recognize the strings you want: .. code-block:: python def my_branch_fn(branch): return branch in branches_to_build and branch not in branches_to_ignore my_filter = util.ChangeFilter(branch_fn=my_branch_fn) The special argument ``filter_fn`` can be used to specify a function that is given the entire Change object, and returns a boolean. 
The entire set of allowed arguments, then, is +------------+---------------+---------------+ | project | project_re | project_fn | +------------+---------------+---------------+ | repository | repository_re | repository_fn | +------------+---------------+---------------+ | branch | branch_re | branch_fn | +------------+---------------+---------------+ | category | category_re | category_fn | +------------+---------------+---------------+ | codebase | codebase_re | codebase_fn | +------------+---------------+---------------+ | filter_fn | +--------------------------------------------+ A Change passes the filter only if *all* arguments are satisfied. If no filter object is given to a scheduler, then all changes will be built (subject to any other restrictions the scheduler enforces). Usage example ~~~~~~~~~~~~~ A *quick* scheduler might exist to give immediate feedback to developers, hoping to catch obvious problems in the code that can be detected quickly. These typically do not run the full test suite, nor do they run on a wide variety of platforms. They also usually do a VC update rather than performing a brand-new checkout each time. A separate *full* scheduler might run more comprehensive tests, to catch more subtle problems. It might be configured to run after the quick scheduler, to give developers time to commit fixes to bugs caught by the quick scheduler before running the comprehensive tests. This scheduler would also feed multiple :class:`Builder`\s. Many schedulers can be configured to wait a while after seeing a source-code change - this is the *tree stable timer*. The timer allows multiple commits to be "batched" together. This is particularly useful in distributed version control systems, where a developer may push a long sequence of changes all at once. To save resources, it's often desirable only to test the most recent change. Schedulers can also filter out the changes they are interested in, based on a number of criteria. 
For example, a scheduler that only builds documentation might skip any changes that do not affect the documentation. Schedulers can also filter on the branch to which a commit was made. Periodic builds (those which are run every N seconds rather than after new Changes arrive) are triggered by a special :bb:sched:`Periodic` scheduler. Each scheduler creates and submits :class:`BuildSet` objects to the :class:`BuildMaster`, which is then responsible for making sure the individual :class:`BuildRequests` are delivered to the target :class:`Builder`\s. Scheduler instances are activated by placing them in the :bb:cfg:`schedulers` list in the buildmaster config file. Each scheduler must have a unique name. Scheduler Types ~~~~~~~~~~~~~~~ .. bb:sched:: SingleBranchScheduler .. bb:sched:: Scheduler .. _Scheduler-SingleBranchScheduler: SingleBranchScheduler ::::::::::::::::::::: This is the original and still most popular scheduler class. It follows exactly one branch, and starts a configurable tree-stable-timer after each change on that branch. When the timer expires, it starts a build on some set of Builders. This scheduler accepts a :meth:`fileIsImportant` function which can be used to ignore some Changes if they do not affect any *important* files. If ``treeStableTimer`` is not set, then this scheduler starts a build for every Change that matches its ``change_filter`` and satisfies :meth:`fileIsImportant`. If ``treeStableTimer`` is set, then a build is triggered for each set of Changes that arrive in intervals shorter than the configured time and match the filters. .. note:: The behavior of this scheduler is undefined, if ``treeStableTimer`` is set, and changes from multiple branches, repositories or codebases are accepted by the filter. .. note:: The ``codebases`` argument will filter out codebases not specified there, but *won't* filter based on the branches specified there. The arguments to this scheduler are: ``name`` See :ref:`name scheduler argument `. 
``builderNames`` See :ref:`builderNames scheduler argument `. ``properties`` (optional) See :ref:`properties scheduler argument `. ``codebases`` (optional) See :ref:`codebases scheduler argument `. ``fileIsImportant`` (optional) See :ref:`fileIsImportant scheduler argument `. ``change_filter`` (optional) See :ref:`change_filter scheduler argument `. ``onlyImportant`` (optional) See :ref:`onlyImportant scheduler argument `. ``reason`` (optional) See :ref:`reason scheduler argument `. ``treeStableTimer`` The scheduler will wait for this many seconds before starting the build. If new changes are made during this interval, the timer will be restarted. So the build will be started after this many seconds of inactivity following the last change. If ``treeStableTimer`` is ``None``, then a separate build is started immediately for each Change. ``categories`` (deprecated; use change_filter) A list of categories of changes that this scheduler will respond to. If this is specified, then any non-matching changes are ignored. ``branch`` (deprecated; use change_filter) The scheduler will pay attention to this branch, ignoring Changes that occur on other branches. Setting ``branch`` equal to the special value of ``None`` means it should only pay attention to the default branch. .. note:: ``None`` is a keyword, not a string, so write ``None`` and not ``"None"``. Example: .. code-block:: python from buildbot.plugins import schedulers, util quick = schedulers.SingleBranchScheduler( name="quick", change_filter=util.ChangeFilter(branch='master'), treeStableTimer=60, builderNames=["quick-linux", "quick-netbsd"]) full = schedulers.SingleBranchScheduler( name="full", change_filter=util.ChangeFilter(branch='master'), treeStableTimer=5*60, builderNames=["full-linux", "full-netbsd", "full-OSX"]) c['schedulers'] = [quick, full] In this example, the two *quick* builders are triggered 60 seconds after the tree has been changed. 
The *full* builders do not run quite that quickly (they wait 5 minutes), so that hopefully, if the quick builds fail due to a missing file or a simple typo, the developer can discover and fix the problem before the full builds are started. Both schedulers only pay attention to the default branch: any changes on other branches are ignored. Each scheduler triggers a different set of builders, referenced by name. .. note:: The old names for this scheduler, ``buildbot.scheduler.Scheduler`` and ``buildbot.schedulers.basic.Scheduler``, are deprecated in favor of using :mod:`buildbot.plugins`: .. code-block:: python from buildbot.plugins import schedulers However if you must use a fully qualified name, it is ``buildbot.schedulers.basic.SingleBranchScheduler``. .. bb:sched:: AnyBranchScheduler .. _AnyBranchScheduler: AnyBranchScheduler :::::::::::::::::: This scheduler uses a tree-stable-timer like the default one, but uses a separate timer for each branch. If ``treeStableTimer`` is not set, then this scheduler is indistinguishable from :bb:sched:`SingleBranchScheduler`. If ``treeStableTimer`` is set, then a build is triggered for each set of Changes that arrive in intervals shorter than the configured time and match the filters. The arguments to this scheduler are: ``name`` See :ref:`name scheduler argument `. ``builderNames`` See :ref:`builderNames scheduler argument `. ``properties`` (optional) See :ref:`properties scheduler argument `. ``codebases`` (optional) See :ref:`codebases scheduler argument `. ``fileIsImportant`` (optional) See :ref:`fileIsImportant scheduler argument `. ``change_filter`` (optional) See :ref:`change_filter scheduler argument `. ``onlyImportant`` (optional) See :ref:`onlyImportant scheduler argument `. ``reason`` (optional) See :ref:`reason scheduler argument `. ``treeStableTimer`` The scheduler will wait for this many seconds before starting a build. If new changes are made *on the same branch* during this interval, the timer will be restarted. 
``branches`` (deprecated; use change_filter) Changes on branches not specified on this list will be ignored. ``categories`` (deprecated; use change_filter) A list of categories of changes that this scheduler will respond to. If this is specified, then any non-matching changes are ignored. .. bb:sched:: Dependent .. _Dependent-Scheduler: Dependent Scheduler ::::::::::::::::::: It is common to wind up with one kind of build which should only be performed if the same source code was successfully handled by some other kind of build first. An example might be a packaging step: you might only want to produce .deb or RPM packages from a tree that was known to compile successfully and pass all unit tests. You could put the packaging step in the same Build as the compile and testing steps, but there might be other reasons to not do this (in particular you might have several Builders worth of compiles/tests, but only wish to do the packaging once). Another example is if you want to skip the *full* builds after a failing *quick* build of the same source code. Or, if one Build creates a product (like a compiled library) that is used by some other Builder, you'd want to make sure the consuming Build is run *after* the producing one. You can use *dependencies* to express this relationship to Buildbot. There is a special kind of scheduler named :bb:sched:`Dependent` that will watch an *upstream* scheduler for builds to complete successfully (on all of its Builders). Each time that happens, the same source code (i.e. the same ``SourceStamp``) will be used to start a new set of builds, on a different set of Builders. This *downstream* scheduler doesn't pay attention to Changes at all. It only pays attention to the upstream scheduler. If the build fails on any of the Builders in the upstream set, the downstream builds will not fire. Note that, for SourceStamps generated by a :bb:sched:`Dependent` scheduler, the ``revision`` is ``None``, meaning HEAD. 
If any changes are committed between the time the upstream scheduler begins its build and the time the dependent scheduler begins its build, then those changes will be included in the downstream build. See the :bb:sched:`Triggerable` scheduler for a more flexible dependency mechanism that can avoid this problem. The arguments to this scheduler are: ``name`` See :ref:`name scheduler argument `. ``builderNames`` See :ref:`builderNames scheduler argument `. ``properties`` (optional) See :ref:`properties scheduler argument `. ``codebases`` (optional) See :ref:`codebases scheduler argument `. ``upstream`` The upstream scheduler to watch. Note that this is an *instance*, not the name of the scheduler. Example: .. code-block:: python from buildbot.plugins import schedulers tests = schedulers.SingleBranchScheduler(name="just-tests", treeStableTimer=5*60, builderNames=["full-linux", "full-netbsd", "full-OSX"]) package = schedulers.Dependent(name="build-package", upstream=tests, # <- no quotes! builderNames=["make-tarball", "make-deb", "make-rpm"]) c['schedulers'] = [tests, package] .. bb:sched:: Periodic .. _Periodic-Scheduler: Periodic Scheduler :::::::::::::::::: This simple scheduler just triggers a build every *N* seconds. The arguments to this scheduler are: ``name`` See :ref:`name scheduler argument `. ``builderNames`` See :ref:`builderNames scheduler argument `. ``properties`` (optional) See :ref:`properties scheduler argument `. ``codebases`` (optional) See :ref:`codebases scheduler argument `. ``fileIsImportant`` (optional) See :ref:`fileIsImportant scheduler argument `. ``change_filter`` (optional) See :ref:`change_filter scheduler argument `. ``onlyImportant`` (optional) See :ref:`onlyImportant scheduler argument `. ``reason`` (optional) See :ref:`reason scheduler argument `. ``createAbsoluteSourceStamps`` (optional) This option only has effect when using multiple codebases. 
This is a highly configurable periodic build scheduler, which triggers a build at particular times of day, week, month, or year.
The full list of parameters is: ``name`` See :ref:`name scheduler argument `. ``builderNames`` See :ref:`builderNames scheduler argument `. ``properties`` (optional) See :ref:`properties scheduler argument `. ``codebases`` (optional) See :ref:`codebases scheduler argument `. ``fileIsImportant`` (optional) See :ref:`fileIsImportant scheduler argument `. ``change_filter`` (optional) See :ref:`change_filter scheduler argument `. ``onlyImportant`` (optional) See :ref:`onlyImportant scheduler argument `. ``reason`` (optional) See :ref:`reason scheduler argument `. ``createAbsoluteSourceStamps`` (optional) This option only has effect when using multiple codebases. When ``True``, it uses the last seen revision for each codebase that does not have a change. When ``False`` (the default), codebases without changes will use the revision from the ``codebases`` argument. ``onlyIfChanged`` (optional) If this is ``True``, then builds will not be scheduled at the designated time *unless* the change filter has accepted an important change since the previous build. The default value is ``False``. ``branch`` (optional) (Deprecated; use ``change_filter`` and ``codebases``.) The branch to build when the time comes, and the branch to filter for if ``change_filter`` is not specified. Remember that a value of ``None`` here means the default branch, and will not match other branches! ``minute`` (optional) The minute of the hour on which to start the build. This defaults to 0, meaning an hourly build. ``hour`` (optional) The hour of the day on which to start the build, in 24-hour notation. This defaults to \*, meaning every hour. ``dayOfMonth`` (optional) The day of the month to start a build. This defaults to ``*``, meaning every day. ``month`` (optional) The month in which to start the build, with January = 1. This defaults to ``*``, meaning every month. ``dayOfWeek`` (optional) The day of the week to start a build, with Monday = 0. This defaults to ``*``, meaning every day of the week. 
c['schedulers'].append(
            schedulers.Nightly(name='every2hours',
                               change_filter=util.ChangeFilter(branch=None),  # default branch
                               builderNames=['builder1'],
                               hour=range(0, 24, 2)))
This lets the administrator control who may initiate these `trial` builds, which branches are eligible for trial builds, and which Builders should be used for them. The scheduler has various means to accept build requests. All of them enforce more security than the usual buildmaster ports do. Any source code being built can be used to compromise the worker accounts, but in general that code must be checked out from the VC repository first, so only people with commit privileges can get control of the workers. The usual force-build control channels can waste worker time but do not allow arbitrary commands to be executed by people who don't have those commit privileges. However, the source code patch that is provided with the trial build does not have to go through the VC system first, so it is important to make sure these builds cannot be abused by a non-committer to acquire as much control over the workers as a committer has. Ideally, only developers who have commit access to the VC repository would be able to start trial builds, but unfortunately, the buildmaster does not, in general, have access to the VC system's user list. As a result, the try scheduler requires a bit more configuration. There are currently two ways to set this up: ``jobdir`` (ssh) This approach creates a command queue directory, called the :file:`jobdir`, in the buildmaster's working directory. The buildmaster admin sets the ownership and permissions of this directory to only grant write access to the desired set of developers, all of whom must have accounts on the machine. The :command:`buildbot try` command creates a special file containing the source stamp information and drops it in the jobdir, just like a standard maildir. When the buildmaster notices the new file, it unpacks the information inside and starts the builds. The config file entries used by 'buildbot try' either specify a local queuedir (for which write and mv are used) or a remote one (using scp and ssh). 
The advantage of this scheme is that it is quite secure, the disadvantage is that it requires fiddling outside the buildmaster config (to set the permissions on the jobdir correctly). If the buildmaster machine happens to also house the VC repository, then it can be fairly easy to keep the VC userlist in sync with the trial-build userlist. If they are on different machines, this will be much more of a hassle. It may also involve granting developer accounts on a machine that would not otherwise require them. To implement this, the worker invokes :samp:`ssh -l {username} {host} buildbot tryserver {ARGS}`, passing the patch contents over stdin. The arguments must include the inlet directory and the revision information. ``user+password`` (PB) In this approach, each developer gets a username/password pair, which are all listed in the buildmaster's configuration file. When the developer runs :command:`buildbot try`, their machine connects to the buildmaster via PB and authenticates themselves using that username and password, then sends a PB command to start the trial build. The advantage of this scheme is that the entire configuration is performed inside the buildmaster's config file. The disadvantages are that it is less secure (while the `cred` authentication system does not expose the password in plaintext over the wire, it does not offer most of the other security properties that SSH does). In addition, the buildmaster admin is responsible for maintaining the username/password list, adding and deleting entries as developers come and go. For example, to set up the `jobdir` style of trial build, using a command queue directory of :file:`{MASTERDIR}/jobdir` (and assuming that all your project developers were members of the ``developers`` unix group), you would first set up that directory: .. 
code-block:: bash mkdir -p MASTERDIR/jobdir MASTERDIR/jobdir/new MASTERDIR/jobdir/cur MASTERDIR/jobdir/tmp chgrp developers MASTERDIR/jobdir MASTERDIR/jobdir/* chmod g+rwx,o-rwx MASTERDIR/jobdir MASTERDIR/jobdir/* and then use the following scheduler in the buildmaster's config file: .. code-block:: python from buildbot.plugins import schedulers s = schedulers.Try_Jobdir(name="try1", builderNames=["full-linux", "full-netbsd", "full-OSX"], jobdir="jobdir") c['schedulers'] = [s] Note that you must create the jobdir before telling the buildmaster to use this configuration, otherwise you will get an error. Also remember that the buildmaster must be able to read and write to the jobdir as well. Be sure to watch the :file:`twistd.log` file (:ref:`Logfiles`) as you start using the jobdir, to make sure the buildmaster is happy with it. .. note:: Patches in the jobdir are encoded using netstrings, which place an arbitrary upper limit on patch size of 99999 bytes. If your submitted try jobs are rejected with `BadJobfile`, try increasing this limit with a snippet like this in your `master.cfg`: .. code-block:: python from twisted.protocols.basic import NetstringReceiver NetstringReceiver.MAX_LENGTH = 1000000 To use the username/password form of authentication, create a :class:`Try_Userpass` instance instead. It takes the same ``builderNames`` argument as the :class:`Try_Jobdir` form, but accepts an additional ``port`` argument (to specify the TCP port to listen on) and a ``userpass`` list of username/password pairs to accept. Remember to use good passwords for this: the security of the worker accounts depends upon it: .. code-block:: python from buildbot.plugins import schedulers s = schedulers.Try_Userpass(name="try2", builderNames=["full-linux", "full-netbsd", "full-OSX"], port=8031, userpass=[("alice","pw1"), ("bob", "pw2")]) c['schedulers'] = [s] Like in most classes in Buildbot, the ``port`` argument takes a `strports` specification. 
See :mod:`twisted.application.strports` for details. .. bb:sched:: Triggerable .. index:: Triggers .. _Triggerable-Scheduler: Triggerable Scheduler ::::::::::::::::::::: The :bb:sched:`Triggerable` scheduler waits to be triggered by a :bb:step:`Trigger` step (see :ref:`Step-Trigger`) in another build. That step can optionally wait for the scheduler's builds to complete. This provides two advantages over :bb:sched:`Dependent` schedulers. First, the same scheduler can be triggered from multiple builds. Second, the ability to wait for :bb:sched:`Triggerable`'s builds to complete provides a form of "subroutine call", where one or more builds can "call" a scheduler to perform some work for them, perhaps on other workers. The :bb:sched:`Triggerable` scheduler supports multiple codebases. The scheduler filters out all codebases from :bb:step:`Trigger` steps that are not configured in the scheduler. The parameters are just the basics: ``name`` See :ref:`name scheduler argument `. ``builderNames`` See :ref:`builderNames scheduler argument `. ``properties`` (optional) See :ref:`properties scheduler argument `. ``codebases`` (optional) See :ref:`codebases scheduler argument `. ``reason`` (optional) See :ref:`reason scheduler argument `. This class is only useful in conjunction with the :bb:step:`Trigger` step. Here is a fully-worked example: .. 
code-block:: python from buildbot.plugins import schedulers, steps, util checkin = schedulers.SingleBranchScheduler(name="checkin", change_filter=util.ChangeFilter(branch=None), treeStableTimer=5*60, builderNames=["checkin"]) nightly = schedulers.Nightly(name='nightly', change_filter=util.ChangeFilter(branch=None), builderNames=['nightly'], hour=3, minute=0) mktarball = schedulers.Triggerable(name="mktarball", builderNames=["mktarball"]) build = schedulers.Triggerable(name="build-all-platforms", builderNames=["build-all-platforms"]) test = schedulers.Triggerable(name="distributed-test", builderNames=["distributed-test"]) package = schedulers.Triggerable(name="package-all-platforms", builderNames=["package-all-platforms"]) c['schedulers'] = [mktarball, checkin, nightly, build, test, package] # on checkin, make a tarball, build it, and test it checkin_factory = util.BuildFactory() checkin_factory.addStep(steps.Trigger(schedulerNames=['mktarball'], waitForFinish=True)) checkin_factory.addStep(steps.Trigger(schedulerNames=['build-all-platforms'], waitForFinish=True)) checkin_factory.addStep(steps.Trigger(schedulerNames=['distributed-test'], waitForFinish=True)) # and every night, make a tarball, build it, and package it nightly_factory = util.BuildFactory() nightly_factory.addStep(steps.Trigger(schedulerNames=['mktarball'], waitForFinish=True)) nightly_factory.addStep(steps.Trigger(schedulerNames=['build-all-platforms'], waitForFinish=True)) nightly_factory.addStep(steps.Trigger(schedulerNames=['package-all-platforms'], waitForFinish=True)) .. bb:sched:: NightlyTriggerable NightlyTriggerable Scheduler :::::::::::::::::::::::::::: .. py:class:: buildbot.schedulers.timed.NightlyTriggerable The :bb:sched:`NightlyTriggerable` scheduler is a mix of the :bb:sched:`Nightly` and :bb:sched:`Triggerable` schedulers. This scheduler triggers builds at a particular time of day, week, or year, exactly as the :bb:sched:`Nightly` scheduler. 
However, the source stamp set that is used is provided by the last :bb:step:`Trigger` step that targeted this scheduler. The following parameters are just the basics: ``name`` See :ref:`name scheduler argument `. ``builderNames`` See :ref:`builderNames scheduler argument `. ``properties`` (optional) See :ref:`properties scheduler argument `. ``codebases`` (optional) See :ref:`codebases scheduler argument `. ``reason`` (optional) See :ref:`reason scheduler argument `. ``minute`` (optional) See :bb:sched:`Nightly`. ``hour`` (optional) See :bb:sched:`Nightly`. ``dayOfMonth`` (optional) See :bb:sched:`Nightly`. ``month`` (optional) See :bb:sched:`Nightly`. ``dayOfWeek`` (optional) See :bb:sched:`Nightly`. This class is only useful in conjunction with the :bb:step:`Trigger` step. Note that ``waitForFinish`` is ignored by :bb:step:`Trigger` steps targeting this scheduler. Here is a fully-worked example: .. code-block:: python from buildbot.plugins import schedulers, steps, util checkin = schedulers.SingleBranchScheduler(name="checkin", change_filter=util.ChangeFilter(branch=None), treeStableTimer=5*60, builderNames=["checkin"]) nightly = schedulers.NightlyTriggerable(name='nightly', builderNames=['nightly'], hour=3, minute=0) c['schedulers'] = [checkin, nightly] # on checkin, run tests checkin_factory = util.BuildFactory([ steps.Test(), steps.Trigger(schedulerNames=['nightly']) ]) # and every night, package the latest successful build nightly_factory = util.BuildFactory([ steps.ShellCommand(command=['make', 'package']) ]) .. bb:sched:: ForceScheduler .. index:: Forced Builds ForceScheduler Scheduler :::::::::::::::::::::::: The :bb:sched:`ForceScheduler` scheduler is the way you can configure a force build form in the web UI. In the ``/#/builders/:builderid`` web page, you will see, on the top right of the page, one button for each :bb:sched:`ForceScheduler` scheduler that was configured for this builder. 
If you click on that button, a dialog will let you choose various parameters for requesting a new build. The Buildbot framework allows you to customize exactly how the build form looks, which builders have a force build form (it might not make sense to force build every builder), and who is allowed to force builds on which builders. You do so by configuring a :bb:sched:`ForceScheduler` and adding it to the list of :bb:cfg:`schedulers`. The scheduler takes the following parameters: ``name`` See :ref:`name scheduler argument `. Force buttons are ordered by this property in the UI (so you can prefix by 01, 02, etc, in order to control precisely the order). ``builderNames`` List of builders where the force button should appear. See :ref:`builderNames scheduler argument `. ``reason`` A :ref:`parameter ` allowing the user to specify the reason for the build. The default value is a string parameter with a default value "force build". ``reasonString`` A string that will be used to create the build reason for the forced build. This string can contain the placeholders ``%(owner)s`` and ``%(reason)s``, which represents the value typed into the reason field. ``username`` A :ref:`parameter ` specifying the username associated with the build (aka owner). The default value is a username parameter. ``codebases`` A list of strings or :ref:`CodebaseParameter ` specifying the codebases that should be presented. The default is a single codebase with no name (i.e. `codebases=['']`). ``properties`` A list of :ref:`parameters `, one for each property. These can be arbitrary parameters, where the parameter's name is taken as the property name, or ``AnyPropertyParameter``, which allows the web user to specify the property name. The default value is an empty list. ``buttonName`` The name of the "submit" button on the resulting force-build form. This defaults to the name of scheduler. An example may be better than long explanation. What you need in your config file is something like: .. 
code-block:: python from buildbot.plugins import schedulers, util sch = schedulers.ForceScheduler( name="force", buttonName="pushMe!", label="My nice Force form", builderNames=["my-builder"], codebases=[ util.CodebaseParameter( "", label="Main repository", # will generate a combo box branch=util.ChoiceStringParameter( name="branch", choices=["master", "hest"], default="master"), # will generate nothing in the form, but revision, repository, # and project are needed by buildbot scheduling system so we # need to pass a value ("") revision=util.FixedParameter(name="revision", default=""), repository=util.FixedParameter(name="repository", default=""), project=util.FixedParameter(name="project", default=""), ), ], # will generate a text input reason=util.StringParameter(name="reason", label="reason:", required=True, size=80), # in case you don't require authentication, this will display # input for user to type their name username=util.UserNameParameter(label="your name:", size=80), # A completely customized property list. The name of the # property is the name of the parameter properties=[ util.NestedParameter(name="options", label="Build Options", layout="vertical", fields=[ util.StringParameter(name="pull_url", label="optionally give a public Git pull url:", default="", size=80), util.BooleanParameter(name="force_build_clean", label="force a make clean", default=False) ]) ]) This will result in the following UI: .. image:: ../../_images/forcedialog1.png :alt: Force Form Result Authorization ............. The force scheduler uses the web interface's authorization framework to determine which user has the right to force which build. Here is an example of code on how you can define which user has which right: .. 
code-block:: python user_mapping = { re.compile("project1-builder"): ["project1-maintainer", "john"] , re.compile("project2-builder"): ["project2-maintainer", "jack"], re.compile(".*"): ["root"] } def force_auth(user, status): global user_mapping for r,users in user_mapping.items(): if r.match(status.name): if user in users: return True return False # use authz_cfg in your WebStatus setup authz_cfg=authz.Authz( auth=my_auth, forceBuild = force_auth, ) .. _ForceScheduler-Parameters: ForceScheduler Parameters ......................... Most of the arguments to :bb:sched:`ForceScheduler` are "parameters". Several classes of parameters are available, each describing a different kind of input from a force-build form. All parameter types have a few common arguments: ``name`` (required) The name of the parameter. For properties, this will correspond to the name of the property that your parameter will set. The name is also used internally as the identifier for in the HTML form. ``label`` (optional; default is same as name) The label of the parameter. This is what is displayed to the user. ``tablabel`` (optional; default is same as label) The label of the tab if this parameter is included into a tab layout NestedParameter. This is what is displayed to the user. ``default`` (optional; default: "") The default value for the parameter that is used if there is no user input. ``required`` (optional; default: False) If this is true, then an error will be shown to user if there is no input in this field ``maxsize`` (optional; default: None) The maximum size of a field (in bytes). Buildbot will ensure the field sent by the user is not too large. ``autopopulate`` (optional; default: None) If not None, ``autopopulate`` is a dictionary which describes how other parameters are updated if this one changes. This is useful for when you have lots of parameters, and defaults depends on e.g. the branch. This is implemented generically, and all parameters can update others. 
Beware of infinite loops! .. code-block:: python c['schedulers'].append(schedulers.ForceScheduler( name="custom", builderNames=["runtests"], buttonName="Start Custom Build", codebases = [util.CodebaseParameter( codebase='', project=None, branch=util.ChoiceStringParameter( name="branch", label="Branch", strict=False, choices=["master", "dev"], autopopulate={ 'master': { 'build_name': 'build for master branch', }, 'dev': { 'build_name': 'build for dev branch', } } ))], properties=[ util.StringParameter( name="build_name", label="Name of the Build release.", default="")])) # this parameter will be auto populated when user chooses branch The parameter types are: .. bb:sched:: NestedParameter NestedParameter ############### .. code-block:: python NestedParameter(name="options", label="Build options", layout="vertical", fields=[...]), This parameter type is a special parameter which contains other parameters. This can be used to group a set of parameters together, and define the layout of your form. You can recursively include NestedParameter into NestedParameter, to build very complex UIs. It adds the following arguments: ``layout`` (optional, default is "vertical") The layout defines how the fields are placed in the form. The layouts implemented in the standard web application are: * ``simple``: fields are displayed one by one without alignment. They take the horizontal space that they need. * ``vertical``: all fields are displayed vertically, aligned in columns (as per the ``column`` attribute of the NestedParameter) * ``tabs``: each field gets its own `tab `_. This can be used to declare complex build forms which won't fit into one screen. The children fields are usually other NestedParameters with vertical layout. ``columns`` (optional, accepted values are 1, 2, 3, 4) The number of columns to use for a `vertical` layout. If omitted, it is set to 1 unless there are more than 3 visible child fields in which case it is set to 2. FixedParameter ############## .. 
code-block:: python FixedParameter(name="branch", default="trunk"), This parameter type will not be shown on the web form and always generates a property with its default value. StringParameter ############### .. code-block:: python StringParameter(name="pull_url", label="optionally give a public Git pull url:", default="", size=80) This parameter type will show a single-line text-entry box, and allow the user to enter an arbitrary string. It adds the following arguments: ``regex`` (optional) A string that will be compiled as a regex and used to validate the input of this parameter. ``size`` (optional; default is 10) The width of the input field (in characters). TextParameter ############# .. code-block:: python TextParameter(name="comments", label="comments to be displayed to the user of the built binary", default="This is a development build", cols=60, rows=5) This parameter type is similar to StringParameter, except that it is represented in the HTML form as a ``textarea``, allowing multi-line input. It adds the StringParameter arguments and the following ones: ``cols`` (optional; default is 80) The number of columns the ``textarea`` will have. ``rows`` (optional; default is 20) The number of rows the ``textarea`` will have. This class could be subclassed to have more customization, e.g. * developer could send a list of Git branches to pull from * developer could send a list of Gerrit changes to cherry-pick, * developer could send a shell script to amend the build. Beware of security issues anyway. IntParameter ############ .. code-block:: python IntParameter(name="debug_level", label="debug level (1-10)", default=2) This parameter type accepts an integer value using a text-entry box. BooleanParameter ################ .. code-block:: python BooleanParameter(name="force_build_clean", label="force a make clean", default=False) This type represents a boolean value. It will be presented as a checkbox. UserNameParameter ################# .. 
code-block:: python UserNameParameter(label="your name:", size=80) This parameter type accepts a username. If authentication is active, it will use the authenticated user instead of displaying a text-entry box. ``size`` (optional; default is 10) The width of the input field (in characters). ``need_email`` (optional; default is True) If true, requires a full email address rather than arbitrary text. .. bb:sched:: ChoiceStringParameter ChoiceStringParameter ##################### .. code-block:: python ChoiceStringParameter(name="branch", choices=["main","devel"], default="main") This parameter type lets the user choose between several choices (e.g. the list of branches you are supporting, or the test campaign to run). If ``multiple`` is false, then its result is a string with one of the choices. If ``multiple`` is true, then the result is a list of strings from the choices. Note that for some use cases, the choices need to be generated dynamically. This can be done via subclassing and overriding the 'getChoices' member function. An example of this is provided by the source for the :py:class:`InheritBuildParameter` class. Its arguments, in addition to the common options, are: ``choices`` The list of available choices. ``strict`` (optional; default is True) If true, verify that the user's input is from the list. Note that this only affects the validation of the form request; even if this argument is False, there is no HTML form component available to enter an arbitrary value. ``multiple`` If true, then the user may select multiple choices. Example: .. code-block:: python ChoiceStringParameter(name="forced_tests", label="smoke test campaign to run", default=default_tests, multiple=True, strict=True, choices=["test_builder1", "test_builder2", "test_builder3"]) # .. 
and later base the schedulers to trigger off this property: # triggers the tests depending on the property forced_test builder1.factory.addStep(Trigger(name="Trigger tests", schedulerNames=Property("forced_tests"))) Example of scheduler allowing to choose which worker to run on: .. code-block:: python worker_list = ["worker1", "worker2", "worker3"] ChoiceStringParameter(name="worker", label="worker to run the build on", default="*", multiple=False, strict=True, choices=worker_list) # .. and in nextWorker, use this property: def nextWorker(bldr, workers, buildrequest): forced_worker = buildrequest.properties.getProperty("worker", "*") if forced_worker == "*": return random.choice(workers) if workers else None for w in workers: if w.worker.workername == forced_worker: return w return None # worker not yet available c['builders'] = [ BuilderConfig(name='mybuild', factory=f, nextWorker=nextWorker, workernames=worker_list), ] .. bb:sched:: CodebaseParameter CodebaseParameter ################# .. code-block:: python CodebaseParameter(codebase="myrepo") This is a parameter group to specify a sourcestamp for a given codebase. ``codebase`` The name of the codebase. ``branch`` (optional; default is StringParameter) A :ref:`parameter ` specifying the branch to build. ``revision`` (optional; default is StringParameter) A :ref:`parameter ` specifying the revision to build. ``repository`` (optional; default is StringParameter) A :ref:`parameter ` specifying the repository for the build. ``project`` (optional; default is StringParameter) A :ref:`parameter ` specifying the project for the build. ``patch`` (optional; default is None) A :bb:sched:`PatchParameter` specifying that the user can upload a patch for this codebase. .. bb:sched:: FileParameter FileParameter ############# This parameter allows the user to upload a file to a build. The user can either write some text to a text area, or select a file from the browser. 
Note that the file is then stored inside a property, so a ``maxsize`` of 10 megabytes has been set. You can still override that ``maxsize`` if you wish. .. bb:sched:: PatchParameter PatchParameter ############## This parameter allows the user to specify a patch to be applied at the source step. The patch is stored within the sourcestamp, and associated to a codebase. That is why :bb:sched:`PatchParameter` must be set inside a :bb:sched:`CodebaseParameter`. :bb:sched:`PatchParameter` is actually a :bb:sched:`NestedParameter` composed of following fields: .. code-block:: python FileParameter('body'), IntParameter('level', default=1), StringParameter('author', default=""), StringParameter('comment', default=""), StringParameter('subdir', default=".") You can customize any of these fields by overwriting their field name e.g: .. code-block:: python c['schedulers'] = [ schedulers.ForceScheduler( name="force", codebases=[util.CodebaseParameter("foo", patch=util.PatchParameter( body=FileParameter('body', maxsize=10000)))], # override the maximum size # of a patch to 10k instead of 10M builderNames=["testy"])] .. bb:sched:: InheritBuildParameter InheritBuildParameter ##################### .. note:: InheritBuildParameter is not yet ported to data API, and cannot be used with buildbot nine yet (:bug:`3521`). This is a special parameter for inheriting force build properties from another build. The user is presented with a list of compatible builds from which to choose, and all forced-build parameters from the selected build are copied into the new build. The new parameter is: ``compatible_builds`` A function to find compatible builds in the build history. This function is given the master instance as first argument, and the current builder name as second argument, or None when forcing all builds. Example: .. 
code-block:: python @defer.inlineCallbacks def get_compatible_builds(master, builder): if builder is None: # this is the case for force_build_all return ["cannot generate build list here"] # find all successful builds in builder1 and builder2 builds = [] for builder in ["builder1", "builder2"]: # get 40 last builds for the builder build_dicts = yield master.data.get(('builders', builder, 'builds'), order=['-buildid'], limit=40) for build_dict in build_dicts: if build_dict['results'] != SUCCESS: continue builds.append(builder + "/" + str(build_dict['number'])) return builds # ... sched = Scheduler(..., properties=[ InheritBuildParameter( name="inherit", label="promote a build for merge", compatible_builds=get_compatible_builds, required = True), ]) .. bb:sched:: WorkerChoiceParameter WorkerChoiceParameter ##################### .. note:: WorkerChoiceParameter is not yet ported to data API, and cannot be used with buildbot nine yet (:bug:`3521`). This parameter allows a scheduler to require that a build is assigned to the chosen worker. The choice is assigned to the `workername` property for the build. The :py:class:`~buildbot.builder.enforceChosenWorker` functor must be assigned to the ``canStartBuild`` parameter for the ``Builder``. Example: .. code-block:: python from buildbot.plugins import util # schedulers: ForceScheduler( # ... properties=[ WorkerChoiceParameter(), ] ) # builders: BuilderConfig( # ... canStartBuild=util.enforceChosenWorker, ) AnyPropertyParameter #################### This parameter type can only be used in ``properties``, and allows the user to specify both the property name and value in the web form. This Parameter is here to reimplement old Buildbot behavior, and should be avoided. Stricter parameter names and types should be preferred. 
buildbot-3.4.0/master/docs/manual/configuration/services/000077500000000000000000000000001413250514000235075ustar00rootroot00000000000000buildbot-3.4.0/master/docs/manual/configuration/services/failing_buildset_canceller.rst000066400000000000000000000026411413250514000315600ustar00rootroot00000000000000.. _FailingBuildsetCanceller: FailingBuildsetCanceller ++++++++++++++++++++++++ .. py:class:: buildbot.plugins.util.FailingBuildsetCanceller The purpose of this service is to cancel builds once one build on a buildset fails. This is useful for reducing use of resources in cases when there is no need to gather information from all builds of a buildset once one of them fails. The service may be configured to be track a subset of builds. This is controlled by the ``filters`` parameter. The decision on whether to cancel a build is done once a build fails. The following parameters are supported by the :py:class:`FailingBuildsetCanceller`: ``name`` (required, a string) The name of the service. All services must have different names in Buildbot. For most use cases value like ``buildset_canceller`` will work fine. ``filters`` (required, a list of three-element tuples) The source stamp filters that specify which builds the build canceller should track. The first element of each tuple must be a list of builder names that the filter would apply to. The second element of each tuple must be a list of builder names that will have the builders cancelled once a build fails. Alternatively, the value ``None`` as the second element of the tuple specifies that all builds should be cancelled. The third element of each tuple must be an instance of :py:class:`buildbot.util.SourceStampFilter`. buildbot-3.4.0/master/docs/manual/configuration/services/index.rst000066400000000000000000000011131413250514000253440ustar00rootroot00000000000000.. bb:cfg:: services Custom Services --------------- .. 
toctree:: :hidden: :maxdepth: 2 failing_buildset_canceller old_build_canceller Custom services are stateful components of Buildbot that can be added to the ``services`` key of the Buildbot config dictionary. The following is the services that are meant to be used without advanced knowledge of Buildbot. * :ref:`FailingBuildsetCanceller` * :ref:`OldBuildCanceller` More complex services are described in the developer section of the Buildbot manual. They are meant to be used by advanced users of Buildbot. buildbot-3.4.0/master/docs/manual/configuration/services/old_build_canceller.rst000066400000000000000000000026531413250514000302140ustar00rootroot00000000000000.. _OldBuildCanceller: OldBuildCanceller +++++++++++++++++ .. py:class:: buildbot.plugins.util.OldBuildCanceller The purpose of this service is to cancel builds on branches as soon as a new commit is detected on the branch. This allows to reduce resource usage in projects that use Buildbot to run tests on pull request branches. For example, if a developer pushes new commits to the branch, notices and fixes a problem quickly and then pushes again, the builds that have been started on the older commit will be cancelled immediately instead of waiting for builds to finish. The service may be configured to be track a subset of builds. This is controlled by the ``filters`` parameter. The decision on whether to track a build is done on build startup. Configuration changes are ignored for builds that have already started. The following parameters are supported by the :py:class:`OldBuildCanceller`: ``name`` (required, a string) The name of the service. All services must have different names in Buildbot. For most use cases value like ``build_canceller`` will work fine. ``filters`` (required, a list of two-element tuples) The source stamp filters that specify which builds the build canceller should track. The first element of each tuple must be a list of builder names that the filter would apply to. 
The second element of each tuple must be an instance of :py:class:`buildbot.util.SourceStampFilter`. buildbot-3.4.0/master/docs/manual/configuration/steps/000077500000000000000000000000001413250514000230225ustar00rootroot00000000000000buildbot-3.4.0/master/docs/manual/configuration/steps/assert.rst000066400000000000000000000004241413250514000250550ustar00rootroot00000000000000.. bb:step:: Assert .. _Step-Assert: Assert ++++++ .. py:class:: buildbot.steps.master.Assert This build step takes a Renderable or constant passed in as first argument. It will test if the expression evaluates to ``True`` and succeed the step or fail the step otherwise. buildbot-3.4.0/master/docs/manual/configuration/steps/build_epydoc.rst000066400000000000000000000031551413250514000262220ustar00rootroot00000000000000.. bb:step:: BuildEPYDoc .. _Step-BuildEPYDoc: BuildEPYDoc +++++++++++ .. py:class:: buildbot.steps.python.BuildEPYDoc `epydoc `_ is a tool for generating API documentation for Python modules from their docstrings. It reads all the :file:`.py` files from your source tree, processes the docstrings therein, and creates a large tree of :file:`.html` files (or a single :file:`.pdf` file). The :bb:step:`BuildEPYDoc` step will run :command:`epydoc` to produce this API documentation, and will count the errors and warnings from its output. You must supply the command line to be used. The default is ``make epydocs``, which assumes that your project has a :file:`Makefile` with an `epydocs` target. You might wish to use something like :samp:`epydoc -o apiref source/{PKGNAME}` instead. You might also want to add option `--pdf` to generate a PDF file instead of a large tree of HTML files. The API docs are generated in-place in the build tree (under the workdir, in the subdirectory controlled by the option `-o` argument). To make them useful, you will probably have to copy them to somewhere they can be read. 
For example, if you have a server with a configured nginx web server, you can place the generated docs in its public folder with a command like
factory.addStep( steps.CMake( generator='Ninja', definitions={ 'CMAKE_BUILD_TYPE': Property('BUILD_TYPE') }, options=[ '-Wno-dev' ] ) ) ... buildbot-3.4.0/master/docs/manual/configuration/steps/common.rst000066400000000000000000000163461413250514000250560ustar00rootroot00000000000000.. index:: Buildstep Parameter .. _Buildstep-Common-Parameters: Parameters Common to all Steps ------------------------------ All :class:`BuildStep`\s accept some common parameters. Some of these control how their individual status affects the overall build. Others are used to specify which `Locks` (see :ref:`Interlocks`) should be acquired before allowing the step to run. Arguments common to all :class:`BuildStep` subclasses: ``name`` The name used to describe the step on the status display. Since 0.9.8, this argument might be renderable. .. index:: Buildstep Parameter; haltOnFailure ``haltOnFailure`` If ``True``, a ``FAILURE`` of this build step will cause the build to halt immediately. Any steps with ``alwaysRun=True`` will still be run. Generally speaking, ``haltOnFailure`` implies ``flunkOnFailure`` (the default for most :class:`BuildStep`\s). In some cases, particularly with a series of tests, it makes sense to ``haltOnFailure`` if something fails early on but not ``flunkOnFailure``. This can be achieved with ``haltOnFailure=True``, ``flunkOnFailure=False``. .. index:: Buildstep Parameter; flunkOnWarnings ``flunkOnWarnings`` When ``True``, a ``WARNINGS`` or ``FAILURE`` of this build step will mark the overall build as ``FAILURE``. The remaining steps will still be executed. .. index:: Buildstep Parameter; flunkOnFailure ``flunkOnFailure`` When ``True``, a ``FAILURE`` of this build step will mark the overall build as a ``FAILURE``. The remaining steps will still be executed. .. index:: Buildstep Parameter; warnOnWarnings ``warnOnWarnings`` When ``True``, a ``WARNINGS`` or ``FAILURE`` of this build step will mark the overall build as having ``WARNINGS``. 
The remaining steps will still be executed. .. index:: Buildstep Parameter; warnOnFailure ``warnOnFailure`` When ``True``, a ``FAILURE`` of this build step will mark the overall build as having ``WARNINGS``. The remaining steps will still be executed. .. index:: Buildstep Parameter; alwaysRun ``alwaysRun`` If ``True``, this build step will always be run, even if a previous buildstep with ``haltOnFailure=True`` has failed. .. index:: Buildstep Parameter; description ``description`` This will be used to describe the command (on the Waterfall display) while the command is still running. It should be a single imperfect-tense verb, like `compiling` or `testing`. The preferred form is a single, short string, but for historical reasons a list of strings is also acceptable. .. index:: Buildstep Parameter; descriptionDone ``descriptionDone`` This will be used to describe the command once it has finished. A simple noun like `compile` or `tests` should be used. Like ``description``, this may either be a string or a list of short strings. If neither ``description`` nor ``descriptionDone`` are set, the actual command arguments will be used to construct the description. This may be a bit too wide to fit comfortably on the Waterfall display. All subclasses of :py:class:`BuildStep` will contain the description attributes. Consequently, you could add a :bb:step:`ShellCommand` step like so: .. code-block:: python from buildbot.plugins import steps f.addStep(steps.ShellCommand(command=["make", "test"], description="testing", descriptionDone="tests")) .. index:: Buildstep Parameter; descriptionSuffix ``descriptionSuffix`` This is an optional suffix appended to the end of the description (ie, after ``description`` and ``descriptionDone``). This can be used to distinguish between build steps that would display the same descriptions in the waterfall. This parameter may be a string, a list of short strings or ``None``. 
For example, a builder might use the :bb:step:`Compile` step to build two different codebases. The ``descriptionSuffix`` could be set to `projectFoo` and `projectBar`, respectively for each step, which will result in the full descriptions `compiling projectFoo` and `compiling projectBar` to be shown in the waterfall. .. index:: Buildstep Parameter; doStepIf ``doStepIf`` A step can be configured to only run under certain conditions. To do this, set the step's ``doStepIf`` to a boolean value, or to a function that returns a boolean value or Deferred. If the value or function result is false, then the step will return ``SKIPPED`` without doing anything. Otherwise, the step will be executed normally. If you set ``doStepIf`` to a function, that function should accept one parameter, which will be the :class:`BuildStep` object itself. .. index:: Buildstep Parameter; hideStepIf ``hideStepIf`` A step can be optionally hidden from the waterfall and build details web pages. To do this, set the step's ``hideStepIf`` to a boolean value, or a function that takes two parameters (the results and the :class:`BuildStep`) and returns a boolean value. Steps are always shown while they execute; however, after the step has finished, this parameter is evaluated (if it's a function), and if the value is true, the step is hidden. For example, in order to hide the step if the step has been skipped: .. code-block:: python factory.addStep(Foo(..., hideStepIf=lambda results, s: results==SKIPPED)) .. index:: Buildstep Parameter; locks ``locks`` A list of ``Locks`` (instances of :class:`buildbot.locks.WorkerLock` or :class:`buildbot.locks.MasterLock`) that should be acquired before starting this :py:class:`BuildStep`. Alternatively, this could be a renderable that returns this list during build execution. This lets you defer picking the locks to acquire until the build step is about to start running. The ``Locks`` will be released when the step is complete. 
Note that this is a list of actual :class:`Lock` instances, not names. Also note that all Locks must have unique names. See :ref:`Interlocks`. .. index:: Buildstep Parameter; logEncoding ``logEncoding`` The character encoding to use to decode logs produced during the execution of this step. This overrides the default :bb:cfg:`logEncoding`; see :ref:`Log-Encodings`. .. index:: Buildstep Parameter; updateBuildSummaryPolicy ``updateBuildSummaryPolicy`` The policy to use to propagate the step summary to the build summary. If False, the build summary will never include the step summary. If True, the build summary will always include the step summary. If set to a list (e.g. ``[FAILURE, EXCEPTION]``), the step summary will be propagated if the step results id is present in that list. If not set or None, the default is computed according to other BuildStep parameters using following algorithm: .. code-block:: python self.updateBuildSummaryPolicy = [EXCEPTION, RETRY, CANCELLED] if self.flunkOnFailure or self.haltOnFailure or self.warnOnFailure: self.updateBuildSummaryPolicy.append(FAILURE) if self.warnOnWarnings or self.flunkOnWarnings: self.updateBuildSummaryPolicy.append(WARNINGS) Note that in a custom step, if :py:meth:`BuildStep.getResultSummary` is overridden and sets the ``build`` summary, ``updateBuildSummaryPolicy`` is ignored and the ``build`` summary will be used regardless. buildbot-3.4.0/master/docs/manual/configuration/steps/compile.rst000066400000000000000000000123411413250514000252050ustar00rootroot00000000000000.. bb:step:: Compile .. _Step-Compile: Compile +++++++ .. index:: Properties; warnings-count This is meant to handle compiling or building a project written in C. The default command is ``make all``. When the compilation is finished, the log file is scanned for GCC warning messages, a summary log is created with any problems that were seen, and the step is marked as WARNINGS if any were discovered. 
Through the :class:`WarningCountingShellCommand` superclass, the number of warnings is stored in a Build Property named `warnings-count`, which is accumulated over all :bb:step:`Compile` steps (so if two warnings are found in one step, and three are found in another step, the overall build will have a `warnings-count` property of 5). Each step can be optionally given a maximum number of warnings via the maxWarnCount parameter. If this limit is exceeded, the step will be marked as a failure. The default regular expression used to detect a warning is ``'.*warning[: ].*'`` , which is fairly liberal and may cause false-positives. To use a different regexp, provide a ``warningPattern=`` argument, or use a subclass which sets the ``warningPattern`` attribute: .. code-block:: python from buildbot.plugins import steps f.addStep(steps.Compile(command=["make", "test"], warningPattern="^Warning: ")) The ``warningPattern=`` can also be a pre-compiled Python regexp object: this makes it possible to add flags like ``re.I`` (to use case-insensitive matching). Note that the compiled ``warningPattern`` will have its :meth:`match` method called, which is subtly different from a :meth:`search`. Your regular expression must match the from the beginning of the line. This means that to look for the word "warning" in the middle of a line, you will need to prepend ``'.*'`` to your regular expression. The ``suppressionFile=`` argument can be specified as the (relative) path of a file inside the workdir defining warnings to be suppressed from the warning counting and log file. The file will be uploaded to the master from the worker before compiling, and any warning matched by a line in the suppression file will be ignored. This is useful to accept certain warnings (e.g. in some special module of the source tree or in cases where the compiler is being particularly stupid), yet still be able to easily detect and fix the introduction of new warnings. 
The file must contain one line per pattern of warnings to ignore. Empty lines and lines beginning with ``#`` are ignored. Other lines must consist of a regexp matching the file name, followed by a colon (``:``), followed by a regexp matching the text of the warning. Optionally this may be followed by another colon and a line number range. For example: .. code-block:: none # Sample warning suppression file mi_packrec.c : .*result of 32-bit shift implicitly converted to 64 bits.* : 560-600 DictTabInfo.cpp : .*invalid access to non-static.* kernel_types.h : .*only defines private constructors and has no friends.* : 51 If no line number range is specified, the pattern matches the whole file; if only one number is given it matches only on that line. The ``suppressionList=`` argument can be specified as a list of four-tuples as addition or instead of ``suppressionFile=``. The tuple should be ``[ FILE-RE, WARNING-RE, START, END ]``. If ``FILE-RE`` is ``None``, then the suppression applies to any file. ``START`` and ``END`` can be specified as in suppression file, or ``None``. The default warningPattern regexp only matches the warning text, so line numbers and file names are ignored. To enable line number and file name matching, provide a different regexp and provide a function (callable) as the argument of ``warningExtractor=``. The function is called with three arguments: the :class:`BuildStep` object, the line in the log file with the warning, and the ``SRE_Match`` object of the regexp search for ``warningPattern``. It should return a tuple ``(filename, linenumber, warning_test)``. For example: .. code-block:: python f.addStep(Compile(command=["make"], warningPattern="^(.\*?):([0-9]+): [Ww]arning: (.\*)$", warningExtractor=Compile.warnExtractFromRegexpGroups, suppressionFile="support-files/compiler_warnings.supp")) (``Compile.warnExtractFromRegexpGroups`` is a pre-defined function that returns the filename, linenumber, and text from groups (1,2,3) of the regexp match). 
In projects with source files in multiple directories, it is possible to get full path names for file names matched in the suppression file, as long as the build command outputs the names of directories as they are entered into and left again. For this, specify regexps for the arguments ``directoryEnterPattern=`` and ``directoryLeavePattern=``. The ``directoryEnterPattern=`` regexp should return the name of the directory entered into in the first matched group. The defaults, which are suitable for GNU Make, are these: .. code-block:: python directoryEnterPattern="make.*: Entering directory [\"`'](.*)['`\"]" directoryLeavePattern="make.*: Leaving directory" (TODO: this step needs to be extended to look for GCC error messages as well, and collect them into a separate logfile, along with the source code filenames involved). buildbot-3.4.0/master/docs/manual/configuration/steps/configure.rst000066400000000000000000000010371413250514000255360ustar00rootroot00000000000000.. bb:step:: Configure .. _Step-Configure: Configure +++++++++ .. py:class:: buildbot.steps.shell.Configure This is intended to handle the :command:`./configure` step from autoconf-style projects, or the ``perl Makefile.PL`` step from perl :file:`MakeMaker.pm`-style modules. The default command is :command:`./configure` but you can change this by providing a ``command=`` parameter. The arguments are identical to :bb:step:`ShellCommand`. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.Configure()) buildbot-3.4.0/master/docs/manual/configuration/steps/cppcheck.rst000066400000000000000000000021361413250514000253360ustar00rootroot00000000000000.. bb:step:: Cppcheck .. _Step-Cppcheck: Cppcheck ++++++++ This step runs ``cppcheck``, analyse its output, and set the outcome in :ref:`Properties`. .. 
code-block:: python from buildbot.plugins import steps f.addStep(steps.Cppcheck(enable=['all'], inconclusive=True])) This class adds the following arguments: ``binary`` (Optional, defaults to ``cppcheck``) Use this if you need to give the full path to the cppcheck binary or if your binary is called differently. ``source`` (Optional, defaults to ``['.']``) This is the list of paths for the sources to be checked by this step. ``enable`` (Optional) Use this to give a list of the message classes that should be in cppcheck report. See the cppcheck man page for more information. ``inconclusive`` (Optional) Set this to ``True`` if you want cppcheck to also report inconclusive results. See the cppcheck man page for more information. ``extra_args`` (Optional) This is the list of extra arguments to be given to the cppcheck command. All other arguments are identical to :bb:step:`ShellCommand`. buildbot-3.4.0/master/docs/manual/configuration/steps/deb_lintian.rst000066400000000000000000000012051413250514000260220ustar00rootroot00000000000000.. bb:step:: DebLintian .. _Step-DebLintian: DebLintian ++++++++++ The :bb:step:`DebLintian` step checks a build .deb for bugs and policy violations. The packages or changes file to test is specified in ``fileloc``. .. code-block:: python from buildbot.plugins import steps, util f.addStep(steps.DebLintian(fileloc=util.Interpolate("%(prop:deb-changes)s"))) This class adds the following arguments: ``fileloc`` (Optional, string) Location of the .deb or .changes files to test. ``suppressTags`` (Optional, list of strings) List of tags to suppress. All other arguments are identical to :bb:step:`ShellCommand`. buildbot-3.4.0/master/docs/manual/configuration/steps/deb_pbuilder.rst000066400000000000000000000026541413250514000262030ustar00rootroot00000000000000.. bb:step:: DebPbuilder .. _Step-DebPbuilder: DebPbuilder +++++++++++ The :bb:step:`DebPbuilder` step builds Debian packages within a chroot built by :command:`pbuilder`. 
It populates the chroot with a basic system and the packages listed as build requirements. The type of the chroot to build is specified with the ``distribution``, ``distribution`` and ``mirror`` parameter. To use pbuilder, your Buildbot user must have the right to run :command:`pbuilder` as root using :command:`sudo`. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.DebPbuilder()) The step takes the following parameters ``architecture`` Architecture to build chroot for. ``distribution`` Name, or nickname, of the distribution. Defaults to 'stable'. ``basetgz`` Path of the basetgz to use for building. ``mirror`` URL of the mirror used to download the packages from. ``othermirror`` List of additional ``deb URL ...`` lines to add to ``sources.list``. ``extrapackages`` List if packages to install in addition to the base system. ``keyring`` Path to a gpg keyring to verify the downloaded packages. This is necessary if you build for a foreign distribution. ``components`` Repos to activate for chroot building. .. bb:step:: DebCowbuilder DebCowbuilder +++++++++++++ The :bb:step:`DebCowbuilder` step is a subclass of :bb:step:`DebPbuilder`, which use cowbuilder instead of pbuilder. buildbot-3.4.0/master/docs/manual/configuration/steps/file_transfer.rst000066400000000000000000000265011413250514000264030ustar00rootroot00000000000000 .. index:: File Transfer .. bb:step:: FileUpload .. bb:step:: FileDownload .. _Step-FileTransfer: Transferring Files ------------------ .. py:class:: buildbot.steps.transfer.FileUpload .. py:class:: buildbot.steps.transfer.FileDownload Most of the work involved in a build will take place on the worker. But occasionally it is useful to do some work on the buildmaster side. The most basic way to involve the buildmaster is simply to move a file from the worker to the master, or vice versa. There are a pair of steps named :bb:step:`FileUpload` and :bb:step:`FileDownload` to provide this functionality. 
:bb:step:`FileUpload` moves a file *up to* the master, while :bb:step:`FileDownload` moves a file *down from* the master. As an example, let's assume that there is a step which produces an HTML file within the source tree that contains some sort of generated project documentation. And let's assume that we run nginx web server on the buildmaster host for serving static files. We want to move this file to the buildmaster, into a :file:`/usr/share/nginx/www/` directory, so it can be visible to developers. This file will wind up in the worker-side working directory under the name :file:`docs/reference.html`. We want to put it into the master-side :file:`/usr/share/nginx/www/ref.html`, and add a link to the HTML status to the uploaded file. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.ShellCommand(command=["make", "docs"])) f.addStep(steps.FileUpload(workersrc="docs/reference.html", masterdest="/usr/share/nginx/www/ref.html", url="http://somesite/~buildbot/ref.html")) The ``masterdest=`` argument will be passed to :meth:`os.path.expanduser`, so things like ``~`` will be expanded properly. Non-absolute paths will be interpreted relative to the buildmaster's base directory. Likewise, the ``workersrc=`` argument will be expanded and interpreted relative to the builder's working directory. .. note:: The copied file will have the same permissions on the master as on the worker, look at the ``mode=`` parameter to set it differently. To move a file from the master to the worker, use the :bb:step:`FileDownload` command. For example, let's assume that some step requires a configuration file that, for whatever reason, could not be recorded in the source code repository or generated on the worker side: .. 
code-block:: python from buildbot.plugins import steps f.addStep(steps.FileDownload(mastersrc="~/todays_build_config.txt", workerdest="build_config.txt")) f.addStep(steps.ShellCommand(command=["make", "config"])) Like :bb:step:`FileUpload`, the ``mastersrc=`` argument is interpreted relative to the buildmaster's base directory, and the ``workerdest=`` argument is relative to the builder's working directory. If the worker is running in :file:`~worker`, and the builder's ``builddir`` is something like :file:`tests-i386`, then the workdir is going to be :file:`~worker/tests-i386/build`, and a ``workerdest=`` of :file:`foo/bar.html` will get put in :file:`~worker/tests-i386/build/foo/bar.html`. Both of these commands will create any missing intervening directories. Other Parameters ++++++++++++++++ The ``maxsize=`` argument lets you set a maximum size for the file to be transferred. This may help to avoid surprises: transferring a 100MB coredump when you were expecting to move a 10kB status file might take an awfully long time. The ``blocksize=`` argument controls how the file is sent over the network: larger blocksizes are slightly more efficient but also consume more memory on each end, and there is a hard-coded limit of about 640kB. The ``mode=`` argument allows you to control the access permissions of the target file, traditionally expressed as an octal integer. The most common value is probably ``0o755``, which sets the `x` executable bit on the file (useful for shell scripts and the like). The default value for ``mode=`` is ``None``, which means the permission bits will default to whatever the umask of the writing process is. The default umask tends to be fairly restrictive, but at least on the worker you can make it less restrictive with a ``--umask`` command-line option at creation time (:ref:`Worker-Options`). 
The ``keepstamp=`` argument is a boolean that, when ``True``, forces the modified and accessed time of the destination file to match the times of the source file. When ``False`` (the default), the modified and accessed times of the destination file are set to the current time on the buildmaster. The ``url=`` argument allows you to specify an url that will be displayed in the HTML status. The title of the url will be the name of the item transferred (directory for :class:`DirectoryUpload` or file for :class:`FileUpload`). This allows the user to add a link to the uploaded item if that one is uploaded to an accessible place. For :bb:step:`FileUpload`, the ``urlText=`` argument allows you to specify the url title that will be displayed in the web UI. .. bb:step:: DirectoryUpload Transferring Directories ++++++++++++++++++++++++ .. py:class:: buildbot.steps.transfer.DirectoryUpload To transfer complete directories from the worker to the master, there is a :class:`BuildStep` named :bb:step:`DirectoryUpload`. It works like :bb:step:`FileUpload`, just for directories. However it does not support the ``maxsize``, ``blocksize`` and ``mode`` arguments. As an example, let's assume an generated project documentation, which consists of many files (like the output of :command:`doxygen` or :command:`epydoc`). And let's assume that we run nginx web server on buildmaster host for serving static files. We want to move the entire documentation to the buildmaster, into a :file:`/usr/share/nginx/www/docs` directory, and add a link to the uploaded documentation on the HTML status page. On the worker-side the directory can be found under :file:`docs`: .. code-block:: python from buildbot.plugins import steps f.addStep(steps.ShellCommand(command=["make", "docs"])) f.addStep(steps.DirectoryUpload(workersrc="docs", masterdest="/usr/share/nginx/www/docs", url="~buildbot/docs")) The :bb:step:`DirectoryUpload` step will create all necessary directories and transfers empty directories, too. 
The ``maxsize`` and ``blocksize`` parameters are the same as for :bb:step:`FileUpload`, although note that the size of the transferred data is implementation-dependent, and probably much larger than you expect due to the encoding used (currently tar). The optional ``compress`` argument can be given as ``'gz'`` or ``'bz2'`` to compress the datastream. For :bb:step:`DirectoryUpload` the ``urlText=`` argument allows you to specify the url title that will be displayed in the web UI. .. note:: The permissions on the copied files will be the same on the master as originally on the worker, see option ``buildbot-worker create-worker --umask`` to change the default one. .. bb:step:: MultipleFileUpload Transferring Multiple Files At Once +++++++++++++++++++++++++++++++++++ .. py:class:: buildbot.steps.transfer.MultipleFileUpload In addition to the :bb:step:`FileUpload` and :bb:step:`DirectoryUpload` steps there is the :bb:step:`MultipleFileUpload` step for uploading a bunch of files (and directories) in a single :class:`BuildStep`. The step supports all arguments that are supported by :bb:step:`FileUpload` and :bb:step:`DirectoryUpload`, but instead of a the single ``workersrc`` parameter it takes a (plural) ``workersrcs`` parameter. This parameter should either be a list, something that can be rendered as a list or a string which will be converted to a list. Additionally it supports the ``glob`` parameter if this parameter is set to ``True`` all arguments in ``workersrcs`` will be parsed through ``glob`` and the results will be uploaded to ``masterdest``.: .. code-block:: python from buildbot.plugins import steps f.addStep(steps.ShellCommand(command=["make", "test"])) f.addStep(steps.ShellCommand(command=["make", "docs"])) f.addStep(steps.MultipleFileUpload(workersrcs=["docs", "test-results.html"], masterdest="/usr/share/nginx/www/", url="~buildbot")) The ``url=`` parameter, can be used to specify a link to be displayed in the HTML status of the step. 
The way URLs are added to the step can be customized by extending the :bb:step:`MultipleFileUpload` class. The `allUploadsDone` method is called after all files have been uploaded and sets the URL. The `uploadDone` method is called once for each uploaded file and can be used to create file-specific links. .. code-block:: python import os from buildbot.plugins import steps class CustomFileUpload(steps.MultipleFileUpload): linkTypes = ('.html', '.txt') def linkFile(self, basename): name, ext = os.path.splitext(basename) return ext in self.linkTypes def uploadDone(self, result, source, masterdest): if self.url: basename = os.path.basename(source) if self.linkFile(basename): self.addURL(self.url + '/' + basename, basename) def allUploadsDone(self, result, sources, masterdest): if self.url: notLinked = [src for src in sources if not self.linkFile(src)] numFiles = len(notLinked) if numFiles: self.addURL(self.url, '... %d more' % numFiles) For :bb:step:`MultipleFileUpload` the ``urlText=`` argument allows you to specify the url title that will be displayed in the web UI. .. bb:step:: StringDownload .. bb:step:: JSONStringDownload .. bb:step:: JSONPropertiesDownload Transferring Strings -------------------- .. py:class:: buildbot.steps.transfer.StringDownload .. py:class:: buildbot.steps.transfer.JSONStringDownload .. py:class:: buildbot.steps.transfer.JSONPropertiesDownload Sometimes it is useful to transfer a calculated value from the master to the worker. Instead of having to create a temporary file and then use FileDownload, you can use one of the string download steps. .. code-block:: python from buildbot.plugins import steps, util f.addStep(steps.StringDownload(util.Interpolate("%(src::branch)s-%(prop:got_revision)s\n"), workerdest="buildid.txt")) :bb:step:`StringDownload` works just like :bb:step:`FileDownload` except it takes a single argument, ``s``, representing the string to download instead of a ``mastersrc`` argument. .. 
code-block:: python from buildbot.plugins import steps buildinfo = { 'branch': Property('branch'), 'got_revision': Property('got_revision') } f.addStep(steps.JSONStringDownload(buildinfo, workerdest="buildinfo.json")) :bb:step:`JSONStringDownload` is similar, except it takes an ``o`` argument, which must be JSON serializable, and transfers that as a JSON-encoded string to the worker. .. index:: Properties; JSONPropertiesDownload .. code-block:: python from buildbot.plugins import steps f.addStep(steps.JSONPropertiesDownload(workerdest="build-properties.json")) :bb:step:`JSONPropertiesDownload` transfers a json-encoded string that represents a dictionary where properties maps to a dictionary of build property ``name`` to property ``value``; and ``sourcestamp`` represents the build's sourcestamp. buildbot-3.4.0/master/docs/manual/configuration/steps/git_diffinfo.rst000066400000000000000000000076201413250514000262100ustar00rootroot00000000000000.. bb:step:: GitDiffInfo .. _Step-GitDiffInfo: GitDiffInfo +++++++++++ The `GitDiffInfo` step gathers information about differences between the current revision and the last common ancestor of this revision and another commit or branch. This information is useful for various reporters to be able to identify new warnings that appear in newly modified code. The diff information is stored as a custom json as transient build data via ``setBuildData`` function. Currently only git repositories are supported. The class inherits the arguments accepted by ``ShellMixin`` except ``command``. Additionally, it accepts the following arguments: ``compareToRef`` (Optional, string, defaults to ``master``) The commit or branch identifying the revision to get the last common ancestor to. In most cases, this will be the target branch of a pull or merge request. ``dataName`` (Optional, string, defaults to ``diffinfo-master``) The name of the build data to save the diff json to. 
Build data specification ------------------------ This section documents the format of the data produced by the ``GitDiffInfo`` step and put into build data. Any future steps performing the same operation on different version control systems should produce data in the same format. Likewise, all consumers should expect the input data to be in the format as documented here. Conceptually, the diffinfo data is a list of file changes, each of which itself contain a list of diff hunks within that file. This data is stored as a JSON document. The root element is a list of objects, each of which represent a file where changes have been detected. Each of these **file** objects has the following keys: - ``source_file`` - a string representing path to the source file. This does not include any prefixes such as ``a/``. When there is no source file, e.g. when a new file is created, ``/dev/null`` is used. - ``target_file`` - a string representing path to the target file. This does not include any prefixes such as ``b/``. When there is no target file, e.g. when a file has been deleted, ``/dev/null`` is used. - ``is_binary`` - a boolean specifying whether this is a binary file or not. Changes in binary files are not interpreted as hunks. - ``is_rename`` - a boolean specifying whether this file has been renamed - ``hunks`` - a list of objects (described below) specifying individual changes within the file. Each of the **hunk** objects has the following keys: - ``ss`` - an integer specifying the start line of the diff hunk in the source file - ``sl`` - an integer specifying the length of the hunk in the source file as a number of lines - ``ts`` - an integer specifying the start line of the diff hunk in the target file - ``tl`` - an integer specifying the length of the hunk in the target file as a number lines Example of produced build data ------------------------------ The following shows build data that is produced for a deleted file, a changed file and a new file. .. 
code-block:: python [ { "source_file": "file1", "target_file": "/dev/null", "is_binary": false, "is_rename": false, "hunks": [ { "ss": 1, "sl": 3, "ts": 0, "tl": 0 } ] }, { "source_file": "file2", "target_file": "file2", "is_binary": false, "is_rename": false, "hunks": [ { "ss": 4, "sl": 0, "ts": 5, "tl": 3 }, { "ss": 15, "sl": 0, "ts": 19, "tl": 3 } ] }, { "source_file": "/dev/null", "target_file": "file3", "is_binary": false, "is_rename": false, "hunks": [ { "ss": 0, "sl": 0, "ts": 1, "tl": 3 } ] } ] buildbot-3.4.0/master/docs/manual/configuration/steps/gitcommit.rst000066400000000000000000000033411413250514000255510ustar00rootroot00000000000000.. bb:step:: GitCommit .. _Step-GitCommit: GitCommit +++++++++ .. py:class:: buildbot.steps.source.git.GitCommit The :bb:step:`GitCommit` build step adds files and commits modifications in your local `Git `_ repository. The GitCommit step takes the following arguments: ``workdir`` (required) The path to the local repository to push commits from. ``messages`` (required) List of message that will be created with the commit. Correspond to the ``-m`` flag of the ``git commit`` command. ``paths`` (required) List of path that will be added to the commit. ``logEnviron`` (optional) If this option is true (the default), then the step's logfile will describe the environment variables on the worker. In situations where the environment is not relevant and is long, it may be easier to set ``logEnviron=False``. ``env`` (optional) A dictionary of environment strings which will be added to the child command's environment. The usual property interpolations can be used in environment variable names and values - see :ref:`Properties`. ``timeout`` (optional) Specifies the timeout for worker-side operations, in seconds. If your repositories are particularly large, then you may need to increase this value from its default of 1200 (20 minutes). ``config`` (optional) A dict of git configuration settings to pass to the remote git commands. 
``emptyCommits`` (optional) One of the values ``disallow`` (default), ``create-empty-commit``, and ``ignore``. Decides the behavior when there is nothing to be committed. The value ``disallow`` will make the buildstep fail. The value ``create-empty-commit`` will create an empty commit. The value ``ignore`` will create no commit. buildbot-3.4.0/master/docs/manual/configuration/steps/gitpush.rst000066400000000000000000000051011413250514000252340ustar00rootroot00000000000000.. bb:step:: GitPush .. _Step-GitPush: GitPush +++++++ .. py:class:: buildbot.steps.source.git.GitPush The :bb:step:`GitPush` build step pushes new commits to a `Git `_ repository. The GitPush step takes the following arguments: ``workdir`` (required) The path to the local repository to push commits from. ``repourl`` (required) The URL of the upstream Git repository. ``branch`` (required) The branch to push. The branch should already exist on the local repository. ``force`` (optional) If ``True``, forces overwrite of refs on the remote repository. Corresponds to the ``--force`` flag of the ``git push`` command. ``logEnviron`` (optional) If this option is true (the default), then the step's logfile will describe the environment variables on the worker. In situations where the environment is not relevant and is long, it may be easier to set ``logEnviron=False``. ``env`` (optional) A dictionary of environment strings which will be added to the child command's environment. The usual property interpolations can be used in environment variable names and values - see :ref:`Properties`. ``timeout`` (optional) Specifies the timeout for worker-side operations, in seconds. If your repositories are particularly large, then you may need to increase this value from its default of 1200 (20 minutes). ``config`` (optional) A dict of git configuration settings to pass to the remote git commands. ``sshPrivateKey`` (optional) The private key to use when running git for fetch operations. 
The ssh utility must be in the system path in order to use this option. On Windows only git distribution that embeds MINGW has been tested (as of July 2017 the official distribution is MINGW-based). The worker must either have the host in the known hosts file or the host key must be specified via the ``sshHostKey`` option. ``sshHostKey`` (optional) Specifies public host key to match when authenticating with SSH public key authentication. This may be either a :ref:`Secret` or just a string. ``sshPrivateKey`` must be specified in order to use this option. The host key must be in the form of `` ``, e.g. ``ssh-rsa AAAAB3N<...>FAaQ==``. ``sshKnownHosts`` (optional) Specifies the contents of the SSH known_hosts file to match when authenticating with SSH public key authentication. This may be either a :ref:`Secret` or just a string. `sshPrivateKey` must be specified in order to use this option. `sshHostKey` must not be specified in order to use this option. buildbot-3.4.0/master/docs/manual/configuration/steps/gittag.rst000066400000000000000000000031371413250514000250370ustar00rootroot00000000000000.. bb:step:: GitTag .. _Step-GitTag: GitTag ++++++ .. py:class:: buildbot.steps.source.git.GitTag The :bb:step:`GitTag` build step creates a tag in your local `Git `_ repository. The GitTag step takes the following arguments: ``workdir`` (required) The path to the local repository to push commits from. ``tagName`` (required) The name of the tag. ``annotated`` (optional) If ``True``, create an annotated tag. ``messages`` (optional) List of message that will be created with the annotated tag. Must be set only if annotated parameter is ``True``. Correspond to the ``-m`` flag of the ``git tag`` command. ``force`` (optional) If ``True``, forces overwrite of tags on the local repository. Corresponds to the ``--force`` flag of the ``git tag`` command. 
``logEnviron`` (optional) If this option is true (the default), then the step's logfile will describe the environment variables on the worker. In situations where the environment is not relevant and is long, it may be easier to set ``logEnviron=False``. ``env`` (optional) A dictionary of environment strings which will be added to the child command's environment. The usual property interpolations can be used in environment variable names and values - see :ref:`Properties`. ``timeout`` (optional) Specifies the timeout for worker-side operations, in seconds. If your repositories are particularly large, then you may need to increase this value from its default of 1200 (20 minutes). ``config`` (optional) A dict of git configuration settings to pass to the remote git commands. buildbot-3.4.0/master/docs/manual/configuration/steps/hlint.rst000066400000000000000000000011751413250514000246760ustar00rootroot00000000000000.. bb:step:: HLint .. _Step-HLint: HLint +++++ The :bb:step:`HLint` step runs Twisted Lore, a lint-like checker over a set of ``.xhtml`` files. Any deviations from recommended style is flagged and put in the output log. The step looks at the list of changes in the build to determine which files to check - it does not check all files. It specifically excludes any ``.xhtml`` files in the top-level ``sandbox/`` directory. The step takes a single, optional, parameter: ``python``. This specifies the Python executable to use to run Lore. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.HLint()) buildbot-3.4.0/master/docs/manual/configuration/steps/http_step.rst000066400000000000000000000047441413250514000255770ustar00rootroot00000000000000.. index:: HTTP Requests .. bb:step:: HTTPStep .. bb:step:: POST .. bb:step:: GET .. bb:step:: PUT .. bb:step:: DELETE .. bb:step:: HEAD .. bb:step:: OPTIONS .. 
_Step-HTTPStep: HTTP Requests +++++++++++++ Using the :bb:step:`HTTPStep` step, it is possible to perform HTTP requests in order to trigger another REST service about the progress of the build. .. note:: This step requires the `txrequests `_ and `requests `_ Python libraries. The parameters are the following: ``url`` (mandatory) The URL where to send the request ``method`` The HTTP method to use (out of ``POST``, ``GET``, ``PUT``, ``DELETE``, ``HEAD`` or ``OPTIONS``), default to ``POST``. ``params`` Dictionary of URL parameters to append to the URL. ``data`` The body to attach the request. If a dictionary is provided, form-encoding will take place. ``headers`` Dictionary of headers to send. ``hide_request_headers`` Iterable of request headers to be hidden from the log. The header will be listed in the log but the value will be shown as ````. ``hide_response_headers`` Iterable of response headers to be hidden from the log. The header will be listed in the log but the value will be shown as ````. ``other params`` Any other keywords supported by the ``requests`` `api `_ can be passed to this step. .. note:: The entire Buildbot master process shares a single Requests ``Session`` object. This has the advantage of supporting connection re-use and other HTTP/1.1 features. However, it also means that any cookies or other state changed by one step will be visible to other steps, causing unexpected results. This behavior may change in future versions. When the method is known in advance, class with the name of the method can also be used. In this case, it is not necessary to specify the method. Example: .. 
code-block:: python from buildbot.plugins import steps, util f.addStep(steps.POST('http://myRESTService.example.com/builds', data = { 'builder': util.Property('buildername'), 'buildnumber': util.Property('buildnumber'), 'workername': util.Property('workername'), 'revision': util.Property('got_revision') })) buildbot-3.4.0/master/docs/manual/configuration/steps/index.rst000066400000000000000000000120331413250514000246620ustar00rootroot00000000000000.. _Build-Steps: Build Steps =========== .. toctree:: :hidden: :maxdepth: 2 common source_common source_bzr source_cvs source_darcs source_gerrit source_github source_gitlab source_git source_mercurial source_monotone source_p4 source_repo source_svn gitcommit gittag gitpush git_diffinfo shell_command shell_sequence compile configure cmake visual_cxx cppcheck robocopy test treesize perl_module_test subunit_shell_command hlint maxq trigger build_epydoc pyflakes sphinx pylint trial remove_pycs http_step worker_filesystem file_transfer master_shell_command log_renderable assert set_property set_properties set_property_from_command set_properties_from_env rpm_build rpm_lint mock_build_srpm mock_rebuild deb_pbuilder deb_lintian :class:`BuildStep`\s are usually specified in the buildmaster's configuration file, in a list that given to a :class:`BuildFactory`. The :class:`BuildStep` instances in this list are used as templates to construct new independent copies for each build (so that state can be kept on the :class:`BuildStep` in one build without affecting a later build). Each :class:`BuildFactory` can be created with a list of steps, or the factory can be created empty and then steps added to it using the :meth:`addStep` method: .. 
code-block:: python from buildbot.plugins import util, steps f = util.BuildFactory() f.addSteps([ steps.SVN(repourl="http://svn.example.org/Trunk/"), steps.ShellCommand(command=["make", "all"]), steps.ShellCommand(command=["make", "test"]) ]) The basic behavior for a :class:`BuildStep` is to: * run for a while, then stop * possibly invoke some RemoteCommands on the attached worker * possibly produce a set of log files * finish with a status described by one of four values defined in :mod:`buildbot.process.results`: ``SUCCESS``, ``WARNINGS``, ``FAILURE``, ``SKIPPED`` * provide a list of short strings to describe the step The rest of this section describes all the standard :class:`BuildStep` objects available for use in a :class:`Build`, and the parameters that can be used to control each. A full list of build steps is available in the :bb:index:`step`. .. contents:: :depth: 2 :local: Build steps ----------- The following build steps are available: * :ref:`Buildstep-Common-Parameters` * **Source checkout steps** - used to checkout the source code * :ref:`Step-Source-Common` * :ref:`Step-Bzr` * :ref:`Step-CVS` * :ref:`Step-Darcs` * :ref:`Step-Git` * :ref:`Step-Gerrit` * :ref:`Step-GitHub` * :ref:`Step-GitLab` * :ref:`Step-Mercurial` * :ref:`Step-Monotone` * :ref:`Step-P4` * :ref:`Step-Repo` * :ref:`Step-SVN` * **Other source-related steps** - used to perform non-checkout source operations * :ref:`Step-GitCommit` * :ref:`Step-GitTag` * :ref:`Step-GitPush` * :ref:`Step-GitDiffInfo` * **ShellCommand steps** - used to perform various shell-based operations * :ref:`Step-ShellCommand` * :ref:`Step-ShellSequence` * :ref:`Step-Compile` * :ref:`Step-Configure` * :ref:`Step-CMake` * :ref:`Step-VisualCxx` (``VC<...>``, ``VS<...>``, ``VCExpress9``, ``MsBuild<...``) * :ref:`Step-Cppcheck` * :ref:`Step-Robocopy` * :ref:`Step-Test` * :ref:`Step-TreeSize` * :ref:`Step-PerlModuleTest` * :ref:`Step-SubunitShellCommand` * :ref:`Step-HLint` * :ref:`Step-MaxQ` * :ref:`Step-Trigger` - 
triggering other builds * **Python build steps** - used to perform Python-related build operations * :ref:`Step-BuildEPYDoc` * :ref:`Step-PyFlakes` * :ref:`Step-Sphinx` * :ref:`Step-PyLint` * :ref:`Step-Trial` * **Debian build steps** - used to build ``deb`` packages * :ref:`Step-DebPbuilder`, DebCowBuilder * :ref:`Step-DebLintian` * **RPM build steps** - used to build ``rpm`` packages * :ref:`Step-RpmBuild` * :ref:`Step-RpmLint` * :ref:`Step-MockBuildSRPM` * :ref:`Step-MockRebuild` * :ref:`Step-FileTransfer` - used to perform file transfer operations * FileUpload * FileDownload * DirectoryUpload * MultipleFileUpload * StringDownload * JSONStringDownload * JSONPropertiesDownload * :ref:`Step-HTTPStep` - used to perform HTTP requests * HTTPStep * POST * GET * PUT * DELETE * HEAD * OPTIONS * :ref:`Worker-Filesystem-Steps` - used to perform filesystem operations on the worker * FileExists * CopyDirectory * RemoveDirectory * MakeDirectory * **Master steps** - used to perform operations on the build master * :ref:`Step-MasterShellCommand` * :ref:`Step-SetProperty` * :ref:`Step-SetProperties` * :ref:`Step-SetPropertyFromCommand` * :ref:`Step-SetPropertiesFromEnv` * :ref:`Step-LogRenderable` - used to log a renderable property for debugging * :ref:`Step-Assert` - used to terminate build depending on condition buildbot-3.4.0/master/docs/manual/configuration/steps/log_renderable.rst000066400000000000000000000004351413250514000265220ustar00rootroot00000000000000.. bb:step:: LogRenderable .. _Step-LogRenderable: LogRenderable +++++++++++++ .. py:class:: buildbot.steps.master.LogRenderable This build step takes content which can be renderable and logs it in a pretty-printed format. It can be useful for debugging properties during a build. buildbot-3.4.0/master/docs/manual/configuration/steps/master_shell_command.rst000066400000000000000000000043631413250514000277420ustar00rootroot00000000000000.. bb:step:: MasterShellCommand .. 
_Step-MasterShellCommand: MasterShellCommand ++++++++++++++++++ .. py:class:: buildbot.steps.master.MasterShellCommand Occasionally, it is useful to execute some task on the master, for example to create a directory, deploy a build result, or trigger some other centralized processing. This is possible, in a limited fashion, with the :bb:step:`MasterShellCommand` step. This step operates similarly to a regular :bb:step:`ShellCommand`, but executes on the master, instead of the worker. To be clear, the enclosing :class:`Build` object must still have a worker object, just as for any other step -- only, in this step, the worker does not do anything. In the following example, the step renames a tarball based on the day of the week. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.FileUpload(workersrc="widgetsoft.tar.gz", masterdest="/var/buildoutputs/widgetsoft-new.tar.gz")) f.addStep(steps.MasterShellCommand( command="mv widgetsoft-new.tar.gz widgetsoft-`date +%a`.tar.gz", workdir="/var/buildoutputs")) .. note:: By default, this step passes a copy of the buildmaster's environment variables to the subprocess. To pass an explicit environment instead, add an ``env={..}`` argument. Environment variables constructed using the ``env`` argument support expansion so that if you just want to prepend :file:`/home/buildbot/bin` to the :envvar:`PATH` environment variable, you can do it by putting the value ``${PATH}`` at the end of the value like in the example below. Variables that don't exist on the master will be replaced by ``""``. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.MasterShellCommand( command=["make", "www"], env={'PATH': ["/home/buildbot/bin", "${PATH}"]})) Note that environment values must be strings (or lists that are turned into strings). In particular, numeric properties such as ``buildnumber`` must be substituted using :ref:`Interpolate`. 
``workdir`` (optional) The directory from which the command will be run. ``interruptSignal`` (optional) Signal to use to end the process if the step is interrupted. buildbot-3.4.0/master/docs/manual/configuration/steps/maxq.rst000066400000000000000000000007371413250514000245310ustar00rootroot00000000000000.. bb:step:: MaxQ .. _Step-MaxQ: MaxQ ++++ MaxQ (http://maxq.tigris.org/) is a web testing tool that allows you to record HTTP sessions and play them back. The :bb:step:`MaxQ` step runs this framework. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.MaxQ(testdir='tests/')) The single argument, ``testdir``, specifies where the tests should be run. This directory will be passed to the ``run_maxq.py`` command, and the results analyzed. buildbot-3.4.0/master/docs/manual/configuration/steps/mock_build_srpm.rst000066400000000000000000000022511413250514000267250ustar00rootroot00000000000000.. bb:step:: MockBuildSRPM .. _Step-MockBuildSRPM: MockBuildSRPM Step ++++++++++++++++++ The :bb:step:`MockBuildSRPM` step builds a SourceRPM based on a spec file and optionally a source directory: Mock (http://fedoraproject.org/wiki/Projects/Mock) creates chroots and builds packages in them. It populates the changeroot with a basic system and the packages listed as build requirement. The type of chroot to build is specified with the ``root`` parameter. To use mock your Buildbot user must be added to the ``mock`` group. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.MockBuildSRPM(root='default', spec='mypkg.spec')) The step takes the following parameters ``root`` Use chroot configuration defined in ``/etc/mock/.cfg``. ``resultdir`` The directory where the logfiles and the SourceRPM are written to. ``spec`` Build the SourceRPM from this spec file. ``sources`` Path to the directory containing the sources, defaulting to ``.``. .. 
note:: It is necessary to pass the ``resultdir`` parameter to let the master watch for (and display) changes to :file:`build.log`, :file:`root.log`, and :file:`state.log`. buildbot-3.4.0/master/docs/manual/configuration/steps/mock_rebuild.rst000066400000000000000000000020311413250514000262070ustar00rootroot00000000000000.. bb:step:: MockRebuild .. _Step-MockRebuild: MockRebuild +++++++++++ The :bb:step:`MockRebuild` step rebuilds a SourceRPM package: Mock (http://fedoraproject.org/wiki/Projects/Mock) creates chroots and builds packages in them. It populates the changeroot with a basic system and the packages listed as build requirement. The type of chroot to build is specified with the ``root`` parameter. To use mock your Buildbot user must be added to the ``mock`` group. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.MockRebuild(root='default', srpm='mypkg-1.0-1.src.rpm')) The step takes the following parameters ``root`` Uses chroot configuration defined in ``/etc/mock/.cfg``. ``resultdir`` The directory where the logfiles and the SourceRPM are written to. ``srpm`` The path to the SourceRPM to rebuild. .. note:: It is necessary to pass the ``resultdir`` parameter to let the master watch for (and display) changes to :file:`build.log`, :file:`root.log`, and :file:`state.log`. buildbot-3.4.0/master/docs/manual/configuration/steps/perl_module_test.rst000066400000000000000000000010721413250514000271220ustar00rootroot00000000000000.. bb:step:: PerlModuleTest .. _Step-PerlModuleTest: PerlModuleTest ++++++++++++++ .. code-block:: python from buildbot.plugins import steps f.addStep(steps.PerlModuleTest()) This is a simple command that knows how to run tests of perl modules. It parses the output to determine the number of tests passed and failed and total number executed, saving the results for later query. The command is ``prove --lib lib -r t``, although this can be overridden with the ``command`` argument. 
All other arguments are identical to those for :bb:step:`ShellCommand`. buildbot-3.4.0/master/docs/manual/configuration/steps/pyflakes.rst000066400000000000000000000016711413250514000253770ustar00rootroot00000000000000.. bb:step:: PyFlakes .. _Step-PyFlakes: PyFlakes ++++++++ .. py:class:: buildbot.steps.python.PyFlakes `PyFlakes `_ is a tool to perform basic static analysis of Python code to look for simple errors, like missing imports and references of undefined names. It is like a fast and simple form of the C :command:`lint` program. Other tools (like `pychecker `_\) provide more detailed results but take longer to run. The :bb:step:`PyFlakes` step will run pyflakes and count the various kinds of errors and warnings it detects. You must supply the command line to be used. The default is ``make pyflakes``, which assumes you have a top-level :file:`Makefile` with a ``pyflakes`` target. You might want to use something like ``pyflakes .`` or ``pyflakes src``. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.PyFlakes(command=["pyflakes", "src"])) buildbot-3.4.0/master/docs/manual/configuration/steps/pylint.rst000066400000000000000000000007571413250514000251040ustar00rootroot00000000000000.. bb:step:: PyLint .. _Step-PyLint: PyLint ++++++ Similarly, the :bb:step:`PyLint` step will run :command:`pylint` and analyze the results. You must supply the command line to be used. There is no default. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.PyLint(command=["pylint", "src"])) This step takes the following arguments: ``store_results`` (Optional, defaults to ``True``) If ``True``, the test results will be stored in the test database. buildbot-3.4.0/master/docs/manual/configuration/steps/remove_pycs.rst000066400000000000000000000010541413250514000261070ustar00rootroot00000000000000.. bb:step:: RemovePYCs .. _Step-RemovePYCs: RemovePYCs ++++++++++ .. 
py:class:: buildbot.steps.python_twisted.RemovePYCs This is a simple built-in step that will remove ``.pyc`` files from the workdir. This is useful in builds that update their source (and thus do not automatically delete ``.pyc`` files) but where some part of the build process is dynamically searching for Python modules. Notably, trial has a bad habit of finding old test modules. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.RemovePYCs()) buildbot-3.4.0/master/docs/manual/configuration/steps/robocopy.rst000066400000000000000000000040561413250514000254150ustar00rootroot00000000000000.. bb:step:: Robocopy .. _Step-Robocopy: Robocopy ++++++++ .. py:class:: buildbot.steps.mswin.Robocopy This step runs ``robocopy`` on Windows. `Robocopy `_ is available in versions of Windows starting with Windows Vista and Windows Server 2008. For previous versions of Windows, it's available as part of the `Windows Server 2003 Resource Kit Tools `_. .. code-block:: python from buildbot.plugins import steps, util f.addStep( steps.Robocopy( name='deploy_binaries', description='Deploying binaries...', descriptionDone='Deployed binaries.', source=util.Interpolate('Build\\Bin\\%(prop:configuration)s'), destination=util.Interpolate('%(prop:deploy_dir)\\Bin\\%(prop:configuration)s'), mirror=True ) ) Available constructor arguments are: ``source`` The path to the source directory (mandatory). ``destination`` The path to the destination directory (mandatory). ``files`` An array of file names or patterns to copy. ``recursive`` Copy files and directories recursively (``/E`` parameter). ``mirror`` Mirror the source directory in the destination directory, including removing files that don't exist anymore (``/MIR`` parameter). ``move`` Delete the source directory after the copy is complete (``/MOVE`` parameter). ``exclude_files`` An array of file names or patterns to exclude from the copy (``/XF`` parameter). 
``exclude_dirs`` An array of directory names or patterns to exclude from the copy (``/XD`` parameter). ``custom_opts`` An array of custom parameters to pass directly to the ``robocopy`` command. ``verbose`` Whether to output verbose information (``/V /TS /FP`` parameters). Note that parameters ``/TEE /NP`` will always be appended to the command to signify, respectively, to output logging to the console, use Unicode logging, and not print any percentage progress information for each file. buildbot-3.4.0/master/docs/manual/configuration/steps/rpm_build.rst000066400000000000000000000022011413250514000255240ustar00rootroot00000000000000.. bb:step:: RpmBuild .. _Step-RpmBuild: RpmBuild ++++++++ The :bb:step:`RpmBuild` step builds RPMs based on a spec file: .. code-block:: python from buildbot.plugins import steps f.addStep(steps.RpmBuild(specfile="proj.spec", dist='.el5')) The step takes the following parameters ``specfile`` The ``.spec`` file to build from ``topdir`` Definition for ``_topdir``, defaulting to the workdir. ``builddir`` Definition for ``_builddir``, defaulting to the workdir. ``rpmdir`` Definition for ``_rpmdir``, defaulting to the workdir. ``sourcedir`` Definition for ``_sourcedir``, defaulting to the workdir. ``srcrpmdir`` Definition for ``_srcrpmdir``, defaulting to the workdir. ``dist`` Distribution to build, used as the definition for ``_dist``. ``define`` A dictionary of additional definitions to declare. ``autoRelease`` If true, use the auto-release mechanics. ``vcsRevision`` If true, use the version-control revision mechanics. This uses the ``got_revision`` property to determine the revision and define ``_revision``. Note that this will not work with multi-codebase builds. buildbot-3.4.0/master/docs/manual/configuration/steps/rpm_lint.rst000066400000000000000000000010201413250514000253710ustar00rootroot00000000000000.. bb:step:: RpmLint .. 
_Step-RpmLint: RpmLint +++++++ The :bb:step:`RpmLint` step checks for common problems in RPM packages or spec files: .. code-block:: python from buildbot.plugins import steps f.addStep(steps.RpmLint()) The step takes the following parameters ``fileloc`` The file or directory to check. In case of a directory, it is recursively searched for RPMs and spec files to check. ``config`` Path to a rpmlint config file. This is passed as the user configuration file if present. buildbot-3.4.0/master/docs/manual/configuration/steps/set_properties.rst000066400000000000000000000037551413250514000266350ustar00rootroot00000000000000.. bb:step:: SetProperties .. _Step-SetProperties: SetProperties +++++++++++++ .. py:class:: buildbot.steps.master.SetProperties :bb:step:`SetProperties` takes a dictionary to be turned into build properties. It is similar to :bb:step:`SetProperty`, and meant to be used with a :ref:`renderer` function or a dictionary of :ref:`Interpolate` objects which allows the value to be built from other property values: .. 
code-block:: python """Example borrowed from Julia's master.cfg https://github.com/staticfloat/julia-buildbot (MIT)""" from buildbot.plugins import * @util.renderer def compute_artifact_filename(props): # Get the output of the `make print-BINARYDIST_FILENAME` step reported_filename = props.getProperty('artifact_filename') # First, see if we got a BINARYDIST_FILENAME output if reported_filename[:26] == "BINARYDIST_FILENAME=": local_filename = util.Interpolate(reported_filename[26:].strip() + "%(prop:os_pkg_ext)s") else: # If not, use non-sf/consistent_distnames naming if is_mac(props): template = \ "path/to/Julia-%(prop:version)s-%(prop:shortcommit)s.%(prop:os_pkg_ext)s" elif is_winnt(props): template = \ "julia-%(prop:version)s-%(prop:tar_arch)s.%(prop:os_pkg_ext)s" else: template = \ "julia-%(prop:shortcommit)s-Linux-%(prop:tar_arch)s.%(prop:os_pkg_ext)s" local_filename = util.Interpolate(template) # upload_filename always follows sf/consistent_distname rules upload_filename = util.Interpolate( "julia-%(prop:shortcommit)s-%(prop:os_name)s%(prop:bits)s.%(prop:os_pkg_ext)s") return { "local_filename": local_filename "upload_filename": upload_filename } f1.addStep(steps.SetProperties(properties=compute_artifact_filename)) buildbot-3.4.0/master/docs/manual/configuration/steps/set_properties_from_env.rst000066400000000000000000000026771413250514000305320ustar00rootroot00000000000000.. bb:step:: SetPropertiesFromEnv .. py:class:: buildbot.steps.worker.SetPropertiesFromEnv .. _Step-SetPropertiesFromEnv: SetPropertiesFromEnv ++++++++++++++++++++ Buildbot workers (later than version 0.8.3) provide their environment variables to the master on connect. These can be copied into Buildbot properties with the :bb:step:`SetPropertiesFromEnv` step. Pass a variable or list of variables in the ``variables`` parameter, then simply use the values as properties in a later step. 
Note that on Windows, environment variables are case-insensitive, but Buildbot property names are case sensitive. The property will have exactly the variable name you specify, even if the underlying environment variable is capitalized differently. If, for example, you use ``variables=['Tmp']``, the result will be a property named ``Tmp``, even though the environment variable is displayed as :envvar:`TMP` in the Windows GUI. .. code-block:: python from buildbot.plugins import steps, util f.addStep(steps.SetPropertiesFromEnv(variables=["SOME_JAVA_LIB_HOME", "JAVAC"])) f.addStep(steps.Compile(commands=[util.Interpolate("%(prop:JAVAC)s"), "-cp", util.Interpolate("%(prop:SOME_JAVA_LIB_HOME)s")])) Note that this step requires that the worker be at least version 0.8.3. For previous versions, no environment variables are available (the worker environment will appear to be empty). buildbot-3.4.0/master/docs/manual/configuration/steps/set_property.rst000066400000000000000000000012421413250514000263120ustar00rootroot00000000000000.. bb:step:: SetProperty .. _Step-SetProperty: SetProperty +++++++++++ .. py:class:: buildbot.steps.master.SetProperty :bb:step:`SetProperty` takes two arguments of ``property`` and ``value`` where the ``value`` is to be assigned to the ``property`` key. It is usually called with the ``value`` argument being specified as an :ref:`Interpolate` object which allows the value to be built from other property values: .. code-block:: python from buildbot.plugins import steps, util f.addStep( steps.SetProperty( property="SomeProperty", value=util.Interpolate("sch=%(prop:scheduler)s, worker=%(prop:workername)s") ) ) buildbot-3.4.0/master/docs/manual/configuration/steps/set_property_from_command.rst000066400000000000000000000050201413250514000310310ustar00rootroot00000000000000.. bb:step:: SetPropertyFromCommand .. _Step-SetPropertyFromCommand: SetPropertyFromCommand ++++++++++++++++++++++ .. py:class:: buildbot.steps.shell.SetPropertyFromCommand .. 
note:: This step is being migrated to :ref:`new-style`. A new-style equivalent is provided as ``SetPropertyFromCommand``. This should be inherited by any custom steps until :ref:`Buildbot 3.0 is released<3.0_Upgrading>`. Regular uses without inheritance are not affected. This buildstep is similar to :bb:step:`ShellCommand`, except that it captures the output of the command into a property. It is usually used like this: .. code-block:: python from buildbot.plugins import steps f.addStep(steps.SetPropertyFromCommand(command="uname -a", property="uname")) This runs ``uname -a`` and captures its stdout, stripped of leading and trailing whitespace, in the property ``uname``. To avoid stripping, add ``strip=False``. The ``property`` argument can be specified as an :ref:`Interpolate` object, allowing the property name to be built from other property values. Passing ``includeStdout=False`` (defaults to ``True``) stops capture from stdout. Passing ``includeStderr=True`` (defaults to ``False``) allows capture from stderr. The more advanced usage allows you to specify a function to extract properties from the command output. Here you can use regular expressions, string interpolation, or whatever you would like. In this form, :func:`extract_fn` should be passed, and not :class:`Property`. The :func:`extract_fn` function is called with three arguments: the exit status of the command, its standard output as a string, and its standard error as a string. It should return a dictionary containing all new properties. Note that passing in :func:`extract_fn` will set ``includeStderr`` to ``True``. .. code-block:: python def glob2list(rc, stdout, stderr): jpgs = [l.strip() for l in stdout.split('\n')] return {'jpgs': jpgs} f.addStep(SetPropertyFromCommand(command="ls -1 *.jpg", extract_fn=glob2list)) Note that any ordering relationship of the contents of stdout and stderr is lost. For example, given: .. 
code-block:: python f.addStep(SetPropertyFromCommand( command="echo output1; echo error >&2; echo output2", extract_fn=my_extract)) Then ``my_extract`` will see ``stdout="output1\noutput2\n"`` and ``stderr="error\n"``. Avoid using the ``extract_fn`` form of this step with commands that produce a great deal of output, as the output is buffered in memory until complete. buildbot-3.4.0/master/docs/manual/configuration/steps/shell_command.rst000066400000000000000000000241141413250514000263630ustar00rootroot00000000000000.. bb:step:: ShellCommand .. _Step-ShellCommand: ShellCommand ------------ Most interesting steps involve executing a process of some sort on the worker. The :bb:step:`ShellCommand` class handles this activity. Several subclasses of :bb:step:`ShellCommand` are provided as starting points for common build steps. Using ShellCommands +++++++++++++++++++ .. py:class:: buildbot.steps.shell.ShellCommand This is a useful base class for just about everything you might want to do during a build (except for the initial source checkout). It runs a single command in a child shell on the worker. All stdout/stderr is recorded into a :class:`LogFile`. The step usually finishes with a status of ``FAILURE`` if the command's exit code is non-zero, otherwise it has a status of ``SUCCESS``. The preferred way to specify the command is with a list of argv strings, since this allows for spaces in filenames and avoids doing any fragile shell-escaping. You can also specify the command with a single string, in which case the string is given to :samp:`/bin/sh -c {COMMAND}` for parsing. On Windows, commands are run via ``cmd.exe /c`` which works well. However, if you're running a batch file, the error level does not get propagated correctly unless you add 'call' before your batch file's name: ``cmd=['call', 'myfile.bat', ...]``. The :bb:step:`ShellCommand` arguments are: ``command`` A list of strings (preferred) or single string (discouraged) which specifies the command to be run. 
A list of strings is preferred because it can be used directly as an argv array. Using a single string (with embedded spaces) requires the worker to pass the string to :command:`/bin/sh` for interpretation, which raises all sorts of difficult questions about how to escape or interpret shell metacharacters. If ``command`` contains nested lists (for example, from a properties substitution), then that list will be flattened before it is executed. ``workdir`` All :class:`ShellCommand`\s are run by default in the ``workdir``, which defaults to the :file:`build` subdirectory of the worker builder's base directory. The absolute path of the workdir will thus be the worker's basedir (set as an option to ``buildbot-worker create-worker``, :ref:`Creating-a-worker`), plus the builder's basedir (set in the builder's ``builddir`` key in :file:`master.cfg`), plus the workdir itself (a class-level attribute of the BuildFactory, defaults to :file:`build`). For example: .. code-block:: python from buildbot.plugins import steps f.addStep(steps.ShellCommand(command=["make", "test"], workdir="build/tests")) ``env`` A dictionary of environment strings which will be added to the child command's environment. For example, to run tests with a different i18n language setting, you might use: .. code-block:: python from buildbot.plugins import steps f.addStep(steps.ShellCommand(command=["make", "test"], env={'LANG': 'fr_FR'})) These variable settings will override any existing ones in the worker's environment or the environment specified in the :class:`Builder`. The exception is :envvar:`PYTHONPATH`, which is merged with (actually prepended to) any existing :envvar:`PYTHONPATH` setting. The following example will prepend :file:`/home/buildbot/lib/python` to any existing :envvar:`PYTHONPATH`: .. 
code-block:: python from buildbot.plugins import steps f.addStep(steps.ShellCommand( command=["make", "test"], env={'PYTHONPATH': "/home/buildbot/lib/python"})) To avoid the need of concatenating paths together in the master config file, if the value is a list, it will be joined together using the right platform dependent separator. Those variables support expansion so that if you just want to prepend :file:`/home/buildbot/bin` to the :envvar:`PATH` environment variable, you can do it by putting the value ``${PATH}`` at the end of the value like in the example below. Variables that don't exist on the worker will be replaced by ``""``. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.ShellCommand( command=["make", "test"], env={'PATH': ["/home/buildbot/bin", "${PATH}"]})) Note that environment values must be strings (or lists that are turned into strings). In particular, numeric properties such as ``buildnumber`` must be substituted using :ref:`Interpolate`. ``want_stdout`` If ``False``, stdout from the child process is discarded rather than being sent to the buildmaster for inclusion in the step's :class:`LogFile`. ``want_stderr`` Like ``want_stdout`` but for :file:`stderr`. Note that commands that run through a PTY do not have separate :file:`stdout`/:file:`stderr` streams, and both are merged into :file:`stdout`. ``usePTY`` If ``True``, this command will be run in a ``pty`` (defaults to ``False``). This option is not available on Windows. In general, you do not want to use a pseudo-terminal. This is *only* useful for running commands that require a terminal - for example, testing a command-line application that will only accept passwords read from a terminal. Using a pseudo-terminal brings lots of compatibility problems, and prevents Buildbot from distinguishing the standard error (red) and standard output (black) streams. 
In previous versions, the advantage of using a pseudo-terminal was that ``grandchild`` processes were more likely to be cleaned up if the build was interrupted or it timed out. This occurred because using a pseudo-terminal incidentally puts the command into its own process group. As of Buildbot-0.8.4, all commands are placed in process groups, and thus grandchild processes will be cleaned up properly. ``logfiles`` Sometimes commands will log interesting data to a local file, rather than emitting everything to stdout or stderr. For example, Twisted's :command:`trial` command (which runs unit tests) only presents summary information to stdout, and puts the rest into a file named :file:`_trial_temp/test.log`. It is often useful to watch these files as the command runs, rather than using :command:`/bin/cat` to dump their contents afterwards. The ``logfiles=`` argument allows you to collect data from these secondary logfiles in near-real-time, as the step is running. It accepts a dictionary which maps from a local Log name (which is how the log data is presented in the build results) to either a remote filename (interpreted relative to the build's working directory), or a dictionary of options. Each named file will be polled on a regular basis (every couple of seconds) as the build runs, and any new text will be sent over to the buildmaster. If you provide a dictionary of options instead of a string, you must specify the ``filename`` key. You can optionally provide a ``follow`` key which is a boolean controlling whether a logfile is followed or concatenated in its entirety. Following is appropriate for logfiles to which the build step will append, where the pre-existing contents are not interesting. The default value for ``follow`` is ``False``, which gives the same behavior as just providing a string filename. .. 
code-block:: python from buildbot.plugins import steps f.addStep(steps.ShellCommand( command=["make", "test"], logfiles={"triallog": "_trial_temp/test.log"})) The above example will add a log named 'triallog' on the master, based on :file:`_trial_temp/test.log` on the worker. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.ShellCommand(command=["make", "test"], logfiles={ "triallog": { "filename": "_trial_temp/test.log", "follow": True } })) ``lazylogfiles`` If set to ``True``, logfiles will be tracked lazily, meaning that they will only be added when and if something is written to them. This can be used to suppress the display of empty or missing log files. The default is ``False``. ``timeout`` If the command fails to produce any output for this many seconds, it is assumed to be locked up and will be killed. This defaults to 1200 seconds. Pass ``None`` to disable. ``maxTime`` If the command takes longer than this many seconds, it will be killed. This is disabled by default. ``logEnviron`` If ``True`` (the default), then the step's logfile will describe the environment variables on the worker. In situations where the environment is not relevant and is long, it may be easier to set it to ``False``. ``interruptSignal`` This is the signal (specified by name) that should be sent to the process when the command needs to be interrupted (either by the buildmaster, a timeout, etc.). By default, this is "KILL" (9). Specify "TERM" (15) to give the process a chance to cleanup. This functionality requires a version 0.8.6 worker or newer. ``sigtermTime`` If set, when interrupting, try to kill the command with SIGTERM and wait for sigtermTime seconds before firing ``interuptSignal``. If None, ``interruptSignal`` will be fired immediately upon interrupt. ``initialStdin`` If the command expects input on stdin, the input can be supplied as a string with this parameter. 
This value should not be excessively large, as it is handled as a single string throughout Buildbot -- for example, do not pass the contents of a tarball with this parameter. ``decodeRC`` This is a dictionary that decodes exit codes into results value. For example, ``{0:SUCCESS,1:FAILURE,2:WARNINGS}`` will treat the exit code ``2`` as ``WARNINGS``. The default (``{0:SUCCESS}``) is to treat just 0 as successful. Any exit code not present in the dictionary will be treated as ``FAILURE``. buildbot-3.4.0/master/docs/manual/configuration/steps/shell_sequence.rst000066400000000000000000000076111413250514000265600ustar00rootroot00000000000000.. bb:step:: ShellSequence .. _Step-ShellSequence: Shell Sequence ++++++++++++++ Some steps have a specific purpose, but require multiple shell commands to implement them. For example, a build is often ``configure; make; make install``. We have two ways to handle that: * Create one shell command with all these. To put the logs of each commands in separate logfiles, we need to re-write the script as ``configure 1> configure_log; ...`` and to add these ``configure_log`` files as ``logfiles`` argument of the buildstep. This has the drawback of complicating the shell script, and making it harder to maintain as the logfile name is put in different places. * Create three :bb:step:`ShellCommand` instances, but this loads the build UI unnecessarily. :bb:step:`ShellSequence` is a class that executes not one but a sequence of shell commands during a build. It takes as argument a renderable, or list of commands which are :class:`~buildbot.steps.shellsequence.ShellArg` objects. Each such object represents a shell invocation. The single :bb:step:`ShellSequence` argument aside from the common parameters is: ``commands`` A list of :class:`~buildbot.steps.shellsequence.ShellArg` objects or a renderable that returns a list of :class:`~buildbot.steps.shellsequence.ShellArg` objects. .. 
code-block:: python from buildbot.plugins import steps, util f.addStep(steps.ShellSequence( commands=[ util.ShellArg(command=['configure']), util.ShellArg(command=['make'], logname='make'), util.ShellArg(command=['make', 'check_warning'], logname='warning', warnOnFailure=True), util.ShellArg(command=['make', 'install'], logname='make install') ])) All these commands share the same configuration of ``environment``, ``workdir`` and ``pty`` usage that can be set up the same way as in :bb:step:`ShellCommand`. .. py:class:: buildbot.steps.shellsequence.ShellArg(self, command=None, logname=None, haltOnFailure=False, flunkOnWarnings=False, flunkOnFailure=False, warnOnWarnings=False, warnOnFailure=False) :param command: (see the :bb:step:`ShellCommand` ``command`` argument), :param logname: optional log name, used as the stdio log of the command The ``haltOnFailure``, ``flunkOnWarnings``, ``flunkOnFailure``, ``warnOnWarnings``, ``warnOnFailure`` parameters drive the execution of the sequence, the same way steps are scheduled in the build. They have the same default values as for buildsteps - see :ref:`Buildstep-Common-Parameters`. Any of the arguments to this class can be renderable. Note that if ``logname`` name does not start with the prefix ``stdio``, that prefix will be set like ``stdio ``. If no ``logname`` is supplied, the output of the command will not be collected. The two :bb:step:`ShellSequence` methods below tune the behavior of how the list of shell commands are executed, and can be overridden in subclasses. .. py:class:: buildbot.steps.shellsequence.ShellSequence .. py:method:: shouldRunTheCommand(oneCmd) :param oneCmd: a string or a list of strings, as rendered from a :py:class:`~buildbot.steps.shellsequence.ShellArg` instance's ``command`` argument. Determine whether the command ``oneCmd`` should be executed. If ``shouldRunTheCommand`` returns ``False``, the result of the command will be recorded as SKIPPED. 
The default method skips all empty strings and empty lists. .. py:method:: getFinalState() Return the status text of the step in the end. The default value is to set the text describing the execution of the last shell command. .. py:method:: runShellSequence(commands): :param commands: list of shell args This method actually runs the shell sequence. The default ``run`` method calls ``runShellSequence``, but subclasses can override ``run`` to perform other operations, if desired. buildbot-3.4.0/master/docs/manual/configuration/steps/source_bzr.rst000066400000000000000000000043101413250514000257270ustar00rootroot00000000000000.. bb:step:: Bzr .. _Step-Bzr: Bzr +++ .. py:class:: buildbot.steps.source.bzr.Bzr `bzr `_ is a descendant of Arch/Baz, and is frequently referred to as simply `Bazaar`. The repository-vs-workspace model is similar to Darcs, but it uses a strictly linear sequence of revisions (one history per branch) like Arch. Branches are put in subdirectories. This makes it look very much like Mercurial. .. code-block:: python from buildbot.plugins import steps factory.addStep(steps.Bzr(mode='incremental', repourl='lp:~knielsen/maria/tmp-buildbot-test')) The step takes the following arguments: ``repourl`` (required unless ``baseURL`` is provided): the URL at which the Bzr source repository is available. ``baseURL`` (required unless ``repourl`` is provided): the base repository URL, to which a branch name will be appended. It should probably end in a slash. ``defaultBranch`` (allowed if and only if ``baseURL`` is provided): this specifies the name of the branch to use when a Build does not provide one of its own. This will be appended to ``baseURL`` to create the string that will be passed to the ``bzr checkout`` command. If ``alwaysUseLatest`` is ``True`` then the branch and revision information that comes with the Build is ignored and branch specified in this parameter is used. ``mode`` ``method`` No method is needed for incremental mode. 
For full mode, ``method`` can take the values shown below. If no value is given, it defaults to ``fresh``. ``clobber`` This specifies to remove the ``workdir`` and make a full checkout. ``fresh`` This method first runs ``bzr clean-tree`` to remove all the unversioned files then ``update`` the repo. This remove all unversioned files including those in .bzrignore. ``clean`` This is same as fresh except that it doesn't remove the files mentioned in :file:`.bzrginore` i.e, by running ``bzr clean-tree --ignore``. ``copy`` A local bzr repository is maintained and the repo is copied to ``build`` directory for each build. Before each build the local bzr repo is updated then copied to ``build`` for next steps. buildbot-3.4.0/master/docs/manual/configuration/steps/source_common.rst000066400000000000000000000101611413250514000264230ustar00rootroot00000000000000.. _Step-Source-Common: Common Parameters of source checkout operations +++++++++++++++++++++++++++++++++++++++++++++++ All source checkout steps accept some common parameters to control how they get the sources and where they should be placed. The remaining per-VC-system parameters are mostly to specify where exactly the sources are coming from. ``mode`` ``method`` These two parameters specify the means by which the source is checked out. ``mode`` specifies the type of checkout and ``method`` tells about the way to implement it. .. code-block:: python from buildbot.plugins import steps factory = BuildFactory() factory.addStep(steps.Mercurial(repourl='path/to/repo', mode='full', method='fresh')) The ``mode`` parameter a string describing the kind of VC operation that is desired (defaults to ``incremental``). The options are: ``incremental`` Update the source to the desired revision, but do not remove any other files generated by previous builds. This allows compilers to take advantage of object files from previous builds. This mode is exactly same as the old ``update`` mode. 
``full`` Update the source, but delete remnants of previous builds. Build steps that follow will need to regenerate all object files. Methods are specific to the VC system in question, as they may take advantage of special behaviors in that VC system that can make checkouts more efficient or reliable. ``workdir`` Like all Steps, this indicates the directory where the build will take place. Source Steps are special in that they perform some operations outside of the workdir (like creating the workdir itself). ``alwaysUseLatest`` If True, bypass the usual behavior of checking out the revision in the source stamp, and always update to the latest revision in the repository instead. If the specific VC system supports branches and a specific branch is specified in the step parameters via ``branch`` or ``defaultBranch``, then the latest revision on that branch is checked out. ``retry`` If set, this specifies a tuple of ``(delay, repeats)`` which means that when a full VC checkout fails, it should be retried up to ``repeats`` times, waiting ``delay`` seconds between the attempts. If you don't provide this, it defaults to ``None``, which means VC operations should not be retried. This is provided to make life easier for workers which are stuck behind poor network connections. ``repository`` The name of this parameter might vary depending on the Source step you are running. The concept explained here is common to all steps and applies to ``repourl`` as well as for ``baseURL`` (when applicable). A common idiom is to pass ``Property('repository', 'url://default/repo/path')`` as repository. This grabs the repository from the source stamp of the build. This can be a security issue, if you allow force builds from the web, or have the :class:`WebStatus` change hooks enabled; as the worker will download code from an arbitrary repository. ``codebase`` This specifies which codebase the source step should use to select the right source stamp. The default codebase value is ``''``. 
The codebase must correspond to a codebase assigned by the :bb:cfg:`codebaseGenerator`. If there is no codebaseGenerator defined in the master, then codebase doesn't need to be set; the default value will match all changes. ``timeout`` Specifies the timeout for worker-side operations, in seconds. If your repositories are particularly large, then you may need to increase this value from the default of 1200 (20 minutes). ``logEnviron`` If this option is true (the default), then the step's logfile will describe the environment variables on the worker. In situations where the environment is not relevant and is long, it may be easier to set ``logEnviron=False``. ``env`` A dictionary of environment strings which will be added to the child command's environment. The usual property interpolations can be used in environment variable names and values - see :ref:`Properties`. buildbot-3.4.0/master/docs/manual/configuration/steps/source_cvs.rst000066400000000000000000000046101413250514000257300ustar00rootroot00000000000000.. bb:step:: CVS .. _Step-CVS: CVS +++ .. py:class:: buildbot.steps.source.cvs.CVS The :bb:step:`CVS` build step performs a `CVS `_ checkout or update. .. code-block:: python from buildbot.plugins import steps factory.addStep(steps.CVS(mode='incremental', cvsroot=':pserver:me@cvs.example.net:/cvsroot/myproj', cvsmodule='buildbot')) This step takes the following arguments: ``cvsroot`` (required): specify the CVSROOT value, which points to a CVS repository, probably on a remote machine. For example, if Buildbot was hosted in CVS then the CVSROOT value you would use to get a copy of the Buildbot source code might be ``:pserver:anonymous@cvs.example.net:/cvsroot/buildbot``. ``cvsmodule`` (required): specify the cvs ``module``, which is generally a subdirectory of the :file:`CVSROOT`. The cvsmodule for the Buildbot source code is ``buildbot``. ``branch`` a string which will be used in a ``-r`` argument. This is most useful for specifying a branch to work on. 
Defaults to ``HEAD``. If ``alwaysUseLatest`` is ``True`` then the branch and revision information that comes with the Build is ignored and branch specified in this parameter is used. ``global_options`` a list of flags to be put before the argument ``checkout`` in the CVS command. ``extra_options`` a list of flags to be put after the ``checkout`` in the CVS command. ``mode`` ``method`` No method is needed for incremental mode. For full mode, ``method`` can take the values shown below. If no value is given, it defaults to ``fresh``. ``clobber`` This specifies to remove the ``workdir`` and make a full checkout. ``fresh`` This method first runs ``cvsdisard`` in the build directory, then updates it. This requires ``cvsdiscard`` which is a part of the cvsutil package. ``clean`` This method is the same as ``method='fresh'``, but it runs ``cvsdiscard --ignore`` instead of ``cvsdiscard``. ``copy`` This maintains a ``source`` directory for source, which it updates copies to the build directory. This allows Buildbot to start with a fresh directory, without downloading the entire repository on every build. ``login`` Password to use while performing login to the remote CVS server. Default is ``None`` meaning that no login needs to be performed. buildbot-3.4.0/master/docs/manual/configuration/steps/source_darcs.rst000066400000000000000000000030301413250514000262240ustar00rootroot00000000000000 .. bb:step:: Darcs .. _Step-Darcs: Darcs +++++ .. py:class:: buildbot.steps.source.darcs.Darcs The :bb:step:`Darcs` build step performs a `Darcs `_ checkout or update. .. code-block:: python from buildbot.plugins import steps factory.addStep(steps.Darcs(repourl='http://path/to/repo', mode='full', method='clobber', retry=(10, 1))) Darcs step takes the following arguments: ``repourl`` (required): The URL at which the Darcs source repository is available. ``mode`` (optional): defaults to ``'incremental'``. Specifies whether to clean the build tree or not. 
``incremental`` The source is update, but any built files are left untouched. ``full`` The build tree is clean of any built files. The exact method for doing this is controlled by the ``method`` argument. ``method`` (optional): defaults to ``copy`` when mode is ``full``. Darcs' incremental mode does not require a method. The full mode has two methods defined: ``clobber`` It removes the working directory for each build then makes full checkout. ``copy`` This first checkout source into source directory then copy the ``source`` directory to ``build`` directory then performs the build operation in the copied directory. This way we make fresh builds with very less bandwidth to download source. The behavior of source checkout follows exactly same as incremental. It performs all the incremental checkout behavior in ``source`` directory. buildbot-3.4.0/master/docs/manual/configuration/steps/source_gerrit.rst000066400000000000000000000012511413250514000264270ustar00rootroot00000000000000.. bb:step:: Gerrit .. _Step-Gerrit: Gerrit ++++++ .. py:class:: buildbot.steps.source.gerrit.Gerrit :bb:step:`Gerrit` step is exactly like the :bb:step:`Git` step, except that it integrates with :bb:chsrc:`GerritChangeSource`, and will automatically checkout the additional changes. Gerrit integration can be also triggered using forced build with property named ``gerrit_change`` with values in format ``change_number/patchset_number``. This property will be translated into a branch name. This feature allows integrators to build with several pending interdependent changes, which at the moment cannot be described properly in Gerrit, and can only be described by humans. buildbot-3.4.0/master/docs/manual/configuration/steps/source_git.rst000066400000000000000000000165301413250514000257240ustar00rootroot00000000000000.. bb:step:: Git .. _Step-Git: Git +++ .. 
py:class:: buildbot.steps.source.git.Git The :bb:step:`Git` build step clones or updates a `Git `_ repository and checks out the specified branch or revision. .. note:: Buildbot supports Git version 1.2.0 or later. Earlier versions (such as the one shipped in Ubuntu 'Dapper') do not support the :command:`git init` command that Buildbot uses. .. code-block:: python from buildbot.plugins import steps factory.addStep(steps.Git(repourl='git://path/to/repo', mode='full', method='clobber', submodules=True)) The Git step takes the following arguments: ``repourl`` (required) The URL of the upstream Git repository. ``branch`` (optional) This specifies the name of the branch or the tag to use when a Build does not provide one of its own. If this parameter is not specified, and the Build does not provide a branch, the default branch of the remote repository will be used. If ``alwaysUseLatest`` is ``True`` then the branch and revision information that comes with the Build is ignored and the branch specified in this parameter is used. ``submodules`` (optional, default: ``False``) When initializing/updating a Git repository, this tells Buildbot whether to handle Git submodules. If ``remoteSubmodules`` is ``True``, then this tells Buildbot to use remote submodules: `Git Remote Submodules `_ ``shallow`` (optional) Instructs Git to attempt shallow clones (``--depth 1``). The depth defaults to 1 and can be changed by passing an integer instead of ``True``. This option can be used only in full builds with clobber method. ``reference`` (optional) Use the specified string as a path to a reference repository on the local machine. Git will try to grab objects from this path first instead of the main repository, if they exist. ``origin`` (optional) By default, any clone will use the name "origin" as the remote repository (eg, "origin/master"). This renderable option allows that to be configured to an alternate name. 
``filters`` (optional, type: ``list``) For each string in the passed in list, adds a ``--filter `` argument to :command:`git clone`. This allows for adding filters like ``--filter "tree:0"`` to speed up the clone step. This requires git version 2.27 or higher. ``progress`` (optional) Passes the (``--progress``) flag to (:command:`git fetch`). This solves issues of long fetches being killed due to lack of output, but requires Git 1.7.2 or later. Its value is True on Git 1.7.2 or later. ``retryFetch`` (optional, default: ``False``) If true, if the ``git fetch`` fails, then Buildbot retries to fetch again instead of failing the entire source checkout. ``clobberOnFailure`` (optional, default: ``False``) If a fetch or full clone fails, we can retry to checkout the source by removing everything and cloning the repository. If the retry fails, it fails the source checkout step. ``mode`` (optional, default: ``'incremental'``) Specifies whether to clean the build tree or not. ``incremental`` The source is update, but any built files are left untouched. ``full`` The build tree is clean of any built files. The exact method for doing this is controlled by the ``method`` argument. ``method`` (optional, default: ``fresh`` when mode is ``full``) Git's incremental mode does not require a method. The full mode has four methods defined: ``clobber`` It removes the build directory entirely then makes full clone from repo. This can be slow as it need to clone whole repository. To make faster clones enable the ``shallow`` option. If the shallow option is enabled and the build request has unknown revision value, then this step fails. ``fresh`` This removes all other files except those tracked by Git. First it does :command:`git clean -d -f -f -x`, then fetch/checkout to a specified revision (if any). This option is equal to update mode with ``ignore_ignores=True`` in old steps. ``clean`` All the files which are tracked by Git, as well as listed ignore files, are not deleted. 
All other remaining files will be deleted before the fetch/checkout. This is equivalent to :command:`git clean -d -f -f` then fetch. This is equivalent to ``ignore_ignores=False`` in old steps. ``copy`` This first checks out source into source directory, then copies the ``source`` directory to ``build`` directory, and then performs the build operation in the copied directory. This way, we make fresh builds with very little bandwidth to download source. The behavior of source checkout follows exactly the same as incremental. It performs all the incremental checkout behavior in ``source`` directory. ``getDescription`` (optional) After checkout, invoke a `git describe` on the revision and save the result in a property; the property's name is either ``commit-description`` or ``commit-description-foo``, depending on whether the ``codebase`` argument was also provided. The argument should either be a ``bool`` or ``dict``, and will change how `git describe` is called: * ``getDescription=False``: disables this feature explicitly * ``getDescription=True`` or empty ``dict()``: runs `git describe` with no args * ``getDescription={...}``: a dict with keys named the same as the Git option. Each key's value can be ``False`` or ``None`` to explicitly skip that argument. For the following keys, a value of ``True`` appends the same-named Git argument: * ``all`` : `--all` * ``always``: `--always` * ``contains``: `--contains` * ``debug``: `--debug` * ``long``: `--long`` * ``exact-match``: `--exact-match` * ``tags``: `--tags` * ``dirty``: `--dirty` For the following keys, an integer or string value (depending on what Git expects) will set the argument's parameter appropriately. Examples show the key-value pair: * ``match=foo``: `--match foo` * ``abbrev=7``: `--abbrev=7` * ``candidates=7``: `--candidates=7` * ``dirty=foo``: `--dirty=foo` ``config`` (optional) A dict of Git configuration settings to pass to the remote Git commands. 
``sshPrivateKey`` (optional) The private key to use when running Git for fetch operations. The ssh utility must be in the system path in order to use this option. On Windows, only Git distribution that embeds MINGW has been tested (as of July 2017, the official distribution is MINGW-based). The worker must either have the host in the known hosts file or the host key must be specified via the `sshHostKey` option. ``sshHostKey`` (optional) Specifies public host key to match when authenticating with SSH public key authentication. This may be either a :ref:`Secret` or just a string. `sshPrivateKey` must be specified in order to use this option. The host key must be in the form of ` `, e.g. `ssh-rsa AAAAB3N<...>FAaQ==`. ``sshKnownHosts`` (optional) Specifies the contents of the SSH known_hosts file to match when authenticating with SSH public key authentication. This may be either a :ref:`Secret` or just a string. `sshPrivateKey` must be specified in order to use this option. `sshHostKey` must not be specified in order to use this option. buildbot-3.4.0/master/docs/manual/configuration/steps/source_github.rst000066400000000000000000000020111413250514000264100ustar00rootroot00000000000000.. bb:step:: GitHub .. _Step-GitHub: GitHub ++++++ .. py:class:: buildbot.steps.source.github.GitHub :bb:step:`GitHub` step is exactly like the :bb:step:`Git` step, except that it will ignore the revision sent by the :bb:chsrc:`GitHub` change hook, and rather take the branch if the branch ends with /merge. This allows to test github pull requests merged directly into the mainline. GitHub indeed provides ``refs/origin/pull/NNN/merge`` on top of ``refs/origin/pull/NNN/head`` which is a magic ref that always creates a merge commit to the latest version of the mainline (i.e., the target branch for the pull request). 
The revision in the GitHub event points to ``/head``, and it's important for the GitHub reporter as this is the revision that will be tagged with a CI status when the build is finished. If you want to use :bb:step:`Trigger` to create sub tests and want to have the GitHub reporter still update the original revision, make sure you set ``updateSourceStamp=False`` in the :bb:step:`Trigger` configuration. buildbot-3.4.0/master/docs/manual/configuration/steps/source_gitlab.rst000066400000000000000000000021141413250514000263740ustar00rootroot00000000000000.. bb:step:: GitLab .. _Step-GitLab: GitLab ++++++ .. py:class:: buildbot.steps.source.gitlab.GitLab :bb:step:`GitLab` step is exactly like the :bb:step:`Git` step, except that it uses the source repo and branch sent by the :bb:chsrc:`GitLab` change hook when processing merge requests. When configuring builders, you can use a ChangeFilter with ``category = "push"`` to select normal commits, and ``category = "merge_request"`` to select merge requests. See :file:`master/docs/examples/gitlab.cfg` in the Buildbot distribution for a tutorial example of integrating Buildbot with GitLab. .. note:: Your build worker will need access to the source project of the changeset, or it won't be able to check out the source. This means authenticating the build worker via ssh credentials in the usual way, then granting it access [via a GitLab deploy key or GitLab project membership](https://docs.gitlab.com/ee/ssh/). This needs to be done not only for the main git repo, but also for each fork that wants to be able to submit merge requests against the main repo. buildbot-3.4.0/master/docs/manual/configuration/steps/source_mercurial.rst000066400000000000000000000043461413250514000271260ustar00rootroot00000000000000.. bb:step:: Mercurial .. _Step-Mercurial: Mercurial +++++++++ .. py:class:: buildbot.steps.source.mercurial.Mercurial The :bb:step:`Mercurial` build step performs a `Mercurial `_ (aka ``hg``) checkout or update. 
Branches are available in two modes: ``dirname``, where the name of the branch is a suffix of the name of the repository, or ``inrepo``, which uses Hg's named-branches support. Make sure this setting matches your changehook, if you have that installed. .. code-block:: python from buildbot.plugins import steps factory.addStep(steps.Mercurial(repourl='path/to/repo', mode='full', method='fresh', branchType='inrepo')) The Mercurial step takes the following arguments: ``repourl`` where the Mercurial source repository is available. ``defaultBranch`` this specifies the name of the branch to use when a Build does not provide one of its own. This will be appended to ``repourl`` to create the string that will be passed to the ``hg clone`` command. If ``alwaysUseLatest`` is ``True`` then the branch and revision information that comes with the Build is ignored and branch specified in this parameter is used. ``branchType`` either 'dirname' (default) or 'inrepo' depending on whether the branch name should be appended to the ``repourl`` or the branch is a Mercurial named branch and can be found within the ``repourl``. ``clobberOnBranchChange`` boolean, defaults to ``True``. If set and using inrepos branches, clobber the tree at each branch change. Otherwise, just update to the branch. ``mode`` ``method`` Mercurial's incremental mode does not require a method. The full mode has three methods defined: ``clobber`` It removes the build directory entirely then makes full clone from repo. This can be slow as it need to clone whole repository ``fresh`` This remove all other files except those tracked by VCS. First it does :command:`hg purge --all` then pull/update ``clean`` All the files which are tracked by Mercurial and listed ignore files are not deleted. Remaining all other files will be deleted before pull/update. This is equivalent to :command:`hg purge` then pull/update. 
buildbot-3.4.0/master/docs/manual/configuration/steps/source_monotone.rst000066400000000000000000000055341413250514000270010ustar00rootroot00000000000000.. bb:step:: Monotone .. _Step-Monotone: Monotone ++++++++ .. py:class:: buildbot.steps.source.mtn.Monotone The :bb:step:`Monotone` build step performs a `Monotone `_ checkout or update. .. code-block:: python from buildbot.plugins import steps factory.addStep(steps.Monotone(repourl='http://path/to/repo', mode='full', method='clobber', branch='some.branch.name', retry=(10, 1))) Monotone step takes the following arguments: ``repourl`` the URL at which the Monotone source repository is available. ``branch`` this specifies the name of the branch to use when a Build does not provide one of its own. If ``alwaysUseLatest`` is ``True`` then the branch and revision information that comes with the Build is ignored and branch specified in this parameter is used. ``progress`` this is a boolean that has a pull from the repository use ``--ticker=dot`` instead of the default ``--ticker=none``. ``mode`` (optional): defaults to ``'incremental'``. Specifies whether to clean the build tree or not. In any case, the worker first pulls from the given remote repository to synchronize (or possibly initialize) its local database. The mode and method only affect how the build tree is checked-out or updated from the local database. ``incremental`` The source is update, but any built files are left untouched. ``full`` The build tree is clean of any built files. The exact method for doing this is controlled by the ``method`` argument. Even in this mode, the revisions already pulled remain in the database and a fresh pull is rarely needed. ``method`` (optional): defaults to ``copy`` when mode is ``full``. Monotone's incremental mode does not require a method. The full mode has four methods defined: ``clobber`` It removes the build directory entirely then makes fresh checkout from the database. 
``clean`` This remove all other files except those tracked and ignored by Monotone. It will remove all the files that appear in :command:`mtn ls unknown`. Then it will pull from remote and update the working directory. ``fresh`` This remove all other files except those tracked by Monotone. It will remove all the files that appear in :command:`mtn ls ignored` and :command:`mtn ls unknows`. Then pull and update similar to ``clean`` ``copy`` This first checkout source into source directory then copy the ``source`` directory to ``build`` directory then performs the build operation in the copied directory. This way we make fresh builds with very less bandwidth to download source. The behavior of source checkout follows exactly same as incremental. It performs all the incremental checkout behavior in ``source`` directory. buildbot-3.4.0/master/docs/manual/configuration/steps/source_p4.rst000066400000000000000000000111311413250514000254540ustar00rootroot00000000000000.. bb:step:: P4 .. _Step-P4: P4 ++ .. py:class:: buildbot.steps.source.p4.P4 The :bb:step:`P4` build step creates a `Perforce `_ client specification and performs an update. .. code-block:: python from buildbot.plugins import steps, util factory.addStep(steps.P4( p4port=p4port, p4client=util.WithProperties('%(P4USER)s-%(workername)s-%(buildername)s'), p4user=p4user, p4base='//depot', p4viewspec=p4viewspec, mode='incremental')) You can specify the client spec in two different ways. You can use the ``p4base``, ``p4branch``, and (optionally) ``p4extra_views`` to build up the viewspec, or you can utilize the ``p4viewspec`` to specify the whole viewspec as a set of tuples. Using ``p4viewspec`` will allow you to add lines such as: .. code-block:: none //depot/branch/mybranch/... ///... -//depot/branch/mybranch/notthisdir/... ///notthisdir/... If you specify ``p4viewspec`` and any of ``p4base``, ``p4branch``, and/or ``p4extra_views`` you will receive a configuration error exception. 
``p4base`` A view into the Perforce depot without branch name or trailing ``/...``. Typically ``//depot/proj``. ``p4branch`` (optional): A single string, which is appended to the p4base as follows ``//...`` to form the first line in the viewspec ``p4extra_views`` (optional): a list of ``(depotpath, clientpath)`` tuples containing extra views to be mapped into the client specification. Both will have ``/...`` appended automatically. The client name and source directory will be prepended to the client path. ``p4viewspec`` This will override any p4branch, p4base, and/or p4extra_views specified. The viewspec will be an array of tuples as follows: .. code-block:: python [('//depot/main/','')] It yields a viewspec with just: .. code-block:: none //depot/main/... ///... ``p4viewspec_suffix`` (optional): The ``p4viewspec`` lets you customize the client spec for a builder but, as the previous example shows, it automatically adds ``...`` at the end of each line. If you need to also specify file-level remappings, you can set the ``p4viewspec_suffix`` to ``None`` so that nothing is added to your viewspec: .. code-block:: python [('//depot/main/...', '...'), ('-//depot/main/config.xml', 'config.xml'), ('//depot/main/config.vancouver.xml', 'config.xml')] It yields a viewspec with: .. code-block:: none //depot/main/... ///... -//depot/main/config.xml ///main/config.xml Note how, with ``p4viewspec_suffix`` set to ``None``, you need to manually add ``...`` where you need it. ``p4client_spec_options`` (optional): By default, clients are created with the ``allwrite rmdir`` options. This string lets you change that. ``p4port`` (optional): the :samp:`{host}:{port}` string describing how to get to the P4 Depot (repository), used as the option `-p` argument for all p4 commands. ``p4user`` (optional): the Perforce user, used as the option `-u` argument to all p4 commands. ``p4passwd`` (optional): the Perforce password, used as the option `-p` argument to all p4 commands. 
``p4client`` (optional): The name of the client to use. In ``mode='full'`` and ``mode='incremental'``, it's particularly important that a unique name is used for each checkout directory to avoid incorrect synchronization. For this reason, Python percent substitution will be performed on this value to replace ``%(prop:workername)s`` with the worker name and ``%(prop:buildername)s`` with the builder name. The default is ``buildbot_%(prop:workername)s_%(prop:buildername)s``. ``p4line_end`` (optional): The type of line ending handling P4 should use. This is added directly to the client spec's ``LineEnd`` property. The default is ``local``. ``p4extra_args`` (optional): Extra arguments to be added to the P4 command-line for the ``sync`` command. So for instance if you want to sync only to populate a Perforce proxy (without actually syncing files to disk), you can do: .. code-block:: python P4(p4extra_args=['-Zproxyload'], ...) ``use_tickets`` Set to ``True`` to use ticket-based authentication, instead of passwords (but you still need to specify ``p4passwd``). ``stream`` Set to ``True`` to use a stream-associated workspace, in which case ``p4base`` and ``p4branch`` are used to determine the stream path. buildbot-3.4.0/master/docs/manual/configuration/steps/source_repo.rst000066400000000000000000000105741413250514000261100ustar00rootroot00000000000000.. index:: double: Gerrit integration; Repo Build Step .. bb:step:: Repo .. _Step-Repo: Repo ++++ .. py:class:: buildbot.steps.source.repo.Repo The :bb:step:`Repo` build step performs a `Repo `_ init and sync. The Repo step takes the following arguments: ``manifestURL`` (required): the URL at which the Repo's manifests source repository is available. ``manifestBranch`` (optional, defaults to ``master``): the manifest repository branch on which repo will take its manifest. Corresponds to the ``-b`` argument to the :command:`repo init` command. ``manifestFile`` (optional, defaults to ``default.xml``): the manifest filename. 
Corresponds to the ``-m`` argument to the :command:`repo init` command. ``tarball`` (optional, defaults to ``None``): the repo tarball used for fast bootstrap. If not present the tarball will be created automatically after first sync. It is a copy of the ``.repo`` directory which contains all the Git objects. This feature helps to minimize network usage on very big projects with lots of workers. The suffix of the tarball determines if the tarball is compressed and which compressor is chosen. Supported suffixes are ``bz2``, ``gz``, ``lzma``, ``lzop``, and ``pigz``. ``jobs`` (optional, defaults to ``None``): Number of projects to fetch simultaneously while syncing. Passed to repo sync subcommand with "-j". ``syncAllBranches`` (optional, defaults to ``False``): renderable boolean to control whether ``repo`` syncs all branches. I.e. ``repo sync -c`` ``depth`` (optional, defaults to 0): Depth argument passed to repo init. Specifies the amount of git history to store. A depth of 1 is useful for shallow clones. This can save considerable disk space on very large projects. ``submodules`` (optional, defaults to ``False``): sync any submodules associated with the manifest repo. Corresponds to the ``--submodules`` argument to the :command:`repo init` command. ``updateTarballAge`` (optional, defaults to "one week"): renderable to control the policy of updating of the tarball given properties. Returns: max age of tarball in seconds, or ``None``, if we want to skip tarball update. The default value should be good trade off on size of the tarball, and update frequency compared to cost of tarball creation ``repoDownloads`` (optional, defaults to None): list of ``repo download`` commands to perform at the end of the Repo step each string in the list will be prefixed ``repo download``, and run as is. This means you can include parameter in the string. 
For example: * ``["-c project 1234/4"]`` will cherry-pick patchset 4 of patch 1234 in project ``project`` * ``["-f project 1234/4"]`` will enforce fast-forward on patchset 4 of patch 1234 in project ``project`` .. py:class:: buildbot.steps.source.repo.RepoDownloadsFromProperties ``util.repo.DownloadsFromProperties`` can be used as a renderable of the ``repoDownload`` parameter it will look in passed properties for string with following possible format: * ``repo download project change_number/patchset_number`` * ``project change_number/patchset_number`` * ``project/change_number/patchset_number`` All of these properties will be translated into a :command:`repo download`. This feature allows integrators to build with several pending interdependent changes, which at the moment cannot be described properly in Gerrit, and can only be described by humans. .. py:class:: buildbot.steps.source.repo.RepoDownloadsFromChangeSource ``util.repo.DownloadsFromChangeSource`` can be used as a renderable of the ``repoDownload`` parameter This rendereable integrates with :bb:chsrc:`GerritChangeSource`, and will automatically use the :command:`repo download` command of repo to download the additional changes introduced by a pending changeset. .. note:: You can use the two above Rendereable in conjunction by using the class ``buildbot.process.properties.FlattenList`` For example: .. code-block:: python from buildbot.plugins import steps, util factory.addStep(steps.Repo(manifestURL='git://gerrit.example.org/manifest.git', repoDownloads=util.FlattenList([ util.RepoDownloadsFromChangeSource(), util.RepoDownloadsFromProperties("repo_downloads") ]))) buildbot-3.4.0/master/docs/manual/configuration/steps/source_svn.rst000066400000000000000000000110361413250514000257430ustar00rootroot00000000000000.. bb:step:: SVN .. _Step-SVN: SVN +++ .. py:class:: buildbot.steps.source.svn.SVN The :bb:step:`SVN` build step performs a `Subversion `_ checkout or update. 
There are two basic ways of setting up the checkout step, depending upon whether you are using multiple branches or not. The :bb:step:`SVN` step should be created with the ``repourl`` argument: ``repourl`` (required): this specifies the ``URL`` argument that will be given to the :command:`svn checkout` command. It dictates both where the repository is located and which sub-tree should be extracted. One way to specify the branch is to use ``Interpolate``. For example, if you wanted to check out the trunk repository, you could use ``repourl=Interpolate("http://svn.example.com/repos/%(src::branch)s")``. Alternatively, if you are using a remote Subversion repository which is accessible through HTTP at a URL of ``http://svn.example.com/repos``, and you wanted to check out the ``trunk/calc`` sub-tree, you would directly use ``repourl="http://svn.example.com/repos/trunk/calc"`` as an argument to your :bb:step:`SVN` step. If you are building from multiple branches, then you should create the :bb:step:`SVN` step with the ``repourl`` and provide branch information with :ref:`Interpolate`: .. code-block:: python from buildbot.plugins import steps, util factory.addStep( steps.SVN(mode='incremental', repourl=util.Interpolate( 'svn://svn.example.org/svn/%(src::branch)s/myproject'))) Alternatively, the ``repourl`` argument can be used to create the :bb:step:`SVN` step without :ref:`Interpolate`: .. code-block:: python from buildbot.plugins import steps factory.addStep(steps.SVN(mode='full', repourl='svn://svn.example.org/svn/myproject/trunk')) ``username`` (optional): if specified, this will be passed to the ``svn`` binary with a ``--username`` option. ``password`` (optional): if specified, this will be passed to the ``svn`` binary with a ``--password`` option. ``extra_args`` (optional): if specified, an array of strings that will be passed as extra arguments to the ``svn`` binary. 
``keep_on_purge`` (optional): specific files or directories to keep between purges, like some build outputs that can be reused between builds. ``depth`` (optional): Specify depth argument to achieve sparse checkout. Only available if worker has Subversion 1.5 or higher. If set to ``empty`` updates will not pull in any files or subdirectories not already present. If set to ``files``, updates will pull in any files not already present, but not directories. If set to ``immediates``, updates will pull in any files or subdirectories not already present, the new subdirectories will have depth: empty. If set to ``infinity``, updates will pull in any files or subdirectories not already present; the new subdirectories will have depth-infinity. Infinity is equivalent to SVN default update behavior, without specifying any depth argument. ``preferLastChangedRev`` (optional): By default, the ``got_revision`` property is set to the repository's global revision ("Revision" in the `svn info` output). Set this parameter to ``True`` to have it set to the "Last Changed Rev" instead. ``mode`` ``method`` SVN's incremental mode does not require a method. The full mode has five methods defined: ``clobber`` It removes the working directory for each build then makes full checkout. ``fresh`` This always always purges local changes before updating. This deletes unversioned files and reverts everything that would appear in a :command:`svn status --no-ignore`. This is equivalent to the old update mode with ``always_purge``. ``clean`` This is same as fresh except that it deletes all unversioned files generated by :command:`svn status`. ``copy`` This first checkout source into source directory then copy the ``source`` directory to ``build`` directory then performs the build operation in the copied directory. This way we make fresh builds with very less bandwidth to download source. The behavior of source checkout follows exactly same as incremental. 
It performs all the incremental checkout behavior in ``source`` directory. ``export`` Similar to ``method='copy'``, except using ``svn export`` to create build directory so that there are no ``.svn`` directories in the build directory. If you are using branches, you must also make sure your ``ChangeSource`` will report the correct branch names. buildbot-3.4.0/master/docs/manual/configuration/steps/sphinx.rst000066400000000000000000000026531413250514000250730ustar00rootroot00000000000000.. bb:step:: Sphinx .. _Step-Sphinx: Sphinx ++++++ .. py:class:: buildbot.steps.python.Sphinx `Sphinx `_ is the Python Documentation Generator. It uses `RestructuredText `_ as input format. The :bb:step:`Sphinx` step will run :program:`sphinx-build` or any other program specified in its ``sphinx`` argument and count the various warnings and error it detects. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.Sphinx(sphinx_builddir="_build")) This step takes the following arguments: ``sphinx_builddir`` (required) Name of the directory where the documentation will be generated. ``sphinx_sourcedir`` (optional, defaulting to ``.``), Name the directory where the :file:`conf.py` file will be found ``sphinx_builder`` (optional) Indicates the builder to use. ``sphinx`` (optional, defaulting to :program:`sphinx-build`) Indicates the executable to run. ``tags`` (optional) List of ``tags`` to pass to :program:`sphinx-build` ``defines`` (optional) Dictionary of defines to overwrite values of the :file:`conf.py` file. ``strict_warnings`` (optional) Boolean, defaults to False. Treat all warnings as errors. ``mode`` (optional) String, one of ``full`` or ``incremental`` (the default). If set to ``full``, indicates to Sphinx to rebuild everything without re-using the previous build results. buildbot-3.4.0/master/docs/manual/configuration/steps/subunit_shell_command.rst000066400000000000000000000013121413250514000301270ustar00rootroot00000000000000.. 
bb:step:: SubunitShellCommand .. _Step-SubunitShellCommand: SubunitShellCommand +++++++++++++++++++ .. py:class:: buildbot.steps.subunit.SubunitShellCommand This buildstep is similar to :bb:step:`ShellCommand`, except that it runs the log content through a subunit filter to extract test and failure counts. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.SubunitShellCommand(command="make test")) This runs ``make test`` and filters it through subunit. The 'tests' and 'test failed' progress metrics will now accumulate test data from the test run. If ``failureOnNoTests`` is ``True``, this step will fail if no test is run. By default ``failureOnNoTests`` is False. buildbot-3.4.0/master/docs/manual/configuration/steps/test.rst000066400000000000000000000005011413250514000245270ustar00rootroot00000000000000.. bb:step:: Test .. _Step-Test: Test ++++ .. code-block:: python from buildbot.plugins import steps f.addStep(steps.Test()) This is meant to handle unit tests. The default command is :command:`make test`, and the ``warnOnFailure`` flag is set. The other arguments are identical to :bb:step:`ShellCommand`. buildbot-3.4.0/master/docs/manual/configuration/steps/treesize.rst000066400000000000000000000010131413250514000254010ustar00rootroot00000000000000.. bb:step:: TreeSize .. index:: Properties; tree-size-KiB .. _Step-TreeSize: TreeSize ++++++++ .. code-block:: python from buildbot.plugins import steps f.addStep(steps.TreeSize()) This is a simple command that uses the :command:`du` tool to measure the size of the code tree. It puts the size (as a count of 1024-byte blocks, aka 'KiB' or 'kibibytes') on the step's status text, and sets a build property named ``tree-size-KiB`` with the same value. All arguments are identical to :bb:step:`ShellCommand`. buildbot-3.4.0/master/docs/manual/configuration/steps/trial.rst000066400000000000000000000064011413250514000246700ustar00rootroot00000000000000.. bb:step:: Trial .. _Step-Trial: Trial +++++ .. 
py:class:: buildbot.steps.python_twisted.Trial This step runs a unit test suite using :command:`trial`, a unittest-like testing framework that is a component of Twisted Python. The :bb:step:`Trial` takes the following arguments: ``python`` (string or list of strings, optional) Which python executable to use. Will form the start of the argv array that will launch ``trial``. If you use this, you should set ``trial`` to an explicit path (like /usr/bin/trial or ./bin/trial). Defaults to ``None``, which leaves it out entirely (running 'trial args' instead of python ./bin/trial args'). Likely values are ``'python'``, ``['python3.5']``, ``['python', '-Wall']``, etc. ``trial`` (string, optional) Which 'trial' executable to run Defaults to ``'trial'``, which will cause ``$PATH`` to be searched and probably find ``/usr/bin/trial``. If you set ``python``, this should be set to an explicit path (because ``python3.5 trial`` will not work). ``trialMode`` (list of strings, optional) A list of arguments to pass to trial to set the reporting mode. This defaults to ``['-to']`` which means 'verbose colorless output' to the trial that comes with Twisted-2.0.x and at least -2.1.0 . Newer versions of Twisted may come with a trial that prefers ``['--reporter=bwverbose']``. ``trialArgs`` (list of strings, optional) A list of arguments to pass to trial. This can be used to turn on any extra flags you like. Defaults to ``[]``. ``jobs`` (integer, optional) Defines the number of parallel jobs. ``tests`` (list of strings, optional) Defines the test modules to run. For example, ``['twisted.test.test_defer', 'twisted.test.test_process']`` If this is a string, it will be converted into a one-item list. ``testChanges`` (boolean, optional) Selects the tests according to the changes in the Build. If set, this will override the ``tests`` parameter and asks the Build for all the files that make up the Changes going into this build. 
The filenames will be passed to ``trial`` asking to run just the tests necessary to cover the changes. ``recurse`` (boolean, optional) Selects the ``--recurse`` option of trial. This allows test cases to be found in deeper subdirectories of the modules listed in ``tests``. When using ``testChanges`` this option is not necessary. ``reactor`` (boolean, optional) Selects the reactor to use within Trial. For example, options are ``gtk`` or ``java``. If not provided, the Twisted's usual platform-dependent default is used. ``randomly`` (boolean, optional) If ``True``, adds the ``--random=0`` argument, which instructs trial to run the unit tests in a random order each time. This occasionally catches problems that might be masked when one module always runs before another. ``**kwargs`` (dict, optional) The step inherits all arguments of ``ShellMixin`` except ``command``. Trial creates and switches into a directory named :file:`_trial_temp/` before running the tests, and sends the twisted log (which includes all exceptions) to a file named :file:`test.log`. This file will be pulled up to the master where it can be seen as part of the status output. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.Trial(tests='petmail.test')) buildbot-3.4.0/master/docs/manual/configuration/steps/trigger.rst000066400000000000000000000115331413250514000252220ustar00rootroot00000000000000.. index:: Properties; triggering schedulers .. bb:step:: Trigger .. _Step-Trigger: Trigger ------- .. py:class:: buildbot.steps.trigger.Trigger The counterpart to the :bb:Sched:`Triggerable` scheduler is the :bb:step:`Trigger` build step: .. code-block:: python from buildbot.plugins import steps f.addStep(steps.Trigger(schedulerNames=['build-prep'], waitForFinish=True, updateSourceStamp=True, set_properties={ 'quick' : False })) The SourceStamps to use for the triggered build are controlled by the arguments ``updateSourceStamp``, ``alwaysUseLatest``, and ``sourceStamps``. 
Hyperlinks are added to the build detail web pages for each triggered build. ``schedulerNames`` Lists the :bb:sched:`Triggerable` schedulers that should be triggered when this step is executed. .. note:: It is possible, but not advisable, to create a cycle where a build continually triggers itself, because the schedulers are specified by name. ``unimportantSchedulerNames`` When ``waitForFinish`` is ``True``, all schedulers in this list will not cause the trigger step to fail. unimportantSchedulerNames must be a subset of schedulerNames. If ``waitForFinish`` is ``False``, unimportantSchedulerNames will simply be ignored. ``waitForFinish`` If ``True``, the step will not finish until all of the builds from the triggered schedulers have finished. If ``False`` (the default) or not given, then the buildstep succeeds immediately after triggering the schedulers. ``updateSourceStamp`` If ``True`` (the default), then the step updates the source stamps given to the :bb:sched:`Triggerable` schedulers to include ``got_revision`` (the revision actually used in this build) as ``revision`` (the revision to use in the triggered builds). This is useful to ensure that all of the builds use exactly the same source stamps, even if other :class:`Change`\s have occurred while the build was running. If ``False`` (and neither of the other arguments are specified), then the exact same SourceStamps are used. ``alwaysUseLatest`` If ``True``, then no SourceStamps are given, corresponding to using the latest revisions of the repositories specified in the Source steps. This is useful if the triggered builds use to a different source repository. ``sourceStamps`` Accepts a list of dictionaries containing the keys ``branch``, ``revision``, ``repository``, ``project``, and optionally ``patch_level``, ``patch_body``, ``patch_subdir``, ``patch_author`` and ``patch_comment`` and creates the corresponding SourceStamps. 
If only one sourceStamp has to be specified then the argument ``sourceStamp`` can be used for a dictionary containing the keys mentioned above. The arguments ``updateSourceStamp``, ``alwaysUseLatest``, and ``sourceStamp`` can be specified using properties. ``set_properties`` Allows control of the properties that are passed to the triggered scheduler. The parameter takes a dictionary mapping property names to values. You may use :ref:`Interpolate` here to dynamically construct new property values. For the simple case of copying a property, this might look like: .. code-block:: python set_properties={"my_prop1" : Property("my_prop1"), "my_prop2" : Property("my_prop2")} where ``Property`` is an instance of ``buildbot.process.properties.Property``. .. note:: The ``copy_properties`` parameter, given a list of properties to copy into the new build request, has been deprecated in favor of explicit use of ``set_properties``. .. _Dynamic-Trigger: Dynamic Trigger +++++++++++++++ Sometimes it is desirable to select which scheduler to trigger, and which properties to set dynamically, at the time of the build. For this purpose, the Trigger step supports a method that you can customize in order to override statically defined ``schedulernames``, ``set_properties`` and optionally ``unimportant``. .. py:method:: getSchedulersAndProperties() :returns: list of dictionaries containing the keys 'sched_name', 'props_to_set' and 'unimportant' optionally via deferred. This method returns a list of dictionaries describing what scheduler to trigger, with which properties and if the scheduler is unimportant. Old style list of tuples is still supported, in which case unimportant is considered ``False``. The properties should already be rendered (ie, concrete value, not objects wrapped by ``Interpolate`` or ``Property``). Since this function happens at build-time, the property values are available from the step and can be used to decide what schedulers or properties to use. 
With this method, you can also trigger the same scheduler multiple times with different set of properties. The sourcestamp configuration is however the same for each triggered build request. buildbot-3.4.0/master/docs/manual/configuration/steps/visual_cxx.rst000066400000000000000000000110571413250514000257450ustar00rootroot00000000000000.. index:: Visual Studio, Visual C++ .. bb:step:: VC6 .. bb:step:: VC7 .. bb:step:: VC8 .. bb:step:: VC9 .. bb:step:: VC10 .. bb:step:: VC11 .. bb:step:: VC12 .. bb:step:: VC14 .. bb:step:: VC141 .. bb:step:: VS2003 .. bb:step:: VS2005 .. bb:step:: VS2008 .. bb:step:: VS2010 .. bb:step:: VS2012 .. bb:step:: VS2013 .. bb:step:: VS2015 .. bb:step:: VS2017 .. bb:step:: VCExpress9 .. bb:step:: MsBuild4 .. bb:step:: MsBuild12 .. bb:step:: MsBuild14 .. bb:step:: MsBuild141 .. _Step-VisualCxx: Visual C++ ++++++++++ These steps are meant to handle compilation using Microsoft compilers. VC++ 6-141 (aka Visual Studio 2003-2015 and VCExpress9) are supported via calling ``devenv``. Msbuild as well as Windows Driver Kit 8 are supported via the ``MsBuild4``, ``MsBuild12``, ``MsBuild14`` and ``MsBuild141`` steps. These steps will take care of setting up a clean compilation environment, parsing the generated output in real time, and delivering as detailed as possible information about the compilation executed. All of the classes are in :mod:`buildbot.steps.vstudio`. The available classes are: * ``VC6`` * ``VC7`` * ``VC8`` * ``VC9`` * ``VC10`` * ``VC11`` * ``VC12`` * ``VC14`` * ``VC141`` * ``VS2003`` * ``VS2005`` * ``VS2008`` * ``VS2010`` * ``VS2012`` * ``VS2013`` * ``VS2015`` * ``VS2017`` * ``VCExpress9`` * ``MsBuild4`` * ``MsBuild12`` * ``MsBuild14`` * ``MsBuild141`` The available constructor arguments are ``mode`` The mode default to ``rebuild``, which means that first all the remaining object files will be cleaned by the compiler. 
The alternate values are ``build``, where only the updated files will be recompiled, and ``clean``, where the current build files are removed and no compilation occurs. ``projectfile`` This is a mandatory argument which specifies the project file to be used during the compilation. ``config`` This argument defaults to ``release`` an gives to the compiler the configuration to use. ``installdir`` This is the place where the compiler is installed. The default value is compiler specific and is the default place where the compiler is installed. ``useenv`` This boolean parameter, defaulting to ``False`` instruct the compiler to use its own settings or the one defined through the environment variables :envvar:`PATH`, :envvar:`INCLUDE`, and :envvar:`LIB`. If any of the ``INCLUDE`` or ``LIB`` parameter is defined, this parameter automatically switches to ``True``. ``PATH`` This is a list of path to be added to the :envvar:`PATH` environment variable. The default value is the one defined in the compiler options. ``INCLUDE`` This is a list of path where the compiler will first look for include files. Then comes the default paths defined in the compiler options. ``LIB`` This is a list of path where the compiler will first look for libraries. Then comes the default path defined in the compiler options. ``arch`` That one is only available with the class VS2005 (VC8). It gives the target architecture of the built artifact. It defaults to ``x86`` and does not apply to ``MsBuild4`` or ``MsBuild12``. Please see ``platform`` below. ``project`` This gives the specific project to build from within a workspace. It defaults to building all projects. This is useful for building cmake generate projects. ``platform`` This is a mandatory argument for ``MsBuild4`` and ``MsBuild12`` specifying the target platform such as 'Win32', 'x64' or 'Vista Debug'. The last one is an example of driver targets that appear once Windows Driver Kit 8 is installed. 
``defines`` That one is only available with the MsBuild family of classes. It allows to define pre-processor constants used by the compiler. Here is an example on how to drive compilation with Visual Studio 2013: .. code-block:: python from buildbot.plugins import steps f.addStep( steps.VS2013(projectfile="project.sln", config="release", arch="x64", mode="build", INCLUDE=[r'C:\3rd-party\libmagic\include'], LIB=[r'C:\3rd-party\libmagic\lib-x64'])) Here is a similar example using "MsBuild12": .. code-block:: python from buildbot.plugins import steps # Build one project in Release mode for Win32 f.addStep( steps.MsBuild12(projectfile="trunk.sln", config="Release", platform="Win32", workdir="trunk", project="tools\\protoc")) # Build the entire solution in Debug mode for x64 f.addStep( steps.MsBuild12(projectfile="trunk.sln", config='Debug', platform='x64', workdir="trunk")) buildbot-3.4.0/master/docs/manual/configuration/steps/worker_filesystem.rst000066400000000000000000000032711413250514000273340ustar00rootroot00000000000000.. _Worker-Filesystem-Steps: Worker Filesystem Steps ----------------------- Here are some buildsteps for manipulating the worker's filesystem. .. bb:step:: FileExists FileExists ++++++++++ This step will assert that a given file exists, failing if it does not. The filename can be specified with a property. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.FileExists(file='test_data')) This step requires worker version 0.8.4 or later. .. bb:step:: CopyDirectory CopyDirectory +++++++++++++ This command copies a directory on the worker. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.CopyDirectory(src="build/data", dest="tmp/data")) This step requires worker version 0.8.5 or later. The CopyDirectory step takes the following arguments: ``timeout`` If the copy command fails to produce any output for this many seconds, it is assumed to be locked up and will be killed. This defaults to 120 seconds. 
Pass ``None`` to disable. ``maxTime`` If the command takes longer than this many seconds, it will be killed. This is disabled by default. .. bb:step:: RemoveDirectory RemoveDirectory +++++++++++++++ This command recursively deletes a directory on the worker. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.RemoveDirectory(dir="build/build")) This step requires worker version 0.8.4 or later. .. bb:step:: MakeDirectory MakeDirectory +++++++++++++ This command creates a directory on the worker. .. code-block:: python from buildbot.plugins import steps f.addStep(steps.MakeDirectory(dir="build/build")) This step requires worker version 0.8.5 or later. buildbot-3.4.0/master/docs/manual/configuration/workers-docker.rst000066400000000000000000000530061413250514000253630ustar00rootroot00000000000000.. index:: Docker Workers; Docker .. bb:worker:: DockerLatentWorker Docker latent worker ==================== .. py:class:: buildbot.worker.docker.DockerLatentWorker .. py:class:: buildbot.plugins.worker.DockerLatentWorker Docker_ is an open-source project that automates the deployment of applications inside software containers. The :class:`DockerLatentWorker` attempts to instantiate a fresh image for each build to assure consistency of the environment between builds. Each image will be discarded once the worker finished processing the build queue (i.e. becomes ``idle``). See :ref:`build_wait_timeout ` to change this behavior. This document will guide you through the setup of such workers. .. contents:: :depth: 1 :local: .. _Docker: https://docker.com Docker Installation ------------------- An easy way to try Docker is through installation of dedicated Virtual machines. Two of them stands out: - CoreOS_ - boot2docker_ Beside, it is always possible to install Docker next to the buildmaster. 
Beware that in this case, overall performance will depend on how many builds the computer where you have your buildmaster can handle as everything will happen on the same one. .. note:: It is not necessary to install Docker in the same environment as your master as we will make use to the Docker API through docker-py_. More in `master setup`_. .. _CoreOS: https://coreos.com/ .. _boot2docker: https://github.com/boot2docker/boot2docker .. _docker-py: https://pypi.python.org/pypi/docker-py CoreOS ...... CoreOS is targeted at building infrastructure and distributed systems. In order to get the latent worker working with CoreOS, it is necessary to `expose the docker socket`_ outside of the Virtual Machine. If you installed it via Vagrant_, it is also necessary to uncomment the following line in your :file:`config.rb` file: .. code-block:: ruby $expose_docker_tcp=2375 The following command should allow you to confirm that your Docker socket is now available via the network: .. code-block:: bash docker -H tcp://127.0.0.1:2375 ps .. _`expose the docker socket`: https://coreos.com/docs/launching-containers/building/customizing-docker/ .. _Vagrant: https://coreos.com/docs/running-coreos/platforms/vagrant/ boot2docker ........... boot2docker is one of the fastest ways to boot to Docker. As it is meant to be used from outside of the Virtual Machine, the socket is already exposed. Please follow the installation instructions on how to find the address of your socket. Image Creation -------------- Our build master will need the name of an image to perform its builds. Each time a new build will be requested, the same base image will be used again and again, actually discarding the result of the previous build. If you need some persistent storage between builds, you can `use Volumes `_. Each Docker image has a single purpose. Our worker image will be running a buildbot worker. Docker uses ``Dockerfile``\s to describe the steps necessary to build an image. 
The following example will build a minimal worker. This example is voluntarily simplistic, and should probably not be used in production, see next paragraph. .. code-block:: Docker :linenos: :emphasize-lines: 11 FROM debian:stable RUN apt-get update && apt-get install -y \ python-dev \ python-pip RUN pip install buildbot-worker RUN groupadd -r buildbot && useradd -r -g buildbot buildbot RUN mkdir /worker && chown buildbot:buildbot /worker # Install your build-dependencies here ... USER buildbot WORKDIR /worker RUN buildbot-worker create-worker . ENTRYPOINT ["/usr/local/bin/buildbot-worker"] CMD ["start", "--nodaemon"] On line 11, the hostname for your master instance, as well as the worker name and password is setup. Don't forget to replace those values with some valid ones for your project. It is a good practice to set the ``ENTRYPOINT`` to the worker executable, and the ``CMD`` to ``["start", "--nodaemon"]``. This way, no parameter will be required when starting the image. When your Dockerfile is ready, you can build your first image using the following command (replace *myworkername* with a relevant name for your case): .. code-block:: bash docker build -t myworkername - < Dockerfile Reuse same image for different workers -------------------------------------- Previous simple example hardcodes the worker name into the dockerfile, which will not work if you want to share your docker image between workers. You can find in buildbot source code in :contrib-src:`master/contrib/docker` one example configurations: :contrib-src:`pythonnode_worker ` a worker with Python and node installed, which demonstrate how to reuse the base worker to create variations of build environments. It is based on the official ``buildbot/buildbot-worker`` image. The master setups several environment variables before starting the workers: ``BUILDMASTER`` The address of the master the worker shall connect to ``BUILDMASTER_PORT`` The port of the master's worker 'pb' protocol. 
``WORKERNAME`` The name the worker should use to connect to master ``WORKERPASS`` The password the worker should use to connect to master Master Setup ------------ We will rely on docker-py to connect our master with docker. Now is the time to install it in your master environment. Before adding the worker to your master configuration, it is possible to validate the previous steps by starting the newly created image interactively. To do this, enter the following lines in a Python prompt where docker-py is installed: .. code-block:: python >>> import docker >>> docker_socket = 'tcp://localhost:2375' >>> client = docker.client.Client(base_url=docker_socket) >>> worker_image = 'my_project_worker' >>> container = client.create_container(worker_image) >>> client.start(container['Id']) >>> # Optionally examine the logs of the master >>> client.stop(container['Id']) >>> client.wait(container['Id']) 0 It is now time to add the new worker to the master configuration under :bb:cfg:`workers`. The following example will add a Docker latent worker for docker running at the following address: ``tcp://localhost:2375``, the worker name will be ``docker``, its password: ``password``, and the base image name will be ``my_project_worker``: .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.DockerLatentWorker('docker', 'password', docker_host='tcp://localhost:2375', image='my_project_worker') ] ``password`` (mandatory) The worker password part of the :ref:`Latent-Workers` API. If the password is ``None``, then it will be automatically generated from random number, and transmitted to the container via environment variable. In addition to the arguments available for any :ref:`Latent-Workers`, :class:`DockerLatentWorker` will accept the following extra ones: ``docker_host`` (mandatory) This is the address the master will use to connect with a running Docker instance. ``image`` This is the name of the image that will be started by the build master. 
It should start a worker. This option can be a renderable, like :ref:`Interpolate`, so that it generates from the build request properties. ``command`` (optional) This will override the command setup during image creation. ``volumes`` (optional) See `Setting up Volumes`_ ``dockerfile`` (optional if ``image`` is given) This is the content of the Dockerfile that will be used to build the specified image if the image is not found by Docker. It should be a multiline string. .. note:: In case ``image`` and ``dockerfile`` are given, no attempt is made to compare the image with the content of the Dockerfile parameter if the image is found. ``version`` (optional, default to the highest version known by docker-py) This will indicates which API version must be used to communicate with Docker. ``tls`` (optional) This allow to use TLS when connecting with the Docker socket. This should be a ``docker.tls.TLSConfig`` object. See `docker-py's own documentation `_ for more details on how to initialise this object. ``followStartupLogs`` (optional, defaults to false) This transfers docker container's log inside master logs during worker startup (before connection). This can be useful to debug worker startup. e.g network issues, etc. ``masterFQDN`` (optional, defaults to socket.getfqdn()) Address of the master the worker should connect to. Use if you master machine does not have proper fqdn. This value is passed to the docker image via environment variable ``BUILDMASTER`` ``hostconfig`` (optional) Extra host configuration parameters passed as a dictionary used to create HostConfig object. See `docker-py's HostConfig documentation `_ for all the supported options. ``autopull`` (optional, defaults to false) Automatically pulls image if requested image is not on docker host. ``alwaysPull`` (optional, defaults to false) Always pulls image if autopull is set to true. 
``custom_context`` (optional) Boolean indicating that the user wants to use custom build arguments for the docker environment. Defaults to False. ``encoding`` (optional) String indicating the compression format for the build context. defaults to 'gzip', but 'bzip' can be used as well. ``buildargs`` (optional if ``custom_context`` is True) Dictionary, passes information for the docker to build its environment. Eg. {'DISTRO':'ubuntu', 'RELEASE':'11.11'}. Defaults to None. ``hostname`` (optional) This will set container's hostname. Setting up Volumes .................. The ``volume`` parameter allows to share directory between containers, or between a container and the host system. Refer to Docker documentation for more information about Volumes. The format of that variable has to be an array of string. Each string specify a volume in the following format: :samp:`{volumename}:{bindname}`. The volume name has to be appended with ``:ro`` if the volume should be mounted *read-only*. .. note:: This is the same format as when specifying volumes on the command line for docker's own ``-v`` option. Marathon latent worker ====================== Marathon_ Marathon is a production-grade container orchestration platform for Mesosphere's Data-center Operating System (DC/OS) and Apache ``Mesos``. Buildbot supports using Marathon_ to host your latent workers. It requires either `txrequests`_ or `treq`_ to be installed to allow interaction with http server. See :class:`HTTPClientService` for details. .. py:class:: buildbot.worker.marathon.MarathonLatentWorker .. py:class:: buildbot.plugins.worker.MarathonLatentWorker The :class:`MarathonLatentWorker` attempts to instantiate a fresh image for each build to assure consistency of the environment between builds. Each image will be discarded once the worker finished processing the build queue (i.e. becomes ``idle``). See :ref:`build_wait_timeout ` to change this behavior. 
In addition to the arguments available for any :ref:`Latent-Workers`, :class:`MarathonLatentWorker` will accept the following extra ones: ``marathon_url`` (mandatory) This is the URL to Marathon_ server. Its REST API will be used to start docker containers. ``marathon_auth`` (optional) This is the optional ``('userid', 'password')`` ``BasicAuth`` credential. If txrequests_ is installed, this can be a `requests authentication plugin`_. ``image`` (mandatory) This is the name of the image that will be started by the build master. It should start a worker. This option can be a renderable, like :ref:`Interpolate`, so that it generates from the build request properties. Images are by pulled from the default docker registry. MarathonLatentWorker does not support starting a worker built from a Dockerfile. ``masterFQDN`` (optional, defaults to socket.getfqdn()) Address of the master the worker should connect to. Use if you master machine does not have proper fqdn. This value is passed to the docker image via environment variable ``BUILDMASTER`` If the value contains a colon (``:``), then BUILDMASTER and BUILDMASTER_PORT environment variables will be passed, following scheme: ``masterFQDN="$BUILDMASTER:$BUILDMASTER_PORT"`` ``marathon_extra_config`` (optional, defaults to ``{}```) Extra configuration to be passed to `Marathon API`_. This implementation will setup the minimal configuration to run a worker (docker image, ``BRIDGED`` network) It will let the default for everything else, including memory size, volume mounting, etc. This configuration is voluntarily very raw so that it is easy to use new marathon features. This dictionary will be merged into the Buildbot generated config, and recursively override it. See `Marathon API`_ documentation to learn what to include in this config. .. _Marathon: https://mesosphere.github.io/marathon/ .. _Marathon API: http://mesosphere.github.io/marathon/docs/rest-api.html#post-v2-apps .. 
_txrequests: https://pypi.python.org/pypi/txrequests .. _treq: https://pypi.python.org/pypi/treq .. _requests authentication plugin: https://2.python-requests.org/en/master/user/authentication/ Kubernetes latent worker ======================== Kubernetes_ is an open-source system for automating deployment, scaling, and management of containerized applications. Buildbot supports using Kubernetes_ to host your latent workers. .. py:class:: buildbot.worker.kubernetes.KubeLatentWorker .. py:class:: buildbot.plugins.worker.KubeLatentWorker The :class:`KubeLatentWorker` attempts to instantiate a fresh container for each build to assure consistency of the environment between builds Each container will be discarded once the worker finished processing the build queue (i.e. becomes ``idle``). See :ref:`build_wait_timeout ` to change this behavior. .. _Kubernetes: https://kubernetes.io/ In addition to the arguments available for any :ref:`Latent-Workers`, :class:`KubeLatentWorker` will accept the following extra ones: ``image`` (optional, default to ``buildbot/buildbot-worker``) Docker image. Default to the `official buildbot image`. ``namespace`` (optional) This is the name of the namespace. Default to the current namespace ``kube_config`` (mandatory) This is the object specifying how to connect to the kubernetes cluster. This object must be an instance of abstract class :class:`KubeConfigLoaderBase`, which have 3 implementations: - :class:`KubeHardcodedConfig` - :class:`KubeCtlProxyConfigLoader` - :class:`KubeInClusterConfigLoader` ``masterFQDN`` (optional, default to ``None``) Address of the master the worker should connect to. Put the service master service name if you want to place a load-balancer between the workers and the masters. The default behaviour is to compute address IP of the master. This option works out-of-the box inside kubernetes but don't leverage the load-balancing through service. 
You can pass any callable, such as ``KubeLatentWorker.get_fqdn`` that will set ``masterFQDN=socket.getfqdn()``. For more customization, you can subclass :class:`KubeLatentWorker` and override following methods. All those methods can optionally return a deferred. All those methods take props object which is a L{IProperties} allowing to get some parameters from the build properties .. py:method:: createEnvironment(self, props) This method compute the environment from your properties. Don't forget to first call `super().createEnvironment(props)` to get the base properties necessary to connect to the master. .. py:method:: getBuildContainerResources(self, props) This method compute the `pod resources` part of the container spec (`spec.containers[].resources`. This is important to reserve some CPU and memory for your builds, and to trigger node auto-scaling if needed. You can also limit the CPU and memory for your container. .. py:method:: getServicesContainers(self, props) This method compute a list of containers spec to put alongside the worker container. This is useful for starting services around your build pod, like a database container. All containers within the same pod share the same localhost interface, so you can access the other containers TCP ports very easily. .. _official buildbot image: https://hub.docker.com/r/buildbot/buildbot-worker/ .. _pod resources: https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#resource-requests-and-limits-of-pod-and-container Kubernetes config loaders ------------------------- Kubernetes provides many options to connect to a cluster. It is especially more complicated as some cloud providers use specific methods to connect to their managed kubernetes. Config loaders objects can be shared between LatentWorker. There are three options you may use to connect to your clusters. When running both the master and slaves run on the same Kubernetes cluster, you should use the KubeInClusterConfigLoader. 
If not, but having a configured ``kubectl`` tool available to the build master is an option for you, you should use KubeCtlProxyConfigLoader. If neither of these options is convenient, use KubeHardcodedConfig. .. py:class:: buildbot.util.kubeclientservice.KubeCtlProxyConfigLoader .. py:class:: buildbot.plugins.util.KubeCtlProxyConfigLoader ``KubeCtlProxyConfigLoader`` ............................ With :class:`KubeCtlProxyConfigLoader`, buildbot will user ``kubectl proxy`` to get access to the cluster. This delegates the authentication to the ``kubectl`` ``golang`` binary, and thus avoid to implement a python version for every authentication scheme that kubernetes provides. ``kubectl`` must be available in the ``PATH``, and configured to be able to start pods. While this method is very convenient and easy, it also opens an unauthenticated http access to your cluster via localhost. You must ensure that this is properly secured, and your buildbot master machine is not on a shared multi-user server. ``proxy_port`` (optional defaults to 8001) HTTP port to use. ``namespace`` (optional defaults to ``"default"`` default namespace to use if the latent worker do not provide one already. .. py:class:: buildbot.util.kubeclientservice.KubeHardcodedConfig .. py:class:: buildbot.plugins.util.KubeHardcodedConfig ``KubeHardcodedConfig`` ....................... With :class:`KubeHardcodedConfig`, you just configure the necessary parameters to connect to the clusters. ``master_url`` (mandatory) The http url of you kubernetes master. Only http and https protocols are supported ``headers`` (optional) Additional headers to be passed to the HTTP request ``basicAuth`` (optional) Basic authorization info to connect to the cluster, as a `{'user': 'username', 'password': 'psw' }` dict. Unlike the headers argument, this argument supports secret providers, e.g: .. 
code-block:: python basicAuth={'user': 'username', 'password': Secret('k8spassword')} ``bearerToken`` (optional) A bearer token to authenticate to the cluster, as a string. Unlike the headers argument, this argument supports secret providers, e.g: .. code-block:: python bearerToken=Secret('k8s-token') When using the Google Kubernetes Engine (GKE), a bearer token for the default service account can be had with: .. code-block:: bash gcloud container clusters get-credentials --region [YOURREGION] YOURCLUSTER kubectl describe sa kubectl describe secret [SECRET_ID] Where SECRET_ID is displayed by the ``describe sa`` command line. The default service account does not have rights on the cluster (to create/delete pods), which is required by BuildBot's integration. You may give it this right by making it a cluster admin with .. code-block:: bash kubectl create clusterrolebinding service-account-admin \ --clusterrole=cluster-admin \ --serviceaccount default:default ``cert`` (optional) Client certificate and key to use to authenticate. This only works if ``txrequests`` is installed: .. code-block:: python cert=('/path/to/certificate.crt', '/path/to/certificate.key') ``verify`` (optional) Path to server certificate authenticate the server: .. code-block:: python verify='/path/to/kube_server_certificate.crt' When using the Google Kubernetes Engine (GKE), this certificate is available from the admin console, on the Cluster page. Verify that it is valid (i.e. no copy/paste errors) with ``openssl verify PATH_TO_PEM``. ``namespace`` (optional defaults to ``"default"`` default namespace to use if the latent worker do not provide one already. .. py:class:: buildbot.util.kubeclientservice.KubeInClusterConfigLoader .. py:class:: buildbot.plugins.util.KubeInClusterConfigLoader ``KubeInClusterConfigLoader`` ............................. Use :class:`KubeInClusterConfigLoader`, if your Buildbot master is itself located within the kubernetes cluster. 
In this case, you would associate a service account with the Buildbot master pod, and :class:`KubeInClusterConfigLoader` will get the credentials from that.
You must enter a valid credit card before you will be able to use EC2. Do that under 'Payment Method'. 4. Make sure you're signed up for EC2 by going to :menuselection:`Your Account --> Account Activity` and verifying EC2 is listed. Create an AMI ------------- Now you need to create an AMI and configure the master. You may need to run through this cycle a few times to get it working, but these instructions should get you started. Creating an AMI is out of the scope of this document. The `EC2 Getting Started Guide `_ is a good resource for this task. Here are a few additional hints. * When an instance of the image starts, it needs to automatically start a buildbot worker that connects to your master (to create a buildbot worker, :ref:`Creating-a-worker`; to make a daemon, :ref:`Launching-the-daemons`). * You may want to make an instance of the buildbot worker, configure it as a standard worker in the master (i.e., not as a latent worker), and test and debug it that way before you turn it into an AMI and convert to a latent worker in the master. * In order to avoid extra costs in case of master failure, you should configure the worker of the AMI with ``maxretries`` option (see :ref:`Worker-Options`) Also see `example systemd unit file example `_ Configure the Master with an :class:`~buildbot.worker.ec2.EC2LatentWorker` -------------------------------------------------------------------------- Now let's assume you have an AMI that should work with the :class:`~buildbot.worker.ec2.EC2LatentWorker`. It's now time to set up your buildbot master configuration. You will need some information from your AWS account: the `Access Key Id` and the `Secret Access Key`. If you've built the AMI yourself, you probably already are familiar with these values. 
If you have not, and someone has given you access to an AMI, these hints may help you find the necessary values: * While logged into your AWS account, find the "Access Identifiers" link (either on the left, or via :menuselection:`Your Account --> Access Identifiers`. * On the page, you'll see alphanumeric values for "Your Access Key Id:" and "Your Secret Access Key:". Make a note of these. Later on, we'll call the first one your ``identifier`` and the second one your ``secret_identifier``\. When creating an :class:`~buildbot.worker.ec2.EC2LatentWorker` in the buildbot master configuration, the first three arguments are required. The name and password are the first two arguments, and work the same as with normal workers. The next argument specifies the type of the EC2 virtual machine (available options as of this writing include ``m1.small``, ``m1.large``, ``m1.xlarge``, ``c1.medium``, and ``c1.xlarge``; see the EC2 documentation for descriptions of these machines). Here is the simplest example of configuring an EC2 latent worker. It specifies all necessary remaining values explicitly in the instantiation. .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.EC2LatentWorker('bot1', 'sekrit', 'm1.large', ami='ami-12345', identifier='publickey', secret_identifier='privatekey' keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', ) ] The ``ami`` argument specifies the AMI that the master should start. The ``identifier`` argument specifies the AWS `Access Key Id`, and the ``secret_identifier`` specifies the AWS `Secret Access Key`\. Both the AMI and the account information can be specified in alternate ways. .. note:: Whoever has your ``identifier`` and ``secret_identifier`` values can request AWS work charged to your account, so these values need to be carefully protected. Another way to specify these access keys is to put them in a separate file. Buildbot supports the standard AWS credentials file. 
You can then make the access privileges stricter for this separate file, and potentially let more people read your main configuration file. If your master is running in EC2, you can also use IAM roles for EC2 to delegate permissions. ``keypair_name`` and ``security_name`` allow you to specify different names for these AWS EC2 values. You can make an :file:`.aws` directory in the home folder of the user running the buildbot master. In that directory, create a file called :file:`credentials`. The format of the file should be as follows, replacing ``identifier`` and ``secret_identifier`` with the credentials obtained before. .. code-block:: python [default] aws_access_key_id = identifier aws_secret_access_key = secret_identifier If you are using IAM roles, no config file is required. Then you can instantiate the worker as follows. .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.EC2LatentWorker('bot1', 'sekrit', 'm1.large', ami='ami-12345', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', ) ] Previous examples used a particular AMI. If the Buildbot master will be deployed in a process-controlled environment, it may be convenient to specify the AMI more flexibly. Rather than specifying an individual AMI, specify one or two AMI filters. In all cases, the AMI that sorts last by its location (the S3 bucket and manifest name) will be preferred. One available filter is to specify the acceptable AMI owners, by AWS account number (the 12 digit number, usually rendered in AWS with hyphens like "1234-5678-9012", should be entered as in integer). .. 
code-block:: python from buildbot.plugins import worker bot1 = worker.EC2LatentWorker('bot1', 'sekrit', 'm1.large', valid_ami_owners=[11111111111, 22222222222], identifier='publickey', secret_identifier='privatekey', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', ) The other available filter is to provide a regular expression string that will be matched against each AMI's location (the S3 bucket and manifest name). .. code-block:: python from buildbot.plugins import worker bot1 = worker.EC2LatentWorker( 'bot1', 'sekrit', 'm1.large', valid_ami_location_regex=r'buildbot\-.*/image.manifest.xml', identifier='publickey', secret_identifier='privatekey', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', ) The regular expression can specify a group, which will be preferred for the sorting. Only the first group is used; subsequent groups are ignored. .. code-block:: python from buildbot.plugins import worker bot1 = worker.EC2LatentWorker( 'bot1', 'sekrit', 'm1.large', valid_ami_location_regex=r'buildbot\-.*\-(.*)/image.manifest.xml', identifier='publickey', secret_identifier='privatekey', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', ) If the group can be cast to an integer, it will be. This allows 10 to sort after 1, for instance. .. code-block:: python from buildbot.plugins import worker bot1 = worker.EC2LatentWorker( 'bot1', 'sekrit', 'm1.large', valid_ami_location_regex=r'buildbot\-.*\-(\d+)/image.manifest.xml', identifier='publickey', secret_identifier='privatekey', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', ) In addition to using the password as a handshake between the master and the worker, you may want to use a firewall to assert that only machines from a specific IP can connect as workers. This is possible with AWS EC2 by using the Elastic IP feature. 
To configure, generate a Elastic IP in AWS, and then specify it in your configuration using the ``elastic_ip`` argument. .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.EC2LatentWorker('bot1', 'sekrit', 'm1.large', 'ami-12345', identifier='publickey', secret_identifier='privatekey', elastic_ip='208.77.188.166', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', ) ] One other way to configure a worker is by settings AWS tags. They can for example be used to have a more restrictive security `IAM `_ policy. To get Buildbot to tag the latent worker specify the tag keys and values in your configuration using the ``tags`` argument. .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.EC2LatentWorker('bot1', 'sekrit', 'm1.large', 'ami-12345', identifier='publickey', secret_identifier='privatekey', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', tags={'SomeTag': 'foo'}) ] If the worker needs access to additional AWS resources, you can also enable your workers to access them via an EC2 instance profile. To use this capability, you must first create an instance profile separately in AWS. Then specify its name on EC2LatentWorker via instance_profile_name. .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.EC2LatentWorker('bot1', 'sekrit', 'm1.large', ami='ami-12345', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', instance_profile_name='my_profile' ) ] You may also supply your own boto3.Session object to allow for more flexible session options (ex. cross-account) To use this capability, you must first create a boto3.Session object. Then provide it to EC2LatentWorker via ``session`` argument. .. 
code-block:: python import boto3 from buildbot.plugins import worker session = boto3.session.Session() c['workers'] = [ worker.EC2LatentWorker('bot1', 'sekrit', 'm1.large', ami='ami-12345', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', session=session ) ] The :class:`~buildbot.worker.ec2.EC2LatentWorker` supports all other configuration from the standard :class:`Worker`. The ``missing_timeout`` and ``notify_on_missing`` specify how long to wait for an EC2 instance to attach before considering the attempt to have failed, and email addresses to alert, respectively. ``missing_timeout`` defaults to 20 minutes. Volumes -------------- If you want to attach existing volumes to an ec2 latent worker, use the volumes attribute. This mechanism can be valuable if you want to maintain state on a conceptual worker across multiple start/terminate sequences. ``volumes`` expects a list of (volume_id, mount_point) tuples to attempt attaching when your instance has been created. If you want to attach new ephemeral volumes, use the the block_device_map attribute. This follows the AWS API syntax, essentially acting as a passthrough. The only distinction is that the volumes default to deleting on termination to avoid leaking volume resources when workers are terminated. See boto documentation for further details. .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.EC2LatentWorker('bot1', 'sekrit', 'm1.large', ami='ami-12345', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', block_device_map= [ { "DeviceName": "/dev/xvdb", "Ebs" : { "VolumeType": "io1", "Iops": 1000, "VolumeSize": 100 } } ] ) ] VPC Support -------------- If you are managing workers within a VPC, your worker configuration must be modified from above. You must specify the id of the subnet where you want your worker placed. You must also specify security groups created within your VPC as opposed to classic EC2 security groups. 
This can be done by passing the ids of the vpc security groups. Note, when using a VPC, you can not specify classic EC2 security groups (as specified by security_name). .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.EC2LatentWorker('bot1', 'sekrit', 'm1.large', ami='ami-12345', keypair_name='latent_buildbot_worker', subnet_id='subnet-12345', security_group_ids=['sg-12345','sg-67890'] ) ] Spot instances -------------- If you would prefer to use spot instances for running your builds, you can accomplish that by passing in a True value to the ``spot_instance`` parameter to the :class:`~buildbot.worker.ec2.EC2LatentWorker` constructor. Additionally, you may want to specify ``max_spot_price`` and ``price_multiplier`` in order to limit your builds' budget consumption. .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.EC2LatentWorker('bot1', 'sekrit', 'm1.large', 'ami-12345', region='us-west-2', identifier='publickey', secret_identifier='privatekey', elastic_ip='208.77.188.166', keypair_name='latent_buildbot_worker', security_name='latent_buildbot_worker', placement='b', spot_instance=True, max_spot_price=0.09, price_multiplier=1.15, product_description='Linux/UNIX') ] This example would attempt to create a m1.large spot instance in the us-west-2b region costing no more than $0.09/hour. The spot prices for 'Linux/UNIX' spot instances in that region over the last 24 hours will be averaged and multiplied by the ``price_multiplier`` parameter, then a spot request will be sent to Amazon with the above details. If the multiple exceeds the ``max_spot_price``, the bid price will be the ``max_spot_price``. Either ``max_spot_price`` or ``price_multiplier``, but not both, may be None. If ``price_multiplier`` is None, then no historical price information is retrieved; the bid price is simply the specified ``max_spot_price``. 
If the ``max_spot_price`` is None, then the multiple of the historical average spot prices is used as the bid price with no limit. buildbot-3.4.0/master/docs/manual/configuration/workers-libvirt.rst000066400000000000000000000203431413250514000255650ustar00rootroot00000000000000.. -*- rst -*- .. index:: libvirt Workers; libvirt .. bb:worker:: LibVirtWorker Libvirt ======= .. @cindex LibVirtWorker .. py:class:: buildbot.worker.libvirt.LibVirtWorker `libvirt `_ is a virtualization API for interacting with the virtualization capabilities of recent versions of Linux and other OSes. It is LGPL and comes with a stable C API, and Python bindings. This means we now have an API which when tied to buildbot allows us to have workers that run under Xen, QEMU, KVM, LXC, OpenVZ, User Mode Linux, VirtualBox and VMWare. The libvirt code in Buildbot was developed against libvirt 0.7.5 on Ubuntu Lucid. It is used with KVM to test Python code on VMs, but obviously isn't limited to that. Each build is run on a new VM, images are temporary and thrown away after each build. This document will guide you through setup of a libvirt latent worker: .. contents:: :depth: 1 :local: Setting up libvirt ------------------ We won't show you how to set up libvirt as it is quite different on each platform, but there are a few things you should keep in mind. * If you are using the system libvirt (libvirt and buildbot master are on same server), your buildbot master user will need to be in the libvirtd group. * If libvirt and buildbot master are on different servers, the user connecting to libvirt over ssh will need to be in the libvirtd group. Also need to setup authorization via ssh-keys (without password prompt). * If you are using KVM, your buildbot master user will need to be in the KVM group. * You need to think carefully about your virtual network *first*. Will NAT be enough? What IP will my VMs need to connect to for connecting to the master? 
Configuring your base image --------------------------- You need to create a base image for your builds that has everything needed to build your software. You need to configure the base image with a buildbot worker that is configured to connect to the master on boot. Because this image may need updating a lot, we strongly suggest scripting its creation. If you want to have multiple workers using the same base image it can be annoying to duplicate the image just to change the buildbot credentials. One option is to use libvirt's DHCP server to allocate an identity to the worker: DHCP sets a hostname, and the worker takes its identity from that. Doing all this is really beyond the scope of the manual, but there is a :contrib-src:`vmbuilder ` script and a :contrib-src:`network.xml ` file to create such a DHCP server in :contrib-src:`master/contrib/` (:ref:`Contrib-Scripts`) that should get you started: .. code-block:: bash sudo apt-get install ubuntu-vm-builder sudo contrib/libvirt/vmbuilder Should create an :file:`ubuntu/` folder with a suitable image in it. .. code-block:: none virsh net-define contrib/libvirt/network.xml virsh net-start buildbot-network Should set up a KVM compatible libvirt network for your buildbot VM's to run on. Configuring your Master ----------------------- If you want to add a simple on demand VM to your setup, you only need the following. We set the username to ``minion1``, the password to ``sekrit``. The base image is called ``base_image`` and a copy of it will be made for the duration of the VM's life. That copy will be thrown away every time a build is complete. .. code-block:: python from buildbot.plugins import worker, util c['workers'] = [ worker.LibVirtWorker('minion1', 'sekrit', uri="qemu:///session", hd_image='/home/buildbot/images/minion1', base_image='/home/buildbot/images/base_image') ] You can use virt-manager to define ``minion1`` with the correct hardware. If you don't, buildbot won't be able to find a VM to start. 
:class:`LibVirtWorker` accepts the following arguments: ``name`` Both a buildbot username and the name of the virtual machine. ``password`` A password for the buildbot to login to the master with. ``connection`` :class:`Connection` instance wrapping connection to libvirt. (deprecated, use ``uri``). ``hd_image`` The path to a libvirt disk image, normally in qcow2 format when using KVM. ``base_image`` If given a base image, buildbot will clone it every time it starts a VM. This means you always have a clean environment to do your build in. ``uri`` The URI of the connection to libvirt. ``masterFQDN`` (optional, defaults to ``socket.getfqdn()``) Address of the master the worker should connect to. Use if you master machine does not have proper fqdn. This value is passed to the libvirt image via domain metadata. ``xml`` If a VM isn't predefined in virt-manager, then you can instead provide XML like that used with ``virsh define``. The VM will be created automatically when needed, and destroyed when not needed any longer. .. note:: The ``hd_image`` and ``base_image`` must be on same machine with buildbot master. Connection to master -------------------- If ``xml`` configuration key is not provided, then Buildbot will set libvirt metadata for the domain. It will contain the following XML element: ````. Here ``username``, ``password`` and ``master`` are the name of the worker, password to use for connection and the FQDN of the master. The libvirt metadata will be placed in the XML namespace ``buildbot=http://buildbot.net/``. Configuring Master to use libvirt on remote server --------------------------------------------------- If you want to use libvirt on remote server configure remote libvirt server and buildbot server following way. 1. Define user to connect to remote machine using ssh. Configure connection of such user to remote libvirt server (see https://wiki.libvirt.org/page/SSHSetup) without password prompt. 2. 
Add user to libvirtd group on remote libvirt server ``sudo usermod -G libvirtd -a ``. Configure remote libvirt server: 1. Create virtual machine for buildbot and configure it. 2. Change virtual machine image file to new name, which will be used as temporary image and deleted after virtual machine stops. Execute command ``sudo virsh edit ``. In xml file locate ``devices/disk/source`` and change file path to new name. The file must not be exists, it will create via hook script. 3. Add hook script to ``/etc/libvirt/hooks/qemu`` to recreate VM image each start: .. code-block:: python #!/usr/bin/python # Script /etc/libvirt/hooks/qemu # Don't forget to execute service libvirt-bin restart # Also see https://www.libvirt.org/hooks.html # This script make clean VM for each start using base image import os import subprocess import sys images_path = '/var/lib/libvirt/images/' # build-vm - VM name in virsh list --all # vm_base_image.qcow2 - base image file name, must exist in path /var/lib/libvirt/images/ # vm_temp_image.qcow2 - temporary image. Must not exist in path /var/lib/libvirt/images/, but # defined in VM config file domains = { 'build-vm' : ['vm_base_image.qcow2', 'vm_temp_image.qcow2'], } def delete_image_clone(vir_domain): if vir_domain in domains: domain = domains[vir_domain] os.remove(images_path + domain[1]) def create_image_clone(vir_domain): if vir_domain in domains: domain = domains[vir_domain] cmd = ['/usr/bin/qemu-img', 'create', '-b', images_path + domain[0], '-f', 'qcow2', images_path + domain[1]] subprocess.call(cmd) if __name__ == "__main__": vir_domain, action = sys.argv[1:3] if action in ["prepare"]: create_image_clone(vir_domain) if action in ["release"]: delete_image_clone(vir_domain) Configure buildbot server: 1. On buildbot server in virtual environment install libvirt-python package: ``pip install libvirt-python`` 2. Create worker using remote ssh connection. .. 
code-block:: python from buildbot.plugins import worker, util c['workers'] = [ worker.LibVirtWorker( 'minion1', 'sekrit', util.Connection("qemu+ssh://@:/session"), '/home/buildbot/images/minion1') ] buildbot-3.4.0/master/docs/manual/configuration/workers-openstack.rst000066400000000000000000000204501413250514000261000ustar00rootroot00000000000000.. -*- rst -*- .. bb:worker:: OpenStackLatentWorker OpenStack ========= .. @cindex OpenStackLatentWorker .. py:class:: buildbot.worker.openstack.OpenStackLatentWorker `OpenStack `_ is a series of interconnected components that facilitates managing compute, storage, and network resources in a data center. It is available under the Apache License and has a REST interface along with a Python client. This document will guide you through setup of an OpenStack latent worker: .. contents:: :depth: 1 :local: Install dependencies -------------------- OpenStackLatentWorker requires python-novaclient to work, you can install it with pip install python-novaclient. Get an Account in an OpenStack cloud ------------------------------------ Setting up OpenStack is outside the domain of this document. There are four account details necessary for the Buildbot master to interact with your OpenStack cloud: username, password, a tenant name, and the auth URL to use. Create an Image --------------- OpenStack supports a large number of image formats. OpenStack maintains a short list of prebuilt images; if the desired image is not listed, The `OpenStack Compute Administration Manual `_ is a good resource for creating new images. You need to configure the image with a buildbot worker to connect to the master on boot. Configure the Master with an OpenStackLatentWorker -------------------------------------------------- With the configured image in hand, it is time to configure the buildbot master to create OpenStack instances of it. You will need the aforementioned account details. 
These are the same details set in either environment variables or passed as options to an OpenStack client. :class:`OpenStackLatentWorker` accepts the following arguments: ``name`` The worker name. ``password`` A password for the worker to login to the master with. ``flavor`` A string containing the flavor name or UUID to use for the instance. ``image`` A string containing the image name or UUID to use for the instance. ``os_username`` ``os_password`` ``os_tenant_name`` ``os_user_domain`` ``os_project_domain`` ``os_auth_url`` The OpenStack authentication needed to create and delete instances. These are the same as the environment variables with uppercase names of the arguments. ``os_auth_args`` Arguments passed directly to keystone. If this is specified, other authentication parameters (see above) are ignored. You can use ``auth_type`` to specify auth plugin to load. See `OpenStack documentation ` for more information. Usually this should contain ``auth_url``, ``username``, ``password``, ``project_domain_name`` and ``user_domain_name``. ``block_devices`` A list of dictionaries. Each dictionary specifies a block device to set up during instance creation. The values support using properties from the build and will be rendered when the instance is started. Supported keys ``uuid`` (required): The image, snapshot, or volume UUID. ``volume_size`` (optional): Size of the block device in GiB. If not specified, the minimum size in GiB to contain the source will be calculated and used. ``device_name`` (optional): defaults to ``vda``. The name of the device in the instance; e.g. vda or xda. ``source_type`` (optional): defaults to ``image``. The origin of the block device. Valid values are ``image``, ``snapshot``, or ``volume``. ``destination_type`` (optional): defaults to ``volume``. Destination of block device: ``volume`` or ``local``. ``delete_on_termination`` (optional): defaults to ``True``. Controls if the block device will be deleted when the instance terminates. 
``boot_index`` (optional): defaults to ``0``. Integer used for boot order. ``meta`` A dictionary of string key-value pairs to pass to the instance. These will be available under the ``metadata`` key from the metadata service. ``nova_args`` (optional) A dict that will be appended to the arguments when creating a VM. Buildbot uses the OpenStack Nova version 2 API by default (see client_version). ``client_version`` (optional) A string containing the Nova client version to use. Defaults to ``2``. Supports using ``2.X``, where X is a micro-version. Use ``1.1`` for the previous, deprecated, version. If using ``1.1``, note that an older version of novaclient will be needed so it won't switch to using ``2``. ``region`` (optional) A string specifying region where to instantiate the worker. Here is the simplest example of configuring an OpenStack latent worker. .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.OpenStackLatentWorker('bot2', 'sekrit', flavor=1, image='8ac9d4a4-5e03-48b0-acde-77a0345a9ab1', os_username='user', os_password='password', os_tenant_name='tenant', os_auth_url='http://127.0.0.1:35357/v2.0') ] The ``image`` argument also supports being given a callable. The callable will be passed the list of available images and must return the image to use. The invocation happens in a separate thread to prevent blocking the build master when interacting with OpenStack. .. code-block:: python from buildbot.plugins import worker def find_image(images): # Sort oldest to newest. def key_fn(x): return x.created candidate_images = sorted(images, key=key_fn) # Return the oldest candidate image. return candidate_images[0] c['workers'] = [ worker.OpenStackLatentWorker('bot2', 'sekrit', flavor=1, image=find_image, os_username='user', os_password='password', os_tenant_name='tenant', os_auth_url='http://127.0.0.1:35357/v2.0') ] The ``block_devices`` argument is minimally manipulated to provide some defaults and passed directly to novaclient. 
The simplest example is an image that is converted to a volume and the instance boots from that volume. When the instance is destroyed, the volume will be terminated as well. .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.OpenStackLatentWorker('bot2', 'sekrit', flavor=1, image='8ac9d4a4-5e03-48b0-acde-77a0345a9ab1', os_username='user', os_password='password', os_tenant_name='tenant', os_auth_url='http://127.0.0.1:35357/v2.0', block_devices=[ {'uuid': '3f0b8868-67e7-4a5b-b685-2824709bd486', 'volume_size': 10}]) ] The ``nova_args`` can be used to specify additional arguments for the novaclient. For example network mappings, which is required if your OpenStack tenancy has more than one network, and default cannot be determined. Please refer to your OpenStack manual whether it wants net-id or net-name. Other useful parameters are ``availability_zone``, ``security_groups`` and ``config_drive``. Refer to `Python bindings to the OpenStack Nova API `_ for more information. It is found on section Servers, method create. .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.OpenStackLatentWorker('bot2', 'sekrit', flavor=1, image='8ac9d4a4-5e03-48b0-acde-77a0345a9ab1', os_username='user', os_password='password', os_tenant_name='tenant', os_auth_url='http://127.0.0.1:35357/v2.0', nova_args={ 'nics': [ {'net-id':'uid-of-network'} ]}) ] :class:`OpenStackLatentWorker` supports all other configuration from the standard :class:`Worker`. The ``missing_timeout`` and ``notify_on_missing`` specify how long to wait for an OpenStack instance to attach before considering the attempt to have failed and email addresses to alert, respectively. ``missing_timeout`` defaults to 20 minutes. buildbot-3.4.0/master/docs/manual/configuration/workers-upcloud.rst000066400000000000000000000067471413250514000256010ustar00rootroot00000000000000.. -*- rst -*- .. index:: Upcloud Workers; Upcloud .. 
bb:worker:: UpcloudLatentWorker UpCloud ======= .. @cindex UpcloudLatentWorker .. py:class:: buildbot.worker.upcloud.UpcloudLatentWorker `UpCloud `_ is a web service that allows you to start virtual machines in cloud. Please see their website for details, including costs. This document will guide you through setup of a UpCloud latent worker: .. contents:: :depth: 1 :local: Get an UpCloud Account ---------------------- To start off, to use the UpCloud latent worker, you need to sign up on UpCloud. 1. Go to https://www.upcloud.com/ and create an account. 2. Once you are logged into your account, create a sub-account for buildbot to use. You need to tick the box enabling it for API usage. You should disable the box enabling web interface. You should not use your primary account for safety and security reasons. Configure the Master with an :class:`~buildbot.worker.upcloud.UpcloudLatentWorker` ---------------------------------------------------------------------------------- Quick-start sample .. code-block:: python from buildbot.plugins import worker c['workers'].append(upcloud.UpcloudLatentWorker('upcloud-worker','pass', image='Debian GNU/Linux 9.3 (Stretch)', api_username="username", api_password="password", hostconfig = { "user_data":""" /usr/bin/apt-get update /usr/bin/apt-get install -y buildbot-slave /usr/bin/buildslave create-slave --umask=022 /buildslave buildbot.example.com upcloud-01 slavepass /usr/bin/buildslave start /buildslave """})) Complete example with default values .. code-block:: python from buildbot.plugins import worker c['workers'].append(upcloud.UpcloudLatentWorker('upcloud-worker','pass', image='Debian GNU/Linux 9.3 (Stretch)', api_username="username", api_password="password", hostconfig = { "zone":"de-fra1", "plan":"1xCPU-1GB", "hostname":"hostname", "ssh_keys":["ssh-rsa ...."], "os_disk_size":10, "core_number":1, "memory_amount":512, "user_data":"" })) The ``image`` argument specifies the name of image in the image library. 
UUID is not currently supported. The ``api_username`` and ``api_password`` are for the sub-account you created on UpCloud. ``hostconfig`` can be used to set various aspects about the created host. - ``zone`` is a valid execution zone in UpCloud environment, check their `API documentation ` for valid values. - ``plan`` is a valid pre-configured machine specification, or custom if you want to define your own. See their API documentation for valid values - ``user_data`` field is used to specify startup script to run on the host. - ``hostname`` specifies the hostname for the worker. Defaults to name of the worker. - ``ssh_keys`` specifies ssh key(s) to add for root account. Some images support only one SSH key. At the time of writing, only RSA keys are supported. - ``os_disk_size`` specifies size of the system disk. - ``core_number`` can be used to specify number of cores, when plan is custom. - ``memory_amount`` can be used to specify memory in megabytes, when plan is custom. - ``user_data`` can be used to specify either URL to script, or script to execute when machine is started. Note that by default buildbot retains latent workers for 10 minutes, see ``build_wait_time`` on how to change this. buildbot-3.4.0/master/docs/manual/configuration/workers.rst000066400000000000000000000265401413250514000241210ustar00rootroot00000000000000.. -*- rst -*- .. _Workers: .. bb:cfg:: workers Workers ------- The :bb:cfg:`workers` configuration key specifies a list of known workers. In the common case, each worker is defined by an instance of the :class:`buildbot.worker.Worker` class. It represents a standard, manually started machine that will try to connect to the Buildbot master as a worker. Buildbot also supports "on-demand", or latent, workers, which allow Buildbot to dynamically start and stop worker instances. .. contents:: :depth: 1 :local: Defining Workers ~~~~~~~~~~~~~~~~ A :class:`Worker` instance is created with a ``workername`` and a ``workerpassword``. 
These are the same two values that need to be provided to the worker administrator when they create the worker. The ``workername`` must be unique, of course. The password exists to prevent evildoers from interfering with Buildbot by inserting their own (broken) workers into the system and thus displacing the real ones. Password may be a :ref:`Secret`. Workers with an unrecognized ``workername`` or a non-matching password will be rejected when they attempt to connect, and a message describing the problem will be written to the log file (see :ref:`Logfiles`). A configuration for two workers would look like: .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.Worker('bot-solaris', 'solarispasswd'), worker.Worker('bot-bsd', 'bsdpasswd'), ] Worker Options ~~~~~~~~~~~~~~ Properties ++++++++++ .. index:: Properties; from worker :class:`Worker` objects can also be created with an optional ``properties`` argument, a dictionary specifying properties that will be available to any builds performed on this worker. For example: .. code-block:: python c['workers'] = [ worker.Worker('bot-solaris', 'solarispasswd', properties={'os': 'solaris'}), ] :class:`Worker` properties have priority over other sources (:class:`Builder`, :class:`Scheduler`, etc.). You may use the ``defaultProperties`` parameter that will only be added to :ref:`Build-Properties` if they are not already set by :ref:`another source `: .. code-block:: python c['workers'] = [ worker.Worker('fast-bot', 'fast-passwd', defaultProperties={'parallel_make': 10}), ] :class:`Worker` collects and exposes ``/etc/os-release`` fields for :ref:`interpolation `. These can be used to determine details about the running operating system, such as distribution and version. See https://www.linux.org/docs/man5/os-release.html for details on possible fields. Each field is imported with ``os_`` prefix and in lower case. 
``os_id``, ``os_id_like``, ``os_version_id`` and ``os_version_codename`` are always set, but can be null. Limiting Concurrency ++++++++++++++++++++ .. index:: Workers; limiting concurrency The :class:`Worker` constructor can also take an optional ``max_builds`` parameter to limit the number of builds that it will execute simultaneously: .. code-block:: python c['workers'] = [ worker.Worker('bot-linux', 'linuxpassword', max_builds=2), ] .. note:: In :ref:`worker-for-builders` concept only one build from the same builder would run on the worker. Master-Worker TCP Keepalive +++++++++++++++++++++++++++ By default, the buildmaster sends a simple, non-blocking message to each worker every hour. These keepalives ensure that traffic is flowing over the underlying TCP connection, allowing the system's network stack to detect any problems before a build is started. The interval can be modified by specifying the interval in seconds using the ``keepalive_interval`` parameter of :class:`Worker` (defaults to 3600): .. code-block:: python c['workers'] = [ worker.Worker('bot-linux', 'linuxpasswd', keepalive_interval=3600) ] The interval can be set to ``None`` to disable this functionality altogether. .. _When-Workers-Go-Missing: When Workers Go Missing +++++++++++++++++++++++ Sometimes, the workers go away. One very common reason for this is when the worker process is started once (manually) and left running, but then later the machine reboots and the process is not automatically restarted. If you'd like to have the administrator of the worker (or other people) be notified by email when the worker has been missing for too long, just add the ``notify_on_missing=`` argument to the :class:`Worker` definition. This value can be a single email address, or a list of addresses: .. 
code-block:: python c['workers'] = [ worker.Worker('bot-solaris', 'solarispasswd', notify_on_missing='bob@example.com') ] By default, this will send an email when the worker has been disconnected for more than one hour. Only one email per connection-loss event will be sent. To change the timeout, use ``missing_timeout=`` and give it a number of seconds (the default is 3600). You can have the buildmaster send an email to multiple recipients by providing a list of addresses instead of a single one: .. code-block:: python c['workers'] = [ worker.Worker('bot-solaris', 'solarispasswd', notify_on_missing=['bob@example.com', 'alice@example.org'], missing_timeout=300) # notify after 5 minutes ] The email sent this way will use a :class:`MailNotifier` (see :bb:reporter:`MailNotifier`) status target, if one is configured. This provides a way for you to control the *from* address of the email, as well as the relayhost (aka *smarthost*) to use as an SMTP server. If no :class:`MailNotifier` is configured on this buildmaster, the worker-missing emails will be sent using a default configuration. Note that if you want to have a :class:`MailNotifier` for worker-missing emails but not for regular build emails, just create one with ``builders=[]``, as follows: .. code-block:: python from buildbot.plugins import status, worker m = status.MailNotifier(fromaddr='buildbot@localhost', builders=[], relayhost='smtp.example.org') c['reporters'].append(m) c['workers'] = [ worker.Worker('bot-solaris', 'solarispasswd', notify_on_missing='bob@example.com') ] .. _Worker-states: Workers States ++++++++++++++ There are some times when a worker misbehaves because of issues with its configuration. In those cases, you may want to pause the worker, or maybe completely shut it down. There are three actions that you may take (in the worker's web page *Actions* dialog): - *Pause*: If a worker is paused, it won't accept new builds. The action of pausing a worker will not affect any ongoing build. 
- *Graceful Shutdown*: If a worker is in graceful shutdown mode, it won't accept new builds, but will finish the current builds. When all of its build are finished, the :command:`buildbot-worker` process will terminate. - *Force Shutdown*: If a worker is in force shutdown mode, it will terminate immediately, and the build it was currently doing will be put to retry state. Those actions will put the worker in either of two states: - *paused*: the worker is paused if it is connected but doesn't accept new builds. - *graceful*: the worker is graceful if it doesn't accept new builds, and will shutdown when builds are finished. A worker might not be able to accept a job for a period of time if buildbot detects a misbehavior. This is called the *quarantine timer*. Quarantine timer is an exponential back-off mechanism for workers. This prevents a misbehaving worker from eating the build queue by quickly finishing builds in ``EXCEPTION`` state. When misbehavior is detected, the timer will pause the worker for 10 seconds, and then the time will double with each misbehavior detection until the worker finishes a build. The first case of misbehavior is for a latent worker to not start properly. The second case of misbehavior is for a build to end with an ``EXCEPTION`` status. Pausing and unpausing a worker will force it to leave quarantine immediately. The quarantine timeout will not be reset until the worker finishes a build. Worker states are stored in the database, can be queried via :ref:`REST_API`, and are visible in the UI's workers page. .. index:: Workers; local .. _Local-Workers: Local Workers ~~~~~~~~~~~~~ For smaller setups, you may want to just run the workers on the same machine as the master. To simplify the maintenance, you may even want to run them in the same process. This is what LocalWorker is for. Instead of configuring a ``worker.Worker``, you have to configure a ``worker.LocalWorker``. 
As the worker is running on the same process, password is not necessary. You can run as many local workers as your machine's CPU and memory allows. A configuration for two workers would look like: .. code-block:: python from buildbot.plugins import worker c['workers'] = [ worker.LocalWorker('bot1'), worker.LocalWorker('bot2'), ] In order to use local workers you need to have ``buildbot-worker`` package installed. .. index:: Workers; latent .. _Latent-Workers: Latent Workers ~~~~~~~~~~~~~~ The standard Buildbot model has workers started manually. The previous section described how to configure the master for this approach. Another approach is to let the Buildbot master start workers when builds are ready, on-demand. Thanks to services such as Amazon Web Services' Elastic Compute Cloud ("AWS EC2"), this is relatively easy to set up, and can be very useful for some situations. The workers that are started on-demand are called "latent" workers. You can find the list of :ref:`Supported-Latent-Workers` below. .. _Common-Latent-Workers-Options: Common Options ++++++++++++++ The following options are available for all latent workers. ``build_wait_timeout`` This option allows you to specify how long a latent worker should wait after a build for another build before it shuts down. It defaults to 10 minutes. If this is set to 0, then the worker will be shut down immediately. If it is less than 0, it will be shut down only when shutting down master. .. _Supported-Latent-Workers: Supported Latent Workers ++++++++++++++++++++++++ As of time of writing, Buildbot supports the following latent workers: .. toctree:: :maxdepth: 1 workers-ec2.rst workers-libvirt.rst workers-openstack.rst workers-docker.rst workers-upcloud.rst Dangers with Latent Workers +++++++++++++++++++++++++++ Any latent worker that interacts with a for-fee service, such as the :class:`~buildbot.worker.ec2.EC2LatentWorker`, brings significant risks. 
As already identified, the configuration will need access to account information that, if obtained by a criminal, can be used to charge services to your account. Also, bugs in the Buildbot software may lead to unnecessary charges. In particular, if the master neglects to shut down an instance for some reason, a virtual machine may be running unnecessarily, charging against your account. Manual and/or automatic (e.g. Nagios with a plugin using a library like boto) double-checking may be appropriate. A comparatively trivial note is that currently if two instances try to attach to the same latent worker, it is likely that the system will become confused. This should not occur, unless, for instance, you configure a normal worker to connect with the authentication of a latent buildbot. If this situation does occurs, stop all attached instances and restart the master. buildbot-3.4.0/master/docs/manual/configuration/www.rst000066400000000000000000001326221413250514000232500ustar00rootroot00000000000000.. bb:cfg:: www Web Server ---------- .. note:: As of Buildbot 0.9.0, the built-in web server replaces the old ``WebStatus`` plugin. Buildbot contains a built-in web server. This server is configured with the ``www`` configuration key, which specifies a dictionary with the following keys: ``port`` The TCP port on which to serve requests. It might be an integer or any string accepted by `serverFromString `_ (ex: "tcp:8010:interface=127.0.0.1" to listen on another interface). Note that SSL is not supported. To host Buildbot with SSL, use an HTTP proxy such as lighttpd, nginx, or Apache. If this is ``None`` (the default), then the master will not implement a web server. ``json_cache_seconds`` The number of seconds into the future at which an HTTP API response should expire. ``rest_minimum_version`` The minimum supported REST API version. Any versions less than this value will not be available. 
This can be used to ensure that no clients are depending on API versions that will soon be removed from Buildbot. ``plugins`` This key gives a dictionary of additional UI plugins to load, along with configuration for those plugins. These plugins must be separately installed in the Python environment, e.g., ``pip install buildbot-waterfall-view``. See :ref:`UI-Plugins`. For example: .. code-block:: python c['www'] = { 'plugins': {'waterfall_view': True} } ``default_page`` Configure the default landing page of the web server, for example, to forward directly to another plugin. For example: .. code-block:: python c['www']['default_page'] = 'console' ``debug`` If true, then debugging information will be output to the browser. This is best set to false (the default) on production systems, to avoid the possibility of information leakage. ``allowed_origins`` This gives a list of origins which are allowed to access the Buildbot API (including control via JSONRPC 2.0). It implements cross-origin request sharing (CORS), allowing pages at origins other than the Buildbot UI to use the API. Each origin is interpreted as filename match expression, with ``?`` matching one character and ``*`` matching anything. Thus ``['*']`` will match all origins, and ``['https://*.buildbot.net']`` will match secure sites under ``buildbot.net``. The Buildbot UI will operate correctly without this parameter; it is only useful for allowing access from other web applications. ``auth`` Authentication module to use for the web server. See :ref:`Web-Authentication`. ``avatar_methods`` List of methods that can be used to get avatar pictures to use for the web server. By default, Buildbot uses Gravatar to get images associated with each users, if you want to disable this you can just specify empty list: .. code-block:: python c['www'] = { 'avatar_methods': [] } You could also use the GitHub user avatar if GitHub authentication is enabled: .. 
code-block:: python c['www'] = { 'avatar_methods': [util.AvatarGitHub()] } .. py:class:: AvatarGitHub(github_api_endpoint=None, token=None, debug=False, verify=False) :param string github_api_endpoint: specify the github api endpoint if you work with GitHub Enterprise :param string token: a GitHub API token to execute all requests to the API authenticated. It is strongly recommended to use a API token since it increases GitHub API rate limits significantly :param string client_id: a GitHub OAuth client ID to use with client secret to execute all requests to the API authenticated in place of token :param string client_secret: a GitHub OAuth client secret to use with client ID above :param boolean debug: logs every requests and their response :param boolean verify: disable ssl verification for the case you use temporary self signed certificates on a GitHub Enterprise installation This class requires `txrequests`_ package to allow interaction with GitHub REST API. .. _txrequests: https://pypi.python.org/pypi/txrequests For use of corporate pictures, you can use LdapUserInfo, which can also act as an avatar provider. See :ref:`Web-Authentication`. ``logfileName`` Filename used for HTTP access logs, relative to the master directory. If set to ``None`` or the empty string, the content of the logs will land in the main :file:`twisted.log` log file. (Defaults to ``http.log``) ``logRotateLength`` The amount of bytes after which the :file:`http.log` file will be rotated. (Defaults to the same value as for the :file:`twisted.log` file, set in :file:`buildbot.tac`) ``maxRotatedFiles`` The amount of log files that will be kept when rotating (Defaults to the same value as for the :file:`twisted.log` file, set in :file:`buildbot.tac`) ``versions`` Custom component versions that you'd like to display on the About page. Buildbot will automatically prepend the versions of Python, twisted and Buildbot itself to the list. ``versions`` should be a list of tuples. For example: .. 
code-block:: python c['www'] = { # ... 'versions': [ ('master.cfg', '0.1'), ('OS', 'Ubuntu 14.04'), ] } The first element of a tuple stands for the name of the component, the second stands for the corresponding version. ``custom_templates_dir`` This directory will be parsed for custom angularJS templates to replace the one of the original website templates. You can use this to slightly customize buildbot look for your project, but to add any logic, you will need to create a full-blown plugin. If the directory string is relative, it will be joined to the master's basedir. Buildbot uses the jade file format natively (which has been renamed to 'pug' in the nodejs ecosystem), but you can also use HTML format if you prefer. Either ``*.jade`` files or ``*.html`` files can be used to override templates with the same name in the UI. On the regular nodejs UI build system, we use nodejs's pug module to compile jade into html. For custom_templates, we use the pypugjs interpreter to parse the jade templates, before sending them to the UI. ``pip install pypugjs`` is required to use jade templates. You can also override plugin's directives, but they have to be in another directory, corresponding to the plugin's name in its ``package.json``. For example: .. code-block:: none # replace the template whose source is in: # www/base/src/app/builders/build/build.tpl.jade build.jade # here we use a jade (aka pug) file # replace the template whose source is in # www/console_view/src/module/view/builders-header/console.tpl.jade console_view/console.html # here we use html format Known differences between nodejs's pug and pyjade: * quotes in attributes are not quoted (https://github.com/syrusakbary/pyjade/issues/132). This means you should use double quotes for attributes, e.g.: ``tr(ng-repeat="br in buildrequests | orderBy:'-submitted_at'")`` * pypugjs may have some differences but it is a maintained fork of pyjade. 
https://github.com/kakulukia/pypugjs ``change_hook_dialects`` See :ref:`Change-Hooks`. ``cookie_expiration_time`` This allows to define the timeout of the session cookie. Should be a `datetime.timedelta `_. Default is one week. .. code-block:: python import datetime c['www'] = { # ... 'cookie_expiration_time': datetime.timedelta(weeks=2) } ``ui_default_config`` Settings in the settings page are stored per browser. This configuration parameter allows to override the default settings for all your users. If a user already has changed a value from the default, this will have no effect to them. The settings page in the UI will tell you what to insert in your master.cfg to reproduce the configuration you have in your own browser. For example: .. code-block:: python c['www']['ui_default_config'] = { 'Builders.buildFetchLimit': 500, 'Workers.showWorkerBuilders': True, } ``ws_ping_interval`` Send websocket pings every ``ws_ping_interval`` seconds. This is useful to avoid websocket timeouts when using reverse proxies or CDNs. If the value is 0 (the default), pings are disabled. .. note:: The :bb:cfg:`buildbotURL` configuration value gives the base URL that all masters will use to generate links. The :bb:cfg:`www` configuration gives the settings for the webserver. In simple cases, the ``buildbotURL`` contains the hostname and port of the master, e.g., ``http://master.example.com:8010/``. In more complex cases, with multiple masters, web proxies, or load balancers, the correspondence may be less obvious. .. _UI-Plugins: UI plugins ~~~~~~~~~~ .. _WaterfallView: Waterfall View ++++++++++++++ Waterfall shows the whole Buildbot activity in a vertical time line. Builds are represented with boxes whose height vary according to their duration. Builds are sorted by builders in the horizontal axes, which allows you to see how builders are scheduled together. .. code-block:: bash pip install buildbot-waterfall-view .. 
code-block:: python c['www'] = { 'plugins': {'waterfall_view': True} } .. note:: Waterfall is the emblematic view of Buildbot Eight. It allowed to see the whole Buildbot activity very quickly. Waterfall however had big scalability issues, and larger installs had to disable the page in order to avoid tens of seconds master hang because of a big waterfall page rendering. The whole Buildbot Eight internal status API has been tailored in order to make Waterfall possible. This is not the case anymore with Buildbot Nine, which has a more generic and scalable :ref:`Data_API` and :ref:`REST_API`. This is the reason why Waterfall does not display the steps details anymore. However nothing is impossible. We could make a specific REST api available to generate all the data needed for waterfall on the server. Please step-in if you want to help improve the Waterfall view. .. _ConsoleView: Console View ++++++++++++++ Console view shows the whole Buildbot activity arranged by changes as discovered by :ref:`Change-Sources` vertically and builders horizontally. If a builder has no build in the current time range, it will not be displayed. If no change is available for a build, then it will generate a fake change according to the ``got_revision`` property. Console view will also group the builders by tags. When there are several tags defined per builders, it will first group the builders by the tag that is defined for most builders. Then given those builders, it will group them again in another tag cluster. In order to keep the UI usable, you have to keep your tags short! .. code-block:: bash pip install buildbot-console-view .. code-block:: python c['www'] = { 'plugins': {'console_view': True} } .. note:: Nine's Console View is the equivalent of Buildbot Eight's Console and tgrid views. Unlike Waterfall, we think it is now feature equivalent and even better, with its live update capabilities. 
Please submit an issue if you think there is an issue displaying your data, with screen shots of what happen and suggestion on what to improve. .. _GridView: Grid View +++++++++ Grid view shows the whole Buildbot activity arranged by builders vertically and changes horizontally. It is equivalent to Buildbot Eight's grid view. By default, changes on all branches are displayed but only one branch may be filtered by the user. Builders can also be filtered by tags. This feature is similar to the one in the builder list. .. code-block:: bash pip install buildbot-grid-view .. code-block:: python c['www'] = { 'plugins': {'grid_view': True} } .. _Badges: Badges ++++++ Buildbot badges plugin produces an image in SVG or PNG format with information about the last build for the given builder name. PNG generation is based on the CAIRO_ SVG engine, it requires a bit more CPU to generate. .. code-block:: bash pip install buildbot-badges .. code-block:: python c['www'] = { 'plugins': {'badges': {}} } You can the access your builder's badges using urls like ``http:///badges/.svg``. The default templates are very much configurable via the following options: .. code-block:: python { "left_pad" : 5, "left_text": "Build Status", # text on the left part of the image "left_color": "#555", # color of the left part of the image "right_pad" : 5, "border_radius" : 5, # Border Radius on flat and plastic badges # style of the template availables are "flat", "flat-square", "plastic" "style": "plastic", "template_name": "{style}.svg.j2", # name of the template "font_face": "DejaVu Sans", "font_size": 11, "color_scheme": { # color to be used for right part of the image "exception": "#007ec6", # blue "failure": "#e05d44", # red "retry": "#007ec6", # blue "running": "#007ec6", # blue "skipped": "a4a61d", # yellowgreen "success": "#4c1", # brightgreen "unknown": "#9f9f9f", # lightgrey "warnings": "#dfb317" # yellow } } Those options can be configured either using the plugin configuration: .. 
code-block:: python c['www'] = { 'plugins': {'badges': {"left_color": "#222"}} } or via the URL arguments like ``http:///badges/.svg?left_color=222``. Custom templates can also be specified in a ``template`` directory nearby the ``master.cfg``. The badgeio template ^^^^^^^^^^^^^^^^^^^^ A badges template was developed to standardize upon a consistent "look and feel" across the usage of multiple CI/CD solutions, e.g.: use of Buildbot, Codecov.io, and Travis-CI. An example is shown below. .. image:: ../../_images/badges-badgeio.png To ensure the correct "look and feel", the following Buildbot configuration is needed: .. code-block:: python c['www'] = { 'plugins': { 'badges': { "left_pad": 0, "right_pad": 0, "border_radius": 3, "style": "badgeio" } } } .. note:: It is highly recommended to use only with SVG. .. _CAIRO: https://www.cairographics.org/ .. _Web-Authentication: Authentication plugins ~~~~~~~~~~~~~~~~~~~~~~ By default, Buildbot does not require people to authenticate in order to access control features in the web UI. To secure Buildbot, you will need to configure an authentication plugin. .. note:: To secure the Buildbot web interface, authorization rules must be provided via the 'authz' configuration. If you simply wish to lock down a Buildbot instance so that only read only access is permitted, you can restrict access to control endpoints to an unpopulated 'admin' role. For example: .. code-block:: python c['www']['authz'] = util.Authz(allowRules=[util.AnyControlEndpointMatcher(role="admins")], roleMatchers=[]) .. note:: As of Buildbot 0.9.4, user session is managed via a JWT_ token, using HS256_ algorithm. The session secret is stored in the database in the ``object_state`` table with ``name`` column being ``session_secret``. Please make sure appropriate access restriction is made to this database table. .. _JWT: https://en.wikipedia.org/wiki/JSON_Web_Token .. 
_HS256: https://pyjwt.readthedocs.io/en/latest/algorithms.html Authentication plugins are implemented as classes, and passed as the ``auth`` parameter to :bb:cfg:`www`. The available classes are described here: .. py:class:: buildbot.www.auth.NoAuth() This class is the default authentication plugin, which disables authentication. .. py:class:: buildbot.www.auth.UserPasswordAuth(users) :param users: list of ``("user","password")`` tuples, or a dictionary of ``{"user": "password", ..}`` Simple username/password authentication using a list of user/password tuples provided in the configuration file. .. code-block:: python from buildbot.plugins import util c['www'] = { # ... 'auth': util.UserPasswordAuth({"homer": "doh!"}), } .. py:class:: buildbot.www.auth.CustomAuth() This authentication class means to be overridden with a custom ``check_credentials`` method that gets username and password as arguments and check if the user can login. You may use it e.g. to check the credentials against an external database or file. .. code-block:: python from buildbot.plugins import util class MyAuth(util.CustomAuth): def check_credentials(self, user, password): if user == 'snow' and password == 'white': return True else: return False from buildbot.plugins import util c['www']['auth'] = MyAuth() .. py:class:: buildbot.www.auth.HTPasswdAuth(passwdFile) :param passwdFile: An :file:`.htpasswd` file to read This class implements simple username/password authentication against a standard :file:`.htpasswd` file. .. code-block:: python from buildbot.plugins import util c['www'] = { # ... 'auth': util.HTPasswdAuth("my_htpasswd"), } .. py:class:: buildbot.www.oauth2.GoogleAuth(clientId, clientSecret) :param clientId: The client ID of your buildbot application :param clientSecret: The client secret of your buildbot application This class implements an authentication with Google_ single sign-on. 
You can look at the Google_ oauth2 documentation on how to register your Buildbot instance to the Google systems. The developer console will give you the two parameters you have to give to ``GoogleAuth``. Register your Buildbot instance with the ``BUILDBOT_URL/auth/login`` URL as the allowed redirect URI. Example: .. code-block:: python from buildbot.plugins import util c['www'] = { # ... 'auth': util.GoogleAuth("clientid", "clientsecret"), } In order to use this module, you need to install the Python ``requests`` module: .. code-block:: bash pip install requests .. _Google: https://developers.google.com/accounts/docs/OAuth2 .. py:class:: buildbot.www.oauth2.GitHubAuth(clientId, clientSecret) :param clientId: The client ID of your buildbot application :param clientSecret: The client secret of your buildbot application :param serverURL: The server URL if this is a GitHub Enterprise server :param apiVersion: The GitHub API version to use. One of ``3`` or ``4`` (V3/REST or V4/GraphQL). Defaults to 3. :param getTeamsMembership: When ``True`` fetch all team memberships for each of the organizations the user belongs to. The teams will be included in the user's groups as ``org-name/team-name``. :param debug: When ``True`` and using ``apiVersion=4`` show some additional log calls with the GraphQL queries and responses for debugging purposes. This class implements an authentication with GitHub_ single sign-on. It functions almost identically to the :py:class:`~buildbot.www.oauth2.GoogleAuth` class. Register your Buildbot instance with the ``BUILDBOT_URL/auth/login`` url as the allowed redirect URI. The user's email-address (for e.g. authorization) is set to the "primary" address set by the user in GitHub. When using group-based authorization, the user's groups are equal to the names of the GitHub organizations the user is a member of. Example: .. code-block:: python from buildbot.plugins import util c['www'] = { # ... 
'auth': util.GitHubAuth("clientid", "clientsecret"), } Example for Enterprise GitHub: .. code-block:: python from buildbot.plugins import util c['www'] = { # ... 'auth': util.GitHubAuth("clientid", "clientsecret", "https://git.corp.mycompany.com"), } An example on fetching team membership could be: .. code-block:: python from buildbot.plugins import util c['www'] = { # ... 'auth': util.GitHubAuth("clientid", "clientsecret", apiVersion=4, getTeamsMembership=True), 'authz': util.Authz( allowRules=[ util.AnyControlEndpointMatcher(role="core-developers"), ], roleMatchers=[ util.RolesFromGroups(groupPrefix='buildbot/') ] ) } If the ``buildbot`` organization had two teams, for example, 'core-developers' and 'contributors', with the above example, any user belonging to those teams would be granted the roles matching those team names. In order to use this module, you need to install the Python ``requests`` module: .. code-block:: bash pip install requests .. _GitHub: https://developer.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow .. py:class:: buildbot.www.oauth2.GitLabAuth(instanceUri, clientId, clientSecret) :param instanceUri: The URI of your GitLab instance :param clientId: The client ID of your buildbot application :param clientSecret: The client secret of your buildbot application This class implements an authentication with GitLab_ single sign-on. It functions almost identically to the :py:class:`~buildbot.www.oauth2.GoogleAuth` class. Register your Buildbot instance with the ``BUILDBOT_URL/auth/login`` URL as the allowed redirect URI. Example: .. code-block:: python from buildbot.plugins import util c['www'] = { # ... 'auth': util.GitLabAuth("https://gitlab.com", "clientid", "clientsecret"), } In order to use this module, you need to install the Python ``requests`` module: .. code-block:: bash pip install requests .. _GitLab: http://doc.gitlab.com/ce/integration/oauth_provider.html .. 
py:class:: buildbot.www.oauth2.BitbucketAuth(clientId, clientSecret) :param clientId: The client ID of your buildbot application :param clientSecret: The client secret of your buildbot application This class implements an authentication with Bitbucket_ single sign-on. It functions almost identically to the :py:class:`~buildbot.www.oauth2.GoogleAuth` class. Register your Buildbot instance with the ``BUILDBOT_URL/auth/login`` URL as the allowed redirect URI. Example: .. code-block:: python from buildbot.plugins import util c['www'] = { # ... 'auth': util.BitbucketAuth("clientid", "clientsecret"), } In order to use this module, you need to install the Python ``requests`` module: .. code-block:: bash pip install requests .. _Bitbucket: https://confluence.atlassian.com/bitbucket/oauth-on-bitbucket-cloud-238027431.html .. py:class:: buildbot.www.auth.RemoteUserAuth :param header: header to use to get the username (defaults to ``REMOTE_USER``) :param headerRegex: regular expression to get the username from header value (defaults to ``"(?P[^ @]+)@(?P[^ @]+)")``\. Note that you need at least to specify a ``?P`` regular expression named group. :param userInfoProvider: user info provider; see :ref:`User-Information` If the Buildbot UI is served through a reverse proxy that supports HTTP-based authentication (like apache or lighttpd), it's possible to tell Buildbot to trust the web server and get the username from the request headers. The administrator must make sure that it's impossible to get access to Buildbot in any way other than through the frontend. Usually this means that Buildbot should listen for incoming connections only on localhost (or on some firewall-protected port). The reverse proxy must require HTTP authentication to access Buildbot pages (using any source for credentials, such as htpasswd, PAM, LDAP, Kerberos). Example: .. code-block:: python from buildbot.plugins import util c['www'] = { # ... 
'auth': util.RemoteUserAuth(), } A corresponding Apache configuration example: .. code-block:: none AuthType Kerberos AuthName "Buildbot login via Kerberos" KrbMethodNegotiate On KrbMethodK5Passwd On KrbAuthRealms <> KrbVerifyKDC off KrbServiceName Any Krb5KeyTab /etc/krb5/krb5.keytab KrbSaveCredentials Off require valid-user Order allow,deny Satisfy Any #] SSO RewriteEngine On RewriteCond %{LA-U:REMOTE_USER} (.+)$ RewriteRule . - [E=RU:%1,NS] RequestHeader set REMOTE_USER %{RU}e The advantage of this sort of authentication is that it is uses a proven and fast implementation for authentication. The problem is that the only information that is passed to Buildbot is the username, and there is no way to pass any other information like user email, user groups, etc. That information can be very useful to the mailstatus plugin, or for authorization processes. See :ref:`User-Information` for a mechanism to supply that information. .. _User-Information: User Information ~~~~~~~~~~~~~~~~ For authentication mechanisms which cannot provide complete information about a user, Buildbot needs another way to get user data. This is useful both for authentication (to fetch more data about the logged-in user) and for avatars (to fetch data about other users). This extra information is provided, appropriately enough, by user info providers. These can be passed to :py:class:`~buildbot.www.auth.RemoteUserAuth` and as an element of ``avatar_methods``. This can also be passed to oauth2 authentication plugins. In this case the username provided by oauth2 will be used, and all other information will be taken from ldap (Full Name, email, and groups): Currently only one provider is available: .. 
py:class:: buildbot.ldapuserinfo.LdapUserInfo(uri, bindUser, bindPw, accountBase, accountPattern, groupBase=None, groupMemberPattern=None, groupName=None, accountFullName, accountEmail, avatarPattern=None, avatarData=None, accountExtraFields=None) :param uri: uri of the ldap server :param bindUser: username of the ldap account that is used to get the infos for other users (usually a "faceless" account) :param bindPw: password of the ``bindUser`` :param accountBase: the base dn (distinguished name)of the user database :param accountPattern: the pattern for searching in the account database. This must contain the ``%(username)s`` string, which is replaced by the searched username :param accountFullName: the name of the field in account ldap database where the full user name is to be found. :param accountEmail: the name of the field in account ldap database where the user email is to be found. :param groupBase: the base dn of the groups database :param groupMemberPattern: the pattern for searching in the group database. This must contain the ``%(dn)s`` string, which is replaced by the searched username's dn :param groupName: the name of the field in groups ldap database where the group name is to be found. :param avatarPattern: the pattern for searching avatars from emails in the account database. This must contain the ``%(email)s`` string, which is replaced by the searched email :param avatarData: the name of the field in groups ldap database where the avatar picture is to be found. This field is supposed to contain the raw picture, format is automatically detected from jpeg, png or git. :param accountExtraFields: extra fields to extracts for use with the authorization policies If one of the three optional groups parameters is supplied, then all of them become mandatory. If none is supplied, the retrieved user info has an empty list of groups. Example: .. 
code-block:: python from buildbot.plugins import util # this configuration works for MS Active Directory ldap implementation # we use it for user info, and avatars userInfoProvider = util.LdapUserInfo( uri='ldap://ldap.mycompany.com:3268', bindUser='ldap_user', bindPw='p4$$wd', accountBase='dc=corp,dc=mycompany,dc=com', groupBase='dc=corp,dc=mycompany,dc=com', accountPattern='(&(objectClass=person)(sAMAccountName=%(username)s))', accountFullName='displayName', accountEmail='mail', groupMemberPattern='(&(objectClass=group)(member=%(dn)s))', groupName='cn', avatarPattern='(&(objectClass=person)(mail=%(email)s))', avatarData='thumbnailPhoto', ) c['www'] = dict(port=PORT, allowed_origins=["*"], url=c['buildbotURL'], auth=util.RemoteUserAuth(userInfoProvider=userInfoProvider), avatar_methods=[userInfoProvider, util.AvatarGravatar()]) .. note:: In order to use this module, you need to install the ``ldap3`` module: .. code-block:: bash pip install ldap3 In the case of oauth2 authentications, you have to pass the userInfoProvider as keyword argument: .. code-block:: python from buildbot.plugins import util userInfoProvider = util.LdapUserInfo(...) c['www'] = { # ... 'auth': util.GoogleAuth("clientid", "clientsecret", userInfoProvider=userInfoProvider), } .. _Reverse_Proxy_Config: Reverse Proxy Configuration ~~~~~~~~~~~~~~~~~~~~~~~~~~~ It is usually better to put Buildbot behind a reverse proxy in production. * Provides automatic gzip compression * Provides SSL support with a widely used implementation * Provides support for http/2 or spdy for fast parallel REST api access from the browser Reverse proxy however might be problematic for websocket, you have to configure it specifically to pass web socket requests. Here is an nginx configuration that is known to work (nginx 1.6.2): .. 
code-block:: none server { # Enable SSL and http2 listen 443 ssl http2 default_server; server_name yourdomain.com; root html; index index.html index.htm; ssl on; ssl_certificate /etc/nginx/ssl/server.cer; ssl_certificate_key /etc/nginx/ssl/server.key; # put a one day session timeout for websockets to stay longer ssl_session_cache shared:SSL:10m; ssl_session_timeout 1440m; # please consult latest nginx documentation for current secure encryption settings ssl_protocols .. ssl_ciphers .. ssl_prefer_server_ciphers on; # # force https add_header Strict-Transport-Security "max-age=31536000; includeSubdomains;"; spdy_headers_comp 5; proxy_set_header HOST $host; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Server $host; proxy_set_header X-Forwarded-Host $host; # you could use / if you use domain based proxy instead of path based proxy location /buildbot/ { proxy_pass http://127.0.0.1:5000/; } location /buildbot/sse/ { # proxy buffering will prevent sse to work proxy_buffering off; proxy_pass http://127.0.0.1:5000/sse/; } # required for websocket location /buildbot/ws { proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "upgrade"; proxy_pass http://127.0.0.1:5000/ws; # raise the proxy timeout for the websocket proxy_read_timeout 6000s; } } To run with Apache2, you'll need `mod_proxy_wstunnel `_ in addition to `mod_proxy_http `_. Serving HTTPS (`mod_ssl `_) is advised to prevent issues with enterprise proxies (see :ref:`SSE`), even if you don't need the encryption itself. Here is a configuration that is known to work (Apache 2.4.10 / Debian 8, Apache 2.4.25 / Debian 9, Apache 2.4.6 / CentOS 7), directly at the top of the domain. If you want to add access control directives, just put them in a ````. .. 
code-block:: none ServerName buildbot.example ServerAdmin webmaster@buildbot.example # replace with actual port of your Buildbot master ProxyPass /ws ws://127.0.0.1:8020/ws ProxyPassReverse /ws ws://127.0.0.1:8020/ws ProxyPass / http://127.0.0.1:8020/ ProxyPassReverse / http://127.0.0.1:8020/ SetEnvIf X-Url-Scheme https HTTPS=1 ProxyPreserveHost On SSLEngine on SSLCertificateFile /path/to/cert.pem SSLCertificateKeyFile /path/to/cert.key # check Apache2 documentation for current safe SSL settings # This is actually the Debian 8 default at the time of this writing: SSLProtocol all -SSLv3 .. _Web-Authorization: Authorization rules ~~~~~~~~~~~~~~~~~~~ The authorization framework in Buildbot is very generic and flexible. The drawback is that it is not very obvious for newcomers. The 'simple' example will however allow you to easily start by implementing an admins-have-all-rights setup. Please carefully read the following documentation to understand how to setup authorization in Buildbot. Authorization framework is tightly coupled to the REST API. Authorization framework only works for HTTP, not for other means of interaction like IRC or try scheduler. It allows or denies access to the REST APIs according to rules. .. image:: ../../_images/auth_rules.* :alt: Auth diagram - Roles is a label that you give to a user. It is similar but different to the usual notion of group: - A user can have several roles, and a role can be given to several users. - Role is an application specific notion, while group is more organization specific notion. - Groups are given by the auth plugin, e.g ``ldap``, ``github``, and are not always in the precise control of the buildbot admins. - Roles can be dynamically assigned, according to the context. For example, there is the ``owner`` role, which can be given to a user for a build that he is at the origin, so that he can stop or rebuild only builds of his own. - Endpoint matchers associate role requirements to REST API endpoints. 
The default policy is allow in case no matcher matches (see below why). - Role matchers associate authenticated users to roles. Restricting Read Access +++++++++++++++++++++++ Please note that you can use this framework to deny read access to the REST API, but there is no access control in websocket or SSE APIs. Practically this means user will still see live updates from running builds in the UI, as those will come from websocket. The only resources that are only available for read in REST API are the log data (a.k.a `logchunks`). From a strict security point of view you cannot really use Buildbot Authz framework to securely deny read access to your bot. The access control is rather designed to restrict control APIs which are only accessible through REST API. In order to reduce attack surface, we recommend to place Buildbot behind an access controlled reverse proxy like OAuth2Proxy_. .. _OAuth2Proxy: https://github.com/oauth2-proxy/oauth2-proxy Authz Configuration +++++++++++++++++++ .. py:class:: buildbot.www.authz.Authz(allowRules=[], roleMatcher=[], stringsMatcher=util.fnmatchStrMatcher) :param allowRules: List of :py:class:`EndpointMatcherBase` processed in order for each endpoint grant request. :param roleMatcher: List of RoleMatchers :param stringsMatcher: Selects algorithm used to make strings comparison (used to compare roles and builder names). Can be :py:class:`util.fnmatchStrMatcher` or :py:class:`util.reStrMatcher` from ``from buildbot.plugins import util`` :py:class:`Authz` needs to be configured in ``c['www']['authz']`` Endpoint matchers +++++++++++++++++ Endpoint matchers are responsible for creating rules to match REST endpoints, and requiring roles for them. Endpoint matchers are processed in the order they are configured. The first rule matching an endpoint will prevent further rules from being checked. To continue checking other rules when the result is `deny`, set `defaultDeny=False`. If no endpoint matcher matches, then access is granted. 
One can implement the default deny policy by putting an :py:class:`AnyEndpointMatcher` with nonexistent role in the end of the list. Please note that this will deny all REST apis, and most of the UI do not implement proper access denied message in case of such error. The following sequence is implemented by each EndpointMatcher class: - Check whether the requested endpoint is supported by this matcher - Get necessary info from data API and decide whether it matches - Look if the user has the required role Several endpoints matchers are currently implemented. If you need a very complex setup, you may need to implement your own endpoint matchers. In this case, you can look at the source code for detailed examples on how to write endpoint matchers. .. py:class:: buildbot.www.authz.endpointmatchers.EndpointMatcherBase(role, defaultDeny=True) :param role: The role which grants access to this endpoint. List of roles is not supported, but a ``fnmatch`` expression can be provided to match several roles. :param defaultDeny: The role matcher algorithm will stop if this value is true and the endpoint matched. This is the base endpoint matcher. Its arguments are inherited by all the other endpoint matchers. .. py:class:: buildbot.www.authz.endpointmatchers.AnyEndpointMatcher(role) :param role: The role which grants access to any endpoint. AnyEndpointMatcher grants all rights to people with given role (usually "admins"). .. py:class:: buildbot.www.authz.endpointmatchers.AnyControlEndpointMatcher(role) :param role: The role which grants access to any control endpoint. AnyControlEndpointMatcher grants control rights to people with given role (usually "admins"). This endpoint matcher matches current and future control endpoints. You need to add this in the end of your configuration to make sure it is future proof. .. py:class:: buildbot.www.authz.endpointmatchers.ForceBuildEndpointMatcher(builder, role) :param builder: Name of the builder. 
:param role: The role needed to get access to such endpoints. ForceBuildEndpointMatcher grants right to force builds. .. py:class:: buildbot.www.authz.endpointmatchers.StopBuildEndpointMatcher(builder, role) :param builder: Name of the builder. :param role: The role needed to get access to such endpoints. StopBuildEndpointMatcher grants rights to stop builds. .. py:class:: buildbot.www.authz.endpointmatchers.RebuildBuildEndpointMatcher(builder, role) :param builder: Name of the builder. :param role: The role needed to get access to such endpoints. RebuildBuildEndpointMatcher grants rights to rebuild builds. .. py:class:: buildbot.www.authz.endpointmatchers.EnableSchedulerEndpointMatcher(builder, role) :param builder: Name of the builder. :param role: The role needed to get access to such endpoints. EnableSchedulerEndpointMatcher grants rights to enable and disable schedulers via the UI. Role matchers +++++++++++++ Role matchers are responsible for creating rules to match people and grant them roles. You can grant roles from groups information provided by the Auth plugins, or if you prefer directly to people's email. .. py:class:: buildbot.www.authz.roles.RolesFromGroups(groupPrefix) :param groupPrefix: Prefix to remove from each group RolesFromGroups grants roles from the groups of the user. If a user has group ``buildbot-admin``, and groupPrefix is ``buildbot-``, then user will be granted the role 'admin' ex: .. code-block:: python roleMatchers=[ util.RolesFromGroups(groupPrefix="buildbot-") ] .. py:class:: buildbot.www.authz.roles.RolesFromEmails(roledict) :param roledict: Dictionary with key=role, and value=list of email strings RolesFromEmails grants roles to users according to the hardcoded emails. ex: .. code-block:: python roleMatchers=[ util.RolesFromEmails(admins=["my@email.com"]) ] .. 
py:class:: buildbot.www.authz.roles.RolesFromDomain(roledict) :param roledict: Dictionary with key=role, and value=list of domain strings RolesFromDomain grants roles to users according to their email domains. If a user tried to login with email ``foo@gmail.com``, then the user will be granted the role 'admins'. ex: .. code-block:: python roleMatchers=[ util.RolesFromDomain(admins=["gmail.com"]) ] .. py:class:: buildbot.www.authz.roles.RolesFromOwner(roledict) :param roledict: Dictionary with key=role, and value=list of email strings RolesFromOwner grants a given role when property owner matches the email of the user ex: .. code-block:: python roleMatchers=[ RolesFromOwner(role="owner") ] .. py:class:: buildbot.www.authz.roles.RolesFromUsername(roles, usernames) :param roles: Roles to assign when the username matches. :param usernames: List of usernames that have the roles. RolesFromUsername grants the given roles when the ``username`` property is within the list of usernames. ex: .. code-block:: python roleMatchers=[ RolesFromUsername(roles=["admins"], usernames=["root"]), RolesFromUsername(roles=["developers", "integrators"], usernames=["Alice", "Bob"]) ] Example Configs +++++++++++++++ Simple config which allows admin people to control everything, but allow anonymous to look at build results: .. code-block:: python from buildbot.plugins import * authz = util.Authz( allowRules=[ util.AnyControlEndpointMatcher(role="admins"), ], roleMatchers=[ util.RolesFromEmails(admins=["my@email.com"]) ] ) auth=util.UserPasswordAuth({'my@email.com': 'mypass'}) c['www']['auth'] = auth c['www']['authz'] = authz More complex config with separation per branch: .. 
code-block:: python from buildbot.plugins import * authz = util.Authz( stringsMatcher=util.fnmatchStrMatcher, # simple matcher with '*' glob character # stringsMatcher = util.reStrMatcher, # if you prefer regular expressions allowRules=[ # admins can do anything, # defaultDeny=False: if user does not have the admin role, we continue parsing rules util.AnyEndpointMatcher(role="admins", defaultDeny=False), util.StopBuildEndpointMatcher(role="owner"), # *-try groups can start "try" builds util.ForceBuildEndpointMatcher(builder="try", role="*-try"), # *-mergers groups can start "merge" builds util.ForceBuildEndpointMatcher(builder="merge", role="*-mergers"), # *-releasers groups can start "release" builds util.ForceBuildEndpointMatcher(builder="release", role="*-releasers"), # if a future Buildbot implements new controls, we are safe with this last rule util.AnyControlEndpointMatcher(role="admins") ], roleMatchers=[ RolesFromGroups(groupPrefix="buildbot-"), # "reaper-try" contains a hyphen, so it cannot be passed as a regular # keyword argument; use dict unpacking instead RolesFromEmails(**{"admins": ["homer@springfieldplant.com"], "reaper-try": ["007@mi6.uk"]}), # role owner is granted when property owner matches the email of the user RolesFromOwner(role="owner") ] ) c['www']['authz'] = authz Using GitHub authentication and allowing access to control endpoints for users in the "Buildbot" organization: .. code-block:: python from buildbot.plugins import * authz = util.Authz( allowRules=[ util.AnyControlEndpointMatcher(role="BuildBot") ], roleMatchers=[ util.RolesFromGroups() ] ) auth=util.GitHubAuth('CLIENT_ID', 'CLIENT_SECRET') c['www']['auth'] = auth c['www']['authz'] = authz buildbot-3.4.0/master/docs/manual/configuration/wwwhooks.rst000066400000000000000000000460121413250514000243110ustar00rootroot00000000000000 .. _Change-Hooks: Change Hooks ~~~~~~~~~~~~ The ``/change_hook`` URL is a magic URL which will accept HTTP requests and translate them into changes for Buildbot. 
Implementations (such as a trivial json-based endpoint and a GitHub implementation) can be found in :src:`master/buildbot/www/hooks`. The format of the URL is :samp:`/change_hook/{DIALECT}` where DIALECT is a package within the hooks directory. ``change_hook`` is disabled by default and each DIALECT has to be enabled separately, for security reasons. An example ``www`` configuration line which enables change_hook and two DIALECTS: .. code-block:: python c['www'] = dict( change_hook_dialects={ 'base': True, 'somehook': {'option1':True, 'option2':False}, }, ) Within the ``www`` config dictionary arguments, the ``change_hook`` key enables/disables the module, and ``change_hook_dialects`` whitelists DIALECTs where the keys are the module names and the values are optional arguments which will be passed to the hooks. The :contrib-src:`master/contrib/post_build_request.py` script allows for the submission of an arbitrary change request. Run :command:`post_build_request.py --help` for more information. The ``base`` dialect must be enabled for this to work. .. _Change-Hooks-Auth: Change Hooks Auth +++++++++++++++++ By default, the change hook URL is not protected. Some hooks implement their own authentication method. Others require the generic method to be secured. To protect URL against unauthorized access, you may use ``change_hook_auth`` option. .. note:: This method uses ``HTTP BasicAuth``. It implies the use of SSL via :ref:`Reverse_Proxy_Config` in order to be fully secured. .. code-block:: python from twisted.cred import strcred c['www'] = dict(..., change_hook_auth=[strcred.makeChecker("file:changehook.passwd")], ) Create a file ``changehook.passwd`` with content: .. code-block:: none user:password ``change_hook_auth`` should be a list of :py:class:`ICredentialsChecker`. See the details of available options in `Twisted documentation `_. .. 
note:: In the case of the ``"file:changehook.passwd"`` description in makeChecker, Buildbot ``checkconfig`` might give you a warning "not a valid file: changehook.passwd". To resolve this, you need specify the full path to the file, ``f"file:{os.path.join(basedir, 'changehook.passwd')}"``. .. bb:chsrc:: Mercurial Mercurial hook ++++++++++++++ The Mercurial hook uses the base dialect: .. code-block:: python c['www'] = dict( ..., change_hook_dialects={'base': True}, ) Once this is configured on your buildmaster add the following hook on your server-side Mercurial repository's ``hgrc``: .. code-block:: ini [hooks] changegroup.buildbot = python:/path/to/hgbuildbot.py:hook You'll find :contrib-src:`master/contrib/hgbuildbot.py`, and its inline documentation, in the :contrib-src:`buildbot-contrib <../../>` repository. .. bb:chsrc:: GitHub GitHub hook +++++++++++ .. note:: There is a standalone HTTP server available for receiving GitHub notifications as well: :contrib-src:`master/contrib/github_buildbot.py`. This script may be useful in cases where you cannot expose the WebStatus for public consumption. Alternatively, you can setup a reverse proxy :ref:`Reverse_Proxy_Config`. The GitHub hook has the following parameters: ``secret`` (default `None`) Secret token to use to validate payloads. ``strict`` (default `False`) If the hook must be strict regarding valid payloads. If the value is `False` (default), the signature will only be checked if a secret is specified and a signature was supplied with the payload. If the value is `True`, a secret must be provided, and payloads without signature will be ignored. ``codebase`` (default `None`) The codebase value to include with created changes. If the value is a function (or any other callable), it will be called with the GitHub event payload as argument and the function must return the codebase value to use for the event. 
``github_property_whitelist`` (default `[]`) A list of ``fnmatch`` expressions which match against the flattened pull request information JSON prefixed with ``github``. For example ``github.number`` represents the pull request number. Available entries can be looked up in the GitHub API Documentation or by examining the data returned for a pull request by the API. ``class`` (default `None`) A class to be used for processing incoming payloads. If the value is `None` (default), the default class -- :py:class:`buildbot.www.hooks.github.GitHubEventHandler` -- will be used. The default class handles `ping`, `push` and `pull_request` events only. If you'd like to handle other events (see `Event Types & Payloads `_ for more information), you'd need to subclass ``GitHubEventHandler`` and add handler methods for the corresponding events. For example, if you'd like to handle `blah` events, your code should look something like this: .. code-block:: python from buildbot.www.hooks.github import GitHubEventHandler class MyBlahHandler(GitHubEventHandler): def handle_blah(self, payload): # Do some magic here return [], 'git' ``skips`` (default ``[r'\[ *skip *ci *\]', r'\[ *ci *skip *\]']``) A list of regex patterns that make buildbot ignore the push event. For instance, if a user pushes 3 commits and the commit message of the branch head contains a key string ``[ci skip]``, buildbot will ignore this push event. If you want to disable the skip checking, please set it to ``[]``. ``github_api_endpoint`` (default ``https://api.github.com``) If you have a self-hosted GitHub Enterprise installation, please set this URL properly. ``token`` If your GitHub or GitHub Enterprise instance does not allow anonymous communication, you need to provide an access token. Instructions can be found here ``pullrequest_ref`` (default ``merge``) Remote ref to test if a pull request is sent to the endpoint. See the GitHub developer manual for possible values for pull requests. (e.g. 
``head``) The simplest way to use GitHub hook is as follows: .. code-block:: python c['www'] = dict( change_hook_dialects={'github': {}}, ) Having added this line, you should add a webhook for your GitHub project (see `Creating Webhooks page at GitHub `_). The parameters are: :guilabel:`Payload URL` This URL should point to ``/change_hook/github`` relative to the root of the web status. For example, if the base URL is ``http://builds.example.com/buildbot``, then point GitHub to ``http://builds.example.com/buildbot/change_hook/github``. To specify a project associated to the repository, append ``?project=name`` to the URL. :guilabel:`Content Type` Specify ``application/x-www-form-urlencoded`` or ``application/json``. :guilabel:`Secret` Any value. If you provide a non-empty value (recommended), make sure that your hook is configured to use it: .. code-block:: python c['www'] = dict( ..., change_hook_dialects={ 'github': { 'secret': 'MY-SECRET', }, }, ) :guilabel:`Which events would you like to trigger this webhook?` Click -- ``Let me select individual events``, then select ``Push`` and ``Pull request`` -- other kind of events are not currently supported. And then press the ``Add Webhook`` button. Github hook creates 3 kinds of changes, distinguishable by their ``category`` field: - ``None``: This change is a push to a branch. Use ``util.ChangeFilter(category=None, repository="http://github.com//")`` - ``'tag'``: This change is a push to a tag. Use ``util.ChangeFilter(category='tag', repository="http://github.com//")`` - ``'pull'``: This change is from a pull-request creation or update. Use ``util.ChangeFilter(category='pull', repository="http://github.com//")``. In this case, the :bb:step:`GitHub` step must be used instead of the standard :bb:step:`Git` in order to be able to pull GitHub's magic refs. With this method, the :bb:step:`GitHub` step will always checkout the branch merged with latest master. 
This allows testing the result of the merge instead of just the source branch. Note that you can use the :bb:step:`GitHub` for all categories of event. .. warning:: Pull requests against every branch will trigger the webhook; the base branch name will be in the ``basename`` property of the build. .. warning:: The incoming HTTP requests for this hook are not authenticated by default. Anyone who can access the web server can "fake" a request from GitHub, potentially causing the buildmaster to run arbitrary code. To protect URL against unauthorized access you should use :ref:`Change-Hooks-Auth` option. Then change the ``Payload URL`` of your GitHub webhook to ``https://user:password@builds.example.com/bbot/change_hook/github``. .. bb:chsrc:: BitBucket BitBucket hook ++++++++++++++ The BitBucket hook is as simple as the GitHub one and takes no options. .. code-block:: python c['www'] = dict(..., change_hook_dialects={'bitbucket': True}, ) When this is set up, you should add a `POST` service pointing to ``/change_hook/bitbucket`` relative to the root of the web status. For example, if the grid URL is ``http://builds.example.com/bbot/grid``, then point BitBucket to ``http://builds.example.com/change_hook/bitbucket``. To specify a project associated to the repository, append ``?project=name`` to the URL. Note that there is a standalone HTTP server available for receiving BitBucket notifications, as well: :contrib-src:`master/contrib/bitbucket_buildbot.py`. This script may be useful in cases where you cannot expose the WebStatus for public consumption. .. warning:: As in the previous case, the incoming HTTP requests for this hook are not authenticated by default. Anyone who can access the web status can "fake" a request from BitBucket, potentially causing the buildmaster to run arbitrary code. To protect URL against unauthorized access you should use :ref:`Change-Hooks-Auth` option. 
Then, create a BitBucket service hook (see https://confluence.atlassian.com/display/BITBUCKET/POST+Service+Management) with a WebHook URL like ``https://user:password@builds.example.com/bbot/change_hook/bitbucket``. Note that as before, not using ``change_hook_auth`` can expose you to security risks. Bitbucket Cloud hook +++++++++++++++++++++ .. code-block:: python c['www'] = dict( ..., change_hook_dialects={'bitbucketcloud': {}}, ) When this is set up, you should add a webhook pointing to ``/change_hook/bitbucketcloud`` relative to the root of the web status. According to the type of the event, the change category is set to ``push``, ``pull-created``, ``pull-rejected``, ``pull-updated``, ``pull-fulfilled`` or ``ref-deleted``. The Bitbucket Cloud hook may have the following optional parameters: ``codebase`` (default `None`) The codebase value to include with changes or a callable object that will be passed the payload in order to get it. ``bitbucket_property_whitelist`` (default `[]`) A list of ``fnmatch`` expressions which match against the flattened pull request information JSON prefixed with ``bitbucket``. For example ``bitbucket.id`` represents the pull request ID. Available entries can be looked up in the BitBucket API Documentation or by examining the data returned for a pull request by the API. .. Warning:: The incoming HTTP requests for this hook are not authenticated by default. Anyone who can access the web server can "fake" a request from Bitbucket Cloud, potentially causing the buildmaster to run arbitrary code. Bitbucket Server hook +++++++++++++++++++++ .. code-block:: python c['www'] = dict( ..., change_hook_dialects={'bitbucketserver': {}}, ) When this is set up, you should add a webhook pointing to ``/change_hook/bitbucketserver`` relative to the root of the web status. According to the type of the event, the change category is set to ``push``, ``pull-created``, ``pull-rejected``, ``pull-updated``, ``pull-fulfilled`` or ``ref-deleted``. 
The Bitbucket Server hook may have the following optional parameters: ``codebase`` (default `None`) The codebase value to include with changes or a callable object that will be passed the payload in order to get it. ``bitbucket_property_whitelist`` (default `[]`) A list of ``fnmatch`` expressions which match against the flattened pull request information JSON prefixed with ``bitbucket``. For example ``bitbucket.id`` represents the pull request ID. Available entries can be looked up in the BitBucket API Documentation or by examining the data returned for a pull request by the API. .. Warning:: The incoming HTTP requests for this hook are not authenticated by default. Anyone who can access the web server can "fake" a request from Bitbucket Server, potentially causing the buildmaster to run arbitrary code. .. Note:: This hook requires the `bitbucket-webhooks` plugin (see https://marketplace.atlassian.com/plugins/nl.topicus.bitbucket.bitbucket-webhooks/server/overview). Poller hook +++++++++++ The poller hook allows you to use GET or POST requests to trigger polling. One advantage of this is your buildbot instance can poll at launch (using the pollAtLaunch flag) to get changes that happened while it was down, but then you can still use a commit hook to get fast notification of new changes. Suppose you have a poller configured like this: .. code-block:: python c['change_source'] = SVNPoller( repourl="https://amanda.svn.sourceforge.net/svnroot/amanda/amanda", split_file=split_file_branches, pollInterval=24*60*60, pollAtLaunch=True, ) And you configure your WebStatus to enable this hook: .. code-block:: python c['www'] = dict(..., change_hook_dialects={'poller': True}, ) Then you will be able to trigger a poll of the SVN repository by poking the ``/change_hook/poller`` URL from a commit hook like this: .. 
code-block:: bash curl -s -F poller=https://amanda.svn.sourceforge.net/svnroot/amanda/amanda \ http://yourbuildbot/change_hook/poller If no ``poller`` argument is provided then the hook will trigger polling of all polling change sources. You can restrict which pollers the webhook has access to using the ``allowed`` option: .. code-block:: python c['www'] = { ..., 'change_hook_dialects': { 'poller': { 'allowed': ['https://amanda.svn.sourceforge.net/svnroot/amanda/amanda'] } } } .. bb:chsrc:: GitLab GitLab hook +++++++++++ .. code-block:: python c['www'] = dict(..., change_hook_dialects={ 'gitlab' : { 'secret': '...', }, }, ) The GitLab hook has the following parameters: ``secret`` (default `None`) Secret token to use to validate payloads. When this is set up, you should add a `POST` service pointing to ``/change_hook/gitlab`` relative to the root of the web status. For example, if the grid URL is ``http://builds.example.com/bbot/grid``, then point GitLab to ``http://builds.example.com/change_hook/gitlab``. The project and/or codebase can also be passed in the URL by appending ``?project=name`` or ``?codebase=foo`` to the URL. These parameters will be passed along to the scheduler. .. note:: To handle merge requests from forks properly, it's easiest to use a GitLab source step rather than a Git source step. .. note:: Your Git or GitLab step must be configured with a git@ repourl, not a https: one, else the change from the webhook will not trigger a build. .. warning:: As in the previous case, the incoming HTTP requests for this hook are not authenticated by default. Anyone who can access the web status can "fake" a request from your GitLab server, potentially causing the buildmaster to run arbitrary code. .. warning:: When applicable, you need to permit access to internal/local networks. See ``https://docs.gitlab.com/ee/security/webhooks.html`` for details. 
To protect URL against unauthorized access you should either * set secret token in the configuration above, then set it in the GitLab service hook declaration, or * use the :ref:`Change-Hooks-Auth` option. Then, create a GitLab service hook (see ``https://your.gitlab.server/help/web_hooks``) with a WebHook URL like ``https://user:password@builds.example.com/bbot/change_hook/gitlab``. Note that as before, not using ``change_hook_auth`` can expose you to security risks. .. bb:chsrc:: Gitorious Gitorious Hook ++++++++++++++ The Gitorious hook is as simple as the GitHub one and it also takes no options. .. code-block:: python c['www'] = dict(..., change_hook_dialects={'gitorious': True}, ) When this is set up, you should add a `POST` service pointing to ``/change_hook/gitorious`` relative to the root of the web status. For example, if the grid URL is ``http://builds.example.com/bbot/grid``, then point Gitorious to ``http://builds.example.com/change_hook/gitorious``. .. warning:: As in the previous case, the incoming HTTP requests for this hook are not authenticated by default. Anyone who can access the web status can "fake" a request from your Gitorious server, potentially causing the buildmaster to run arbitrary code. To protect URL against unauthorized access you should use :ref:`Change-Hooks-Auth` option. Then, create a Gitorious web hook with a WebHook URL like ``https://user:password@builds.example.com/bbot/change_hook/gitorious``. Note that as before, not using ``change_hook_auth`` can expose you to security risks. .. note:: Web hooks are only available for local Gitorious installations, since this feature is not offered as part of Gitorious.org yet. Custom Hooks ++++++++++++ Custom hooks are supported via the :ref:`Plugins` mechanism. You can subclass any of the available hook handler classes available in :py:mod:`buildbot.www.hooks` and register it in the plugin system via a custom python module. 
For convenience, you can also use the generic option ``custom_class``, e.g.: .. code-block:: python from buildbot.plugins import webhooks class CustomBase(webhooks.base): def getChanges(self, request): args = request.args chdict = dict( revision=args.get(b'revision'), repository=args.get(b'repository'), project=args.get(b'project'), codebase=args.get(b'codebase')) return ([chdict], None) c['www'] = dict(..., change_hook_dialects={ 'base' : { 'custom_class': CustomBase, }, }, ) buildbot-3.4.0/master/docs/manual/customization.rst000066400000000000000000002063561413250514000224730ustar00rootroot00000000000000Customization ============= For advanced users, Buildbot acts as a framework supporting a customized build application. For the most part, such configurations consist of subclasses set up for use in a regular Buildbot configuration file. This chapter describes some of the more common idioms in advanced Buildbot configurations. At the moment, this chapter is an unordered set of suggestions: .. contents:: :local: If you'd like to clean it up, fork the project on GitHub and get started! Programmatic Configuration Generation ------------------------------------- Bearing in mind that ``master.cfg`` is a Python file, large configurations can be shortened considerably by judicious use of Python loops. For example, the following will generate a builder for each of a range of supported versions of Python: .. code-block:: python pythons = ['python2.4', 'python2.5', 'python2.6', 'python2.7', 'python3.2', 'python3.3'] pytest_workers = ["worker%s" % n for n in range(10)] for python in pythons: f = util.BuildFactory() f.addStep(steps.SVN(...)) f.addStep(steps.ShellCommand(command=[python, 'test.py'])) c['builders'].append(util.BuilderConfig( name="test-%s" % python, factory=f, workernames=pytest_workers)) Next step would be the loading of ``pythons`` list from a .yaml/.ini file. .. _Collapse-Request-Functions: Collapse Request Functions -------------------------- .. 
index:: Builds; collapsing The logic Buildbot uses to decide which build request can be merged can be customized by providing a Python function (a callable) instead of ``True`` or ``False`` described in :ref:`Collapsing-Build-Requests`. Arguments for the callable are: ``master`` pointer to the master object, which can be used to make additional data api calls via `master.data.get` ``builder`` dictionary of type :bb:rtype:`builder` ``req1`` dictionary of type :bb:rtype:`buildrequest` ``req2`` dictionary of type :bb:rtype:`buildrequest` .. warning:: The number of invocations of the callable is proportional to the square of the request queue length, so a long-running callable may cause undesirable delays when the queue length grows. It should return true if the requests can be merged, and False otherwise. For example: .. code-block:: python @defer.inlineCallbacks def collapseRequests(master, builder, req1, req2): "any requests with the same branch can be merged" # get the buildsets for each buildrequest selfBuildset , otherBuildset = yield defer.gatherResults([ master.data.get(('buildsets', req1['buildsetid'])), master.data.get(('buildsets', req2['buildsetid'])) ]) selfSourcestamps = selfBuildset['sourcestamps'] otherSourcestamps = otherBuildset['sourcestamps'] if len(selfSourcestamps) != len(otherSourcestamps): return False for selfSourcestamp, otherSourcestamp in zip(selfSourcestamps, otherSourcestamps): if selfSourcestamp['branch'] != otherSourcestamp['branch']: return False return True c['collapseRequests'] = collapseRequests In many cases, the details of the :bb:rtype:`sourcestamp` and :bb:rtype:`buildrequest` are important. In the following example, only :bb:rtype:`buildrequest` with the same "reason" are merged; thus developers forcing builds for different reasons will see distinct builds. Note the use of the :py:meth:`buildrequest.BuildRequest.canBeCollapsed` method to access the source stamp compatibility algorithm: .. 
code-block:: python @defer.inlineCallbacks def collapseRequests(master, builder, req1, req2): canBeCollapsed = yield buildrequest.BuildRequest.canBeCollapsed(master, req1, req2) if canBeCollapsed and req1.reason == req2.reason: return True else: return False c['collapseRequests'] = collapseRequests Another common example is to prevent collapsing of requests coming from a :bb:step:`Trigger` step. :bb:step:`Trigger` step can indeed be used in order to implement parallel testing of the same source. Buildrequests will all have the same sourcestamp, but probably different properties, and shall not be collapsed. .. note:: In most cases, just setting ``collapseRequests=False`` for triggered builders will do the trick. In other cases, ``parent_buildid`` from buildset can be used: .. code-block:: python @defer.inlineCallbacks def collapseRequests(master, builder, req1, req2): canBeCollapsed = yield buildrequest.BuildRequest.canBeCollapsed(master, req1, req2) selfBuildset , otherBuildset = yield defer.gatherResults([ master.data.get(('buildsets', req1['buildsetid'])), master.data.get(('buildsets', req2['buildsetid'])) ]) if canBeCollapsed and selfBuildset['parent_buildid'] != None and \ otherBuildset['parent_buildid'] != None: return True else: return False c['collapseRequests'] = collapseRequests If it's necessary to perform some extended operation to determine whether two requests can be merged, then the ``collapseRequests`` callable may return its result via Deferred. .. warning:: Again, the number of invocations of the callable is proportional to the square of the request queue length, so a long-running callable may cause undesirable delays when the queue length grows. For example: .. code-block:: python @defer.inlineCallbacks def collapseRequests(master, builder, req1, req2): info1, info2 = yield defer.gatherResults([ getMergeInfo(req1), getMergeInfo(req2), ]) return info1 == info2 c['collapseRequests'] = collapseRequests .. 
_Builder-Priority-Functions: Builder Priority Functions -------------------------- .. index:: Builders; priority The :bb:cfg:`prioritizeBuilders` configuration key specifies a function which is called with two arguments: a :class:`BuildMaster` and a list of :class:`Builder` objects. It should return a list of the same :class:`Builder` objects, in the desired order. It may also remove items from the list if builds should not be started on those builders. If necessary, this function can return its results via a Deferred (it is called with ``maybeDeferred``). A simple ``prioritizeBuilders`` implementation might look like this: .. code-block:: python def prioritizeBuilders(buildmaster, builders): """Prioritize builders. 'finalRelease' builds have the highest priority, so they should be built before running tests, or creating builds.""" builderPriorities = { "finalRelease": 0, "test": 1, "build": 2, } builders.sort(key=lambda b: builderPriorities.get(b.name, 0)) return builders c['prioritizeBuilders'] = prioritizeBuilders If the change frequency is higher than the turn-around of the builders, the following approach might be helpful: .. code-block:: python def prioritizeBuilders(buildmaster, builders): """Prioritize builders. First, prioritize inactive builders. Second, consider the last time a job was completed (no job is infinite past). Third, consider the time the oldest request has been queued. This provides a simple round-robin scheme that works with collapsed builds.""" def isBuilding(b): return bool(b.building) or bool(b.old_building) builders.sort(key = lambda b: (isBuilding(b), b.getNewestCompleteTime(), b.getOldestRequestTime())) return builders c['prioritizeBuilders'] = prioritizeBuilders .. index:: Builds; priority .. _Build-Priority-Functions: Build Priority Functions ------------------------ When a builder has multiple pending build requests, it uses a ``nextBuild`` function to decide which build it should start first. 
This function is given two parameters: the :class:`Builder`, and a list of :class:`BuildRequest` objects representing pending build requests. A simple function to prioritize release builds over other builds might look like this: .. code-block:: python def nextBuild(bldr, requests): for r in requests: if r.source.branch == 'release': return r return requests[0] If some non-immediate result must be calculated, the ``nextBuild`` function can also return a Deferred: .. code-block:: python def nextBuild(bldr, requests): d = get_request_priorities(requests) def pick(priorities): if requests: return sorted(zip(priorities, requests))[0][1] d.addCallback(pick) return d The ``nextBuild`` function is passed as parameter to :class:`BuilderConfig`: .. code-block:: python ... BuilderConfig(..., nextBuild=nextBuild, ...) ... .. _canStartBuild-Functions: ``canStartBuild`` Functions --------------------------- Sometimes, you cannot know in advance what workers to assign to a :class:`BuilderConfig`. For example, you might need to check for the existence of a file on a worker before running a build on it. It is possible to do that by setting the ``canStartBuild`` callback. Here is an example that checks if there is a ``vm`` property set for the build request. If it is set, it checks if a file named after it exists in the ``/opt/vm`` folder. If the file does not exist on the given worker, refuse to run the build to force the master to select another worker. .. code-block:: python @defer.inlineCallbacks def canStartBuild(builder, wfb, request): vm = request.properties.get('vm', builder.config.properties.get('vm')) if vm: args = {'file': os.path.join('/opt/vm', vm)} cmd = RemoteCommand('stat', args, stdioLogName=None) cmd.worker = wfb.worker res = yield cmd.run(None, wfb.worker.conn, builder.name) if res.rc != 0: return False return True Here is a more complete example that checks if a worker is fit to start a build. 
If the load average is higher than the number of CPU cores or if there is less than 2GB of free memory, refuse to run the build on that worker. Also, put that worker in quarantine to make sure no other builds are scheduled on it for a while. Otherwise, let the build start on that worker. .. code-block:: python class FakeBuild(object): properties = Properties() class FakeStep(object): build = FakeBuild() @defer.inlineCallbacks def shell(command, worker, builder): args = { 'command': command, 'logEnviron': False, 'workdir': worker.worker_basedir, 'want_stdout': False, 'want_stderr': False, } cmd = RemoteCommand('shell', args, stdioLogName=None) cmd.worker = worker yield cmd.run(FakeStep(), worker.conn, builder.name) return cmd.rc @defer.inlineCallbacks def canStartBuild(builder, wfb, request): # check that load is not too high rc = yield shell( 'test "$(cut -d. -f1 /proc/loadavg)" -le "$(nproc)"', wfb.worker, builder) if rc != 0: log.msg('loadavg is too high to take new builds', system=repr(wfb.worker)) wfb.worker.putInQuarantine() return False # check there is enough free memory sed_expr = r's/^MemAvailable:[[:space:]]+([0-9]+)[[:space:]]+kB$/\1/p' rc = yield shell( 'test "$(sed -nre \'%s\' /proc/meminfo)" -gt 2000000' % sed_expr, wfb.worker, builder) if rc != 0: log.msg('not enough free memory to take new builds', system=repr(wfb.worker)) wfb.worker.putInQuarantine() return False # The build may now proceed. # # Prevent this worker from taking any other build while this one is # starting for 2 min. This leaves time for the build to start consuming # resources (disk, memory, cpu). When the quarantine is over, if the # same worker is subject to start another build, the above checks will # better reflect the actual state of the worker. wfb.worker.quarantine_timeout = 120 wfb.worker.putInQuarantine() # This does not take the worker out of quarantine, it only resets the # timeout value to default. 
wfb.worker.resetQuarantine() return True You can extend these examples using any remote command described in the :doc:`../developer/master-worker`. .. _Customizing-SVNPoller: Customizing SVNPoller --------------------- Each source file that is tracked by a Subversion repository has a fully-qualified SVN URL in the following form: :samp:`({REPOURL})({PROJECT-plus-BRANCH})({FILEPATH})`. When you create the :bb:chsrc:`SVNPoller`, you give it a ``repourl`` value that includes all of the :samp:`{REPOURL}` and possibly some portion of the :samp:`{PROJECT-plus-BRANCH}` string. The :bb:chsrc:`SVNPoller` is responsible for producing Changes that contain a branch name and a :samp:`{FILEPATH}` (which is relative to the top of a checked-out tree). The details of how these strings are split up depend upon how your repository names its branches. :samp:`{PROJECT}/{BRANCHNAME}/{FILEPATH}` repositories ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ One common layout is to have all the various projects that share a repository get a single top-level directory each, with ``branches``, ``tags``, and ``trunk`` subdirectories: .. code-block:: none amanda/trunk /branches/3_2 /3_3 /tags/3_2_1 /3_2_2 /3_3_0 To set up a :bb:chsrc:`SVNPoller` that watches the Amanda trunk (and nothing else), we would use the following, using the default ``split_file``: .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.SVNPoller( repourl="https://svn.amanda.sourceforge.net/svnroot/amanda/amanda/trunk") In this case, every Change that our :bb:chsrc:`SVNPoller` produces will have its branch attribute set to ``None``, to indicate that the Change is on the trunk. No other sub-projects or branches will be tracked. If we want our ChangeSource to follow multiple branches, we have to do two things. First we have to change our ``repourl=`` argument to watch more than just ``amanda/trunk``. We will set it to ``amanda`` so that we'll see both the trunk and all the branches. 
Second, we have to tell :bb:chsrc:`SVNPoller` how to split the :samp:`({PROJECT-plus-BRANCH})({FILEPATH})` strings it gets from the repository out into :samp:`({BRANCH})` and :samp:`({FILEPATH})`. We do the latter by providing a ``split_file`` function. This function is responsible for splitting something like ``branches/3_3/common-src/amanda.h`` into ``branch='branches/3_3'`` and ``filepath='common-src/amanda.h'``. The function is always given a string that names a file relative to the subdirectory pointed to by the :bb:chsrc:`SVNPoller`\'s ``repourl=`` argument. It is expected to return a dictionary with at least the ``path`` key. The splitter may optionally set ``branch``, ``project`` and ``repository``. For backwards compatibility it may return a tuple of ``(branchname, path)``. It may also return ``None`` to indicate that the file is of no interest. .. note:: The function should return ``branches/3_3`` rather than just ``3_3`` because the SVN checkout step, will append the branch name to the ``baseURL``, which requires that we keep the ``branches`` component in there. Other VC schemes use a different approach towards branches and may not require this artifact. If your repository uses this same ``{PROJECT}/{BRANCH}/{FILEPATH}`` naming scheme, the following function will work: .. code-block:: python def split_file_branches(path): pieces = path.split('/') if len(pieces) > 1 and pieces[0] == 'trunk': return (None, '/'.join(pieces[1:])) elif len(pieces) > 2 and pieces[0] == 'branches': return ('/'.join(pieces[0:2]), '/'.join(pieces[2:])) else: return None In fact, this is the definition of the provided ``split_file_branches`` function. So to have our Twisted-watching :bb:chsrc:`SVNPoller` follow multiple branches, we would use this: .. 
code-block:: python from buildbot.plugins import changes, util c['change_source'] = changes.SVNPoller("svn://svn.twistedmatrix.com/svn/Twisted", split_file=util.svn.split_file_branches) Changes for all sorts of branches (with names like ``"branches/1.5.x"``, and ``None`` to indicate the trunk) will be delivered to the Schedulers. Each Scheduler is then free to use or ignore each branch as it sees fit. If you have multiple projects in the same repository your split function can attach a project name to the Change to help the Scheduler filter out unwanted changes: .. code-block:: python from buildbot.plugins import util def split_file_projects_branches(path): if not "/" in path: return None project, path = path.split("/", 1) f = util.svn.split_file_branches(path) if f: info = dict(project=project, path=f[1]) if f[0]: info['branch'] = f[0] return info return f Again, this is provided by default. To use it you would do this: .. code-block:: python from buildbot.plugins import changes, util c['change_source'] = changes.SVNPoller( repourl="https://svn.amanda.sourceforge.net/svnroot/amanda/", split_file=util.svn.split_file_projects_branches) Note here that we are monitoring at the root of the repository, and that within that repository is a ``amanda`` subdirectory which in turn has ``trunk`` and ``branches``. It is that ``amanda`` subdirectory whose name becomes the ``project`` field of the Change. :samp:`{BRANCHNAME}/{PROJECT}/{FILEPATH}` repositories ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Another common way to organize a Subversion repository is to put the branch name at the top, and the projects underneath. This is especially frequent when there are a number of related sub-projects that all get released in a group. For example, ``Divmod.org`` hosts a project named `Nevow` as well as one named `Quotient`. In a checked-out Nevow tree there is a directory named `formless` that contains a Python source file named :file:`webform.py`. 
This repository is accessible via webdav (and thus uses an `http:` scheme) through the divmod.org hostname. There are many branches in this repository, and they use a ``({BRANCHNAME})/({PROJECT})`` naming policy. The fully-qualified SVN URL for the trunk version of :file:`webform.py` is ``http://divmod.org/svn/Divmod/trunk/Nevow/formless/webform.py``. The 1.5.x branch version of this file would have a URL of ``http://divmod.org/svn/Divmod/branches/1.5.x/Nevow/formless/webform.py``. The whole Nevow trunk would be checked out with ``http://divmod.org/svn/Divmod/trunk/Nevow``, while the Quotient trunk would be checked out using ``http://divmod.org/svn/Divmod/trunk/Quotient``. Now suppose we want to have an :bb:chsrc:`SVNPoller` that only cares about the Nevow trunk. This case looks just like the :samp:`{PROJECT}/{BRANCH}` layout described earlier: .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.SVNPoller("http://divmod.org/svn/Divmod/trunk/Nevow") But what happens when we want to track multiple Nevow branches? We have to point our ``repourl=`` high enough to see all those branches, but we also don't want to include Quotient changes (since we're only building Nevow). To accomplish this, we must rely upon the ``split_file`` function to help us tell the difference between files that belong to Nevow and those that belong to Quotient, as well as figuring out which branch each one is on. .. code-block:: python from buildbot.plugins import changes c['change_source'] = changes.SVNPoller("http://divmod.org/svn/Divmod", split_file=my_file_splitter) The ``my_file_splitter`` function will be called with repository-relative pathnames like: :file:`trunk/Nevow/formless/webform.py` This is a Nevow file, on the trunk. We want the Change that includes this to see a filename of :file:`formless/webform.py`, and a branch of ``None`` :file:`branches/1.5.x/Nevow/formless/webform.py` This is a Nevow file, on a branch. 
We want to get ``branch='branches/1.5.x'`` and ``filename='formless/webform.py'``. :file:`trunk/Quotient/setup.py` This is a Quotient file, so we want to ignore it by having :meth:`my_file_splitter` return ``None``. :file:`branches/1.5.x/Quotient/setup.py` This is also a Quotient file, which should be ignored. The following definition for :meth:`my_file_splitter` will do the job: .. code-block:: python def my_file_splitter(path): pieces = path.split('/') if pieces[0] == 'trunk': branch = None pieces.pop(0) # remove 'trunk' elif pieces[0] == 'branches': pieces.pop(0) # remove 'branches' # grab branch name branch = 'branches/' + pieces.pop(0) else: return None # something weird projectname = pieces.pop(0) if projectname != 'Nevow': return None # wrong project return dict(branch=branch, path='/'.join(pieces)) If you later decide you want to get changes for Quotient as well you could replace the last 3 lines with simply: .. code-block:: python return dict(project=projectname, branch=branch, path='/'.join(pieces)) .. _Writing-Change-Sources: Writing Change Sources ---------------------- For some version-control systems, making Buildbot aware of new changes can be a challenge. If the pre-supplied classes in :ref:`Change-Sources` are not sufficient, then you will need to write your own. There are three approaches, one of which is not even a change source. The first option is to write a change source that exposes some service to which the version control system can "push" changes. This can be more complicated, since it requires implementing a new service, but delivers changes to Buildbot immediately on commit. The second option is often preferable to the first: implement a notification service in an external process (perhaps one that is started directly by the version control system, or by an email server) and delivers changes to Buildbot via :ref:`PBChangeSource`. 
This section does not describe this particular approach, since it requires no customization within the buildmaster process. The third option is to write a change source which polls for changes - repeatedly connecting to an external service to check for new changes. This works well in many cases, but can produce a high load on the version control system if polling is too frequent, and can take too long to notice changes if the polling is not frequent enough. Writing a Notification-based Change Source ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A custom change source must implement :class:`buildbot.interfaces.IChangeSource`. The easiest way to do this is to subclass :class:`buildbot.changes.base.ChangeSource`, implementing the :meth:`describe` method to describe the instance. :class:`ChangeSource` is a Twisted service, so you will need to implement the :meth:`startService` and :meth:`stopService` methods to control the means by which your change source receives notifications. When the class does receive a change, it should call ``self.master.data.updates.addChange(..)`` to submit it to the buildmaster. This method shares the same parameters as ``master.db.changes.addChange``, so consult the API documentation for that function for details on the available arguments. You will probably also want to set ``compare_attrs`` to the list of object attributes which Buildbot will use to compare one change source to another when reconfiguring. During reconfiguration, if the new change source is different from the old, then the old will be stopped and the new started. Writing a Change Poller ~~~~~~~~~~~~~~~~~~~~~~~ Polling is a very common means of seeking changes, so Buildbot supplies a utility parent class to make it easier. A poller should subclass :class:`buildbot.changes.base.ReconfigurablePollingChangeSource`, which is a subclass of :class:`~buildbot.changes.base.ChangeSource`. 
This subclass implements the :meth:`Service` methods, and calls the :meth:`poll` method according to the ``pollInterval`` and ``pollAtLaunch`` options. The ``poll`` method should return a Deferred to signal its completion. Aside from the service methods, the other concerns in the previous section apply here, too. Writing a New Latent Worker Implementation ------------------------------------------ Writing a new latent worker should only require subclassing :class:`buildbot.worker.AbstractLatentWorker` and implementing :meth:`start_instance` and :meth:`stop_instance` at a minimum. .. bb:worker:: AbstractWorkerController AbstractLatentWorker ~~~~~~~~~~~~~~~~~~~~ .. py:class:: buildbot.worker.AbstractLatentWorker This class is the base class of all latent workers and implements some common functionality. A custom worker should only need to override :meth:`start_instance` and :meth:`stop_instance` methods. See :class:`buildbot.worker.ec2.EC2LatentWorker` for an example. Additionally, :meth:`builds_may_be_incompatible` and :attr:`isCompatibleWithBuild` members must be overridden if some qualities of the new instances is determined dynamically according to the properties of an incoming build. An example a build may require a certain Docker image or amount of allocated memory. Overriding these members ensures that builds aren't ran on incompatible workers that have already been started. .. py:method:: start_instance(self) This method is responsible for starting instance that will try to connect with this master. A deferred should be returned. Any problems should use an errback. The callback value can be ``None``, or can be an iterable of short strings to include in the "substantiate success" status message, such as identifying the instance that started. Buildbot will ensure that a single worker will never have its ``start_instance`` called before any previous calls to ``start_instance`` or ``stop_instance`` finish. 
Additionally, for each ``start_instance`` call, exactly one corresponding call to ``stop_instance`` will be done eventually. .. py:method:: stop_instance(self, fast=False) This method is responsible for shutting down instance. A deferred should be returned. If ``fast`` is ``True`` then the function should call back as soon as it is safe to do so, as, for example, the master may be shutting down. The value returned by the callback is ignored. Buildbot will ensure that a single worker will never have its ``stop_instance`` called before any previous calls to ``stop_instance`` finish. During master shutdown any pending calls to ``start_instance`` or ``stop_instance`` will be waited upon finish. .. py:attribute:: builds_may_be_incompatible Determines if new instances have qualities dependent on the build. If ``True``, the master will call ``isCompatibleWithBuild`` to determine whether new builds are compatible with the started instance. Unnecessarily setting ``builds_may_be_incompatible`` to ``True`` may result in unnecessary overhead when processing the builds. By default, this is ``False``. .. py:method:: isCompatibleWithBuild(self, build_props) This method determines whether a started instance is compatible with the build that is about to be started. ``build_props`` is the properties of the build that are known before the build has been started. A build may be incompatible with already started instance if, for example, it requests a different amount of memory or a different Docker image. A deferred should be returned, whose callback should return ``True`` if build is compatible and ``False`` otherwise. The method may be called when the instance is not yet started and should indicate compatible build in that case. In the default implementation the callback returns ``True``. Custom Build Classes -------------------- The standard :class:`BuildFactory` object creates :class:`Build` objects by default. 
These Builds will each execute a collection of :class:`BuildStep`\s in a fixed sequence. Each step can affect the results of the build, but in general there is little intelligence to tie the different steps together. By setting the factory's ``buildClass`` attribute to a different class, you can instantiate a different build class. This might be useful, for example, to create a build class that dynamically determines which steps to run. The skeleton of such a project would look like: .. code-block:: python class DynamicBuild(Build): # override some methods ... f = factory.BuildFactory() f.buildClass = DynamicBuild f.addStep(...) .. _Factory-Workdir-Functions: Factory Workdir Functions ------------------------- .. note:: While the factory workdir function is still supported, it is better to just use the fact that workdir is a :index:`renderable ` attribute of a step. A Renderable has access to much more contextual information and can also return a deferred. So you could say ``build_factory.workdir = util.Interpolate("%(src:repository)s")`` to achieve a similar goal. It is sometimes helpful to have a build's workdir determined at runtime based on the parameters of the build. To accomplish this, set the ``workdir`` attribute of the build factory to a callable. That callable will be invoked with the list of :class:`SourceStamp` for the build, and should return the appropriate workdir. Note that the value must be returned immediately - Deferreds are not supported. This can be useful, for example, in scenarios with multiple repositories submitting changes to Buildbot. In this case you likely will want to have a dedicated workdir per repository, since otherwise a sourcing step with mode = "update" will fail as a workdir with a working copy of repository A can't be "updated" for changes from a repository B. Here is an example how you can achieve workdir-per-repo: ..
code-block:: python def workdir(source_stamps): return hashlib.md5(source_stamps[0].repository).hexdigest()[:8] build_factory = factory.BuildFactory() build_factory.workdir = workdir build_factory.addStep(Git(mode="update")) # ... builders.append ({'name': 'mybuilder', 'workername': 'myworker', 'builddir': 'mybuilder', 'factory': build_factory}) The end result is a set of workdirs like .. code-block:: none Repo1 => /mybuilder/a78890ba Repo2 => /mybuilder/0823ba88 You could make the :func:`workdir()` function compute other paths, based on parts of the repo URL in the sourcestamp, or lookup in a lookup table based on repo URL. As long as there is a permanent 1:1 mapping between repos and workdir, this will work. .. _Writing-New-BuildSteps: Writing New BuildSteps ---------------------- .. warning:: The API of writing custom build steps has changed significantly in Buildbot-0.9.0. See :ref:`New-Style-Build-Steps` for details about what has changed since pre 0.9.0 releases. This section documents new-style steps. While it is a good idea to keep your build process self-contained in the source code tree, sometimes it is convenient to put more intelligence into your Buildbot configuration. One way to do this is to write a custom :class:`~buildbot.process.buildstep.BuildStep`. Once written, this Step can be used in the :file:`master.cfg` file. The best reason for writing a custom :class:`BuildStep` is to better parse the results of the command being run. For example, a :class:`~buildbot.process.buildstep.BuildStep` that knows about JUnit could look at the logfiles to determine which tests had been run, how many passed and how many failed, and then report more detailed information than a simple ``rc==0`` -based `good/bad` decision. Buildbot has acquired a large fleet of build steps, and sports a number of knobs and hooks to make steps easier to write. 
This section may seem a bit overwhelming, but most custom steps will only need to apply one or two of the techniques outlined here. For complete documentation of the build step interfaces, see :doc:`../developer/cls-buildsteps`. .. _Writing-BuildStep-Constructors: Writing BuildStep Constructors ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Build steps act as their own factories, so their constructors are a bit more complex than necessary. The configuration file instantiates a :class:`~buildbot.process.buildstep.BuildStep` object, but the step configuration must be re-used for multiple builds, so Buildbot needs some way to create more steps. Consider the use of a :class:`BuildStep` in :file:`master.cfg`: .. code-block:: python f.addStep(MyStep(someopt="stuff", anotheropt=1)) This creates a single instance of class ``MyStep``. However, Buildbot needs a new object each time the step is executed. An instance of :class:`~buildbot.process.buildstep.BuildStep` remembers how it was constructed, and can create copies of itself. When writing a new step class, then, keep in mind that you cannot do anything "interesting" in the constructor -- limit yourself to checking and storing arguments. It is customary to call the parent class's constructor with all otherwise-unspecified keyword arguments. Keep a ``**kwargs`` argument on the end of your options, and pass that up to the parent class's constructor. The whole thing looks like this: .. 
code-block:: python class Frobnify(BuildStep): def __init__(self, frob_what="frobee", frob_how_many=None, frob_how=None, **kwargs): # check if frob_how_many is None: raise TypeError("Frobnify argument frob_how_many is required") # override a parent option kwargs['parentOpt'] = 'xyz' # call parent super().__init__(**kwargs) # set Frobnify attributes self.frob_what = frob_what self.frob_how_many = frob_how_many self.frob_how = frob_how class FastFrobnify(Frobnify): def __init__(self, speed=5, **kwargs): super().__init__(**kwargs) self.speed = speed Step Execution Process ~~~~~~~~~~~~~~~~~~~~~~ A step's execution occurs in its :py:meth:`~buildbot.process.buildstep.BuildStep.run` method. When this method returns (more accurately, when the Deferred it returns fires), the step is complete. The method's result must be an integer, giving the result of the step. Any other output from the step (logfiles, status strings, URLs, etc.) is the responsibility of the ``run`` method. The :bb:step:`ShellCommand` class implements this ``run`` method, and in most cases steps subclassing ``ShellCommand`` simply implement some of the subsidiary methods that its ``run`` method calls. Running Commands ~~~~~~~~~~~~~~~~ To spawn a command in the worker, create a :class:`~buildbot.process.remotecommand.RemoteCommand` instance in your step's ``run`` method and run it with :meth:`~buildbot.process.buildstep.BuildStep.runCommand`: .. code-block:: python cmd = RemoteCommand(args) d = self.runCommand(cmd) The :py:class:`~buildbot.process.buildstep.CommandMixin` class offers a simple interface to several common worker-side commands. For the much more common task of running a shell command on the worker, use :py:class:`~buildbot.process.buildstep.ShellMixin`. This class provides a method to handle the myriad constructor arguments related to shell commands, as well as a method to create new :py:class:`~buildbot.process.remotecommand.RemoteCommand` instances.
This mixin is the recommended method of implementing custom shell-based steps. For simple steps that don't involve much logic, the :bb:step:`ShellCommand` step is recommended. A simple example of a step using the shell mixin is: .. code-block:: python class RunCleanup(buildstep.ShellMixin, buildstep.BuildStep): def __init__(self, cleanupScript='./cleanup.sh', **kwargs): self.cleanupScript = cleanupScript kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand( command=[self.cleanupScript]) yield self.runCommand(cmd) if cmd.didFail(): cmd = yield self.makeRemoteShellCommand( command=[self.cleanupScript, '--force'], logEnviron=False) yield self.runCommand(cmd) return cmd.results() @defer.inlineCallbacks def run(self): cmd = RemoteCommand(args) log = yield self.addLog('output') cmd.useLog(log, closeWhenFinished=True) yield self.runCommand(cmd) Updating Status Strings ~~~~~~~~~~~~~~~~~~~~~~~ Each step can summarize its current status in a very short string. For example, a compile step might display the file being compiled. This information can be helpful to users eager to see their build finish. Similarly, a build has a set of short strings collected from its steps summarizing the overall state of the build. Useful information here might include the number of tests run, but probably not the results of a ``make clean`` step. As a step runs, Buildbot calls its :py:meth:`~buildbot.process.buildstep.BuildStep.getCurrentSummary` method as necessary to get the step's current status. "As necessary" is determined by calls to :py:meth:`buildbot.process.buildstep.BuildStep.updateSummary`. Your step should call this method every time the status summary may have changed. Buildbot will take care of rate-limiting summary updates.
When the step is complete, Buildbot calls its :py:meth:`~buildbot.process.buildstep.BuildStep.getResultSummary` method to get a final summary of the step along with a summary for the build. About Logfiles ~~~~~~~~~~~~~~ Each BuildStep has a collection of log files. Each one has a short name, like `stdio` or `warnings`. Each log file contains an arbitrary amount of text, usually the contents of some output file generated during a build or test step, or a record of everything that was printed to :file:`stdout`/:file:`stderr` during the execution of some command. Each can contain multiple `channels`, generally limited to three basic ones: stdout, stderr, and `headers`. For example, when a shell command runs, it writes a few lines to the headers channel to indicate the exact argv strings being run, which directory the command is being executed in, and the contents of the current environment variables. Then, as the command runs, it adds a lot of :file:`stdout` and :file:`stderr` messages. When the command finishes, a final `header` line is added with the exit code of the process. Status display plugins can format these different channels in different ways. For example, the web page shows log files as text/html, with header lines in blue text, stdout in black, and stderr in red. A different URL is available which provides a text/plain format, in which stdout and stderr are collapsed together, and header lines are stripped completely. This latter option makes it easy to save the results to a file and run :command:`grep` or whatever against the output. Writing Log Files ~~~~~~~~~~~~~~~~~ Most commonly, logfiles come from commands run on the worker. Internally, these are configured by supplying the :class:`~buildbot.process.remotecommand.RemoteCommand` instance with log files via the :meth:`~buildbot.process.remoteCommand.RemoteCommand.useLog` method: .. code-block:: python @defer.inlineCallbacks def run(self): ... 
log = yield self.addLog('stdio') cmd.useLog(log, closeWhenFinished=True, logfileName='stdio') yield self.runCommand(cmd) The name passed to :meth:`~buildbot.process.remotecommand.RemoteCommand.useLog` must match that configured in the command. In this case, ``stdio`` is the default. If the log file was already added by another part of the step, it can be retrieved with :meth:`~buildbot.process.buildstep.BuildStep.getLog`: .. code-block:: python stdioLog = self.getLog('stdio') Less frequently, some master-side processing produces a log file. If this log file is short and easily stored in memory, this is as simple as a call to :meth:`~buildbot.process.buildstep.BuildStep.addCompleteLog`: .. code-block:: python @defer.inlineCallbacks def run(self): ... summary = u'\n'.join('%s: %s' % (k, count) for (k, count) in self.lint_results.items()) yield self.addCompleteLog('summary', summary) Note that the log contents must be a unicode string. Longer logfiles can be constructed line-by-line using the ``add`` methods of the log file: .. code-block:: python @defer.inlineCallbacks def run(self): ... updates = yield self.addLog('updates') while True: ... yield updates.addStdout(some_update) Again, note that the log input must be a unicode string. Finally, :meth:`~buildbot.process.buildstep.BuildStep.addHTMLLog` is similar to :meth:`~buildbot.process.buildstep.BuildStep.addCompleteLog`, but the resulting log will be tagged as containing HTML. The web UI will display the contents of the log using the browser. The ``logfiles=`` argument to :bb:step:`ShellCommand` and its subclasses creates new log files and fills them in realtime by asking the worker to watch an actual file on disk. The worker will look for additions in the target file and report them back to the :class:`BuildStep`. These additions will be added to the log file by calling :meth:`addStdout`.
All log files can be used as the source of a :class:`~buildbot.process.logobserver.LogObserver` just like the normal :file:`stdio` :class:`LogFile`. In fact, it's possible for one :class:`~buildbot.process.logobserver.LogObserver` to observe a logfile created by another. Reading Logfiles ~~~~~~~~~~~~~~~~ For the most part, Buildbot tries to avoid loading the contents of a log file into memory as a single string. For large log files on a busy master, this behavior can quickly consume a great deal of memory. Instead, steps should implement a :class:`~buildbot.process.logobserver.LogObserver` to examine log files one chunk or line at a time. For commands which only produce a small quantity of output, :class:`~buildbot.process.remotecommand.RemoteCommand` will collect the command's stdout into its :attr:`~buildbot.process.remotecommand.RemoteCommand.stdout` attribute if given the ``collectStdout=True`` constructor argument. .. _Adding-LogObservers: Adding LogObservers ~~~~~~~~~~~~~~~~~~~ Most shell commands emit messages to stdout or stderr as they operate, especially if you ask them nicely with a option `--verbose` flag of some sort. They may also write text to a log file while they run. Your :class:`BuildStep` can watch this output as it arrives, to keep track of how much progress the command has made or to process log output for later summarization. To accomplish this, you will need to attach a :class:`~buildbot.process.logobserver.LogObserver` to the log. This observer is given all text as it is emitted from the command, and has the opportunity to parse that output incrementally. There are a number of pre-built :class:`~buildbot.process.logobserver.LogObserver` classes that you can choose from (defined in :mod:`buildbot.process.buildstep`, and of course you can subclass them to add further customization. 
The :class:`LogLineObserver` class handles the grunt work of buffering and scanning for end-of-line delimiters, allowing your parser to operate on complete :file:`stdout`/:file:`stderr` lines. For example, let's take a look at the :class:`TrialTestCaseCounter`, which is used by the :bb:step:`Trial` step to count test cases as they are run. As Trial executes, it emits lines like the following: .. code-block:: none buildbot.test.test_config.ConfigTest.testDebugPassword ... [OK] buildbot.test.test_config.ConfigTest.testEmpty ... [OK] buildbot.test.test_config.ConfigTest.testIRC ... [FAIL] buildbot.test.test_config.ConfigTest.testLocks ... [OK] When the tests are finished, trial emits a long line of `======` and then some lines which summarize the tests that failed. We want to avoid parsing these trailing lines, because their format is less well-defined than the `[OK]` lines. A simple version of the parser for this output looks like this. The full version is in :src:`master/buildbot/steps/python_twisted.py`. .. code-block:: python from buildbot.plugins import util class TrialTestCaseCounter(util.LogLineObserver): _line_re = re.compile(r'^([\w\.]+) \.\.\. \[([^\]]+)\]$') numTests = 0 finished = False def outLineReceived(self, line): if self.finished: return if line.startswith("=" * 40): self.finished = True return m = self._line_re.search(line.strip()) if m: testname, result = m.groups() self.numTests += 1 self.step.setProgress('tests', self.numTests) This parser only pays attention to stdout, since that's where trial writes the progress lines. It has a mode flag named ``finished`` to ignore everything after the ``====`` marker, and a scary-looking regular expression to match each line while hopefully ignoring other messages that might get displayed as the test runs. Each time it identifies that a test has been completed, it increments its counter and delivers the new progress value to the step with ``self.step.setProgress``. 
This helps Buildbot to determine the ETA for the step. To connect this parser into the :bb:step:`Trial` build step, ``Trial.__init__`` ends with the following clause: .. code-block:: python # this counter will feed Progress along the 'test cases' metric counter = TrialTestCaseCounter() self.addLogObserver('stdio', counter) self.progressMetrics += ('tests',) This creates a :class:`TrialTestCaseCounter` and tells the step that the counter wants to watch the :file:`stdio` log. The observer is automatically given a reference to the step in its :attr:`step` attribute. Using Properties ~~~~~~~~~~~~~~~~ In custom :class:`BuildSteps`, you can get and set the build properties with the :meth:`getProperty` and :meth:`setProperty` methods. Each takes a string for the name of the property, and returns or accepts an arbitrary JSON-able (lists, dicts, strings, and numbers) object. For example: .. code-block:: python class MakeTarball(buildstep.ShellMixin, buildstep.BuildStep): def __init__(self, **kwargs): kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) @defer.inlineCallbacks def run(self): if self.getProperty("os") == "win": # windows-only command cmd = yield self.makeRemoteShellCommand(command=[ ... ]) else: # equivalent for other systems cmd = yield self.makeRemoteShellCommand(command=[ ... ]) yield self.runCommand(cmd) return cmd.results() Remember that properties set in a step may not be available until the next step begins. In particular, any :class:`Property` or :class:`Interpolate` instances for the current step are interpolated before the step starts, so they cannot use the value of any properties determined in that step. .. index:: links, BuildStep URLs, addURL Using Statistics ~~~~~~~~~~~~~~~~ Statistics can be generated for each step, and then summarized across all steps in a build. For example, a test step might set its ``warnings`` statistic to the number of warnings observed.
The build could then sum the ``warnings`` on all steps to get a total number of warnings. Statistics are set and retrieved with the :py:meth:`~buildbot.process.buildstep.BuildStep.setStatistic` and :py:meth:`~buildbot.process.buildstep.BuildStep.getStatistic` methods. The :py:meth:`~buildbot.process.buildstep.BuildStep.hasStatistic` method determines whether a statistic exists. The Build method :py:meth:`~buildbot.process.build.Build.getSummaryStatistic` can be used to aggregate over all steps in a Build. BuildStep URLs ~~~~~~~~~~~~~~ Each BuildStep has a collection of `links`. Each has a name and a target URL. The web display displays clickable links for each link, making them a useful way to point to extra information about a step. For example, a step that uploads a build result to an external service might include a link to the uploaded file. To set one of these links, the :class:`BuildStep` should call the :meth:`~buildbot.process.buildstep.BuildStep.addURL` method with the name of the link and the target URL. Multiple URLs can be set. For example: .. code-block:: python @defer.inlineCallbacks def run(self): ... # create and upload report to coverage server url = 'http://coverage.example.com/reports/%s' % reportname yield self.addURL('coverage', url) This also works from log observers, which is helpful for instance if the build output points to an external page such as a detailed log file. The following example parses output of *poudriere*, a tool for building packages on the FreeBSD operating system. Example output: .. code-block:: none [00:00:00] Creating the reference jail... done ... [00:00:01] Logs: /usr/local/poudriere/data/logs/bulk/103amd64-2018Q4/2018-10-03_05h47m30s ... ... build log without details (those are in the above logs directory) ... Log observer implementation: .. code-block:: python c = BuildmasterConfig = {} c['titleURL'] = 'https://my-buildbot.example.com/' # ... 
class PoudriereLogLinkObserver(util.LogLineObserver): _regex = re.compile( r'Logs: /usr/local/poudriere/data/logs/bulk/([-_/0-9A-Za-z]+)$') def __init__(self): super().__init__() self._finished = False def outLineReceived(self, line): # Short-circuit if URL already found if self._finished: return m = self._regex.search(line.rstrip()) if m: self._finished = True # Let's assume local directory /usr/local/poudriere/data/logs/bulk # is available as https://my-buildbot.example.com/poudriere/logs poudriere_ui_url = c['titleURL'] + 'poudriere/logs/' + m.group(1) # Add URLs for build overview page and for per-package log files self.step.addURL('Poudriere build web interface', poudriere_ui_url) self.step.addURL('Poudriere logs', poudriere_ui_url + '/logs/') Discovering files ~~~~~~~~~~~~~~~~~ When implementing a :class:`BuildStep` it may be necessary to know about files that are created during the build. There are a few worker commands that can be used to find files on the worker and test for the existence (and type) of files and directories. The worker provides the following file-discovery related commands: * `stat` calls :func:`os.stat` for a file in the worker's build directory. This can be used to check if a known file exists and whether it is a regular file, directory or symbolic link. * `listdir` calls :func:`os.listdir` for a directory on the worker. It can be used to obtain a list of files that are present in a directory on the worker. * `glob` calls :func:`glob.glob` on the worker, with a given shell-style pattern containing wildcards. For example, we could use stat to check if a given path exists and contains ``*.pyc`` files. If the path does not exist (or anything fails) we mark the step as failed; if the path exists but is not a directory, we mark the step as having "warnings". .. 
code-block:: python from buildbot.plugins import steps, util from buildbot.process import remotecommand from buildbot.interfaces import WorkerSetupError import stat class MyBuildStep(steps.BuildStep): def __init__(self, dirname, **kwargs): super().__init__(**kwargs) self.dirname = dirname @defer.inlineCallbacks def run(self): # make sure the worker knows about stat workerver = (self.workerVersion('stat'), self.workerVersion('glob')) if not all(workerver): raise WorkerSetupError('need stat and glob') cmd = remotecommand.RemoteCommand('stat', {'file': self.dirname}) yield self.runCommand(cmd) if cmd.didFail(): self.description = ["File not found."] return util.FAILURE s = cmd.updates["stat"][-1] if not stat.S_ISDIR(s[stat.ST_MODE]): self.description = ["'tis not a directory"] return util.WARNINGS cmd = remotecommand.RemoteCommand('glob', {'path': self.dirname + '/*.pyc'}) yield self.runCommand(cmd) if cmd.didFail(): self.description = ["Glob failed."] return util.FAILURE files = cmd.updates["files"][-1] if len(files): self.description = ["Found pycs"] + files else: self.description = ["No pycs found"] return util.SUCCESS For more information on the available commands, see :doc:`../developer/master-worker`. .. todo:: Step Progress BuildStepFailed .. _buildbot_wsgi_dashboards: Writing Dashboards with Flask_ or Bottle_ ----------------------------------------- Buildbot Nine UI is written in Javascript. This allows it to be reactive and real time, but comes at a price of a fair complexity. Sometimes, you need a dashboard displaying your build results in your own manner but learning AngularJS for that is just too much. There is a Buildbot plugin which allows to write a server side generated dashboard, and integrate it in the UI. .. code-block:: python # This needs buildbot and buildbot_www >= 0.9.5 pip install buildbot_wsgi_dashboards flask - This plugin can use any WSGI compatible web framework, Flask_ is a very common one, Bottle_ is another popular option. 
- The application needs to implement a ``/index.html`` route, which will render the html code representing the dashboard. - The application framework runs in a thread outside of Twisted. No need to worry about Twisted and asynchronous code. You can use python-requests_ or any library from the python ecosystem to access other servers. - You could use HTTP in order to access Buildbot :ref:`REST_API`, but you can also use the :ref:`Data_API`, via the provided synchronous wrapper. .. py:method:: buildbot_api.dataGet(path, filters=None, fields=None, order=None, limit=None, offset=None) :param tuple path: A tuple of path elements representing the API path to fetch. Numbers can be passed as strings or integers. :param filters: result spec filters :param fields: result spec fields :param order: result spec order :param limit: result spec limit :param offset: result spec offset :raises: :py:exc:`~buildbot.data.exceptions.InvalidPathError` :returns: a resource or list, or None This is a blocking wrapper to master.data.get as described in :ref:`Data_API`. The available paths are described in the :ref:`REST_API`, as well as the nature of return values depending on the kind of data that is fetched. Path can be either the REST path e.g. ``"builders/2/builds/4"`` or tuple e.g. ``("builders", 2, "builds", 4)``. The latter form being more convenient if some path parts are coming from variables. The :ref:`Data_API` and :ref:`REST_API` are functionally equivalent except: - :ref:`Data_API` does not have HTTP connection overhead. - :ref:`Data_API` does not enforce authorization rules. ``buildbot_api.dataGet`` is accessible via the WSGI application object passed to ``wsgi_dashboards`` plugin (as per the example). - That html code output of the server runs inside AngularJS application. - It will use the CSS of the AngularJS application (including the Bootstrap_ CSS base). You can use custom style-sheet with a standard ``style`` tag within your html. 
Custom CSS will be shared with the whole Buildbot application once your dashboard is loaded. So you should make sure your custom CSS rules only apply to your dashboard (e.g. by having a specific class for your dashboard's main div) - It can use some of the AngularJS directives defined by Buildbot UI (currently only buildsummary is usable). - It has full access to the application JS context. Here is an example of code that you can use in your ``master.cfg`` to create a simple dashboard: .. literalinclude:: mydashboard.py :language: python Then you need a ``templates/mydashboard.html`` file near your ``master.cfg``. This template is a standard Jinja_ template which is the default templating engine of Flask_. .. literalinclude:: mydashboard.html :language: html+django .. _Flask: http://flask.pocoo.org/ .. _Bottle: https://bottlepy.org/docs/dev/ .. _Bootstrap: http://getbootstrap.com/css/ .. _Jinja: http://jinja.pocoo.org/ .. _python-requests: https://requests.readthedocs.io/en/master/ A Somewhat Whimsical Example (or "It's now customized, how do I deploy it?") ---------------------------------------------------------------------------- Let's say that we've got some snazzy new unit-test framework called Framboozle. It's the hottest thing since sliced bread. It slices, it dices, it runs unit tests like there's no tomorrow. Plus if your unit tests fail, you can use its name for a Web 2.1 startup company, make millions of dollars, and hire engineers to fix the bugs for you, while you spend your afternoons lazily hang-gliding along a scenic pacific beach, blissfully unconcerned about the state of your tests. [#framboozle_reg]_ To run a Framboozle-enabled test suite, you just run the 'framboozler' command from the top of your source code tree. The 'framboozler' command emits a bunch of stuff to stdout, but the most interesting bit is that it emits the line "FNURRRGH!" 
every time it finishes running a test case. You'd like to have a test-case counting LogObserver that watches for these lines and counts them, because counting them will help the buildbot more accurately calculate how long the build will take, and this will let you know exactly how long you can sneak out of the office for your hang-gliding lessons without anyone noticing that you're gone. This will involve writing a new :class:`BuildStep` (probably named "Framboozle") which inherits from :bb:step:`ShellCommand`. The :class:`BuildStep` class definition itself will look something like this: .. code-block:: python from buildbot.plugins import steps, util class FNURRRGHCounter(util.LogLineObserver): numTests = 0 def outLineReceived(self, line): if "FNURRRGH!" in line: self.numTests += 1 self.step.setProgress('tests', self.numTests) class Framboozle(steps.ShellCommand): command = ["framboozler"] def __init__(self, **kwargs): super().__init__(**kwargs) # always upcall! counter = FNURRRGHCounter() self.addLogObserver('stdio', counter) self.progressMetrics += ('tests',) So that's the code that we want to wind up using. How do we actually deploy it? You have a number of different options: .. contents:: :local: Inclusion in the :file:`master.cfg` file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The simplest technique is to simply put the step class definitions in your :file:`master.cfg` file, somewhere before the :class:`BuildFactory` definition where you actually use it in a clause like: .. code-block:: python f = BuildFactory() f.addStep(SVN(repourl="stuff")) f.addStep(Framboozle()) Remember that :file:`master.cfg` is secretly just a Python program with one job: populating the :data:`BuildmasterConfig` dictionary. And Python programs are allowed to define as many classes as they like. So you can define classes and use them in the same file, just as long as the class is defined before some other code tries to use it. 
This is easy, and it keeps the point of definition very close to the point of use, and whoever replaces you after that unfortunate hang-gliding accident will appreciate being able to easily figure out what the heck this stupid "Framboozle" step is doing anyways. The downside is that every time you reload the config file, the Framboozle class will get redefined, which means that the buildmaster will think that you've reconfigured all the Builders that use it, even though nothing changed. Bleh. Python file somewhere on the system ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Instead, we can put this code in a separate file, and import it into the master.cfg file just like we would the normal buildsteps like :bb:step:`ShellCommand` and :bb:step:`SVN`. Create a directory named :file:`~/lib/python`, put the step class definitions in :file:`~/lib/python/framboozle.py`, and run your buildmaster using: .. code-block:: bash PYTHONPATH=~/lib/python buildbot start MASTERDIR or use the :file:`Makefile.buildbot` to control the way ``buildbot start`` works. Or add something like this to something like your :file:`~/.bashrc` or :file:`~/.bash_profile` or :file:`~/.cshrc`: .. code-block:: bash export PYTHONPATH=~/lib/python Once we've done this, our :file:`master.cfg` can look like: .. code-block:: python from framboozle import Framboozle f = BuildFactory() f.addStep(SVN(repourl="stuff")) f.addStep(Framboozle()) or: .. code-block:: python import framboozle f = BuildFactory() f.addStep(SVN(repourl="stuff")) f.addStep(framboozle.Framboozle()) (check out the Python docs for details about how ``import`` and ``from A import B`` work). What we've done here is to tell Python that every time it handles an "import" statement for some named module, it should look in our :file:`~/lib/python/` for that module before it looks anywhere else. After our directories, it will try in a bunch of standard directories too (including the one where buildbot is installed). 
By setting the :envvar:`PYTHONPATH` environment variable, you can add directories to the front of this search list. Python knows that once it "import"s a file, it doesn't need to re-import it again. This means that reconfiguring the buildmaster (with ``buildbot reconfig``, for example) won't make it think the Framboozle class has changed every time, so the Builders that use it will not be spuriously restarted. On the other hand, you either have to start your buildmaster in a slightly weird way, or you have to modify your environment to set the :envvar:`PYTHONPATH` variable. Install this code into a standard Python library directory ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Find out what your Python's standard include path is by asking it: .. code-block:: none 80:warner@luther% python Python 2.4.4c0 (#2, Oct 2 2006, 00:57:46) [GCC 4.1.2 20060928 (prerelease) (Debian 4.1.1-15)] on linux2 Type "help", "copyright", "credits" or "license" for more information. >>> import sys >>> import pprint >>> pprint.pprint(sys.path) ['', '/usr/lib/python24.zip', '/usr/lib/python2.4', '/usr/lib/python2.4/plat-linux2', '/usr/lib/python2.4/lib-tk', '/usr/lib/python2.4/lib-dynload', '/usr/local/lib/python2.4/site-packages', '/usr/lib/python2.4/site-packages', '/usr/lib/python2.4/site-packages/Numeric', '/var/lib/python-support/python2.4', '/usr/lib/site-python'] In this case, putting the code into :file:`/usr/local/lib/python2.4/site-packages/framboozle.py` would work just fine. We can use the same :file:`master.cfg` ``import framboozle`` statement as in Option 2. By putting it in a standard include directory (instead of the decidedly non-standard :file:`~/lib/python`), we don't even have to set :envvar:`PYTHONPATH` to anything special. The downside is that you probably have to be root to write to one of those standard include directories. .. 
_Plugin-Module: Distribute a Buildbot Plug-In ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ First of all, you must prepare a Python package (if you do not know what that is, please check :doc:`../developer/plugins-publish`, where you can find a couple of pointers to tutorials). When you have a package, you will have a special file called :file:`setup.py`. This file needs to be updated to include a pointer to your new step: .. code-block:: python setup( ... entry_points = { ..., 'buildbot.steps': [ 'Framboozle = framboozle:Framboozle' ] }, ... ) Where: * ``buildbot.steps`` is the kind of plugin you offer (more information about possible kinds you can find in :doc:`../developer/plugins-publish`) * ``framboozle:Framboozle`` consists of two parts: ``framboozle`` is the name of the Python module where to look for ``Framboozle`` class, which implements the plugin * ``Framboozle`` is the name of the plugin. This will allow users of your plugin to use it just like any other Buildbot plugins: .. code-block:: python from buildbot.plugins import steps ... steps.Framboozle ... Now you can upload it to PyPI_ where other people can download it from and use in their build systems. Once again, the information about how to prepare and upload a package to PyPI_ can be found in tutorials listed in :doc:`../developer/plugins-publish`. .. _PyPI: http://pypi.python.org/ Submit the code for inclusion in the Buildbot distribution ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Make a fork of buildbot on http://github.com/buildbot/buildbot or post a patch in a bug at http://trac.buildbot.net/. In either case, post a note about your patch to the mailing list, so others can provide feedback and, eventually, commit it. When it's committed to the master, the usage is the same as in the previous approach: .. code-block:: python from buildbot.plugins import steps, util ... f = util.BuildFactory() f.addStep(steps.SVN(repourl="stuff")) f.addStep(steps.Framboozle()) ... 
And then you don't even have to install :file:`framboozle.py` anywhere on your system, since it will ship with Buildbot. You don't have to be root, you don't have to set :envvar:`PYTHONPATH`. But you do have to make a good case for Framboozle being worth going into the main distribution, you'll probably have to provide docs and some unit test cases, you'll need to figure out what kind of beer the author likes (IPA's and Stouts for Dustin), and then you'll have to wait until the next release. But in some environments, all this is easier than getting root on your buildmaster box, so the tradeoffs may actually be worth it. Summary ~~~~~~~ Putting the code in master.cfg (1) makes it available to that buildmaster instance. Putting it in a file in a personal library directory (2) makes it available for any buildmasters you might be running. Putting it in a file in a system-wide shared library directory (3) makes it available for any buildmasters that anyone on that system might be running. Getting it into the buildbot's upstream repository (4) makes it available for any buildmasters that anyone in the world might be running. It's all a matter of how widely you want to deploy that new class. .. [#framboozle_reg] framboozle.com is still available. Remember, I get 10% :). buildbot-3.4.0/master/docs/manual/deploy.rst000066400000000000000000000214771413250514000210560ustar00rootroot00000000000000Deployment ========== This page aims at describing the common pitfalls and best practices when deploying buildbot. .. contents:: :depth: 1 :local: .. _Database-Server: Using A Database Server ----------------------- Buildbot uses the sqlite3 database backend by default. .. important:: SQLite3 is perfectly suitable for small setups with a few users. However, it does not scale well with large numbers of builders, workers and users. If you expect your Buildbot to grow over time, it is strongly advised to use a real database server (e.g., MySQL or Postgres). 
If you want to use a database server as the database backend for your Buildbot, use option `buildbot create-master --db` to specify the :ref:`connection string ` for the database, and make sure that the same URL appears in the ``db_url`` of the :bb:cfg:`db` parameter in your configuration file. Server Setup Example ~~~~~~~~~~~~~~~~~~~~ Installing and configuring a database server can be complex. Here is a minimalist example on how to install and configure a PostgreSQL server for your Buildbot on a recent Ubuntu system. .. note:: To install PostgreSQL on Ubuntu, you need root access. There are other ways to do it without root access (e.g. docker, build from source, etc.) but outside the scope of this example. First, let's install the server with ``apt-get``: .. code-block:: console $ sudo apt-get update <...> $ sudo apt-get install postgresql <...> $ sudo systemctl status postgresql@10-main.service ● postgresql@10-main.service - PostgreSQL Cluster 10-main Loaded: loaded (/lib/systemd/system/postgresql@.service; indirect; vendor preset: enabled) Active: active (running) since Wed 2019-05-29 11:33:40 CEST; 3min 1s ago Main PID: 24749 (postgres) Tasks: 7 (limit: 4915) CGroup: /system.slice/system-postgresql.slice/postgresql@10-main.service ├─24749 /usr/lib/postgresql/10/bin/postgres -D /var/lib/postgresql/10/main | -c config_file=/etc/postgresql/10/main/postgresql.conf ├─24751 postgres: 10/main: checkpointer process ├─24752 postgres: 10/main: writer process ├─24753 postgres: 10/main: wal writer process ├─24754 postgres: 10/main: autovacuum launcher process ├─24755 postgres: 10/main: stats collector process └─24756 postgres: 10/main: bgworker: logical replication launcher May 29 11:33:38 ubuntu1804 systemd[1]: Starting PostgreSQL Cluster 10-main... May 29 11:33:40 ubuntu1804 systemd[1]: Started PostgreSQL Cluster 10-main. Once the server is installed, create a user and associated database for your Buildbot. .. 
code-block:: console $ sudo su - postgres postgres$ createuser -P buildbot Enter password for new role: bu1ldb0t Enter it again: bu1ldb0t postgres$ createdb -O buildbot buildbot postgres$ exit After which, you can configure a proper `SQLAlchemy`_ URL: .. code-block:: python c['db'] = {'db_url': 'postgresql://buildbot:bu1ldb0t@127.0.0.1/buildbot'} And initialize the database tables with the following command: .. code-block:: console $ buildbot upgrade-master checking basedir checking for running master checking master.cfg upgrading basedir creating master.cfg.sample upgrading database (postgresql://buildbot:xxxx@127.0.0.1/buildbot) upgrade complete Additional Requirements ~~~~~~~~~~~~~~~~~~~~~~~ Depending on the selected database, further Python packages will be required. Consult the `SQLAlchemy`_ dialect list for a full description. The most common choice for MySQL is `mysqlclient`_. Any reasonably recent version should suffice. The most common choice for Postgres is `Psycopg`_. Any reasonably recent version should suffice. .. _SQLAlchemy: http://www.sqlalchemy.org/ .. _Psycopg: http://initd.org/psycopg/ .. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python .. _Maintenance: Maintenance ----------- The buildmaster can be configured to send out email notifications when a worker has been offline for a while. Be sure to configure the buildmaster with a contact email address for each worker so these notifications are sent to someone who can bring it back online. If you find you can no longer provide a worker to the project, please let the project admins know, so they can put out a call for a replacement. The Buildbot records status and logs output continually, each time a build is performed. The status tends to be small, but the build logs can become quite large. Each build and log are recorded in a separate file, arranged hierarchically under the buildmaster's base directory. 
To prevent these files from growing without bound, you should periodically delete old build logs. A simple cron job to delete anything older than, say, two weeks should do the job. The only trick is to leave the :file:`buildbot.tac` and other support files alone, for which :command:`find`'s ``-mindepth`` argument helps skip everything in the top directory. You can use something like the following (assuming builds are stored in :file:`./builds/` directory): .. code-block:: none @weekly cd BASEDIR && find . -mindepth 2 -ipath './builds/*' \ -prune -o -type f -mtime +14 -exec rm {} \; @weekly cd BASEDIR && find twistd.log* -mtime +14 -exec rm {} \; Alternatively, you can configure a maximum number of old logs to be kept using the ``--log-count`` command line option when running ``buildbot-worker create-worker`` or ``buildbot create-master``. .. _Troubleshooting: Troubleshooting --------------- Here are a few hints on diagnosing common problems. .. _Starting-the-worker: Starting the worker ~~~~~~~~~~~~~~~~~~~ Cron jobs are typically run with a minimal shell (:file:`/bin/sh`, not :file:`/bin/bash`), and tilde expansion is not always performed in such commands. You may want to use explicit paths, because the :envvar:`PATH` is usually quite short and doesn't include anything set by your shell's startup scripts (:file:`.profile`, :file:`.bashrc`, etc). If you've installed buildbot (or other Python libraries) to an unusual location, you may need to add a :envvar:`PYTHONPATH` specification (note that Python will do tilde-expansion on :envvar:`PYTHONPATH` elements by itself). Sometimes it is safer to fully-specify everything: .. code-block:: none @reboot PYTHONPATH=~/lib/python /usr/local/bin/buildbot \ start /usr/home/buildbot/basedir Take the time to get the ``@reboot`` job set up. 
Otherwise, things will work fine for a while, but the first power outage or system reboot you have will stop the worker with nothing but the cries of sorrowful developers to remind you that it has gone away. .. _Connecting-to-the-buildmaster: Connecting to the buildmaster ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If the worker cannot connect to the buildmaster, the reason should be described in the :file:`twistd.log` logfile. Some common problems are an incorrect master hostname or port number, or a mistyped bot name or password. If the worker loses the connection to the master, it is supposed to attempt to reconnect with an exponentially-increasing backoff. Each attempt (and the time of the next attempt) will be logged. If you get impatient, just manually stop and re-start the worker. When the buildmaster is restarted, all workers will be disconnected, and will attempt to reconnect as usual. The reconnect time will depend upon how long the buildmaster is offline (i.e. how far up the exponential backoff curve the workers have travelled). Again, :samp:`buildbot-worker restart {BASEDIR}` will speed up the process. .. _Logging-to-stdout: Logging to stdout ~~~~~~~~~~~~~~~~~ It can be useful to let buildbot output its log to stdout instead of a logfile. For example when running via docker, supervisor or when buildbot is started with --no-daemon. This can be accomplished by editing :file:`buildbot.tac`. It's already enabled in the docker :file:`buildbot.tac`. Change the line: `application.setComponent(ILogObserver, FileLogObserver(logfile).emit)` to: `application.setComponent(ILogObserver, FileLogObserver(sys.stdout).emit)` .. _Debugging-with-the-python-debugger: Debugging with the python debugger ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Sometimes it's necessary to see what is happening inside a program. To enable this, start buildbot with: .. code-block:: none twistd --no_save -n -b --logfile=- -y buildbot.tac This will load the debugger on every exception and breakpoints in the program. 
More information on the python debugger can be found here: https://docs.python.org/3/library/pdb.html .. _Contrib-Scripts: Contrib Scripts ~~~~~~~~~~~~~~~ While some features of Buildbot are included in the distribution, others are only available in :contrib-src:`master/contrib/` in the ``buildbot-contrib`` source directory. The latest versions of such scripts are available at :contrib-src:`master/contrib`. buildbot-3.4.0/master/docs/manual/index.rst000066400000000000000000000004731413250514000206620ustar00rootroot00000000000000This is the Buildbot manual for Buildbot version |version|. Buildbot Manual --------------- .. toctree:: :maxdepth: 2 introduction installation/index concepts secretsmanagement configuration/index customization cmdline resources optimization plugins deploy upgrading/index buildbot-3.4.0/master/docs/manual/installation/000077500000000000000000000000001413250514000215165ustar00rootroot00000000000000buildbot-3.4.0/master/docs/manual/installation/buildmaster.rst000066400000000000000000000067011413250514000245670ustar00rootroot00000000000000Buildmaster Setup ================= .. _Creating-a-buildmaster: Creating a buildmaster ---------------------- As you learned earlier (:ref:`System-Architecture`), the buildmaster runs on a central host (usually one that is publicly visible, so everybody can check on the status of the project), and controls all aspects of the buildbot system You will probably wish to create a separate user account for the buildmaster, perhaps named ``buildmaster``. Do not run the buildmaster as ``root``! You need to choose a directory for the buildmaster, called the ``basedir``. This directory will be owned by the buildmaster. It will contain the configuration, database, and status information - including logfiles. On a large buildmaster this directory will see a lot of activity, so it should be on a disk with adequate space and speed. 
Once you've picked a directory, use the ``buildbot create-master`` command to create the directory and populate it with startup files: .. code-block:: bash buildbot create-master -r basedir You will need to create a :ref:`configuration file ` before starting the buildmaster. Most of the rest of this manual is dedicated to explaining how to do this. A sample configuration file is placed in the working directory, named :file:`master.cfg.sample`, which can be copied to :file:`master.cfg` and edited to suit your purposes. (Internal details: This command creates a file named :file:`buildbot.tac` that contains all the state necessary to create the buildmaster. Twisted has a tool called ``twistd`` which can use this .tac file to create and launch a buildmaster instance. Twistd takes care of logging and daemonization (running the program in the background). :file:`/usr/bin/buildbot` is a front end which runs `twistd` for you.) Your master will need a database to store the various information about your builds, and its configuration. By default, the ``sqlite3`` backend will be used. This needs no configuration, neither extra software. All information will be stored in the file :file:`state.sqlite`. Buildbot however supports multiple backends. See :ref:`Database-Server` for more options. Buildmaster Options ~~~~~~~~~~~~~~~~~~~ This section lists options to the ``create-master`` command. You can also type ``buildbot create-master --help`` for an up-to-the-moment summary. .. program:: buildbot create-master .. option:: --force This option will allow to re-use an existing directory. .. option:: --no-logrotate This disables internal worker log management mechanism. With this option worker does not override the default logfile name and its behaviour giving a possibility to control those with command-line options of twistd daemon. .. 
option:: --relocatable This creates a "relocatable" ``buildbot.tac``, which uses relative paths instead of absolute paths, so that the buildmaster directory can be moved about. .. option:: --config The name of the configuration file to use. This configuration file need not reside in the buildmaster directory. .. option:: --log-size This is the size in bytes when exceeded to rotate the Twisted log files. The default is 10MiB. .. option:: --log-count This is the number of log rotations to keep around. You can either specify a number or ``None`` to keep all :file:`twistd.log` files around. The default is 10. .. option:: --db The database that the Buildmaster should use. Note that the same value must be added to the configuration file. buildbot-3.4.0/master/docs/manual/installation/components.rst000066400000000000000000000011261413250514000244350ustar00rootroot00000000000000.. _Buildbot-Components: Buildbot Components =================== Buildbot is shipped in two components: the *buildmaster* (called ``buildbot`` for legacy reasons) and the *worker*. The worker component has far fewer requirements, and is more broadly compatible than the buildmaster. You will need to carefully pick the environment in which to run your buildmaster, but the worker should be able to run just about anywhere. It is possible to install the buildmaster and worker on the same system, although for anything but the smallest installation this arrangement will not be very efficient. buildbot-3.4.0/master/docs/manual/installation/index.rst000066400000000000000000000002101413250514000233500ustar00rootroot00000000000000Installation ============ .. toctree:: :maxdepth: 2 components requirements installation buildmaster worker misc buildbot-3.4.0/master/docs/manual/installation/installation.rst000066400000000000000000000104041413250514000247500ustar00rootroot00000000000000.. 
_Installing-the-code: Installing the code ------------------- The Buildbot Packages ~~~~~~~~~~~~~~~~~~~~~ Buildbot comes in several parts: ``buildbot`` (the buildmaster), ``buildbot-worker`` (the worker), ``buildbot-www``, and several web plugins such as ``buildbot-waterfall-view``. The worker and buildmaster can be installed individually or together. The base web (``buildbot.www``) and web plugins are required to run a master with a web interface (the common configuration). Installation From PyPI ~~~~~~~~~~~~~~~~~~~~~~ The preferred way to install Buildbot is using ``pip``. For the master: .. code-block:: bash pip install buildbot and for the worker: .. code-block:: bash pip install buildbot-worker When using ``pip`` to install, instead of distribution specific package managers, e.g. via `apt` or `ports`, it is simpler to choose exactly which version one wants to use. It may however be easier to install via distribution specific package managers, but note that they may provide an earlier version than what is available via ``pip``. If you plan to use TLS or SSL in master configuration (e.g. to fetch resources over HTTPS using ``twisted.web.client``), you need to install Buildbot with ``tls`` extras: .. code-block:: bash pip install buildbot[tls] Installation From Tarballs ~~~~~~~~~~~~~~~~~~~~~~~~~~ Buildbot master and ``buildbot-worker`` are installed using the standard Python `distutils `_ process. For either component, after unpacking the tarball, the process is: .. code-block:: bash python setup.py build python setup.py install where the install step may need to be done as root. This will put the bulk of the code in somewhere like :file:`/usr/lib/pythonx.y/site-packages/buildbot`. It will also install the :command:`buildbot` command-line tool in :file:`/usr/bin/buildbot`. If the environment variable ``$NO_INSTALL_REQS`` is set to ``1``, then :file:`setup.py` will not try to install Buildbot's requirements. 
This is usually only useful when building a Buildbot package. To test this, shift to a different directory (like :file:`/tmp`), and run: .. code-block:: bash buildbot --version # or buildbot-worker --version If it shows you the versions of Buildbot and Twisted, the install went ok. If it says "no such command" or gets an ``ImportError`` when it tries to load the libraries, then something went wrong. ``pydoc buildbot`` is another useful diagnostic tool. Windows users will find these files in other places. You will need to make sure that Python can find the libraries, and will probably find it convenient to have :command:`buildbot` in your :envvar:`PATH`. .. _Installation-in-a-Virtualenv: Installation in a Virtualenv ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If you cannot or do not wish to install buildbot into a site-wide location like :file:`/usr` or :file:`/usr/local`, you can also install it into the account's home directory or any other location using a tool like `virtualenv `_. .. _Running-Buildbots-Tests-optional: Running Buildbot's Tests (optional) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If you wish, you can run the buildbot unit test suite. First, ensure that you have the `mock `_ Python module installed from PyPI. You must not be using a Python wheels packaged version of Buildbot or have specified the bdist_wheel command when building. The test suite is not included with the PyPi packaged version. This module is not required for ordinary Buildbot operation - only to run the tests. Note that this is not the same as the Fedora ``mock`` package! You can check if you have mock with: .. code-block:: bash python -mmock Then, run the tests: .. code-block:: bash PYTHONPATH=. trial buildbot.test # or PYTHONPATH=. trial buildbot_worker.test Nothing should fail, although a few might be skipped. 
If any of the tests fail for reasons other than a missing ``mock``, you should stop and investigate the cause before continuing the installation process, as it will probably be easier to track down the bug early. In most cases, the problem is incorrectly installed Python modules or a badly configured :envvar:`PYTHONPATH`. This may be a good time to contact the Buildbot developers for help. buildbot-3.4.0/master/docs/manual/installation/misc.rst000066400000000000000000000222031413250514000232020ustar00rootroot00000000000000Next Steps ========== .. _Launching-the-daemons: Launching the daemons --------------------- Both the buildmaster and the worker run as daemon programs. To launch them, pass the working directory to the :command:`buildbot` and :command:`buildbot-worker` commands, as appropriate: .. code-block:: bash # start a master buildbot start [ BASEDIR ] # start a worker buildbot-worker start [ WORKER_BASEDIR ] The *BASEDIR* is optional and can be omitted if the current directory contains the buildbot configuration (the :file:`buildbot.tac` file). .. code-block:: bash buildbot start This command will start the daemon and then return, so normally it will not produce any output. To verify that the programs are indeed running, look for a pair of files named :file:`twistd.log` and :file:`twistd.pid` that should be created in the working directory. :file:`twistd.pid` contains the process ID of the newly-spawned daemon. When the worker connects to the buildmaster, new directories will start appearing in its base directory. The buildmaster tells the worker to create a directory for each Builder which will be using that worker. All build operations are performed within these directories: CVS checkouts, compiles, and tests. Once you get everything running, you will want to arrange for the buildbot daemons to be started at boot time. One way is to use :command:`cron`, by putting them in a ``@reboot`` crontab entry [#f1]_ .. 
code-block:: none @reboot buildbot start [ BASEDIR ] When you run :command:`crontab` to set this up, remember to do it as the buildmaster or worker account! If you add this to your crontab when running as your regular account (or worse yet, root), then the daemon will run as the wrong user, quite possibly as one with more authority than you intended to provide. It is important to remember that the environment provided to cron jobs and init scripts can be quite different than your normal runtime. There may be fewer environment variables specified, and the :envvar:`PATH` may be shorter than usual. It is a good idea to test out this method of launching the worker by using a cron job with a time in the near future, with the same command, and then check :file:`twistd.log` to make sure the worker actually started correctly. Common problems here are for :file:`/usr/local` or :file:`~/bin` to not be on your :envvar:`PATH`, or for :envvar:`PYTHONPATH` to not be set correctly. Sometimes :envvar:`HOME` is messed up too. If using systemd to launch :command:`buildbot-worker`, it may be a good idea to specify a fixed :envvar:`PATH` using the :envvar:`Environment` directive (see `systemd unit file example `_). Some distributions may include conveniences to make starting buildbot at boot time easy. For instance, with the default buildbot package in Debian-based distributions, you may only need to modify :file:`/etc/default/buildbot` (see also :file:`/etc/init.d/buildbot`, which reads the configuration in :file:`/etc/default/buildbot`). Buildbot also comes with its own init scripts that provide support for controlling multi-worker and multi-master setups (mostly because they are based on the init script from the Debian package). With a little modification, these scripts can be used on both Debian and RHEL-based distributions. Thus, they may prove helpful to package maintainers who are working on buildbot (or to those who haven't yet split buildbot into master and worker packages). 
.. code-block:: bash # install as /etc/default/buildbot-worker # or /etc/sysconfig/buildbot-worker worker/contrib/init-scripts/buildbot-worker.default # install as /etc/default/buildmaster # or /etc/sysconfig/buildmaster master/contrib/init-scripts/buildmaster.default # install as /etc/init.d/buildbot-worker worker/contrib/init-scripts/buildbot-worker.init.sh # install as /etc/init.d/buildmaster master/contrib/init-scripts/buildmaster.init.sh # ... and tell sysvinit about them chkconfig buildmaster reset # ... or update-rc.d buildmaster defaults .. _Launching-worker-as-Windows-service: Launching worker as Windows service ----------------------------------- .. admonition:: Security consideration Setting up the buildbot worker as a Windows service requires Windows administrator rights. It is important to distinguish installation stage from service execution. It is strongly recommended run Buildbot worker with lowest required access rights. It is recommended run a service under machine local non-privileged account. If you decide run Buildbot worker under domain account it is recommended to create dedicated strongly limited user account that will run Buildbot worker service. Windows service setup ````````````````````` In this description, we assume that the buildbot worker account is the local domain account `worker`. In case worker should run under domain user account please replace ``.\worker`` with ``\worker``. Please replace ```` with given user password. Please replace ```` with the full/absolute directory specification to the created worker (what is called ``BASEDIR`` in :ref:`Creating-a-worker`). .. 
code-block:: bat buildbot_worker_windows_service --user .\worker --password --startup auto install powershell -command "& {&'New-Item' -path Registry::HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\BuildBot\Parameters}" powershell -command "& {&'set-ItemProperty' -path Registry::HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\BuildBot\Parameters -Name directories -Value ''}" The first command automatically adds user rights to run Buildbot as service. Modify environment variables ```````````````````````````` This step is optional and may depend on your needs. At least we have found useful to have dedicated temp folder worker steps. It is much easier discover what temporary files your builds leaks/misbehaves. 1. As Administrator run ``regedit`` 2. Open the key ``Computer\HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\Buildbot``. 3. Create a new value of type ``REG_MULTI_SZ`` called ``Environment``. 4. Add entries like :: TMP=c:\bbw\tmp TEMP=c:\bbw\tmp Check if Buildbot can start correctly configured as Windows service ``````````````````````````````````````````````````````````````````` As admin user run the command ``net start buildbot``. In case everything goes well, you should see following output :: The BuildBot service is starting. The BuildBot service was started successfully. Troubleshooting ``````````````` If anything goes wrong check - Twisted log on ``C:\bbw\worker\twistd.log`` - Windows system event log (``eventvwr.msc`` in command line, ``Show-EventLog`` in PowerShell). .. _Logfiles: Logfiles -------- While a buildbot daemon runs, it emits text to a logfile, named :file:`twistd.log`. A command like ``tail -f twistd.log`` is useful to watch the command output as it runs. The buildmaster will announce any errors with its configuration file in the logfile, so it is a good idea to look at the log at startup time to check for any problems. Most buildmaster activities will cause lines to be added to the log. .. 
_Shutdown: Shutdown -------- To stop a buildmaster or worker manually, use: .. code-block:: bash buildbot stop [ BASEDIR ] # or buildbot-worker stop [ WORKER_BASEDIR ] This simply looks for the :file:`twistd.pid` file and kills whatever process is identified within. At system shutdown, all processes are sent a ``SIGKILL``. The buildmaster and worker will respond to this by shutting down normally. The buildmaster will respond to a ``SIGHUP`` by re-reading its config file. Of course, this only works on Unix-like systems with signal support and not on Windows. The following shortcut is available: .. code-block:: bash buildbot reconfig [ BASEDIR ] When you update the Buildbot code to a new release, you will need to restart the buildmaster and/or worker before they can take advantage of the new code. You can do a :samp:`buildbot stop {BASEDIR}` and :samp:`buildbot start {BASEDIR}` in succession, or you can use the ``restart`` shortcut, which does both steps for you: .. code-block:: bash buildbot restart [ BASEDIR ] Workers can similarly be restarted with: .. code-block:: bash buildbot-worker restart [ BASEDIR ] There are certain configuration changes that are not handled cleanly by ``buildbot reconfig``. If this occurs, ``buildbot restart`` is a more robust way to fully switch over to the new configuration. ``buildbot restart`` may also be used to start a stopped Buildbot instance. This behavior is useful when writing scripts that stop, start, and restart Buildbot. A worker may also be gracefully shutdown from the web UI. This is useful to shutdown a worker without interrupting any current builds. The buildmaster will wait until the worker has finished all its current builds, and will then tell the worker to shutdown. .. [#f1] This ``@reboot`` syntax is understood by Vixie cron, which is the flavor usually provided with Linux systems. 
Other unices may have a cron that doesn't understand ``@reboot`` buildbot-3.4.0/master/docs/manual/installation/requirements.rst000066400000000000000000000143161413250514000250000ustar00rootroot00000000000000.. _Requirements: Requirements ============ .. _Common-Requirements: Common Requirements ------------------- At a bare minimum, you'll need the following for both the buildmaster and a worker: Python: https://www.python.org Buildbot master works with Python-3.6+. Buildbot worker works with Python 2.7, or Python 3.5+. .. note:: This should be a "normal" build of Python. Builds of Python with debugging enabled or other unusual build parameters are likely to cause incorrect behavior. Twisted: http://twistedmatrix.com Buildbot requires Twisted-17.9.0 or later on the master and the worker. In upcoming versions of Buildbot, a newer Twisted will also be required on the worker. As always, the most recent version is recommended. Certifi: https://github.com/certifi/python-certifi Certifi provides collection of Root Certificates for validating the trustworthiness of SSL certificates. Unfortunately it does not support any addition of own company certificates. At the moment you need to add your own .PEM content to cacert.pem manually. Of course, your project's build process will impose additional requirements on the workers. These hosts must have all the tools necessary to compile and test your project's source code. .. note:: If your internet connection is secured by a proxy server, please check your ``http_proxy`` and ``https_proxy`` environment variables. Otherwise ``pip`` and other tools will fail to work. Windows Support ~~~~~~~~~~~~~~~ Buildbot - both master and worker - runs well natively on Windows. The worker runs well on Cygwin, but because of problems with SQLite on Cygwin, the master does not. Buildbot's windows testing is limited to the most recent Twisted and Python versions. 
For best results, use the most recent available versions of these libraries on Windows. Pywin32: http://sourceforge.net/projects/pywin32/ Twisted requires PyWin32 in order to spawn processes on Windows. Build Tools for Visual Studio 2019 - Microsoft Visual C++ compiler Twisted requires MSVC to compile some parts like tls during the installation, see https://twistedmatrix.com/trac/wiki/WindowsBuilds and https://wiki.python.org/moin/WindowsCompilers. .. _Buildmaster-Requirements: Buildmaster Requirements ------------------------ Note that all of these requirements aside from SQLite can easily be installed from the Python package repository, PyPI. sqlite3: http://www.sqlite.org Buildbot requires a database to store its state, and by default uses SQLite. Version 3.7.0 or higher is recommended, although Buildbot will run down to 3.6.16 -- at the risk of "Database is locked" errors. The minimum version is 3.4.0, below which parallel database queries and schema introspection fail. Please note that Python ships with sqlite3 by default since Python 2.6. If you configure a different database engine, then SQLite is not required. however note that Buildbot's own unit tests require SQLite. Jinja2: http://jinja.pocoo.org/ Buildbot requires Jinja version 2.1 or higher. Jinja2 is a general purpose templating language and is used by Buildbot to generate the HTML output. SQLAlchemy: http://www.sqlalchemy.org/ Buildbot requires SQLAlchemy version 1.3.0 or higher. SQLAlchemy allows Buildbot to build database schemas and queries for a wide variety of database systems. Alembic: https://alembic.sqlalchemy.org/en/latest/ Buildbot requires Alembic version 1.6.0 or higher. Buildbot uses Alembic to manage schema upgrades from version to version. Python-Dateutil: http://labix.org/python-dateutil Buildbot requires Python-Dateutil in version 1.5 or higher (the last version to support Python-2.x). This is a small, pure-Python library. 
Autobahn: The master requires Autobahn version 0.16.0 or higher with Python 2.7. txrequests: https://github.com/tardyp/txrequests or treq: https://github.com/twisted/treq Both libraries are optional, but a lot of Buildbot plugins assume that one of it is installed. Otherwise plugins will complain in the twisted log file if it is not installed. Here is a little comparison table: +----------------------------------+------------+----------+ | | txrequests | treq | +----------------------------------+------------+----------+ | International Domains and URLs | yes | yes | +----------------------------------+------------+----------+ | Keep-Alive & Connection Pooling | yes | yes | +----------------------------------+------------+----------+ | Sessions with Cookie Persistence | yes | yes | +----------------------------------+------------+----------+ | Browser-style SSL Verification | yes | yes | +----------------------------------+------------+----------+ | Basic Authentication | yes | yes | +----------------------------------+------------+----------+ | Digest Authentication | yes | no | +----------------------------------+------------+----------+ | Elegant Key/Value Cookies | yes | yes | +----------------------------------+------------+----------+ | Automatic Decompression | yes | yes | +----------------------------------+------------+----------+ | Unicode Response Bodies | yes | yes | +----------------------------------+------------+----------+ | Multi-part File Uploads | yes | yes | +----------------------------------+------------+----------+ | Connection Timeouts | yes | yes | +----------------------------------+------------+----------+ | HTTP(S) Proxy Support | yes | no | +----------------------------------+------------+----------+ | .netrc support | yes | no | +----------------------------------+------------+----------+ | Python 2.7 | yes | yes | +----------------------------------+------------+----------+ | Python 3.x | yes | yes | 
+----------------------------------+------------+----------+ | Speed | slower | fast | +----------------------------------+------------+----------+ buildbot-3.4.0/master/docs/manual/installation/worker.rst000066400000000000000000000360771413250514000235760ustar00rootroot00000000000000Worker Setup ============ .. _Creating-a-worker: Creating a worker ----------------- Typically, you will be adding a worker to an existing buildmaster, to provide additional architecture coverage. The Buildbot administrator will give you several pieces of information necessary to connect to the buildmaster. You should also be somewhat familiar with the project being tested so that you can troubleshoot build problems locally. Buildbot exists to make sure that the project's stated ``how to build it`` process actually works. To this end, the worker should run in an environment just like that of your regular developers. Typically the project's build process is documented somewhere (:file:`README`, :file:`INSTALL`, etc), in a document that should mention all library dependencies and contain a basic set of build instructions. This document will be useful as you configure the host and account in which worker runs. Here's a good checklist for setting up a worker: 1. Set up the account It is recommended (although not mandatory) to set up a separate user account for the worker. This account is frequently named ``buildbot`` or ``worker``. This serves to isolate your personal working environment from that of the worker's, and helps to minimize the security threat posed by letting possibly-unknown contributors run arbitrary code on your system. The account should have a minimum of fancy init scripts. 2. Install the Buildbot code Follow the instructions given earlier (:ref:`Installing-the-code`). If you use a separate worker account, and you didn't install the Buildbot code to a shared location, then you will need to install it with ``--home=~`` for each account that needs it. 3. 
Set up the host Make sure the host can actually reach the buildmaster. Usually the buildmaster is running a status webserver on the same machine, so simply point your web browser at it and see if you can get there. Install whatever additional packages or libraries the project's INSTALL document advises. (or not: if your worker is supposed to make sure that building without optional libraries still works, then don't install those libraries.) Again, these libraries don't necessarily have to be installed to a site-wide shared location, but they must be available to your build process. Accomplishing this is usually very specific to the build process, so installing them to :file:`/usr` or :file:`/usr/local` is usually the best approach. 4. Test the build process Follow the instructions in the :file:`INSTALL` document, in the worker's account. Perform a full CVS (or whatever) checkout, configure, make, run tests, etc. Confirm that the build works without manual fussing. If it doesn't work when you do it manually, it will be unlikely to work when Buildbot attempts to do it in an automated fashion. 5. Choose a base directory This should be somewhere in the worker's account, typically named after the project which is being tested. The worker will not touch any file outside of this directory. Something like :file:`~/Buildbot` or :file:`~/Workers/fooproject` is appropriate. 6. Get the buildmaster host/port, workername, and password When the Buildbot admin configures the buildmaster to accept and use your worker, they will provide you with the following pieces of information: * your worker's name * the password assigned to your worker * the hostname and port number of the buildmaster 7. 
Create the worker Now run the 'worker' command as follows: :samp:`buildbot-worker create-worker {BASEDIR} {MASTERHOST}:{PORT} {WORKERNAME} {PASSWORD}` This will create the base directory and a collection of files inside, including the :file:`buildbot.tac` file that contains all the information you passed to the :command:`buildbot-worker` command. 8. Fill in the hostinfo files When it first connects, the worker will send a few files up to the buildmaster which describe the host that it is running on. These files are presented on the web status display so that developers have more information to reproduce any test failures that are witnessed by the Buildbot. There are sample files in the :file:`info` subdirectory of the Buildbot's base directory. You should edit these to correctly describe you and your host. :file:`{BASEDIR}/info/admin` should contain your name and email address. This is the ``worker admin address``, and will be visible from the build status page (so you may wish to munge it a bit if address-harvesting spambots are a concern). :file:`{BASEDIR}/info/host` should be filled with a brief description of the host: OS, version, memory size, CPU speed, versions of relevant libraries installed, and finally the version of the Buildbot code which is running the worker. The optional :file:`{BASEDIR}/info/access_uri` can specify a URI which will connect a user to the machine. Many systems accept ``ssh://hostname`` URIs for this purpose. If you run many workers, you may want to create a single :file:`~worker/info` file and share it among all the workers with symlinks. .. _Worker-Options: Worker Options ~~~~~~~~~~~~~~ There are a handful of options you might want to use when creating the worker with the :samp:`buildbot-worker create-worker DIR ` command. You can type ``buildbot-worker create-worker --help`` for a summary. To use these, just include them on the ``buildbot-worker create-worker`` command line, like this .. 
code-block:: bash buildbot-worker create-worker --umask=0o22 ~/worker buildmaster.example.org:42012 \ {myworkername} {mypasswd} .. program:: buildbot-worker create-worker .. option:: --no-logrotate This disables internal worker log management mechanism. With this option worker does not override the default logfile name and its behaviour giving a possibility to control those with command-line options of twistd daemon. .. option:: --umask This is a string (generally an octal representation of an integer) which will cause the worker process' ``umask`` value to be set shortly after initialization. The ``twistd`` daemonization utility forces the umask to 077 at startup (which means that all files created by the worker or its child processes will be unreadable by any user other than the worker account). If you want build products to be readable by other accounts, you can add ``--umask=0o22`` to tell the worker to fix the umask after twistd clobbers it. If you want build products to be *writable* by other accounts too, use ``--umask=0o000``, but this is likely to be a security problem. .. option:: --keepalive This is a number that indicates how frequently ``keepalive`` messages should be sent from the worker to the buildmaster, expressed in seconds. The default (600) causes a message to be sent to the buildmaster at least once every 10 minutes. To set this to a lower value, use e.g. ``--keepalive=120``. If the worker is behind a NAT box or stateful firewall, these messages may help to keep the connection alive: some NAT boxes tend to forget about a connection if it has not been used in a while. When this happens, the buildmaster will think that the worker has disappeared, and builds will time out. Meanwhile the worker will not realize that anything is wrong. .. option:: --maxdelay This is a number that indicates the maximum amount of time the worker will wait between connection attempts, expressed in seconds. 
The default (300) causes the worker to wait at most 5 minutes before trying to connect to the buildmaster again. .. option:: --maxretries This is a number that indicates the maximum number of times the worker will make connection attempts. After that amount, the worker process will stop. This option is useful for :ref:`Latent-Workers` to avoid consuming resources in case of misconfiguration or master failure. For VM based latent workers, the user is responsible for halting the system when the Buildbot worker has exited. This feature is heavily OS dependent, and cannot be managed by the Buildbot worker. For example, with systemd_, one can add ``ExecStopPost=shutdown now`` to the Buildbot worker service unit configuration. .. _systemd: https://www.freedesktop.org/software/systemd/man/systemd.service.html .. option:: --log-size This is the size in bytes when exceeded to rotate the Twisted log files. .. option:: --log-count This is the number of log rotations to keep around. You can either specify a number or ``None`` to keep all :file:`twistd.log` files around. The default is 10. .. option:: --allow-shutdown Can also be passed directly to the worker constructor in :file:`buildbot.tac`. If set, it allows the worker to initiate a graceful shutdown, meaning that it will ask the master to shut down the worker when the current build, if any, is complete. Setting allow_shutdown to ``file`` will cause the worker to watch :file:`shutdown.stamp` in basedir for updates to its mtime. When the mtime changes, the worker will request a graceful shutdown from the master. The file does not need to exist prior to starting the worker. Setting allow_shutdown to ``signal`` will set up a SIGHUP handler to start a graceful shutdown. When the signal is received, the worker will request a graceful shutdown from the master. The default value is ``None``, in which case this feature will be disabled. Both master and worker must be at least version 0.8.3 for this feature to work. .. 
option:: --use-tls Can also be passed directly to the Worker constructor in :file:`buildbot.tac`. If set, the generated connection string starts with ``tls`` instead of with ``tcp``, allowing encrypted connection to the buildmaster. Make sure the worker trusts the buildmasters certificate. If you have an non-authoritative certificate (CA is self-signed) see ``connection_string`` below. .. option:: --delete-leftover-dirs Can also be passed directly to the Worker constructor in :file:`buildbot.tac`. If set, unexpected directories in worker base directory will be removed. Otherwise, a warning will be displayed in :file:`twistd.log` so that you can manually remove them. .. option:: --proxy-connection-string Can also be passed directly to the Worker constructor in :file:`buildbot.tac`. If set, the worker connection will be tunneled through a HTTP proxy specified by the option value. .. _Other-Worker-Configuration: Other Worker Configuration ~~~~~~~~~~~~~~~~~~~~~~~~~~ ``unicode_encoding`` This represents the encoding that Buildbot should use when converting unicode commandline arguments into byte strings in order to pass to the operating system when spawning new processes. The default value is what Python's :func:`sys.getfilesystemencoding()` returns, which on Windows is 'mbcs', on Mac OSX is 'utf-8', and on Unix depends on your locale settings. If you need a different encoding, this can be changed in your worker's :file:`buildbot.tac` file by adding a ``unicode_encoding`` argument to the Worker constructor. .. code-block:: python s = Worker(buildmaster_host, port, workername, passwd, basedir, keepalive, usepty, umask=umask, maxdelay=maxdelay, unicode_encoding='utf-8', allow_shutdown='signal') .. _Worker-TLS-Config: Worker TLS Configuration ~~~~~~~~~~~~~~~~~~~~~~~~ ``tls`` See ``--useTls`` option above as an alternative to setting the ``conneciton_string`` manually. 
``connection_string`` For TLS connections to the master, the ``connection_string``-argument must be passed to the worker constructor. ``buildmaster_host`` and ``port`` must then be ``None``. ``connection_string`` will be used to create a client endpoint with clientFromString_. An example of ``connection_string`` is ``"TLS:buildbot-master.com:9989"``. See more about how to formulate the connection string in ConnectionStrings_. Example TLS connection string: .. code-block:: python s = Worker(None, None, workername, passwd, basedir, keepalive, connection_string='TLS:buildbot-master.com:9989') Make sure the worker trusts the certificate of the master. If you have a non-authoritative certificate (CA is self-signed), the trustRoots parameter can be used. .. code-block:: python s = Worker(None, None, workername, passwd, basedir, keepalive, connection_string= 'TLS:buildbot-master.com:9989:trustRoots=/dir-with-ca-certs') It must point to a directory with PEM-encoded certificates. For example: .. 
code-block:: bash $ cat /dir-with-ca-certs/ca.pem -----BEGIN CERTIFICATE----- MIIE9DCCA9ygAwIBAgIJALEqLrC/m1w3MA0GCSqGSIb3DQEBCwUAMIGsMQswCQYD VQQGEwJaWjELMAkGA1UECBMCUUExEDAOBgNVBAcTB05vd2hlcmUxETAPBgNVBAoT CEJ1aWxkYm90MRkwFwYDVQQLExBEZXZlbG9wbWVudCBUZWFtMRQwEgYDVQQDEwtC dWlsZGJvdCBDQTEQMA4GA1UEKRMHRWFzeVJTQTEoMCYGCSqGSIb3DQEJARYZYnVp bGRib3RAaW50ZWdyYXRpb24udGVzdDAeFw0xNjA5MDIxMjA5NTJaFw0yNjA4MzEx MjA5NTJaMIGsMQswCQYDVQQGEwJaWjELMAkGA1UECBMCUUExEDAOBgNVBAcTB05v d2hlcmUxETAPBgNVBAoTCEJ1aWxkYm90MRkwFwYDVQQLExBEZXZlbG9wbWVudCBU ZWFtMRQwEgYDVQQDEwtCdWlsZGJvdCBDQTEQMA4GA1UEKRMHRWFzeVJTQTEoMCYG CSqGSIb3DQEJARYZYnVpbGRib3RAaW50ZWdyYXRpb24udGVzdDCCASIwDQYJKoZI hvcNAQEBBQADggEPADCCAQoCggEBALJZcC9j4XYBi1fYT/fibY2FRWn6Qh74b1Pg I7iIde6Sf3DPdh/ogYvZAT+cIlkZdo4v326d0EkuYKcywDvho8UeET6sIYhuHPDW lRl1Ret6ylxpbEfxFNvMoEGNhYAP0C6QS2eWEP9LkV2lCuMQtWWzdedjk+efqBjR Gozaim0lr/5lx7bnVx0oRLAgbI5/9Ukbopansfr+Cp9CpFpbNPGZSmELzC3FPKXK 5tycj8WEqlywlha2/VRnCZfYefB3aAuQqQilLh+QHyhn6hzc26+n5B0l8QvrMkOX atKdznMLzJWGxS7UwmDKcsolcMAW+82BZ8nUCBPF3U5PkTLO540CAwEAAaOCARUw ggERMB0GA1UdDgQWBBT7A/I+MZ1sFFJ9jikYkn51Q3wJ+TCB4QYDVR0jBIHZMIHW gBT7A/I+MZ1sFFJ9jikYkn51Q3wJ+aGBsqSBrzCBrDELMAkGA1UEBhMCWloxCzAJ BgNVBAgTAlFBMRAwDgYDVQQHEwdOb3doZXJlMREwDwYDVQQKEwhCdWlsZGJvdDEZ MBcGA1UECxMQRGV2ZWxvcG1lbnQgVGVhbTEUMBIGA1UEAxMLQnVpbGRib3QgQ0Ex EDAOBgNVBCkTB0Vhc3lSU0ExKDAmBgkqhkiG9w0BCQEWGWJ1aWxkYm90QGludGVn cmF0aW9uLnRlc3SCCQCxKi6wv5tcNzAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEB CwUAA4IBAQCJGJVMAmwZRK/mRqm9E0e3s4YGmYT2jwX5IX17XljEy+1cS4huuZW2 33CFpslkT1MN/r8IIZWilxT/lTujHyt4eERGjE1oRVKU8rlTH8WUjFzPIVu7nkte 09abqynAoec8aQukg79NRCY1l/E2/WzfnUt3yTgKPfZmzoiN0K+hH4gVlWtrizPA LaGwoslYYTA6jHNEeMm8OQLNf17OTmAa7EpeIgVpLRCieI9S3JIG4WYU8fVkeuiU cB439SdixU4cecVjNfFDpq6JM8N6+DQoYOSNRt9Dy0ioGyx5D4lWoIQ+BmXQENal gw+XLyejeNTNgLOxf9pbNYMJqxhkTkoE -----END CERTIFICATE----- Using TCP in ``connection_string`` is the equivalent to using the ``buildmaster_host`` and ``port`` arguments. .. 
code-block:: python s = Worker(None, None, workername, passwd, basedir, keepalive connection_string='TCP:buildbot-master.com:9989') is equivalent to .. code-block:: python s = Worker('buildbot-master.com', 9989, workername, passwd, basedir, keepalive) .. _ConnectionStrings: https://twistedmatrix.com/documents/current/core/howto/endpoints.html .. _clientFromString: https://twistedmatrix.com/documents/current/api/twisted.internet.endpoints.clientFromString.html buildbot-3.4.0/master/docs/manual/introduction.rst000066400000000000000000000106201413250514000222670ustar00rootroot00000000000000.. _Introduction: Introduction ============ Buildbot is a framework to automate the compile and test cycle that is used to validate code changes in most software projects. Features: * run builds on a variety of worker platforms * arbitrary build process: handles projects using C, Python, whatever * minimal host requirements: Python and Twisted * workers can be behind a firewall if they can still do checkout * status delivery through web page, email, IRC, other protocols * flexible configuration by subclassing generic build process classes * debug tools to force a new build, submit fake :class:`Change`\s, query worker status * released under the `GPL `_ .. _System-Architecture: System Architecture ------------------- Buildbot consists of a single *buildmaster* and one or more *workers* that connect to the master. The buildmaster makes all decisions about what, when, and how to build. The workers only connect to master and execute whatever commands they are instructed to execute. The usual flow of information is as follows: - the buildmaster fetches new code changes from version control systems - the buildmaster decides what builds (if any) to start - the builds are performed by executing commands on the workers (e.g. ``git clone``, ``make``, ``make check``). 
- the workers send the results of the commands back to the buildmaster - buildmaster interprets the results of the commands and marks the builds as successful or failing - buildmaster sends success or failure reports to external services to e.g. inform the developers. .. image:: ../_images/overview.* :alt: Overview Diagram .. Worker-Connections: Worker Connections ~~~~~~~~~~~~~~~~~~ The workers connect to the buildmaster over a TCP connection to a publicly-visible port. This allows workers to live behind a NAT or similar firewalls as long as they can get to buildmaster. After the connection is established, the connection is bidirectional: commands flow from the buildmaster to the worker and results flow from the worker to the buildmaster. The buildmaster does not provide the workers with the source code itself, only with commands necessary to perform the source code checkout. As a result, the workers need to be able to reach the source code repositories that they are supposed to build. .. image:: ../_images/workers.* :alt: Worker Connections .. _Buildmaster-Architecture: Buildmaster Architecture ~~~~~~~~~~~~~~~~~~~~~~~~ The following is rough overview of the data flow within the buildmaster. .. image:: ../_images/master.* :alt: Buildmaster Architecture The following provides a short overview of the core components of Buildbot master. For a more detailed description see the :ref:`Concepts` page. The core components of Buildbot master are as follows: Builders A :ref:`builder ` is a user-configurable description of how to perform a build. It defines what steps a new build will have, what workers it may run on and a couple of other properties. A builder takes a :ref:`build request ` which specifies the intention to create a build for specific versions of code and produces a :ref:`build` which is a concrete description of a build including a list of :ref:`steps ` to perform, the worker this needs to be performed on and so on. 
Schedulers A :ref:`scheduler` is a user-configurable component that decides when to start a build. The decision could be based on time, on new code being committed or on similar events. Change Sources
code-block:: python def speedup_json_loads(): import json, re original_decode = json._default_decoder.decode my_regexp = re.compile(r'^\[\"([^"]*)\",\s+\"([^"]*)\"\]$') def decode_with_re(str, *args, **kw): m = my_regexp.match(str) try: return list(m.groups()) except Exception: return original_decode(str, *args, **kw) json._default_decoder.decode = decode_with_re speedup_json_loads() It patches json decoder so that it would first try to extract a value from JSON that is a list of two strings (which is the case for a property being a string), and would fallback to general JSON decoder on any error. buildbot-3.4.0/master/docs/manual/plugins.rst000066400000000000000000000045511413250514000212350ustar00rootroot00000000000000.. _Plugins: ================================= Plugin Infrastructure in Buildbot ================================= .. versionadded:: 0.8.11 Plugin infrastructure in Buildbot allows easy use of components that are not part of the core. It also allows unified access to components that are included in the core. The following snippet .. code-block:: python from buildbot.plugins import kind ... kind.ComponentClass ... allows to use a component of kind ``kind``. Available ``kind``\s are: ``worker`` workers, described in :doc:`configuration/workers` ``changes`` change source, described in :doc:`configuration/changesources` ``schedulers`` schedulers, described in :doc:`configuration/schedulers` ``steps`` build steps, described in :doc:`configuration/steps/index` ``reporters`` reporters (or reporter targets), described in :doc:`configuration/reporters/index` ``util`` utility classes. For example, :doc:`BuilderConfig `, :doc:`configuration/buildfactories`, :ref:`ChangeFilter ` and :doc:`Locks ` are accessible through ``util``. Web interface plugins are not used directly: as described in :doc:`web server configuration ` section, they are listed in the corresponding section of the web server configuration dictionary. .. 
note:: If you are not very familiar with Python and you need to use different kinds of components, start your ``master.cfg`` file with: .. code-block:: python from buildbot.plugins import * As a result, all listed above components will be available for use. This is what sample ``master.cfg`` file uses. Finding Plugins =============== Buildbot maintains a list of plugins at https://github.com/buildbot/buildbot/wiki/PluginList. Developing Plugins ================== :ref:`Plugin-Module` contains all necessary information for you to develop new plugins. Please edit https://github.com/buildbot/buildbot/wiki/PluginList to add a link to your plugin! Plugins of note =============== Plugins were introduced in Buildbot-0.8.11, so as of this writing, only components that are bundled with Buildbot are available as plugins. If you have an idea/need about extending Buildbot, head to :doc:`../developer/plugins-publish`, create your own plugins and let the world know how Buildbot can be made even more useful. buildbot-3.4.0/master/docs/manual/resources.rst000066400000000000000000000006671413250514000215720ustar00rootroot00000000000000.. _Resources: Resources ========= The Buildbot home page is http://buildbot.net/. For configuration questions and general discussion, please use the ``buildbot-devel`` mailing list. The subscription instructions and archives are available at https://lists.buildbot.net/pipermail/devel/ The ``#buildbot`` channel on Freenode's IRC servers hosts development discussion, and often folks are available to answer questions there, as well. buildbot-3.4.0/master/docs/manual/secretsmanagement.rst000066400000000000000000000276301413250514000232640ustar00rootroot00000000000000 .. _secretManagement: ================= Secret Management ================= Requirements ============ Buildbot steps might need secrets to execute their actions. Secrets are used to execute commands or to create authenticated network connections. 
A secret may be an SSH key, a password, or file content like a wgetrc file or a public SSH key.
code-block:: python from buildbot.plugins import secrets, util # First we declare that the secrets are stored in a directory of the filesystem # each file contains one secret identified by the filename c['secretsProviders'] = [secrets.SecretInAFile(dirname="/path/toSecretsFiles")] # then in a buildfactory: # use a secret on a shell command via Interpolate f1.addStep(ShellCommand( util.Interpolate("wget -u user -p '%(secret:userpassword)s' '%(prop:urltofetch)s'"))) # .. or non shell form: f1.addStep(ShellCommand(["wget", "-u", "user", "-p", util.Secret("userpassword"), util.Interpolate("%(prop:urltofetch)s")])) Secrets are also interpolated in the build like properties are. Their values will be used in a command line for example. As argument to services ``````````````````````` You can use secrets to configure services. All services arguments are not compatible with secrets. See their individual documentation for details. .. code-block:: python # First we declare that the secrets are stored in a directory of the filesystem # each file contains one secret identified by the filename c['secretsProviders'] = [secrets.SecretInAFile(dirname="/path/toSecretsFiles")] # then for a reporter: c['services'] = [GitHubStatusPush(token=util.Secret("githubToken"))] Secrets storages ---------------- .. _SecretInAFile: SecretInAFile ````````````` .. code-block:: python c['secretsProviders'] = [secrets.SecretInAFile(dirname="/path/toSecretsFiles")] In the passed directory, every file contains a secret identified by the filename. e.g: a file ``user`` contains the text ``pa$$w0rd``. Arguments: ``dirname`` (required) Absolute path to directory containing the files with a secret. ``strip`` (optional) if ``True`` (the default), trailing newlines are removed from the file contents. .. _HashiCorpVaultKvSecretProvider: HashiCorpVaultKvSecretProvider `````````````````````````````` .. 
code-block:: python c['secretsProviders'] = [ secrets.HashiCorpVaultKvSecretProvider( authenticator=secrets.VaultAuthenticatorApprole(roleId="", secretId=""), vault_server="http://localhost:8200", secrets_mount="kv") ] HashiCorpVaultKvSecretProvider allows to use HashiCorp Vault KV secret engine as secret provider. Other secret engines are not supported by this particular provider. For more information about Vault please visit: _`Vault`: https://www.vaultproject.io/ In order to use this secret provider, optional dependency ``hvac`` needs to be installed (``pip install hvac``). It supports different authentication methods with ability to re-authenticate when authentication token expires (not possible using ``HvacAuthenticatorToken``). Parameters accepted by ``HashiCorpVaultKvSecretProvider``: - ``authenticator``: required parameter, specifies Vault authentication method. Possible authenticators are: - ``VaultAuthenticatorToken(token)``: simplest authentication by directly providing the authentication token. This method cannot benefit from re-authentication mechanism and when token expires, secret provider will just stop working. - ``VaultAuthenticatorApprole(roleId, secretId)``: approle authentication using roleId and secretId. This is common method for automation tools fetching secrets from vault. - ``vault_server``: required parameter, specifies URL of vault server. - ``secrets_mount``: specifies mount point of KV secret engine in vault, default value is "secret". - ``api_version``: version of vault KV secret engine. Supported versions are 1 and 2, default value is 2. - ``path_delimiter``: character used to separate path and key name in secret identifiers. Default value is "|". - ``path_escape``: escape character used in secret identifiers to allow escaping of ``path_delimiter`` character in path or key values. Default value is "\". The secret identifiers that need to be passed to, e.g. :ref:`Interpolate`, have format: ``"path/to/secret:key"``. 
If the path or key name contains a colon character, it is possible to escape it using "\" or to specify a different separator character using the ``path_delimiter`` parameter when initializing the secret provider.
code-block:: python text = Interpolate("your key equals %(secret:folder1/folder2/secretname/keyname)s") - Multipart keys .. image:: ../_images/vault_multipart_key.png Each part of a multipart value can be read like .. code-block:: python url = Interpolate("site url is %(secret:folder1/folde2/folde3/secretname/url)s") pass = Interpolate("your password is %(secret:folder1/folde2/folde3/secretname/pass)s") cert = Interpolate("your cert is %(secret:folder1/folde2/folde3/secretname/ssh-cert)s") .. _SecretInPass: SecretInPass ````````````` .. code-block:: python c['secretsProviders'] = [secrets.SecretInPass( gpgPassphrase="passphrase", dirname="/path/to/password/store" )] Passwords can be stored in a unix password store, encrypted using GPG keys. Buildbot can query secrets via the ``pass`` binary found in the PATH of each worker. While ``pass`` allows for multiline entries, the secret must be on the first line of each entry. The only caveat is that all passwords Buildbot needs to access have to be encrypted using the same GPG key. For more information about ``pass``, please visit _`pass`: https://www.passwordstore.org/ Arguments: ``gpgPassphrase`` (optional) Pass phrase to the GPG decryption key, if any ``dirname`` (optional) Absolute path to the password store directory, defaults to ~/.password-store How to populate secrets in a build ---------------------------------- To populate secrets in files during a build, 2 steps are used to create and delete the files on the worker. The files will be automatically deleted at the end of the build. .. code-block:: python f = BuildFactory() with f.withSecrets(secrets_list): f.addStep(step_definition) or .. code-block:: python f = BuildFactory() f.addSteps([list_of_step_definitions], withSecrets=[secrets_list]) In both cases the secrets_list is a list of (secret path, secret value) tuples. .. 
code-block:: python secrets_list = [('/first/path', Interpolate('write something and %(secret:somethingmore)s')), ('/second/path', Interpolate('%(secret:othersecret)s')] The Interpolate class is used to render the value during the build execution. How to configure a Vault instance --------------------------------- Vault being a very generic system, it can be complex to install for the first time. Here is a simple tutorial to install the minimal Vault to use with Buildbot. Use Docker to install Vault ``````````````````````````` A Docker image is available to help users installing Vault. Without any arguments, the command launches a Docker Vault developer instance, easy to use and test the functions. The developer version is already initialized and unsealed. To launch a Vault server please refer to the VaultDocker_ documentation: .. _vaultDocker: https://hub.docker.com/_/vault/ In a shell: .. code-block:: shell docker run vault Starting the vault instance ``````````````````````````` Once the Docker image is created, launch a shell terminal on the Docker image: .. code-block:: shell docker exec -i -t ``docker_vault_image_name`` /bin/sh Then, export the environment variable VAULT_ADDR needed to init Vault. .. code-block:: shell export VAULT_ADDR='vault.server.adress' Writing secrets ``````````````` By default the official docker instance of Vault is initialized with a mount path of 'secret', a KV v1 secret engine, and a second KV engine (v2) at 'secret/data'. Currently, Buildbot is "hard wired" to expect KV v2 engines to reside within this "data" sub path. Provision is made to set a top level path via the "secretsmount" argument: defaults to "secret". To add a new secret: .. 
code-block:: shell vault kv put secret/new_secret_key value=new_secret_value buildbot-3.4.0/master/docs/manual/upgrading/000077500000000000000000000000001413250514000207755ustar00rootroot00000000000000buildbot-3.4.0/master/docs/manual/upgrading/0.9-new-style-steps.rst000066400000000000000000000203261413250514000251210ustar00rootroot00000000000000.. _New-Style-Build-Steps: New-Style Build Steps in Buildbot 0.9.0 ======================================= In Buildbot-0.9.0, many operations performed by BuildStep subclasses return a Deferred. As a result, custom build steps which call these methods will need to be rewritten. Buildbot-0.8.9 supports old-style steps natively, while new-style steps are emulated. Buildbot-0.9.0 supports new-style steps natively, while old-style steps are emulated. Buildbot-3.0 no longer supports old-style steps at all. All custom steps should be rewritten in the new style as soon as possible. Buildbot distinguishes new-style from old-style steps by the presence of a :py:meth:`~buildbot.process.buildstep.BuildStep.run` method. If this method is present, then the step is a new-style step. Summary of Changes ++++++++++++++++++ * New-style steps have a ``run`` method that is simpler to implement than the old ``start`` method. * Many methods are now asynchronous (return Deferreds), as they perform operations on the database. * Logs are now implemented by a completely different class. This class supports the same log-writing methods (``addStderr`` and so on), although they are now asynchronous. However, it does not support log-reading methods such as ``getText``. It was never advisable to handle logs as enormous strings. New-style steps should, instead, use a LogObserver or (in Buildbot-0.9.0) fetch log lines bit by bit using the data API. * :py:class:`buildbot.process.buildstep.LoggingBuildStep` is deprecated and cannot be used in new-style steps. Mix in :py:class:`buildbot.process.buildstep.ShellMixin` instead. 
* Step strings, derived by parameters like ``description``, ``descriptionDone``, and ``descriptionSuffix``, are no longer treated as lists. For backward compatibility, the parameters may still be given as lists, but will be joined with spaces during execution (using :py:func:`~buildbot.util.join_list`). Backward Compatibility ++++++++++++++++++++++ Some hacks are in place to support old-style steps. These hacks are only activated when an old-style step is detected. Support for old-style steps has been dropped in Buildbot-3.0. * The Deferreds from all asynchronous methods invoked during step execution are gathered internally. The step is not considered finished until all such Deferreds have fired, and is marked EXCEPTION if any fail. For logfiles, this is accomplished by means of a synchronous wrapper class. * Logfile data is available while the step is still in memory. This means that logs returned from ``step.getLog`` have the expected methods ``getText``, ``readlines`` and so on. * :bb:step:`ShellCommand` subclasses implicitly gather all stdio output in memory and provide it to the ``createSummary`` method. Rewriting ``start`` +++++++++++++++++++ If your custom buildstep implements the ``start`` method, then rename that method to ``run`` and set it up to return a Deferred, either explicitly or via ``inlineCallbacks``. The value of the Deferred should be the result of the step (one of the codes in :py:mod:`buildbot.process.results`), or a Twisted failure instance to complete the step as EXCEPTION. The new ``run`` method should *not* call ``self.finished`` or ``self.failed``, instead signalling the same via Deferred. For example, the following old-style ``start`` method : .. code-block:: python def start(self): ## old style cmd = remotecommand.RemoteCommand('stat', {'file': self.file }) d = self.runCommand(cmd) d.addCallback(lambda res: self.convertResult(cmd)) d.addErrback(self.failed) Becomes : .. 
code-block:: python @defer.inlineCallbacks def run(self): ## new style cmd = remotecommand.RemoteCommand('stat', {'file': self.file }) yield self.runCommand(cmd) return self.convertResult(cmd) Newly Asynchronous Methods ++++++++++++++++++++++++++ The following methods now return a Deferred: * :py:meth:`buildbot.process.buildstep.BuildStep.addLog` * ``log.addStdout`` * ``log.addStderr`` * ``log.addHeader`` * ``log.finish`` (see "Log Objects", below) * :py:meth:`buildbot.process.remotecommand.RemoteCommand.addStdout` * :py:meth:`buildbot.process.remotecommand.RemoteCommand.addStderr` * :py:meth:`buildbot.process.remotecommand.RemoteCommand.addHeader` * :py:meth:`buildbot.process.remotecommand.RemoteCommand.addToLog` * :py:meth:`buildbot.process.buildstep.BuildStep.addCompleteLog` * :py:meth:`buildbot.process.buildstep.BuildStep.addHTMLLog` * :py:meth:`buildbot.process.buildstep.BuildStep.addURL` Any custom code in a new-style step that calls these methods must handle the resulting Deferred. In some cases, that means that the calling method's signature will change. For example : .. code-block:: python def summarize(self): ## old-style for m in self.MESSAGES: if counts[m]: self.addCompleteLog(m, "".join(summaries[m])) self.setProperty("count-%s" % m, counts[m], "counter") Is a synchronous function, not returning a Deferred. However, when converted to a new-style test, it must handle Deferreds from the methods it calls, so it must be asynchronous. Syntactically, ``inlineCallbacks`` makes the change fairly simple: .. code-block:: python @defer.inlineCallbacks def summarize(self): ## new-style for m in self.MESSAGES: if counts[m]: yield self.addCompleteLog(m, "".join(summaries[m])) self.setProperty("count-%s" % m, counts[m], "counter") However, this method's callers must now handle the Deferred that it returns. All methods that can be overridden in custom steps can return a Deferred. 
Properties ++++++++++ The API for properties is the same synchronous API as was available in old-style steps. Properties are handled synchronously during the build, and persisted to the database at completion of each step. Log Objects +++++++++++ Old steps had two ways of interacting with logfiles, both of which have changed. The first is writing to logs while a step is executing. When using :py:meth:`~buildbot.process.buildstep.BuildStep.addCompleteLog` or :py:meth:`~buildbot.process.buildstep.BuildStep.addHTMLLog`, this is straightforward, except that in new-style steps these methods return a Deferred. The second method is via :py:meth:`buildbot.process.buildstep.BuildStep.addLog`. In new-style steps, the returned object (via Deferred) has the following methods to add log content: * :py:meth:`~buildbot.process.log.StreamLog.addStdout` * :py:meth:`~buildbot.process.log.StreamLog.addStderr` * :py:meth:`~buildbot.process.log.StreamLog.addHeader` * :py:meth:`~buildbot.process.log.Log.finish` All of these methods now return Deferreds. None of the old log-reading methods are available on this object: * ``hasContents`` * ``getText`` * ``readLines`` * ``getTextWithHeaders`` * ``getChunks`` If your step uses such methods, consider using a :class:`~buildbot.process.logobserver.LogObserver` instead, or using the Data API to get the required data. The undocumented and unused ``subscribeConsumer`` method of logfiles has also been removed. The :py:meth:`~buildbot.process.log.Log.subscribe` method now takes a callable, rather than an instance, and does not support catchup. This method was primarily used by :py:class:`~buildbot.process.logobserver.LogObserver`, the implementation of which has been modified accordingly. Any other uses of the subscribe method should be refactored to use a :py:class:`~buildbot.process.logobserver.LogObserver`. Status Strings ++++++++++++++ The ``self.step_status.setText`` and ``setText2`` methods have been removed. 
Similarly, the ``_describe`` and ``describe`` methods are not used in new-style steps. In fact, steps no longer set their status directly. Instead, steps call :py:meth:`buildbot.process.buildstep.BuildStep.updateSummary` whenever the status may have changed. This method calls :py:meth:`~buildbot.process.buildstep.BuildStep.getCurrentSummary` or :py:meth:`~buildbot.process.buildstep.BuildStep.getResultSummary` as appropriate and update displays of the step's status. Steps override the latter two methods to provide appropriate summaries. Statistics ++++++++++ Support for statistics has been moved to the ``BuildStep`` and ``Build`` objects. Calls to ``self.step_status.setStatistic`` should be rewritten as ``self.setStatistic``. buildbot-3.4.0/master/docs/manual/upgrading/0.9-upgrade.rst000066400000000000000000000232411413250514000234640ustar00rootroot00000000000000.. _Upgrading To Nine: Upgrading to Buildbot 0.9.0 =========================== Upgrading a Buildbot instance from 0.8.x to 0.9.x may require a number of changes to the master configuration. Those changes are summarized here. If you are starting fresh with 0.9.0 or later, you can safely skip this section. First important note is that Buildbot does not support an upgrade of a 0.8.x instance to 0.9.x. Notably the build data and logs will not be accessible anymore if you upgraded, thus the database migration scripts have been dropped. You should not ``pip upgrade -U buildbot``, but rather start from a clean virtualenv aside from your old master. You can keep your old master instance to serve the old build status. Buildbot is now composed of several Python packages and Javascript UI, and the easiest way to install it is to run the following command within a virtualenv: .. code-block:: bash pip install 'buildbot[bundle]' Config File Syntax ------------------ In preparation for compatibility with Python 3, Buildbot configuration files no longer allow the print statement: .. 
code-block:: python print "foo" To fix, simply enclose the print arguments in parentheses: .. code-block:: python print("foo") Plugins ------- Although plugin support was available in 0.8.12, its use is now highly recommended. Instead of importing modules directly in ``master.cfg``, import the plugin kind from ``buildbot.plugins``: .. code-block:: python from buildbot.plugins import steps Then access the plugin itself as an attribute: .. code-block:: python steps.SetProperty(..) See :ref:`Plugins` for more information. Web Status ---------- The most prominent change is that the existing ``WebStatus`` class is now gone, replaced by the new ``www`` functionality. Thus an ``html.WebStatus`` entry in ``c['status']`` should be removed and replaced with configuration in ``c['www']``. For example, replace: .. code-block:: python from buildbot.status import html c['status'].append(html.WebStatus(http_port=8010, allowForce=True) with: .. code-block:: python c['www'] = dict(port=8010, plugins=dict(waterfall_view={}, console_view={})) See :bb:cfg:`www` for more information. Status Classes -------------- Where in 0.8.x most of the data about a build was available synchronously, it must now be fetched dynamically using the :ref:`Data_API`. All classes under the Python package ``buildbot.status`` should be considered deprecated. Many have already been removed, and the remainder have limited functionality. Any custom code which refers to these classes must be rewritten to use the Data API. Avoid the temptation to reach into the Buildbot source code to find other useful-looking methods! Common uses of the status API are: * ``getBuild`` in a custom renderable * ``MailNotifier`` message formatters (see below for upgrade hints) * ``doStepIf`` functions on steps Import paths for several classes under the ``buildbot.status`` package but which remain useful have changed. Most of these are now available as plugins (see above), but for the remainder, consult the source code. 
BuildRequest Merging -------------------- Buildbot 0.9.x has replaced the old concept of request merging (``mergeRequests``) with a more flexible request-collapsing mechanism. See :bb:cfg:`collapseRequests` for more information. Status Reporters ---------------- In fact, the whole ``c['status']`` configuration parameter is gone. Many of the status listeners used in the status hierarchy in 0.8.x have been replaced with "reporters" that are available as buildbot plugins. However, note that not all status listeners have yet been ported. See the release notes for details. Including the ``"status"`` key in the configuration object will cause a configuration error. All reporters should be included in ``c['services']`` as described in :ref:`Reporters`. The available reporters as of 0.9.0 are * :bb:reporter:`MailNotifier` * :bb:reporter:`IRC` * :bb:reporter:`HttpStatusPush` * :bb:reporter:`GerritStatusPush` * :bb:reporter:`GitHubStatusPush` (replaces ``buildbot.status.github.GitHubStatus``) See the reporter index for the full, current list. A few notes on changes to the configuration of these reporters: * :bb:reporter:`MailNotifier` argument ``messageFormatter`` should now be a :py:class:`buildbot.reporters.message.MessageFormatter`, due to the removal of the status classes (see above), such formatters must be re-implemented using the Data API. * :bb:reporter:`MailNotifier` argument ``previousBuildGetter`` is not supported anymore * :bb:reporter:`MailNotifier` no longer forces SSL 3.0 when ``useTls`` is true. * :bb:reporter:`GerritStatusPush` callbacks slightly changed signature, and include a master reference instead of a status reference. * :bb:reporter:`GitHubStatusPush` now accepts a ``context`` parameter to be passed to the GitHub Status API. * :py:class:`buildbot.status.builder.Results` and the constants :py:class:`buildbot.status.results.SUCCESS` should be imported from the :py:class:`buildbot.process.results` module instead. 
Steps ----- Buildbot-0.8.9 introduced "new-style steps", with an asynchronous ``run`` method. In the remaining 0.8.x releases, use of new-style and old-style steps were supported side-by-side. In 0.9.x, old-style steps are emulated using a collection of hacks to allow asynchronous calls to be called from synchronous code. This emulation is imperfect, and you are strongly encouraged to rewrite any custom steps as :ref:`New-Style-Build-Steps`. Note that new-style steps now "push" their status when it changes, so the ``describe`` method no longer exists. Identifiers ----------- Many strings in Buildbot must now be identifiers. Identifiers are designed to fit easily and unambiguously into URLs, AMQP routes, and the like. An "identifier" is a nonempty unicode string of limited length, containing only UTF-8 alphanumeric characters along with ``-`` (dash) and ``_`` (underscore), and not beginning with a digit Unfortunately, many existing names do not fit this pattern. The following fields are identifiers: * worker name (50-character) * builder name (70-character) * step name (50-character) Serving static files -------------------- Since version 0.9.0 Buildbot doesn't use and doesn't serve master's ``public_html`` directory. You need to use third-party HTTP server for serving static files. Transition to "worker" terminology ---------------------------------- Since version 0.9.0 of Buildbot "slave"-based terminology is deprecated in favor of "worker"-based terminology. All identifiers, messages and documentation were updated to use "worker" instead of "slave". Old API names are still available in Buildbot versions from 0.9.0 to 1.8.0, but deprecated. The support for old API names has been removed in Buildbot version 2.0.0. To upgrade pre-0.9.0 Buildbot installation a two-stage upgrade is recommended. First, upgrade to Buildbot version 1.8.0, then fix all deprecation warnings and finally upgrade to Buildbot version 2.x.y. 
For details about changed API and how to control generated warnings see :ref:`Transition-to-worker-terminology`. Other Config Settings --------------------- The default master.cfg file contains some new changes, which you should look over: * ``c['protocols'] = {'pb': {'port': 9989}}`` (the default port used by the workers) * Waterfall View: requires installation (``pip install buildbot-waterfall-view``) and configuration (``c['www'] = { ..., 'plugins': {'waterfall_view': {} }``). Build History ------------- There is no support for importing build history from 0.8.x (where the history was stored on-disk in pickle files) into 0.9.x (where it is stored in the database). Data LifeTime ------------- Buildbot Nine data being implemented fully in an SQL database, the ``buildHorizon`` feature had to be reworked. Instead of being number-of-things based, it is now time based. This makes more sense from a user perspective but makes it harder to predict the database average size. Please be careful to provision enough disk space for your database. The old ``c['logHorizon']`` way of configuring is not supported anymore. See :bb:configurator:`JanitorConfigurator` to learn how to configure. A new ``__Janitor`` builder will be created to help keep an eye on the cleanup activities. Upgrading worker ---------------- Upgrading worker requires updating the :file:`buildbot.tac` file to use the new APIs. The easiest solution is to simply delete the worker directory and re-run ``buildbot-worker create-worker`` to get the stock `buildbot.tac`. If the loss of the cached worker state is a problem, then the `buildbot.tac` can be updated manually: 1. Replace: .. code-block:: python from buildslave.bot import BuildSlave with: .. code-block:: python from buildbot_worker.bot import Worker 2. Replace: .. code-block:: python application = service.Application('buildslave') with: .. code-block:: python application = service.Application('buildbot-worker') 3. Replace: .. 
code-block:: python s = BuildSlave(buildmaster_host, port, slavename, passwd, basedir, keepalive, usepty, umask=umask, maxdelay=maxdelay, numcpus=numcpus, allow_shutdown=allow_shutdown) with: .. code-block:: python s = Worker(buildmaster_host, port, slavename, passwd, basedir, keepalive, umask=umask, maxdelay=maxdelay, numcpus=numcpus, allow_shutdown=allow_shutdown) More Information ---------------- For minor changes not mentioned here, consult the release notes for the versions over which you are upgrading. Buildbot-0.9.0 represents several years' work, and as such we may have missed potential migration issues. buildbot-3.4.0/master/docs/manual/upgrading/0.9-worker-transition.rst000066400000000000000000000427311413250514000255430ustar00rootroot00000000000000.. _Transition-to-worker-terminology: Transition to "worker" terminology in BuildBot 0.9.0 ==================================================== Since version 0.9.0 of Buildbot "slave"-based terminology is deprecated in favor of "worker"-based terminology. API change is done in backward compatible way, so old "slave"-containing classes, functions and attributes are still available and can be used. Old API support will be removed in the future versions of Buildbot. Rename of API introduced in beta versions of Buildbot 0.9.0 done without providing fallback. See release notes for the list of breaking changes of private interfaces. The fallbacks have been removed in Buildbot version 2.0.0. Old names fallback settings --------------------------- Use of obsolete names will raise Python warnings with category :py:exc:`buildbot.worker_transition.DeprecatedWorkerAPIWarning`. By default these warnings are printed in the application log. This behaviour can be changed by setting appropriate Python warnings settings via Python's :py:mod:`warnings` module: .. 
code-block:: python import warnings from buildbot.worker_transition import DeprecatedWorkerAPIWarning # Treat old-name usage as errors: warnings.simplefilter("error", DeprecatedWorkerAPIWarning) See Python's :py:mod:`warnings` module documentation for complete list of available actions, in particular warnings can be disabled using ``"ignore"`` action. It's recommended to configure warnings inside :file:`buildbot.tac`, before using any other Buildbot classes. Changed API ----------- In general "Slave" and "Buildslave" parts in identifiers and messages were replaced with "Worker"; "SlaveBuilder" with "WorkerForBuilder". Below is the list of changed API (use of old names from this list will work). Note that some of these symbols are not included in Buildbot's public API. Compatibility is provided as a convenience to those using the private symbols anyway. - :py:class:`buildbot.interfaces.IBuildSlave` was renamed to :py:class:`~buildbot.interfaces.IWorker` - :py:class:`buildbot.interfaces.NoSlaveError` (private) left as is, but deprecated (it shouldn't be used at all) - :py:class:`buildbot.interfaces.BuildSlaveTooOldError` was renamed to :py:class:`~buildbot.interfaces.WorkerTooOldError` - :py:class:`buildbot.interfaces.LatentBuildSlaveFailedToSubstantiate` (private) was renamed to :py:class:`~buildbot.interfaces.LatentWorkerFailedToSubstantiate` - :py:class:`buildbot.interfaces.ILatentBuildSlave` was renamed to :py:class:`~buildbot.interfaces.ILatentWorker` - :py:class:`buildbot.interfaces.ISlaveStatus` (will be removed in 0.9.x) was renamed to :py:class:`~buildbot.interfaces.IWorkerStatus` - :py:mod:`buildbot.buildslave` module with all contents was renamed to :py:mod:`buildbot.worker` - :py:class:`buildbot.buildslave.AbstractBuildSlave` was renamed to :py:class:`buildbot.worker.AbstractWorker` - :py:attr:`buildbot.buildslave.AbstractBuildSlave.slavename` (private) was renamed to :py:attr:`buildbot.worker.AbstractWorker.workername` - 
:py:class:`buildbot.buildslave.AbstractLatentBuildSlave` was renamed to :py:class:`buildbot.worker.AbstractLatentWorker` - :py:class:`buildbot.buildslave.BuildSlave` was renamed to :py:class:`buildbot.worker.Worker` - :py:mod:`buildbot.buildslave.ec2` was renamed to :py:mod:`buildbot.worker.ec2` - :py:class:`buildbot.buildslave.ec2.EC2LatentBuildSlave` was renamed to :py:class:`buildbot.worker.ec2.EC2LatentWorker` - :py:mod:`buildbot.buildslave.libvirt` was renamed to :py:mod:`buildbot.worker.libvirt` - :py:class:`buildbot.buildslave.libvirt.LibVirtSlave` was renamed to :py:class:`buildbot.worker.libvirt.LibVirtWorker` - :py:mod:`buildbot.buildslave.openstack` was renamed to :py:mod:`buildbot.worker.openstack` - :py:class:`buildbot.buildslave.openstack.OpenStackLatentBuildSlave` was renamed to :py:class:`buildbot.worker.openstack.OpenStackLatentWorker` - :py:attr:`buildbot.config.MasterConfig.slaves` was renamed to :py:attr:`~buildbot.config.MasterConfig.workers` - :py:attr:`buildbot.config.BuilderConfig` constructor keyword argument ``slavename`` was renamed to ``workername`` - :py:attr:`buildbot.config.BuilderConfig` constructor keyword argument ``slavenames`` was renamed to ``workernames`` - :py:attr:`buildbot.config.BuilderConfig` constructor keyword argument ``slavebuilddir`` was renamed to ``workerbuilddir`` - :py:attr:`buildbot.config.BuilderConfig` constructor keyword argument ``nextSlave`` was renamed to ``nextWorker`` - :py:attr:`buildbot.config.BuilderConfig.slavenames` was renamed to :py:attr:`~buildbot.config.BuilderConfig.workernames` - :py:attr:`buildbot.config.BuilderConfig.slavebuilddir` was renamed to :py:attr:`~buildbot.config.BuilderConfig.workerbuilddir` - :py:attr:`buildbot.config.BuilderConfig.nextSlave` was renamed to :py:attr:`~buildbot.config.BuilderConfig.nextWorker` - :py:mod:`buildbot.process.slavebuilder` was renamed to :py:mod:`buildbot.process.workerforbuilder` - :py:class:`buildbot.process.slavebuilder.AbstractSlaveBuilder` was 
renamed to :py:class:`buildbot.process.workerforbuilder.AbstractWorkerForBuilder` - :py:attr:`buildbot.process.slavebuilder.AbstractSlaveBuilder.slave` was renamed to :py:attr:`buildbot.process.workerforbuilder.AbstractWorkerForBuilder.worker` - :py:class:`buildbot.process.slavebuilder.SlaveBuilder` was renamed to :py:class:`buildbot.process.workerforbuilder.WorkerForBuilder` - :py:class:`buildbot.process.slavebuilder.LatentSlaveBuilder` was renamed to :py:class:`buildbot.process.workerforbuilder.LatentWorkerForBuilder` - :py:meth:`buildbot.process.build.Build.getSlaveName` was renamed to :py:meth:`~buildbot.process.build.Build.getWorkerName` - :py:meth:`buildbot.process.build.Build.slavename` was renamed to :py:meth:`~buildbot.process.build.Build.workername` - :py:func:`buildbot.process.builder.enforceChosenSlave` was renamed to :py:func:`~buildbot.process.builder.enforceChosenWorker` - :py:meth:`buildbot.process.builder.Builder.canStartWithSlavebuilder` was renamed to :py:meth:`~buildbot.process.builder.Builder.canStartWithWorkerForBuilder` - :py:attr:`buildbot.process.builder.Builder.attaching_slaves` was renamed to :py:attr:`~buildbot.process.builder.Builder.attaching_workers` - :py:attr:`buildbot.process.builder.Builder.slaves` was renamed to :py:attr:`~buildbot.process.builder.Builder.workers` - :py:meth:`buildbot.process.builder.Builder.addLatentSlave` was renamed to :py:meth:`~buildbot.process.builder.Builder.addLatentWorker` - :py:meth:`buildbot.process.builder.Builder.getAvailableSlaves` was renamed to :py:meth:`~buildbot.process.builder.Builder.getAvailableWorkers` - :py:class:`buildbot.schedulers.forcesched.BuildslaveChoiceParameter` was renamed to :py:class:`~buildbot.schedulers.forcesched.WorkerChoiceParameter` - :py:attr:`buildbot.process.buildstep.BuildStep.buildslave` was renamed to :py:attr:`buildbot.process.buildstep.BuildStep.worker` (also it was moved from class static attribute to instance attribute) - 
:py:meth:`buildbot.process.buildstep.BuildStep.setBuildSlave` was renamed to :py:meth:`buildbot.process.buildstep.BuildStep.setWorker` - :py:meth:`buildbot.process.buildstep.BuildStep.slaveVersion` was renamed to :py:meth:`buildbot.process.buildstep.BuildStep.workerVersion` - :py:meth:`buildbot.process.buildstep.BuildStep.slaveVersionIsOlderThan` was renamed to :py:meth:`buildbot.process.buildstep.BuildStep.workerVersionIsOlderThan` - :py:meth:`buildbot.process.buildstep.BuildStep.checkSlaveHasCommand` was renamed to :py:meth:`buildbot.process.buildstep.BuildStep.checkWorkerHasCommand` - :py:meth:`buildbot.process.buildstep.BuildStep.getSlaveName` was renamed to :py:meth:`buildbot.process.buildstep.BuildStep.getWorkerName` - :py:class:`buildbot.locks.SlaveLock` was renamed to :py:class:`buildbot.locks.WorkerLock` - :py:attr:`buildbot.locks.SlaveLock.maxCountForSlave` was renamed to :py:attr:`buildbot.locks.WorkerLock.maxCountForWorker` - :py:class:`buildbot.locks.SlaveLock` constructor argument ``maxCountForSlave`` was renamed to ``maxCountForWorker`` - :py:mod:`buildbot.steps.slave` was renamed to :py:mod:`buildbot.steps.worker` - :py:class:`buildbot.steps.slave.SlaveBuildStep` was renamed to :py:class:`buildbot.steps.worker.WorkerBuildStep` - :py:class:`buildbot.steps.slave.CompositeStepMixin.getFileContentFromSlave` was renamed to :py:class:`buildbot.steps.worker.CompositeStepMixin.getFileContentFromWorker` - :py:attr:`buildbot.steps.transfer.FileUpload.slavesrc` was renamed :py:attr:`~buildbot.steps.transfer.FileUpload.workersrc` - :py:class:`buildbot.steps.transfer.FileUpload` constructor argument ``slavesrc`` was renamed to ``workersrc`` - :py:attr:`buildbot.steps.transfer.DirectoryUpload.slavesrc` was renamed to :py:attr:`~buildbot.steps.transfer.DirectoryUpload.workersrc` - :py:class:`buildbot.steps.transfer.DirectoryUpload` constructor argument ``slavesrc`` was renamed to ``workersrc`` - :py:attr:`buildbot.steps.transfer.MultipleFileUpload.slavesrcs` was 
renamed to :py:attr:`~buildbot.steps.transfer.MultipleFileUpload.workersrcs` - :py:class:`buildbot.steps.transfer.MultipleFileUpload` constructor argument ``slavesrcs`` was renamed to ``workersrcs`` - :py:attr:`buildbot.steps.transfer.FileDownload.slavedest` was renamed to :py:attr:`~buildbot.steps.transfer.FileDownload.workerdest` - :py:class:`buildbot.steps.transfer.FileDownload` constructor argument ``slavedest`` was renamed to ``workerdest`` - :py:attr:`buildbot.steps.transfer.StringDownload.slavedest` was renamed to :py:attr:`~buildbot.steps.transfer.StringDownload.workerdest` - :py:class:`buildbot.steps.transfer.StringDownload` constructor argument ``slavedest`` was renamed to ``workerdest`` - :py:attr:`buildbot.steps.transfer.JSONStringDownload.slavedest` was renamed to :py:attr:`~buildbot.steps.transfer.JSONStringDownload.workerdest` - :py:class:`buildbot.steps.transfer.JSONStringDownload` constructor argument ``slavedest`` was renamed to ``workerdest`` - :py:attr:`buildbot.steps.transfer.JSONPropertiesDownload.slavedest` was renamed to :py:attr:`~buildbot.steps.transfer.JSONPropertiesDownload.workerdest` - :py:class:`buildbot.steps.transfer.JSONPropertiesDownload` was renamed to constructor argument ``slavedest`` was renamed to ``workerdest`` - :py:attr:`buildbot.process.remotecommand.RemoteCommand.buildslave` was renamed to :py:attr:`~buildbot.process.remotecommand.RemoteCommand.worker` Plugins ------- ``buildbot.buildslave`` entry point was renamed to ``buildbot.worker``, new plugins should be updated accordingly. Plugins that use old ``buildbot.buildslave`` entry point are still available in the configuration file in the same way, as they were in versions prior 0.9.0: .. 
code-block:: python from buildbot.plugins import buildslave # deprecated, use "worker" instead w = buildslave.ThirdPartyWorker() But also they available using new namespace inside configuration file, so its recommended to use ``buildbot.plugins.worker`` name even if plugin uses old entry points: .. code-block:: python from buildbot.plugins import worker # ThirdPartyWorker can be defined in using `buildbot.buildslave` entry # point, this still will work. w = worker.ThirdPartyWorker() Other changes: * ``buildbot.plugins.util.BuildslaveChoiceParameter`` is deprecated in favor of ``WorkerChoiceParameter``. * ``buildbot.plugins.util.enforceChosenSlave`` is deprecated in favor of ``enforceChosenWorker``. * ``buildbot.plugins.util.SlaveLock`` is deprecated in favor of ``WorkerLock``. ``BuildmasterConfig`` changes ----------------------------- * ``c['slaves']`` was replaced with ``c['workers']``. Use of ``c['slaves']`` will work, but is considered deprecated, and will be removed in the future versions of Buildbot. * Configuration key ``c['slavePortnum']`` is deprecated in favor of ``c['protocols']['pb']['port']``. Docker latent worker changes ---------------------------- In addition to class being renamed, environment variables that are set inside container ``SLAVENAME`` and ``SLAVEPASS`` were renamed to ``WORKERNAME`` and ``WORKERPASS`` accordingly. Old environment variable are still available, but are deprecated and will be removed in the future. EC2 latent worker changes ------------------------- Use of default values of ``keypair_name`` and ``security_name`` constructor arguments of :py:class:`buildbot.worker.ec2.EC2LatentWorker` is deprecated. Please specify them explicitly. 
``steps.slave.SetPropertiesFromEnv`` changes -------------------------------------------- In addition to ``buildbot.steps.slave`` module being renamed to :py:mod:`buildbot.steps.worker`, default ``source`` value for :py:class:`~buildbot.steps.worker.SetPropertiesFromEnv` was changed from ``"SlaveEnvironment"`` to ``"WorkerEnvironment"``. Local worker changes -------------------- Working directory for local workers were changed from ``master-basedir/slaves/name`` to ``master-basedir/workers/name``. Worker Manager changes ---------------------- ``slave_config`` function argument was renamed to ``worker_config``. Properties ---------- * ``slavename`` property is deprecated in favor of ``workername`` property. Render of deprecated property will produce warning. :py:class:`buildbot.worker.AbstractWorker` (previously ``buildbot.buildslave.AbstractBuildSlave``) ``slavename`` property source were changed from ``BuildSlave`` to ``Worker (deprecated)`` :py:class:`~buildbot.worker.AbstractWorker` now sets ``workername`` property with source ``Worker`` which should be used. Metrics ------- * :py:class:`buildbot.process.metrics.AttachedSlavesWatcher` was renamed to :py:class:`buildbot.process.metrics.AttachedWorkersWatcher`. * :py:attr:`buildbot.worker.manager.WorkerManager.name` (previously ``buildbot.buildslave.manager.BuildslaveManager.name``) metric measurement class name changed from ``BuildslaveManager`` to ``WorkerManager`` * :py:attr:`buildbot.worker.manager.WorkerManager.managed_services_name` (previously ``buildbot.buildslave.manager.BuildslaveManager.managed_services_name`) metric measurement managed service name changed from ``buildslaves`` to ``workers`` Renamed events: .. 
list-table:: :header-rows: 1 * - Old name - New name * - ``AbstractBuildSlave.attached_slaves`` - ``AbstractWorker.attached_workers`` * - ``BotMaster.attached_slaves`` - ``BotMaster.attached_workers`` * - ``BotMaster.slaveLost()`` - ``BotMaster.workerLost()`` * - ``BotMaster.getBuildersForSlave()`` - ``BotMaster.getBuildersForWorker()`` * - ``AttachedSlavesWatcher`` - ``AttachedWorkersWatcher`` * - ``attached_slaves`` - ``attached_workers`` Database -------- Schema changes: .. list-table:: :header-rows: 1 * - Old name - New name * - ``buildslaves`` table - ``workers`` * - ``builds.buildslaveid`` (not ForeignKey) column - ``workerid`` (now ForeignKey) * - ``configured_buildslaves`` table - ``configured_workers`` * - ``configured_buildslaves.buildslaveid`` (ForeignKey) column - ``workerid`` * - ``connected_buildslaves`` table - ``connected_workers`` * - ``connected_buildslaves.buildslaveid`` (ForeignKey) column - ``workerid`` * - ``buildslaves_name`` index - ``workers_name`` * - ``configured_slaves_buildmasterid`` index - ``configured_workers_buildmasterid`` * - ``configured_slaves_slaves`` index - ``configured_workers_workers`` * - ``configured_slaves_identity`` index - ``configured_workers_identity`` * - ``connected_slaves_masterid`` index - ``connected_workers_masterid`` * - ``connected_slaves_slaves`` index - ``connected_workers_workers`` * - ``connected_slaves_identity`` index - ``connected_workers_identity`` * - ``builds_buildslaveid`` index - ``builds_workerid`` List of database-related changes in API (fallback for old API is provided): - :py:mod:`buildbot.db.buildslaves` was renamed to :py:mod:`~buildbot.db.workers` - :py:class:`buildbot.db.buildslaves.BuildslavesConnectorComponent` was renamed to :py:class:`buildbot.db.workers.WorkersConnectorComponent` - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.getBuildslaves` (rewritten in nine) was renamed to :py:meth:`buildbot.db.workers.WorkersConnectorComponent.getWorkers` - 
:py:attr:`buildbot.db.connector.DBConnector.buildslaves` was renamed to :py:attr:`buildbot.db.connector.DBConnector.workers` ``usePTY`` changes ------------------ ``usePTY`` default value has been changed from ``slave-config`` to ``None`` (use of ``slave-config`` will still work, but discouraged). .. _Worker-Transition-Buildbot-Worker: ``buildbot-worker`` ------------------- ``buildbot-slave`` package has been renamed to ``buildbot-worker``. ``buildbot-worker`` has backward incompatible changes and requires buildmaster >= 0.9.0b8. ``buildbot-slave`` from 0.8.x will work with both 0.8.x and 0.9.x versions of buildmaster, so there is no need to upgrade currently deployed buildbot-slaves during switch from 0.8.x to 0.9.x. .. list-table:: Master/worker compatibility table :header-rows: 1 :stub-columns: 1 * - - master 0.8.x - master 0.9.x * - buildbot-slave - yes - yes * - buildbot-worker - no - yes ``buildbot-worker`` doesn't support worker-side specification of ``usePTY`` (with ``--usepty`` command line switch of ``buildbot-worker create-worker``), you need to specify this option on master side. ``getSlaveInfo`` remote command was renamed to ``getWorkerInfo`` in ``buildbot-worker``. buildbot-3.4.0/master/docs/manual/upgrading/1.0-upgrade.rst000066400000000000000000000004771413250514000234620ustar00rootroot00000000000000.. _1.0_Upgrading: Upgrading to Buildbot 1.0 ========================= Upgrading a Buildbot instance from 0.9.x to 1.0 does not require any changes in the master configuration. Despite the major version bump, Buildbot 1.0 does not have major difference with the 0.9 series. 1.0.0 is rather the mark of API stability. buildbot-3.4.0/master/docs/manual/upgrading/2.0-upgrade.rst000066400000000000000000000022141413250514000234520ustar00rootroot00000000000000.. _2.0_Upgrading: Upgrading to Buildbot 2.0 ========================= Upgrading a Buildbot instance from 1.x to 2.0 may require some work to achieve. 
The primary changes are removal of deprecated APIs and removal of Python 2.7 support. The recommended upgrade procedure is as follows: - Upgrade to the last released BuildBot version in 1.x series. - Remove usage of the deprecated APIs. All usages of deprecated APIs threw a deprecation warning at the point of use. If the code does not emit deprecation warnings, it's in a good shape in this regard. - Upgrade master to Python 3. Note that 1.x series has some bugs in Python 3 support, so any Python-related issues encountered in this step are relatively harmless as they will be fixed after upgrading to 2.0. You may need to run the master on a real workload in order to force all deprecated code paths to be exercised. - Upgrade to Buildbot 2.0. - (Optional) Upgrade to newest Buildbot 2.x. The newest point release will contain bug fixes and functionality improvements. Note that BuildBot 2.3.0 drops support for Internet Explorer 11 and some other old browsers. buildbot-3.4.0/master/docs/manual/upgrading/3.0-upgrade.rst000066400000000000000000000275731413250514000234720ustar00rootroot00000000000000.. _3.0_Upgrading: Upgrading to Buildbot 3.0 ============================================ Upgrading a Buildbot instance from 2.x to 3.0 may require some work to achieve. The recommended upgrade procedure is as follows: - Upgrade to the last released BuildBot version in 2.x series. - Remove usage of the deprecated APIs. All usages of deprecated APIs threw a deprecation warning at the point of use. If the code does not emit deprecation warnings, it's in a good shape in this regard. You may need to run the master on a real workload in order to force all deprecated code paths to be exercised. - Upgrade to the latest Buildbot 3.0.x release. - Fix all usages of deprecated APIs. In this case, the only deprecated APIs are temporary ``*NewStyle`` build step aliases. - (Optional) Upgrade to newest Buildbot 3.x. The newest point release will contain bug fixes and functionality improvements. 
Build steps ----------- Buildbot 3.0 no longer supports old-style steps (steps which implement ``start`` method as opposed to ``run`` method). This only affects users who use steps as base classes for their own steps. New style steps provide a completely different set of functions that may be overridden. Direct instantiation of step classes is not affected. Old and new style steps work exactly the same in that case and users don't need to do anything. See :ref:`New-Style-Build-Steps` for instructions of migration to new-style steps. Migrating build steps that subclass one of the build steps provided by Buildbot is a little bit more involved. The new and old-style step APIs cannot be provided by a single class. Therefore Buildbot 2.9 introduces a number of new-style build steps that are direct equivalent of their old-style counterparts. These build steps are named as ``NewStyle`` where ```` is the old-style step they provide compatibility interface for. Buildbot 3.0 removes old-style step support and changes the ```` classes to be equivalent to ``NewStyle`` counterparts. Buildbot 3.2 removes the ``NewStyle`` aliases. If a custom step is a subclass of ``NewStyle`` equivalent and use the new-style APIs as specified in :ref:`New-Style-Build-Steps`. This part of the migration must be done before the build master is migrated to 3.0. The resulting custom step will work in Buildbot 2.9.x-3.1.x. After the build master is migrated to 3.0, the custom step may be changed to subclass ``)``. * ``watchedWorkers``. Replacement is ``workers`` parameter of the *missing worker generator*. If the value was ``None``, then there's no *missing worker generator* and the value of ``messageFormatterMissingWorker`` is ignored. * ``messageFormatterMissingWorker``. Replacement is ``message_formatter`` parameter of the *missing worker generator*. In the case of ``PushjetNotifier`` and ``PushoverNotifier``, the default message formatter is ``MessageFormatterMissingWorker(template=)``. 
BitbucketServerCoreAPIStatusPush, BitbucketServerStatusPush, GerritVerifyStatusPush, GitHubStatusPush, GitHubCommentPush, GitLabStatusPush ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The ``generators`` list will contain one report generator of instance :bb:reportgen:`BuildStartEndStatusGenerator`. The following arguments have been removed: * ``builders``. Replacement is ``builders`` parameter of the *status generator*. * ``wantProperties``. Replacement is ``wantProperties`` parameter of the message formatter passed to the *status generator*. * ``wantSteps``. Replacement is ``wantSteps`` parameter of the message formatter passed to the *status generator*. * ``wantLogs``. Replacement is ``wantLogs`` parameter of the message formatter passed to the *status generator*. * ``wantPreviousBuild``. There is no replacement, the value is computed automatically when information on previous build is needed. * ``startDescription``. Replacement is a message formatter of type ``MessageFormatterRenderable`` passed as the ``start_formatter`` parameter to the *status generator*. * ``endDescription``. Replacement is a message formatter of type ``MessageFormatterRenderable`` passed as the ``end_formatter`` parameter to the *status generator*. HttpStatusPush ^^^^^^^^^^^^^^ The ``generators`` list will contain one report generator of instance :bb:reportgen:`BuildStatusGenerator`. The following arguments have been removed: * ``builders``. Replacement is ``builders`` parameter of the *status generator*. * ``wantProperties``. Replacement is ``wantProperties`` parameter of the message formatter passed to the *status generator*. * ``wantSteps``. Replacement is ``wantSteps`` parameter of the message formatter passed to the *status generator*. * ``wantLogs``. Replacement is ``wantLogs`` parameter of the message formatter passed to the *status generator*. * ``wantPreviousBuild``. 
There is no replacement, the value is computed automatically when information on previous build is needed. * ``format_fn``. Replacement is a message formatter of type ``MessageFormatterFunction`` passed as the ``message_formatter`` parameter to the *status generator*. The ``MessageFormatterFunction`` should be passed a callable function as the ``function`` parameter. This ``function`` parameter has a different signature than ``format_fn``. ``format_fn`` was previously passed a build dictionary directly as the first argument. ``function`` will be passed a dictionary, which contains a ``build`` key which will contain the build dictionary as the value. BitbucketStatusPush ^^^^^^^^^^^^^^^^^^^ The ``generators`` list will contain one report generator of instance :bb:reportgen:`BuildStartEndStatusGenerator`. The following arguments have been removed: * ``builders``. Replacement is ``builders`` parameter of the *status generator*. * ``wantProperties``, ``wantSteps``, ``wantLogs`` and ``wantPreviousBuild`` were previously accepted, but they do not affect the behavior of the reporter. Template files in message formatters ------------------------------------ Paths to template files that are passed to message formatters for rendering are no longer supported. Please read the templates in the configuration file and pass strings instead. buildbot-3.4.0/master/docs/manual/upgrading/4.0-upgrade.rst000066400000000000000000000022011413250514000234500ustar00rootroot00000000000000.. _4.0_Upgrading: Upgrading to Buildbot 4.0 (not released) ======================================== Upgrading a Buildbot instance from 3.x to 4.0 may require some work to achieve. The recommended upgrade procedure is as follows: - Upgrade to the last released BuildBot version in 3.x series. - Remove usage of the deprecated APIs. All usages of deprecated APIs threw a deprecation warning at the point of use. If the code does not emit deprecation warnings, it's in a good shape in this regard. 
You may need to run the master on a real workload in order to force all deprecated code paths to be exercised. - Upgrade to the latest Buildbot 4.0.x release. - (Optional) Upgrade to newest Buildbot 4.x. The newest point release will contain bug fixes and functionality improvements. Message formatters ------------------ The ``wantLogs`` argument to message formatters has been removed. The equivalent is setting both ``want_logs`` and ``want_logs_content`` to the previous value of ``wantLogs``. The ``wantSteps`` and ``wantProperties`` arguments have been renamed to ``want_steps`` and ``want_properties`` respectively. buildbot-3.4.0/master/docs/manual/upgrading/index.rst000066400000000000000000000034761413250514000226500ustar00rootroot00000000000000.. _Upgrading: Upgrading ========= This section describes the process of upgrading the master and workers from old versions of Buildbot. The users of the Buildbot project will be warned about backwards-incompatible changes by warnings produced by the code. Additionally, all backwards-incompatible changes will be done at a major version change (e.g. 1.x to 2.0). Minor version change (e.g. 2.3 to 2.4) will only introduce backwards-incompatible changes only if they affect small part of the users and are absolutely necessary. Direct upgrades between more than two major releases (e.g. 1.x to 3.x) are not supported. The versions of the master and the workers do not need to match, so it's possible to upgrade them separately. Usually there are no actions needed to upgrade a worker except to install a new version of the code and restart it. Usually the process of upgrading the master is as simple as running the following command: .. code-block:: bash buildbot upgrade-master basedir This command will also scan the :file:`master.cfg` file for incompatibilities (by loading it and printing any errors or deprecation warnings that occur). It is safe to run this command multiple times. .. 
warning:: The ``upgrade-master`` command may perform database schema modifications. To avoid any data loss or corruption, it should **not** be interrupted. As a safeguard, it ignores all signals except ``SIGKILL``. To upgrade between major releases the best approach is first to upgrade to the latest minor release on the same major release. Then, fix all deprecation warnings by upgrading the configuration code to the replacement APIs. Finally, upgrade to the next major release. .. toctree:: :maxdepth: 1 4.0-upgrade 3.0-upgrade 2.0-upgrade 1.0-upgrade 0.9-upgrade 0.9-new-style-steps 0.9-worker-transition buildbot-3.4.0/master/docs/relnotes.rst.jinja000066400000000000000000000014151413250514000212200ustar00rootroot00000000000000{% for section, _ in sections|dictsort(by='key') %} {% set underline = "-" %} {% if section %} {{section}} {{ underline * section|length }}{% set underline = "~" %} {% endif %} {% if sections[section] %} {% for category, val in definitions|dictsort if category in sections[section]%} {{ definitions[category]['name'] }} {{ underline * definitions[category]['name']|length }} {% if definitions[category]['showcontent'] %} {% for text, values in sections[section][category]|dictsort(by='value') %} - {{ text }} {% endfor %} {% else %} - {{ sections[section][category]['']|sort|join(', ') }} {% endif %} {% if sections[section][category]|length == 0 %} No significant changes. {% else %} {% endif %} {% endfor %} {% else %} No significant changes. {% endif %} {% endfor %} buildbot-3.4.0/master/docs/relnotes/000077500000000000000000000000001413250514000173735ustar00rootroot00000000000000buildbot-3.4.0/master/docs/relnotes/0.3.1.txt000066400000000000000000000005751413250514000206020ustar00rootroot00000000000000Buildbot 0.3.1 was released 29 Apr 2003 ** First release. 
** Features implemented: change notification from FreshCVS server or parsed maildir contents timed builds basic builds, configure/compile/test some Twisted-specific build steps: docs, unit tests, debuild status reporting via web page ** Features still experimental/unpolished status reporting via PB client buildbot-3.4.0/master/docs/relnotes/0.3.2.txt000066400000000000000000000025501413250514000205760ustar00rootroot00000000000000Buildbot 0.3.2 was released 7 May 2003 ** packaging changes *** fix major packaging bug: none of the buildbot/* subdirectories were included in the 0.3.1 release. Sorry, I'm still figuring out distutils here.. ** internal changes *** use pb.Cacheable to update Events in remote status client. much cleaner. *** start to clean up BuildProcess->status.builder interface ** bug fixes *** waterfall display was missing a , causing it to be misrendered in most browsers (except the one I was testing it with, of course) *** URL without trailing slash (when served in a twisted-web distributed server, with a url like "http://twistedmatrix.com/~warner.twistd") should do redirect to URL-with-trailing-slash, otherwise internal hrefs are broken. 
*** remote status clients: forget RemoteReferences at shutdown, removes warnings about "persisting Ephemerals" ** Twisted buildprocess updates: *** match build process as of twisted-1.0.5 **** use python2.2 everywhere now that twisted rejects python2.1 **** look for test-result constants in multiple places *** move experimental 'trial --jelly' code to separate module *** add FreeBSD builder *** catch rc!=0 in HLint step *** remove RunUnitTestsRandomly, use randomly=1 parameter instead *** parameterize ['twisted.test'] default test case to make subclassing easier *** ignore internal distutils warnings in python2.3 builder buildbot-3.4.0/master/docs/relnotes/0.3.3.txt000066400000000000000000000062311413250514000205770ustar00rootroot00000000000000Buildbot 0.3.3 was released 21 May 2003 ** packaging changes *** include doc/examples in the release. Oops again. ** network changes *** add keepalives to deal with NAT boxes Some NAT boxes drop port mappings if the TCP connection looks idle for too long (maybe 30 minutes?). Add application-level keepalives (dummy commands sent from slave to master every 10 minutes) to appease the NAT box and keep our connection alive. Enable this with --keepalive in the slave mktap command line. Check the README for more details. ** UI changes *** allow slaves to trigger any build that they host Added an internal function to ask the buildmaster to start one of their builds. Must be triggered with a debugger or manhole on the slave side for now, will add a better UI later. *** allow web page viewers to trigger any build Added a button to the per-build page (linked by the build names on the third row of the waterfall page) to allow viewers to manually trigger builds. There is a field for them to indicate who they are and why they are triggering the build. It is possible to abuse this, but for now the benefits outweigh the damage that could be done (worst case, someone can make your machine run builds continuously). 
** generic buildprocess changes *** don't queue multiple builds for offline slaves If a slave is not online when a build is ready to run, that build is queued so the slave will run it when it next connects. However, the buildmaster used to queue every such build, so the poor slave machine would be subject to tens or hundreds of builds in a row when they finally did come online. The buildmaster has been changed to merge these multiple builds into a single one. *** bump ShellCommand default timeout to 20 minutes Used for testing out the win32 twisted builder. I will probably revert this in the next relese. *** split args in ShellCommand ourselves instead of using /bin/sh This should remove the need for /bin/sh on the slave side, improving the chances that the buildslave can run on win32. *** add configureEnv argument to Configure step, pass env dict to slave Allows build processes to do things like 'CFLAGS=-O0 ./configure' without using /bin/sh to set the environment variable ** Twisted buildprocess changes *** warn instead of flunk the build when cReactor or qtreactor tests fail These two always fail. For now, downgrade those failures to a warning (orange box instead of red). *** don't use 'clobber' on remote builds Builds that run on remote machines (freebsd, OS-X) now use 'cvs update' instead of clobbering their trees and doing a fresh checkout. The multiple simultaneous CVS checkouts were causing a strain on Glyph's upstream bandwidth. *** use trial --testmodule instead of our own test-case-name grepper The Twisted coding/testing convention has developers put 'test-case-name' tags (emacs local variables, actually) in source files to indicate which test cases should be run to exercise that code. Twisted's unit-test framework just acquired an argument to look for these tags itself. Use that instead of the extra FindUnitTestsForFiles build step we were doing before. Removes a good bit of code from buildbot and into Twisted where it really belongs. 
buildbot-3.4.0/master/docs/relnotes/0.3.4.txt000066400000000000000000000025621413250514000206030ustar00rootroot00000000000000Buildbot 0.3.4 was released 28 Jul 2003 ** IRC client The buildmaster can now join a set of IRC channels and respond to simple queries about builder status. ** slave information The build slaves can now report information from a set of info/* files in the slave base directory to the buildmaster. This will be used by the slave administrator to announce details about the system hosting the slave, contact information, etc. For now, info/admin should contain the name/email of the person who is responsible for the buildslave, and info/host should describe the system hosting the build slave (OS version, CPU speed, memory, etc). The contents of these files are made available through the waterfall display. ** change notification email parsers A parser for Syncmail (syncmail.sourceforge.net) was added. SourceForge provides examples of setting up syncmail to deliver CVS commit messages to mailing lists, so hopefully this will make it easier for sourceforge-hosted projects to set up a buildbot. email processors were moved into buildbot.changes.mail . FCMaildirSource was moved, and the compatibility location (buildbot.changes.freshcvsmail) will go away in the next release. ** w32 buildslave ought to work Some non-portable code was changed to make it more likely that the buildslave will run under windows. The Twisted buildbot now has a (more-or-less) working w32 buildslave. buildbot-3.4.0/master/docs/relnotes/0.3.5.txt000066400000000000000000000050061413250514000206000ustar00rootroot00000000000000Buildbot-0.3.5 was released 19 Sep 2003 ** newcred Buildbot has moved to "newcred", a new authorization framework provided by Twisted, which is a good bit cleaner and easier to work with than the "oldcred" scheme in older versions. This causes both buildmaster and buildslaves to depend upon Twisted 1.0.7 or later. 
The interface to 'makeApp' has changed somewhat (the multiple kinds of remote connections all use the same TCP port now). Old buildslaves will get "_PortalWrapper instance has no attribute 'remote_username'" errors when they try to connect. They must be upgraded. The FreshCVSSource uses PB to connect to the CVSToys server. This has been upgraded to use newcred too. If you get errors (TODO: what do they look like?) in the log when the buildmaster tries to connect, you need to upgrade your FreshCVS service or use the 'useOldcred' argument when creating your FreshCVSSource. This is a temporary hack to allow the buildmaster to talk to oldcred CVSToys servers. Using it will trigger deprecation warnings. It will go away eventually. In conjunction with this change, makeApp() now accepts a password which can be applied to the debug service. ** new features *** "copydir" for CVS checkouts The CVS build step can now accept a "copydir" parameter, which should be a directory name like "source" or "orig". If provided, the CVS checkout is done once into this directory, then copied into the actual working directory for compilation etc. Later updates are done in place in the copydir, then the workdir is replaced with a copy. This reduces CVS bandwidth (update instead of full checkout) at the expense of twice the disk space (two copies of the tree). *** Subversion (SVN) support Radix (Christopher Armstrong) contributed early support for building Subversion-based trees. The new 'SVN' buildstep behaves roughly like the 'CVS' buildstep, and the contrib/svn_buildbot.py script can be used as a checkin trigger to feed changes to a running buildmaster. ** notable bugfixes *** .tap file generation We no longer set the .tap filename, because the buildmaster/buildslave service might be added to an existing .tap file and we shouldn't presume to own the whole thing. You may want to manually rename the "buildbot.tap" file to something more meaningful (like "buildslave-bot1.tap"). 
*** IRC reconnect If the IRC server goes away (it was restarted, or the network connection was lost), the buildmaster will now schedule a reconnect attempt. *** w32 buildslave fixes An "rm -rf" was turned into shutil.rmtree on non-posix systems. buildbot-3.4.0/master/docs/relnotes/0.4.0.txt000066400000000000000000000111551413250514000205760ustar00rootroot00000000000000Buildbot 0.4.0 was released 05 Dec 2003 ** newapp I've moved the codebase to Twisted's new 'application' framework, which drastically cleans up service startup/shutdown just like newcred did for authorization. This is mostly an internal change, but the interface to IChangeSources was modified, so in the off chance that someone has written a custom change source, it may have to be updated to the new scheme. The most user-visible consequence of this change is that now both buildmasters and buildslaves are generated with the standard Twisted 'mktap' utility. Basic documentation is in the README file. Both buildmaster and buildslave .tap files need to be re-generated to run under the new code. I have not figured out the styles.Versioned upgrade path well enough to avoid this yet. Sorry. This also means that both buildslaves and the buildmaster require Twisted-1.1.0 or later. ** reloadable master.cfg Most aspects of a buildmaster is now controlled by a configuration file which can be re-read at runtime without losing build history. This feature makes the buildmaster *much* easier to maintain. In the previous release, you would create the buildmaster by writing a program to define the Builders and ChangeSources and such, then run it to create the .tap file. In the new release, you use 'mktap' to create the .tap file, and the only parameter you give it is the base directory to use. Each time the buildmaster starts, it will look for a file named 'master.cfg' in that directory and parse it as a python script. 
That script must define a dictionary named 'BuildmasterConfig' with various keys to define the builders, the known slaves, what port to use for the web server, what IRC channels to connect to, etc. This config file can be re-read at runtime, and the buildmaster will compute the differences and add/remove services as necessary. The re-reading is currently triggered through the debug port (contrib/debugclient.py is the debug port client), but future releases will add the ability to trigger the reconfiguration by IRC command, web page button, and probably a local UNIX socket (with a helper script to trigger a rebuild locally). docs/examples/twisted_master.cfg contains a sample configuration file, which also lists all the keys that can be set. There may be some bugs lurking, such as re-configuring the buildmaster while a build is running. It needs more testing. ** MaxQ support Radix contributed some support scripts to run MaxQ test scripts. MaxQ (http://maxq.tigris.org/) is a web testing tool that allows you to record HTTP sessions and play them back. ** Builders can now wait on multiple Interlocks The "Interlock" code has been enhanced to allow multiple builders to wait on each one. This was done to support the new config-file syntax for specifying Interlocks (in which each interlock is a tuple of A and [B], where A is the builder the Interlock depends upon, and [B] is a list of builders that depend upon the Interlock). "Interlock" is misnamed. In the next release it will be changed to "Dependency", because that's what it really expresses. A new class (probably called Interlock) will be created to express the notion that two builders should not run at the same time, useful when multiple builders are run on the same machine and thrashing results when several CPU- or disk- intensive compiles are done simultaneously. 
** FreshCVSSource can now handle newcred-enabled FreshCVS daemons There are now two FreshCVSSource classes: FreshCVSSourceNewcred talks to newcred daemons, and FreshCVSSourceOldcred talks to oldcred ones. Mind you, FreshCVS doesn't yet do newcred, but when it does, we'll be ready. 'FreshCVSSource' maps to the oldcred form for now. That will probably change when the current release of CVSToys supports newcred by default. ** usePTY=1 on posix buildslaves When a buildslave is running under POSIX (i.e. pretty much everything except windows), child processes are created with a pty instead of separate stdin/stdout/stderr pipes. This makes it more likely that a hanging build (when killed off by the timeout code) will have all its sub-childred cleaned up. Non-pty children would tend to leave subprocesses running because the buildslave was only able to kill off the top-level process (typically 'make'). Windows doesn't have any concept of ptys, so non-posix systems do not try to enable them. ** mail parsers should actually work now The email parsing functions (FCMaildirSource and SyncmailMaildirSource) were broken because of my confused understanding of how python class methods work. These sources should be functional now. ** more irc bot sillyness The IRC bot can now perform half of the famous AYBABTO scene. buildbot-3.4.0/master/docs/relnotes/0.4.1.txt000066400000000000000000000015461413250514000206020ustar00rootroot00000000000000Buildbot-0.4.1 was released 09 Dec 2003 ** MaildirSources fixed Several bugs in MaildirSource made them unusable. These have been fixed (for real this time). The Twisted buildbot is using an FCMaildirSource while they fix some FreshCVS daemon problems, which provided the encouragement for getting these bugs fixed. In addition, the use of DNotify (only available under linux) was somehow broken, possibly by changes in some recent version of Python. It appears to be working again now (against both python-2.3.3c1 and python-2.2.1). 
** master.cfg can use 'basedir' variable As documented in the sample configuration file (but not actually implemented until now), a variable named 'basedir' is inserted into the namespace used by master.cfg . This can be used with something like: os.path.join(basedir, "maildir") to obtain a master-basedir-relative location. buildbot-3.4.0/master/docs/relnotes/0.4.2.txt000066400000000000000000000023711413250514000206000ustar00rootroot00000000000000Buildbot-0.4.2 was released 08 Jan 2004 ** test suite updated The test suite has been completely moved over to Twisted's "Trial" framework, and all tests now pass. To run the test suite (consisting of 64 tests, probably covering about 30% of Buildbot's logic), do this: PYTHONPATH=. trial -v buildbot.test ** Mail parsers updated Several bugs in the mail-parsing code were fixed, allowing a buildmaster to be triggered by mail sent out by a CVS repository. (The Twisted Buildbot is now using this to trigger builds, as their CVS server machine is having some difficulties with FreshCVS). The FreshCVS mail format for directory additions appears to have changed recently: the new parser should handle both old and new-style messages. A parser for Bonsai commit messages (buildbot.changes.mail.parseBonsaiMail) was contributed by Stephen Davis. Thanks Stephen! ** CVS "global options" now available The CVS build step can now accept a list of "global options" to give to the cvs command. These go before the "update"/"checkout" word, and are described fully by "cvs --help-options". Two useful ones might be "-r", which causes checked-out files to be read-only, and "-R", which assumes the repository is read-only (perhaps by not attempting to write to lock files). 
buildbot-3.4.0/master/docs/relnotes/0.4.3.txt000066400000000000000000000116511413250514000206020ustar00rootroot00000000000000Buildbot-0.4.3 was released 30 Apr 2004 ** PBChangeSource made explicit In 0.4.2 and before, an internal interface was available which allowed special clients to inject changes into the Buildmaster. This interface is used by the contrib/svn_buildbot.py script. The interface has been extracted into a proper PBChangeSource object, which should be created in the master.cfg file just like the other kinds of ChangeSources. See docs/sources.xhtml for details. If you were implicitly using this change source (for example, if you use Subversion and the svn_buildbot.py script), you *must* add this source to your master.cfg file, or changes will not be delivered and no builds will be triggered. The PBChangeSource accepts the same "prefix" argument as all other ChangeSources. For a SVN repository that follows the recommended practice of using "trunk/" for the trunk revisions, you probably want to construct the source like this: source = PBChangeSource(prefix="trunk") to make sure that the Builders are given sensible (trunk-relative) filenames for each changed source file. ** Twisted changes *** step_twisted.RunUnitTests can change "bin/trial" The twisted RunUnitTests step was enhanced to let you run something other than "bin/trial", making it easier to use a buildbot on projects which use Twisted but aren't actually Twisted itself. *** Twisted now uses Subversion Now that Twisted has moved from CVS to SVN, the Twisted build processes have been modified to perform source checkouts from the Subversion repository. ** minor feature additions *** display Changes with HTML Changes are displayed with a bit more pizazz, and a links= argument was added to allow things like ViewCVS links to be added to the display (although it is not yet clear how this argument should be used: the interface remains subject to change until it has been documented). 
*** display ShellCommand logs with HTML Headers are in blue, stderr is in red (unless usePTY=1 in which case stderr and stdout are indistinguishable). A link is provided which returns the same contents as plain text (by appending "?text=1" to the URL). *** buildslaves send real tracebacks upon error The .unsafeTracebacks option has been turned on for the buildslaves, allowing them to send a full stack trace when an exception occurs, which is logged in the buildmaster's twistd.log file. This makes it much easier to determine what went wrong on the slave side. *** BasicBuildFactory refactored The BasicBuildFactory class was refactored to make it easier to create derivative classes, in particular the BasicSVN variant. *** "ping buildslave" web button added There is now a button on the "builder information" page that lets a web user initiate a ping of the corresponding build slave (right next to the button that lets them force a build). This was added to help track down a problem with the slave keepalives. ** bugs fixed: You can now have multiple BuildSteps with the same name (the names are used as hash keys in the data structure that helps determine ETA values for each step, the new code creates unique key names if necessary to avoid collisions). This means that, for example, you do not have to create a BuildStep subclass just to have two Compile steps in the same process. If CVSToys is not installed, the tests that depend upon it are skipped. Some tests in 0.4.2 failed because of a missing set of test files, they are now included in the tarball properly. Slave keepalives should work better now in the face of silent connection loss (such as when an intervening NAT box times out the association), the connection should be reestablished in minutes instead of hours. Shell commands on the slave are invoked with an argument list instead of the ugly and error-prone split-on-spaces approach. 
If the ShellCommand is given a string (instead of a list), it will fall back to splitting on spaces. Shell commands should work on win32 now (using COMSPEC instead of /bin/sh). Buildslaves under w32 should theoretically work now, and one was running for the Twisted buildbot for a while until the machine had to be returned. The "header" lines in ShellCommand logs (which include the first line, that displays the command being run, and the last, which shows its exit status) are now generated by the buildslave side instead of the local (buildmaster) side. This can provide better error handling and is generally cleaner. However, if you have an old buildslave (running 0.4.2 or earlier) and a new buildmaster, then neither end will generate these header lines. CVSCommand was improved, in certain situations 0.4.2 would perform unnecessary checkouts (when an update would have sufficed). Thanks to Johan Dahlin for the patches. The status output was fixed as well, so that failures in CVS and SVN commands (such as not being able to find the 'svn' executable) make the step status box red. Subversion support was refactored to make it behave more like CVS. This is a work in progress and will be improved in the next release. buildbot-3.4.0/master/docs/relnotes/0.5.0.txt000066400000000000000000000066741413250514000206110ustar00rootroot00000000000000Buildbot 0.5.0 was released 22 Jul 2004 ** new features *** web.distrib servers via TCP The 'webPathname' config option, which specifies a UNIX socket on which to publish the waterfall HTML page (for use by 'mktap web -u' or equivalent), now accepts a numeric port number. This publishes the same thing via TCP, allowing the parent web server to live on a separate machine. This config option could be named better, but it will go away altogether in a few releases, when status delivery is unified. It will be replaced with a WebStatusTarget object, and the config file will simply contain a list of various kinds of status targets. 
*** 'master.cfg' filename is configurable The buildmaster can use a config file named something other than "master.cfg". Use the --config=foo.cfg option to mktap to control this. *** FreshCVSSource now uses newcred (CVSToys >= 1.0.10) The FreshCVSSource class now defaults to speaking to freshcvs daemons from modern CVSToys releases. If you need to use the buildbot with a daemon from CVSToys-1.0.9 or earlier, use FreshCVSSourceOldcred instead. Note that the new form only requires host/port/username/passwd: the "serviceName" parameter is no longer meaningful. *** Builders are now configured with a dictionary, not a tuple The preferred way to set up a Builder in master.cfg is to provide a dictionary with various keys, rather than a (non-extensible) 4-tuple. See docs/config.xhtml for details. The old tuple-way is still supported for now, it will probably be deprecated in the next release and removed altogether in the following one. *** .periodicBuildTime is now exposed to the config file To set a builder to run at periodic intervals, simply add a 'periodicBuildTime' key to its master.cfg dictionary. Again, see docs/config.xhtml for details. *** svn_buildbot.py adds --include, --exclude The commit trigger script now gives you more control over which files are sent to the buildmaster and which are not. *** usePTY is controllable at slave mktap time The buildslaves usually run their child processes in a pty, which creates a process group for all the children, which makes it much easier to kill them all at once (i.e. if a test hangs). However this causes problems on some systems. Rather than hacking slavecommand.py to disable the use of these ptys, you can now create the slave's .tap file with --usepty=0 at mktap time. ** Twisted changes A summary of warnings (e.g. DeprecationWarnings) is provided as part of the test-case summarizer. The summarizer also counts Skips, expectedFailures, and unexpectedSuccesses, displaying the counts on the test step's event box. 
The RunUnitTests step now uses "trial -R twisted" instead of "trial twisted.test", which is a bit cleaner. All .pyc files are deleted before starting trial, to avoid getting tripped up by deleted .py files. ** documentation docs/config.xhtml now describes the syntax and allowed contents of the 'master.cfg' configuration file. ** bugfixes Interlocks had a race condition that could cause the lock to get stuck forever. FreshCVSSource has a prefix= argument that was moderately broken (it used to only work if the prefix was a single directory component). It now works with subdirectories. The buildmaster used to complain when it saw the "info" directory in a slave's workspace. This directory is used to publish information about the slave host and its administrator, and is not a leftover build directory as the complaint suggested. This complain has been silenced. buildbot-3.4.0/master/docs/relnotes/0.6.0.txt000066400000000000000000000224771413250514000206110ustar00rootroot00000000000000Buildbot 0.6.0 was released 30 Sep 2004 ** new features *** /usr/bin/buildbot control tool There is now an executable named 'buildbot'. For now, this just provides a convenient front-end to mktap/twistd/kill, but eventually it will provide access to other client functionality (like the 'try' builds, and a status client). Assuming you put your buildbots in /var/lib/buildbot/master/FOO, you can do 'buildbot create-master /var/lib/buildbot/master/FOO' and it will create the .tap file and set up a sample master.cfg for you. Later, 'buildbot start /var/lib/buildbot/master/FOO' will start the daemon. *** build status now saved in external files, -shutdown.tap unnecessary The status rewrite included a change to save all build status in a set of external files. These files, one per build, are put in a subdirectory of the master's basedir (named according to the 'builddir' parameter of the Builder configuration dictionary). 
This helps keep the buildmaster's memory consumption small: the (potentially large) build logs are kept on disk instead of in RAM. There is a small cache (2 builds per builder) kept in memory, but everything else lives on disk. The big change is that the buildmaster now keeps *all* status in these files. It is no longer necessary to preserve the buildbot-shutdown.tap file to run a persistent buildmaster. The buildmaster may be launched with 'twistd -f buildbot.tap' each time, in fact the '-n' option can be added to prevent twistd from automatically creating the -shutdown.tap file. There is still one lingering bug with this change: the Expectations object for each builder (which records how long the various steps took, to provide an ETA value for the next time) is not yet saved. The result is that the first build after a restart will not provide an ETA value. 0.6.0 keeps status in a single file per build, as opposed to 0.5.0 which kept status in many subdirectories (one layer for builds, another for steps, and a third for logs). 0.6.0 will detect and delete these subdirectories as it overwrites them. The saved builds are optional. To prevent disk usage from growing without bounds, you may want to set up a cron job to run 'find' and delete any which are too old. The status displays will happily survive without those saved build objects. The set of recorded Changes is kept in a similar file named 'changes.pck'. *** source checkout now uses timestamp/revision Source checkouts are now performed with an appropriate -D TIMESTAMP (for CVS) or -r REVISION (for SVN) marker to obtain the exact sources that were specified by the most recent Change going into the current Build. This avoids a race condition in which a change might be committed after the build has started but before the source checkout has completed, resulting in a mismatched set of source files. Such changes are now ignored. 
This works by keeping track of repository-wide revision/transaction numbers (for version control systems that offer them, like SVN). The checkout or update is performed with the highest such revision number. For CVS (which does not have them), the timestamp of each commit message is used, and a -D argument is created to place the checkout squarely in the middle of the "tree stable timer"'s window. This also provides the infrastructure for the upcoming 'try' feature. All source-checkout commands can now obtain a base revision marker and a patch from the Build, allowing certain builds to be performed on something other than the most recent sources. See source.xhtml and steps.xhtml for details. *** Darcs and Arch support added There are now build steps which retrieve a source tree from Darcs and Arch repositories. See steps.xhtml for details. Preliminary P4 support has been added, thanks to code from Dave Peticolas. You must manually set up each build slave with an appropriate P4CLIENT: all buildbot does is run 'p4 sync' at the appropriate times. *** Status reporting rewritten Status reporting was completely revamped. The config file now accepts a BuildmasterConfig['status'] entry, with a list of objects that perform status delivery. The old config file entries which controlled the web status port and the IRC bot have been deprecated in favor of adding instances to ['status']. The following status-delivery classes have been implemented, all in the 'buildbot.status' package: client.PBListener(port, username, passwd) html.Waterfall(http_port, distrib_port) mail.MailNotifier(fromaddr, mode, extraRecipients..) words.IRC(host, nick, channels) See the individual docstrings for details about how to use each one. You can create new status-delivery objects by following the interfaces found in the buildbot.interfaces module. 
*** BuildFactory configuration process changed The basic BuildFactory class is now defined in buildbot.process.factory rather than buildbot.process.base, so you will have to update your config files. factory.BuildFactory is the base class, which accepts a list of Steps to run. See docs/factories.xhtml for details. There are now easier-to-use BuildFactory classes for projects which use GNU Autoconf, perl's MakeMaker (CPAN), python's distutils (but no unit tests), and Twisted's Trial. Each one takes a separate 'source' Step to obtain the source tree, and then fills in the rest of the Steps for you. *** CVS/SVN VC steps unified, simplified The confusing collection of arguments for the CVS step ('clobber=', 'copydir=', and 'export=') have been removed in favor of a single 'mode' argument. This argument describes how you want to use the sources: whether you want to update and compile everything in the same tree (mode='update'), or do a fresh checkout and full build each time (mode='clobber'), or something in between. The SVN (Subversion) step has been unified and accepts the same mode= parameter as CVS. New version control steps will obey the same interface. Most of the old configuration arguments have been removed. You will need to update your configuration files to use the new arguments. See docs/steps.xhtml for a description of all the new parameters. *** Preliminary Debian packaging added Thanks to the contributions of Kirill Lapshin, we can now produce .deb installer packages. These are still experimental, but they include init.d startup/shutdown scripts, which the the new /usr/bin/buildbot to invoke twistd. Create your buildmasters in /var/lib/buildbot/master/FOO, and your slaves in /var/lib/buildbot/slave/BAR, then put FOO and BAR in the appropriate places in /etc/default/buildbot . After that, the buildmasters and slaves will be started at every boot. Pre-built .debs are not yet distributed. Use 'debuild -uc -us' from the source directory to create them. 
** minor features *** Source Stamps Each build now has a "source stamp" which describes what sources it used. The idea is that the sources for this particular build can be completely regenerated from the stamp. The stamp is a tuple of (revision, patch), where the revision depends on the VC system being used (for CVS it is either a revision tag like "BUILDBOT-0_5_0" or a datestamp like "2004/07/23", for Subversion it is a revision number like 11455). This must be combined with information from the Builder that is constant across all builds (something to point at the repository, and possibly a branch indicator for CVS and other VC systems that don't fold this into the repository string). The patch is an optional unified diff file, ready to be applied by running 'patch -p0 ' on a builder which is currently performing a build. When that build is finished, the buildbot will make an announcement (including the results of the build). The IRC 'force build' command will also announce when the resulting build has completed. *** the 'force build' option on HTML and IRC status targets can be disabled The html.Waterfall display and the words.IRC bot may be constructed with an allowForce=False argument, which removes the ability to force a build through these interfaces. Future versions will be able to restrict this build-forcing capability to authenticated users. The per-builder HTML page no longer displays the 'Force Build' buttons if it does not have this ability. Thanks to Fred Drake for code and design suggestions. *** master now takes 'projectName' and 'projectURL' settings These strings allow the buildbot to describe what project it is working for. At the moment they are only displayed on the Waterfall page, but in the next release they will be retrieveable from the IRC bot as well. *** survive recent (SVN) Twisted versions The buildbot should run correctly (albeit with plenty of noisy deprecation warnings) under the upcoming Twisted-2.0 release. 
*** work-in-progress realtime Trial results acquisition Jonathan Simms () has been working on 'retrial', a rewrite of Twisted's unit test framework that will most likely be available in Twisted-2.0 . Although it is not yet complete, the buildbot will be able to use retrial in such a way that build status is reported on a per-test basis, in real time. This will be the beginning of fine-grained test tracking and Problem management, described in docs/users.xhtml . buildbot-3.4.0/master/docs/relnotes/0.6.1.txt000066400000000000000000000105611413250514000206010ustar00rootroot00000000000000Buildbot 0.6.1 was released 23 Nov 2004 ** win32 improvements/bugfixes Several changes have gone in to improve portability to non-unix systems. It should be possible to run a build slave under windows without major issues (although step-by-step documentation is still greatly desired: check the mailing list for suggestions from current win32 users). *** PBChangeSource: use configurable directory separator, not os.sep The PBChangeSource, which listens on a TCP socket for change notices delivered from tools like contrib/svn_buildbot.py, was splitting source filenames with os.sep . This is inappropriate, because those file names are coming from the VC repository, not the local filesystem, and the repository host may be running a different OS (with a different separator convention) than the buildmaster host. In particular, a win32 buildmaster using a CVS repository running on a unix box would be confused. PBChangeSource now takes a sep= argument to indicate the separator character to use. *** build saving should work better windows cannot do the atomic os.rename() trick that unix can, so under win32 the buildmaster falls back to save/delete-old/rename, which carries a slight risk of losing a saved build log (if the system were to crash between the delete-old and the rename). ** new features *** test-result tracking Work has begun on fine-grained test-result handling. 
The eventual goal is to be able to track individual tests over time, and create problem reports when a test starts failing (which then are resolved when the test starts passing again). The first step towards this is an ITestResult interface, and code in the TrialTestParser to create such results for all non-passing tests (the ones for which Trial emits exception tracebacks). These test results are currently displayed in a tree-like display in a page accessible from each Build's page (follow the numbered link in the yellow box at the start of each build to get there). This interface is still in flux, as it really wants to be able to accommodate things like compiler warnings and tests that are skipped because of missing libraries or unsupported architectures. ** bug fixes *** VC updates should survive temporary failures Some VC systems (CVS and SVN in particular) get upset when files are turned into directories or vice versa, or when repository items are moved without the knowledge of the VC system. The usual symptom is that a 'cvs update' fails where a fresh checkout succeeds. To avoid having to manually intervene, the build slaves' VC commands have been refactored to respond to update failures by deleting the tree and attempting a full checkout. This may cause some unnecessary effort when, e.g., the CVS server falls off the net, but in the normal case it will only come into play when one of these can't-cope situations arises. *** forget about an existing build when the slave detaches If the slave was lost during a build, the master did not clear the .currentBuild reference, making that builder unavailable for later builds. This has been fixed, so that losing a slave should be handled better. This area still needs some work, I think it's still possible to get both the slave and the master wedged by breaking the connection at just the right time. 
Eventually I want to be able to resume interrupted builds (especially when the interruption is the result of a network failure and not because the slave or the master actually died). *** large logfiles now consume less memory Build logs are stored as lists of (type,text) chunks, so that stdout/stderr/headers can be displayed differently (if they were distinguishable when they were generated: stdout and stderr are merged when usePTY=1). For multi-megabyte logfiles, a large list with many short strings could incur a large overhead. The new behavior is to merge same-type string chunks together as they are received, aiming for a chunk size of about 10kb, which should bring the overhead down to a more reasonable level. There remains an issue with actually delivering large logfiles over, say, the HTML interface. The string chunks must be merged together into a single string before delivery, which causes a spike in the memory usage when the logfile is viewed. This can also break twisted.web.distrib -type servers, where the underlying PB protocol imposes a 640k limit on the size of strings. This will be fixed (with a proper Producer/Consumer scheme) in the next release. buildbot-3.4.0/master/docs/relnotes/0.6.2.txt000066400000000000000000000061431413250514000206030ustar00rootroot00000000000000Buildbot 0.6.2 was released 13 Dec 2004 ** new features It is now possible to interrupt a running build. Both the web page and the IRC bot feature 'stop build' commands, which can be used to interrupt the current BuildStep and accelerate the termination of the overall Build. The status reporting for these still leaves something to be desired (an 'interrupt' event is pushed into the column, and the reason for the interrupt is added to a pseudo-logfile for the step that was stopped, but if you only look at the top-level status it appears that the build failed on its own). Builds are also halted if the connection to the buildslave is lost. 
On the slave side, any active commands are halted if the connection to the buildmaster is lost. ** minor new features The IRC log bot now reports ETA times in a MMSS format like "2m45s" instead of the clunky "165 seconds". ** bug fixes *** Slave Disconnect Slave disconnects should be handled better now: the current build should be abandoned properly. Earlier versions could get into weird states where the build failed to finish, clogging the builder forever (or at least until the buildmaster was restarted). In addition, there are weird network conditions which could cause a buildslave to attempt to connect twice to the same buildmaster. This can happen when the slave is sending large logfiles over a slow link, while using short keepalive timeouts. The buildmaster has been fixed to allow the second connection attempt to take precedence over the first, so that the older connection is jettisoned to make way for the newer one. In addition, the buildslave has been fixed to be less twitchy about timeouts. There are now two parameters: keepaliveInterval (which is controlled by the mktap 'keepalive' argument), and keepaliveTimeout (which requires editing the .py source to change from the default of 30 seconds). The slave expects to see *something* from the master at least once every keepaliveInterval seconds, and will try to provoke a response (by sending a keepalive request) 'keepaliveTimeout' seconds before the end of this interval just in case there was no regular traffic. Any kind of traffic will qualify, including acknowledgements of normal build-status updates. The net result is that, as long as any given PB message can be sent over the wire in less than 'keepaliveTimeout' seconds, the slave should not mistakenly disconnect because of a timeout. There will be traffic on the wire at least every 'keepaliveInterval' seconds, which is what you want to pay attention to if you're trying to keep an intervening NAT box from dropping what it thinks is an abandoned connection. 
A quiet loss of connection will be detected within 'keepaliveInterval' seconds. *** Large Logfiles The web page rendering code has been fixed to deliver large logfiles in pieces, using a producer/consumer apparatus. This avoids the large spike in memory consumption when the log file body was linearized into a single string and then buffered in the socket's application-side transmit buffer. This should also avoid the 640k single-string limit for web.distrib servers that could be hit by large (>640k) logfiles. buildbot-3.4.0/master/docs/relnotes/0.6.3.txt000066400000000000000000000125701413250514000206050ustar00rootroot00000000000000Buildbot 0.6.3 was released 25 Apr 2005 ** 'buildbot' tool gets more uses The 'buildbot' executable has acquired three new subcommands. 'buildbot debugclient' brings up the small remote-control panel that connects to a buildmaster (via the slave port and the c['debugPassword']). This tool, formerly in contrib/debugclient.py, lets you reload the config file, force builds, and simulate inbound commit messages. It requires gtk2, glade, and the python bindings for both to be installed. 'buildbot statusgui' brings up a live status client, formerly available by running buildbot/clients/gtkPanes.py as a program. This connects to the PB status port that you create with: c['status'].append(client.PBListener(portnum)) and shows two boxes per Builder, one for the last build, one for current activity. These boxes are updated in realtime. The effect is primitive, but is intended as an example of what's possible with the PB status interface. 'buildbot statuslog' provides a text-based running log of buildmaster events. Note: command names are subject to change. These should get much more useful over time. ** web page has a favicon When constructing the html.Waterfall instance, you can provide the filename of an image that will be provided when the "favicon.ico" resource is requested. Many web browsers display this as an icon next to the URL or bookmark. 
A goofy little default icon is included. ** web page has CSS Thanks to Thomas Vander Stichele, the Waterfall page is now themable through CSS. The default CSS is located in buildbot/status/classic.css, and creates a page that is mostly identical to the old, non-CSS based table. You can specify a different CSS file to use by passing it as the css= argument to html.Waterfall(). See the docstring for Waterfall for some more details. ** builder "categories" Thomas has added code which places each Builder in an optional "category". The various status targets (Waterfall, IRC, MailNotifier) can accept a list of categories, and they will ignore any activity in builders outside this list. This makes it easy to create some Builders which are "experimental" or otherwise not yet ready for the world to see, or indicate that certain builders should not harass developers when their tests fail, perhaps because the build slaves for them are not yet fully functional. ** Deprecated features *** defining Builders with tuples is deprecated For a long time, the preferred way to define builders in the config file has been with a dictionary. The less-flexible old style of a 4-item tuple (name, slavename, builddir, factory) is now officially deprecated (i.e., it will emit a warning if you use it), and will be removed in the next release. Dictionaries are more flexible: additional keys like periodicBuildTime are simply unavailable to tuple-defined builders. Note: it is a good idea to watch the logfile (usually in twistd.log) when you first start the buildmaster, or whenever you reload the config file. Any warnings or errors in the config file will be found there. *** c['webPortnum'], c['webPathname'], c['irc'] are deprecated All status reporters should be defined in the c['status'] array, using buildbot.status.html.Waterfall or buildbot.status.words.IRC . These have been deprecated for a while, but this is fair warning that these keys will be removed in the next release. 
*** c['manholePort'] is deprecated Again, this has been deprecated for a while, in favor of: c['manhole'] = master.Manhole(port, username, password) The preferred syntax will eventually let us use other, better kinds of debug shells, such as the experimental curses-based ones in the Twisted sandbox (which would offer command-line editing and history). ** bug fixes The waterfall page has been improved a bit. A circular-reference bug in the web page's TextLog class was fixed, which caused a major memory leak in a long-running buildmaster with large logfiles that are viewed frequently. Modifying the config file in a way which only changed a builder's base directory now works correctly. The 'buildbot' command tries to create slightly more useful master/slave directories, adding a Makefile entry to re-create the .tap file, and removing global-read permissions from the files that may contain buildslave passwords. ** twisted-2.0.0 compatibility Both buildmaster and buildslave should run properly under Twisted-2.0 . There are still some warnings about deprecated functions, some of which could be fixed, but there are others that would require removing compatibility with Twisted-1.3, and I don't expect to do that until 2.0 has been out and stable for at least several months. The unit tests should pass under 2.0, whereas the previous buildbot release had tests which could hang when run against the new "trial" framework in 2.0. The Twisted-specific steps (including Trial) have been updated to match 2.0 functionality. ** win32 compatibility Thankt to Nick Trout, more compatibility fixes have been incorporated, improving the chances that the unit tests will pass on windows systems. There are still some problems, and a step-by-step "running buildslaves on windows" document would be greatly appreciated. ** API docs Thanks to Thomas Vander Stichele, most of the docstrings have been converted to epydoc format. 
There is a utility in docs/gen-reference to turn these into a tree of cross-referenced HTML pages. Eventually these docs will be auto-generated and somehow published on the buildbot web page. buildbot-3.4.0/master/docs/relnotes/0.6.4.txt000066400000000000000000000054611413250514000206070ustar00rootroot00000000000000Buildbot 0.6.4 was released 28 Apr 2005 ** major bugs fixed The 'buildbot' tool in 0.6.3, when used to create a new buildmaster, failed unless it found a 'changes.pck' file. As this file is created by a running buildmaster, this made 0.6.3 completely unusable for first-time installations. This has been fixed. ** minor bugs fixed The IRC bot had a bug wherein asking it to watch a certain builder (the "I'll give a shout when the build finishes" message) would cause an exception, so it would not, in fact, shout. The HTML page had an exception in the "change sources" page (reached by following the "Changes" link at the top of the column that shows the names of commiters). Re-loading the config file while builders were already attached would result in a benign error message. The server side of the PBListener status client had an exception when providing information about a non-existent Build (e.g., when the client asks for the Build that is currently running, and the server says "None"). These bugs have all been fixed. The unit tests now pass under python2.2; they were failing before because of some 2.3isms that crept in. More unit tests which failed under windows now pass, only one (test_webPathname_port) is still failing. ** 'buildbot' tool looks for a .buildbot/options file The 'statusgui' and the 'debugclient' subcommands can both look for a .buildbot/ directory, and an 'options' file therein, to extract default values for the location of the buildmaster. This directory is searched in the current directory, its parent, etc, all the way up to the filesystem root (assuming you own the directories in question). It also look in ~/.buildbot/ for this file. 
This feature allows you to put a .buildbot at the top of your working tree, telling any 'buildbot' invocations you perform therein how to get to the buildmaster associated with that tree's project. Windows users get something similar, using %APPDATA%/buildbot instead of ~/.buildbot . ** windows ShellCommands are launched with 'cmd.exe' The buildslave has been modified to run all list-based ShellCommands by prepending [os.environ['COMSPEC'], '/c'] to the argv list before execution. This should allow the buildslave's PATH to be searched for commands, improving the chances that it can run the same 'trial -o foo' commands as a unix buildslave. The potential downside is that spaces in argv elements might be re-parsed, or quotes might be re-interpreted. The consensus on the mailing list was that this is a useful thing to do, but please report any problems you encounter with it. ** minor features The Waterfall display now shows the buildbot's home timezone at the top of the timestamp column. The default favicon.ico is now much nicer-looking (it is generated with Blender.. the icon.blend file is available in CVS in docs/images/ should you care to play with it). buildbot-3.4.0/master/docs/relnotes/0.6.5.txt000066400000000000000000000111021413250514000205750ustar00rootroot00000000000000Buildbot 0.6.5 was released 18 May 2005 ** deprecated config keys removed The 'webPortnum', 'webPathname', 'irc', and 'manholePort' config-file keys, which were deprecated in the previous release, have now been removed. In addition, Builders must now always be configured with dictionaries: the support for configuring them with tuples has been removed. ** master/slave creation and startup changed The buildbot no longer uses .tap files to store serialized representations of the buildmaster/buildslave applications. Instead, this release now uses .tac files, which are human-readable scripts that create new instances (rather than .tap files, which were pickles of pre-created instances). 
'mktap buildbot' is gone. You will need to update your buildbot directories to handle this. The procedure is the same as creating a new buildmaster or buildslave: use 'buildbot master BASEDIR' or 'buildbot slave BASEDIR ARGS..'. This will create a 'buildbot.tac' file in the target directory. The 'buildbot start BASEDIR' will use twistd to start the application. The 'buildbot start' command now looks for a Makefile.buildbot, and if it finds one (and /usr/bin/make exists), it will use it to start the application instead of calling twistd directly. This allows you to customize startup, perhaps by adding environment variables. The setup commands create a sample file in Makefile.sample, but you must copy this to Makefile.buildbot to actually use it. The previous release looked for a bare 'Makefile', and also installed a 'Makefile', so you were always using the customized approach, even if you didn't ask for it. That old Makefile launched the .tap file, so changing names was also necessary to make sure that the new 'buildbot start' doesn't try to run the old .tap file. 'buildbot stop' now uses os.kill instead of spawning an external process, making it more likely to work under windows. It waits up to 5 seconds for the daemon to go away, so you can now do 'buildbot stop BASEDIR; buildbot start BASEDIR' with less risk of launching the new daemon before the old one has fully shut down. Likewise, 'buildbot start' imports twistd's internals directly instead of spawning an external copy, so it should work better under windows. ** new documentation All of the old Lore-based documents were converted into a new Texinfo-format manual, and considerable new text was added to describe the installation process. The docs are not yet complete, but they're slowly shaping up to form a proper user's manual. ** new features Arch checkouts can now use precise revision stamps instead of always using the latest revision. 
A separate Source step for using Bazaar (an alternative Arch client) instead of 'tla' was added. A Source step for Cogito (the new linux kernel VC system) was contributed by Brandon Philips. All Source steps now accept a retry= argument to indicate that failing VC checkouts should be retried a few times (SF#1200395), note that this requires an updated buildslave. The 'buildbot sendchange' command was added, to be used in VC hook scripts to send changes at a pb.PBChangeSource . contrib/arch_buildbot.py was added to use this tool; it should be installed using the 'Arch meta hook' scheme. Changes can now accept a branch= parameter, and Builders have an isBranchImportant() test that acts like isFileImportant(). Thanks to Thomas Vander Stichele. Note: I renamed his tag= to branch=, in anticipation of an upcoming feature to build specific branches. "tag" seemed too CVS-centric. LogFiles have been rewritten to stream the incoming data directly to disk rather than keeping a copy in memory all the time (SF#1200392). This drastically reduces the buildmaster's memory requirements and makes 100MB+ log files feasible. The log files are stored next to the serialized Builds, in files like BASEDIR/builder-dir/12-log-compile-output, so you'll want a cron job to delete old ones just like you do with old Builds. Old-style Builds from 0.6.4 and earlier are converted when they are first read, so the first load of the Waterfall display after updating to this release may take quite some time. ** build process updates BuildSteps can now return a status of EXCEPTION, which terminates the build right away. This allows exceptions to be caught right away, but still make sure the build stops quickly. ** bug fixes Some more windows incompatibilities were fixed. The test suite now has two failing tests remaining, both of which appear to be Twisted issues that should not affect normal operation. 
The test suite no longer raises any deprecation warnings when run against twisted-2.0 (except for the ones which come from Twisted itself). buildbot-3.4.0/master/docs/relnotes/0.6.6.txt000066400000000000000000000027361413250514000206130ustar00rootroot00000000000000Buildbot 0.6.6 was released 23 May 2005 ** bugs fixed The 'sendchange', 'stop', and 'sighup' subcommands were broken, simple bugs that were not caught by the test suite. Sorry. The 'buildbot master' command now uses "raw" strings to create .tac files that will still function under windows (since we must put directory names that contain backslashes into that file). The keep-on-disk behavior added in 0.6.5 included the ability to upgrade old in-pickle LogFile instances. This upgrade function was not added to the HTMLLogFile class, so an exception would be raised when attempting to load or display any build with one of these logs (which are normally used only for showing build exceptions). This has been fixed. Several unnecessary imports were removed, so the Buildbot should function normally with just Twisted-2.0.0's "Core" module installed. (of course you will need TwistedWeb, TwistedWords, and/or TwistedMail if you use status targets that require them). The test suite should skip all tests that cannot be run because of missing Twisted modules. The master/slave's basedir is now prepended to sys.path before starting the daemon. This used to happen implicitly (as a result of twistd's setup preamble), but 0.6.5 internalized the invocation of twistd and did not copy this behavior. This change restores the ability to access "private.py"-style modules in the basedir from the master.cfg file with a simple "import private" statement. Thanks to Thomas Vander Stichele for the catch. 
buildbot-3.4.0/master/docs/relnotes/0.7.0.txt000066400000000000000000000121261413250514000206000ustar00rootroot00000000000000Buildbot 0.7.0 was released 24 Oct 2005 ** new features *** new c['schedulers'] config-file element (REQUIRED) The code which decides exactly *when* a build is performed has been massively refactored, enabling much more flexible build scheduling. YOU MUST UPDATE your master.cfg files to match: in general this will merely require you to add an appropriate c['schedulers'] entry. Any old ".treeStableTime" settings on the BuildFactory instances will now be ignored. The user's manual has complete details with examples of how the new Scheduler classes work. *** c['interlocks'] removed, Locks and Dependencies now separate items The c['interlocks'] config element has been removed, and its functionality replaced with two separate objects. Locks are used to tell the buildmaster that certain Steps or Builds should not run at the same time as other Steps or Builds (useful for test suites that require exclusive access to some external resource: of course the real fix is to fix the tests, because otherwise your developers will be suffering from the same limitations). The Lock object is created in the config file and then referenced by a Step specification tuple or by the 'locks' key of the Builder specification dictionary. Locks come in two flavors: MasterLocks are buildmaster-wide, while SlaveLocks are specific to a single buildslave. When you want to have one Build run or not run depending upon whether some other set of Builds have passed or failed, you use a special kind of Scheduler defined in the scheduler.Dependent class. This scheduler watches an upstream Scheduler for builds of a given source version to complete, and only fires off its own Builders when all of the upstream's Builders have built that version successfully. Both features are fully documented in the user's manual. *** 'buildbot try' The 'try' feature has finally been added. 
There is some configuration involved, both in the buildmaster config and on the developer's side, but once in place this allows the developer to type 'buildbot try' in their locally-modified tree and to be given a report of what would happen if their changes were to be committed. This works by computing a (base revision, patch) tuple that describes the developer's tree, sending that to the buildmaster, then running a build with that source on a given set of Builders. The 'buildbot try' tool then emits status messages until the builds have finished. 'try' exists to allow developers to run cross-platform tests on their code before committing it, reducing the chances they will inconvenience other developers by breaking the build. The UI is still clunky, but expect it to change and improve over the next few releases. Instructions for developers who want to use 'try' (and the configuration changes necessary to enable its use) are in the user's manual. *** Build-On-Branch When suitably configured, the buildbot can be used to build trees from a variety of related branches. You can set up Schedulers to build a tree using whichever branch was last changed, or users can request builds of specific branches through IRC, the web page, or (eventually) the CLI 'buildbot force' subcommand. The IRC 'force' command now takes --branch and --revision arguments (not that they always make sense). Likewise the HTML 'force build' button now has an input field for branch and revision. Your build's source-checkout step must be suitably configured to support this: for SVN it involves giving both a base URL and a default branch. Other VC systems are configured differently. The ChangeSource must also provide branch information: the 'buildbot sendchange' command now takes a --branch argument to help hook script writers accomplish this. *** Multiple slaves per Builder You can now attach multiple buildslaves to each Builder. 
This can provide redundancy or primitive load-balancing among many machines equally capable of running the build. To use this, define a key in the Builder specification dictionary named 'slavenames' with a list of buildslave names (instead of the usual 'slavename' that contains just a single slavename). *** minor new features The IRC and email status-reporting facilities now provide more specific URLs for particular builds, in addition to the generic buildmaster home page. The HTML per-build page now has more information. The Twisted-specific test classes have been modified to match the argument syntax preferred by Trial as of Twisted-2.1.0 and newer. The generic trial steps are still suitable for the Trial that comes with older versions of Twisted, but may produce deprecation warnings or errors when used with the latest Trial. ** bugs fixed DNotify, used by the maildir-watching ChangeSources, had problems on some 64-bit systems relating to signed-vs-unsigned constants and the DN_MULTISHOT flag. A workaround was provided by Brad Hards. The web status page should now be valid XHTML, thanks to a patch by Brad Hards. The charset parameter is specified to be UTF-8, so VC comments, builder names, etc, should probably all be in UTF-8 to be displayed properly. ** creeping version dependencies The IRC 'force build' command now requires python2.3 (for the shlex.split function). buildbot-3.4.0/master/docs/relnotes/0.7.1.txt000066400000000000000000000076251413250514000206110ustar00rootroot00000000000000Buildbot 0.7.1 was released 26 Nov 2005 ** new features *** scheduler.Nightly Dobes Vandermeer contributed a cron-style 'Nightly' scheduler. Unlike the more-primitive Periodic class (which only lets you specify the duration between build attempts), Nightly lets you schedule builds for specific times of day, week, month, or year. The interface is very much like the crontab(5) file. See the buildbot.scheduler.Nightly docstring for complete details. 
** minor new features *** step.Trial can work with Trial from Twisted >2.1.0 The 'Trial' step now accepts the trialMode= argument, which should be a list of strings to be added to trial's argv array. This defaults to ["-to"], which is appropriate for the Trial that ships in Twisted-2.1.0 and earlier, and tells Trial to emit non-colorized verbose output. To use this step with trials from later versions of Twisted, this should be changed to ["--reporter=bwverbose"]. In addition, you can now set other Trial command-line parameters through the trialArgs= argument. This is a list of strings, and defaults to an empty list. *** Added a 'resubmit this build' button to the web page *** Make the VC-checkout step's description more useful Added the word "[branch]" to the VC step's description (used in the Step's box on the Waterfall page, among others) when we're checking out a non-default branch. Also add "rNNN" where appropriate to indicate which revision is being checked out. Thanks to Brad Hards and Nathaniel Smith for the suggestion. ** bugs fixed Several patches from Dobes Vandermeer: Escape the URLs in email, in case they have spaces and such. Fill otherwise-empty elements, as a workaround for buggy browsers that might optimize them away. Also use binary mode when opening status pickle files, to make windows work better. The AnyBranchScheduler now works even when you don't provide a fileIsImportant= argument. Stringify the base revision before stuffing it into a 'try' jobfile, helping SVN and Arch implement 'try' builds better. Thanks to Steven Walter for the patch. Fix the compare_attrs list in PBChangeSource, FreshCVSSource, and Waterfall. Before this, certain changes to these objects in the master.cfg file were ignored, such that you would have to stop and re-start the buildmaster to make them take effect. The config file is now loaded serially, shutting down old (or replaced) Status/ChangeSource plugins before starting new ones. 
This fixes a bug in which changing an aspect of, say, the Waterfall display would cause an exception as both old and new instances fight over the same TCP port. This should also fix a bug whereby new Periodic Schedulers could fire a build before the Builders have finished being added. There was a bug in the way Locks were handled when the config file was reloaded: changing one Builder (but not the others) and reloading master.cfg would result in multiple instances of the same Lock object, so the Locks would fail to prevent simultaneous execution of Builds or Steps. This has been fixed. ** other changes For a long time, certain StatusReceiver methods (like buildStarted and stepStarted) have been able to return another StatusReceiver instance (usually 'self') to indicate that they wish to subscribe to events within the new object. For example, if the buildStarted() method returns 'self', the status receiver will also receive events for the new build, like stepStarted() and buildETAUpdate(). Returning a 'self' from buildStarted() is equivalent to calling build.subscribe(self). Starting with buildbot-0.7.1, this auto-subscribe convenience will also register to automatically unsubscribe the target when the build or step has finished, just as if build.unsubscribe(self) had been called. Also, the unsubscribe() method has been changed to not explode if the same receiver is unsubscribed multiple times. (note that it will still explode is the same receiver is *subscribed* multiple times, so please continue to refrain from doing that). buildbot-3.4.0/master/docs/relnotes/0.7.10.txt000066400000000000000000000131131413250514000206560ustar00rootroot00000000000000Buildbot 0.7.10 was released 25 Feb 2009 This release is mainly a collection of user-submitted patches since the last release. ** New Features *** Environment variables in a builder (#100) It is useful to be able to pass environment variables to all steps in a builder. This is now possible by adding { .. 
'env': { 'var' : 'value' }, ... } to the builder specification. *** IRC status plugin improvements (#330, #357, #378, #280, #381, #411, #368) *** usePTY specified in master.cfg, defaults to False (#158, #255) Using a pty has some benefits in terms of supporting "Stop Build", but causes numerous problems with simpler jobs which can be killed by a SIGHUP when their standard input is closed. With this change, PTYs are not used by default, although you can enable them either on slaves (with the --usepty option to create-slave) or on the master. *** More information about buildslaves via the web plugin (#110) A new page, rooted at /buildslave/$SLAVENAME, gives extensive information about the buildslave. *** More flexible merging of requests (#415) The optional c['mergeRequests'] configuration parameter takes a function which can decide whether two requests are mergeable. *** Steps can be made to run even if the build has halted (#414) Adding alwaysRun=True to a step will cause it to run even if some other step has failed and has haltOnFailure=True. *** Compress buildstep logfiles (#26) Logs for each buildstep, which can take a lot of space on a busy buildmaster, are automatically compressed after the step has finished. *** Support for "latent" buildslaves The buildslaves that are started on-demand are called "latent" buildslaves. Buildbot ships with an abstract base class for building latent buildslaves, and a concrete implementation for AWS EC2. *** Customized MailNotifier messages (#175) MailNotifier now takes an optional function to build the notification message, allowing ultimate site-level control over the format of buildbot's notification emails. *** Nightly scheduler support for building only if changes have occurred With the addition of onlyIfChanged=True, the Nightly scheduler will not schedule a new build if no changes have been made since its last scheduled build. 
*** Add ATOM/RSS feeds to WebStatus (#372) Two new pages, /atom and /rss, provide feeds of build events to any feed reader. These paths take the same "category" and "branch" arguments as the waterfall and grid. *** Add categories to Schedulers and Changes (#182) This allows a moderate amount of support for multiple projects built in a single buildmaster. *** Gracefully shut down a buildslave after its build is complete The /buildslaves/$SLAVENAME pages have a "Gracefully Shutdown" button which will cause the corresponding slave to shut itself down when it finishes its current build. This is a good way to do work on a slave without causing a spurious build failure. *** SVN source steps can send usernames and passwords (#41) Adding username="foo" and/or password="bar" to an SVN step will cause --username and --password arguments to be passed to 'svn' on the slave side. Passwords are suitably obfuscated in logfiles. ** New Steps *** DirectoryUpload (#393) This step uploads an entire directory to the master, and can be useful when a build creates several products (e.g., a client and server package). *** MasterShellCommand This step runs a shell command on the server, and can be useful for post-processing build products, or performing other maintenance tasks on the master. *** PyLint (#259) A PyLint step is available to complement the existing PyFlakes step. 
** Bugs Fixed *** Process output from new versions of Test::Harness (#346) *** Fixes to the try client and scheduler *** Remove redundant loop in MailNotifier (#315) *** Display correct $PWD in logfiles (#179) *** Do not assume a particular python version on Windows (#401) *** Sort files in changes (#402) *** Sort buildslaves lexically (#416) *** Send properties to all builds initiated by AnyBranchScheduler *** Dependent Schedulers are more robust to reconfiguration (#35) *** Fix properties handling in triggered buidls (#392) *** Use "call" on Windows to avoid errors (#417) *** Support setDefaultWorkdir in FileUpload and FileDownload (#209) *** Support WithProperties in FileUpload and FileDownload (#210) *** Fix a bug where changes could be lost on a master crash (#202) *** Remove color settings from non-presentation code (#251) *** Fix builders which stopped working after a PING (#349, #85) *** Isolate Python exceptions in status plugins (#388) *** Notify about slaves missing at master startup (#302) *** Fix tracebacks in web display after a reconfig (#176) ** Version-Control Changes *** Many Mercurial fixes - Inrepo branch support finalized (source step + changegroup hook + test case) (#65 #185 #187) - Reduced amount of full clones by separating clone with update into clone/pull/update steps (#186, #227) (see #412 for future work here) - Fixed mercurial changegroup hook to work with Mercurial 1.1 API (#181, #380) *** Many git fixes *** Add got_revision to Perforce support (#127) *** Use "git foo" everywhere instead of deprecated "git-foo" ** Minor Changes *** factory.addSteps (#317) If you have a common list of steps that are included in multiple factories, you can use f.addSteps(steplist) to add them all at once. *** Twisted logfile rotation and cleanup (#108) By default, Buildbot now rotates and cleans up the (potentially voluminous) twistd.log files. 
*** Prioritize build requests based on the time they wre submitted (#334) Balancing of load is a bit more fair, although not true load balancing. buildbot-3.4.0/master/docs/relnotes/0.7.11.txt000066400000000000000000000040311413250514000206560ustar00rootroot00000000000000Buildbot 0.7.11p was released July 16, 2009 Fixes a few test failures in 0.7.11, and gives a default value for branchType if it is not specified by the master. Buildbot 0.7.11 was released July 5, 2009 Developers too numerous to mention contributed to this release. Buildbot has truly become a community-maintained application. Much hard work is not mentioned here, so please consult the git logs for the detailed changes in this release. ** Better Memory Performance, Disk Cleanup Buildbot handles its memory usage a bit better, and can automatically purge old history to keep memory and disk usage low. Look for eventHorizon, buildHorizon, logHorizon, and changeHorizon. ** Password Protection for Force Build and Stop actions It is now possible to require authentication to force build and stop via the WebStatus interface. To use this, set the 'auth' field of WebStatus to a valid IAuth implementation. Current implementations are: BasicAuth with a list of user/passwords HTPasswdAuth with an .htpasswd file By default, the unauthenticated behavior will occur. ** Web Status changes The "Graceful Shutdown" feature, as a kind of "force", now obeys allowForce. The waterfall and other pages are more deeply interlinked. Pending builds can be individually cancelled, or cancelled in bulk. ** Fixed Transfer Steps Transfer step classes are more reliable; DirectoryUpload and DirectoryDownload use tarfile instead of manually framing files. The DirectoryUpload step also now supports compression. ** Conditional Steps Steps now take a doStepIf parameter which can be used to implement simple conditional execution of a step. 
** Colorized Steps Steps are now hilighted with a color in the build view to indicate their success or failure. ** Improved build prioritization Bugfixes and fairer scheduling ** Transposed Grid Similar to the grid view, but with the axes reversed and showing different info. Located at /tgrid. ** Trigger steps improvements Trigger now supports copy_properties, to send selected properties to the triggered build. buildbot-3.4.0/master/docs/relnotes/0.7.12.txt000066400000000000000000000050321413250514000206610ustar00rootroot00000000000000Buildbot 0.7.12 was released 21 Jan 2010 ** New 'console' display This is a new web status view combining the best of the (t)grid and waterfall views. ** New 'extended' stylesheet Buildbot has a new, much nicer stylesheet available. Copy the file buildbot/status/web/extended.css over your existing public_html/buildbot.css to se it. ** Builders can be configured with an object Instead of a list of dictionaries, builders can now specified using a BuilderConfig object in the configuration file. This will allow for better argument checking and default values, and also makes it easier for users to create subclasses to handle site-specific builder details. The old, dictionary-based method of configuration is still supported. ** Check for common mis-configuration in addStep When adding a new step to a factory, either of these are acceptable: f.addStep(ShellCommand(command="echo hello, world", description="say hi")) f.addStep(ShellCommand, command="echo hello, world", description="say hi") but trying to mix these syntaxes is a common misconfiguration: f.addStep(ShellCommand(command="echo hello, world"), description="say hi") in which case the description argument was silently ignored. This is now an error. ** Support for log compression Log files can be compressed on the master side using either gzip or bzip2. ** Builder.ping no longer accepts timeout argument (bug #664). 
The implementation was not robust enough and could cause the master to unexpectedly disconnect the slave. ** MailNotifier's customMesg replaced by messageFormatter The customMesg mechanism had the unfortunate side effect of loading all data for a build into memory simultaneously, which for some builds could cause memory exhaustion. ** Suppression of selected compiler warnings The WarningCountingShellCommand class has been extended with the ability to upload from the slave a file contain warnings to be ignored. See the documentation of the suppressionFile argument to the Compile build step. ** New buildstep `MTR' A new class buildbot.process.mtrlogobserver.MTR was added. This buildstep is used to run test suites using mysql-test-run. It parses the stdio output for test failures and summarizes them on the waterfall page. It also makes server error logs available for debugging failures, and optionally inserts information about test runs and test failures into an external database. ** Python API Docs The docstrings for buildbot are now available in a web-friendly format: http://buildbot.net/buildbot/docs/latest/reference ** Many, many bugfixes buildbot-3.4.0/master/docs/relnotes/0.7.2.txt000066400000000000000000000054371413250514000206110ustar00rootroot00000000000000Buildbot 0.7.2 was released 17 Feb 2006 ** new features *** all TCP port numbers in config file now accept a strports string Sometimes it is useful to restrict certain TCP ports that the buildmaster listens on to use specific network interfaces. In particular, if the buildmaster and SVN repository live on the same machine, you may want to restrict the PBChangeSource to only listen on the loopback interface, insuring that no external entities can inject Changes into the buildbot. Likewise, if you are using something like Apache's reverse-proxy feature to provide access to the buildmaster's HTML status page, you might want to hide the real Waterfall port by having it only bind to the loopback interface. 
To accomplish this, use a string like "tcp:12345:interface=127.0.0.1" instead of a number like 12345. These strings are called "strports specification strings", and are documented in twisted's twisted.application.strports module (you can probably type 'pydoc twisted.application.strports' to see this documentation). Pretty much everywhere the buildbot takes a port number will now accept a strports spec, and any bare numbers are translated into TCP port numbers (listening on all network interfaces) for compatibility. *** buildslave --umask control Twisted's daemonization utility (/usr/bin/twistd) automatically sets the umask to 077, which means that all files generated by both the buildmaster and the buildslave will only be readable by the account under which the respective daemon is running. This makes it unnecessarily difficult to share build products (e.g. by symlinking ~/public_html/current_docs/ to a directory within the slave's build directory where each build puts the results of a "make docs" step). The 'buildbot slave ' command now accepts a --umask argument, which can be used to override the umask set by twistd. If you create the buildslave with '--umask=022', then all build products will be world-readable, making it easier for other processes (run under other accounts) to access them. ** bug fixes The 0.7.1 release had a bug whereby reloading the config file could break all configured Schedulers, causing them to raise an exception when new changes arrived but not actually schedule a new build. This has been fixed. Fixed a bug which caused the AnyBranchScheduler to explode when branch==None. Thanks to Kevin Turner for the catch. I also think I fixed a bug whereby the TryScheduler would explode when it was given a Change (which it is supposed to simply ignore). 
The Waterfall display now does more quoting of names (including Builder names, BuildStep names, etc), so it is more likely that these names can contain unusual characters like spaces, quotes, and slashes. There may still be some problems with these kinds of names, however.. please report any bugs to the mailing list. buildbot-3.4.0/master/docs/relnotes/0.7.3.txt000066400000000000000000000061631413250514000206070ustar00rootroot00000000000000Buildbot 0.7.3 was released 23 May 2006 ** compatibility This release is compatible with Twisted-1.3.0, but the next one will not be. Please upgrade to at least Twisted-2.0.x soon, as the next buildbot release will require it. ** new features *** Mercurial support Support for Mercurial version control system (http://selenic.com/mercurial) has been added. This adds a buildbot.process.step.Mercurial BuildStep. A suitable hook script to deliver changes to the buildmaster is still missing. *** 'buildbot restart' command The 'buildbot restart BASEDIR' command will perform a 'buildbot stop' and 'buildbot start', and will attempt to wait for the buildbot process to shut down in between. This is useful when you need to upgrade the code on your buildmaster or buildslave and want to take it down for a minimum amount of time. *** build properties Each build now has a set of named "Build Properties", which can be set by steps and interpolated into ShellCommands. The 'revision' and 'got_revision' properties are the most interesting ones available at this point, and can be used e.g. to get the VC revision number into the filename of a generated tarball. See the user's manual section entited "Build Properties" for more details. ** minor features *** IRC now takes password= argument Useful for letting your bot claim a persistent identity. 
*** svn_buildbot.py is easier to modify to understand branches *** BuildFactory has a new .addStep method *** p4poller has new arguments *** new contrib scripts: viewcvspoll, svnpoller, svn_watcher These poll an external VC repository to watch for changes, as opposed to adding a hook script to the repository that pushes changes into the buildmaster. This means higher latency but may be easier to configure, especially if you do not have authority on the repository host. *** VC build property 'got_revision' The 'got_revision' property reports what revision a VC step actually acquired, which may be useful to know when building from HEAD. *** improved CSS in Waterfall The Waterfall display has a few new class= tags, which may make it easier to write custom CSS to make it look prettier. *** robots_txt= argument in Waterfall You can now pass a filename to the robots_txt= argument, which will be served as the "robots.txt" file. This can be used to discourage search engine spiders from crawling through the numerous build-status pages. ** bugfixes *** tests more likely to pass on non-English systems The unit test suite now sets $LANG='C' to make subcommands emit error messages in english instead of whatever native language is in use on the host. This improves the chances that the unit tests will pass on such systems. This affects certain VC-related subcommands too. test_vc was assuming that the system time was expressed with a numeric timezone, which is not always the case, especially under windows. This probably works better now than it did before. This only affects the CVS tests. 'buildbot try' (for CVS) now uses UTC instead of the local timezone. The 'got_revision' property is also expressed in UTC. Both should help deal with buggy versions of CVS that don't parse numeric timezones properly. 
buildbot-3.4.0/master/docs/relnotes/0.7.4.txt000066400000000000000000000133031413250514000206020ustar00rootroot00000000000000Buildbot 0.7.4 was released 23 Aug 2006 ** Things You Need To Know The PBChangeSource's prefix= argument has changed, you probably need to add a slash now. This is mostly used by sites which use Subversion and svn_buildbot.py. The subcommands that are used to create a buildmaster or a buildslave have changed. They used to be called 'buildbot master' and 'buildbot slave'. Now they are called 'buildbot create-master' and 'buildbot create-slave'. Zipf's Law suggests that these are more appropriate names for these infrequently-used commands. The syntax for the c['manhole'] feature has changed. ** new features *** full Perforce support SF#1473939: large patch from Scott Lamb, with docs and unit tests! This includes both the step.P4 source-checkout BuildStep, and the changes.p4poller ChangeSource you'll want to feed it. P4 is now supported just as well as all the other VC systems. Thanks Scott! *** SSH-based Manhole The 'manhole' feature allows buildbot developers to get access to a python read/eval/print loop (REPL) inside the buildmaster through a network connection. Previously, this ran over unencrypted telnet, using a simple username/password for access control. The new release defaults to encrypted SSH access, using either username/password or an authorized_keys file (just like sshd). There also exists an unencrypted telnet form, but its use is discouraged. The syntax for setting up a manhole has changed, so master.cfg files that use them must be updated. The "Debug options" section in the user's manual provides a complete description. *** Multiple Logfiles BuildSteps can watch multiple log files in realtime, not just stdout/stderr. This works in a similar fashion to 'tail -f': the file is polled once per second, and any new data is sent to the buildmaster. 
This requires a buildslave running 0.7.4 or later, and a warning message is produced if used against an old buildslave (which will otherwise produce no data). Use "logfiles={'name': 'filename'}" to take advantage of this feature from master.cfg, and see the "ShellCommand" section of the user's manual for full documentation. The 'Trial' buildstep has been updated to use this, to display _trial_temp/test.log in realtime. It also knows to fall back to the previous "cat" command if the buildslave is too old. *** BuildStep URLs BuildSteps can now add arbitrary URLs which will be displayed on the Waterfall page in the same place that Logs are presented. This is intended to provide a link to generated HTML pages, such as the output of a code coverage tool. The step is responsible for somehow uploading the HTML to a web server: this feature merely provides an easy way to present the HREF link to the user. See the "BuildStep URLs" section of the user's manual for details and examples. *** LogObservers BuildSteps can now attach LogObservers to various logfiles, allowing them to get real-time log output. They can use this to watch for progress-indicating events (like counting the number of files compiled, or the number of tests which have run), and update both ETA/progress-tracking and step text. This allows for more accurate ETA information, and more information passed to the user about how much of the process has completed. The 'Trial' buildstep has been updated to use this for progress tracking, by counting how many test cases have run. ** new documentation What classes are useful in your master.cfg file? A table of them has been added to the user's manual, in a section called "Index of Useful Classes". Want a list of all the keys in master.cfg? Look in the "Index of master.cfg keys" section. A number of pretty diagrams have been added to the "System Architecture" portion of the manual, explaining how all the buildbot pieces fit together. 
An HTML form of the user's manual is now shipped in the source tarball. This makes it a bit bigger: sorry about that. The old PyCon-2003 paper has been removed from the distribution, as it is mostly supplanted by the user's manual by this point. ** bugfixes SF#1217699 + SF#1381867: The prefix= argument to PBChangeSource has been changed: now it does just a simple string-prefix match and strip. The previous behavior was buggy and unhelpful. NOTE: if you were using prefix= before, you probably need to add a slash to the end of it. SF#1398174: ignore SVN property changes better, fixed by Olivier Bonnet SF#1452801: don't double-escape the build URL, fixed by Olivier Bonnet SF#1401121: add support for running py2exe on windows, by Mark Hammond reloading unchanged config files with WithProperties shouldn't change anything. All svn commands now include --non-interactive so they won't ask for passwords. Instead, the command will fail if it cannot be performed without user input. Deprecation warnings with newer versions of Twisted have been hushed. ** compatibility I haven't actually removed support for Twisted-1.3.0 yet, but I'd like to. The step_twisted default value for --reporter matches modern Twisteds, though, and won't work under 1.3.0. ShellCommand.flunkOnFailure now defaults to True, so any shell command which fails counts as a build failure. Set this to False if you don't want this behavior. ** minor features contrib/darcs_buildbot.py contains a new script suitable for use in a darcs commit-hook. Hovering a cursor over the yellow "Build #123" box in the Waterfall display will pop up an HTML tooltip to show the reason for the build. Thanks to Zandr Milewski for the suggestion. contrib/CSS/*.css now contains several contributed stylesheets to make the Waterfall display a bit less ugly. Thanks to John O'Duinn for gathering them. ShellCommand and its derivatives can now accept either a string or a list of strings in the description= and descriptionDone= arguments. 
Thanks to Paul Winkler for the catch. buildbot-3.4.0/master/docs/relnotes/0.7.5.txt000066400000000000000000000150531413250514000206070ustar00rootroot00000000000000Buildbot 0.7.5 was released 10 Dec 2006 ** Things You Need To Know *** The Great BuildStep Renaming All BuildSteps have moved! They used to be classes in buildbot.process.step, but now they all have separate modules in buildbot.steps.* . They have been split out into separate categories: for example, the source checkout steps are now buildbot.steps.source.CVS, buildbot.steps.source.Darcs, etc. The most commonly used one is probably buildbot.steps.shell.ShellCommand . The python-specific steps are in buildbot.steps.python, and the Twisted-specific steps are in buildbot.steps.python_twisted . You will need to update your master.cfg files to use the new names. The old names are deprecated and will be removed altogether in the next release. *** Compatibility Buildbot now requires python-2.3 or later. Buildbot now requires Twisted-2.0.0 or later. Support for earlier versions of both has finally been removed. If you discover it works with unsupported versions, please return your Buildbot to the factory for repairs :-). Buildbot has *not* yet been tested against the recent python-2.5 release. It has been tested against the latest SVN version of Twisted, but only in conjunction with python-2.4 . ** new features *** reconfiguring a Builder no longer causes a disconnect/reconnect cycle This means that sending SIGHUP to the master or running 'buildbot reconfig MASTERDIR' command no longer interrupts any current builds, nor does it lose pending builds like it did before. This involved a fairly substantial refactoring of the various internal BotPerspective/BotMaster/Builder classes. Note that reconfiguring Schedulers still loses any Changes that were waiting for the tree to become stable: hopefully this will be fixed in the next release. 
*** 'buildbot start/restart/reconfig' now show logs until startup is complete These commands now have additional code to follow twistd.log and display all the lines that are emitted from the beginning of the start/reconfig action until it has completed. This gives you a chance to see any problems detected in the config file without needing to manually look in twistd.log or use another shell to 'tail -f' it. This also makes it clear which config file is being used. This functionality is not available under windows. In addition, if any problems are detected during 'start' or 'restart' (but not reconfig), the buildbot command will terminate with a non-zero exit status, making it easier to use in scripts. Closes SF#1517975. *** Locks now take maxCount=N to allow multiple simultaneous owners This allows Locks to be non-exclusive but still limit maximum concurrency. Thanks to James Knight for the patch. Closes SF#1434997. *** filetransfer steps buildbot.steps.transfer.FileUpload is a buildstep that will move files from the slave to the master. Likewise, FileDownload will move files from the master down to the buildslave. Many thanks to Albert Hofkamp for contributing these classes. Closes SF#1504631. *** pyflakes step buildbot.steps.python.PyFlakes will run the simple 'pyflakes' static analysis tool and parse the results to tell you about undefined names, unused imports, etc. You'll need to tell it how to run pyflakes, usually with something like command=["pyflakes", "src/packagedir"] or the like. The default command is "make pyflakes", which assumes that you have a suitable target in your top-level Makefile. *** Monotone support Nathaniel Smith has contributed initial support for the Monotone version control system. The code still needs docs and tests, but on the other hand it has been in use by the Monotone buildbot for a long time now, so it is probably fairly stable. 
*** Tinderbox support Ben Hearsum and the Mozilla crew have contributed some classes to allow Buildbot to work with Tinderbox clients. One piece is buildbot.changes.bonsaipoller.BonsaiPoller, which is a ChangeSource that polls a Bonsai server (which is a kind of web-vased viewcvs CGI script) to discover source code changes. The other piece is buildbot.status.tinderbox.TinderboxMailNotifier, which is a status plugin that sends email in the same format as Tinderbox does, which allows a number of Tinderbox tools to be driven by Buildbot instead. *** SVN Poller Niklaus Giger contributed a ChangeSource (buildbot.changes.svnpoller) which polls a remote SVN repository on a periodic basis. This is useful when, for whatever reason, you cannot add a post-commit hook script to the repository. This obsoletes the external contrib/svn_watcher.py script. ** notes for plugin developers *** IStatusLog.readlines() This new method makes it easier for a status plugin (or a BuildStep.createSummary method) to walk through a StatusLog one line at a time. For example, if you wanted to create an extra logfile that just contained all the GCC warnings from the main log, you could use the following: def createSummary(self, log): warnings = [] for line in log.readlines(): if "warning:" in line: warnings.append() self.addCompleteLog('warnings', "".join(warnings)) The "BuildStep LogFiles" section of the user's manual contains more information. This method is not particularly memory-efficient yet (it reads the whole logfile into memory first, then splits it into lines); this will be improved in a future release. ** bug fixes *** Update source.SVN to work with the new SVN-1.4.0 The latest subversion changed the behavior in an unusual situation which caused the unit tests to fail. This was unlikely to cause a problem in actual usage, but the tests have been updated to pass with the new version. 
*** update svn_buildbot.py to avoid mangling filenames Older versions of this script were stripping the wrong number of columns from the output of 'svnlook changed', and would sometimes mangle filenames. This has been fixed. Closes SF#1545146. *** logfiles= caused subsequent build failures under Windows Earlier versions of buildbot didn't explicitly close any logfiles= file handles when the build finished. On windows (where you cannot delete a file that someone else is reading), this could cause the next build to fail as the source checkout step was unable to delete the old working directory. This has been fixed. Closes SF#1568415. *** logfiles= didn't work on OS-X Macintosh OS-X has a different behavior when reading files that have reached EOF, the result was that logfiles= sometimes didn't work. Thanks to Mark Rowe for the patch. ** other changes The 'buildbot sighup MASTERDIR' command has been replaced with 'buildbot reconfig MASTERDIR', since that seems to be a slightly more meaningful name. The 'sighup' form will remain as an alias. buildbot-3.4.0/master/docs/relnotes/0.7.6.txt000066400000000000000000000212741413250514000206120ustar00rootroot00000000000000Buildbot 0.7.6 was released 30 Sep 2007 ** Things You Need To Know *** 'buildbot upgrade-master' Each time you install a new version of Buildbot, you should run the new 'buildbot upgrade-master' command on each of your pre-existing buildmasters. This will add files and fix (or at least detect) incompatibilities between your old config and the new code. *** new WebStatus page The Waterfall has been replaced by the more general WebStatus display, described below. WebStatus serves static files from a new public_html/ directory that lives in the buildmaster's basedir. Files like index.html, buildbot.css, and robots.txt are served directly from that directory, so any modifications you wish to make should be made to those files. 
In particular, any custom CSS you've written should be copied into public_html/buildbot.css. The 'upgrade-master' command will populate this directory for you. The old Waterfall page is deprecated, but it should continue to work for another few releases. It is now a subclass of WebStatus which just replaces the default root URL with another copy of the /waterfall resource. *** Compatibility: Python-2.3 or newer, Twisted-2.0 or newer No compatibility losses here, buildbot-0.7.6 is compatible with the same versions of python and twisted that 0.7.5 was. Buildbot is tested on a regular basis (http://buildbot.buildbot.net) against nearly a full matrix of Python-(2.3,2.4,2.5) * Twisted-(2.0,2.1,2.2,2.4,2.5). *** New Buildbot Home Page Buildbot has moved to a new Trac instance at http://buildbot.net/ , and all new bugs and tickets should be filed there. The old sourceforge bugs at http://buildbot.sf.net/ will slowly be migrated over. Mailing lists are still managed at sourceforge, and downloads are still available there. *** Changed/Deprecated master.cfg Keys and Classes c['sources'] (plural) has been replaced by c['change_source'] (singular). c['bots'] has been replaced by c['buildslaves'], and it expects a list of BuildSlave instances instead of tuples. See below for more details. The 'freshcvsmail' change source has been deprecated, and will be removed in the next release. The html.Waterfall status target has been deprecated, and replaced by html.WebStatus . ** New Features *** WebStatus The new WebStatus display is a superset of the old Waterfall. It contains a waterfall as a sub-page, but it also contains pages with more compact representations of recent build status. The "one_line_per_build" page contains just that, and "one_box_per_builder" shows just the information from the top of the waterfall page (last-finished-build and current-activity). 
The initial page (when you hit the root of the web site) is served from index.html, and provides links to the Waterfall as well as the other pages. Most of these pages can be filtered by adding query arguments to the URL. Adding "?builder=XYZ" will cause the page to only show results for the given builder. Adding "?builder=XYZ&builder=ABC" will show results for either builder. "?branch=trunk" will limit the results to builds that involved code from the trunk. The /waterfall page has arguments to hide those annoying "buildslave connected" messages, to start and and at arbitrary times, and to auto-refresh at a chosen interval (with a hardcoded minimum of 15 seconds). It also has a "help" page with forms that will help you add all of these nifty filtering arguments. The recommended practice is to modify the index.html file to include links to the filtered pages that you find most useful. Note that WebStatus defaults to allowForce=False, meaning that the display will not offer or accept "Force Build" or "Stop Build" controls. (The old Waterfall defaults to allowForce=True). The new WebStatus pages try very hard to use only relative links, making life better when the Buildbot sits behind an HTTP reverse proxy. In addition, there is a rudimentary XMLRPC server run by the WebStatus object. It only has two methods so far, but it will acquire more in the future. The first customer of this is a project to add a buildbot plugin to Trac. *** BuildFactory.addStep(Step(args)) BuildFactories can be set up either with a complete list of steps, or by calling the .addStep() method repeatedly. The preferred way to provide a step is by instantiating it, rather than giving a class/kwargs pair. This gives the BuildStep class a chance to examine the arguments (and complain about anything it doesn't like) while the config file is being read and problems are being logged. 
For example, the old-style: from buildbot.process.factory import BuildFactory, s steps = [s(CVS, cvsroot="blah", mode="copy"), s(Compile, command=["make", "all"]), s(Test, command=["make", "test"]), ] f = BuildFactory(steps) is now: f = BuildFactory() f.addStep( CVS(cvsroot="blah", mode="copy") ) f.addStep( Compile(command=["make", "all"]) ) f.addStep( Test(command=["make", "test"]) ) Authors of BuildStep subclasses which override __init__ to add new arguments must register them with self.addFactoryArguments(**newargs) to make sure that those classes will work with this new style, otherwise the new arguments will be lost. Using class/kwargs pairs is deprecated, and will be removed in a future release. *** BuildSlave instances, max_builds=, notify_on_missing= Buildslave specification has changed a lot in this release. The old config: c['bots'] = [ ("bot1name", "bot1passwd"), ("bot2name", "bot2passwd") ] is now: from buildbot.buildslave import BuildSlave c['slaves'] = [ BuildSlave("bot1name", "bot1passwd"), BuildSlave("bot2name", "bot2passwd") ] This new form gives us the ability to add new controls. The first is "max_builds=", which imposes a concurrency limit that is like the usual SlaveLock, but gives the buildmaster the opportunity to find a different slave to run the build. (the buildslave is chosen before the SlaveLock is claimed, so pure SlaveLocks don't let you take full advantage of build farms). The other addition is "notify_on_missing=", which accepts an email address (or list of addresses), and sends a message when the buildslave has been disconnected for more than an hour (configurable with missing_timeout=). This may be useful when you expect that the buildslave hosts should be available most of the time, and want to investigate the reasons that it went offline. ** Other Improvements The IRC bot has been refactored to make it easier to add instant-messaging status delivery in the future. The IM plugins are not yet written, though. 
When multiple buildslaves are available for a given build, one of them will be picked at random. In previous releases, the first one on the list was always picked. This helps to add a certain measure of load-balancing. More improvements will be made in the future. When the buildslave does a VC checkout step that requires clobbering the build directory (i.e. in all modes except for 'update'), the buildslave will first set the permissions on all build files to allow their deletion, before it attempts to delete them. This should fix some problems in which a build process left non-user-writable files lying around (frequently a result of enthusiastic unit tests). The BuildStep's workdir= argument can now accept a WithProperties() specification, allowing greater control over the workdir. Support for the 'Bazaar' version control system (/usr/bin/bzr) has been added, using the buildbot.steps.source.Bzr class. This is a replacement for the old 'Arch' (/usr/bin/tla and /usr/bin/baz) systems, which are still supported by Buildbot with the source.Arch and source.Bazaar classes, respectively. Unfortunately the old baz system claimed the 'Bazaar' classname early, so the new system must use source.Bzr instead of the desired source.Bazaar . A future release might change this. A rudimentary Gnome Panel applet is provided in contrib/bb_applet.py, which provides 'buildbot statusgui' -like colored status boxes inside the panel. Installing it is a bit tricky, though. The 'buildbot try' command now accepts a '--diff=foo.patch' argument, to let you provide a pre-computed patch. This makes it easier to test out patches that you've looked over for safety, without first applying them to your local source tree. A new Mercurial change source was added, hg_buildbot.py, which runs as an in-process post-commit hook. This gives us access to much more information about the change, as well as being much faster. 
The email-based changesource have been refactored, to make it easier to write new mail parsers. A parser for the SVN "commit-email.pl" script has been added. ** Bugs Fixed Far too many to count. Please see http://buildbot.net/trac/query?status=closed&milestone=0.7.6 for a partial list of tickets closed for this release, and the ChangeLog for a complete list of all changes since 0.7.5 . buildbot-3.4.0/master/docs/relnotes/0.7.7.txt000066400000000000000000000073301413250514000206100ustar00rootroot00000000000000Buildbot 0.7.7 was released 28 Mar 2008 ** Things You Need To Know *** builder names must not start with an underscore (`_'). These are now reserved for internal buildbot purposes, such as the magic "_all" pseudo-builder that the web pages use to allow force-build buttons that start builds on all Builders at once. ** New Features *** "buildbot checkconfig" The "buildbot checkconfig" command will look at your master.cfg file and tell you if there are any problems with it. This can be used to test potential changes to your config file before submitting them to the running buildmaster. This is particularly useful to run just before doing "buildbot restart", since the restart will fail if the config file has an error. By running "buildbot checkconfig master.cfg && buildbot restart", you'll only perform the restart if the config file was ok. Many thanks to Ben Hearsum for the patch. *** Waterfall "?category=FOO" query-arguments The Waterfall page now accepts one or more "category=" query arguments in the URL, to filter the display by categories. These behave a lot like the "builder=" query argument. Thanks to Jermo Davann for the patch. ** Bugs Fixed Many bugs were fixed, and many minor features were added. Many thanks to Dustin Mitchell who fixed and coordinated many of these. 
Here is a terse list, for more details, please see the Trac page for the 0.7.7 release, at http://buildbot.net/trac/query?status=closed&milestone=0.7.7 : Many of the URLs generated by the buildbot were wrong. Display of last-heard-from timestamps on the buildslaves web page were wrong. Asking an IRC bot about a build waiting on a Lock should no longer crash. Same for the web viewer. Stop treating the encouraged info/ directory as leftover. Add more force/stop build buttons. Timestamps displayed on the waterfall now handle daylight savings properly. p4poller no longer quits after a single failure. Improved Git support, including 'try', branch, and revisions. Buildslaves now use 'git', not 'cogito'. Make older hg client/servers handle specific-revision builds properly. Fix twisted.scripts._twistw problem on twisted-2.5.0 and windows. Fix workdir= and env= on ShellCommands Fix logfile-watching in 'buildbot start' on OS-X. Fix ShellCommand crashes when the program emits >640kB of output per chunk. New WarningCountingShellCommand step. Fix TreeSize step. Fix transfer.FileUpload/FileDownload crashes for large files. Make 'buildbor reconfig' on windows tell you that it doesn't work. Add a To: header to the mail sent by the slave-missing timeout. Disable usePTY= for most unit tests, it makes some debian systems flunk tests. Add 'absolute source stamps' Add 'triggerable schedulers', and a buildstep to trigger them. Remove buildbot.changes.freshcvsmail Add new XMLRPC methods: getAllBuilders, getStatus, getLastBuilds. Accept WithProperties in more places: env=, workdir=, others. Use --no-auth-cache with SVN commands to avoid clobbering shared svn state. Add hours/minutes/seconds in the waterfall's ETA display. Trial: count Doctest lines too. ShellCommand: record more info in the headers: stdin closing, PTY usage. Make it possible to stop builds across reconfig boundaries. 
SVN revision numbers are now passed as strings, which was breaking MailNotifier ** Deprecation Schedule The changes.freshcvsmail change source was replaced by changes.mail.FCMaildirSource in 0.7.6, and has been removed in 0.7.7 . c['sources'] (plural) was replaced by c['change_source'] (singular) in 0.7.6, and will be removed by 0.8.0. c['bots'] was replaced by c['buildslaves'] in 0.7.6, and will be removed by 0.8.0 . c['bots'] only accepts BuildSlave instances, not name/passwd tuples. The html.Waterfall status target was replaced by html.WebStatus in 0.7.6, and will be removed by 0.8.0. buildbot-3.4.0/master/docs/relnotes/0.7.8.txt000066400000000000000000000102121413250514000206020ustar00rootroot00000000000000Buildbot 0.7.8 was released 24 Jul 2008 ** New features The IRC bot will respond to three new commands: 'notify' subscribes the channel (or the sender, if the command is sent as a private "/msg") to hear about build events. 'join' tells the bot to join some new IRC channel. 'leave' tells it to leave a channel. See the "IRC Bot" section of the User's Manual for details. (#171) Build Steps now have "statistics", in addition to logfiles. These are used to count things like how many tests passed or failed. There are methods to sum these counters across all steps and display the results in the Build status. The Waterfall display now shows the count of failed tests on the top-most box in each column, using this mechanism. The new buildbot.steps.shell.PerlModuleTest step was added, to run Perl unit tests. This is a wrapper around the regular ShellCommand that parses the output of the standard perl unit test system and counts how many tests passed/failed/etc. The results are put into the step's summary text, and a count of tests passed/failed/skipped are tracked in the steps's statistics. 
The factory.CPAN build factory has been updated to use this, so configuring a Buildbot to test a perl module available from CPAN should be as easy as: s = source.CVS(cvsroot, cvsmodule) f = factory.CPAN(s) Build Properties have been generalized: they remain associated with a single Build, but the properties can be set from a variety of sources. In previous releases, the Build itself would set properties like 'buildername', 'branch', and 'revision' (the latter two indicating which version of the source code it was trying to get), and the source-checkout BuildSteps would set a property named 'got_revision' (to indicate what version of the soruce code it actually got). In this release, the 'scheduler' property is set to indicate which Scheduler caused the build to be started. In addition, the config file can specify properties to be set on all Builds, or on all Builds for a specific Builder. All these properties are available for interpolation into ShellCommands and environment variables by using the WithProperties() marker. It may be easier to implement simple build parameterization (e.g. to upload generated binaries to a specific directory, or to only perform long-running tests on a nightly build instead of upon every checkin) by using these Build Properties than to write custom BuildSteps. ** Other improvements The /buildslaves web page shows which slaves are currently running builds. Offline slaves are displayed in bold. Buildbot's setup.py now provides metadata to setuptools (if installed): an entry_points script was added, and a dependency upon twisted-2.4.x or newer was declared. This makes it more likely that 'easy_install buildbot' will work. The MailNotifier class acquired a mode="passing" flag: in this mode, the buildbot will only send mail about passing builds (versus only on failing builds, or only on builds which failed when the previous build had passed). 
** Bugs fixed Don't display force/stop build buttons when build control is disabled (#246) When a build is waiting on a lock, don't claim that it has started (#107) Make SVN mode=copy tolerate symlinks on freebsd, "cp -rp" -> "cp -RPp" (#86) The svnpoller changesource now ignores branch deletion (#261) The Git unit tests should run even if the user has not told Git about their username/email. The WebStatus /xmlrpc server's getStatus() method was renamed to the more-accurate getLastBuildResults(). The TinderboxMailNotifier status output acquired an useChangeTime= argument. The bonsaipoller changesource got some fixes. ** Deprecation Schedule No features have been deprecated in this release, and no deprecated features have been removed. As a reminder, the following deprecated features are scheduled for removal in an upcoming release: c['sources'] (plural) was replaced by c['change_source'] (singular) in 0.7.6, and will be removed by 0.8.0. c['bots'] was replaced by c['buildslaves'] in 0.7.6, and will be removed by 0.8.0 . c['bots'] only accepts BuildSlave instances, not name/passwd tuples. The html.Waterfall status target was replaced by html.WebStatus in 0.7.6, and will be removed by 0.8.0. buildbot-3.4.0/master/docs/relnotes/0.7.9.txt000066400000000000000000000107661413250514000206210ustar00rootroot00000000000000Buildbot 0.7.9 was released 15 Sep 2008 ** New Features *** Configurable public_html directory (#162) The public_html/ directory, which provides static content for the WebStatus() HTTP server, is now configurable. The default location is still the public_html/ subdirectory of the buildmaster's base directory, but you can change this by passing a suitable argument when creating the WebStatus() instance in your master.cfg file: c['status'].append( WebStatus(8080, public_html="/var/www/buildbot") ) *** Lock access modes (#313) Albert Hofkamp added code to provide two distinct access modes to Locks: "counting" and "exclusive". 
Locks can accept a configurable number of "counting"-mode users, or a single "exclusive"-mode. For example, a Lock is defined with maxCount=3, and then a 'compile' BuildStep uses this lock in counting mode, while a 'cleanup' BuildStep uses this lock in exclusive mode. Then, there can be one, two, or three simultaneous Builds in the compile step (as long as there are no builds in the cleanup step). Only one build can be in the cleanup step at a time, and if there is such a build in the cleanup step, then the compile steps in other builds will wait for it to finish. Please see the "Interlocks" section of the user's manual for more details. ** Bugs Fixed *** Buildslave missing_timeout= fired too quickly (#211) By providing a missing_timeout= argument when creating the BuildSlave instance, you can ask the buildmaster to send email if a buildslave is disconnected for too long. A bug in the previous version caused this notification to be sent too soon, rather than waiting until the timeout period expired. This should be fixed now. *** Test command display fixed (#332) In the previous version, a steps.shell.Test step would display the parsed test results (in the step's box on the waterfall display) in lieu of any other descriptive text the step might provide. In this release, these two pieces of information are combined. ** Minor Changes The buildmaster's version is logged to its twistd.log file at startup. The buildslave does the same, to its own logfile. Remote commands now record how long each command took. The "elapsedTime=" message will appear in the step's main logfile. The "buildbot restart" command no longer fails if the buildbot wasn't already running. The FileUpload and FileDownload steps now create their target directories (and any missing intermediate directories) before writing to the destination file. The per-build and per-step web pages now show the start, finish, and elapsed time of their build or step. 
If a Subversion-based build is started with a mixture of Changes that specify particular numeric revisions and "HEAD" Changes (which indicate that a trunk checkout is desired), the build will use a trunk checkout. Previously this would probably cause an error. It is not clear how this situation might arise. ** Compatibility With Other Tools The mercurial commit hook (buildbot.changes.hgbuildbot) in the previous version doesn't work with hg-1.0 or later (it uses an API function that was present in the hg-0.9.5 release, but was removed from hg-1.0). This incompability has been fixed: the new version of buildbot should be compatible with hg-1.0 and newer (and it probably retains compatibility with hg-0.9.5 and earlier too). (#328) The Git tool has traditionally provided two ways to run each command, either as subcommands of /usr/bin/git (like "git checkout"), or as individual tools (like /usr/bin/git-checkout). The latter form is being removed in the upcoming 1.6 Git release. Previous versions of Buildbot have used the git-checkout form, and will break when Git is upgraded to 1.6 or beyond. The new Buildbot release switches to the subcommand form. Note that this is a change on the buildslave side. The Git checkout command will now use the default branch (as set in the steps.source.Git() step definition) if the changes that it is building do not specify some other branch to build. (#340) ** Deprecation Schedule No features have been deprecated in this release, and no deprecated features have been removed. As a reminder, the following deprecated features are scheduled for removal in an upcoming release: c['sources'] (plural) was replaced by c['change_source'] (singular) in 0.7.6, and will be removed by 0.8.0. c['bots'] was replaced by c['buildslaves'] in 0.7.6, and will be removed by 0.8.0 . c['bots'] only accepts BuildSlave instances, not name/passwd tuples. The html.Waterfall status target was replaced by html.WebStatus in 0.7.6, and will be removed by 0.8.0. 
buildbot-3.4.0/master/docs/relnotes/0.8.0.txt000066400000000000000000000061641413250514000206060ustar00rootroot00000000000000Buildbot 0.8.0 was released 25 May 2010 ** (NOTE!) Scheduler requires keyword arguments If you are creating your Scheduler like this: Scheduler("mysched", "mybranch", 0, ["foo", "bar"]) then it's time to change that to specify each of the arguments with a keyword: Scheduler(name="mysched", branch="mybranch", treeStableTimer=0, builderNames=["foo", "bar"]) ** Database Backend Scheduler, change, and build request information is now stored in a database - by default, in SQLite, although MySQL is also supported. With this change, scheduled builds will persist over buildmaster restarts, as will interrelationships between schedulers (e.g., Triggerable and Dependent). Upgrading to the new database backend is easy, although it brings additional requirements on the buildmaster. See the Buildbot documentation for more information. ** Visual Studio / VC++ Compile Steps ** New Change/SourceStamp attributes 'project' and 'repository' These attributes can be used to further refine matching by schedulers. Repository completes the SourceStamp: the tuple of (repository, branch, revision) completely specifies a source code tree. Likewise, the project attribute can be used to support building several distinct projects within one buildmaster, replacing the use of category for this purpose. Matching can be done using regular expressions, so it's even possible to support nested projects! ** ShellCommands expand environment variables If you pass to a shell command an environment variable like this: ShellCommand(..., env={"FOO": "${BAR}"}) then, on the slave side the variable FOO will have the same value as the alread existing BAR variable on the slave. This is mostly used to expand variable like this: "PATH": "/my/directory:${PATH}" where PATH will have "/my/directory" prepended to it. 
** Builders can setup properties There is a new parameter to the builders to setup properties on a per-builder basis. ** New /json web status This view has lots of useful information perfectly formed for serving as input to JavaScript status displays. See /json/help for details. ** Jinja All web status is now generated using the Jinja templating engine, which gives buildbot a much more attractive and maintainable appearance. Buildbot's output is also now XHTML-compliant! ** Authorization Framework The web-based status displays now provide fine-grained control over who can do what - force builds, stop builds, cancel builds, etc. See the manual for configuration details. ** Mercurial uses full revisions Mercurial now sets got_revision to the full 40-character revision id instead of the short IDs. ** Cleanup, Bug Fixes, and Test Fixes Thanks to help from a number of devoted contributors, this version of Buildbot has seen a lot of house-cleaning, and even passes all of its own unit tests! ** Removals *** Removed buildbot.status.html.Waterfall (deprecated in 0.7.6) Note that this does not remove the waterfall -- just an old version of it which did not include the rest of the WebStatus pages. *** BuildmasterConfig no longer accepts 'bots' and 'sources' as keys (deprecated in 0.7.6). Use 'slaves' and 'change_source' instead. buildbot-3.4.0/master/docs/relnotes/0.8.1.txt000066400000000000000000000041261413250514000206030ustar00rootroot00000000000000Buildbot 0.8.1 was released 16 June 2010 ** Slave Split into separate component Installing 'buildbot' will no longer allow you to run a slave - for that, you'll now need the 'buildslave' component, which is available by easy_install. This is merely a packaging change - the buildslave and buildbot components are completely inter-compatible, just as they always have been. 
** Features *** Add googlecode_atom.py to contrib (ticket #842) *** Implement clean master shutdown, available through WebStatus ** Fixes *** Pass local environment variables along with getProcessOutput. Required for ssh agent authentication. *** IRC doc fixes (ticket #852) *** Remove builder count from one_line_per_build (ticket #854) *** Set the 'revision' property more often (ticket #101) *** Change property priority ordering (ticket #809) *** Fixes to MaildirSource for CVS *** Use shutil.rmtree on POSIX systems *** Fix NameError in MailNotifier (ticket #758) *** Reduce verbosity of patches in twistd.log (ticket #803) *** Documentation updates to reflect UI customization via templates (ticket #866) ** Deprecations *** Arch, Bazaar, and Monotone to be removed in 0.8.2 This decision isn't final, but support for these VC's will be removed in version 0.8.2 unless a maintainers steps forward to document, test, and update them. *** Support for starting buildmaster from Makefiles to be removed in 0.8.2 In a little-used feature, 'buildbot start' would run 'make start' if a Makefile.buildbot existed in the master directory. This functionality will be removed in Buildbot-0.8.2, and the create-master command will no longer create a Makefile.sample. Of course, Buildbot still supports build processes on the slave using make! * Slave Changes ** First release of buildslave as a separate package ** Fixes *** Command-line options changed Added new `-n|--no-logrotate` flag to create-slave command which disables internal logging and log rotation mechanism in buildbot.tac (ticket #973) *** Delete srcdir before retrying git clone (ticket #884) *** Fix setup.py to install a launcher script properly in all cases. buildbot-3.4.0/master/docs/relnotes/0.8.10.rst000066400000000000000000000035261413250514000206570ustar00rootroot00000000000000Release Notes for Buildbot 0.8.10 ================================= .. Any change that adds a feature or fixes a bug should have an entry here. 
Most simply need an additional bulleted list item, but more significant changes can be given a subsection of their own. The following are the release notes for Buildbot 0.8.10. Buildbot 0.8.10 was released on the 2nd of December, 2014. Master ------ Features ~~~~~~~~ * Both the P4 source step and P4 change source support ticket-based authentication. * Clickable 'categories' links added in 'Waterfall' page (web UI). Fixes ~~~~~ * Buildbot is now compatible with SQLAlchemy 0.8 and higher, using the newly-released SQLAlchemy-Migrate. * The :bb:step:`HTTPStep` step's request parameters are now renderable. * Fixed content spoofing vulnerabilities (:bug:`2589`). * Fixed cross-site scripting in status_json (:bug:`2943`). * :class:`~buildbot.status.status_gerrit.GerritStatusPush` supports specifying an SSH identity file explicitly. * Fixed bug which made it impossible to specify the project when using the BitBucket dialect. * Fixed SVN master-side source step: if a SVN operation fails, the repository end up in a situation when a manual intervention is required. Now if SVN reports such a situation during initial check, the checkout will be clobbered. * Fixed master-side source steps to respect the specified timeout when removing files. Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ Slave ----- Features ~~~~~~~~ Fixes ~~~~~ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.8.9..eight buildbot-3.4.0/master/docs/relnotes/0.8.12.rst000066400000000000000000000111351413250514000206540ustar00rootroot00000000000000Release Notes for Buildbot 0.8.11 ================================= .. Any change that adds a feature or fixes a bug should have an entry here. 
Most simply need an additional bulleted list item, but more significant changes can be given a subsection of their own. The following are the release notes for Buildbot 0.8.11. This version was released on the 20th of April, 2015. Master ------ Requirements: * Buildbot works python-dateutil >= 1.5 Features ~~~~~~~~ * GitHub change hook now supports application/json format. * Buildbot is now compatible with Gerrit v2.6 and higher. To make this happen, the return result of ``reviewCB`` and ``summaryCB`` callback has changed from .. code-block:: python (message, verified, review) to .. code-block:: python {'message': message, 'labels': {'label-name': value, ... } } The implications are: * there are some differences in behaviour: only those labels that were provided will be updated * Gerrit server must be able to provide a version, if it can't the ``GerritStatusPush`` will not work .. note:: If you have an old style ``reviewCB`` and/or ``summaryCB`` implemented, these will still work, however there could be more labels updated than anticipated. More detailed information is available in ``GerritStatusPush`` section. * Buildbot now supports plugins. They allow Buildbot to be extended by using components distributed independently from the main code. They also provide for a unified way to access all components. When previously the following construction was used:: from buildbot.kind.other.bits import ComponentClass ... ComponentClass ... the following construction achieves the same result:: from buildbot.plugins import kind ... kind.ComponentClass ... Kinds of components that are available this way are described in :doc:`../manual/plugins`. .. note:: While the components can be still directly imported as ``buildbot.kind.other.bits``, this might not be the case after Buildbot v1.0 is released. * :bb:chsrc:`GitPoller` now supports detecting new branches * :bb:step:`MasterShellCommand` now renders the ``path`` argument. 
* :class:`~buildbot.process.buildstep.ShellMixin`: the ``workdir`` can now be overridden in the call to ``makeRemoteShellCommand``. * GitHub status target now allows to specify a different base URL for the API (usefule for GitHub enterprise installations). This feature requires `txgithub` of version 0.2.0 or better. * GitHub change hook now supports payload validation using shared secret, see the GitHub hook documentation for details. * Added StashStatusPush status hook for Atlassian Stash * Builders can now have multiple "tags" associated with them. Tags can be used in various status classes as filters (eg, on the waterfall page). * ``MailNotifier`` no longer forces SSL 3.0 when ``useTls`` is true. * GitHub change hook now supports function as codebase argument. * GitHub change hook now supports pull_request events. * :class:`~buildbot.process.buildstep.Trigger`: the ``getSchedulersAndProperties`` customization method has been backported from Nine. This provides a way to dynamically specify which schedulers (and the properties for that scheduler) to trigger at runtime. Fixes ~~~~~ * GitHub change hook now correctly responds to ping events. * ``buildbot.steps.http`` steps now correctly have ``url`` parameter renderable * :bb:step:`MasterShellCommand` now correctly logs the working directory where it was run. * With Git(), force the updating submodules to ensure local changes by the build are overwritten. This both ensures more consistent builds and avoids errors when updating submodules. * With Git(), make sure 'git submodule sync' is called before 'git submodule update' to update stale remote urls (:bug:`2155`). Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * The builder parameter "category" is deprecated and is replaced by a parameter called "tags". 
Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * :class:`~buildbot.process.buildstep.Trigger`: ``createTriggerProperties`` now takes one argument (the properties to generate). * :class:`~buildbot.process.buildstep.Trigger`: ``getSchedulers`` method is no longer used and was removed. Slave ----- Features ~~~~~~~~ Fixes ~~~~~ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.8.10..532cf49 buildbot-3.4.0/master/docs/relnotes/0.8.2.txt000066400000000000000000000057601413250514000206110ustar00rootroot00000000000000Buildbot 0.8.2 was released 29 Oct 2010 ** Upgrading Upgrading to from the previous version will require an 'upgrade-master' run. However, the schema changes are backward-compatible, so if a downgrade is required, it will not be difficult. ** New Requirements The Buildmaster now requires Jinja-2.1 or higher. Both master and slave now require Twisted-8.0.0. Although Twisted-2.5.0 may still work, it is not tested and not supported. ** Command-line options changed To resolve conflicting command-line options (ticket #972) for sendchange command the following changes were done: * `-m` option now means `--master` * `-c` option now means `--comments` * `-C` option now means `--category` Added new `-n|--no-logrotate` flag to create-master command which disables internal logging and log rotation mechanism in buildbot.tac (ticket #973) ** MasterShellCommand semantics change The MasterShellCommand now provides the buildmaster's environment to the step by default; pass env={} to pass a clean environment, instead. ** Log Rotation The default 'create-master' output now rotates ten twistd.log files, each of about 10MiB. This is a change from older versions, which would rotate an unbounded number of 1MiB files. 
** New configuration key, 'changeCacheSize' This sets the number of changes that buildbot will keep in memory at once. Users of distributed version control systems should consider setting this to a high value (e.g. 10,000) ** New libvirt-based Latent Buildslave Support This extends the support already included for EC2 buildslaves to include any virtualization platform supported by libvirt. ** Canceling Pending Builds for a Change Change pages on the webstatus now have buttons to cancel any pending builds that include that change (across all builders). The corresponding authz privilege to control access to this feature is 'stopChange'. ** New Change source *** CVSMaildirSource This parses mail sent by buildbot_cvs_mail.py in contrib directory. See docs for more info. ** New Steps *** VC++ 9, VS2008, VCExpress9 - part of the vstudio suite of steps ** Deprecations and Removals *** Removed sendchange's --revision_number argument (use --revision) *** Deprecating old CVS MairdirSources: Please post to the list if you are using FreshCVS FCMaildirSource Syncmail SyncmailMaildirSource Bonsai BonsaiMaildirSource *** statusgui is deprecated in this version and will be removed in the next release. Please file a bug at http://buildbot.net if you wish to reverse this decision. *** The Twisted-only steps BuildDebs and ProcessDocs have been removed. * Slave Changes ** Log Rotation The default 'create-slave' output now rotates ten twistd.log files, each of about 10MiB. This is a change from older versions, which would rotate an unbounded number of 1MiB files. ** twistd.hostname On startup, the buildslave writes its hostname to twistd.hostname. This is intended to contextualize twistd.pid, which does not specify the host on which the buildslave is running. 
buildbot-3.4.0/master/docs/relnotes/0.8.3.txt000066400000000000000000000044411413250514000206050ustar00rootroot00000000000000Buildbot 0.8.3 was released 19 Dec 2010 ** Deprecations and Removals *** Change sources can no longer call change-related methods on self.parent. Instead, use self.master methods, e.g., self.master.addChange. ** PBChangeSource now supports authentication PBChangeSource now supports the `user` and `passwd` arguments. Users with a publicly exposed PB port should use these parameters to limit sendchange access. Previous versions of Buildbot should never be configured with a PBChangeSource and a publicly accessible slave port, as that arrangement allows anyone to connect and inject a change into the Buildmaster without any authentication at all, aside from the hard-coded 'change'/'changepw' credentials. In many cases, this can lead to arbitrary code injection on slaves. ** Experiemental Gerrit and Repo support A new ChangeSource (GerritChangeSource), status listener (GerritStatusPush), and source step (Repo) are available in this version. These are not fully documented and still have a number of known bugs outstanding (see http://buildbot.net/trac/wiki/RepoProject), and as such are considered experimental in this release. ** WithProperties now supports lambda substitutions WithProperties now has the option to pass callable functions as keyword arguments to substitute in the results of more complex Python code at evaluation-time. ** New 'SetPropertiesFromEnv' step This step uses the slave environment to set build properties. ** Deprecations and Removals *** The console view previously had an undocumented feature that would strip leading digits off the category name. This was undocumented and apparently non-functional, and has been removed. (#1059) *** contrib/hg_buildbot.py was removed in favor of buildbot.changes.hgbuildbot. 
*** The misnamed sendchange option 'username' has been renamed to 'who'; the old option continues to work, but is deprecated and will be removed. (#1711) * Slave Changes ** Slave-initiated Graceful Shutdown If the allow_shutdown parameter in buildbot.tac is set, then the slave can be gracefully shut down locally by the slave admin. The shutdown operates by the slave informing the master that it would like to shut down; the master then finishes any active builds on the slave, and instructs the slave to shut down. See the documentation for more information. buildbot-3.4.0/master/docs/relnotes/0.8.4.txt000066400000000000000000000112101413250514000205760ustar00rootroot00000000000000Buildbot 0.8.4 was released 12 Jun 2010 ** Buildmaster Metrics The buildmaster now actively measures a number of quantities that can be useful in debugging and tuning its performance. See the documentation for more information. ** Monotone support Monotone support has returned to Buildbot, thanks to Richard Levitte. ** `Blocker` step A "beta" version of the Blocker step has been added; this step allows multiple concurrent builds to be synchronized. It is "beta" in the sense that it may contain significant bugs, is only documented in the source code, and has an interface that is subject to non-compatible change in later versions of Buildbot. See `contrib/blockertest` for a test and demonstration of the new step's functionality. ** Deprecations, Removals, and Non-Compatible Changes *** Init script now uses /etc/default/buildmaster for instance configuration. Also MASTER_ENABLED used in /etc/default/buildmaster now accepts 'true|yes|1' to enable instance and 'false|no|0' to disable(not case sensitive). Other values will be considered as syntax error. *** 'buildbot.status.words.IRC' now defaults to `AllowForce=False` to prevent IRC bots from being allowed to force builds by default. 
*** MasterShellCommand and all of the transfer steps now default to haltOnFailure=True and flunkOnFailure=True *** GitPoller's 'workdir' parameter should always be supplied; using the default (/tmp/gitpoller_work) is deprecated and will not be supported in future versions. *** ChangeFilter should now be imported from `buildbot.changes.filter'; the old import path will still work. *** What used to be called simply 'Scheduler' should now be instantiated as 'SingleBranchScheduler', and its branch argument is mandatory. *** The Dependent scheduler is now in its own module, 'buildbot.schedulers.dependent', although the old name will continue to work. *** The mergeRequests parameters are now more flexible, but an incompatible change was made: if the BuilderConfig mergeRequests argument is explicitly set to True, then the default merge method will be used. In earlier versions, this configuration fell back to the global c['mergeRequests'] parameter's value. To avoid this, remove `mergeRequests=True` from any BuilderConfig constructor invocations. *** The `Status.getBuildSets` method now returns its result via Deferred. *** The `BuilderControl.getPendingBuilds` method has been renamed to `getPendingBuildRequestControls`; `BuilderStatus.getPendingBuilds` has been renamed to `getPendingBuildStatuses`. Both now return their results via Deferred. *** The utility method `Builder.getOldesetRequestTime` now returns its result via a Deferred, and that result is now a DateTime object. *** The remote BuildSetStatus method `waitForSuccess` is no longer available. *** The BuildRequestStatus methods `getSubmitTime` and `getSourceStamp` now return their results via a Deferred. The `asDict` method omits these values, as it returns synchronously. *** Buildbot now uses temporary tables, which can cause problems with replication in MySQL. See "Database Specification" in the manual for more details. 
** Scheduler Improvements *** Nightly scheduler now accepts a change_filter argument ** SQLAlchemy & SQLAlchemy-Migrate Buildbot now uses SQLAlchemy as a database abstraction layer. This gives greater inter-database compatibility and a more stable and reliable basis for this core component of the framework. SQLAlchemy-Migrate is used to manage changes to the database schema from version to version. *** Postgres support Buildbot should now work with a Postgres backend just as well as it does with MySQL or SQLite. Buildbot is actively tested against all three backends. ** Less garish color scheme The default color scheme for Buildbot has been modified to make it slightly less, well, neon. Note: This will not affect already-created masters, as their default.css file has already been created. If you currently use the default and want to get the new version, just overwrite public_html/default.css with the copy in this version. * Slave Changes ** Monotone support Monotone support has returned to Buildbot, thanks to Richard Levitte. ** Buildslave now places all spawned commands into process groups on POSIX systems. This means that in most cases child processes are cleaned up properly, and removes the most common use for usePTY. As of this version, usePTY should be set to False for almost all users of Buildbot. ** Init script now uses /etc/default/buildslave for instance configuration. Also SLAVE_ENABLED used in /etc/default/buildslave now accepts 'true|yes|1' to enable instance and 'false|no|0' to disable(not case sensitive). Other values will be considered as syntax error. buildbot-3.4.0/master/docs/relnotes/0.8.5.txt000066400000000000000000000071661413250514000206160ustar00rootroot00000000000000Buildbot 0.8.5 was released 3 Sept 2010 ** Updated, sphinx-based documentation The Buildbot documentation has been ported to Sphinx and significantly refactored and extended. 
** Better support for users in Buildbot (GSoC project) Buildbot now tracks user identity across version-control commits, IRC and web interactions, and Try submissions. ** New and improved Source steps (GSoC project) Source steps have been rewritten to have a simpler, more consistent configuration, and to run on the master instead of the slave, allowing much more control over their behavior. ** EC2 instances are now terminated instead of stopped. This is really only relevant for EBS-backed instances, as Buildbot will now free the instance and associated EBS storage when shutting down the slave. ** SQLite databases use write-ahead logging WAL mode offers much greater concurrency (preventing the dreaded 'database is locked' errors) and is also more efficient and durable. ** Deprecations, Removals, and Non-Compatible Changes *** Any custom IStatusListener providers which do not inherit from StatusListener should provide a checkConfig(all_statuses): method. This is to verify at startup that there are no conflicting status configurations. *** The db.buildrequests.claimBuildRequests method can no longer re-claim already-claimed requests; use reclaimBuildRequests instead. The database no longer tracks master instances, so the unclaimOldIncarnationRequests method has been removed. Note that several of the methods in this module now perform fewer consistency checks, for efficiency. *** Upgrades directly from versions older than 0.6.5 will no longer automatically migrate logfiles. *** Any custom change_hook_dialects should now return a (changes, src) tuple from its getChange method, instead of just the changes. The src is used for noting what VCS the changes came from, and is just a string such as 'git'. *** Scripts in the contrib directory that use addChange() to send Changes to the buildmaster now require an additional `src` argument when calling addChange(). This lets the buildmaster know which VCS the Change is coming from, such as 'git' or 'svn'. 
This means that you need to use the version of your contrib script that corresponds to your buildmaster. *** The un-documented P4Sync source step has been deprecated and will be removed in the next version. ** Customizable validation regexps The global c['validation'] parameter can be used to adjust the regular expressions used to validate branches, revisions, and properties input by the user. ** Logging for SVNPoller cleaned up All logging for SVNPoller now starts with "SVNPoller: ". Previously it was mixed case and not uniform. ** Source steps have logEnviron parameter Similar to shell commands, a logEnviron parameter is now supported for Source steps. ** Interested users for Try Try jobs can now include the name of an interested user, which will be kept with the patch and displayed in the web status. ** 'buildbot checkconfig' improved This command no longer copies the configuration to a temporary directory. This change allows more complex configurations to be tested with checkconfig. * Slave Changes ** Retry on UnauthorizedLogin In previous versions, if a slave received UnauthorizedLogin from the master, it would stop retrying and exit. This has proven to be less helpful than simply retrying, so as of this version the slave will continue to retry. ** Deprecations, Removals, and Non-Compatible Changes *** The format of the data that determines whether a directory requires a new checkout has changed for Perforce. The first build (only) after an upgrade may do an unnecessary full checkout. buildbot-3.4.0/master/docs/relnotes/0.8.6.rst000066400000000000000000000210311413250514000205730ustar00rootroot00000000000000Release Notes for Buildbot v0.8.6p1 =================================== .. Any change that adds a feature or fixes a bug should have an entry here. Most simply need an additional bulleted list item, but more significant changes can be given a subsection of their own. The following are the release notes for Buildbot v0.8.6p1. 
Buildbot v0.8.6 was released on March 11, 2012. Buildbot v0.8.6p1 was released on March 25, 2012. 0.8.6p1 ------- In addition to what's listed below, the 0.8.6p1 release adds the following. * Builders are no longer displayed in the order they were configured. This was never intended behavior, and will become impossible in the distributed architecture planned for Buildbot-0.9.x. As of 0.8.6p1, builders are sorted naturally: lexically, but with numeric segments sorted numerically. * Slave properties in the configuration are now handled correctly. * The web interface buttons to cancel individual builds now appear when configured. * The ForceScheduler's properties are correctly updated on reconfig - :bug:`2248`. * If a slave is lost while waiting for locks, it is properly cleaned up - :bug:`2247`. * Crashes when adding new steps to a factory in a reconfig are fixed - :bug:`2252`. * MailNotifier AttributeErrors are fixed - :bug:`2254`. * Cleanup from failed builds is improved - :bug:`2253`. Master ------ * If you are using the GitHub hook, carefully consider the security implications of allowing un-authenticated change requests, which can potentially build arbitrary code. See :bug:`2186`. Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Forced builds now require that a :bb:sched:`ForceScheduler` be defined in the Buildbot configuration. For compatible behavior, this should look like:: from buildbot.schedulers.forcesched import ForceScheduler c['schedulers'].append(ForceScheduler( name="force", builderNames=["b1", "b2", ... ])) Where all of the builder names in the configuration are listed. See the documentation for the *much* more flexible configuration options now available. * This is the last release of Buildbot that will be compatible with Python 2.4. The next version will minimally require Python-2.5. See :bug:`2157`. * This is the last release of Buildbot that will be compatible with Twisted-8.x.y. 
The next version will minimally require Twisted-9.0.0. See :bug:`2182`. * ``buildbot start`` no longer invokes make if a ``Makefile.buildbot`` exists. If you are using this functionality, consider invoking make directly. * The ``buildbot sendchange`` option ``--username`` has been removed as promised in :bug:`1711`. * StatusReceivers' checkConfig method should now take an additional `errors` parameter and call its :py:meth:`~buildbot.config.ConfigErrors.addError` method to indicate errors. * The Gerrit status callback now gets an additional parameter (the master status). If you use this callback, you will need to adjust its implementation. * SQLAlchemy-Migrate version 0.6.0 is no longer supported. See :ref:`Buildmaster-Requirements`. * Older versions of SQLite which could limp along for previous versions of Buildbot are no longer supported. The minimum version is 3.4.0, and 3.7.0 or higher is recommended. * The master-side Git step now checks out 'HEAD' by default, rather than master, which translates to the default branch on the upstream repository. See :pull:`301`. * The format of the repository strings created by ``hgbuildbot`` has changed to contain the entire repository URL, based on the ``web.baseurl`` value in ``hgrc``. To continue the old (incorrect) behavior, set ``hgbuildbot.baseurl`` to an empty string as suggested in the Buildbot manual. * Master Side :bb:step:`SVN` Step has been corrected to properly use ``--revision`` when ``alwaysUseLatest`` is set to ``False`` when in the ``full`` mode. See :bug:`2194` * Master Side :bb:step:`SVN` Step parameter svnurl has been renamed repourl, to be consistent with other master-side source steps. * Master Side :bb:step:`Mercurial` step parameter ``baseURL`` has been merged with ``repourl`` parameter. The behavior of the step is already controlled by ``branchType`` parameter, so just use a single argument to specify the repository. 
* Passing a :py:class:`buildbot.process.buildstep.BuildStep` subclass (rather than instance) to :py:meth:`buildbot.process.factory.BuildFactory.addStep` has long been deprecated, and will be removed in version 0.8.7. * The `hgbuildbot` tool now defaults to the 'inrepo' branch type. Users who do not explicitly set a branch type would previously have seen empty branch strings, and will now see a branch string based on the branch in the repository (e.g., `default`). Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * The interface for runtime access to the master's configuration has changed considerably. See :doc:`/developer/config` for more details. * The DB connector methods ``completeBuildset``, ``completeBuildRequest``, and ``claimBuildRequest`` now take an optional ``complete_at`` parameter to specify the completion time explicitly. * Buildbot now sports sourcestamp sets, which collect multiple sourcestamps used to generate a single build, thanks to Harry Borkhuis. See :pull:`287`. * Schedulers no longer have a ``schedulerid``, but rather an ``objectid``. In a related change, the ``schedulers`` table has been removed, along with the :py:meth:`buildbot.db.schedulers.SchedulersConnectorComponent.getSchedulerId` method. * The Dependent scheduler tracks its upstream buildsets using :py:class:`buildbot.db.schedulers.StateConnectorComponent`, so the ``scheduler_upstream_buildsets`` table has been removed, along with corresponding (undocumented) :py:class:`buildbot.db.buildsets.BuildsetsConnector` methods. * Errors during configuration (in particular in :py:class:`BuildStep` constructors), should be reported by calling :py:func:`buildbot.config.error`. Features ~~~~~~~~ * The IRC status bot now display build status in colors by default. It is controllable and may be disabled with useColors=False in constructor. * Buildbot can now take advantage of authentication done by a front-end web server - see :pull:`266`. 
* Buildbot supports a simple cookie-based login system, so users no longer need to enter a username and password for every request. See the earlier commits in :pull:`278`. * The master-side SVN step now has an `export` method which is similar to `copy`, but the build directory does not contain Subversion metadata. (:bug:`2078`) * :py:class:`Property` instances will now render any properties in the default value if necessary. This makes possible constructs like :: command=Property('command', default=Property('default-command')) * Buildbot has a new web hook to handle push notifications from Google Code - see :pull:`278`. * Revision links are now generated by a flexible runtime conversion configured by :bb:cfg:`revlink` - see :pull:`280`. * Shell command steps will now "flatten" nested lists in the ``command`` argument. This allows substitution of multiple command-line arguments using properties. See :bug:`2150`. * Steps now take an optional ``hideStepIf`` parameter to suppress the step from the waterfall and build details in the web. (:bug:`1743`) * :py:class:`Trigger` steps with ``waitForFinish=True`` now receive a URL to all the triggered builds. This URL is displayed in the waterfall and build details. See :bug:`2170`. * The ``master/contrib/fakemaster.py`` script allows you to run arbitrary commands on a slave by emulating a master. See the file itself for documentation. * MailNotifier allows multiple notification modes in the same instance. See :bug:`2205`. * SVNPoller now allows passing extra arguments via argument ``extra_args``. See :bug:`1766` Slave ----- Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * BitKeeper support is in the "Last-Rites" state, and will be removed in the next version unless a maintainer steps forward. Features ~~~~~~~~ Details ------- For a more detailed description of the changes made in this version, see the Git log itself: .. 
code-block:: bash git log buildbot-0.8.5..buildbot-0.8.6 Older Versions -------------- Release notes for older versions of Buildbot are available in the :src:`master/docs/relnotes/` directory of the source tree, or in the archived documentation for those versions at http://buildbot.net/buildbot/docs. buildbot-3.4.0/master/docs/relnotes/0.8.7.rst000066400000000000000000000260341413250514000206040ustar00rootroot00000000000000Release Notes for Buildbot v0.8.7 ================================= .. Any change that adds a feature or fixes a bug should have an entry here. Most simply need an additional bulleted list item, but more significant changes can be given a subsection of their own. The following are the release notes for Buildbot v0.8.7. Buildbot v0.8.7 was released on September 22, 2012. Buildbot 0.8.7p1 was released on November 21, 2012. 0.8.7p1 ------- In addition to what's listed below, the 0.8.7p1 release adds the following. * The ``SetPropertiesFromEnv`` step now correctly gets environment variables from the slave, rather than those set on the master. Also, it logs the changes made to properties. * The master-side ``Git`` source step now doesn't try to clone a branch called ``HEAD``. This is what ``git`` does by default, and specifying it explicitly doesn't work as expected. * The ``Git`` step properly deals with the case when there is a file called ``FETCH_HEAD`` in the checkout. * Buildbot no longer forks when told not to daemonize. * Buildbot's startup is now more robust. See :bug:`1992`. * The ``Trigger`` step uses the provided list of source stamps exactly, if given, instead of adding them to the sourcestamps of the current build. In 0.8.7, they were combined with the source stamps for the current build. * The ``Trigger`` step again completely ignores the source stamp of the current build, if ``alwaysUseLatest`` is set. In 0.8.7, this was mistakenly changed to only ignore the specified revision of the source stamp. 
* The ``Triggerable`` scheduler is again properly passing changes through to the scheduled builds. See :bug:`2376`. * Web change hooks log errors, allowing debugging. * The ``base`` change hook now properly decodes the provided date. * ``CVSMailDir`` has been fixed. * Importing ``buildbot.test`` no longer causes python to exit, if ``mock`` isn't installed. The fixes ``pydoc -k`` when buildbot is installed. * ``Mercurial`` properly updates to the correct branch, when using ``inrepo`` branches. * Buildbot now doesn't fail on invalid UTF-8 in a number of places. * Many documentation updates and fixes. Master ------ Features ~~~~~~~~ * Buildbot now supports building projects composed of multiple codebases. New schedulers can aggregate changes to multiple codebases into source stamp sets (with one source stamp for each codebase). Source steps then check out each codebase as required, and the remainder of the build process proceeds normally. See the :ref:`Multiple-Codebase-Builds` for details. * The format of the ``got_revision`` property has changed for multi-codebase builds. It is now a dictionary keyed by codebase. * ``Source`` and ``ShellCommand`` steps now have an optional ``descriptionSuffix``, a suffix to the ``description``/``descriptionDone`` values. For example this can help distinguish between multiple ``Compile`` steps that are applied to different codebases. * The ``Git`` step has a new ``getDescription`` option, which will run ``git describe`` after checkout normally. See :bb:step:`Git` for details. * A new interpolation placeholder :ref:`Interpolate`, with more regular syntax, is available. * A new ternary substitution operator ``:?`` and ``:#?`` is available with the ``Interpolate`` class. * ``IRenderable.getRenderingFor`` can now return a deferred. * The Mercurial hook now supports multiple masters. See :pull:`436`. * There's a new poller for Mercurial: :bb:chsrc:`HgPoller`. 
* The new ``HTPasswdAprAuth`` uses libaprutil (through ctypes) to validate the password against the hash from the .htpasswd file. This adds support for all hash types htpasswd can generate. * ``GitPoller`` has been rewritten. It now supports multiple branches and can share a directory between multiple pollers. It is also more resilient to changes in configuration, or in the underlying repository. * Added a new property ``httpLoginUrl`` to ``buildbot.status.web.authz.Authz`` to render a nice Login link in WebStatus for unauthenticated users if ``useHttpHeader`` and ``httpLoginUrl`` are set. * ``ForceScheduler`` has been updated: * support for multiple :ref:`codebases` via the ``codebases`` parameter * ``NestedParameter`` to provide a logical grouping of parameters. * ``CodebaseParameter`` to set the branch/revision/repository/project for a codebase * new HTML/CSS customization points. Each parameter is contained in a ``row`` with multiple 'class' attributes associated with them (eg, 'force-string' and 'force-nested') as well as a unique id to use with Javascript. Explicit line-breaks have been removed from the HTML generator and are now controlled using CSS. * The :bb:chsrc:`SVNPoller` now supports multiple projects and codebases. See :pull:`443`. * The :bb:reporter:`MailNotifier` now takes a callable to calculate the "previous" build for purposes of determining status changes. See :pull:`489`. * The ``copy_properties`` parameter, given a list of properties to copy into the new build request, has been deprecated in favor of explicit use of ``set_properties``. Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Buildbot master now requires at least Python-2.5 and Twisted-9.0.0. * Passing a :py:class:`~buildbot.process.buildstep.BuildStep` subclass (rather than instance) to :py:meth:`~buildbot.process.factory.BuildFactory.addStep` is no longer supported. The ``addStep`` method now takes exactly one argument. 
* Buildbot master requires ``python-dateutil`` version 1.5 to support the Nightly scheduler. * ``ForceScheduler`` has been updated to support multiple :ref:`codebases`. The branch/revision/repository/project are deprecated; if you have customized these values, simply provide them as ``codebases=[CodebaseParameter(name='', ...)]``. * The POST URL names for ``AnyPropertyParameter`` fields have changed. For example, 'property1name' is now 'property1_name', and 'property1value' is now 'property1_value'. Please update any bookmarked or saved URL's that used these fields. * ``forcesched.BaseParameter`` API has changed quite a bit and is no longer backwards compatible. Updating guidelines: * ``get_from_post`` is renamed to ``getFromKwargs`` * ``update_from_post`` is renamed to ``updateFromKwargs``. This function's parameters are now called via named parameters to allow subclasses to ignore values it doesn't use. Subclasses should add ``**unused`` for future compatibility. A new parameter ``sourcestampset`` is provided to allow subclasses to modify the sourcestamp set, and will probably require you to add the ``**unused`` field. * The parameters to the callable version of ``build.workdir`` have changed. Instead of a single sourcestamp, a list of sourcestamps is passed. Each sourcestamp in the list has a different :ref:`codebase` * The undocumented renderable ``_ComputeRepositoryURL`` is no longer imported to :py:mod:`buildbot.steps.source`. It is still available at ``buildbot.steps.source.oldsource``. * ``IProperties.render`` now returns a deferred, so any code rendering properties by hand will need to take this into account. * ``baseURL`` has been removed in :bb:step:`SVN` to use just ``repourl`` - see :bug:`2066`. Branch info should be provided with ``Interpolate``. 
:: from buildbot.steps.source.svn import SVN factory.append(SVN(baseURL="svn://svn.example.org/svn/")) can be replaced with :: from buildbot.process.properties import Interpolate from buildbot.steps.source.svn import SVN factory.append(SVN(repourl=Interpolate("svn://svn.example.org/svn/%(src::branch)s"))) and :: from buildbot.steps.source.svn import SVN factory.append(SVN(baseURL="svn://svn.example.org/svn/%%BRANCH%%/project")) can be replaced with :: from buildbot.process.properties import Interpolate from buildbot.steps.source.svn import SVN factory.append(SVN(repourl=Interpolate( "svn://svn.example.org/svn/%(src::branch)s/project"))) and :: from buildbot.steps.source.svn import SVN factory.append(SVN(baseURL="svn://svn.example.org/svn/", defaultBranch="branches/test")) can be replaced with :: from buildbot.process.properties import Interpolate from buildbot.steps.source.svn import SVN factory.append(SVN(repourl=Interpolate( "svn://svn.example.org/svn/%(src::branch:-branches/test)s"))) * The ``P4Sync`` step, deprecated since 0.8.5, has been removed. The ``P4`` step remains. * The ``fetch_spec`` argument to ``GitPoller`` is no longer supported. ``GitPoller`` now only downloads branches that it is polling, so specifies a refspec itself. * The format of the changes produced by :bb:chsrc:`SVNPoller` has changed: directory pathnames end with a forward slash. This allows the ``split_file`` function to distinguish between files and directories. Customized split functions may need to be adjusted accordingly. * :ref:`WithProperties` has been deprecated in favor of :ref:`Interpolate`. `Interpolate` doesn't handle functions as keyword arguments. 
The following code using ``WithProperties`` :: from buildbot.process.properties import WithProperties def determine_foo(props): if props.hasProperty('bar'): return props['bar'] elif props.hasProperty('baz'): return props['baz'] return 'qux' WithProperties('%(foo)s', foo=determine_foo) can be replaced with :: from zope.interface import implementer from buildbot.interfaces import IRenderable from buildbot.process.properties import Interpolate @implementer(IRenderable) class determineFoo(object): def getRenderingFor(self, props): if props.hasProperty('bar'): return props['bar'] elif props.hasProperty('baz'): return props['baz'] return 'qux' Interpolate('%s(kw:foo)s', foo=determineFoo()) Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * ``BuildStep.start`` can now optionally return a deferred and any errback will be handled gracefully. If you use ``inlineCallbacks``, this means that unexpected exceptions and failures raised will be captured and logged and the build shut down normally. * The helper methods ``getState`` and ``setState`` from ``BaseScheduler`` have been factored into ``buildbot.util.state.StateMixin`` for use elsewhere. Slave ----- Features ~~~~~~~~ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * The ``P4Sync`` step, deprecated since 0.8.5, has been removed. The ``P4`` step remains. Details ------- For a more detailed description of the changes made in this version, see the Git log itself: .. code-block:: bash git log v0.8.6..v0.8.7 Older Versions -------------- Release notes for older versions of Buildbot are available in the :src:`master/docs/relnotes/` directory of the source tree. Starting with version 0.8.6, they are also available under the appropriate version at http://buildbot.net/buildbot/docs. buildbot-3.4.0/master/docs/relnotes/0.8.8.rst000066400000000000000000000150141413250514000206010ustar00rootroot00000000000000Release Notes for Buildbot v0.8.8 ================================= .. 
Any change that adds a feature or fixes a bug should have an entry here. Most simply need an additional bulleted list item, but more significant changes can be given a subsection of their own. The following are the release notes for Buildbot v0.8.8 Buildbot v0.8.8 was released on August 22, 2013 Master ------ Features ~~~~~~~~ * The ``MasterShellCommand`` step now correctly handles environment variables passed as a list. * The master now polls the database for pending tasks when running buildbot in multi-master mode. * The algorithm to match build requests to slaves has been rewritten :pull:`615`. The new algorithm automatically takes locks into account, and will not schedule a build only to have it wait on a lock. The algorithm also introduces a ``canStartBuild`` builder configuration option which can be used to prevent a build request being assigned to a slave. * ``buildbot stop`` and ``buildbot restart`` now accept ``--clean`` to stop or restart the master cleanly (allowing all running builds to complete first). * The :bb:reporter:`IRC` bot now supports clean shutdown and immediate shutdown by using the command 'shutdown'. To allow the command to function, you must provide `allowShutdown=True`. * :bb:step:`CopyDirectory` has been added. * ``BuildslaveChoiceParameter`` has been added to provide a way to explicitly choose a buildslave for a given build. * default.css now wraps preformatted text by default. * Slaves can now be paused through the web status. * The latent buildslave support is less buggy, thanks :pull:`646`. * The ``treeStableTimer`` for ``AnyBranchScheduler`` now maintains separate timers for separate branches, codebases, projects, and repositories. * :bb:step:`SVN` has a new option `preferLastChangedRev=True` to use the last changed revision for ``got_revision`` * The build request DB connector method :py:meth:`~buildbot.db.buildrequests.BuildRequestsConnectorComponent.getBuildRequests` can now filter by branch and repository.
* A new :bb:step:`SetProperty` step has been added in ``buildbot.steps.master`` which can set a property directly without accessing the slave. * The new :bb:step:`LogRenderable` step logs Python objects, which can contain renderables, to the logfile. This is helpful for debugging property values during a build. * 'buildbot try' now has an additional `--property` option to set properties. Unlike the existing `--properties` option, this new option supports setting only a single property and therefore allows commas to be included in the property name and value. * The ``Git`` step has a new ``config`` option, which accepts a dict of git configuration options to pass to the low-level git commands. See :bb:step:`Git` for details. * :bb:step:`ShellCommand` now validates its arguments during config and will identify any invalid arguments before a build is started. * The list of force schedulers in the web UI is now sorted by name. * OpenStack-based Latent Buildslave support was added. See :pull:`666`. * Master-side support for P4 is available, and provides a great deal more flexibility than the old slave-side step. See :pull:`596`. * Master-side support for Repo is available. The step parameters changed to camelCase. ``repo_downloads`` and ``manifest_override_url`` properties are no longer hardcoded, but are instead consulted as default values via renderables. Renderables are used in favor of callables for ``syncAllBranches`` and ``updateTarball``. * Builder configurations can now include a ``description``, which will appear in the web UI to help humans figure out what the builder does. * GNUAutoconf and other pre-defined factories now work correctly (:bug:`2402`) * The pubDate in RSS feeds is now rendered correctly (:bug:`2530`) Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * The ``split_file`` function for :bb:chsrc:`SVNPoller` may now return a dictionary instead of a tuple.
This allows it to add extra information about a change (such as ``project`` or ``repository``). * The ``workdir`` build property has been renamed to ``builddir``. This change accurately reflects its content; the term "workdir" means something different. ``workdir`` is currently still supported for backwards compatibility, but will be removed eventually. * The ``Blocker`` step has been removed. * Several polling ChangeSources are now documented to take a ``pollInterval`` argument, instead of ``pollinterval``. The old name is still supported. * StatusReceivers' checkConfig method should no longer take an `errors` parameter. It should indicate errors by calling :py:func:`~buildbot.config.error`. * Build steps now require that their name be a string. Previously, they would accept anything, but not behave appropriately. * The web status no longer displays a potentially misleading message, indicating whether the build can be rebuilt exactly. * The ``SetProperty`` step in ``buildbot.steps.shell`` has been renamed to :bb:step:`SetPropertyFromCommand`. * The EC2 and libvirt latent slaves have been moved to ``buildbot.buildslave.ec2`` and ``buildbot.buildslave.libirt`` respectively. * Pre v0.8.7 versions of buildbot supported passing keyword arguments to ``buildbot.process.BuildFactory.addStep``, but this was dropped. Support was added again, while still being deprecated, to ease transition. Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * Added an optional build start callback to ``buildbot.status.status_gerrit.GerritStatusPush`` This release includes the fix for :bug:`2536`. * An optional ``startCB`` callback to :bb:reporter:`GerritStatusPush` can be used to send a message back to the committer. See the linked documentation for details. * bb:sched:`ChoiceStringParameter` has a new method ``getChoices`` that can be used to generate content dynamically for Force scheduler forms. 
Slave ----- Features ~~~~~~~~ * The fix for Twisted bug #5079 is now applied on the slave side, too. This fixes a perspective broker memory leak in older versions of Twisted. This fix was added on the master in Buildbot-0.8.4 (see :bug:`1958`). * The ``--nodaemon`` option to ``buildslave start`` now correctly prevents the slave from forking before running. Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: none git log v0.8.7..v0.8.8 buildbot-3.4.0/master/docs/relnotes/0.8.9.rst000066400000000000000000000442531413250514000206110ustar00rootroot00000000000000Release Notes for Buildbot 0.8.9 ================================ .. Any change that adds a feature or fixes a bug should have an entry here. Most simply need an additional bulleted list item, but more significant changes can be given a subsection of their own. The following are the release notes for Buildbot 0.8.9. Buildbot 0.8.9 was released on 14 June, 2014. Master ------ Features ~~~~~~~~ * The following optional parameters have been added to :py:class:`EC2LatentBuildSlave` * Boolean parameter ``spot_instance``, default False, creates a spot instance. * Float parameter ``max_spot_price`` defines the maximum bid for a spot instance. * List parameter ``volumes``, takes a list of (volume_id, mount_point) tuples. * String parameter ``placement`` is appended to the ``region`` parameter, e.g. ``region='us-west-2', placement='b'`` will result in the spot request being placed in us-west-2b. * Float parameter ``price_multiplier`` specifies the percentage bid above the 24-hour average spot price. * Dict parameter ``tags`` specifies AWS tags as key/value pairs to be applied to new instances. With ``spot_instance=True``, an ``EC2LatentBuildSlave`` will attempt to create a spot instance with the provided spot price, placement, and so on. 
* The web hooks now include support for Bitbucket, GitLab and Gitorious. * The GitHub webhook has been updated to work with v3 of the GitHub webhook API. * The GitHub webhook can now optionally ignore non-distinct commits (:bug:`1861`). * The :bb:chsrc:`HgPoller` and :bb:chsrc:`GitPoller` now split filenames on newlines, rather than whitespace, so files containing whitespace are handled correctly. * Add 'pollAtLaunch' flag for polling change sources. This allows a poller to poll immediately on launch and get changes that occurred while it was down. * Added the :bb:chsrc:`BitbucketPullrequestPoller` changesource. * The :bb:chsrc:`GitPoller` can now be configured to poll all available branches (:pull:`1010`). * The :bb:chsrc:`P4Source` changesource now supports Perforce servers in a different timezone than the buildbot master (:pull:`728`). * Each Scheduler type can now take a 'reason' argument to customize the reason it uses for triggered builds. * A new argument ``createAbsoluteSourceStamps`` has been added to :bb:sched:`SingleBranchScheduler` for use with multiple codebases. * A new argument ``createAbsoluteSourceStamps`` has been added to :bb:sched:`Nightly` for use with multiple codebases. * The :bb:sched:`Periodic` scheduler now supports codebases. * The :bb:sched:`ForceScheduler` now takes a ``buttonName`` argument to specify the name of the button on the force-build form. * Master side source checkout steps now support patches (:bug:`2098`). The :bb:step:`Git` and :bb:step:`Mercurial` steps use their inbuilt commands to apply patches (:bug:`2563`). * Master side source checkout steps now support retry option (:bug:`2465`). * Master-side source checkout steps now respond to the "stop build" button (:bug:`2356`). * :bb:step:`Git` source checkout step now supports reference repositories. * The :bb:step:`Git` step now uses the `git clean` option `-f` twice, to also remove untracked directories managed by another git repository. See :bug:`2560`. 
* The ``branch`` and ``codebase`` arguments to the :bb:step:`Git` step are now renderable. * Gerrit integration with :bb:step:`Git` Source step on master side (:bug:`2485`). * :bb:step:`P4` source step now supports more advanced options. * The master-side :bb:step:`SVN` step now supports authentication for mode=export, fixing :bug:`2463`. * The :bb:step:`SVN` step will now canonicalize URLs before matching them for better accuracy. * The :bb:step:`SVN` step now obfuscates the password in status logs, fixing :bug:`2468`. * :bb:step:`SVN` source step and ShellCommand now support password obfuscation. (:bug:`2468` and :bug:`1748`). * :bb:step:`CVS` source step now checks for "sticky dates" from a previous checkout before updating an existing source directory. * :bb:step:`Repo` now supports a ``depth`` flag when initializing the repo. This controls the amount of git history to download. * The ``manifestBranch`` of the :bb:step:`Repo` step is now renderable. * New source step :bb:step:`Monotone` added on master side. * New source step :bb:step:`Darcs` added on master side. * A new :bb:step:`Robocopy` step is available for Windows builders (:pull:`728`). * The attributes ``description``, ``descriptionDone`` and ``descriptionSuffix`` have been moved from :py:class:`ShellCommand` to its superclass :py:class:`BuildStep` so that any class that inherits from :py:class:`BuildStep` can provide a suitable description of itself. * A new :py:class:`FlattenList` Renderable has been added which can flatten nested lists. * Added new build steps for :bb:step:`VC12`, :bb:step:`VS2013` and :bb:step:`MsBuild12`. * The ``mode`` parameter of the VS steps is now renderable (:bug:`2592`). * The :bb:step:`HTTPStep` step can make arbitrary HTTP requests from the master, allowing communication with external APIs. This new feature requires the optional ``txrequests`` and ``requests`` Python packages.
* A new :bb:step:`MultipleFileUpload` step was added to allow uploading several files (or directories) in a single step. * Information about the buildslaves (admin, host, etc) is now persisted in the database and available even if the slave is not connected. * Buildslave info can now be retrieved via :ref:`Interpolate` and a new ``SetSlaveInfo`` buildstep. * The ``GNUAutotools`` factory now has a reconf option to run autoreconf before ``./configure``. * Builder configurations can now include a ``description``, which will appear in the web UI to help humans figure out what the builder does. * The WebStatus builder page can now filter pending/current/finished builds by property parameters of the form ``?property.=``. * The WebStatus ``StatusResourceBuilder`` page can now take the ``maxsearch`` argument * The WebStatus has a new authz "view" action that allows you to require users to logged in to view the WebStatus. * The WebStatus now shows revisions (+ codebase) where it used to simply say "multiple rev". * The Console view now supports codebases. * The web UI for Builders has been updated: * shows the build 'reason' and 'interested users' * shows sourcestamp information for builders that use multiple codebases (instead of the generic "multiple rev" placeholder that was shown before). * The waterfall and atom/rss feeds can be filtered with the ``project`` url parameter. * The WebStatus ``Authorization`` support now includes a ``view`` action which can be used to restrict read-only access to the Buildbot instance. * The web status now has options to cancel some or all pending builds. * The WebStatus now interprets ANSI color codes in stdio output. * It is now possible to select categories to show in the waterfall help * The web status now automatically scrolls output logs (:pull:`1078`). * The web UI now supports a PNG Status Resource that can be accessed publicly from for example README.md files or wikis or whatever other resource. 
This view produces an image in PNG format with information about the last build for the given builder name or whatever other build number if is passed as an argument to the view. * Revision links for commits on SouceForge (Allura) are now automatically generated. * The 'Rebuild' button on the web pages for builds features a dropdown to choose whether to rebuild from exact revisions or from the same sourcestamps (ie, update branch references) * Build status can be sent to GitHub. Depends on txgithub package. See :bb:reporter:`GitHubStatusPush` and `GitHub Commit Status `_. * The IRC bot of :bb:reporter:`IRC` will, unless useRevisions is set, shorten long lists of revisions printed when a build starts; it will only show two, and the number of additional revisions included in the build. * A new argument ``summaryCB`` has been added to ``GerritStatusPush``, to allow sending one review per buildset. Sending a single "summary" review per buildset is now the default if neither ``summaryCB`` nor ``reviewCB`` are specified. * The ``comments`` field of changes is no longer limited to 1024 characters on MySQL and Postgres. See :bug:`2367` and :pull:`736`. * HTML log files are no longer stored in status pickles (:pull:`1077`) * Builds are now retried after a slave is lost (:pull:`1049`). * The buildbot status client can now access a build properties via the ``getProperties`` call. * The ``start``, ``restart``, and ``reconfig`` commands will now wait for longer than 10 seconds as long as the master continues producing log lines indicating that the configuration is progressing. * Added new config option ``protocols`` which allows to configure multiple protocols on single master. * RemoteShellCommands can be killed by SIGTERM with the sigtermTime parameter before resorting to SIGKILL (:bug:`751`). If the slave's version is less than 0.8.9, the slave will kill the process with SIGKILL regardless of whether sigtermTime is supplied. 
* Introduce an alternative way to deploy Buildbot and try the pyflakes tutorial using :ref:`Docker `. * Added zsh and bash tab-completions support for 'buildbot' command. * An example of a declarative configuration is included in ``master/contrib/SimpleConfig.py``, with copious comments. * Systemd unit files for Buildbot are available in the :contrib-src:`master/contrib/` directory. * We've added some extra checking to make sure that you have a valid locale before starting buildbot (#2608). Forward Compatibility ~~~~~~~~~~~~~~~~~~~~~ In preparation for a more asynchronous implementation of build steps in Buildbot 0.9.0, this version introduces support for new-style steps. Existing old-style steps will continue to function correctly in Buildbot 0.8.x releases and in Buildbot 0.9.0, but support will be dropped soon afterward. See :ref:`New-Style-Build-Steps`, below, for guidance on rewriting existing steps in this new style. To eliminate ambiguity, the documentation for this version only reflects support for new-style steps. Refer to the documentation for previous versions for information on old-style steps. Fixes ~~~~~ * Fixes an issue where :bb:chsrc:`GitPoller` sets the change branch to ``refs/heads/master`` - which isn't compatible with :bb:step:`Git` (:pull:`1069`). * Fixed an issue where the :bb:step:`Git` and :bb:step:`CVS` source steps silently changed the ``workdir`` to ``'build'`` when the 'copy' method is used. * The :bb:step:`CVS` source step now respects the timeout parameter. * The :bb:step:`Git` step now uses the `git submodule update` option `--init` when updating the submodules of an existing repository, so that it will receive any newly added submodules. * The web status no longer relies on the current working directory, which is not set correctly by some initscripts, to find the ``templates/`` directory (:bug:`2586`). 
* The Perforce source step uses the correct path separator when the master is on Windows and the build slave is on a POSIX OS (:pull:`1114`). * The source steps now correctly interpolate properties in ``env``. * ``GerritStatusPush`` now supports setting scores with Gerrit 2.6 and newer. * The change hook no longer fails when passing unicode to ``change_hook_auth`` (:pull:`996`). * The source steps now correctly interpolate properties in ``env``. * Whitespace is properly handled for StringParameter, so that appropriate validation errors are raised for ``required`` parameters (:pull:`1084`). * Fix a rare case where a buildstep might fail from a GeneratorExit exception (:pull:`1063`). * Fixed an issue where UTF-8 data in logs caused RSS feed exceptions (:bug:`951`). * Fix an issue with unescaped author names causing invalid RSS feeds (:bug:`2596`). * Fixed an issue with pubDate format in feeds. * Fixed an issue where the step text value could cause a ``TypeError`` in the build detail page (:pull:`1061`). * Fix failures where ``git clean`` fails but could be clobbered (:pull:`1058`). * Build step now correctly fails when the git clone step fails (:pull:`1057`). * Fixed a race condition in slave shutdown (:pull:`1019`). * Now correctly unsubscribes StatusPush from status updates when reconfiguring (:pull:`997`). * Fixes parsing git commit messages that are blank. * :bb:step:`Git` no longer fails when work dir exists but isn't a checkout (:bug:`2531`). * The `haltOnFailure` and `flunkOnFailure` attributes of :bb:step:`ShellCommand` are now renderable. (:bug:`2486`). * The `rotateLength` and `maxRotatedFile` arguments are no longer treated as strings in :file:`buildbot.tac`. This fixes log rotation. The upgrade_master command will notify users if they have this problem. * Buildbot no longer specifies a revision when pulling from a Mercurial repository (:bug:`438`). * The WebStatus no longer incorrectly refers to fields that might not be visible.
* The GerritChangeSource now sets a default author, fixing an exception that occurred when Gerrit didn't report an owner name/email. * Respects the ``RETRY`` status when an interrupt occurs. * Fixes an off-by-one error when the tryclient is finding the current git branch. * Improve the Mercurial source stamp extraction in the try client. * Fixes some edge cases in timezone handling for python < ``2.7.4`` (:bug:`2522`). * The ``EC2LatentBuildSlave`` will now only consider available AMI's. * Fixes a case where the first build runs on an old slave instead of a new one after reconfig (:bug:`2507`). * The e-mail address validation for the MailNotifier status receiver has been improved. * The ``--db`` parameter of ``buildbot create-master`` is now validated. * No longer ignores default choice for ForceScheduler list parameters * Now correctly handles ``BuilderConfig(..., mergeRequests=False)`` (:bug:`2555`). * Now excludes changes from sourcestamps when they aren't in the DB (:bug:`2554`). * Fixes a compatibility issue with HPCloud in the OpenStack latent slave. * Allow ``_`` as a valid character in JSONP callback names. * Fix build start time retrieval in the WebStatus grid view. * Increase the length of the DB fields ``changes.comments`` and ``buildset_properties.property_value``. Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * The slave-side source steps are deprecated in this version of Buildbot, and master-side support will be removed in a future version. Please convert any use of slave-side steps (imported directly from ``buildbot.steps.source``, rather than from a specific module like ``buildbot.steps.source.svn``) to use master-side steps. * Both old-style and new-style steps are supported in this version of Buildbot. Upgrade your steps to new-style now, as support for old-style steps will be dropped after Buildbot-0.9.0. See :ref:`New-Style-Build-Steps` for details. 
* The ``LoggingBuildStep`` class has been deprecated, and support will be removed along with support for old-style steps after the Buildbot-0.9.0 release. Instead, subclass :class:`~buildbot.process.buildstep.BuildStep` and mix in :class:`~buildbot.process.buildstep.ShellMixin` to get similar behavior. * ``slavePortnum`` option deprecated, please use ``c['protocols']['pb']['port']`` to set up PB port * The ``buildbot.process.mtrlogobserver`` module have been renamed to :py:mod:`buildbot.steps.mtrlogobserver`. * The buildmaster now requires at least Twisted-11.0.0. * The buildmaster now requires at least sqlalchemy-migrate 0.6.1. * The ``hgbuildbot`` Mercurial hook has been moved to ``contrib/``, and does not work with recent versions of Mercurial and Twisted. The runtimes for these two tools are incompatible, yet ``hgbuildbot`` attempts to run both in the same Python interpreter. Mayhem ensues. * The try scheduler's ``--connect=ssh`` method no longer supports waiting for results (``--wait``). * The former ``buildbot.process.buildstep.RemoteCommand`` class and its subclasses are now in :py:mod:`buildbot.process.remotecommand`, although imports from the previous path will continue to work. Similarly, the former ``buildbot.process.buildstep.LogObserver`` class and its subclasses are now in :py:mod:`buildbot.process.logobserver`, although imports from the previous path will continue to work. * The undocumented BuildStep method ``checkDisconnect`` is deprecated and now does nothing as the handling of disconnects is now handled in the ``failed`` method. Any custom steps adding this method as a callback or errback should no longer do so. * The build step ``MsBuild`` is now called ``MsBuild4`` as multiple versions are now supported. An alias is provided so existing setups will continue to work, but this will be removed in a future release. 
Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * The :py:class:`CompositeStepMixin` now provides a ``runGlob`` method to check for files on the slave that match a given shell-style pattern. * The :py:class:`BuilderStatus` now allows you to pass a ``filter_fn`` argument to ``generateBuilds``. Slave ----- Features ~~~~~~~~ * Added zsh and bash tab-completions support for 'buildslave' command. * RemoteShellCommands accept the new sigtermTime parameter from master. This allows processes to be killed by SIGTERM before resorting to SIGKILL (:bug:`751`) * Commands will now throw a ``ValueError`` if mandatory args are not present. * Added a new remote command :py:class:`GlobPath` that can be used to call Python's ``glob.glob`` on the slave. Fixes ~~~~~ * Fixed an issue when buildstep stop() was raising an exception incorrectly if timeout for buildstep wasn't set or was None (see :pull:`753`) thus keeping watched logfiles open (this prevented their removal on Windows in subsequent builds). * Fixed a bug in P4 source step where the ``timeout`` parameter was ignored. * Fixed a bug in P4 source step where using a custom view-spec could result in failed syncs due to incorrectly generated command-lines. * The logwatcher will use ``/usr/xpg4/bin/tail`` on Solaris, it if is available (:pull:`1065`). Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.8.8..v0.8.9 buildbot-3.4.0/master/docs/relnotes/0.9.0.rst000066400000000000000000000751401413250514000206000ustar00rootroot00000000000000Release Notes for Buildbot ``0.9.0`` ======================================== The following are the release notes for Buildbot ``0.9.0``. This version was released on October 6, 2016. This is a concatenation of important changes done between 0.8.12 and 0.9.0. 
This does not contain details of the bug fixes related to the nine beta and rc period. This document was written during the very long period of nine development. It might contain some incoherencies, *please* help us and report them on irc or trac. See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ This version represents a refactoring of Buildbot into a consistent, well-defined application composed of loosely coupled components. The components are linked by a common database backend and a messaging system. This allows components to be distributed across multiple build masters. It also allows the rendering of complex web status views to be performed in the browser, rather than on the buildmasters. The branch looks forward to committing to long-term API compatibility, but does not reach that goal. The Buildbot-0.9.x series of releases will give the new APIs time to "settle in" before we commit to them. Commitment will wait for Buildbot-1.0.0 (as per http://semver.org). Once Buildbot reaches version 1.0.0, upgrades will become much easier for users. To encourage contributions from a wider field of developers, the web application is designed to look like a normal AngularJS application. Developers familiar with AngularJS, but not with Python, should be able to start hacking on the web application quickly. The web application is "pluggable", so users who develop their own status displays can package those separately from Buildbot itself. Other goals: * An approachable HTTP REST API, with real time event features used by the web application but available for any other purpose. * A high degree of coverage by reliable, easily-modified tests. * "Interlocking" tests to guarantee compatibility. For example, the real and fake DB implementations must both pass the same suite of tests. Then no unseen difference between the fake and real implementations can mask errors that will occur in production. 
Requirements ~~~~~~~~~~~~ The ``buildbot`` package requires Python 2.7 -- Python 2.5 and 2.6 are no longer supported. The ``buildbot-slave`` package requires Python 2.6 or higher -- Python 2.4 and 2.5 are no longer supported. No additional software or systems, aside from some minor Python packages, are required. But the devil is in the details: * If you want to do web *development*, or *build* the ``buildbot-www`` package, you'll need Node. It's an Angular app, and that's how such apps are developed. We've taken pains to not make either a requirement for users - you can simply 'pip install' ``buildbot-www`` and be on your way. This is the case even if you're hacking on the Python side of Buildbot. * For a single master, nothing else is required. Note for distro package maintainers: The npm dependency hell ............................................................ In order to *build* the ``buildbot-www`` package, you'll need Node. Node has a very specific package manager named npm, which has the interesting property of allowing different version of package to co-exist in the same environment. The node ecosystem also has the habit of creating packages for a few line of code. Buildbot UI uses the node ecosystem to build its javascript UI. The buildsystem that we use is called `guanlecoja`_, which is just an integration of various javascript build tools. Through npm dependency hell, guanlecoja is depending on *625* npm packages/versions. We do not advise you to try and package all those npm *build* dependencies. They are *not* required in order to *run* buildbot. We do release pre-built packages in the form of the `wheel`_ format on pypi. Those wheels contain the full python source code, and prebuilt javascript source code. Depending on distro maintainers feedback, we *could* also release source tarballs with prebuilt javascript, but those would be pypi packages with different names, e.g. ``buildbot_www_prebuilt.0.9.0.tar.gz``. 
Another option would be to package a `guanlecoja`_ that would embed all its dependencies inside one package. .. _guanlecoja: https://www.npmjs.com/package/guanlecoja .. _wheel: http://pythonwheels.com/ Detailed requirements ..................... see :ref:`Requirements` Features ~~~~~~~~ Buildbot-0.9.0 introduces the :ref:`Data_API`, a consistent and scalable method for accessing and updating the state of the Buildbot system. This API replaces the existing, ill-defined Status API, which has been removed. Buildbot-0.9.0 introduces new :ref:`WWW` Interface using websocket for realtime updates. Buildbot code that interacted with the Status API (a substantial portion!) has been rewritten to use the Data API. Individual features and improvements to the Data API are not described on this page. * Buildbot now supports plugins. They allow Buildbot to be extended by using components distributed independently from the main code. They also provide for a unified way to access all components. When previously the following construction was used:: from buildbot.kind.other.bits import ComponentClass ... ComponentClass ... the following construction achieves the same result:: from buildbot.plugins import kind ... kind.ComponentClass ... Kinds of components that are available this way are described in :doc:`../manual/plugins`. .. note:: While the components can be still directly imported as ``buildbot.kind.other.bits``, this might not be the case after Buildbot v1.0 is released. * Both the P4 source step and P4 change source support ticket-based authentication. * OpenStack latent slaves now support block devices as a bootable volume. * Add new :bb:step:`Cppcheck` step. * Add a new :doc:`Docker latent Workers
`. * Add a new configuration for creating custom services in out-of-tree CI systems or plugins. See :py:class:`buildbot.util.service.BuildbotService` * Add ``try_ssh`` configuration file setting and ``--ssh`` command line option for the try tool to specify the command to use for connecting to the build master. * GitHub change hook now supports application/json format. * Add support for dynamically adding steps during a build. See :ref:`DynamicBuildFactories`. * :bb:chsrc:`GitPoller` now supports detecting new branches * :bb:step:`Git` supports an "origin" option to give a name to the remote repo. * Mercurial hook was updated and modernized. It is no longer necessary to fork. One can now extend PYTHONPATH via the hook configuration. Among others, it permits to use a buildbot virtualenv instead of installing buildbot in all the system. Added documentation inside the hook. Misc. clean-up and reorganization in order to make the code a bit more readable. * UI templates can now be customizable. You can provide html or jade overrides to the www plugins, to customize the UI * The irc command ``hello`` now returns 'Hello' in a random language if invoked more than once. * :bb:sched:`Triggerable` now accepts a ``reason`` parameter. * :bb:reporter:`GerritStatusPush` now accepts a ``builders`` parameter. * `StatusPush` callback now receives build results (success/failure/etc) with the ``buildFinished`` event. * There's a new renderable type, :ref:`Transform`. * :class:`GitPoller` now has a ``buildPushesWithNoCommits`` option to allow the rebuild of already known commits on new branches. * Add GitLab authentication plugin for web UI. See :class:`buildbot.www.oauth2.GitLabAuth`. * :bb:step:`CMake` build step is added. It provides a convenience interface to `CMake `_ build system. * MySQL InnoDB tables are now supported. * :class:`~buildbot.reporters.http.HttpStatusPush` has been ported to reporter API. 
Their API has changed slightly in order to adapt to the new data API.
* :class:`~buildbot.status.status_gerrit.GerritStatusPush` renamed to :class:`~buildbot.reporters.gerrit.GerritStatusPush` * :class:`~buildbot.status.mail.MailNotifier` renamed to :class:`~buildbot.reporters.mail.MailNotifier` * :class:`~buildbot.status.mail.MailNotifier` argument ``messageFormatter`` should now be a :class:`~buildbot.status.message.MessageFormatter`, due to removal of data api, custom message formatters need to be rewritten. * :class:`~buildbot.status.mail.MailNotifier` argument ``previousBuildGetter`` is not supported anymore * :class:`~buildbot.reporters.gerrit.Gerrit` supports specifying an SSH identity file explicitly. * Added StashStatusPush status hook for Atlassian Stash * :bb:reporter:`MailNotifier` no longer forces SSL 3.0 when ``useTls`` is true. * :bb:reporter:`GerritStatusPush` callbacks slightly changed signature, and include a master reference instead of a status reference. * new :bb:reporter:`GitLabStatusPush` to report builds results to GitLab. * new ``HipchatStatusPush`` to report build results to Hipchat. Fixes ~~~~~ * Buildbot is now compatible with SQLAlchemy 0.8 and higher, using the newly-released SQLAlchemy-Migrate. * The version check for SQLAlchemy-Migrate was fixed to accept more version string formats. * The :bb:step:`HTTPStep` step's request parameters are now renderable. * With Git(), force the updating submodules to ensure local changes by the build are overwritten. This both ensures more consistent builds and avoids errors when updating submodules. * Buildbot is now compatible with Gerrit v2.6 and higher. To make this happen, the return result of ``reviewCB`` and ``summaryCB`` callback has changed from .. code-block:: python (message, verified, review) to .. code-block:: python {'message': message, 'labels': {'label-name': value, ... 
branch_re='v[0-9]+\.[0-9]+\.[0-9]+(?:-pre|rc[0-9]+|p[0-9]+)?'), treeStableTimer=None,
* GitHub change hook now correctly uses the refs/pull/xx/merge branch for testing PRs.
Documentation was updated with suggestions to use a third-party web server for serving static files.
Note that this disallows some common characters in buildslave names, including spaces, ``/``, and ``.``.
* :bb:sched:`ForceScheduler` has the following changes: - The default configuration no longer contains four ``AnyPropertyParameter`` instances. - Configuring ``codebases`` is now mandatory, and the deprecated ``branch``, ``repository``, ``project``, ``revision`` are not supported anymore in :bb:sched:`ForceScheduler` - :py:meth:`buildbot.schedulers.forcesched.BaseParameter.updateFromKwargs` now takes a ``collector`` parameter used to collect all validation errors * :bb:sched:`Periodic`, :bb:sched:`Nightly` and :bb:sched:`NightlyTriggerable` have the following changes: - The :bb:sched:`Periodic` and :bb:sched:`Nightly` schedulers can now consume changes and use ``onlyIfChanged`` and ``createAbsoluteTimestamps``. - All "timed" schedulers now handle ``codebases`` the same way. Configuring ``codebases`` is strongly recommended. Using the ``branch`` parameter is discouraged. * Logs are now stored as Unicode strings, and thus must be decoded properly from the bytestrings provided by shell commands. By default this encoding is assumed to be UTF-8, but the :bb:cfg:`logEncoding` parameter can be used to select an alternative. Steps and individual logfiles can also override the global default. * The PB status service uses classes which have now been removed, and anyway is redundant to the REST API, so it has been removed. It has taken the following with it: * ``buildbot statuslog`` * ``buildbot statusgui`` (the GTK client) * ``buildbot debugclient`` The ``PBListener`` status listener is now deprecated and does nothing. Accordingly, there is no external access to status objects via Perspective Broker, aside from some compatibility code for the try scheduler. The ``debugPassword`` configuration option is no longer needed and is thus deprecated. * The undocumented and un-tested ``TinderboxMailNotifier``, designed to send emails suitable for the abandoned and insecure Tinderbox tool, has been removed. 
* :bb:step:`Trigger` now has a ``getSchedulersAndProperties`` method that can be overridden to support dynamic triggering.
The ``db_poll_interval`` configuration parameter and the :bb:cfg:`db` key of the same name are deprecated and will be ignored. * The interface for adding changes has changed. The new method is ``master.data.updates.addChange`` (implemented by :py:meth:`~buildbot.data.changes.ChangeResourceType.addChange`), although the old interface (``master.addChange``) will remain in place for a few versions. The new method: * returns a change ID, not a Change instance; * takes its ``when_timestamp`` argument as epoch time (UNIX time), not a datetime instance; and * does not accept the deprecated parameters ``who``, ``isdir``, ``is_dir``, and ``when``. * requires that all strings be unicode, not bytestrings. Please adjust any custom change sources accordingly. * A new build status, CANCELLED, has been added. It is used when a step or build is deliberately cancelled by a user. * This upgrade will delete all rows from the ``buildrequest_claims`` table. If you are using this table for analytical purposes outside of Buildbot, please back up its contents before the upgrade, and restore it afterward, translating object IDs to scheduler IDs if necessary. This translation would be very slow and is not required for most users, so it is not done automatically. * All of the schedulers DB API methods now accept a schedulerid, rather than an objectid. If you have custom code using these methods, check your code and make the necessary adjustments. * The ``addBuildsetForSourceStamp`` method has become ``addBuildsetForSourceStamps``, and its signature has changed. The ``addBuildsetForSourceStampSetDetails`` method has become ``addBuildsetForSourceStampsWithDefaults``, and its signature has changed. The ``addBuildsetForSourceStampDetails`` method has been removed. The ``addBuildsetForLatest`` method has been removed. It is equivalent to ``addBuildsetForSourceStampDetails`` with ``sourcestamps=None``. These methods are not yet documented, and their interface is not stable. 
Consult the source code for details on the changes. * The ``preStartConsumingChanges`` and ``startTimedSchedulerService`` hooks have been removed. * The triggerable schedulers ``trigger`` method now requires a list of sourcestamps, rather than a dictionary. * The :py:class:`~buildbot.sourcestamp.SourceStamp` class is no longer used. It remains in the codebase to support loading data from pickles on upgrade, but should not be used in running code. * The :py:class:`~buildbot.process.buildrequest.BuildRequest` class no longer has full ``source`` or ``sources`` attributes. Use the data API to get this information (which is associated with the buildset, not the build request) instead. * The undocumented ``BuilderControl`` method ``submitBuildRequest`` has been removed. * The debug client no longer supports requesting builds (the ``requestBuild`` method has been removed). If you have been using this method in production, consider instead creating a new change source, using the :bb:sched:`ForceScheduler`, or using one of the try schedulers. * The ``buildbot.misc.SerializedInvocation`` class has been removed; use :py:func:`buildbot.util.debounce.method` instead. * The ``progress`` attributes of both :py:class:`buildbot.process.buildstep.BuildStep` and :py:class:`buildbot.process.build.Build` have been removed. Subclasses should only be accessing the progress-tracking mechanics via the :py:meth:`buildbot.process.buildstep.BuildStep.setProgress` method. * The :py:class:`~buildbot.config.BuilderConfig` ``nextSlave`` keyword argument takes a callable. This callable now receives :py:class:`~buildbot.process.buildrequest.BuildRequest` instance in its signature as 3rd parameter. **For retro-compatibility, all callable taking only 2 parameters will still work**. * properties object is now directly present in build, and not in build_status. This should not change much unless you try to access your properties via step.build.build_status. 
* Buildbot now requires imports to be sorted using `isort
Documentation was updated with suggestions to use a third-party web server for serving static files.
Most simply need an additional bulleted list item, but more significant changes can be given a subsection of their own. The following are the release notes for Buildbot 0.9.0b1. Buildbot 0.9.0b1 was released on the 25th of June, 2015. Master ------ This version represents a refactoring of Buildbot into a consistent, well-defined application composed of loosely coupled components. The components are linked by a common database backend and a messaging system. This allows components to be distributed across multiple build masters. It also allows the rendering of complex web status views to be performed in the browser, rather than on the buildmasters. The branch looks forward to committing to long-term API compatibility, but does not reach that goal. The Buildbot-0.9.x series of releases will give the new APIs time to "settle in" before we commit to them. Commitment will wait for Buildbot-1.0.0 (as per http://semver.org). Once Buildbot reaches version 1.0.0, upgrades will become much easier for users. To encourage contributions from a wider field of developers, the web application is designed to look like a normal AngularJS application. Developers familiar with AngularJS, but not with Python, should be able to start hacking on the web application quickly. The web application is "pluggable", so users who develop their own status displays can package those separately from Buildbot itself. Other goals: * An approachable HTTP REST API, used by the web application but available for any other purpose. * A high degree of coverage by reliable, easily-modified tests. * "Interlocking" tests to guarantee compatibility. For example, the real and fake DB implementations must both pass the same suite of tests. Then no unseen difference between the fake and real implementations can mask errors that will occur in production. Requirements ~~~~~~~~~~~~ The ``buildbot`` package requires Python 2.6 or higher -- Python 2.5 is no longer supported. 
* Buildbot works with python-dateutil >= 1.5
Their API has changed slightly in order to adapt to the new data API.
* Buildbot UI introduces a brand new Authentication and Authorization framework.
branch_re='v[0-9]+\.[0-9]+\.[0-9]+(?:-pre|rc[0-9]+|p[0-9]+)?'), treeStableTimer=None,
* ``buildbot.ec2buildslave`` is removed; use ``buildbot.buildslave.ec2`` instead. * ``buildbot.libvirtbuildslave`` is removed; use ``buildbot.buildslave.libvirt`` instead. .. TODO: 0.9.0 release notes should include a warning similar to that in 0.8.9 about new-style steps * `buildbot.util.flatten` flattens lists and tuples by default (previously only lists). Additionally, flattening something that isn't the type to flatten has different behaviour. Previously, it would return the original value. Instead, it now returns an array with the original value as the sole element. * ``buildbot.tac`` does not support ``print`` statements anymore. Such files should now use ``print`` as a function instead (see https://docs.python.org/3.0/whatsnew/3.0.html#print-is-a-function for more details). Note that this applies to both python2.x and python3.x runtimes. WebStatus ......... The old, clunky WebStatus has been removed. You will like the new interface! RIP WebStatus, you were a good friend. remove it and replace it with :bb:cfg:`www configuration `. Requirements ............ * Buildbot's tests now require at least Mock-0.8.0. * SQLAlchemy-Migrate-0.6.1 is no longer supported. * Builder names are now restricted to unicode strings or ASCII bytestrings. Encoded bytestrings are not accepted. Steps ..... * New-style steps are now the norm, and support for old-style steps is deprecated. Such support will be removed in the next release. * Status strings for old-style steps could be supplied through a wide variety of conflicting means (``describe``, ``description``, ``descriptionDone``, ``descriptionSuffix``, ``getText``, and ``setText``, to name just a few). While all attempts have been made to maintain compatibility, you may find that the status strings for old-style steps have changed in this version. To fix steps that call ``setText``, try setting the ``descriptionDone`` attribute directly, instead -- or just rewrite the step in the new style. 
* Old-style *source* steps (imported directly from ``buildbot.steps.source``) are no longer supported on the master. * The monotone source step got an overhaul and can now better manage its database (initialize and/or migrate it, if needed). In the spirit of monotone, buildbot now always keeps the database around, as it's an append-only database. Changes and Removals .................... * Buildslave names must now be 50-character :ref:`identifier `. Note that this disallows some common characters in bulidslave names, including spaces, ``/``, and ``.``. * Builders now have "tags" instead of a category. Builders can have multiple tags, allowing more flexible builder displays. * :bb:sched:`ForceScheduler` has the following changes: - The default configuration no longer contains four ``AnyPropertyParameter`` instances. - Configuring ``codebases`` is now mandatory, and the deprecated ``branch``, ``repository``, ``project``, ``revision`` are not supported anymore in :bb:sched:`ForceScheduler` - :py:meth:`buildbot.schedulers.forcesched.BaseParameter.updateFromKwargs` now takes a ``collector`` parameter used to collect all validation errors * :bb:sched:`Periodic`, :bb:sched:`Nightly` and :bb:sched:`NightlyTriggerable` have the following changes: - The :bb:sched:`Periodic` and :bb:sched:`Nightly` schedulers can now consume changes and use ``onlyIfChanged`` and ``createAbsoluteTimestamps``. - All "timed" schedulers now handle ``codebases`` the same way. Configuring ``codebases`` is strongly recommended. Using the ``branch`` parameter is discouraged. * Logs are now stored as Unicode strings, and thus must be decoded properly from the bytestrings provided by shell commands. By default this encoding is assumed to be UTF-8, but the :bb:cfg:`logEncoding` parameter can be used to select an alternative. Steps and individual logfiles can also override the global default. 
* The PB status service uses classes which have now been removed, and anyway is redundant to the REST API, so it has been removed. It has taken the following with it: * ``buildbot statuslog`` * ``buildbot statusgui`` (the GTK client) * ``buildbot debugclient`` The ``PBListener`` status listener is now deprecated and does nothing. Accordingly, there is no external access to status objects via Perspective Broker, aside from some compatibility code for the try scheduler. The ``debugPassword`` configuration option is no longer needed and is thus deprecated. * The undocumented and un-tested ``TinderboxMailNotifier``, designed to send emails suitable for the abandoned and insecure Tinderbox tool, has been removed. * Buildslave info is no longer available via :ref:`Interpolate` and the ``SetSlaveInfo`` buildstep has been removed. * The undocumented ``path`` parameter of the :bb:step:`MasterShellCommand` buildstep has been renamed ``workdir`` for better consistency with the other steps. * The name and source of a Property have to be unicode or ascii string. * Property values must be serializable in JSON. * :bb:reporter:`IRC` has the following changes: - categories parameter is deprecated and removed. It should be replaced with tags=[cat] - noticeOnChannel parameter is deprecated and removed. * workdir behavior has been unified: - ``workdir`` attribute of steps is now a property in :py:class:`~buildbot.process.buildstep.BuildStep`, and choose the workdir given following priority: * workdir of the step, if defined * workdir of the builder (itself defaults to 'build') - setDefaultWorkdir() has been deprecated, but is now behaving the same for all the steps: Setting self.workdir if not already set * :bb:step:`Trigger` now has a ``getSchedulersAndProperties`` method that can ve overridden to support dynamic triggering. * ```master.cfg`` is now parsed from a thread. 
Previously it was run in the main thread, and thus slowing down the master in case of big config, or network access done to generate the config. * :bb:chsrc:`SVNPoller`'s svnurl parameter has been changed to repourl. Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * Botmaster no longer service parent for buildslaves. Service parent functionality has been transferred to BuildslaveManager. It should be noted Botmaster no longer has a ``slaves`` field as it was moved to BuildslaveManager. * The sourcestamp DB connector now returns a ``patchid`` field. * Buildbot no longer polls the database for jobs. The ``db_poll_interval`` configuration parameter and the :bb:cfg:`db` key of the same name are deprecated and will be ignored. * The interface for adding changes has changed. The new method is ``master.data.updates.addChange`` (implemented by :py:meth:`~buildbot.data.changes.ChangeResourceType.addChange`), although the old interface (``master.addChange``) will remain in place for a few versions. The new method: * returns a change ID, not a Change instance; * takes its ``when_timestamp`` argument as epoch time (UNIX time), not a datetime instance; and * does not accept the deprecated parameters ``who``, ``isdir``, ``is_dir``, and ``when``. * requires that all strings be unicode, not bytestrings. Please adjust any custom change sources accordingly. * A new build status, CANCELLED, has been added. It is used when a step or build is deliberately cancelled by a user. * This upgrade will delete all rows from the ``buildrequest_claims`` table. If you are using this table for analytical purposes outside of Buildbot, please back up its contents before the upgrade, and restore it afterward, translating object IDs to scheduler IDs if necessary. This translation would be very slow and is not required for most users, so it is not done automatically. * All of the schedulers DB API methods now accept a schedulerid, rather than an objectid. 
If you have custom code using these methods, check your code and make the necessary adjustments. * The ``addBuildsetForSourceStamp`` method has become ``addBuildsetForSourceStamps``, and its signature has changed. The ``addBuildsetForSourceStampSetDetails`` method has become ``addBuildsetForSourceStampsWithDefaults``, and its signature has changed. The ``addBuildsetForSourceStampDetails`` method has been removed. The ``addBuildsetForLatest`` method has been removed. It is equivalent to ``addBuildsetForSourceStampDetails`` with ``sourcestamps=None``. These methods are not yet documented, and their interface is not stable. Consult the source code for details on the changes. * The ``preStartConsumingChanges`` and ``startTimedSchedulerService`` hooks have been removed. * The triggerable schedulers' ``trigger`` method now requires a list of sourcestamps, rather than a dictionary. * The :py:class:`~buildbot.sourcestamp.SourceStamp` class is no longer used. It remains in the codebase to support loading data from pickles on upgrade, but should not be used in running code. * The :py:class:`~buildbot.process.buildrequest.BuildRequest` class no longer has full ``source`` or ``sources`` attributes. Use the data API to get this information (which is associated with the buildset, not the build request) instead. * The undocumented ``BuilderControl`` method ``submitBuildRequest`` has been removed. * The debug client no longer supports requesting builds (the ``requestBuild`` method has been removed). If you have been using this method in production, consider instead creating a new change source, using the :bb:sched:`ForceScheduler`, or using one of the try schedulers. * The ``buildbot.misc.SerializedInvocation`` class has been removed; use :py:func:`buildbot.util.debounce.method` instead. * The ``progress`` attributes of both :py:class:`buildbot.process.buildstep.BuildStep` and :py:class:`buildbot.process.build.Build` have been removed. 
Subclasses should only be accessing the progress-tracking mechanics via the :py:meth:`buildbot.process.buildstep.BuildStep.setProgress` method. Slave ----- Features ~~~~~~~~ Fixes ~~~~~ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * buildmaster and buildslave no longer supports old-style source steps. * On Windows, if a :bb:step:`ShellCommand` step in which ``command`` was specified as a list is executed, and a list element is a string consisting of a single pipe character, it no longer creates a pipeline. Instead, the pipe character is passed verbatim as an argument to the program, like any other string. This makes command handling consistent between Windows and Unix-like systems. To have a pipeline, specify ``command`` as a string. Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.8.10..v0.9.0b1 buildbot-3.4.0/master/docs/relnotes/0.9.0b2.rst000066400000000000000000000047411413250514000210230ustar00rootroot00000000000000Release Notes for Buildbot 0.9.0b2 ================================== .. Any change that adds a feature or fixes a bug should have an entry here. Most simply need an additional bulleted list item, but more significant changes can be given a subsection of their own. The following are the release notes for Buildbot 0.9.0b2. Buildbot 0.9.0b2 was released on August, 2 2015. Master ------ Features ~~~~~~~~ * Mercurial hook was updated and modernized. It is no longer necessary to fork. One can now extend PYTHONPATH via the hook configuration. Among others, it permits to use a buildbot virtualenv instead of installing buildbot in all the system. Added documentation inside the hook. Misc. clean-up and reorganization in order to make the code a bit more readable. * UI templates can now be customizable. 
You can provide html or jade overrides to the www plugins, to customize the UI * UI side bar is now fixed by default for large screens. Fixes ~~~~~ * Fix setup for missing www.hooks module * Fix setup to install only on recents version of pip (>=1.4). This prevents unexpected upgrade to nine from people who just use ``pip install -U buildbot`` * Fix a crash in the git hook. * Add checks to enforce slavenames are identifiers. Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * The :py:class:`~buildbot.config.BuilderConfig` ``nextSlave`` keyword argument takes a callable. This callable now receives :py:class:`~buildbot.process.buildrequest.BuildRequest` instance in its signature as 3rd parameter. **For retro-compatibility, all callable taking only 2 parameters will still work**. * Data api provides a way to query the build list per slave. * Data api provides a way to query some build properties in a build list. Slave ----- * ``buildbot-slave`` now requires Python 2.6 Features ~~~~~~~~ * Schedulers: the ``codebases`` parameter can now be specified in a simple list-of-strings form. Fixes ~~~~~ * Fix two race conditions in the integration tests Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Providing Latent AWS EC2 credentials by the :file:`.ec2/aws_id` file is deprecated: Use the standard :file:`.aws/credentials` file, instead. Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0b1..v0.9.0b2 buildbot-3.4.0/master/docs/relnotes/0.9.0b3.rst000066400000000000000000000035461413250514000210260ustar00rootroot00000000000000Release Notes for Buildbot 0.9.0b3 ================================== The following are the release notes for Buildbot 0.9.0b3. This version was released on October 18, 2015. 
See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ Features ~~~~~~~~ * The irc command ``hello`` now returns 'Hello' in a random language if invoked more than once. * :bb:sched:`Triggerable` now accepts a ``reason`` parameter. * :bb:reporter:`GerritStatusPush` now accepts a ``builders`` parameter. * `StatusPush` callback now receives build results (success/failure/etc) with the ``buildFinished`` event. * There's a new renderable type, :ref:`Transform`. * Buildbot now supports wamp as a mq backend. This allows to run a multi-master configuration. See :ref:`MQ-Specification`. Fixes ~~~~~ * The :bb:step:`PyFlakes` and :bb:step:`PyLint` steps no longer parse output in Buildbot log headers (:bug:`3337`). * :bb:chsrc:`GerritChangeSource` is now less verbose by default, and has a ``debug`` option to enable the logs. * :bb:chsrc:`P4Source` no longer relies on the perforce server time to poll for new changes. * The commit message for a change from :bb:chsrc:`P4Source` now matches what the user typed in. Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * The :py:mod:`buildbot.status.results` module no longer exists and has been renamed to :py:mod:`buildbot.process.results`. Slave ----- Features ~~~~~~~~ * The Buildbot slave now includes the number of CPUs in the information it supplies to the master on connection. This value is autodetected, but can be overridden with the ``--numcpus`` argument to ``buildslave create-slave``. Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0b2..v0.9.0b3 buildbot-3.4.0/master/docs/relnotes/0.9.0b4.rst000066400000000000000000000014441413250514000210220ustar00rootroot00000000000000Release Notes for Buildbot 0.9.0b4 ================================== The following are the release notes for Buildbot 0.9.0b4 This version was released on October 20, 2015. 
See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ This version is very similar to 0.9.0b3, re-released due to issues with PyPI uploads. Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * The data API's ``startConsuming`` method has been removed. Instead of calling this method with a data API path, call ``self.master.mq.startConsuming`` with an appropriate message routing pattern. Slave ----- No changes since 0.9.0b3. Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0b3..v0.9.0b4 buildbot-3.4.0/master/docs/relnotes/0.9.0b5.rst000066400000000000000000000013231413250514000210170ustar00rootroot00000000000000Release Notes for Buildbot 0.9.0b5 ================================== The following are the release notes for Buildbot 0.9.0b5. This version was released on October 21, 2015. See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ This version addresses http://trac.buildbot.net/wiki/SecurityAlert090b4 by preventing dissemination of hook information via the web UI. This also reverts the addition of the frontend data service in 0.9.0b4, as that contained many bugs. It will be re-landed in a subsequent release. Slave ----- No changes. For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0b4..0.9.0b5 buildbot-3.4.0/master/docs/relnotes/0.9.0b6.rst000066400000000000000000000030261413250514000210220ustar00rootroot00000000000000Release Notes for Buildbot 0.9.0b6 ================================== The following are the release notes for Buildbot 0.9.0b6 This version was released on January 20, 2016. 
See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ Features ~~~~~~~~ * Builders ui page has improved tag filtering capabilities * Home page enhanced with the list of recent builds sorted by builder * :bb:reporter:`IRC` reporter has been partially ported to work on data api. Fixes ~~~~~ * better stability and reliability in the UI thanks to switch to buildbot data-module * fix irc Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * properties object is now directly present in build, and not in build_status. This should not change much unless you try to access your properties via step.build.build_status. Remember that with PropertiesMixin, you can access properties via getProperties on the steps, and on the builds objects. * :ref:`WWW-data-module` is now integrated, which sets a definitive API for accessing buildbot data in angularJS UI. Slave ----- Features ~~~~~~~~ * The :class:`DockerLatentBuildSlave` image attribute is now renderable (can take properties in account). * The :class:`DockerLatentBuildSlave` sets environment variables describing how to connect to the master. Example dockerfiles can be found in :contrib-src:`master/contrib/docker`. Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0b5..v0.9.0b6 buildbot-3.4.0/master/docs/relnotes/0.9.0b7.rst000066400000000000000000000024221413250514000210220ustar00rootroot00000000000000Release Notes for Buildbot 0.9.0b7 ================================== The following are the release notes for Buildbot 0.9.0b7 This version was released on February 14, 2016. 
See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ Features ~~~~~~~~ Fixes ~~~~~ * Fix incompatibility with MySQL-5.7 (:bug:`3421`) * Fix incompatibility with postgresql driver psycopg2 (:bug:`3419`, further regressions will be caught by travis) * Fix regressions in forcescheduler UI (:bug:`3416`, :bug:`3418`, :bug:`3422`) Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * The ``buildbot`` Python dist now (finally) requires SQLAlchemy-0.8.0 or later and SQLAlchemy-Migrate-0.9.0 or later. While the old pinned versions (0.7.10 and 0.7.2, respectively) still work, this compatibility is no longer tested and this configuration should be considered deprecated. Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ Slave ----- Features ~~~~~~~~ Fixes ~~~~~ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0b6..v0.9.0b7 buildbot-3.4.0/master/docs/relnotes/0.9.0b8.rst000066400000000000000000000547131413250514000210350ustar00rootroot00000000000000.. _0.9.0b8: Release Notes for Buildbot 0.9.0b8 ================================== The following are the release notes for Buildbot 0.9.0b8 This version was released on April 11, 2016. See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ Features ~~~~~~~~ * :class:`GitPoller` now has a ``buildPushesWithNoCommits`` option to allow the rebuild of already known commits on new branches. * Add GitLab authentication plugin for web UI. See :class:`buildbot.www.oauth2.GitLabAuth`. * :class:`DockerLatentWorker` now has a ``hostconfig`` parameter that can be used to setup host configuration when creating a new container. * :class:`DockerLatentWorker` now has a ``networking_config`` parameter that can be used to setup container networks. 
* The :class:`DockerLatentWorker` ``volumes`` attribute is now renderable. * :bb:step:`CMake` build step is added. It provides a convenience interface to `CMake `_ build system. * MySQL InnoDB tables are now supported. * :class:`~buildbot.reporters.http.HttpStatusPush` has been ported to reporter API. * :class:`~buildbot.reporters.stash.StashStatusPush` has been ported to reporter API. * ``GithubStatusPush`` has been ported to reporter API. * `summaryCB` of :bb:reporter:`GerritStatusPush` now gets not only pre-processed information but the actual build as well. * EC2LatentWorker supports VPCs, instance profiles, and advanced volume mounts. Fixes ~~~~~ * Fix loading :class:`~buildbot.ldapuserinfo.LdapUserInfo` plugin and its documentation (:bug:`3371`). * Fix deprecation warnings seen with docker-py >= 1.4 when passing arguments to ``docker_client.start()``. * :class:`GitHubEventHandler` now uses the ``['repository']['html_url']`` key in the webhook payload to populate ``repository``, as the previously used ``['url']`` and ``['clone_url']`` keys had a different format between push and pull requests and GitHub and GitHub Enterprise instances. * Fix race condition where log compression could lead to empty log results in reporter api * Error while applying db upgrade is now properly reported in the buildbot upgrade-master command line. * Made :class:`Interpolate` safe for deepcopy or serialization/deserialization * Optimized UI REST requests for child builds and change page. * Fix :class:`DockerLatentWorker` use of `volume` parameter, they now properly manage `src:dest` syntax. * Fix :class:`DockerLatentWorker` to properly create properties so that docker parameters can be renderable. * Lock down autobahn version for python 2.6 (note that autobahn and twisted are no longer supporting 2.6, and thus do not receive security fixes anymore). * Fix docs and example to always use port 8020 for the web ui. 
Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Deprecated ``workdir`` property has been removed, ``builddir`` property should be used instead. * To support MySQL InnoDB, the size of six VARCHAR(256) columns ``changes.(author, branch, category, name); object_state.name; user.identifier`` was reduced to VARCHAR(255). * :class:`~buildbot.status.status_push.StatusPush` has been removed from buildbot. Please use the much simpler :class:`~buildbot.reporters.http.HttpStatusPush` instead. Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ Worker changes described in below worker section will probably impact a buildbot developer who uses undocumented '*slave*' API. Undocumented APIs have been replaced without failover, so any custom code that uses it shall be updated with new undocumented API. Worker ------ Package `buildbot-slave` is being renamed `buildbot-worker`. As the work is not completely finished, neither `buildbot-slave==0.9.0b8` or `buildbot-worker==0.9.0b8` have been released. You can safely use any version of `buildbot-slave` with `buildbot==0.9.0b8`, either `buildbot-slave==0.8.12` or `buildbot-slave==0.9.0b7`. Transition to "worker" terminology ---------------------------------- Since version 0.9.0 of Buildbot "slave"-based terminology is deprecated in favor of "worker"-based terminology. For details about public API changes see :ref:`Transition-to-worker-terminology`. API changes done without providing fallback: .. 
list-table:: :header-rows: 1 * - Old name - New name * - :py:mod:`buildbot.buildslave.manager` - :py:mod:`buildbot.worker.manager` * - :py:class:`buildbot.buildslave.manager.BuildslaveRegistration` - :py:class:`buildbot.worker.manager.WorkerRegistration` * - :py:class:`buildbot.buildslave.manager.BuildslaveRegistration.buildslave` - :py:class:`buildbot.worker.manager.WorkerRegistration.worker` * - :py:class:`buildbot.buildslave.manager.BuildslaveManager` - :py:class:`buildbot.worker.manager.WorkerManager` * - :py:attr:`buildbot.buildslave.manager.BuildslaveManager.slaves` - :py:attr:`buildbot.worker.manager.WorkerManager.workers` * - :py:meth:`buildbot.buildslave.manager.BuildslaveManager.getBuildslaveByName` - :py:meth:`buildbot.worker.manager.WorkerManager.getWorkerByName` * - :py:class:`buildbot.buildslave.docker.DockerLatentBuildSlave` - :py:class:`buildbot.worker.docker.DockerLatentWorker` * - :py:class:`buildbot.buildslave.local.LocalBuildSlave` - :py:class:`buildbot.worker.local.LocalWorker` * - :py:attr:`buildbot.buildslave.local.LocalBuildSlave.LocalBuildSlaveFactory` - :py:attr:`buildbot.worker.local.LocalWorker.LocalWorkerFactory` * - :py:attr:`buildbot.buildslave.local.LocalBuildSlave.remote_slave` - :py:attr:`buildbot.worker.local.LocalWorker.remote_worker` * - :py:mod:`buildbot.buildslave.base` module with all contents - :py:mod:`buildbot.worker.base` * - :py:meth:`buildbot.buildslave.AbstractBuildSlave.updateSlave` - :py:meth:`buildbot.worker.AbstractWorker.updateWorker` * - :py:attr:`buildbot.buildslave.AbstractBuildSlave.slavebuilders` - :py:attr:`buildbot.worker.AbstractWorker.workerforbuilders` * - :py:meth:`buildbot.buildslave.AbstractBuildSlave.updateSlaveStatus` - :py:meth:`buildbot.worker.AbstractWorker.updateWorkerStatus` * - :py:meth:`buildbot.buildslave.AbstractLatentBuildSlave.updateSlave` - :py:meth:`buildbot.worker.AbstractLatentWorker.updateWorker` * - :py:class:`buildbot.buildslave.BuildSlave.slave_status` - 
:py:class:`buildbot.worker.Worker.worker_status` * - :py:meth:`buildbot.config.MasterConfig.load_slaves` - :py:meth:`~buildbot.config.MasterConfig.load_workers` * - :py:attr:`buildbot.master.BuildMaster.buildslaves` - :py:attr:`buildbot.master.BuildMaster.workers` * - :py:attr:`buildbot.process.build.Build.slavebuilder` - :py:attr:`~buildbot.process.build.Build.workerforbuilder` * - :py:meth:`buildbot.process.build.Build.setSlaveEnvironment` - :py:meth:`~buildbot.process.build.Build.setWorkerEnvironment` * - :py:attr:`buildbot.process.build.Build.slaveEnvironment` - :py:attr:`~buildbot.process.build.Build.workerEnvironment` * - :py:meth:`buildbot.process.build.Build.getSlaveCommandVersion` - :py:meth:`~buildbot.process.build.Build.getWorkerCommandVersion` * - :py:meth:`buildbot.process.build.Build.setupSlaveBuilder` - :py:meth:`~buildbot.process.build.Build.setupWorkerForBuilder` * - :py:meth:`buildbot.process.builder.Build.canStartWithSlavebuilder` - :py:meth:`~buildbot.process.builder.Build.canStartWithWorkerForBuilder` * - :py:meth:`buildbot.process.slavebuilder.AbstractSlaveBuilder.getSlaveCommandVersion` - :py:meth:`buildbot.process.workerforbuilder.AbstractWorkerForBuilder.getWorkerCommandVersion` * - :py:meth:`buildbot.process.slavebuilder.AbstractSlaveBuilder.attached` method argument ``slave`` was renamed - ``worker`` * - :py:attr:`buildbot.buildslave.AbstractBuildSlave.slave_commands` - :py:attr:`buildbot.worker.AbstractWorker.worker_commands` * - :py:attr:`buildbot.buildslave.AbstractBuildSlave.slave_environ` - :py:attr:`buildbot.worker.AbstractWorker.worker_environ` * - :py:attr:`buildbot.buildslave.AbstractBuildSlave.slave_basedir` - :py:attr:`buildbot.worker.AbstractWorker.worker_basedir` * - :py:attr:`buildbot.buildslave.AbstractBuildSlave.slave_system` - :py:attr:`buildbot.worker.AbstractWorker.worker_system` * - :py:attr:`buildbot.buildslave.AbstractBuildSlave.buildslaveid` - :py:attr:`buildbot.worker.AbstractWorker.workerid` * - 
:py:meth:`buildbot.buildslave.AbstractBuildSlave.addSlaveBuilder` - :py:meth:`buildbot.worker.AbstractWorker.addWorkerForBuilder` * - :py:meth:`buildbot.buildslave.AbstractBuildSlave.removeSlaveBuilder` - :py:meth:`buildbot.worker.AbstractWorker.removeWorkerForBuilder` * - :py:meth:`buildbot.buildslave.AbstractBuildSlave.messageReceivedFromSlave` - :py:meth:`buildbot.worker.AbstractWorker.messageReceivedFromWorker` * - :py:meth:`buildbot.process.slavebuilder.LatentSlaveBuilder` constructor positional argument ``slave`` was renamed - ``worker`` * - :py:attr:`buildbot.process.buildrequestdistributor.BasicBuildChooser.nextSlave` - :py:attr:`~buildbot.process.buildrequestdistributor.BasicBuildChooser.nextWorker` * - :py:attr:`buildbot.process.buildrequestdistributor.BasicBuildChooser.slavepool` - :py:attr:`~buildbot.process.buildrequestdistributor.BasicBuildChooser.workerpool` * - :py:attr:`buildbot.process.buildrequestdistributor.BasicBuildChooser.preferredSlaves` - :py:attr:`~buildbot.process.buildrequestdistributor.BasicBuildChooser.preferredWorkers` * - :py:attr:`buildbot.process.buildrequestdistributor.BasicBuildChooser.rejectedSlaves` - :py:attr:`~buildbot.process.buildrequestdistributor.BasicBuildChooser.rejectedSlaves` * - :py:attr:`buildbot.steps.shell.ShellCommand.slaveEnvironment` (Note: this variable is renderable) - :py:attr:`buildbot.steps.shell.ShellCommand.workerEnvironment` * - :py:mod:`buildbot.status.slave` - :py:mod:`buildbot.status.worker` * - :py:class:`buildbot.status.slave.SlaveStatus` - :py:class:`buildbot.status.worker.WorkerStatus` * - :py:meth:`buildbot.interfaces.IStatusReceiver.slaveConnected` with all implementations - :py:meth:`buildbot.interfaces.IStatusReceiver.workerConnected` * - :py:meth:`buildbot.interfaces.IStatusReceiver.slaveDisconnected` with all implementations - :py:meth:`buildbot.interfaces.IStatusReceiver.workerDisconnected` * - :py:meth:`buildbot.status.master.Status.slaveConnected` - 
:py:meth:`buildbot.status.master.Status.workerConnected` * - :py:meth:`buildbot.status.master.Status.slaveDisconnected` - :py:meth:`buildbot.status.master.Status.workerDisconnected` * - :py:meth:`buildbot.status.master.Status.slavePaused` - :py:meth:`buildbot.status.master.Status.workerPaused` * - :py:meth:`buildbot.status.master.Status.slaveUnpaused` - :py:meth:`buildbot.status.master.Status.workerUnpaused` * - :py:attr:`buildbot.status.master.Status.buildslaves` - :py:attr:`buildbot.status.master.Status.workers` * - :py:meth:`buildbot.status.base.StatusReceiverBase.slavePaused` - :py:meth:`buildbot.status.base.StatusReceiverBase.workerPaused` * - :py:meth:`buildbot.status.base.StatusReceiverBase.slaveUnpaused` - :py:meth:`buildbot.status.base.StatusReceiverBase.workerUnpaused` * - :py:meth:`buildbot.interfaces.IStatus.getSlaveNames` with all implementations - :py:meth:`buildbot.interfaces.IStatus.getWorkerNames` * - :py:meth:`buildbot.interfaces.IStatus.getSlave` with all implementations - :py:meth:`buildbot.interfaces.IStatus.getWorker` * - :py:meth:`buildbot.interfaces.IBuildStatus.getSlavename` with all implementations - :py:meth:`buildbot.interfaces.IBuildStatus.getWorkername` * - :py:meth:`buildbot.status.build.BuildStatus.setSlavename` - :py:meth:`buildbot.status.build.BuildStatus.setWorkername` * - :py:attr:`buildbot.status.build.BuildStatus.slavename` - :py:attr:`buildbot.status.build.BuildStatus.workername` (also it was moved from class static attribute to instance attribute) * - :py:meth:`buildbot.interfaces.IBuilderStatus.getSlaves` with all implementations - :py:meth:`buildbot.interfaces.IBuilderStatus.getWorkers` * - :py:attr:`buildbot.status.builder.BuilderStatus.slavenames` - :py:attr:`buildbot.status.builder.BuilderStatus.workernames` * - :py:meth:`buildbot.status.builder.BuilderStatus.setSlavenames` - :py:meth:`buildbot.status.builder.BuilderStatus.setWorkernames` * - :py:meth:`buildbot.process.botmaster.BotMaster.slaveLost` - 
:py:meth:`buildbot.process.botmaster.BotMaster.workerLost` * - :py:meth:`buildbot.process.botmaster.BotMaster.getBuildersForSlave` - :py:meth:`buildbot.process.botmaster.BotMaster.getBuildersForWorker` * - :py:meth:`buildbot.process.botmaster.BotMaster.maybeStartBuildsForSlave` - :py:meth:`buildbot.process.botmaster.BotMaster.maybeStartBuildsForWorker` * - :py:class:`buildbot.locks.RealSlaveLock` - :py:class:`buildbot.locks.RealWorkerLock` * - :py:attr:`buildbot.locks.RealSlaveLock.maxCountForSlave` - :py:attr:`buildbot.locks.RealWorkerLock.maxCountForWorker` * - :py:class:`buildbot.protocols.base.Connection` constructor positional argument ``buildslave`` was renamed - ``worker`` * - :py:attr:`buildbot.protocols.base.Connection.buidslave` - :py:attr:`buildbot.protocols.base.Connection.worker` * - :py:meth:`buildbot.protocols.base.Connection.remoteGetSlaveInfo` - :py:meth:`buildbot.protocols.base.Connection.remoteGetWorkerInfo` * - :py:class:`buildbot.protocols.pb.Connection` constructor positional argument ``buildslave`` was renamed - ``worker`` Other changes done without providing fallback: * Functions argument ``buildslaveName`` renamed to ``workerName``. * Loop variables, local variables, helper functions: .. list-table:: :header-rows: 1 * - Old name - New name * - ``s`` - ``w`` or ``worker`` * - ``sl`` - ``w`` or ``worker`` * - ``bs`` ("buildslave") - ``w`` * - ``sb`` - ``wfb`` ("worker for builder") * - ``bs1()``, ``bs2()`` - ``w1()``, ``w2()`` * - ``bslave`` - ``worker`` * - ``BS1_NAME``, ``BS1_ID``, ``BS1_INFO`` - ``W1_NAME``, ``W1_ID``, ``W1_INFO`` * In :py:meth:`buildbot.config.BuilderConfig.getConfigDict` result ``'slavenames'`` key changed to ``'workernames'``; ``'slavebuilddir'`` key changed to ``'workerbuilddir'``; ``'nextSlave'`` key changed to ``'nextWorker'``. * :py:meth:`buildbot.process.builder.BuilderControl.ping` now generates ``["ping", "no worker"]`` event, instead of ``["ping", "no slave"]``. 
* ``buildbot.plugins.util.WorkerChoiceParameter`` (previously ``BuildslaveChoiceParameter``) label was changed from ``Build slave`` to ``Worker``. * ``buildbot.plugins.util.WorkerChoiceParameter`` (previously ``BuildslaveChoiceParameter``) default name was changed from ``slavename`` to ``workername``. * ``buildbot.status.builder.SlaveStatus`` fallback was removed. ``SlaveStatus`` was moved to ``buildbot.status.builder.slave`` previously, and now it's :py:class:`buildbot.status.worker.WorkerStatus`. * :py:mod:`buildbot.status.status_push.StatusPush` events generation changed (this module will be completely removed in 0.9.x): - instead of ``slaveConnected`` with data ``slave=...`` now generated ``workerConnected`` event with data ``worker=...``; - instead of ``slaveDisconnected`` with data ``slavename=...`` now generated ``workerDisconnected`` with data ``workername=...``; - instead of ``slavePaused`` with data ``slavename=...`` now generated ``workerPaused`` event with data ``workername=...``; - instead of ``slaveUnpaused`` with data ``slavename=...`` now generated ``workerUnpaused`` event with data ``workername=...``; * :py:meth:`buildbot.status.build.BuildStatus.asDict` returns worker name under ``'worker'`` key, instead of ``'slave'`` key. * :py:meth:`buildbot.status.builder.BuilderStatus.asDict` returns worker names under ``'workers'`` key, instead of ``'slaves'`` key. * Definitely privately used "slave"-named variables and attributes were renamed, including tests modules, classes and methods. Database ~~~~~~~~ Database API changes done without providing fallback. .. 
list-table:: :header-rows: 1 * - Old name - New name * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.getBuildslaves` (rewritten in nine) and :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.getBuildslave` (introduced in nine) results uses instead of ``'slaveinfo'`` key - ``'workerinfo'`` key * - :py:attr:`buildbot.db.model.Model.buildslaves` - :py:attr:`buildbot.db.model.Model.workers` * - :py:attr:`buildbot.db.model.Model.configured_buildslaves` - :py:attr:`buildbot.db.model.Model.configured_workers` * - :py:attr:`buildbot.db.model.Model.connected_buildslaves` - :py:attr:`buildbot.db.model.Model.connected_workers` * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.findBuildslaveId` (introduced in nine) - :py:meth:`buildbot.db.workers.WorkersConnectorComponent.findWorkerId` * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.deconfigureAllBuidslavesForMaster` (introduced in nine, note typo ``Buidslaves``) - :py:meth:`buildbot.db.workers.WorkersConnectorComponent.deconfigureAllWorkersForMaster` * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.buildslaveConfigured` (introduced in nine) - :py:meth:`buildbot.db.workers.WorkersConnectorComponent.workerConfigured` * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.buildslaveConfigured` method argument ``buildslaveid`` was renamed (introduced in nine) - ``workerid`` * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.getBuildslave` - :py:meth:`buildbot.db.workers.WorkersConnectorComponent.getWorker` * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.getBuildslaves` method argument ``_buildslaveid`` was renamed (introduced in nine) - ``_workerid`` * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.buildslaveConnected` (introduced in nine) - :py:meth:`buildbot.db.workers.WorkersConnectorComponent.workerConnected` * - 
:py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.buildslaveConnected` method argument ``slaveinfo`` was renamed (introduced in nine) - ``workerinfo`` * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.buildslaveConnected` method argument ``buildslaveid`` was renamed (introduced in nine) - ``workerid`` * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.buildslaveDisconnected` (introduced in nine) - :py:meth:`buildbot.db.workers.WorkersConnectorComponent.workerDisconnected` * - :py:meth:`buildbot.db.buildslaves.BuildslavesConnectorComponent.buildslaveDisconnected` method argument ``buildslaveid`` was renamed (introduced in nine) - ``workerid`` * - :py:meth:`buildbot.db.builds.BuildsConnectorComponent.getBuilds` method argument ``buildslaveid`` was renamed (introduced in nine) - ``workerid`` * - :py:meth:`buildbot.db.builds.BuildsConnectorComponent.addBuild` method argument ``buildslaveid`` was renamed (introduced in nine) - ``workerid`` * - :py:class:`buildbot.reporters.message.MessageFormatter` template variable ``slavename`` - ``workername`` Data API ~~~~~~~~ Python API changes: .. 
list-table:: :header-rows: 1 * - Old name - New name * - :py:mod:`buildbot.data.buildslaves` - :py:mod:`~buildbot.data.workers` * - :py:class:`buildbot.data.buildslaves.BuildslaveEndpoint` - :py:class:`~buildbot.data.workers.WorkerEndpoint` * - :py:class:`buildbot.data.buildslaves.BuildslavesEndpoint` - :py:class:`~buildbot.data.workers.WorkersEndpoint` * - :py:class:`buildbot.data.buildslaves.Buildslave` - :py:class:`~buildbot.data.workers.Worker` * - :py:meth:`buildbot.data.buildslaves.Buildslave.buildslaveConfigured` - :py:meth:`~buildbot.data.workers.Worker.workerConfigured` * - :py:meth:`buildbot.data.buildslaves.Buildslave.findBuildslaveId` - :py:meth:`~buildbot.data.workers.Worker.findWorkerId` * - :py:meth:`buildbot.data.buildslaves.Buildslave.buildslaveConnected` - :py:meth:`~buildbot.data.workers.Worker.workerConnected` * - :py:meth:`buildbot.data.buildslaves.Buildslave.buildslaveDisconnected` - :py:meth:`~buildbot.data.workers.Worker.workerDisconnected` * - :py:meth:`buildbot.data.buildslaves.Buildslave.deconfigureAllBuidslavesForMaster` - :py:meth:`~buildbot.data.workers.Worker.deconfigureAllWorkersForMaster` * - ``buildslaveid`` in function arguments and API specification - ``workerid`` * - ``slaveinfo`` in function arguments and API specification - ``workerinfo`` Changed REST endpoints: .. 
list-table:: :header-rows: 1 * - Old name - New name * - ``/buildslaves`` - ``/workers`` * - ``/buildslaves/n:buildslaveid`` - ``/workers/n:workerid`` * - ``/buildslaves/n:buildslaveid/builds`` - ``/workers/n:workerid/builds`` * - ``/buildslaves/:buildslaveid/builds/:buildid`` - ``/workers/:workerid/builds/:buildid`` * - ``/masters/n:masterid/buildslaves`` - ``/masters/n:masterid/workers`` * - ``/masters/n:masterid/buildslaves/n:buildslaveid`` - ``/masters/n:masterid/workers/n:workerid`` * - ``/masters/n:masterid/builders/n:builderid/buildslaves`` - ``/masters/n:masterid/builders/n:builderid/workers`` * - ``/masters/n:masterid/builders/n:builderid/buildslaves/n:buildslaveid`` - ``/masters/n:masterid/builders/n:builderid/workers/n:workerid`` * - ``/builders/n:builderid/buildslaves`` - ``/builders/n:builderid/workers`` * - ``/builders/n:builderid/buildslaves/n:buildslaveid`` - ``/builders/n:builderid/workers/n:workerid`` Changed REST object keys: .. list-table:: :header-rows: 1 * - Old name - New name * - ``buildslaveid`` - ``workerid`` * - ``slaveinfo`` - ``workerinfo`` * - ``buildslave`` - ``worker`` * - ``buildslaves`` - ``workers`` ``data_module`` version bumped from ``1.2.0`` to ``2.0.0``. Web UI ~~~~~~ In base web UI (``www/base``) and Material Design web UI (``www/md_base``) all "slave"-named messages and identifiers were renamed to use "worker" names and new REST API endpoints. MQ layer ~~~~~~~~ ``buildslaveid`` key in messages were replaced with ``workerid``. Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0b7..v0.9.0b8 buildbot-3.4.0/master/docs/relnotes/0.9.0b9.rst000066400000000000000000000074051413250514000210320ustar00rootroot00000000000000Release Notes for Buildbot 0.9.0b9 ================================== The following are the release notes for Buildbot 0.9.0b9 This version was released on May 10, 2016. 
See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ Features ~~~~~~~~ * new :bb:reporter:`GitLabStatusPush` to report builds results to GitLab. * ``buildbot stop`` now waits for complete buildmaster stop by default. * New ``--no-wait`` argument for ``buildbot stop`` which allows not to wait for complete master shutdown. * Builder page is now sorted by builder name * LogViewer page now supports ANSI color codes, and is displayed white on black. Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * Speed improvements for integration tests by use of SynchronousTestCase, and in-memory sqlite. * Buildbot now requires import to be sorted using `isort `_. Please run ``make isort`` before creating a PR or use any available editor plugin in order to reorder your imports. Fixes ~~~~~ * OpenStackLatentWorker uses the novaclient API correctly now. * The :bb:step:`MsBuild4` and :bb:step:`MsBuild12` steps work again (:bug:`2878`). * Scheduler changes are now identified by serviceid instead of objectid (:bug:`3532`) * Make groups optional in LdapUserInfo (:bug:`3511`) * Buildbot nine do not write pickles anymore in the master directory * Fix build page to not display build urls, but rather directly the build-summary, which already contain the URL. * UI Automatically reconnect on disconnection from the websocket. (:bug:`3462`) Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * The buildmaster now requires at least Twisted-14.0.1. * The web ui has upgrade its web components dependencies to `latest versions `_. This can impact web-ui plugin. * Web server does not provide /png and /redirect anymore (:bug:`3357`). This functionality is used to implement build status images. This should be easy to implement if you need it. One could port the old image generation code, or implement a redirection to http://shields.io/. * Support of worker-side ``usePTY`` was removed from ``buildbot-worker``. 
``usePTY`` argument was removed from ``WorkerForBuilder`` and ``Worker`` classes. * html is no longer permitted in 'label' attributes of forcescheduler parameters. * ``LocalWorker`` now requires ``buildbot-worker`` package, instead of ``buildbot-slave``. * :ref:`Collapse-Request-Functions` now takes master as first argument. The previous callable contained too few data in order to be really usable. As collapseRequests has never been released outside of beta, backward compatibility with previous release has **not** been implemented. * This is the last version of buildbot nine which supports python 2.6 for the master. Next version will drop python 2.6 support. Worker ------ Fixes ~~~~~ * ``buildbot-worker`` script now outputs message to terminal. * Windows helper script now called ``buildbot-worker.bat`` (was ``buildbot_worker.bat``, notice underscore), so that ``buildbot-worker`` command can be used in virtualenv both on Windows and POSIX systems. Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * ``SLAVEPASS`` environment variable is not removed in default-generated ``buildbot.tac``. Environment variables are cleared in places where they are used (e.g. in Docker Latent Worker contrib scripts). * Master-part handling has been removed from ``buildbot-worker`` log watcher (:bug:`3482`). * ``WorkerDetectedError`` exception type has been removed. Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0b8..v0.9.0b9 buildbot-3.4.0/master/docs/relnotes/0.9.0rc1.rst000066400000000000000000000067121413250514000212050ustar00rootroot00000000000000Release Notes for Buildbot ``0.9.0rc1`` ======================================== The following are the release notes for Buildbot ``0.9.0rc1``. See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ Features ~~~~~~~~ * new ``HipchatStatusPush`` to report build results to Hipchat. 
* new steps for Visual Studio 2015 (VS2015, VC14, and MsBuild14). * The :bb:step:`P4` step now obfuscates the password in status logs. * Added support for specifying the depth of a shallow clone in :bb:step:`Git`. * :bb:worker:`OpenStackLatentWorker` now uses a single novaclient instance to not require re-authentication when starting or stopping instances. * The ``dist`` parameter in :bb:step:`RpmBuild` is now renderable. * new :bb:reporter:`BitbucketStatusPush` to report build results to a Bitbucket Cloud repository. Fixes ~~~~~ * :bb:reporter:`GerritStatusPush` now includes build properties in the ``startCB`` and ``reviewCB`` functions. ``startCB`` now must return a dictionary. * Fix TypeError exception with :py:class:`~buildbot.changes.HgPoller` if ``usetimestamps=False`` is used (:bug:`3562`) * Fix recovery upon master unclean kill or crash (:bug:`3564`) * sqlite access is serialized in order to improve stability (:bug:`3565`) * Docker latent worker has been fixed (:bug:`3571`) Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ Features ~~~~~~~~ Fixes ~~~~~ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Support for python 2.6 was dropped from the master. * ``public_html`` directory is not created anymore in ``buildbot create-master`` (it's not used for some time already). Documentation was updated with suggestions to use third party web server for serving static file. * ``usePTY`` default value has been changed from ``slave-config`` to ``None`` (use of ``slave-config`` will still work). * ``GithubStatusPush`` reporter was renamed to :bb:reporter:`GitHubStatusPush`. Worker ------ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * The ``buildbot-slave`` package has finished being renamed to ``buildbot-worker``. 
Worker ------ Fixes ~~~~~ * ``runGlob()`` uses the correct remote protocol for both :py:class:`~buildbot.process.buildstep.CommandMixin` and :py:class:`~buildbot.steps.worker.ComposititeStepMixin`. * Rename ``glob()`` to ``runGlob()`` in :py:class:`~buildbot.process.buildstep.CommandMixin` Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ * EC2 Latent Worker upgraded from ``boto2`` to ``boto3``. Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Worker commands version bumped to 3.0. * Master/worker protocol has been changed: * ``slave_commands`` key in worker information was renamed to ``worker_commands``. * ``getSlaveInfo`` remote method was renamed to ``getWorkerInfo``. * ``slave-config`` value of ``usePTY`` is not supported anymore. * ``slavesrc`` command argument was renamed to ``workersrc`` in ``uploadFile`` and ``uploadDirectory`` commands. * ``slavedest`` command argument was renamed to ``workerdest`` in ``downloadFile`` command. * Previously deprecated ``WorkerForBuilder.remote_shutdown()`` remote command has been removed. Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0b9..v0.9.0rc1 Note that Buildbot-0.8.11 was never released. buildbot-3.4.0/master/docs/relnotes/0.9.0rc2.rst000066400000000000000000000033221413250514000212000ustar00rootroot00000000000000Release Notes for Buildbot ``0.9.0rc2`` ======================================== The following are the release notes for Buildbot ``0.9.0rc2``. This version was released on August 23, 2016. 
See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ Features ~~~~~~~~ * add a UI button to allow to cancel the whole queue for a builder Fixes ~~~~~ * fix the UI to allow to cancel a buildrequest (:bug:`3582`) * Fix BitbucketPullrequestPoller change detection * Fix customization for template_type in email reporter * fix DockerLatent integration of volumes mounting * misc doc fixes * fix buildbot not booting when builder tags contains duplicates * ``forcesched``: fix owner parameter when no authentication is used * REST: fix problem with twisted 16 error reporting * CORS: format errors according to API type * Dockerfiles fix and upgrade Ubuntu to 16.04 * Fixes #3430 Increased size of builder identifier from 20 to 50 (brings it in line to size of steps and workers in same module). * Fix missing VS2015 entry_points * removed the restriction on twisted < 16.3.0 now that autobahn 0.16.0 fixed the issue Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ Features ~~~~~~~~ Fixes ~~~~~ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * remove repo from worker code (obsoleted by repo master source step) Worker ------ Fixes ~~~~~ Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0rc1..v0.9.0rc2 buildbot-3.4.0/master/docs/relnotes/0.9.0rc3.rst000066400000000000000000000015451413250514000212060ustar00rootroot00000000000000Release Notes for Buildbot ``0.9.0rc3`` ======================================== The following are the release notes for Buildbot ``0.9.0rc3``. This version was released on September 14, 2016. 
See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ Features ~~~~~~~~ * add tool to send usage data to buildbot.net :bb:cfg:`buildbotNetUsageData` Fixes ~~~~~ * Publish python module buildbot.buildslave in the dist files * Upgrade to guanlecoja 0.7 (for compatibility with node6) * Fix invocation of trial on windows, with twisted 16+ * Fix rare issue which makes buildbot throw a exception when there is a sourcestamp with no change for a particular codebase. Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0rc2..v0.9.0rc3 buildbot-3.4.0/master/docs/relnotes/0.9.0rc4.rst000066400000000000000000000011571413250514000212060ustar00rootroot00000000000000Release Notes for Buildbot ``0.9.0rc4`` ======================================== The following are the release notes for Buildbot ``0.9.0rc4``. This version was released on September 28, 2016. See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ Fixes ~~~~~ * Fix the UI to better adapt to different screen width (:bug:`3614`) * Add more REST api documentation (document ``/raw`` endpoints, and ``POST`` actions) Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0rc3..v0.9.0rc4 buildbot-3.4.0/master/docs/relnotes/0.9.1.rst000066400000000000000000000157671413250514000206120ustar00rootroot00000000000000Release Notes for Buildbot ``0.9.1`` ======================================== The following are the release notes for Buildbot ``0.9.1``. This version was released on November 1, 2016. See :ref:`Upgrading to Nine` for a guide to upgrading from 0.8.x to 0.9.x Master ------ Features ~~~~~~~~ * Add support for hyper.sh via :class:`HyperLatentWorker` Hyper_ is a CaaS solution for hosting docker container in the cloud, billed to the second. 
It forms a very cost efficient solution to run your CI in the cloud. * The :bb:step:`Trigger` step now supports ``unimportantSchedulerNames`` * add a UI button to allow to cancel the whole queue for a builder * Buildbot log viewer now support 256 colors ANSI codes * new :bb:step:`GitHub` which correctly checkout the magic branch like ``refs/pull/xx/merge``. * :class:`MailNotifier` now supports a `schedulers` constructor argument that allows you to send mail only for builds triggered by the specified list of schedulers. * :class:`MailNotifier` now supports a `branches` constructor argument that allows you to send mail only for builds triggered by the specified list of branches. * Optimization of the data api filtering, sorting and paging, speeding up a lot the UI when the master has lots of builds. * :bb:reporter:`GerritStatusPush` now accepts a ``notify`` parameter to control who gets emailed by Gerrit. * Add a ``format_fn`` parameter to the ``HttpStatusPush`` reporter to customize the information being pushed. * Latent Workers can now start in parallel. * The build started by latent worker will be created while the latent worker is substantiated. * Latent Workers will now report startup issues in the UI. * Workers will be temporarily put in quarantine in case of build preparation issues. This avoids master and database overload in case of bad worker configuration. The quarantine is implemented with an exponential back-off timer. * Master Stop will now stop all builds, and wait for all workers to properly disconnect. Previously, the worker connections was stopped, which incidentally made all their builds marked retried. Now, builds started with a :class:`Triggereable` scheduler will be cancelled, while other builds will be retried. The master will make sure that all latent workers are stopped. * The ``MessageFormatter`` class also allows inline-templates with the ``template`` parameter. 
* The ``MessageFormatter`` class allows custom mail's subjects with the ``subject`` and ``subject_name`` parameters. * The ``MessageFormatter`` class allows extending the context given to the Templates via the ``ctx`` parameter. * The new ``MessageFormatterMissingWorker`` class allows to customize the message sent when a worker is missing. * The :bb:worker:`OpenStackLatentWorker` worker now supports rendering the block device parameters. The ``volume_size`` parameter will be automatically calculated if it is ``None``. .. _Hyper: https://hyper.sh Fixes ~~~~~ * fix the UI to allow to cancel a buildrequest (:bug:`3582`) * :bb:chsrc:`GitHub` change hook now correctly use the refs/pull/xx/merge branch for testing PRs. * Fix the UI to better adapt to different screen width (:bug:`3614`) * Don't log :class:`AlreadyClaimedError`. They are normal in case of :bb:step:`Trigger` cancelling, and in a multimaster configuration. * Fix issues with worker disconnection. When a worker disconnects, its current buildstep must be interrupted and the buildrequests should be retried. * Fix the worker missing email notification. * Fix issue with worker builder list not being updated in UI when buildmaster is reconfigured (:bug:`3629`) Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ Features ~~~~~~~~ * New :class:`SharedService` can be used by steps, reporters, etc to implement per master resource limit. * New :class:`HTTPClientService` can be used by steps, reporters, etc to implement HTTP client. This class will automatically choose between `treq`_ and `txrequests`_, whichever is installed, in order to access HTTP servers. This class comes with a fake implementation helping to write unit tests. * All HTTP reporters have been ported to :class:`HTTPClientService` .. _txrequests: https://pypi.python.org/pypi/txrequests .. 
_treq: https://pypi.python.org/pypi/treq Fixes ~~~~~ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * By default, non-distinct commits received via :class:`buildbot.status.web.hooks.github.GitHubEventHandler` now get recorded as a :class:`Change`. In this way, a commit pushed to a branch that is not being watched (e.g. a dev branch) will still get acted on when it is later pushed to a branch that is being watched (e.g. master). In the past, such a commit would get ignored and not built because it was non-distinct. To disable this behavior and revert to the old behavior, install a :class:`ChangeFilter` that checks the ``github_distinct`` property: .. code-block:: python ChangeFilter(filter_fn=lambda c: c.properties.getProperty('github_distinct')) * setup.py 'scripts' have been converted to console_scripts entry point. This makes them more portable and compatible with wheel format. Most consequences are for the windows users: * ``buildbot.bat`` does not exist anymore, and is replaced by ``buildbot.exe``, which is generated by the console_script entrypoint. * ``buildbot_service.py`` is replaced by ``buildbot_windows_service.exe``, which is generated by the console_script entrypoint As this script has been written in 2006, has only inline documentation and no unit tests, it is not guaranteed to be working. Please help improving the windows situation. * The ``user`` and ``password`` parameters of the ``HttpStatusPush`` reporter have been deprecated in favor of the ``auth`` parameter. * The ``template_name`` parameter of the ``MessageFormatter`` class has been deprecated in favor of ``template_filename``. Worker ------ Fixes ~~~~~ Changes for Developers ~~~~~~~~~~~~~~~~~~~~~~ Deprecations, Removals, and Non-Compatible Changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * The worker now requires at least Twisted 10.2.0. * setup.py 'scripts' have been converted to console_scripts entry point. 
This makes them more portable and compatible with wheel format. Most consequences are for the windows users: * ``buildbot_worker.bat`` does not exist anymore, and is replaced by ``buildbot_worker.exe``, which is generated by the console_script entrypoint. * ``buildbot_service.py`` is replaced by ``buildbot_worker_windows_service.exe``, which is generated by the console_script entrypoint As this script has been written in 2006, has only inline documentation and no unit tests, it is not guaranteed to be working. Please help improving the windows situation. * :class:`AbstractLatentWorker` is now in :py:mod:`buildbot.worker.latent` instead of :py:mod:`buildbot.worker.base`. Details ------- For a more detailed description of the changes made in this version, see the git log itself: .. code-block:: bash git log v0.9.0..v0.9.1 buildbot-3.4.0/master/docs/relnotes/0.9.2-0.9.15.rst000066400000000000000000001016721413250514000213320ustar00rootroot00000000000000Release Notes for Buildbot ``0.9.15.post1`` ( ``2018-01-07`` ) ============================================================== Bug fixes --------- - Fix worker reconnection fails (:issue:`3875`, :issue:`3876`) - Fix umask set to 0 when using LocalWorker (:issue:`3878`) - Fix Buildbot reconfig, when badge plugin is installed (:issue:`3879`) - Fix (:issue:`3865`) so that now :py:class:`~buildbot.changes.svnpoller.SVNPoller` works with paths that contain valid UTF-8 characters which are not ASCII. Release Notes for Buildbot ``0.9.15`` ( ``2018-01-02`` ) ======================================================== Bug fixes --------- - Fix builder page not showing any build (:issue:`3820`) - Fix double Workers button in the menu. (:issue:`3818`) - Fix bad icons in the worker action dialog. - Fix url arguments in Buildbot :ref:`Badges` for python3. - Upgrading to `guanlecoja-ui` version 1.8.0, fixing two issues. Fixed issue where the console view would jump to the top of page when opening the build summary dialog (:issue:`3657`). 
Also improved sidebar behaviour by remembering previous pinned vs. collapsed state. - Fixes issue with Buildbot :bb:worker:`DockerLatentWorker`, where Buildbot can kill running workers by mistake based on the form the worker name (:issue:`3800`). - Fixes issue with Buildbot :bb:worker:`DockerLatentWorker` not reaping zombies process within its container environment. - Update requirement text to use the modern "docker" module from the older "docker-py" module name - When multiple :bb:cfg:`reporter` or :bb:cfg:`services` are configured with the same name, an error is now displayed instead of silently discarding all but the last one :issue:`3813`. - Fixed exception when using :py:class:`buildbot.www.auth.CustomAuth` Features -------- - New Buildbot SVG icons for web UI. The web UI now uses a colored favicon according to build results (:issue:`3785`). - ``paused`` and ``graceful`` :ref:`Worker-states` are now stored in the database. - :ref:`Worker-states` are now displayed in the web UI. - Quarantine timers is now using the ``paused`` worker state. - Quarantine timer is now enabled when a build finish on ``EXCEPTION`` state. - Standalone binaries for buildbot-worker package are now published for windows and linux (``amd64``). This allows to run a buildbot-worker without having a python environment. - New ``buildbot-worker create-worker --maxretries`` for :ref:`Latent-Workers` to quit if the master is or becomes unreachable. - Badges can now display `running` as status. - The database schema now supports cascade deletes for all objects instead of raising an error when deleting a record which has other records pointing to it via foreign keys. - Buildbot can properly find its version if installed from a git archive tarball generated from a tag. - Enhanced the test suite to add worker/master protocol interoperability tests between python3 and python2. Deprecations and Removals ------------------------- - buildbot.util.ascii2unicode() is removed. 
buildbot.util.bytes2unicode() should be used instead. Release Notes for Buildbot ``0.9.14`` ( ``2017-12-08`` ) ======================================================== Bug fixes --------- - Compile step now properly takes the decodeRC parameter in account (:issue:`3774`) - Fix duplicate build requests results in :py:class:`~buildbot.db.buildrequests.BuildRequestsConnectorComponent` when querying the database (:issue:`3712`). - :py:class:`~buildbot.changes.gitpoller.GitPoller` now accepts git branch names with UTF-8 characters (:issue:`3769`). - Fixed inconsistent use of `pointer` style mouse cursor by removing it from the `.label` css rule and instead creating a new `.clickable` css rule which is used only in places which are clickable and would not otherwise automatically get the `pointer` icon, for example it is not needed for hyper-links. (:issue:`3795`). - Rebuilding with the same revision now takes new change properties into account instead of re-using the original build change properties (:issue:`3701`). - Worker authentication is now delayed via a DeferredLock until Buildbot configuration is finished. This fixes UnauthorizedLogin errors during buildbot restart (:issue:`3462`). - Fixes python3 encoding issues with Windows Service (:issue:`3796`) Features -------- - new :ref`badges` plugin which reimplement the buildbot eight png badge system. - In progress worker control API. Worker can now be stopped and paused using the UI. Note that there is no UI yet to look the status of those actions (:issue:`3429`). - Make maximum number of builds fetched on the builders page configurable. - Include `context` in the log message for `GitHubStatusPush` - On 'Builders' page reload builds when tags change. - Give reporters access to master single in renderables. This allows access to build logs amongst other things - Added possibility to check www user credentials with a custom class. 
Release Notes for Buildbot ``0.9.13`` ( ``2017-11-07`` )
========================================================

Deprecations and Removals
-------------------------

The following changes will help Buildbot leverage new features of Twisted to implement important features like worker protocol encryption.

- The ``buildbot`` and ``buildbot-worker`` packages now require Python 2.7 or Python 3.4+ -- Python 2.6 is no longer supported.
- ``buildbot`` and ``buildbot-worker`` packages now require Twisted versions >= 16.1.0. Earlier versions of Twisted are not supported.

Bug fixes
---------

- Fix Console View forced builds stacking at top (:issue:`3461`)
- Improve buildrequest distributor to ensure all builders are processed. With previous version, builder list could be re-prioritized, while running the distributor, meaning some builders would never be run in case of master high load. (:issue:`3661`)
- Improve ``getOldestRequestTime`` function of buildrequest distributor to do sorting and paging in the database layer (:issue:`3661`).
- Arguments passed to GitLab push notifications now work with Python 3 (:issue:`3720`).
- Web hooks change sources which use twisted.web.http.Request have been fixed to use bytes, not native strings. This ensures web hooks work on Python 3. Please report any issues on web hooks in python3, as it is hard for us to test end to end.
- Fixed null value of steps and logs in reporter HttpStatusPush api. Fixes (:issue:`3180`)
- EC2LatentBuilder now correctly sets tags on spot instances (:issue:`3739`).
- Fixed operation of the Try scheduler for a code checked out from Subversion.
- Fix buildbot worker startup when running as a windows service

Features
--------

- Make parameters for :py:class:`~buildbot.steps.shell.WarningCountingShellCommand` renderable. These are `suppressionList`, `warningPattern`, `directoryEnterPattern`, `directoryLeavePattern` and `maxWarnCount`.
- :py:class:`~buildbot.www.hooks.github.GitHubEventHandler` now supports authentication for GitHub instances that do not allow anonymous access - Added support for renderable builder locks. Previously only steps could have renderable locks. - Added flag to Docker Latent Worker to always pull images Release Notes for Buildbot ``0.9.12.post1`` ( ``2017-10-10`` ) ============================================================== This is a release which only exists for the ``buildbot_grid_view`` package. Bug fixes --------- - Fix Grid View plugin broken because of merge resolution mistake ( :issue:`3603` and :issue:`3688`.) Release Notes for Buildbot ``0.9.12`` ( ``2017-10-05`` ) ======================================================== Bug fixes --------- - Fixed many issues related to connecting masters and workers with different major version of Python (:issue:`3416`). - Fixed KeyError in the log when two buildrequests of the same buildset are finished at the same time (:issue:`3472`, :issue:`3591`) - Fix for SVN.purge fails when modified files contain non-ascii characters (:issue:`3576`) - Fix the GitHub change hook on Python 3 (:issue:`3452`). - Fix :class:`reporters.gitlab` to use correct commit status codes (:issue:`3641`). - Fixed deadlock issue, when locks are taken at least 3 times by the 3 Buildstep with same configuration (:issue:`3650`) - Fix the Gerrit source step in the presence of multiple Gerrit repos (:issue:`3460`). - Add empty pidfile option to master and worker start script when `--nodaemon` option is on. (:issue:`3012`). Features -------- - Add possibility to specify a :bb:sched:`PatchParameter` for any :bb:sched:`CodebaseParameter` in a :bb:sched:`ForceScheduler` (part of :issue:`3110`). - Latent Workers will no longer continually retry if they cannot substantiate (:issue:`3572`) Deprecations and Removals ------------------------- - buildbot.util.encodeString() has been removed. buildbot.util.unicode2bytes() should be used instead. 
Release Notes for Buildbot ``0.9.11`` ( ``2017-09-08`` ) ======================================================== Incompatible Changes -------------------- - Buildbot is not compatible with ``python3-ldap`` anymore. It now requires ``ldap3`` package for its ldap operations (:issue:`3530`) Bug fixes --------- - Fix issue with ``logviewer`` scrolling up indefinitely when loading logs (:issue:`3154`). - Do not add the url if it already exists in the step. (:issue:`3554`) - Fix filtering for REST resource attributes when SQL is involved in the backend (eq, ne, and contains operations, when there are several filters) (:issue:`3526`). - The ``git`` source step now uses `git checkout -B` rather than `git branch -M` to create local branches (:issue:`3537`) - Fixed :ref:`Grid View ` settings. It is now possible to configure "false" values. - Fix performance issue when remote command does not send any line boundary (:issue:`3517`) - Fix regression in GithHub oauth2 v3 api, when using enterprise edition. - Fix the Perforce build step on Python 3 (:issue:`3493`) - Make REST API's filter __contains use OR connector rather than AND according to what the documentation suggests. - Fixed secret plugins registration, so that they are correctly available in ``import buildbot.plugins.secrets``. changes to all secrets plugin to be imported and used. - Fix secrets downloaded to worker with too wide permissions. - Fix issue with stop build during latent worker substantiating, the build result was retried instead of cancelled. - ``pip install 'buildbot[bundle]'`` now installs ``grid_view`` plugin. This fixes issues with the tutorial where ``grid_view`` is enabled by default. Improved Documentation ---------------------- - Fixed documentation regarding log obfuscation for passwords. - Improve documentation of REST API's __contains filter. Features -------- - Added autopull for Docker images based on config. 
(:issue:`3071`) - Allow to expose logs to summary callback of :py:class:`GerritStatusPush`. - Implement GitHub change hook CI skipping (:issue:`3443`). Now buildbot will ignore the event, if the ``[ci skip]`` keyword (configurable) in commit message. For more info, please check out the ``skip`` parameter of :bb:chsrc:`GitHub` hook. - :py:class:`~buildbot.reporters.github.GitHubStatusPush` now support reporting to ssh style URLs, ie `git@github.com:Owner/RepoName.git` - Added the possibility to filter builds according to results in :ref:`Grid View `. - :py:class:`~buildbot.worker.openstack.OpenStackLatentWorker` now supports V3 authentication. - Buildbot now tries harder at finding line boundaries. It now supports several cursor controlling ANSI sequences as well as use of lots of backspace to go back several characters. - UI Improvements so that Buildbot build pages looks better on mobile. - :py:class:`~buildbot.worker.openstack.OpenStackLatentWorker` now supports region attribute. - The :ref:`Schedulers` ``builderNames`` parameter can now be a :class:`~IRenderable` object that will render to a list of builder names. - The :py:class:`~buildbot.www.ldapuserinfo.LdapUserInfo` now uses the python3-ldap successor ldap3 (:issue:`3530`). - Added support for static suppressions parameter for shell commands. Release Notes for Buildbot ``0.9.10`` ( ``2017-08-03`` ) ======================================================== Bug fixes --------- - Fix 'reconfig master causes worker lost' error (:issue:`3392`). - Fix bug where object names could not be larger than 150 characters (:issue:`3449`) - Fix bug where notifier names could not be overridden (:issue:`3450`) - Fix exception when shutting down a master (:issue:`3478`) - Fix Manhole support to work with Python 3 and Twisted 16.0.0+ (:issue:`3160`). :py:class:`~buildbot.manhole.AuthorizedKeysManhole` and :py:class:`~buildbot.manhole.PasswordManhole` now require a directory containing SSH host keys to be specified. 
- Fix python 3 issue with displaying the properties when fetching builders (:issue:`3418`).
- Fix bug when :py:class:`~buildbot.steps.shellsequence.ShellArg` arguments were rendered only once during an instance's lifetime.
- Fix waterfall tiny size of build status indicators (:issue:`3475`)
- Fix waterfall natural order of builder list
- Fix builder page use 'pointer' cursor style for tags (:issue:`3473`)
- Fix builder page update tag filter when using the browser's back button (:issue:`3474`)

Features
--------

- Added support for builder names in REST API. Note that those endpoints are not (yet) available from the UI, as the events are not sent to the endpoints with builder names.
- Implemented new ability to set from by email domain. Implemented :py:class:`~buildbot.www.authz.RolesFromDomain`. (:issue:`3422`)

Release Notes for Buildbot ``0.9.9.post2`` ( ``2017-07-06`` )
=============================================================

Bug fixes
---------

- Fix ``tried to complete 100 buildrequests, but only completed 25`` issue in buildrequest collapser (:issue:`3406`)
- Fixed issue when several mail notifiers are used with same parameters, but different modes (:issue:`3398`).
- Fixed release scripts for ``postN`` releases

Release Notes for Buildbot ``0.9.9.post1`` ( ``2017-07-01`` )
=============================================================

Bug fixes
---------

- Fix regression with :py:class:`~buildbot.www.oauth2.GitHubAuth` when API v3 is used.
- When using the :py:class:`~buildbot.www.oauth2.GitHubAuth` v4 API, the generated GraphQL to get the user organizations uses a name alias for each organization. These aliases must not contain dashes.

Release Notes for Buildbot ``0.9.9`` ( ``2017-06-29`` )
=======================================================

Bug fixes
---------

- Fixed a regression in ``UserPasswordAuth`` where a list would create an error.
- Fix non ascii payload handling in base web hook (:issue:`3321`).
- Fixed default buildrequest collapsing (:issue:`3151`)
- _wait_for_request() would fail to format a log statement due to an invalid type being passed to log.msg (resulting in a broken build)
- Fix Windows compatibility with frontend development tool ``gulp dev proxy`` (:issue:`3359`)

Features
--------

- New :ref:`Grid View ` UI plugin.
- The :ref:`Change-Hooks` system is now integrated in the :ref:`Plugins` system, making it easier to subclass hooks. There is still the need to re-factor hook by hook to allow better customizability.
- The :py:class:`~buildbot.www.oauth2.GitHubAuth` now allows fetching the user team membership for all organizations the user belongs to. This requires access to a V4 GitHub API (GraphQL).
- GitLab merge request hook now creates a change with repository to be the source repository and branch the source branch. Additional properties are created to point to destination branch and destination repository. This makes :bb:reporter:`GitLabStatusPush` push the correct status to GitLab, so that pipeline report is visible in the merge request page.
- The :py:class:`~buildbot.www.hooks.github.GitHubEventHandler` now allows the inclusion of white-listed properties for push events.
- Allow sending a comment to a pull request for Bitbucket Server in :py:class:`~buildbot.reporters.stash.BitbucketServerPRCommentPush`
- Implement support for Bitbucket Server webhook plugin in :py:class:`~buildbot.www.hooks.bitbucketserver.BitbucketServerEventHandler`

Release Notes for Buildbot ``0.9.8`` ( ``2017-06-14`` )
=======================================================

Core Bug fixes
--------------

- Fix incompatibility issue of ``UserPasswordAuth`` with python 3.
- Fix issue with oauth sequence not working with Firefox (:issue:`3306`)
- Update old ``addChange`` method to accept the new chdict names if only the new name is present. Fixes :issue:`3191`.
- fix bytes vs string issue on python3 with authorization of rest endpoints.
Core Features ------------- - ``doStepIf`` is now renderable. - Source step codebase is now renderable. - Step names are now renderable. - Added :py:func:`giturlparse` utility function to help buildbot components like reporters to parse git url from change sources. - Factorized the mail reporter to be able to write new message based reporters, for other backend than SMTP. - The class :py:class:`~buildbot.process.properties.Property` now allows being used with Python built in comparators. It will return a Renderable which executes the comparison. Components Bug fixes -------------------- - GitLab reporter now correctly sets the status to running instead of pending when a build starts. - GitLab reporter now correctly works when there are multiple codebase, and when the projects names contain url reserved characters. - GitLab reporter now correctly reports the status even if there are several sourcestamps. Better parsing of change repository in GitLab reporter so that it understands ssh urls and https url. GitLab reporter do not use the project field anymore to know the repository to push to. Components Features ------------------- - GitLab hook now supports the merge_request event to automatically build from a merge request. Note that the results will not properly displayed in merge_request UI due to https://gitlab.com/gitlab-org/gitlab-ce/issues/33293 - Added a https://pushjet.io/ reporter as :py:class:`buildbot.reporters.pushjet.PushjetNotifier` - New build step :py:class:`~buildbot.steps.master.Assert` Tests a renderable or constant if it evaluates to true. It will succeed or fail to step according to the result. Release Notes for Buildbot ``0.9.7`` ( ``2017-05-09`` ) ======================================================================= Core Bug fixes -------------- - Fix :py:class:`UserPasswordAuth` authentication on ``py3`` and recent browsers. (:issue:`3162`, :issue:`3163`). The ``py3`` fix also requires Twisted https://github.com/twisted/twisted/pull/773. 
- :ref:`ConsoleView` now display changes the same way as in Recent Changes page. - Fix issue with :ref:`ConsoleView` when no change source is configured but still builds have ``got_revision`` property Components Bug fixes -------------------- - Allow renderables in options and definitions of step ``CMake``. Currently only dicts and lists with renderables inside are allowed. - ``OAuth`` Authentication are now working with :py:class:`RolesFromEmails`. - :py:class:`~buildbot.worker.docker.DockerLatentWorker`: ``_image_exists`` does not raise anymore if it encounters an image with ```` tag - Fix command line parameters for ``Robocopy`` step ``verbose`` option Core Features ------------- - Builds ``state_string`` is now automatically computed according to the :py:meth:`BuildStep.getResultSummary`, :py:attr:`BuildStep.description` and ``updateBuildSummaryPolicy`` from :ref:`Buildstep-Common-Parameters`. This allows the dashboards and reporters to get a descent summary text of the build without fetching the steps. - New :bb:cfg:`configurators` section, which can be used to create higher level configuration modules for Buildbot. - New :bb:configurator:`JanitorConfigurator` which can be used to create a builder which save disk space by removing old logs from the database. Components Features ------------------- - Added a https://pushover.net/ reporter as :py:class:`buildbot.reporters.pushover.PushoverNotifier` - ``property`` argument in SetPropery is now renderable. Release Notes for Buildbot ``0.9.6`` ( ``2017-04-19`` ) ======================================================= Core Bug fixes -------------- - :py:class:`buildbot.www.authz.endpointmatchers.AnyControlEndpointMatcher` now actually doesn't match `GET` requests. Before it would act like an `AnyEndpointMatcher` since the `GET` had a different case. - Passing ``unicode`` ``builderNames`` to :bb:sched:`ForceScheduler` no longer causes an error. 
- Fix issue with :bb:sched:`Nightly` change classification raising foreign key exceptions (:issue:`3021`)
- Fixes an exception found in :py:func:`buildbot_net_usage_data._sendWithUrlib` when running through the tutorial using Python 3.
- ``usePTY`` configuration of the :bb:step:`ShellCommand` now works as expected with recent version of buildbot-worker.

Components Bug fixes
--------------------

- ``pollAtLaunch`` of the :bb:chsrc:`GitHubPullrequestPoller` now works as expected. Also the author email won't be displayed as None
- :bb:chsrc:`GerritChangeSource` and :bb:reporter:`GerritStatusPush` now use the master's environment including PATH variable to find the ssh binary.
- :py:class:`~buildbot_worker.commands.transfer.SlaveDirectoryUploadCommand` no longer throws exceptions because the file "is used by another process" under Windows

UI Bug fixes
------------

- Fix waterfall scrolling and zooming in current browsers
- ``console_view`` now properly uses ``revlink`` metadata to link to changes.
- Fixed Console View infinite loading spinner when no change have been recorded yet (:issue:`3060`).

Core Features
-------------

- new :ref:`Virtual-Builders` concept for better integration of frameworks which store the build config along side the source code.

Components Features
-------------------

- :bb:chsrc:`BitBucket` now sets the ``event`` property on each change to what the ``X-Event-Key`` header contains.
- :bb:chsrc:`GitHubPullrequestPoller` now adds additional information about the pull request to properties. The property argument is removed and is populated with the repository full name.
- :bb:chsrc:`GitHub` now sets the ``event`` property on each change to what the ``X-GitHub-Event`` header contains.
- Changed :py:class:`~buildbot.www.oauth2.GitHubAuth` now supports GitHub Enterprise when setting new ``serverURL`` argument.
- :bb:chsrc:`GitLab` now sets the ``event`` property on each change to what the ``X-GitLab-Event`` header contains.
- :bb:chsrc:`GitHub` now process git tag push events - :bb:chsrc:`GitHub` now adds more information about the pull request to the properties. This syncs features with :bb:chsrc:`GitHubPullrequestPoller` - :bb:chsrc:`GitLab` now process git tag push events - :bb:chsrc:`GitLab` now supports authentication with the secret token UI Features ----------- - Reworked :ref:`ConsoleView` and :ref:`WaterfallView` for better usability and better integration with virtual builders - :ref:`WWW-data-module` collections now have a ``$resolved`` attribute which allows dashboard to know when the data is loaded. Release Notes for Buildbot ``0.9.5`` ( ``2017-03-18`` ) ======================================================= Bug fixes --------- - Fix issue with compressing empty log - Fix issue with db being closed by wrong thread - Fix issue with buildbot_worker not closing file handles when using the transfer steps - Fix issue with buildbot requesting too many permissions from GitHub's OAuth - Fix :py:class:`~buildbot.steps.http.HTTPStep` to accept ``json`` as keyword argument. - Updated :py:class:`~buildbot.workers.openstack.OpenStackLatentWorker` to use keystoneauth1 so it will support latest python-novaclient packages. - Include :py:class:`~buildbot.steps.package.rpm.rpmlint.RpmLint` step in steps plugins. Core Features ------------- - Experimental support for Python 3.5 and 3.6. Note that complete support depends on fixes to be released in Twisted 17.2.0. - New experimental :ref:`secretManagement` framework, which allows to securely declare secrets, reusable in your steps. - New :ref:`buildbot_wsgi_dashboards` plugin, which allows to write custom dashboard with traditional server side web frameworks. - Added :py:class:`AnyControlEndpointMatcher` and :py:class:`EnableSchedulerEndpointMatcher` for better configurability of the access control. 
If you have access control to your Buildbot, it is recommended you add :py:class:`AnyControlEndpointMatcher` at the end of your access control configuration.

- Schedulers can now be toggled on and off from the UI. Useful for temporarily disabling periodic timers.

Components Features
-------------------

- :py:class:`~buildbot.steps.transfer.FileUpload` now supports setting the url title text that is visible in the web UI. :py:class:`~buildbot.steps.transfer.FileUpload` now supports custom `description` and `descriptionDone` text.
- :py:class:`~buildbot.worker.ec2.EC2LatentWorker` now provides instance id as the `instance` property enabling use of the AWS toolkit.
- Add GitHub pull request Poller to list of available changesources.
- :py:class:`~buildbot.util.OAuth2LoginResource` now supports the `token` URL parameter. If a user wants to authenticate through OAuth2 with a pre-generated token (such as the `access_token` provided by GitHub) it can be passed to `/auth/login` as the `token` URL parameter and the user will be authenticated to buildbot with those credentials.
- New reporter :py:class:`~buildbot.reporters.github.GitHubCommentPush` can comment on GitHub PRs
- :py:class:`~buildbot.changes.GitPoller` now supports polling tags in a git repository.
- :py:class:`~buildbot.steps.transfer.MultipleFileUpload` now supports the `glob` parameter. If `glob` is set to `True` all `workersrcs` parameters will be run through `glob` and the result will be uploaded to `masterdest`
- Changed :py:class:`~buildbot.workers.openstack.OpenStackLatentWorker` to default to v2 of the Nova API. The novaclient package has had a deprecation warning about v1.1 and would use v2 anyway.
Deprecations and Removals ------------------------- - ``master/contrib`` and ``worker/contrib`` directories have been moved to their own repository at https://github.com/buildbot/buildbot-contrib/ Release Notes for Buildbot ``0.9.4`` ( ``2017-02-08`` ) ======================================================= Database upgrade ---------------- A database upgrade is necessary for this release (see :bb:cmdline:`upgrade-master`). Bug fixes --------- - Like for ``buildbot start``, ``buildbot upgrade-master`` will now erase an old pidfile if the process is not live anymore instead of just failing. - Change properties 'value' changed from String(1024) to Text. Requires upgrade master. (:bug:`3197`) - When using REST API, it is now possible to filter and sort in descending order at the same time. - Fix issue with :bb:reporter:`HttpStatusPush` raising ``datetime is not JSON serializable`` error. - Fix issue with log viewer not properly rendering color codes. - Fixed log viewer selection and copy-paste for Firefox (:bug:`3662`). - Fix issue with ``DelayedCalled`` already called, and worker missing notification email never received. - :bb:cfg:`schedulers` and :bb:cfg:`change_source` are now properly taking configuration change in account with ``buildbot reconfig``. - ``setuptools`` is now explicitly marked as required. The dependency was previously implicit. - :bb:cfg:`buildbotNetUsageData` now uses ``requests`` if available and will default to HTTP if a bogus SSL implementation is found. It will also correctly send information about the platform type. Features -------- - Buildbot now uses `JWT `_ to store its web UI Sessions. Sessions now persist upon buildbot restart. Sessions are shared between masters. Session expiration time is configurable with ``c['www']['cookie_expiration_time']`` see :bb:cfg:`www`. - Builders page has been optimized and can now be displayed with 4 http requests whatever is the builder count (previously, there was one http request per builder). 
- Builder and Worker page build list now have the ``numbuilds=`` option which allows to show more builds. - Masters page now shows more information about a master (workers, builds, activity timer) - Workers page improvements: - Shows which master the worker is connected to. - Shows correctly the list of builders that this master is configured on (not the list of ``buildermaster`` which nobody cares about). - Shows list of builds per worker similar to the builders page. - New worker details page displays the list of builds built by this worker using database optimized query. Deprecations and Removals ------------------------- - Some deprecated broken :ref:`Contrib-Scripts` were removed. - :py:data:`buildbot.www.hooks.googlecode` has been removed, since the Google Code service has been shut down. - :py:data:`buildbot.util.json` has been deprecated in favor of the standard library :py:mod:`json`. ``simplejson`` will not be used anymore if found in the virtualenv. Release Notes for Buildbot ``0.9.3`` ( ``2017-01-11`` ) ======================================================= Bug fixes --------- - Fix :bb:reporter:`BitbucketStatusPush` ``ep should start with /`` assertion error. - Fix duplicate worker use case, where a worker with the same name would make the other worker also disconnect (:bug:`3656`) - :py:class:`~buildbot.changes.GitPoller`: ``buildPushesWithNoCommits`` now rebuilds for a known branch that was updated to an existing commit. - Fix issue with log viewer not staying at bottom of the log when loading log lines. - Fixed `addBuildURLs` in :py:class:`~buildbot.steps.trigger.Trigger` to use results from triggered builds to include in the URL name exposed by API. - Fix :ref:`mq-Wamp` :bb:cfg:`mq` support by removing ``debug``, ``debug_amp`` and ``debug_app`` from the :bb:cfg:`mq` config, which is not available in latest version of `Python Autobahn `_. You can now use ``wamp_debug_level`` option instead. 
- fix issue with factory.workdir AttributeError are not properly reported. Features -------- - Optimize the memory consumption of the log compression process. Buildbot do not load the whole log into memory anymore. This should improve a lot buildbot memory footprint. - Changed the build page so that the preview of the logs are shown in live. It is a preview means the last lines of log. How many lines is configurable per user in the user settings page. - Log viewer line numbers are no longer selectable, so that it is easier to copy paste. - :py:class:`~buildbot.plugins.worker.DockerLatentWorker` accepts now renderable Dockerfile - :ref:`Renderer` function can now return :class:`~buildbot.interfaces.IRenderable` objects. - new :bb:step:`SetProperties` which allows to generate and transform properties separately. - Handle new workers in `windows_service.py` script. - Sort the builders in the waterfall view by name instead of ID. Release Notes for Buildbot ``0.9.2`` ( ``2016-12-13`` ) ======================================================= Bug fixes --------- - Fix :py:class:`~buildbot.www.oauth2.GitHubAuth` to retrieve all organizations instead of only those publicly available. - Fixed `ref` to point to `branch` instead of commit `sha` in :py:class:`~buildbot.reporters.GitLabStatusPush` - :bb:reporter:`IRC` :py:meth:`maybeColorize` is able to highlight single words and stop colorization at the end. The previous implementation only stopped colorization but not boldface. - fix compatibility issue with mysql5 (do not set default value for TEXT column). - Fixed `addChange` in :py:class:`~buildbot.data.changes.Change` to use the `revlink` configuration option to generate the revlink. - fix threading issue in :py:class:`~buildbot.plugins.worker.DockerLatentWorker` Features -------- - Implement :py:class:`~buildbot.www.oauth2.BitbucketAuth`. - New :bb:chsrc:`GerritEventLogPoller` poller to poll Gerrit changes via http API. 
- New :bb:reporter:`GerritVerifyStatusPush` can send multiple review status for the same Gerrit change. - :bb:reporter:`IRC` appends the builder URL to a successful/failed build if available - :bb:reporter:`MailNotifier` now accepts ``useSmtps`` parameter for initiating connection over an SSL/TLS encrypted connection (SMTPS) - New support for ``Mesos`` and `Marathon `_ via :py:class:`~buildbot.plugins.worker.MarathonLatentWorker`. ``Marathon`` is a production-grade container orchestration platform for Mesosphere's Data- center Operating System (DC/OS) and Apache ``Mesos``. - ``password`` in :py:class:`~buildbot.plugins.worker.DockerLatentWorker` and :py:class:`~buildbot.plugins.worker.HyperLatentWorker`, can be None. In that case, they will be auto-generated from random number. - :bb:reporter:`BitbucketServerStatusPush` now accepts ``key``, ``buildName``, ``endDescription``, ``startDescription``, and ``verbose`` parameters to control the JSON sent to Stash. - Buildbot can now be configured to deny read access to REST api resources based on authorization rules. buildbot-3.4.0/master/docs/relnotes/1.x.rst000066400000000000000000000402151413250514000205350ustar00rootroot00000000000000Release Notes for Buildbot ``1.8.2`` ( ``2019-05-22`` ) ======================================================= Bug fixes --------- - Fix vulnerability in OAuth where user-submitted authorization token was used for authentication (https://github.com/buildbot/buildbot/wiki/OAuth-vulnerability-in-using-submitted-authorization-token-for-authentication) Thanks to Phillip Kuhrt for reporting it. Release Notes for Buildbot ``1.8.1`` ( ``2019-02-02`` ) ======================================================= Bug fixes --------- - Fix CRLF injection vulnerability with validating user provided redirect parameters (https://github.com/buildbot/buildbot/wiki/CRLF-injection-in-Buildbot-login-and-logout-redirect-code) Thanks to ``mik317`` and ``mariadb`` for reporting it. 
Release Notes for Buildbot ``1.8.0`` ( ``2019-01-20`` ) ======================================================= Bug fixes --------- - Fix a regression present in v1.7.0 which caused buildrequests waiting for a lock that got released by an unrelated build not be scheduled (:issue:`4491`) - Don't run builds that request an instance with incompatible properties on Docker, Marathon and OpenStack latent workers. - Gitpoller now fetches only branches that are known to exist on remote. Non-existing branches are quietly ignored. - The demo repo in sample configuration files and the tutorial is now fetched via ``https:`` instead of ``git:`` to make life easier for those behind firewalls and/or using proxies. - `buildbot sendchange` has been fixed on Python 3 (:issue:`4138`) Features -------- - Add a :py:class:`~buildbot.worker.kubernetes.KubeLatentWorker` to launch workers into a kubernetes cluster - Simplify/automate configuration of worker as Windows service - eliminate manual configuration of Log on as a service Deprecations and Removals ------------------------- - The deprecated ``BuildMaster.addBuildset`` method has been removed. Use ``BuildMaster.data.updates.addBuildset`` instead. - The deprecated ``BuildMaster.addChange`` method has been removed. Use ``BuildMaster.data.updates.addChange`` instead. - ``buildbot`` package now requires Twisted versions >= 17.9.0. This is required for Python 3 support. Earlier versions of Twisted are not supported. Release Notes for Buildbot ``1.7.0`` ( ``2018-12-21`` ) ======================================================= Bug fixes --------- - Fixed JSON decoding error when sending build properties to www change hooks on Python 3. - Buildbot no longer attempts to start builds that it can prove will have unsatisfied locks. - Don't run builds that request images or sizes on instances started with different images or sizes. 
Features -------- - The Buildbot master Docker image at https://hub.docker.com/r/buildbot/ has been upgraded to use Python 3.7 by default. - Builder page has been improved with a smoothed build times plot, and a new success rate plot. - Allow the Buildbot master initial start timeout to be configurable. - An API to check whether an already started instance of a latent worker is compatible with what's required by a build that is about to be started. - Add support for v2 of the Vault key-value secret engine in the `SecretInVault` secret provider. Deprecations and Removals ------------------------- - Build.canStartWithWorkerForBuilder static method has been made private and renamed to _canAcquireLocks. - The Buildbot master Docker image based on Python 2.7 has been removed in favor of a Python 3.7 based image. - Builder.canStartWithWorkerForBuilder method has been removed. Use Builder.canStartBuild. Release Notes for Buildbot ``1.6.0`` ( ``2018-11-16`` ) ======================================================= Bug fixes --------- - Fixed missing buildrequest owners in the builder page (:issue:`4207`, :issue:`3904`) - Fixed display of the buildrequest number badge text in the builder page when on hover. - Fix usage of master paths when doing Git operations on worker (:issue:`4268`) Improved Documentation ---------------------- - Misc improvement in Git source build step documentation. - Improve documentation of AbstractLatentWorker. - Improve the documentation of the Buildbot concepts by removing unneeded details to other pages. Features -------- - Added a page that lists all pending buildrequests (:issue:`4239`) - Builder page now has a chart displaying the evolution of build times over time - Improved FileUpload efficiency (:issue:`3709`) - Add method ``getResponsibleUsersForBuild`` in :py:class:`~buildbot.notifier.NotifierBase` so that users can override recipients, for example to skip authors of changes. 
- Add define parameter to RpmBuild to specify additional --define parameters. - Added SSL proxy capability to base web application's developer test setup (``gulp dev proxy --host the-buildbot-host --secure``). Deprecations and Removals ------------------------- - The Material design Web UI has been removed as unmaintained. It may be brought back if a maintainer steps up. Release Notes for Buildbot ``1.5.0`` ( ``2018-10-09`` ) ======================================================= Bug fixes --------- - Fix the umask parameter example to make it work with both Python 2.x and 3.x. - Fix build-change association for multi-codebase builds in the console view.. - Fixed builders page doesn't list workers in multi-master configuration (:issue:`4326`) - Restricted groups added by :py:class:`~buildbot.www.oauth2.GitHubAuth`'s ``getTeamsMembership`` option to only those teams to which the user belongs. Previously, groups were added for all teams for all organizations to which the user belongs. - Fix 'Show old workers' combo behavior. Features -------- - GitHub teams added to a user's ``groups`` by :py:class:`~buildbot.www.oauth2.GitHubAuth`'s ``getTeamsMembership`` option are now added by slug as well as by name. This means a team named "Bot Builders" in the organization "buildbot" will be added as both ``buildbot/Bot Builders`` and ``buildbot/bot-builders``. - Make ``urlText`` renderable for the :py:class:`~buildbot.steps.transfer.FileUpload` build step. - Added ``noticeOnChannel`` option to :bb:reporter:`IRC` to send notices instead of messages to channels. This was an option in v0.8.x and removed in v0.9.0, which defaulted to sending notices. The v0.8.x default of sending messages is now restored. 
Reverts ------- - Reverted: Fix git submodule support when using `sshPrivateKey` and `sshHostKey` because it broke other use cases (:issue:`4316`) In order to have this feature to work, you need to keep your master in 1.4.0, and make sure your worker ``buildbot.tac`` are installed in the same path as your master. Release Notes for Buildbot ``1.4.0`` ( ``2018-09-02`` ) ======================================================= Bug fixes --------- - Fix `Build.getUrl()` to not ignore virtual builders. - Fix git submodule support when using `sshPrivateKey` and `sshHostKey` settings by passing ssh data as absolute, not relative paths. - Fixed :bb:step:`P4` for change in latest version of `p4 login -p`. - :py:class:`buildbot.reporters.irc.IrcStatusBot` no longer encodes messages before passing them on to methods of its Twisted base class to avoid posting the ``repr()`` of a bytes object when running on Python 3. Features -------- - Added new :bb:step:`GitPush` step to perform git push operations. - Objects returned by :ref:`renderer` now are able to pass extra arguments to the rendered function via `withArgs` method. Test Suite ---------- - Test suite has been improved for readability by adding a lot of ``inlineCallbacks`` - Fixed tests which didn't wait for ``assertFailure``'s returned deferred. - The test suite now runs on Python 3.7 (mostly deprecation warnings from dependencies shut down) Release Notes for Buildbot ``1.3.0`` ( ``2018-07-13`` ) ======================================================= Bug fixes --------- - buildbot-worker docker image no longer use pidfile. This allows to auto-restart a docker worker upon crash. - GitLab v3 API is deprecated and has been removed from http://gitlab.com, so we now use v4. (:issue:`4143`) Features -------- - -:bb:step:`Git` now supports `sshHostKey` parameter to specify ssh public host key for fetch operations. - -:bb:step:`Git` now supports `sshPrivateKey` parameter to specify private ssh key for fetch operations. 
- -:bb:chsrc:`GitPoller` now supports `sshHostKey` parameter to specify ssh public host key for fetch operations. This feature is supported on git 2.3 and newer. - -:bb:chsrc:`GitPoller` now supports `sshPrivateKey` parameter to specify private ssh key for fetch operations. This feature is supported on git 2.3 and newer. - Github hook token validation now uses ``hmac.compare_digest()`` for better security Deprecations and Removals ------------------------- - Removed support for GitLab v3 API ( GitLab < 9 ). Release Notes for Buildbot ``1.2.0`` ( ``2018-06-10`` ) ======================================================= Bug fixes --------- - Don't schedule a build when a GitLab merge request is deleted or edited (:issue:`3635`) - Add GitLab source step; using it, we now handle GitLab merge requests from forks properly (:issue:`4107`) - Fixed a bug in :py:class:`~buildbot.reporters.mail.MailNotifier`'s ``createEmail`` method when called with the default *builds* value which resulted in mail not being sent. - Fixed a Github crash that happened on Pull Requests, triggered by Github Web-hooks. The json sent by the API does not contain a commit message. In github.py this causes a crash, resulting into response 500 sent back to Github and building failure. - Speed up generation of api/v2/builders by an order of magnitude. (:issue:`3396`). Improved Documentation ---------------------- - Added ``examples/gitlab.cfg`` to demonstrate integrating Buildbot with GitLab. Features -------- - :ref:`ForceScheduler-Parameters` now support an ``autopopulate`` parameter. - :ref:`ForceScheduler-Parameters` ``ChoiceParameter`` now correctly supports the ``strict`` parameter, by allowing free text entry if strict is False. - Allow the remote ref to be specified in the GitHub hook configuration (:issue:`3998`) - Added callable to p4 source that allows client code to resolve the p4 user and workspace into a more complete author. 
Default behaviour is a lambda that simply returns the original supplied who. This callable happens after the existing regex is performed. Release Notes for Buildbot ``1.1.2`` ( ``2018-05-15`` ) ======================================================= Bug fixes --------- - fix several multimaster issues by reverting :issue:`3911`. re-opens :issue:`3783`. (:issue:`4067`, :issue:`4062`, :issue:`4059`) - Fix :bb:step:`MultipleFileUpload` to correctly compute path name when worker and master are on different OS (:issue:`4019`) - LDAP bytes/unicode handling has been fixed to work with Python 3. This means that LDAP authentication, REMOTE_USER authentication, and LDAP avatars now work on Python 3. In addition, an of bounds access when trying to load the value of an empty LDAP attribute has been fixed. - Removing ```no-select``` rules from places where they would prevent the user from selecting interesting text. (:issue:`3663`) - fix ```Maximum recursion depth exceeded`` when lots of worker are trying to connect while master is starting or reconfiguring (:issue:`4042`). Improved Documentation ---------------------- - Document a minimal secure config for the Buildbot web interface. (:issue:`4026`) Features -------- - The Dockerfile for the buildbot master image has been updated to use Alpine Linux 3.7. In addition, the Python requests module has been added to this image. This makes GitHub authentication work out of the box with this image. (:issue:`4039`) - New steps for Visual Studio 2017 (VS2017, VC141, and MsBuild141). - The smoke tests have been changed to use ES2017 async and await keywords. This requires that the smoke tests run with Node 8 or higher. 
Use of async and await is recommended by the Protractor team: https://github.com/angular/protractor/blob/master/docs/async-await.md - Allow ``urlText`` to be set on a url linked to a ``DirectoryUpload`` step (:issue:`3983`) Release Notes for Buildbot ``1.1.1`` ( ``2018-04-06`` ) ======================================================= Bug fixes --------- - Fix issue which marked all workers dis-configured in the database every 24h (:issue:`3981` :issue:`3956` :issue:`3970`) - The :bb:reporter:`MailNotifier` no longer crashes when sending from/to email addresses with "Real Name" parts (e.g., ``John Doe ``). - Corrected pluralization of text on landing page of the web UI Improved Documentation ---------------------- - Corrected typo in description of libvirt - Update sample config to use preferred API Misc Improvements ----------------- - Home page now contains links to recently active builders Release Notes for Buildbot ``1.1.0`` ( ``2018-03-10`` ) ======================================================= Deprecations and Removals ------------------------- - Removed ``ramlfication`` as a dependency to build the docs and run the tests. Bug fixes --------- - Fixed buildrequests API doesn't provide properties data (:issue:`3929`) - Fix missing owner on builder build table (:issue:`3311`) - Include `hipchat` as reporter. - Fix encoding issues of commands with Windows workers (:issue:`3799`). - Fixed Relax builder name length restriction (:issue:`3413`). - Fix the configuration order so that services can actually use secrets (:issue:`3985`) - Partially fix Builder page should show the worker information (:issue:`3546`). Features -------- - Added the ``defaultProperties`` parameter to :bb:cfg:`builders`. - When a build step has a log called "summary" (case-insensitive), the Build Summary page will sort that log first in the list of logs, and automatically expand it. 
Release Notes for Buildbot ``1.0.0`` ( ``2018-02-11`` ) ======================================================= Despite the major version bump, Buildbot 1.0.0 does not have major difference with the 0.9 series. 1.0.0 is rather the mark of API stability. Developers do not foresee a major API break in the next few years like we had for 0.8 to 0.9. Starting with 1.0.0, Buildbot will follow `semver`_ versioning methodology. .. _semver: https://semver.org/ Bug fixes --------- - Cloning :bb:step:`Git` repository with submodules now works with Git < 1.7.6 instead of failing due to the use of the unsupported ``--force`` option. - :bb:chsrc:`GitHub` hook now properly creates a change in case of new tag or new branch. :bb:chsrc:`GitHub` changes will have the ``category`` set to ``tag`` when a tag was pushed to easily distinguish from a branch push. - Fixed issue with :py:meth:`Master.expireMasters` not always honoring its ``forceHouseKeeping`` parameter. (:issue:`3783`) - Fixed issue with steps not correctly ending in ``CANCELLED`` status when interrupted. - Fix maximum recursion limit issue when transferring large files with ``LocalWorker`` (issue:`3014`). - Added an argument to P4Source that allows users to provide a callable to convert Perforce branch and revision to a valid revlink URL. Perforce supplies a p4web server for resolving urls into change lists. - Fixed issue with ``buildbot_pkg``` not hanging on yarn step on windows (:issue:`3890`). - Fix issue with :bb:cfg:`workers` ``notify_on_missing`` not able to be configurable as a single string instead of list of string (:issue:`3913`). - Fixed Builder page should display worker name instead of id (:issue:`3901`). Features -------- - Add capability to override the default UI settings (:issue:`3908`) - All :ref:`Reporters` have been adapted to be able to use :ref:`Secret`. :bb:chsrc:`SVNPoller` has been adapted to be able to use :ref:`Secret`. 
- Implement support for Bitbucket Cloud webhook plugin in :py:class:`~buildbot.www.hooks.bitbucketcloud.BitbucketCloudEventHandler` - The ``owners`` property now includes people associated with the changes of the build (:issue:`3904`). - The repo source step now syncs with the ``--force-sync`` flag which allows the sync to proceed when a source repo in the manifest has changed. - Add support for compressing the repo source step cache tarball with ``pigz``, a parallel gzip compressor. buildbot-3.4.0/master/docs/relnotes/2.x.rst000066400000000000000000001215341413250514000205420ustar00rootroot00000000000000Buildbot ``2.10.5`` ( ``2021-04-05`` ) ====================================== Bug fixes --------- - Fixed a race condition in log handling of ``RpmLint`` and ``WarningCountingShellCommand`` steps resulting in steps crashing occasionally. - Fixed incorrect state string of a finished buildstep being sent via message queue (:issue:`5906`). - Reduced flickering of build summary tooltip during mouseover of build numbers (:issue:`5930`). - Fixed missing data in Owners and Worker columns in changes and workers pages (:issue:`5888`, :issue:`5887`). - Fixed excessive debug logging in ``GerritEventLogPoller``. - Fixed regression in pending buildrequests UI where owner is not displayed anymore (:issue:`5940`). - Re-added support for ``lazylogfiles`` argument of ``ShellCommand`` that was available in old style steps. Buildbot ``2.10.4`` ( ``2021-03-16`` ) ====================================== Bug fixes --------- - Updated Buildbot requirements to specify sqlalchemy 1.4 and newer as not supported yet. Buildbot ``2.10.3`` ( ``2021-03-14`` ) ====================================== Bug fixes --------- - Fixed special character handling in avatar email URLs. - Added missing report generators to the Buildbot plugin database (:issue:`5892`) - Fixed non-default mode support in ``BuildSetStatusGenerator``. 
Buildbot ``2.10.2`` ( ``2021-03-07`` ) ====================================== Bug fixes --------- - Optimized builder reconfiguration when configuration does not change. This leads to up to 6 times faster reconfiguration in Buildbot instances with many builders. - Fixed build steps continuing running commands even if when they have been cancelled. - Worked around failure to build recent enough cryptography module in the docker image due to too old rust being available. - Fixed a regression in ``GitHubEventHandler`` in that it would require a GitHub token for public repositories (:issue:`5760`). - Fixed a regression in ``GerritChangeSource`` since v2.6.0 that caused only the first event related to a Gerrit change to be reporter as a change to Buildbot (:issue:`5596`). Now such deduplication will be applied only to ``patchset-created`` and ``ref-updated`` events. - Reconfiguration reliability has been improved by not reconfiguring WAMP router if settings have not changed. - Fixed unauthorized login errors when latent worker with automatic password is reconfigured during substantiation. - Don't deactivate master as seen by the data API before builds are stopped. - Fixed a race condition that may result in a crash when build request distributor stops when its activity loop is running. - Fixed a crash when a manual step interruption is happening during master shutdown which tries to stop builds itself. - Fixed a race condition that may result in a deadlock if master is stopped at the same time a build is started. - Improved ``buildbot.util.poll.method`` to react faster to a request to stop. New pending calls are no longer executed. Calls whose interval but not random delay has already expired are no longer executed. - Fixed a crash when a trigger step is used in a build with patch body data passed via the try scheduler (:issue:`5165`). - Fixed secret replacement for an empty string or whitespace which may have many matches and generally will not need to be redacted. 
- Fixed exceptions when using LdapUserInfo as avatar provider - Fixed exceptions when LDAP filter string contains characters that needs to be escaped. Buildbot ``2.10.1`` ( ``2021-01-29`` ) ====================================== Bug fixes --------- - Fixed reference to ``tuplematch`` in the ``ReporterBase`` class (:issue:`5764`). - For build summary tooltip, truncate very long step names or build status strings, enable auto positioning of tooltip, and improve text alignment. Also, add build summary tooltip to masters page and builds tables. - Fixed crash when using renderable locks with latent workers that may have incompatible builds (:issue:`5757`). - Improved REST API to use username or full name of a logged in user when email is empty. - Worked around a bug in Python's urllib which caused Python clients not to accept basic authentication headers (:issue:`5743`) - Fixed crash in ``BuildStartEndStatusGenerator`` when tags filter is setup (:issue:`5766`). - Added missing ``MessageFormatterEmpty``, ``MessageFormatterFunction``, ``MessageFormatterMissingWorker``, and ``MessageFormatterRenderable`` to ``buildbot.reporters`` namespace Improved Documentation ---------------------- - Fix services config for IRC in tour. Deprecations and Removals ------------------------- - Added deprecation messages to the following members of ``buildbot.process.buildstep`` module that have been deprecated in Buildbot 0.8.9: - ``RemoteCommand`` - ``LoggedRemoteCommand`` - ``RemoteShellCommand`` - ``LogObserver`` - ``LogLineObserver`` - ``OutputProgressObserver`` Buildbot ``2.10.0`` ( ``2021-01-02`` ) ====================================== Highlights ---------- This is the last release in 2.x series. Only 2.10.y bugfix releases will follow. Upgrading existing Buildbot instances to 3.x will require an upgrade to 2.10.y first and resolving all deprecation warnings. Please see :ref:`the documentation on upgrading to 3.0 <3.0_Upgrading>` for more information. 
Please submit bug reports for any issues found in new functionality that replaces deprecated functionality to be removed in Buildbot 3.0. These bugs will be fixed with extra priority in 2.10.y bugfix releases. Bug fixes --------- - Fixed a bug that caused builds running on latent workers to become unstoppable when an attempt was made to stop them while the latent worker was being substantiated (:issue:`5136`). - Fixed a bug that caused the buildmaster to be unable to restart if a latent worker was previously reconfigured during its substantiation. - Fixed handling of very long lines in the logs during Buildbot startup (:issue:`5706`). - Fixed a bug which prevented polling change sources derived from :py:class:`~buildbot.changes.base.ReconfigurablePollingChangeSource` from working correctly with `/change_hook/poller` (:issue:`5727`) Improved Documentation ---------------------- - Corrected the formatting for the code sample in the Docker Tutorial's Multi-master section. - Improved the readability of the documentation by conserving horizontal space. - Improved the introduction and concepts parts of the documentation. Features -------- - Added build summary tooltip for build bubbles in grid and console views (:issue:`4733`). - Added support for custom HTTP headers to ``HttpStatusPush`` reporter (:issue:`5398`). - Implemented ``MessageFormatterFunction`` that creates build report text or json by processing full build dictionary. - Implemented ``MessageFormatterRenderable`` that creates build report text by rendering build properties onto a renderable. - Implemented ``BuildStartEndStatusGenerator`` which ensures that a report is generated for either both build start and end events or neither of them. 
- The ``BitbucketServerCoreAPIStatusPush``, ``BitbucketServerStatusPush``, ``BitbucketStatusPush``, ``GerritVerifyStatusPush``, ``GitHubStatusPush``, ``GitHubCommentPush``, ``GitLabStatusPush`` and ``HttpStatusPush`` reporters now support report generators via the ``generators`` argument. - Implemented support for remote submodules when cloning a Git repository. Deprecations and Removals ------------------------- - The following arguments of ``BitbucketServerCoreAPIStatusPush``, ``BitbucketServerStatusPush``, ``GerritVerifyStatusPush``, ``GitHubStatusPush``, ``GitHubCommentPush`` and ``GitLabStatusPush`` reporters have been deprecated in favor of the list of report generators provided via the ``generators`` argument: ``startDescription``, ``endDescription``, ``builders``. - The following arguments of ``BitbucketStatusPush`` reporter have been deprecated in favor of the list of report generators provided via the ``generators`` argument: ``builders``. - The following arguments of ``HttpStatusPush`` reporter have been deprecated in favor of the list of report generators provided via the ``generators`` argument: ``format_fn``, ``builders``, ``wantProperties``, ``wantSteps``, ``wantPreviousBuild``, ``wantLogs``. - ``HipChatStatusPush`` has been deprecated because the public version of hipchat has been shut down. This reporter will be removed in Buildbot 3.0 unless there is someone who will upgrade the reporter to the new internal APIs present in Buildbot 3.0. - Support for passing paths to template files for rendering in message formatters has been deprecated. - Buildbot now requires at least the version 0.13 of sqlalchemy-migrate (:issue:`5669`). - The ``logfile`` argument of ``ShellArg`` has been deprecated (:issue:`3771`). Buildbot ``2.9.4`` ( ``2020-12-26`` ) ===================================== Bug fixes --------- - Fixed spam messages to stdout when renderable operators were being used. 
- Fixed handling of very long lines in the logs during Buildbot startup (:issue:`5706`). - Fixed logging of error message to ``twistd.log`` in case of old git and ``progress`` option being enabled. Deprecations and Removals ------------------------- - Removed setup of unused ``webstatus`` feature of autobahn. Buildbot ``2.9.3`` ( ``2020-12-15`` ) ===================================== Bug fixes --------- - Fixed extraneous warnings due to deprecation of ``buildbot.status`` module even when it's not used (:issue:`5693`). - The topbar zoom buttons are now cleared when leaving waterfall view. - The waterfall is now re-rendered upon change to masters. Buildbot ``2.9.2`` ( ``2020-12-08`` ) ===================================== Bug fixes --------- - Fixed the profile menu wrapping because the avatar shows more often and hiding the profile name was not kept in sync. - Reverted too early deprecation of the ``format_fn``, ``builders``, ``wantProperties``, ``wantSteps``, ``wantPreviousBuild``, ``wantLogs`` arguments of ``HttpStatusPush``. - Reverted accidental too early migration of ``MasterShellCommand`` and HTTP steps to new style (:issue:`5674`). Buildbot ``2.9.1`` ( ``2020-12-05`` ) ===================================== Bug fixes --------- - Fixed ``checkConfig`` failures in ``GitHubStatusPush`` and ``GitLabStatusPush`` (:issue:`5664`). - Fixed incorrect deprecation notice for the ``builders`` argument of ``GitLabStatusPush``. Buildbot ``2.9.0`` ( ``2020-12-04`` ) ===================================== Bug fixes --------- - Fixed a bug preventing the ``timeout=None`` parameter of CopyDirectory step from having effect (:issue:`3032`). - Fixed a bug in ``GitHubStatusPush`` that would cause silent failures for builders that specified multiple codebases. - Fixed display refresh of breadcrumb and topbar contextual action buttons (:issue:`5549`) - Throwing an exception out of a log observer while processing logs will now correctly fail the step in the case of new style steps. 
- Fixed an issue where ``git fetch`` would break on tag changes by adding the ``-f`` option. This could previously be handled by manually specifying ``clobberOnFailure``, but as that is rather heavy handed and off by default, this new default functionality will keep Buildbot in sync with the repository it is fetching from. - Fixed :py:class:`~GitHubStatusPush` logging an error when triggered by the NightlyScheduler - Fixed GitHub webhook event handler when no token has been set - Fixed :py:class:`~HashiCorpVaultSecretProvider` reading secrets attributes, when they are not named ``value`` - Fixed :py:class:`~buildbot.changes.HgPoller` misuse of ``hg heads -r `` to ``hg heads `` because ``-r`` option shows heads that may not be on the wanted branch. - Fixed inconsistent REST api, buildid vs build_number, :issue:`3427` - Fixed permission denied in ``rmtree()`` usage in ``PrivateTemporaryDirectory`` on Windows - Fixed AssertionError when calling try client with ``--dryrun`` option (:issue:`5618`). - Fixed issue with known hosts not working when using git with a version less than 2.3.0 - ``ForceScheduler`` now gets Responsible Users from owner property (:issue:`3476`) - Added support for ``refs/pull/###/head`` ref for fetching the issue ID in the GitHub reporter instead of always expecting ``refs/pull/###/merge``. - Fixed Github v4 API URL - Fixed ``show_old_builders`` to have expected effects in the waterfall view. - Latent workers no longer reuse the started worker when it's incompatible with the requested build. - Fixed handling of submission of non-decoded ``bytes`` logs in new style steps. - Removed usage of `distutils.LooseVersion` is favor of `packaging.version` - Updated :py:class:`OpenstackLatentWorker` to use checkConfig/reconfigService structure. - Fixed :py:class:`OpenstackLatentWorker` to use correct method when listing images. Updated :py:class:`OpenstackLatentWorker` to support renderable ``flavor``, ``nova_args`` and ``meta``. 
- Fixed support of renderables for ``p4base`` and ``p4branch`` arguments of the P4 step.
- Added the option to hide sensitive HTTP header values from the log in :py:class:`~buildbot.steps.http.HTTPStep`. - It is now possible to set ``urlText`` on a url linked to a ``MultipleFileUpload`` step. - Use ``os_auth_args`` to pass in authentication for :py:class:`OpenstackLatentWorker`. - ``DebPbuilder``, ``DebCowbuilder``, ``UbuPbuilder`` and ``UbuCowbuilder`` now support renderables for the step parameters. - A new report generator API has been implemented to abstract generation of various reports that are then sent via the reporters. The ``BitbucketServerPRCommentPush``, ``MailNotifier``, ``PushjetNotifier`` and ``PushoverNotifier`` support this new API via their new ``generators`` parameter. - Added rules for Bitbucket to default revlink helpers. - Added counts of the statuses of the triggered builds to the summary of trigger steps - The worker preparation step now shows the worker name. Deprecations and Removals ------------------------- - ``buildbot.test.fake.httpclientservice.HttpClientService.getFakeService()`` has been deprecated. Use ``getService`` method of the same class. - The ``MTR`` step has been deprecated due to migration to new style steps and the build result APIs. The lack of proper unit tests made it too time-consuming to migrate this step along with other steps. Contributors are welcome to step in, migrate this step and add a proper test suite so that this situation never happens again. - Many steps have been migrated to new style from old style. This only affects users who use steps as base classes for their own steps. New style steps provide a completely different set of functions that may be overridden. Direct instantiation of step classes is not affected. Old and new style steps work exactly the same in that case and users don't need to do anything. The old-style steps have been deprecated since Buildbot v0.9.0 released in October 2016. 
The support for old-style steps will be removed entirely Buildbot v3.0.0 which will be released in near future. Users are advised to upgrade their custom steps to new-style steps as soon as possible. A gradual migration path is provided for steps that are likely to be used as base classes. Users need to inherit from ``NewStyle`` class and convert all overridden APIs to use new-style step APIs. The old-style ```` classes will be provided until Buildbot v3.0.0 release. In Buildbot v3.0.0 ```` will refer to new-style steps and will be equivalent to ``NewStyle``. ``NewStyle`` aliases will be removed in Buildbot v3.2.0. The list of old-style steps that have new-style equivalents for gradual migration is as follows: - ``Configure`` (new-style equivalent is ``ConfigureNewStyle``) - ``Compile`` (new-style equivalent is ``CompileNewStyle``) - ``HTTPStep`` (new-style equivalent is ``HTTPStepNewStyle``) - ``GET``, ``PUT``, ``POST``, ``DELETE``, ``HEAD``, ``OPTIONS`` (new-style equivalent is ``GETNewStyle``, ``PUTNewStyle``, ``POSTNewStyle``, ``DELETENewStyle``, ``HEADNewStyle``, ``OPTIONSNewStyle``) - ``MasterShellCommand`` (new-style equivalent is ``MasterShellCommandNewStyle``) - ``ShellCommand`` (new-style equivalent is ``ShellCommandNewStyle``) - ``SetPropertyFromCommand`` (new-style equivalent is ``SetPropertyFromCommandNewStyle``) - ``WarningCountingShellCommand`` (new-style equivalent is ``WarningCountingShellCommandNewStyle``) - ``Test`` (new-style equivalent is ``TestNewStyle``) The list of old-style steps that have been converted to new style without a gradual migration path is as follows: - ``BuildEPYDoc`` - ``CopyDirectory`` - ``DebLintian`` - ``DebPbuilder`` - ``DirectoryUpload`` - ``FileDownload`` - ``FileExists`` - ``FileUpload`` - ``HLint`` - ``JsonPropertiesDownload`` - ``JsonStringDownload`` - ``LogRenderable`` - ``MakeDirectory`` - ``MaxQ`` - ``Mock`` - ``MockBuildSRPM`` - ``MsBuild``, ``MsBuild4``, ``MsBuild12``, ``MsBuild14``, ``MsBuild141`` - 
``MultipleFileUpload`` - ``PerlModuleTest`` - ``PyFlakes`` - ``PyLint`` - ``RemoveDirectory`` - ``RemovePYCs`` - ``RpmLint`` - ``RpmBuild`` - ``SetPropertiesFromEnv`` - ``Sphinx`` - ``StringDownload`` - ``TreeSize`` - ``Trial`` - ``VC6``, ``VC7``, ``VC8``, ``VC9``, ``VC10``, ``VC11``, ``VC12``, ``VC14``, ``VC141`` - ``VS2003``, ``VS2005``, ``VS2008``, ``VS2010`, ``VS2012``, ``VS2013``, ``VS2015``, ``VS2017`` Additionally, all source steps have been migrated to new style without a gradual migration path. Ability to be used as base classes was not documented and thus is considered unsupported. Please submit any custom steps to Buildbot for inclusion into the main tree to reduce maintenance burden. Additionally, bugs can be submitted to expose needed APIs publicly for which a migration path will be provided in the future. The list of old-style source steps that have been converted to new style is as follows: - ``Bzr`` - ``CVS`` - ``Darcs`` - ``Gerrit`` - ``Git`` - ``GitCommit`` - ``GitLab`` - ``GitPush`` - ``GitTag`` - ``Monotone`` - ``Mercurial`` - ``P4`` - ``Repo`` - ``Source`` - ``SVN`` - The undocumented and broken RpmSpec step has been removed. - The usage of certain parameters have been deprecated in ``BitbucketServerPRCommentPush``, ``MailNotifier``, ``PushjetNotifier`` and ``PushoverNotifier`` reporters. They have been replaced by the ``generators`` parameter. The support for the deprecated parameters will be removed in Buildbot v3.0. The list of deprecated parameters is as follows: - ``mode`` - ``tags`` - ``builders`` - ``buildSetSummary`` - ``messageFormatter`` - ``subject`` - ``addLogs`` - ``addPatch`` - ``schedulers`` - ``branches`` - ``watchedWorkers`` - ``messageFormatterMissingWorker`` The undocumented ``NotifierBase`` class has been renamed to ``ReporterBase``. The undocumented ``HttpStatusPushBase`` class has been deprecated. Please use ``ReporterBase`` directly. The ``send`` method of the reporters based on ``HttpStatusPushBase`` has been deprecated. 
This affects only users who implemented custom reporters that directly or indirectly derive from ``HttpStatusPushBase``. Please use ``sendMessage`` as the replacement. The following reporters have been affected: - ``HttpStatusPush`` - ``BitbucketStatusPush`` - ``BitbucketServerStatusPush`` - ``BitbucketServerCoreAPIStatusPush`` - ``GerritVerifyStatusPush`` - ``GitHubStatusPush`` - ``GitLabStatusPush`` - ``HipChatStatusPush`` - ``ZulipStatusPush`` - Buildbot now requires SQLAlchemy 1.2.0 or newer. - Deprecation warnings have been added to the ``buildbot.status`` module. It has been deprecated in documentation since v0.9.0. - ``buildbot.interfaces.WorkerTooOldError`` is deprecated in favour of ``buildbot.interfaces.WorkerSetupError`` - The ``worker_transition`` module has been removed. - The buildbot worker Docker image has been updated to Ubuntu 20.04. Buildbot ``2.8.4`` ( ``2020-08-29`` ) ===================================== Bug fixes --------- - Fix 100% CPU on large installations when using the changes API (:issue:`5504`) - Work around incomplete support for codebases in ``GerritChangeSource`` (:issue:`5190`). This avoids an internal assertion when the configuration file does not specify any codebases. - Add missing VS2017 entry points. Buildbot ``2.8.3`` ( ``2020-08-22`` ) ===================================== Bug fixes --------- - Fix Docker image building for the master which failed due to mismatching versions of Alpine (:issue:`5469`). Buildbot ``2.8.2`` ( ``2020-06-14`` ) ===================================== Bug fixes --------- - Fix crash in Buildbot Windows service startup code (:issue:`5344`) Buildbot ``2.8.1`` ( ``2020-06-06`` ) ===================================== Bug fixes --------- - Fix source distribution missing required buildbot.test.fakedb module for unit tests. 
- Fix crash in trigger step when renderables are used for scheduler names (:issue:`5312`) Buildbot ``2.8.0`` ( ``2020-05-27`` ) ===================================== Bug fixes --------- - Fix :py:class:`GitHubEventHandler` to include files in `Change` that comes from a github PR (:issue:`5294`) - Updated the `Docker` container `buildbot-master` to `Alpine 3.11` to fix segmentation faults caused by an old version of `musl` - Base64 encoding logs and attachments sent via email so emails conform to RFC 5322 2.1.1 - Handling the case where the BitbucketStatusPush return code is not 200 - When cancelling a buildrequest, the reason field is now correctly transmitted all the way to the cancelled step. - Fix Cache-control header to be compliant with RFC 7234 (:issue:`5220`) - Fix :py:class:`GerritEventLogPoller` class to be declared as entry_point (can be used in master.cfg file) - Git poller: add `--ignore-missing` argument to `git log` call to avoid `fatal: bad object` errors - Log watcher looks for the "tail" utility in the right location on Haiku OS. - Add limit and filtering support for the changes data API as described in :issue:`5207` Improved Documentation ---------------------- - Make docs build with the latest sphinx and improve rendering of the example HTML file for custom dashboard - Make docs build with Sphinx 3 and fix some typos and incorrect Python module declarations Features -------- - :class:`Property` and :class:`Interpolate` objects can now be compared. This will generate a renderable that will be evaluated at runtime. see :ref:`RenderableComparison`. 
- Added argument `count` to lock access to allow a lock to consume a variable amount of units - Added arguments `pollRandomDelayMin` and `pollRandomDelayMax` to `HgPoller`, `GitPoller`, `P4Poller`, `SvnPoller` to spread the polling load Deprecations and Removals ------------------------- - Removed `_skipChecks` from `LockAccess` as it's obsolete Buildbot ``2.7.0`` ( ``2020-02-27`` ) ===================================== Bug fixes --------- - Command `buildbot-worker create-worker` now supports ipv6 address for buildmaster connection. - Fix crash in latent worker stopService() when the worker is insubstantiating (:issue:`4935`). - Fix race condition between latent worker's stopService() and substantiate(). - :class:`GitHubAuth` is now using `Authorization` headers instead of `access_token` query parameter, as the latter was deprecated by Github. (:issue:`5188`) - ``jQuery`` and ``$`` are available again as a global variable for UI plugins (:issue:`5161`). - Latent workers will no longer wait for builds to finish when worker is reconfigured. The builds will still be retried on other workers and the operators will not need to potentially wait multiple hours for builds to finish. - p4poller will no longer override Perforce login ticket handling behavior which fixes random crashes (:issue:`5042`). Improved Documentation ---------------------- - The procedures of upgrading to Buildbot 1.x and 2.x have been clarified in separate documents. - The layout of the specification of the REST API has been improved. - Updated newsfragments README.txt to no longer refer to renamed class :py:class:`~buildbot.reporters.http.HttpStatusBase` - The documentation now uses the read-the-docs theme which is more readable. 
Features -------- - A new www badges style was added: ``badgeio`` - :py:class:`~buildbot.reporters.http.HttpStatusPushBase` now allows you to skip unicode to bytes encoding while pushing data to server - New ``buildbot-worker create-worker --delete-leftover-dirs`` option to automatically remove obsolete builder directories Buildbot ``2.6.0`` ( ``2020-01-21`` ) ===================================== Bug fixes --------- - Fix a potential deadlock when interrupting a step that is waiting for a lock to become available. - Prepare unique hgpoller name when using multiple hgpoller for multiple branches (:issue:`5004`) - Fix hgpoller crash when force pushing a branch (:issue:`4876`) - Fix mail recipient formatting to make sure address comments are separately escaped instead of escaping the whole To: or CC: header, which is not RFC compliant. - Master side keep-alive requests are now repeated instead of being single-shot (:issue:`3630`). - The message queues will now wait until the delivered callbacks are fully completed during shutdown. - Fix encoding errors during P4Poller ticket parsing :issue:`5148`. - Remove server header from HTTP response served by the web component. - Fix multiple race conditions in Telegram reporter that were visible in tests. - The Telegram reporter will now wait until in-progress polls finish during shutdown. - Improve reliability of timed scheduler. - transfer steps now correctly report errors from workers :issue:`5058` - Warn if Buildbot title in the configuration is too long and will be ignored. - Worker will now wait for any pending keep-alive requests to finish leaving them in indeterminate state during shutdown. Improved Documentation ---------------------- - Mention that QueueRef.stopConsuming() may return a Deferred. Features -------- - Add the parameter --use-tls to `buildbot-worker create-worker` to automatically enable TLS in the connection string - Gerrit reporter now passes a tag for versions that support it. 
This enables filtering out buildbot's messages. - :py:class:`GerritEventLogPoller` and :py:class:`GerritChangeSource` coordinate so as not to generate duplicate changes, resolves :issue:`4786` - Web front end now allows you to configure the default landing page with `c['www']['default_page'] = 'name-of-page'`. - The new option dumpMailsToLog of MailNotifier allows dumping formatted mails to the log before sending. - :bb:cfg:`workers` will now attempt to read ``/etc/os-release`` and store its contents into worker info as ``os_`` items. Add new interpolation ``worker`` that can be used for accessing worker info items. Buildbot ``2.5.1`` ( ``2019-11-24`` ) ===================================== Bug fixes --------- - Updates supported browser list so that Ubuntu Chromium will not always be flagged as out of date. - Fixed IRC notification color of cancelled builds. - Updated url in description of worker service for Windows (no functionality impact). - Updated templates of www-badges to support additional padding configuration (:issue:`5079`) - Fix issue with custom_templates loading path (:issue:`5035`) - Fix url display when a step does not contain any logs (:issue:`5047`) Buildbot ``2.5.0`` ( ``2019-10-17`` ) ===================================== Bug fixes --------- - Fix crash when reconfiguring changed workers that have new builders assigned to them (:issue:`4757`, :issue:`5027`). - DockerLatentWorker: Allow to bind the same volume twice into a worker's container, Buildbot now requires 'docker-py' (nowadays 'docker') version 1.2.3+ from 2015. - IRC bot can have authz configured to create or stop builds (:issue:`2957`). - Fix javascript exception with grid view tag filtering (:issue:`4801`) Improved Documentation ---------------------- - Changed PluginList link from trac wiki directly to the GitHub wiki. Features -------- - Created a `TelegramBot` for notification and control through Telegram messaging app. 
- Added support for environment variable P4CONFIG to class ``P4Source`` - Allow to define behavior for GitCommit when there is nothing to commit. - Add support for revision links to Mercurial poller - Support recursive matching ('**') in MultipleFileUpload when `glob=True` (requires python3.5+ on the worker) Buildbot ``2.4.1`` ( ``2019-09-11`` ) ===================================== Bug fixes --------- - allow committer of a change to be null for new setups (:issue:`4987`) - custom_templates are now working again. - Locks will no longer allow being acquired more times than the `maxCount` parameter if this parameter is changed during master reconfiguration. Features -------- - Improve log cleaning performance by using delete with join on supported databases. - Hiding/showing of inactive builders is now possible in Waterfall view. Buildbot ``2.4.0`` ( ``2019-08-18`` ) ===================================== Highlights ---------- Database upgrade may take a while on larger instances on this release due to newly added index. Bug fixes --------- - Add an index to ``steps.started_at`` to boost expensive SQL queries. - Fix handling of the ``refs_changed`` event in the BitBucket Server web hook. - Fix errors when disconnecting a libvirt worker (:issue:`4844`). - Fix Bitbucket Cloud hook crash due to changes in their API (:issue:`4873`). - Fix ``GerritEventLogPoller`` was using the wrong date format. - Fix janitor Exception when there is no logchunk to delete. - Reduced the number of SQL queries triggered by ``getPrevSuccessfulBuild()`` by up to 100. - :py:class:`~buildbot.util.git.GitStepMixin`: Prevent builders from corrupting temporary ssh data path by using builder name as part of the path - :py:class:`~buildbot.util.git.GitTag`: Allow ``tagName`` to be a renderable. - Fix Github error reporting to handle exceptions that happen before the HTTP request is sent. 
- :py:class:`~buildbot.changes.gitpoller.GitPoller`: Trigger on pushes with no commits when the new revision is not the tip of another branch. - :py:class:`~buildbot.steps.source.git.Git`: Fix the invocation of ``git submodule foreach`` on cleaning. - Fix StatsService not correctly clearing old consumers on reconfig. - Fix various errors in try client with Python 3 (:issue:`4765`). - Prevent accidental start of multiple force builds in web UI (:issue:`4823`). - The support for proxying Buildbot frontend to another Buildbot instance during development has been fixed. This feature has been broken since v2.3.0, and is now completely re-implemented for best performance, ease of use and maintainability. Improved Documentation ---------------------- - Document why some listed icons may not work out-of-the-box when building a custom dashboard (:issue:`4939`). - Improve Vault secrets management documentation and examples. - Link the documentation of ``www.port`` to the capabilities of ``twisted.application.strports``. - Move the documentation on how to submit PRs out of the trac wiki to the documentation shipped with Buildbot, update and enhance it. Features -------- - Update buildbot worker image to Ubuntu 18.04 (:issue:`4928`). - :py:class:`~buildbot.worker.docker.DockerLatentWorker`: Added support for docker build contexts, ``buildargs``, and specifying controlling context. - The :py:class:`~buildbot.changes.gerritchangesource.GerritChangeFilter` and :py:class:`~buildbot.changes.gerritchangesource.GerritEventLogPoller` now populate the ``files`` attribute of emitted changes when the ``get_files`` argument is true. Enabling this feature triggers an additional HTTP request or SSH command to the Gerrit server for every emitted change. - Buildbot now warns users who connect using unsupported browsers. - Boost janitor speed by using more efficient SQL queries. - Scheduler properties are now renderable. 
- :py:class:`~buildbot.steps.python.Sphinx`: Added ``strict_warnings`` option to fail on warnings. - UI now shows a paginated view for trigger step sub builds. Deprecations and Removals ------------------------- - Support for older browsers that were not working since 2.3.0 has been removed due to technical limitations. Notably, Internet Explorer 11 is no longer supported. Currently supported browsers are Chrome 56, Firefox 52, Edge 13 and Safari 10, newer versions of these browsers and their compatible derivatives. This set of browsers covers 98% of users of buildbot.net. Buildbot ``2.3.1`` ( ``2019-05-22`` ) ===================================== Bug fixes --------- - Fix vulnerability in OAuth where user-submitted authorization token was used for authentication (https://github.com/buildbot/buildbot/wiki/OAuth-vulnerability-in-using-submitted-authorization-token-for-authentication) Thanks to Phillip Kuhrt for reporting it. Buildbot ``2.3.0`` ( ``2019-05-06`` ) ===================================== Highlights ---------- - Support for older browsers has been hopefully temporarily broken due to frontend changes in progress. Notably, Internet Explorer 11 is not supported in this release. Currently supported browsers are Chrome 56, Firefox 52, Edge 13 and Safari 10, newer versions of these browsers and their compatible derivatives. This set of browsers covers 98% of users of buildbot.net. Bug fixes --------- - Fixed :bb:step:`Git` to clean the repository after the checkout when submodules are enabled. Previously this action could lead to untracked module directories after changing branches. - Latent workers with negative `build_wait_timeout` will be shutdown on master shutdown. - Latent worker will now wait until `start_instance()` before starting `stop_instance()` or vice-versa. Master will wait for these functions to finish during shutdown. - Latent worker will now correctly handle synchronous exception from the backend worker driver. 
- Fixed a potential error during database migration when upgrading to versions >=2.0 (:issue:`4711`). Deprecations and Removals ------------------------- - The implementation language of the Buildbot web frontend has been changed from CoffeeScript to JavaScript. The documentation has not been updated yet, as we plan to transition to TypeScript. In the transitory period support for some browsers, notably IE 11 has been dropped. We hope to bring support for older browsers back once the transitory period is over. - The support for building Buildbot using npm as package manager has been removed. Please use yarn as a replacement that is used by Buildbot developers. Buildbot ``2.2.0`` ( ``2019-04-07`` ) ===================================== Bug fixes --------- - Fix passing the verify and debug parameters for the HttpStatusPush reporter - The builder page UI now correctly shows the list of owners for each build. - Fixed bug with tilde in git repo url on Python 3.7 (:issue:`4639`). - Fix secret leak when non-interpolated secret was passed to a step (:issue:`4007`) Features -------- - Added new :bb:step:`GitCommit` step to perform git commit operation - Added new :bb:step:`GitTag` step to perform git tag operation - HgPoller now supports bookmarks in addition to branches. - Buildbot can now monitor multiple branches in a Mercurial repository. - :py:class:`~buildbot.www.oauth2.OAuth2Auth` has been adapted to support :ref:`Secret`. - Buildbot can now get secrets from the unix password store by `zx2c4` (https://www.passwordstore.org/). - Added a ``basename`` property to the Github pull request webhook handler. - The GitHub change hook secret can now be rendered. - Each build now gets a preparation step which counts the time spent starting latent worker. 
- Support known_hosts file format as ``sshKnownHosts`` parameter in SSH-related operations (:issue:`4681`) Buildbot ``2.1.0`` ( ``2019-03-09`` ) ===================================== Highlights ---------- - Worker to Master protocol can now be encrypted via TLS. Bug fixes --------- - To avoid database corruption, the ``upgrade-master`` command now ignores all signals except ``SIGKILL``. It cannot be interrupted with ``ctrl-c`` (:issue:`4600`). - Fixed incorrect tracking of latent worker states that could sometimes result in duplicate ``stop_instance`` calls and so on. - Fixed a race condition that could manifest in cancelled substantiations if builds were created during insubstantiation of a latent worker. - Perforce CLI Rev. 2018.2/1751184 (2019/01/21) is now supported (:issue:`4574`). - Fix encoding issues with Forcescheduler parameters error management code. Improved Documentation ---------------------- - fix grammar mistakes and use Uppercase B for Buildbot Features -------- - :py:class:`~buildbot-worker.buildbot_worker.bot.Worker` now have `connection_string` kw-argument which can be used to connect to a master over TLS. - Adding 'expand_logs' option for LogPreview related settings. - Force schedulers buttons are now sorted by their name. (:issue:`4619`) - :bb:cfg:`workers` now have a new ``defaultProperties`` parameter. Buildbot ``2.0.1`` ( ``2019-02-06`` ) ===================================== Bug fixes --------- - Do not build universal python wheels now that Python 2 is not supported. - Print a warning discouraging users from stopping the database migration. Buildbot ``2.0.0`` ( ``2019-02-02`` ) ===================================== Deprecations and Removals ------------------------- - Removed support for Python <3.5 in the buildbot master code. Buildbot worker remains compatible with python2.7, and interoperability tests are run continuously. - APIs that are not documented in the official Buildbot documentation have been made private. 
Users of these undocumented APIs are encouraged to file bugs to get them exposed. - Removed support of old slave APIs from pre-0.9 days. Using old APIs may fail silently. To avoid weird errors when upgrading a Buildbot installation that may use old APIs, first upgrade to 1.8.0 and make sure there are no deprecated API warnings. - Remove deprecated default value handling of the ``keypair_name`` and ``security_name`` attributes of ``EC2LatentWorker``. - Support for ``Hyper.sh`` containers cloud provider has been removed as this service has shut down. Bug fixes --------- - Fix CRLF injection vulnerability with validating user provided redirect parameters (https://github.com/buildbot/buildbot/wiki/CRLF-injection-in-Buildbot-login-and-logout-redirect-code) Thanks to ``mik317`` and ``mariadb`` for reporting it. - Fix lockup during master shutdown when there's a build with unanswered ping from the worker and the TCP connection to worker is severed (:issue:`4575`). - Fix RemoteUserAuth.maybeAutoLogin consumes bytes object as str leading to TypeError during JSON serialization. (:issue:`4402`) - Various database integrity problems were fixed. Most notably, it is now possible to delete old changes without wiping all "child" changes in cascade (:issue:`4539`, :pull:`4536`). - The GitLab change hook secret is now rendered correctly. (:issue:`4118`). Features -------- - Identifiers can now contain UTF-8 characters which are not ASCII. This includes worker names, builder names, and step names. buildbot-3.4.0/master/docs/relnotes/index.rst000066400000000000000000000413461413250514000212440ustar00rootroot00000000000000Release Notes ~~~~~~~~~~~~~ .. Buildbot uses towncrier to manage its release notes. towncrier helps to avoid the need for rebase when several people work at the same time on the release notes files. Each PR should come with a file in the newsfragment directory .. 
towncrier release notes start Buildbot ``3.4.0`` ( ``2021-10-15`` ) ===================================== Bug fixes --------- - Database migrations are now handled using Alembic (1.6.0 or newer is required) (:issue:`5872`). - AMI for latent worker is now set before making spot request to enable dynamically setting AMIs for instantiating workers. - Fixed ``GitPoller`` fetch commands timing out on huge repositories - Fixed a bug that caused Gerrit review comments sometimes not to be reported. - Fixed a critical bug in the ``MsBuild141`` step (regression since Buildbot v2.8.0) (:issue:`6262`). - Implemented renderable support in secrets list of ``RemoveWorkerFileSecret``. - Fixed issues that prevented Buildbot from being used in Setuptools 58 and newer due to dependencies failing to build (:issue:`6222`). Improved Documentation ---------------------- - Fixed help text for ``buildbot create-master`` so it states that ``--db`` option is passed verbatim to ``master.cfg.sample`` instead of ``buildbot.tac``. - Added documentation of properties available in the formatting context that is presented to message formatters. Features -------- - MsBuild steps now handle correctly rebuilding or cleaning a specific project. Previously it could only be done on the entire solution. - Implemented support for controlling ``filter`` option of ``git clone``. - Optimized build property filtering in the database instead of in Python code. - Implemented support of ``SASL PLAIN`` authentication to ``IRC`` reporter. - The ``want_logs`` (previously ``wantLogs``) argument to message formatters will now imply ``wantSteps`` if selected. - Added information about log URLs to message formatter context. - Implemented a way to ask for only logs metadata (excluding content) in message formatters via ``want_logs`` and ``want_logs_content`` arguments. - Implemented support for specifying pre-processor defines sent to the compiler in the ``MsBuild`` steps. 
- Introduced ``HvacKvSecretProvider`` to allow working around flaws in ``HashiCorpVaultSecretProvider`` (:issue:`5903`). - Implemented support for proxying worker connection through a HTTP proxy. Deprecations and Removals ------------------------- - The ``wantLogs`` argument of message formatters has been deprecated. Please replace any uses with both ``want_logs`` and ``want_logs_content`` set to the same value. - The ``wantProperties`` and ``wantSteps`` arguments of message formatters have been renamed to ``want_properties`` and ``want_steps`` respectively. - Buildbot now requires SQLAlchemy 1.3.0 or newer. Buildbot ``3.3.0`` ( ``2021-07-31`` ) ===================================== Bug fixes --------- - Fixed support of SQLAlchemy v1.4 (:issue:`5992`). - Improved default build request collapsing functionality to take into account properties set by the scheduler and not collapse build requests if they differ (:issue:`4686`). - Fixed a race condition that would result in attempts to complete unclaimed buildrequests (:issue:`3762`). - Fixed a race condition in default buildrequest collapse function which resulted in two concurrently submitted build requests potentially being able to cancel each other (:issue:`4642`). - The ``comment-added`` event on Gerrit now produces the same branch as other events such as ``patchset-created``. - ``GerritChangeSource`` and ``GerritEventLogPoller`` will now produce change events with ``branch`` attribute that corresponds to the actual git branch on the repository. - Fixed handling of ``GitPoller`` state to not grow without bounds and eventually exceed the database field size. (:issue:`6100`) - Old browser warning banner is no longer shown for browsers that could not be identified (:issue:`5237`). - Fixed worker lock handling that caused max lock count to be ignored (:issue:`6132`). Features -------- - Buildbot can now be configured (via ``FailingBuildsetCanceller``) to cancel unfinished builds when a build in a buildset fails. 
- ``GitHubEventHandler`` can now configure authentication token via Secrets management for GitHub instances that do not allow anonymous access - Buildbot can now be configured (via ``OldBuildCanceller``) to cancel unfinished builds when branches on which they are running receive new commits. - Buildbot secret management can now be used to configure worker passwords. - Services can now be forced to reload their code via new ``canReconfigWithSibling`` API. Deprecations and Removals ------------------------- - ``changes.base.PollingChangeSource`` has been fully deprecated as internal uses of it were migrated to replacement APIs. Buildbot ``3.2.0`` ( ``2021-06-17`` ) ===================================== Bug fixes --------- - Fixed occasional ``InvalidSpotInstanceRequestID.NotFound`` errors when using spot instances on EC2. This could have lead to Buildbot launching zombie instances and not shutting them down. - Improved ``GitPoller`` behavior during reconfiguration to exit at earliest possible opportunity and thus reduce the delay that running ``GitPoller`` incurs for the reconfiguration. - The docker container for the master now fully builds the www packages. Previously they were downloaded from pypi which resulted in downloading whatever version was newest at the time (:issue:`4998`). - Implemented time out for master-side utility processes (e.g. ``git`` or ``hg``) which could break the respective version control poller potentially indefinitely upon hanging. - Fixed a regression in the ``reconfig`` script which would time out instead of printing error when configuration update was not successfully applied. - Improved buildbot restart behavior to restore the worker paused state (:issue:`6074`) - Fixed support for binary patch files in try client (:issue:`5933`) - Improved handling of unsubscription errors in WAMP which will no longer crash the unsubscribing component and instead just log an error. 
- Fixed a crash when a worker is disconnected from a running build that uses worker information for some of its properties (:issue:`5745`). Improved Documentation ---------------------- - Added documentation about installation Buildbot worker as Windows service. Features -------- - ``DebPbuilder`` now supports the ``--othermirror`` flag for including additional repositories - Implemented support for setting docker container's hostname - The libvirt latent worker will now wait for the VM to come online instead of disabling the worker during connection establishment process. The VM management connections are now pooled by URI. - Buildbot now sends metadata required to establish connection back to master to libvirt worker VMs. - ``LibVirtWorker`` will now setup libvirt metadata with details needed by the worker to connect back to master. - The docker container for the master has been switched to Debian. Additionally, buildbot is installed into a virtualenv there to reduce chances of conflicts with Python packages installed via ``dpkg``. - BitbucketStatusPush now has renderable build status key, name, and description. - Pausing a worker is a manual operation which the quarantine timer was overwriting. Worker paused state and quarantine state are now independent. (:issue:`5611`) - Reduce buildbot_worker wheel package size by 40% by dropping tests from package. Deprecations and Removals ------------------------- - The `connection` argument of the LibVirtWorker constructor has been deprecated along with the related `Connection` class. Use `uri` as replacement. - The ``*NewStyle`` build step aliases have been removed. Please use equivalent steps without the ``NewStyle`` suffix in the name. - Try client no longer supports protocol used by Buildbot older than v0.9. Buildbot ``3.1.1`` ( ``2021-04-28`` ) ===================================== Bug fixes --------- - Fix missing VERSION file in buildbot_worker wheel package (:issue:`5948`, :issue:`4464`). 
- Fixed error when attempting to specify ``ws_ping_interval`` configuration option (:issue:`5991`). Buildbot ``3.1.0`` ( ``2021-04-05`` ) ===================================== Bug fixes --------- - Fixed usage of invalid characters in temporary file names by git-related steps (:issue:`5949`) - Fixed parsing of URLs of the form https://api.bitbucket.org/2.0/repositories/OWNER/REPONAME in BitbucketStatusPush. These URLs are in the sourcestamps returned by the Bitbucket Cloud hook. - Brought back the old (pre v2.9.0) behavior of the ``FileDownload`` step to act more gracefully by returning ``FAILURE`` instead of raising an exception when the file doesn't exist on master. This makes use cases such as ``FileDownload(haltOnFailure=False)`` possible again. - Fixed issue with ``getNewestCompleteTime`` which was returning no completed builds, although it could. - Fixed the ``Git`` source step causing last active branch to point to wrong commits. This only affected the branch state in the local repository, the checked out code was correct. - Improved cleanup of any containers left running by ``OpenstackLatentWorker``. - Improved consistency of log messages produced by the reconfig script. Note that this output is not part of public API of Buildbot and may change at any time. - Improved error message when try client cannot create a build due to builder being not configured on master side. - Fixed exception when submitting builds via try jobdir client when the branch was not explicitly specified. - Fixed handling of secrets in nested folders by the vault provider. Features -------- - Implemented report generator for new build requests - Allow usage of Basic authentication to access GitHub API when looking for avatars - Added support for default Pylint message that was changed in v2.0. 
- Implemented support for configurable timeout in the reconfig script via new ``progress_timeout`` command-line parameter which determines how long it waits between subsequent progress updates in the logs before declaring a timeout. - Implemented ``GitDiffInfo`` step that would extract information about what code has been changed in a pull/merge request. - Add support for the ``--submodule`` option for the ``repo init`` command of the Repo source step. Deprecations and Removals ------------------------- - ``MessageFormatter`` will receive the actual builder name instead of ``whole buildset`` when used from ``BuildSetStatusGenerator``. Buildbot ``3.0.3`` ( ``2021-04-05`` ) ===================================== Bug fixes --------- - Fixed a race condition in log handling of ``RpmLint`` and ``WarningCountingShellCommand`` steps resulting in steps crashing occasionally. - Fixed incorrect state string of a finished buildstep being sent via message queue (:issue:`5906`). - Reduced flickering of build summary tooltip during mouseover of build numbers (:issue:`5930`). - Fixed missing data in Owners and Worker columns in changes and workers pages (:issue:`5888`, :issue:`5887`). - Fixed excessive debug logging in ``GerritEventLogPoller``. - Fixed regression in pending buildrequests UI where owner is not displayed anymore (:issue:`5940`). - Re-added support for ``lazylogfiles`` argument of ``ShellCommand`` that was available in old style steps. Buildbot ``3.0.2`` ( ``2021-03-16`` ) ===================================== Bug fixes --------- - Updated Buildbot requirements to specify sqlalchemy 1.4 and newer as not supported yet. Buildbot ``3.0.1`` ( ``2021-03-14`` ) ===================================== Bug fixes --------- - Fixed special character handling in avatar email URLs. - Fixed errors when an email address matches GitHub commits but the user is unknown to it. 
- Added missing report generators to the Buildbot plugin database (:issue:`5892`) - Fixed non-default mode support for ``BuildSetStatusGenerator``. Buildbot ``3.0.0`` ( ``2021-03-08`` ) ===================================== This release includes all changes up to Buildbot ``2.10.2``. Bug fixes --------- - Avatar caching is now working properly and size argument is now handled correctly. - Removed display of hidden steps in the build summary tooltip. - ``GitHubPullrequestPoller`` now supports secrets in its ``token`` argument (:issue:`4921`) - Plugin database will no longer issue warnings on load, but only when a particular entry is accessed. - SSH connections are now run with ``-o BatchMode=yes`` to prevent interactive prompts which may tie up a step, reporter or change source until it times out. Features -------- - ``BitbucketPullrequestPoller``, ``BitbucketCloudEventHandler``, ``BitbucketServerEventHandler`` were enhanced to save PR entries matching provided masks as build properties. - ``BitbucketPullrequestPoller`` has been enhanced to optionally authorize Bitbucket API. - Added `pullrequesturl` property to the following pollers and change hooks: ``BitbucketPullrequestPoller``, ``GitHubPullrequestPoller``, ``GitHubEventHandler``. This unifies all Bitbucket and GitHub pollers with the shared property interface. - AvatarGitHub class has been enhanced to handle avatar based on email requests and take size argument into account - Added support for Fossil user objects for use by the buildbot-fossil plugin. - A new ``www.ws_ping_interval`` configuration option was added to avoid websocket timeouts when using reverse proxies and CDNs (:issue:`4078`) Deprecations and Removals ------------------------- - Removed deprecated ``encoding`` argument to ``BitbucketPullrequestPoller``. - Removed deprecated support for constructing build steps from class and arguments in ``BuildFactory.addStep()``. - Removed support for deprecated ``db_poll_interval`` configuration setting. 
- Removed support for deprecated ``logHorizon``, ``eventHorizon`` and ``buildHorizon`` configuration settings. - Removed support for deprecated ``nextWorker`` function signature that accepts two parameters instead of three. - Removed deprecated ``status`` configuration setting. - ``LoggingBuildStep`` has been removed. - ``GET``, ``PUT``, ``POST``, ``DELETE``, ``HEAD``, ``OPTIONS`` steps now use new-style step implementation. - ``MasterShellCommand`` step now uses new-style step implementation. - ``Configure``, ``Compile``, ``ShellCommand``, ``SetPropertyFromCommand``, ``WarningCountingShellCommand``, ``Test`` steps now use new-style step implementation. - Removed support for old-style steps. - Python 3.5 is no longer supported for running Buildbot master. - The deprecated ``HipChatStatusPush`` reporter has been removed. - Removed support for the following deprecated parameters of ``HttpStatusPush`` reporter: ``format_fn``, ``builders``, ``wantProperties``, ``wantSteps``, ``wantPreviousBuild``, ``wantLogs``, ``user``, ``password``. - Removed support for the following deprecated parameters of ``BitbucketStatusPush`` reporter: ``builders``, ``wantProperties``, ``wantSteps``, ``wantPreviousBuild``, ``wantLogs``. - Removed support for the following deprecated parameters of ``BitbucketServerStatusPush``, ``BitbucketServerCoreAPIStatusPush``, ``GerritVerifyStatusPush``, ``GitHubStatusPush``, ``GitHubCommentPush`` and ``GitLabStatusPush`` reporters: ``startDescription``, ``endDescription``, ``builders``, ``wantProperties``, ``wantSteps``, ``wantPreviousBuild``, ``wantLogs``. - Removed support for the following deprecated parameters of ``BitbucketServerPRCommentPush``, ``MailNotifier``, ``PushjetNotifier`` and ``PushoverNotifier`` reporters: ``subject``, ``mode``, ``builders``, ``tags``, ``schedulers``, ``branches``, ``buildSetSummary``, ``messageFormatter``, ``watchedWorkers``, ``messageFormatterMissingWorker``. 
- Removed support for the following deprecated parameters of ``MessageFormatter`` report formatter: ``template_name``. - The deprecated ``send()`` function that can be overridden by custom reporters has been removed. - Removed deprecated support for ``template_filename``, ``template_dir`` and ``subject_filename`` configuration parameters of message formatters. - The deprecated ``buildbot.status`` module has been removed. - The deprecated ``MTR`` step has been removed. Contributors are welcome to step in, migrate this step to newer APIs and add a proper test suite to restore this step in Buildbot. - Removed deprecated ``buildbot.test.fake.httpclientservice.HttpClientService.getFakeService()`` function. - Removed deprecated support for ``block_device_map`` argument of EC2LatentWorker being not a list. - Removed support for deprecated builder categories which have been replaced by tags. Older Release Notes ~~~~~~~~~~~~~~~~~~~ .. toctree:: :maxdepth: 1 2.x 1.x 0.9.2-0.9.15 0.9.1 0.9.0 0.9.0rc4 0.9.0rc3 0.9.0rc2 0.9.0rc1 0.9.0b9 0.9.0b8 0.9.0b7 0.9.0b6 0.9.0b5 0.9.0b4 0.9.0b3 0.9.0b2 0.9.0b1 0.8.12 0.8.10 0.8.9 0.8.8 0.8.7 0.8.6 Note that Buildbot-0.8.11 was never released. 
buildbot-3.4.0/master/docs/spelling_wordlist.txt000066400000000000000000000212511413250514000220460ustar00rootroot00000000000000accessor acknowledgements addCallbacks admin admins afterwards aiohttp al Allura AMIs amongst analyse angularjs angularJS ansible apache api apis apiVersion app apparmor appart approle apps ar aren arg args argv ascii AssertionFailed asymmetrics async Async Atlassian attrs auth Auth authenticator authenticators authorisation authUri authz autoconf autodetected automake autopull autoreconf backend backends Backends backoff backported badgeio balancer balancers basedir basename basestring Basestring basetgz baseURL Baz bb bbcollections bbproto bbworker bdist behaviour Bitbucket Bitbuckets bitrot blamelist Bleh blocksizes boolean bootable Borkhuis botmaster Botmaster botname boto br brdict brdicts Brini Browserid bsdict bsdicts bsid bugfix buildbot Buildbot buildCacheSize builddir buildDict builderid builderids buildername builderName buildid buildmaster Buildmaster buildmasters buildnumber buildrequest buildrequests buildset Buildset buildsetids buildsets Buildsets buildslave Buildslave buildslaves buildstep buildsteps buildsummary buildsystem buildtep builtin bulidslave bytestring bytestrings bzip bzr Bzr bzrignore CaaS cacert callables Callables called callee camelCase canAcquireLocks cancelled canceller cancelling candidated canonicalize canStartBuild canStartWithWorkerForBuilder catchup Certifi cd cfg changehook changeid changeID changeids changelist changepw changeroot changeset changesource changesourceid changesources Changesources chdict checkbox checkconfig checkConfig checkin childs choco chroot chroots ci cleanupdb clickable Clickable cmake CMake cnf codebase Codebase codebaseGenerator codebases Codecov coffeelint Coffeelint coffeescript coffeeScript collapser collapseRequests commandId commandline committer comparators conditionalize config Config configs Configs configurability configurator configurators contrib Contrib 
coolproject coredump cowbuilder cp cpdir cppcheck Cppcheck CPython Cred creds cron Cron croniter cronjob crontab cryptographically css csv customizability customizable cvs CVS cvsmodule cvsroot CVSROOT cvsutil Cygwin daemonization daemonize daemonizing Darcs dataService dataset datastream datatype datetime dateutil Dateutil davide de deafault Debian debounce debounced debouncer debounces decodable decodeRC deduplicated deduplication deepcopy deferreds Deferreds demux deprecations deserialization deterministically dev Dict dicts didn didn diff diffinfo diffs dirname dirwatcher disambiguated discoverable distro distros Distutils divmod djmitche dn DNotify dns dockerfile Dockerfile dockerfiles docstring docstrings doesn dom downloadFile dpkg dropdown dumpMailsToLog durations dustin ec eg Eg emacs emption encodeString Encodings endsWith entityType entrypoint ep eq errback errbacks errorCb et eventPathPatterns explorable extensibility facto failover failsafe fakedb fallback fallbacks favicon favour fileIsImportant filename Filename filenames Filenames filesystem Filesystem Firefox fn fo foo forcescheduler forceschedulers formatter formatters formular fqdn framboozle Framboozle framboozler freenode Freenode frontend functor ge gedit Gerrit getChoices getfqdn getProperties getRecipients getter getters GiB gibibytes github GitHub GitLab Gitorious Gitpoller glMenuProvider google gpg GPL GraphQL Gravatar groupPrefix Gruntfile gtk guanlecoja Guanlecoja gz gzip hackability hardcode hardcoded hardcodes hardcoding hasn Hassler hgpoller hgweb highlevel hipchat Hipchat HLint hoc hostinfo hostname html htpasswd http https Hyperlinks IConfigurator ie img implementers impls incoherencies incrementing Indices influxDB infos ini init initialise Initlized initscripts inline inlineCallbacks inrepo inrepos instantiation instantiations insubstantiating insubstantiation integrators interCaps internet interoperability interoperable intialization invariants io ip iPhone IProperties iptables 
IPv ipv irc Irc IRenderable isn iterable java javascript Javascript jinja Jinja jobdir jpeg jqlite jquery JQuery js json Json jsonable JUnit kB Keepalive keepalives kerberos Kerberos keyring keystoneauth KiB kibibytes kube kubernetes Kuhrt kv kwargs latin ldap lexically lexicographically libaprutil libvirt Libvirt libvirtd lifecycle lighttpd lineboundaries linenumber Linter linux listdir localhost lodash Lodash logchunk logchunks logdict logdicts logfile Logfile logfiles Logfiles logid login Login loginfo logout Logout logwatcher lookup loopback lossless lru lxc lz macOS maildir maildirs Maildirs mailstatus makeChecker matcher matchers MaxQ maxWarnCount mbcs md mergeability mergeable metabuildbot metacharacters metadata MiB mebibytes minified minifies minimalistic mis misbehaving misconfiguration mistyped mixin Mixins mkdir mouseover mq Msbuild MsBuild msg mtime mtrlogobserver mulitple multi Multi multiline multimaster Multimaster munge mutualisation mv mysql nagios namedServices nameOfYourService namespace namespaces natively nd ne ness nestedParameter netrc netstring netstrings Nevow newcred newfeature newsfragments newstyle nextBuild nextWorker nginx Nickserv nodejs nodeJS nondeterministic noticeOnChannel novaclient npm nullability nupkg oauth oAuth OAuth objectid octothorpe offline ok Ok online optimiselogs org os OSes Ostinato osx othermirror overloadable param parameterizes parseable parsers passthrough passwd passwordless patchlevel patchset patchSet Patchset pathmatch pathname pathnames pb pbuilder pem PEM perl petmail Phantomjs PhantomJS picoseconds pidfile pids plaintext pluggable plugin Plugin plugins Plugins png pollAtLaunch poller Poller pollers pollInterval pong postcommit postfix postgres Postgres postgresql pqm PQM pre Pre prebuilt preformatted prepend prepended prepending prev PrivateTemporaryDirectory procmail prois propertiesDict propKey proxying psycopg Psycopg pubDate Pushjet py pyasn pyflakes Pyflakes pyjade pylint pypi pypugjs pysqlite 
pythonic Pythonic pywin qcow qmail queried queuedir queueing Queueing Raml rc readonly realtime recents reconf reconfig reconfigService reconfigurability reconfigurable reconfigures reconfiguring reconnection refactored refactoring Refactoring refactorings refspec regex regexp regexps registerAdapter reimplement relayhost remappings renderable Renderable renderables Renderables rendereable Rendereable renderer Renderer repo Repo repos Repos repourl reStructuredText resultSpec revertive revlink revlinks rmdir rmfile rmtree Robocopy roleId rootlink routingkeys rpmlint rsa rss rtype runtests runtime runtimes sautils scalability scalable sched schedulerid schedulerName schedulerNames schemas scp sdist searchable secretId secretsmount sed selectable sendchange sendMessage serializable serviceid setDefaultWorkdir setuptools sha sherrif shouldn sighup signalled signalling sigtermTime skillsets SlaveBuilder slavename slavenames smoketest solaris Solaris sortable sourcedproperties sourcestamp sourceStamp sourcestampid sourcestamps spambots spdy splitter sqlalchemy sqlite Sqlite SQlite SQLite src ssdict sse sshd ssid startService startsWith startup stateful stateProvider statsfies stderr stdin stdlib stdout stepdict stepdicts stepid stopService storages strports subclassed subclasses subclassing subcommand subcommands subdir subdirectories subdirectory submenu submodule submodules subnet subqueries Subqueries subquery subshell substrings subunit successCb sucessful summarization superclass superproject superset suppressions svn SVN svnmailer svnurl symlinks symmetrics synchronisation synchronise Synchronise syntaxes systemd Systemd tac tarball tarballs Tarballs tcp templateCache templating testability testsuite textarea tgrid tgz tls th timestamp Todo tokenUri tooltip topbar topicmatch trac tracebacks tradeoffs travelled travis triallog triggerable Triggerable trustRoots tryclient tuple tuples Tuples twistd Twistd txgithub txt ubuntu Ubuntu ues ui uid uids umask un 
unabbreviated unclaim Unclaim unclaiming uncollectable uncomment unencrypted unescaped unhandled unices unicode unimportantSchedulerNames unittest unittests unix unmaintained unorthodoxy Unregister unsubscribe unsubscribes unsubscription untracked untrusted unversioned unpause unpaused unpausing UpCloud uploadDirectory uploader uploadFile uri url Url urls Urls usdict usdicts useColors usefule userdicts useRevisions userid userids userInfoProvider userlist username Username usernames useTls usr utf UTF util validator Validator validators Validators ValueError VC vcs VCS vda ve versa versioned versioning viewspec viewvc virt virtualenv Virtualenv virtualization Vixie vpc wamp Wamp wantSteps warner warningPattern wasn webapp webdav webhook webpack webserver websocket websvn wgetrc Whent whereclause whitelist whitelists whitespace Whitespace Whlie wich wiki wikis wil wildcard wildcards Wildcards wordlist workdir Workdir workdirs WorkerForBuilder workername workflow wouldn www xda Xen xf xvfb yaml Zope zsh zulip Zulip buildbot-3.4.0/master/docs/templates/000077500000000000000000000000001413250514000175365ustar00rootroot00000000000000buildbot-3.4.0/master/docs/templates/raml.jinja000066400000000000000000000034011413250514000215040ustar00rootroot00000000000000.. sphinx wants to have at least same number of underline chars than actual tile but has the title is generated, this is a bit more complicated. So we generate hundred of them {{type.get("displayName", name)}} {{"="*100}} .. bb:rtype:: {{name}} {% if 'properties' in type -%} {% for key, value in type.properties.items() -%} :attr {{value.type}} {{key}}: {{raml.reindent(value.description, 4*2)}} {% endfor %} {% if 'example' in type -%} ``example`` .. code-block:: javascript {{raml.format_json(type.example, indent=4*2)}} {% endif %} {% if 'examples' in type -%} ``examples`` {% for example in type.examples -%} .. 
code-block:: javascript {{raml.format_json(example, indent=4*2)}} {% endfor %} {% endif %} {{type.description}} {% endif %} {% if name in raml.endpoints_by_type -%}{# if type has endpoints #} Endpoints --------- {% for ep, config in raml.endpoints_by_type[name].items()|sort -%} .. bb:rpath:: {{ep}} {% for key, value in config.uriParameters.items() -%} :pathkey {{value.type}} {{key}}: {{raml.reindent(value.description, 4*2)}} {% endfor %} {{config.description}} {% if 'get' in config -%} {% set method_ep = config['get'] -%} ``GET`` {% if method_ep['eptype'] -%} ``returns`` :bb:rtype:`collection` of :bb:rtype:`{{method_ep['eptype']}}` {% endif %} {% endif %}{# if ep has get #} {% for method, action in raml.iter_actions(config) -%} .. bb:raction:: {{ep}} (method={{method}}) :body string method: must be ``{{ method }}`` {% for key, value in action['body'].items() -%} :body {{value.type}} {{key}}: {{raml.reindent(value.description, 4*2)}} {% endfor %} {% endfor %}{# endpoints #} {% endfor %}{# endpoints #} {% endif %}{# if type has endpoints #} buildbot-3.4.0/master/docs/tutorial/000077500000000000000000000000001413250514000174035ustar00rootroot00000000000000buildbot-3.4.0/master/docs/tutorial/_images/000077500000000000000000000000001413250514000210075ustar00rootroot00000000000000buildbot-3.4.0/master/docs/tutorial/_images/builders.png000066400000000000000000000717511413250514000233410ustar00rootroot00000000000000PNG  IHDRmwogAMA a cHRMz&u0`:pQ<bKGD oFFsr> vpAgp¶s8IDATxwx٤{Im鞴Be/{uy*WTDTd# {(eޥMw4mf'?^- m'9yGN^|s{^Z=Sg\X,FS$z8׳l!&xdBP(0 /dBD<3T[o P?$|L&>?P221!-rUx#Grrrbcc'Oo/cžիWTg}յҴ/-v]V$-\0 jFSTTTTT(!...޺5sss>rJsa˄K'}W[gowXcގ#Y` &Ɔ緜xwiDZ_1wA+Al6ťo&"vww.jiiD4mĈ+ܷo_cc#!N(JP( |||,Xf^s'Pzce3.ps,g 9ƞuyI=unۜ}q G{_YzlY?m;s5%JoY2?IO\Rq Mm"TWW*<w)Ur"FSSӹs;[o+11d۷7}3l)H{GB=\t˃LMnd*ꘓ'ŞO?9MR:wȩ+-|/.˼߻ULi)]zk~QrZZ=j(k.Z?=yz`uq}Ftu2rJ6xʗs'$Ej3!Ɉ F uଶr\4 ?{u\\Wv"^^9oclS 6BBH U]~x;wd^^^UUUUUUcǎ0a 7" m& ---4MuuK}Y*E<==󫪪zeB\VV&,--ETp8Tŋ 
P(,++}iӴqZWWw9'''KKK:Z~GHdmm/J/\@]8%%%{{{wwIHHxL$mZhӧO絎^^^?rB14ZfQ,;S1Lii^Zä#s3!F۴nULDci?8jjb4wxl:r:9L&sϧkx#=1u'wEu? xqQA?l?F4d62Nd0*!ȐUQ]>)J6K{znlng~גV&v}`ww1ak>|qWT+Rx ,;Tdɓ'R---۷ot钏6z ĉB0$$d֬YTRT+++;̙3ﶢ6jKbqss#qww/++fB#ק+V88]SS}gRᇪ|ƍs`D"ŋU:;;nzϞᆆꨆ `0zzzlvkk_x'|K/}'cƌx~iӦӽ}Wffffҍ2l֭.]jkk366 D"`WZ2Mݲ;So _hbo;[9m׭ֽώ46`|w){PkuJjo[ K5iO)tɍ90GY2 !]R咭1ֱJE盓G0M)8^oR0h4w+'"lB]QgI㣃>\G.P%?ptod{ LV k7 vn-x}76W4p9VkneXʻM g1~;IrkSG{ߟMg:5~@?xB,'233 ]ݭD(ۏ=Z[hggٌdBPX^^`0Lc0IIIeeeӦM۝b1;D F~RSKG322!QQQ@Hqww/,,7n!FBLMM2n8033;y-@vb(-<r5%|MSS9r|wߝ{rv 3=Upڤ7<=tC_]zLhgcad)U^]`kmo蠴k9&~>n[v':*yqA_p(f׎r,kNPB踸eFz|+.N|Fwo,_ĒyzSPH9|n5ZM6oGH$[&$xzzfª*:Fh---uuuJJ} JJ&&&^kQ0<[t1N_|ybb`ii)NJ`0y<&33˱L&iɒ%nG"A^LH-v[؀XkNX t"~uo5$X|l ،;g`2BkcVDy0}Z!ΠӞ+?3yDZ^nАk +۫!H^T/'FGh3 ػ~`SM /eR~G]=aBd}v!!ɖ ٬?{ᮽ A4 ^,U____EFPsNR]g%//k׮Q vvv N=<67m9!V-n8FL37bKُB=iHace>:bLB3Y:Ĉ܅vdd4Д e ak7鷂S-J0!>l@H55;CR߿x#TwT[+ Tӽ5g]]]ll,5T t:PT}wMAp~T컩UTT ЭV%;`mj\\܄ x۷st>Llll6o\\\|ڵ?ɓk׮הН%Ê|}/z!~pz_WMt3 aC ':?i{+Nz<Cc1Q߳3(ws[K@$x=Gq웕W*8#DTEEB7xiu(U*Wg;BρnzԞ_8Kl,mxǦpR,mFYYKMk@j1L*x|yx}c-|ܴEf,~O~ UhKf,~kV5 K^]="X*f#nnn4w{zz8=g x<toN]R(U*RSyFWEEu`o 6RzLSVVvu"## !N!iiirAmmm{{;QL7fees Rz*nܸA=~)uFJHH!D"=&:y`&̫B8c&ɁB {^€ˢDڒ]AtX&Ԗe/SgSz˹*eJ5>c@<걣Χϙ0aL_u6!gNX_?|;}ؐ'kynMtt}֢Hlnnu=YO4iRUUU{{֭['LDMXtSgg5" V*SWB].\iocFù|36!hy_)t$?c i5|NΝC2b:7`ҢM2Vby][)՛.7>v\JsS0!zQgBFªu->}r܇oH+^|s]{&X>`IfƂ2r?)ŷkt3 |s1ܑūo~峞]2[|6 {nftJOcLJX̦]Rv8[kz7mxb0۵kׅ  ryMMMOOM ҒR*++]]];ܼ~( zeBGGo޼ظNϚ5~;Ə_^^^UUW_9;;K$ڱcǦKҁc!~g_]]}ԩӧOjօ4-..nҤIÛ7PLmttҥK}ޗ_~o21cƬZjŊ^,^ognn>gkk뒒BA֭[e˖]v SSӀ7|b(7âۚXq10ߛSz++͸@ˑ_6 |71v jegZ.U:X{K#VJq#I{.ޔüTy0͠X?5C+g+Cۡ:qiׁliGx9r$''Xxʕ+TcٖAAAN3gDEE%''B;[[ڍ\vD(tkkAѣل+VvZHdddo^^^(Ɍ8PXX8gjg}&Jz-Ŗ .*JKKȨ/_=ohJJJD"N733]oRnnÇW\`' BAx p?/My.-3!dBLCM̢ʚb `8x"@!/dBL 2!B&_Ȅ @!<4 LP{ 0`0Ѓ4h4v!&xxЃ9|Y_Ȅ @!/dBŔ;Fh=~BL 2!B&_Ȅ @!/dBL 2!`Xh Fh U(h h4b01FYZZJRD"ɔJ%?'FNV%%yWP%a=bakcFcSy!v/n̦Hie-q׈h42VwE;'/{'ڊ<5`B4F !W\_/7RGd\|L-lR)7חI%"Bk:W]kTYrG܉~=i\.j4O'3XN#Z*]ja)lrM M,z;-u>\#3NIJui+wY}U[@11vbsxBsUZٻQ0jJ~k]G U3ͱu SVpMEQ*Nqw! 
] 5Nl~ػ[yu6vnB&4[۹i+Z a{ZF֎U%]VJV);ͥWZtL"s8Fw{_vh*oIĝJ.Wv  j3Fu?-J$Wྋ htFHo45e0YKkGAձui4jML%N[Pտ]ecRG~k ܦQHW gjfMhtBFkoS*v8K`2!2ƹ"kw~K-!DVuv4[ڸBD]|\baHa L-lښk{d wIT v Z!V3d)^ F1Խi4̷ua1‰ҟ9F hL +GsKGבB˥mMUZk;WA֥/ڠR.56>EXlౄXۻ1YTiapCU,nB֨U 9ɦJdRQ[SgpYu+0u;;Z=4;2l];-5:~!MD]|A[klePSwkm# {DBBJ)'yVyWKͭm -vnͼ2Ѫ6N¶^Jl\`YPSlbfcdbNh&7R˨Tʎ'wSζtFZ{:5R!սڰ܀c|σliQk^\#3fs-jJzWըh4:^{֫"0B5eѦ[Aa}oAū,w9{yBqG{}[S-FlTVw Zlk>HZYwomR*;KȤ⾕]~'*h96>I\؈Fa!2!B&_Ȅ @!/dBL 2!B&_Ȅ&x4A&xDQL 2!B&_Ȅ @!!4 O{.==yyyb̙3gʔ)t:z8(+8RYYo͕+WƦ$ E boƍ7$ URTTW_ h"x q~Bرc ]RRŋ /_kvMk.>JII;a8'9 cǎ#>쳾pBffᄏaÆ/_~w+W>3hF… }ӧB::: !'N[ޣ KȄ8BOOOߴiT*]rkr!i&&&WW`CCǏ>|711ӓN;v,$1w]__Ge\\@(B"##YJǏndBآEé?O?M8q"!ٳԟ+WRCe&Lp׏3?5kZXX,Yׯo۶b+qz|MCqFZZڸq.=v؁ #""^~e+++햷n_~7n qqq!~9992^ 2IFsSN^^^xr!ٲe.\pǏB~嗽{RkkkLRTx<ޚ5k̖,YsSL_ 0''Q]]][[斟OB.\gϞ*xBV\`z…T֚5k9B~Ç755&&&}7s]ti׮]J㩧k a3Ic\0̔+yrP]|>GGedd<Ν.}o[o\ƍo5{&]?::-/[NRעիW7x#"""%%>B֬YVQQ޽{Μ9l6ܹs&+O?{'^(++'<< X,g2 턐.;m4B{( !Gx<ŋ.\XYY{磻_Goݺuܸq~>رcǑ#Gbbb}>~Tݸqc|wB /+z;<~BK W_ZqDJVNnIƬ^Arpsu{v]LMMJ̆+z嗵t ^xZeeeQQXfg͚]}vFORlJKKϜ9s͘jpBbb?Ν;۶muVVV{@@@XXXJJJvvFhԌp3gΜ={kVUUsNOOO\yBBɓ'[[[͛GJW>ti xɊ Bׄ 8ѣ5 b?`gg_V366w w UYYYRRB?skkdӉF|: CCÀ@BHNNΜ9shlBYj5s_~cǎ$NGGW_}E2vw|x۷B^yW9rﻑFcoo?vXKKѣG BGG?0v˥O12>zG*MJz鼡nI{F y ݾ}ZZ]]M믿k*111k4>55u޽ӦMjRlmm}:ui3 OsIeCGNknnr?=ϡ[===;uԄ BBBB\.֨viMƾTs=G P)FFFo_Y[Μ9sBPBJɤP1cz s2,,lϞ=.\((((***))9y-[F!_7޼ySVWUUB77%/TI?EEEm޼9??%44dfdd**QrnR.]sΤ$mkך6ёdFw#vJII-..>~'>쳄?Lho;1qNok\z0s75}[|ήSml;;NI6=blBR9r[{6y؆93'r !g$]Ƅ{z7ǙcB(r8P(ͅBa]]ݽY9:B,--BnWGMMT*~***R*uSN=~xfffkk󮨨2gOO;!D PV__Bv ohhX|9u"}iii999sYIHHx]vMm!uA E ]immmooħ~355|BЌ_/&&M"PJ}||>ڶm͛׬Ya o(NܼyX7vj  &̝;?W.]Jdr zh>|!׸m~\hUU :=q|Ͽ> yMM-?:)M3sLr_Ĭ_p8S/_:v)ӿcAOWP2wƯh o<:eؕ,ڽkh}]p";;;MMMcbbtzPPPVVڵkNjaawM]>>>׿&Lpr9ҥKO}l߾ٳFO?60 GGdžر޾3gfMcǎO?sMMMׯǿa|N~iii3f H$ NiIXXجY=J=nooQBBB̙s E}:Zj.]200O'A#""RSSKJJéM)LHVXnݺu566߿r{5B,Yrԩ'NL>ڰaڵk-ZT^^~M6]… ׯ 2e Nv!3a Sz'`~ij/x *ʧy{zdd ґ>F$!.jGϾr?_2ÆH_ҍ &[ac)&uuvrMa23fXt%!䭷Zzuqq{˗/777d7G}駷o˖-|7>m۶m߾ 22_u_͛wm￿nݺw}~;vtɆ_}ƍSSSϝ;7bĈW_}n3°9'{=KKׯ9sxyyzוJɓ' !mP~ZZZյlٲӧOP}t+QQQT* 
BTjaaA͉@;w޽{mF/_>*l6^[j7|h"Juرol֬Y?]qJĉׯ'899kg[sa3Y^^[8/",wp96.3sǚ?{_oz>pB<ݗ-m|AǿgFV4Fzoe^=X,cW_Z1@sWnU(r.lUV8'JY,̙3M<<<8Z3gN[[536C'յ%lD[I$R[TORF1 &-g0Ƙbjͅv6wFlllll,!D$Ny睢c䴵5@esd2.wWRrT6-(U*RW|fﶌޘNC4WeBlu3! {DW޴iӭ[lllΝbŊ?0Der%޿ٱ6PdieA)XYZDb00++@vT͞bokccmU4mrBZC$J CfR#>u)Q;:Ì-/qnͯTJayk=W|S*"]RZ$S}ZJs|kAAHH)ia0@%Uޭ\k{ N_B҂-5Ђ|'k%,00"ͦ7 ~VJK7罖1adzw^ouͤ9]8ŠMjמj0Fkiâ'l;WԾiY@u1ݾ?mqM&C`b[0r.l|"%F4Poޢp0l#q~4ƎJ}/YLCƖgh`zB FШё!v-73r K ̓ 'b!ZhBaȄ0p81 *#+w~c㣓d\#`-ŝl]sk m#m6=dBbfMOrt=qBqIF!\Mϼv#k[[]{ز=q\|E^Y'kWbjȖ c{z 37>&{Z.~#S7/صHYy?|++JO&G']GagWsfNvtAQp&fiVRZR4HkKsR&mmG,U?LhcmStZFf~SK+Fwr  5rF8B-mKN^CӮ-g~ EJ@{quqtuqWz`2م!h{(F=1?YT}Ӿ;7.*))۳KFG#0tL_;UVʄ~lvVN!!$opejj|LjVvR 6/%zyX8|J)|pݗ 9tiFF%+&rVQqQ~Ƙ;Ξ˾s -Tk!^Y;$%&::ofeZYY3w!ۼ#SէS0`gbbܫBRq8.Ά\g^} `0J&psV5osY6BT*ψ= Loh?«FI vYẌ́&#]p颹& |=gLRs;nNT&ps ݜUl-vl£Ġc"lJoWIR{;R[cbBΪgZ~VvaqZ P2D"э,AkJe2BZرPGG'!\[!$_&?V|r:%-w_*:$,dԩ3J%!$8OQ9'+ !7]EWVչ=]2Ǐ5519)ʚz+keK}bBstTH_OɹeU>ZUSG14F'MHHJLؽHSK+nJ&TՄF-Y8/{/BD*bgcv46Py_Y91~# K !%+ndOՖT֍ r9U!A~LCV:_>rKAaKm]{Ԥk!!ĸD=b `%vvur9\.G"R%R^O$:M==$Ri[`L& R''{[38J>}iKh4!ȨNg8;9 BHss[]]SAasvn!NN#xp.ηF̙%e qQ&&yE֩5͚nnf5O̘6ZZYU`X]WOH$m谊%-r[['bƍttр`> ;qwTT訑aA :[TU;tvjHϤ\<1~:nuv;4aܙ\Nw<5jDgi4ZS)Ijƒϧ^Y/(UFXQ%VV %.*.i QFRA6&6J*)BC~h|GX՚ioR++qT2lw\z#! 0Dh K n{]=z9ܢAFSU]w&[Ȅ @!/dBL 2!B&_Ȅ @!/dBL 2!B&_L4]XXѡT*{-b0h(xOj4>"##Y,VVV۷#""***K+((x}}>lѣG9rK/}Gn…~-N6C?!#" Y,ZN;;;gdd(J&sUUU۷o...s̙;w>4+bp" LFwP(E]BpŊׯwppIMM]fZ?o@oo?wjFh40vQ()PZZ*^|Eoooccc[[E}':+W6m?LT;wΟ??>>~Μ97n3KչzjDDvK.JRvt|G'NLLL\fJ^jU||ԩS+2̈NKKKDDDff&;3f:\tҸiӦTK/f͚~;66_7n\||SO=uUW  ///O? 
BmabbbDD_{Ϝ9_ss3!dǎ?;3zӧOY2zh*nBn޼啑A=uVxx8=mG׆ 222~SNy{{9sn/gϞ=SN=};So߾}=@Zd?~|޽ Tyyyodɒ;vPZbFuq[[wyϟ5k[oۼ2!amm_N4!<{lOOn'|'P555}͙3'<qQΘBrmaHHHffyHH ̴tssӍvoG[BͣkYs 1Fc} ШuhwGђ?~Wׄ bcc322n޼~[n۶ <" cYdx:|{msԨQBp-Z[[WVVjK8եF]RxHB5˭[zus;ssszmKƵRx<!Qrh;;;BvwBzVRRP()8Θ1cVZt?O; %O̶0d}ZSRVy텘444S(\.wLhccgٳgI$[&%% |ŋo߾=...00,b\\lŋ !֭377k;:pᄄGG#G466zzz{T/_NLL ξvڻKqvvf2W\y'b={x]666[lK.=%Klٲeɒ%JrÆ ׯ_?|pjll3g;3e&R\\\2!n[9_}ӉөA3'B*k=ZVS%n58!^N4 CɄmήnm&p6+!h_QUKi o|vّ#BJ !.ѳ6֖~ZE)(]ZV& MD"W eX[8PO],s {UcQa2rҲK&FԬn.6w7gBeyU-NGxֽ(**kFxBlLMj!5iWn0jBJhiaљAm:9edkE"SaqUR ~wS^Q- 5BEb;TŬ-Utceu-pvvpwᙘz9:^#N͞D9}Z2aUMxhXNEX.bqOcs+!$8ojҸ)i ρ2L&3*2$2u@|H]l!d^ƆISssmm7L&OKqssܳtX,oOBEc:(P2aeUJvuutwu3KumFwvtpsudO}CSlꩱaTT 2/5)23] ,;hL\Zr9yQ"BNN'6qsurt#45 ؖ&Fn.p.2T*kllvuvtvt;dRo.4x!&K+v@BgnEVkjuZr LT*#TVFG{z5T*kmyXUb{Q|BB;:j5U(EV>_Ё.!&8X"UT=5fARbH3fVVNaߗ_4qB\P+7j&/2oTQ2T,/}5J54s3-?VrL?_/B5˨}&Dq#<\ wɄ B}+#-0x%;k4}f,_5n۾_&WD-Z0c_*jUjNO2ٴ?K˝͚z•,&+vt}YO<.4dԩ3U5<+KYɓ<1[Ǵ.knnrxJ[sƴI*Q:$(@7W퉚8>.6&KeֳO22>vvfDP(i4P$"x446#ޓyﳖ  !Vfh.POhjb$ʤRu"Ãtç[/GDŽi+knmh4e=!ΆFefֶc'{XVq ᡣ22s ʪڔ+.N;plZyEPؙ~3|L\dGS`mi@=uqvөf2 ne e҃M*j #!ކRPTLTQYS} 0YDCɄ*F \ɾ]У3NUX*r 9D"]laTDR7*U*;{[&Y[W!v;"beUڊg򊚮nQH`48ȿ[$.UފbVUlGӜE|7W'BK-ͅbddhcm Pի#*2$(`d>5``ehw104z86װIDjak*JݥTtض7.&"!>zĶv~+˫ lBDԀMyYbldw Z/ >u@7` ! &*766$TTո9B<]jjy<^D*7zRbv2L6+XVQ/%VVՌO?FFNEjZd2Y9.#p)8v!&!>z™~S&B:7SZ=*]}eT5\.';oDF9q:ר[. 
!յ,tLTU]hp ޓD"+( Ԗޮ)ps VTն\塌5Ǐmcc[>gʤ^C%.uuvoG۝[ێHav6-mJK!^lhh"p8vO"{zT*UnE]S[w֦+LMvv)Jrʚz:ijn%5:yTTsBSsss[+kή)h(Oˑ3O/Y<^r嗛_G΋7{ WUjuTD{}ql*`0BF)fLVS[occ4aLeUMSSյuYӓNImoX[YL2m/')jͩP(odFtuky\1vv6mS( 8::H L+jp T&߱E;:BG.v|xgg7 ~=L(nڲ;6:ogTxRjmm?xTamBgg֝{ ܺkں7{-:133Q*--mt !g]ƚ=%{ٵP҄1sgNr9⢒Kcaqݭ$RY|\ԌiRuϊ_MJO=m07kk#s˶}a6Vveȑb1E˹O0XL##QD.Uǐsa#hr .} 2!B&_Ȅ @!/dBL 2!B&_Ȅ @!/dBL 2!B&_Ȅ @!/dBL 2!B&_Ȅ0mbI3OB MÀd2+j ~BbM'ƒ%<ܜ{+,*335fV ͞G=VbՌƦkme !^YYTR~:%o?Q~>Ɔ=I~aI[yMMM.]UQY#W>hjҸʪ6> ``q쨸gnffS6~S(xB]KfqQl=bkg#}:P(|k/\ށv;jei|\C.wj*.'Sl=_]+]f1%icS\ak;AVrm:^ZFӃ=@D2ȵT*!DRQO 9RRO"COORTcGu06Ԥq΢1jū xw:;MMʫB055~֖!אSZV6`:s1&*TTA 2jO-Nitacc9-i\@X\R`h??sw-icmwwpq9:2 f--m*HV760 4`<`&|@HqtU*" 8E.`DG7';9ٻ9S[ێHav6xw򊚴7nU~uT*#444o޶˖̛>-1#3o#>?_@ӃurtL d0F5> :fffE=~>.4$"&T`R*Űz:թo=gGG3ƊcvtHUR pO n;_ 3R%)_qș܂bū'JBs rajHϤ\<1*rǘ(ʖ]{PS{=?{FһoD),.;p$ `0h96>IcG ۻhı{s.lD0D0pR[9t2 0HȄ0LܙHFpOt4B&_Ȅ @!/dBL 2!B&_Ȅ @!/dBL 2!B&_Ȅ @!/dBL 2!B&_Ȅ @!/yX毾BT*uu o_V}Gʫ͞jgc]}5=52Oi&\M,/" ]'mldxLk{z:چw``u&յef~̄2<#+aNW8i`x3a/*JѨȐS&ӯ%z윂sWuǎnHNtWE%孭K}GMV*UU5gR.uu!L322,]1q\ܕ뷮\ aiiVkZ]Ќ3 |F\.'8o93\<|0>6R}Z~͖݇/`0!*gĞ J͚_Xiwu/fq&PXΚ>iIcZsڭ۔{ĹԫeՄknN҄6S}~ejbCoO/ !NpWU8`(z3aڕ+ !L&".6qC*}g0;k+ܪ-}GPt/AlkDbW2BHCBBHu O"X0374!΄]M-:^CMm+x&<,VFnm`@-( Ɍ .e0FFcT-tض7.&"!>zĶv~+"P1 : !h4LNr9CC.@R)ʼ7uWs{[E g~ζv>N&r9f~ƒbvw!Rb1YYZجm !Nvw&K+v@F}dL=nnm;r"A;L'ps& K ==BHCSF |є_ǂ[$ iim}MKg,]4g҄1&4?vt[4ڎgSy&2JT7^0$=֙0~tDBJ쮮]%"'ϤƏ% NdfjJ3ڡ#g&O;{J*.)zmi R^QČUTMm;v7B\E'1ff&Jm#!$h96>I\؈Fa`(]O.dBL 2!B&_Ȅ @!/dBL 2!B&_Ȅ @!/dBL 2!B&_Ȅ:-IENDB`buildbot-3.4.0/master/docs/tutorial/_images/force-build.png000066400000000000000000001367171413250514000237270ustar00rootroot00000000000000PNG  IHDR= iCCPICC ProfileHTSiǿ%zAz 7Q Pb !#02CX  2(`ATTK={s?`)yPo7ztL,'@"P4&+XXj$kH3X@'3XBd6?˻ "|x}g&<ax2)H4,V2҇LFؔ^d#ճ܀^?I[qO&3Y2xn?|;ӄ {h I|ByPCj?1˞c'bY fz-05uk'2ĜI\/gs£8a~5b] Ϝ$czl,^p=<:/B\t/Ο-3f"0}<9f.svXn2'D:2^J775`Ϳw$_HDl\V {B>2m.QqtP5g0ȉ2ȩV@!YL @8+ p@: l{A@8N6p\WMG@F+0>ipA  B-yBP(CB96**:]ChBaLe`X6maWW8΃wp-|n/7{~OCP(wT*6 PeZT3ՍQX4 MG>4  ]@7[їwC 7 
1chL2f-&SÜ\Ì`>`X9.냍`7`-Nl?v;p8G\.nFp$2ym(~ E&lzB10p0B&Jupb q+LB|L|G"4HvTN:NF"}"SdwrYHI'wQ( %IIi\<|IK0$%*%Z%nK$HjKJ̖,<)yKr\ #.Ŕ$U)uFjPjR&m&$.]$}D *CyCKaIshhiWh#2X]LL1^ Yl:Js"9C.MX܀%*K\$.ٱy%SJ.-?+<Rv+)?> Ș{ُW_ x l A=AOu  yjF [v$C[xqaDWdd\dcTGTI($zcnL{,.62.vrGVXJŕi+ϭ\\u2$ 3Y˜L`$T%LYX.RXcbIhcRIҋd=cgNgέIINJ JOIJkIǧǧQy˫UW[7Ek]3!e@+23e#~'rʪ6ruxz߱~4+ ]99[s6nmJԵYss-[n54$my*y[)_"_?a{رǷvB²/E?PΤwawv vP"]]2'`Ok)U{YU#ڿk NŽJʖ*USnt9\R]XGkkZkujaez~8pO?5)}ՋB.746Q>R7 Ǝ;q٨E88.<N:i{ӴP։6N=ӿRVl9s\Ⱦ0|qkUףKї^{ʵ^W/uv_xugnhi}ǪVmesϮYη/s.{" xìӏ<<.x"kY y < {h5߿= 829 539 mnM@IDATx `ՙMrs7 I" {PתةKqʌiљq:J;Όw馶Q\@\A K$dߓ{s9]%|ܼ{|<9a.t$@c-2fŠ @0#A$@$@$@$@$@JJOE$@$@$@$@$LHHH,"v"i)X%ξznY#- %" ETxF.^C$0" M<ֹ!&i97e݅]-(@mX̞بDg@< $    RxU UJ5jl7k:PZׁo^9mg5LIHHHH`@£ N_E!u)uȍZTK 9JXB0JU273 YNSާCY|N5OJOoj<&    Sʐ &@7 )cc1uJX8kpK&R"UBï7\(.C|f.&b$E;CGxdH&NAzE8x '  AJ|:˘:^UwΒY!Qs 5?a0<ǍJbi9HIpX% c(=h;jD΍+g Ǫ;GZ(=TQs&&$ 7A+QqR`U[vCDtA ޝ+JLTzDIH *= 1$L݁xX5í[?ZGz4aeĊ+rߌ㓦Z+p$}&ZR2$ބmo{nKq[^X~|ͺckW?|7†QҸە>g.Ixz,ɩb=tqaeIfa43; =̈~[eN~ n׍&ha8S|X⑚l-Lw4͓Ñ(ISSfUn(- lQ[!C:.V"5e$$>D19B͵n_jUf6&'=Q%t2ԌlOt͵Up:Q:j*P":5 )޲7עNU*<Y6 $sqYXQcԀZ . LLߗQw D"sȒσ& ۯ-vuqq wPS$>U)!%EdTlriN+w^n(*XݥҸ;$ NieB@R Q"_:@)=8c+V>8 вD52-V|^f͈Ů/[1{l֯Oo"*JU4`/ۓX;7bR-:|_$~~9-yJPv` |H׮+)+Ho=DGꛍ?8_b[qZ35G>5nS*E%¯֯hl#61^d_ |bP;/,iy8?~UA@Tu$Dp̓dR_J. )O^fyf|U< X]|ſpb4Ѣg“avvTP525ƈanCzˑٍ}P#m4k*f9='qݍcZECuDGL. 
c!k_:#:oQ1m]M%5w,$Eq;OZE 1i3S(̇j\~qnQhJث/B֍#9BXvFŁr֗bwjQq(sd瀣ՖhZa(=86T';9]d2TWPD,x]JzڎrS0'տ;#{| JάTȘor N|R"ر\蔏Ԉ \r((=aXGPwQlue8GP.@HE{ok1q'xJO?2ߣt%2"]+( JUsI!NLMCtg&{Wwqv(( xIDr3L,֪( ?x\d| F$b㢴TV6II7⼊\TT rOB^V߱)жM&D ̐,)QR%7)Ͽ=ʓsq/ڲx-SnCX4m r;PWquƃ"w/bί&aSĈ 'WtZlSx?39muӅrfg#5V&ߋI;jd.R4(_?]%!d*YrFPQ;>ߏENGnAMҢ0XHrju;ӹ2/^o(U֔$"77Gw6Tڑ,vV TܬLtV׋M:wTz cIazا*3'[BN4נӷPi3.TTYy*ʔ9s{++)<  9rMع .[8 (2.%pO[@m΀ )hon+'ir;\e\/o6X,?ف DfDD36Vj2f|vԁ*Bbr-RMY3ո!rJ,zbu^j8]\8yt-o9.ͪ6>&Ib[,dXUnd5Əwv"{r Ots2:*WL^ C|uIl9:q[ [,L.vŬ^i:aHI 1 i0Ei:.w&v]MX83Fzڅ{hqBisrӤ㯳oWAA^1:\Ҿ8|[=:DOe'V:y6TGAuASz_xEY e@I{_cM&\ !S+-S>ZK؃;{ڿYd($:b_ʷa=x!O\Cڏe:͑ZMӿZ 0r<7u.WgGLǑ^u>mɻa_u2OJʐ'ۼ߳٪lY?g/1]L[D_I#J{pя/oU $XٱueX`w,C"q_+`*xGcRIG!knD'ZڗOk 93jG7@Vdf^T$_:{L;S}Kg{%%r3aϙ U{EY˒rfZs=1sE&ə{>Mۭ2% 8\%gŔb vVeU(&~mlL-ZUӑcb㬺Fn5YHUJbw5UkJ%'1"ʴ5z&x3n.j4L̖ƃؼRsY==:jz+Sə_~$ܰp9_RyI7L(JVQx8 &Y|Y\U f"E+l+7^qnGِ_y{ ߞ>W x$[S1px0y F72倜?'kH((.ףG\qmӭhmNq8&Wc?2ީQXQ`M Yo=>(YaKN݅?$ @l{#Tj"H(>:؄se'Eøbu=OrFL˅w㰁ty7t!=+$*j9g H鈨rbF6͒q GٯlVl-CEئ$aȑU=(oBۡp\$׸tm4+N|f֟P1. l|/QtNr/v:];c&Ƚ(?Mn .>/Hq؄["gD=n,X;I:z])XQ *$ɧ,2R˃,.hJޤ}? *V}1OlDs˃-=DV۱Wb#_=_ޖ5E#s K{u6( qk7=SU]_ydۇYĜ .2i RAO>}z IwT-o|^>۹fL^K+PJz|#ߊR-dϱW,R?-EӅTf3 nD_[>%ߚcj)z#&I4L?Nj0d[;4E~{[O]/^vClq %vXiH8;urwzө{T3yc7'j;%\%y(H T"b3DŽZ W|̲4iZ =s:o)vk78!Y%k| ī||ΛF7ݞTVtdL4-yQ|[Sf<=Nz`zvm¤$Tj\,;9V5ъ- MCa*\Mf1NF+_).Aj4f\II'Z}1敖r(x!KO)>墘D*L;6;2JkEt8α[w b%4\pb`JrIzV6ɯ?9>L1⚌=q={Œ|֌crU['JdxT\\$䨓l_9)"ût9[q.(,HFRl46kS;&f5Ob,L@DS :cPR2pz@o4v;_,yakQ iWZ5#|{}LJ<\-_wDDJCՀOv>u_oN@ҳҊ>4ت ']cZF{?Rn{5f]WzAH΢|ݪK?n`yeP{;Z;/|`xbc9};q][*td;*;Nff%WF2nl~^cd.Y+^.+ fOuTQ:k0\IVqņWG/Qt(@jL*.Z_L%Gq,= !<-阷fEGg+^SX[}]qtF~jϹ Sa t͛}Q{eE,,Bv4IO7F"'NB<~}u G^-CԨ hEQ=f[;Ң#!U; l63woEqIe=J ץO}ӪEע q, xʛ=%ISIM^o)oFgSC!M)Z &TKz mӲȨg2bQ`#?/#M8q2)'`BbN$Dz{O>Z㩸Br{ 0SNfVjq(.jKz![-Sx8I :c3 3ĤJ'Qgxd WspSK=&q=hPt.TWtc^ؾ %qXz*2 䭬Dҙ#= &hJOk! ~Xx@Dś&"xv6(XJQG^NYG+Bwx@܁H8ω w0X"+tnDG iK)Kbj9! 
s sɁo_e%n$_װ{dii[{eȼgzsxZ$!tZ:rKZFH9B݁5KdqwUr._Rq,2U7b??,#W|'ab3ο‚y3z_*v?.-NC#qFGw`/¯1#ahd'Mpcg_ui\ KCAZfٞ,NL ^I[|<"d_Ͼ0WW.e?[^]$O1cM穱`breMYYSx~ ni+z^jΉ\/غNVɓUR_qvC(ʆ@bs.S9]YaƢKP{(k*=ʹj^Kɛ !r$,Od:pQ˷7jl(wPe;f\Ru{&K#?OuUy=E P齥9Ji":dX$IR˓pKjhlgnZ|AKĕ'䗉w׍6n˦56›7xnh S񫯯^8a_euj.pJ e㸼wwU=LL e W ݜu4@` iːŐ6ƽg\2Mݖ>5+2$NnRyy~cI7do!jǩG4AMQ/:*gFXe}/(g< pJq)%=`ᒗƻ[W9-ˇI27[~+=j_ڸ|qѐD͛$W9DnJ*)1O6#53t>ݢ(N W,i)v2\9"S'xpߜ8('XEv|R'Yo6e4d-Wu\&]\܅j8yM99w׿dvWR9B[L̻݇w`q4U%07/Q=]~x\27n%\UL'r\vۊѢ?tBta;-0SRL\2Vp-߰?֍Vxq{bQ<,G}㐼KjEWN ['^FT"AJτ/+Lg&e["։&ޡfܔ/S{;Gr-R#M] @ݫ"=^?U'PN5Mg1qQHi9,OʿG;QJ sL>8mz*X]+ak ^xyFӗ3塢CD78ߊ~ xy2XyErvK=( ߟq,xwQaݭd515yrc<wX kn^m'^Z)6Xϻ O- +`h#7`Amy_@q܄rl~v'H}I21O{inEElw$UJfәySFR; nڕ8q=NrܐcԭZ׭[z]UJ>دeًcknM?Ul\t+ dž >-({=s28~}=s<4}ЯfmBj;yRK7P+=Vo:$##Eca%:itÿze8 *S?T:|(WF(/uu%1L5(U:-luGn8bQTĒh)GY;@6IO=UٞYo^JʻL{CXuaZy0_qFPOX_۵+lOxɯQeez!m2׫MMR5`DY)L .6݆}a*a9XPSU(TmPń˷`*DLܕf0qJ ՈOPp {.YMmmpFLORUFt mMFq2YE24*- -j-Ԋ!$0+5lN)73 _yW,M򚬫ViĦbGsM8qzjd2o|7}W.}~$YFvj|KT="d]#b)ɭU]^!0.. *O Q]Ѩ'RjDOԬ# dMwŊOe|o$Mg{dD6^Bt"?i/w??@0 tȪSN{tܼ70{ɱ]ӊV!Vs ˵)lԩXIꤵb !%q5_ q2$\,Da2MD T|B4DOH ʎ_yJQgwL-]͆˄$*<~K$@$@$@$@H@}BU^o̜:QǑjQGgiUzVUK]a05&2&ۯfՌHHHHH(ՖW{nV&37"2(@9zSWNGcZyc(?JeS)iѓIHHHHFF@Sc3*j2R~GwiSzlZyRs oȇQ%&!    
MVt'+ K2tahy:Xy`u, ˭]@YA#    8{ 6jqJ[_wԇ|#?p:UFÝVKJSUN:*KQj,y 1رqzt^l#fQ  hJ 3f"a4i2o     #0bi6^Yuƒ1_܅/oDAŪ+ēV!Z็qΣMZH/yܵlRTn l:$@$@$@$@$@$6k3v;vu咭![^>` ¯_}b>ۗ/ 5<aX_ƥwg{%<|aGQ/i|w;>~췛q4CO    1Co~#::QQQgZGXX cғチ>uQx㍥Hy>k Lgyn}k6!9x'*ȫknw_6a(M ϰgؓw?JO$@$@$@$@$t#Sz¦`Ƣ0#or` 1we ݀鹓<9ȟ{ &gdŒ eO QBZ^Hj b'%d kYz֒UxiL#d+(&z.\udm/<i HHҕOa,P`˂͎uᷫoBvBr7߀ pC$@$@$@$@$#^@iM31 ':܆TRR&d谨Ɉ>rbhlGƸ,DTUȠ7YS$i(=ވy,95dÿ<&m2<$ Yxr;:#v ޛ=UX7;$@$@$@$@$@#'0bG- PS8}&c|J!jr6ƥ& -ȘHQtB R0ҍ_fwvR:1S"Odد'@S <dCG1xV- (n= c8VHtn˰dep6GbC}gKC(@PU#삫dG,0Ğʇ+Դd!&V BVh,E(^!Bw]by7;(ͦ`%^zy)|v%+l:     Q#kpʑ,:pbs.D(-KO<2&]\>+,$K(< WIWyKusq}q'     G!Sx'@턫;k>۳Aؤ0TY _e,4Iq ,98!jO53˄yxlBcU⵸ҵfw,N/"%#c&MB@>Ӗ>>waom1zM5svӋڃe2|m卦ȧܸF/,~Yl^}h XzL{pz< 2Eq(dE [ʽ'm-OmZ"P|Mdq$8@IDATh8},d쫰q~L[ ܉:T%]TMؾ&/~T6~YnE]}TW+ [[,o d H!Jl.AY:AI|M)j5tȣy Q%3R?iBvv? &xiR0'    %02KO!r8Mߎ-=6vPVhGPKz! @-=)IHHHHH )s$    !TzBdP     |̑HHHHH P AQHHHHHOJO2G     "@'NE!    >*=gIHHHHB:HHHHH )s$    !TzBdP     |̑HHHHH P AQHHHHHOJO2G     "@'NE!    >*=gIHHHHB:HHHHH )s$    !TzBdP     |̑HHHHH P AQHHHHHOJO2G     "@'NE!    >*=gIHHHHB@𕞈 ?w֧ٸ{cՃ O=HHHHHH ,A29ѧ4:J+=WYYy &     _@3i  A$@$@$@$@$@$pjWzN]f@؉a%A':Tn     .Sz,OC2"#?7J4<½7GwG;j*PlG|\ Mn}HHHHHғA-?\.'z\ z>,B;$́1++@F #'܊Z)s#YT[vWy+-Fm;$@$@$@$@$@$0t5}_N3IƅEPbX@     4[twϷE:0 s&-r_C%     !=K>~HHHH5ĭ Kg3+A]0j>  QͪfP*=>#    * P $@g TVHHHHHGJHHHHH *=cJ$@$@$@$@$@>Tz|,G$@$@$@$@$0 P'U"    c=     1HJ<  A1X'VB@]]2rQ   !jN!u (ÈEFFHHHZv O'Dd FfJJږj: x}` {z"`oE- c*<aaacV @bbbZ=#;;{ ;M|&Qz1}Pz¨E,ȮIq~ v)9HtQ 5mscIJ"$@$@$p HLLANSp[b{7Θ…J1\.$2wH`Psx@gQփHHG@Uyt G;{yIH|Lɴ8*c3=>6Onf5Z q x(/+:8 pHY  Cm҃iYTrOAlwoŮjY*:q:9JhDig:|c6holFmg(Bt$@$@$@$@$@#'|QZn%R:Q 7~Kre^]Ȋ~%(.901     _n-:{fMUlvU2)iT*:w[q՗㒋0!     
Ztd.1tVTqkQX0%V*wc՛B~Fؚ-P F#࿟9 neѮ )(#6‰@n{^+_ـ5EW1/k^ǵg ʂ$@$@$@$@$p5g8]49Ns)M>T˟oYn2\,g#y(R n=p՘B^ Sap>U 1x*GWvXWln2gLJ=uV,q _y[,m}r%J/G@VB,{\%-x|+λ /~$ W>ʭӏ>#5 VaK7W{YhױvcY8R k/MSζ1]λ/Bt'8dQ7>[9Sb1s.N W]'<,ڄR{%^(<=K/<3(O |FK$@$@$@$p.sV\GoYUVRs0=%OÒlC6A]EQh-݂M[Jz4 7.bё wZ.=ߊkƲPA[dv?^< Qo7/B,XP>y|^|ZaUxz;%u׆n^rT)-1yPe-<#b*=HHH=s֟(Ypʀcܛ +6£bN K}K~c?,ǃ>-򖖖"///$v jJHH&Ԑ6eQm TzB|@T1 %&  ,ݎϧ3Xp/J\_agb7Cμr9P<|f!0p;"[9lk"u"}؍ѭN w}túF$[]m <ͺA-rszcԚ%!80H B{fFH33sę{ѝӧ?j.4_Nw'꾹i_GW4iR.o6#ྀeee_)5  @Jƌq)mGzL__43M3攵Nݤ>Wu YTxL0n>|/d=mԩNϨQI|.CUh6 _ ;;[Ǐɓ'EIJ豛x%m~]* Me@ ;'jr?FWu! >AU@5u#  ׉@@R)@I>u#  ׉@@R)@I>u#Ї@5׫@srR~Ml:o|lo~m}ݰ\f  ~H'oii-dlS{@6mҫ[lYæ ]]?ڰIoزk/vo:f  @& wz2#0oTw*:0Wk5#2n:3k+w4ƚn[i   ]}nmгkj5e2٭ާmF{_kRT3dV̂hݹSM>_*Ң2@!m٫ƖS)Ek٪w^ϼ[wjoSKt~IV,_Bh۳[+QE,W'[ʔZJ"^_̗?7~H#z:3of=X_}LwW'|Oyn=mP'=N?.}Gg=>XN0SGv?P3p|`r`6=Ncf wK_}F?hY$|uF} ''˿y}j_҃_^W_ݮ=# W4vٔŀ $YГda]\$fim/mLk;9X_M}df4wѥUf54j wDfU/^+g|ťiTs=EjfIK?_̏b˯SuA=!jj4QKek`_G7LL&\TzI 'M'/.2A?v-3׾ :x[oHz,bRqٟ)/~E4cgh9}/Pkќ<5yUۿL55l̅c)`Wu?ОM_KuN8޼pAyΤv[cl!ͬhTnv,V*ˈS,wݱhZUP <|OO.~\Ou-S"&D:Q9g\勿,\ NWUzo~j_}O# ?V|\={(s$oOWWL;v杄ԫ^  !@ jvR : F^UU^|-, ic:`ρ r2yu\=*RU5ӱ;ܜ.7g[h-hKm)L/]o~z2c=Jd{V> ;Qt$1A_z,?y_e9폩ekw0Q;i'g3k7]8 o ȣڵCzĘ:5'<窧枩cmGd;#&1  npzý* !ӛ *dV+*L؈!ikkyxB-t(D{ӡWw9-[Ǻ}Z/޷-6ӣq݋pFyM0\bQu*G75IB0U~'smg +ZBrlf[fպ4y}wGn3yg :LD@nK+'d IQ|mۦmZYY* 8WVx3 :zh֯YfCZbóͩm}ݧ]yLojS̝ X3:|fN8~s+?{67^5FGTxMoYO?z=ϹA_j~4C} vo|WM63b|,N\]KQrj]N   :.\_Z_:Ponv`+)Ԟ!^7=Thn`T/bG峋T6{T\"AU54I ,_U.sjHʪU݆W͍g|Di0}qC38}Ks76uUz;Bmxu|oP|f}rQb16B 5z;M2g=^fzx'my%L&Cl]7<2=:ܼ`jȞ磧6IAF@(@ %$v־܁^Ӓ;Ti[:l k"έ[R6zm붚*V&ԘkvDlf:Miʷw2p3=y=ڊ53*SWv܆h-|Xk6>|hSBϹnE=z[_2aY~'ڨ_K}S=j&^&'q^6Y ,ҫ58=Eyzj_-}fL_zt:1  zsOu%k}R::;;y^5l9oV멧4 >}zJϔJ悞i'?߄ރ3Q9=˅fǬoC|S~& MMM5krrBD΍ ̍7Es.F\-.O+.Z)\T2bn`lc₌# a:vJJJR'Nkx5*=fپ}^|EMP  G3|%[  }z@a   BٗlI {_|9ZEs@@`(gY!==h08qΟ?? MB@ \pACzzc?Њa(PPP6ҥKp $@VP($Y!=rhFI Ewuu< h3sA`x x<M>]gΜщ'8mxf @^6c? !J#ۺv4ʓG뎻Tܞ7=h. ؃]aapF@G`P W~t"_Ӯ='c<{s&e闑   A]/ogKf y{-~Td{&!voL` @@ AowYϸ{sz[@fZt O|;龑M]`a~oh[Z̽ :v^;   - x G"?PGG>m7?v+vo"r4qLS^sY!  
TOς@<]E-andHSh;%=Zՙްkv   ЧBϡ_h=<Ѻ1I۵%Ƞze>}hlnO?򏝠{?K#ʕ+k@@U^^Ə>'L45!?Ԙ1c}L  D">Нw)6cϤI<4c1@@_ϪcǎuNӿ&BQWWsJ۸q4   :{ŋe?2D=M0zNi  T~ffzzvǂ1@@aS$@@ X0  P3 w*  = @@@` zNe@@@Gc   CB0ܩl  zz,C@@a(@;MB@@BOc   0 r6I&Dt;wN/_ΰ\Fѣ5qDL!jCL$YcAT'Isk Ǝ;S^ b-Rp8/:^ ]@H@ B[@c xQVVV $pM`̘17nN>VH-Hlw p|L=HI2bZeOi:u3jφ l?^'O'cM)8TObI5h6U3oa5Ni+7p8H\\55f6EĞCNƂ]SU9֢؛z9L[%Á7yK8S~xgh O  xNNg5Սxs4_Ş{[2H+*K%Z]UryO g,aںtʟXbz7wtfW8SMN@z89cayMM)ŕű{nq=H/}fPȼ;CP"BZ Pp%TqkiHآݯA\XʹǷڬk)delQ~Q̡M9bѸåwnims\Ϳ4D{ӡWwKP:u72'~#+#(mՊ _3f0˞E<ёFnkwF'7vg[Цm۴VVVSiE::vU9Q,Њ}$sS9may5ڄ{3Fm?Q1_ʼ;f)FЬYCcBLE :::t1DHF[?ẕ>p;M~s _2qepscu$pqs|m27x3(#>>8q\ רQDZ}v⋚[yxœ~vڜ#{OP̐2oNCזx7;/j~i( @ \,1<35#ߗ97*#7=~30%B- UFM yU8D3 {z >-߸ќ)53ZX@ EKO@ e=o^jb@w$?w@@@` zFg@@ F@@Fg|6@@ @ {9ާiB 1C8>&ߔJC}=77b &NϻXE#.\̐|7DRPj߼.B͍XEU.]r&Fd m('41$IDAT̐|7DBP(NX#GӧOי3gt NuK-E!ঀ=D1C8>&ߔ C<:=7c$ 7B)hh<Π)   |BOM)@@HГF;   @=7D@@H#BO    $ߔ@@@ =i3h   $_Г|SJD@@4 Π)   |BOM)@@HГF;   @=7D@@H#BO    $ߔ@@@ rҨ-4e D"9sFΝ˗G@f =Z'NTAA<Of5>Z1vME &1==_FL/K;Vwy^v6Lúx{%39>J !@*=*=<NI 1c4n8>}-,,tZ0ǑL{ГeĴʞ6uTg(.11; ?~N<)BOw)R"C%q^2nŒ.kxDq"pYhR*C!q(Wgp^,J ݅]_JGM; =7c @@ BO,   ތ5@@@ =h*   ^3x3@@@ dj=NE$OB{=||*,OPj9%3)TxS"ģc#痿09G&]3SyE**)!s$W:w6H%U:oV/)s4 6@Z-BCNitssёY  @ 7g\s&Рe;guᠢgj϶ZVqֵoֱ<Z-9G}ܹl3qàc~cofpJzJa 5TՌ l%N<=m۶mݦG+Õ[ZΞ=ŭ\d Զk8dnV+DŽ#6Nkפg\t`^;nO~3Ckb|y^!9@ iޖ4ʡ)Zsjklz)AܹGAm7ὶ{Hb~),[f7g7jcU[%T6׮;[6lvNX^bUA@@wB M01w_Z_:`L`+g{jo7;h|6;xLT[sӃ ^.L*1Ŀ며LKԹ TQV,o%sЫ2@` V,7I-.77,xYU%o ZiuZaާb$dq"I^{y*l2qDOOۥe&(Ob=72Hh~ O51-{m[ڷ{c>x|Zbn.PViF/ NmK<&+idLt9y1Xwlge{e@U ]gΜщ'8-y+a |SJD`(8> ތ5,` G8@ 8>f FޖF;   @=7D@@H#BO    $ߔ@@@ =i3h   $_Г|SJD@@4 Π)   |BOM)@@HГF;   @=7D@@H#BO    $ߔ@@@ =i3h   $_Г|SJD@@4 Π)   |BOM)@@HГF;   @=7D@@H#BO    $ߔ@@@ =i3h   $_Г|SJD@@4 Π)   |BOM)@@HГF;   @=7D@@H#BwFNN"f1@@R#`?Ϯ =~Ǝ~a&  hooט1cR݌3ݑ%z.^s33A@Zc?Г'!*@~/w_XE@@{Jѣ5qD+ǝcXvvKǫ噉  P ؐcs}0zz,@6=_ OTD@H@<3]/ X@@ p#G@@w=R:  XГ@   ]_JG@@ zR@@ K   bBOw#  w})@@R,@Iz@@pW/#  @=)T   zt@@H';@@@]B  ) xP=  +@qח@@@ G@@w=R:  XГ@   ]_JG@@ zR@@ K   bBOw#  w})@@R,@Iz@@pW/#  @=)T   zt@@H';@@@]B  ) xP=  +@qח@@@ 
G@@w=R:  XГ@   ]_JG@@ zR@@ K   bBOw#  w})@@R,@Iz@@pW/#  @=)T   zt@@H';@@@]B  ) xP=  +@qח@@@ G@@w=R:  XГ@   ]_JG@@ 䤸~ommw>3@@@ u),,Y  +m0@@2]Г{#  @~y  .@=H@@@_BO@$ Nti >6 =fW!  0PFg}>`tbb[UF̔iD' ct_afM;ZIڶ}3xfWMq"3As?9i}fMs{O9)2>ĔiLOFMֲ  0h= =m.}љcǟԜ;NЌ\wz)wzϾdK@@`$N9|aIBox*R2 T,ҒuMK6b9,Ѧ>%봱(5zٙ:RC`YT7VHM{yjОi>4T/ [jASP?<"%ON57FJSmg}ϛn6MgzzlT[{D>;YeI2atVτ2z24<}Щ.uut:E2m &LP,&L7V6Č1v^Iyr4TM22&]zOic}GXjN m7nKL){D@a!67/6%3|oOL,|Uy.ڋ_tnmἜ&=A• 촁X;oStyEҳ </k<ӵgԜT|*{Wjȟ6b͛6Fw9wkɱhݏҿ:K~%͘%_.ѧ;jg4#@{&ܡm__Nc=3s/j'''ؕzT{֖/GOo0ɞvI5~h=Lex 7~~Wz#8\s4Ve \v|2zF^g@@ y:ϫ5V[UVL(fJ|yjjf^Wy9w_/Y]BZasPTN-Wاi TfERs[}9z#t_܂M{ jƩӼV+/|\k3NKۅez>:dCQ{c}f c57'_߾žs^$O=gm⃽C97~L4iãfJp\WT ۻ؟M`a|IY~$e=zBWyNuuڇyg:^ի#b<ۃZj=S."@@h27()- )d/178( ^oBp8,,etĊ赈-;bί^f"zg9u.~PUb0嘯߿|Y ǍM{'[H:Wg & N6ٳ_/17}3;m5烷*v`V^6d~~sݯ1}v⋚I9277y(;;[FRVVV#ӓ E@@R$5A>.{;ėe޶vu|\{=_`)r#N@@Fgx6@@"@){D@@` zFg@@)N@@Fgx6@@"@){D@ҥ[=qmLd-I_oMHg_1mpZ3(ۃ #0qD;w.cvC5k]SGGG_Dk\I_KL=h @}Gjmm=>X kͽGFD47FkMB] 7-0ˍRgtل%E@@Lmx4'5N @3bbd칐~  . /^TNNSK_3>GOm$ _g@@ Y~zx豛;ؼhr䛖iztLNgqzdC/~\@ 2M<;!޴x@@@!>}Z jkku)M87rnnƧ&24_1Ä|ˌ_\+agzWY֔}f;oBc&zҼ4@@<ķ%f쳽~dž1chرθ}PGvYd;n[G2!鱍x@;vCcGlo Ce 895*;G]w#:d{t:dP,?:[O@@ H1b`l2A&tm-5&>tz:͵>6y^mdž(F*f@@H +6`݉2ٮ_}q&wcf(ccNf3=D;{B7'klԱA'w/#Z3w? 
ذbxx6OsO|3!?4X 4{5=Yfm~6~co2=')rHBO#Z @& JlC<E Fto4AZg @û BM`a`aZ˗/S\;~V׿EYv-i0d UE[serI M>|pffqf̘ |M[[ۊ+FiؗT*?3F3ϸw[rپr_Dlٲu:]^^^^^^]]D"!yƍC>s`xBK}kx6r%qc}ފwzv}?/u3S?'M{op;.wF.x/PK㹹UVV a儐N333E(oow]WWGaXfffjZ,✜??Gy [4'瞚ssᕝxX̘хWv~ѫ3^,*-iS٭\ۀ{?qϯٜ侯!jy|+;_:|Ԛ涾7}_1RQQѭ\T BR9Ty溺:ssŋo;N:޽S^3l_,vGu+"8;z;07\IU =hsǝfk4IWOmh͝1~XY^'~Oێ~G{$): BR_^UUjGMhGRyбcۭ{$..bXeeeMk'JpM 'vLJz-oJ\D([ǎ )t\Q]ϔ "|NbQ,>d1a_eiwJr׀_S+ ~O%ːHe !ʵf_cF[[;;;y}BTZ[[GFFFGGE"Q^^!d…=W5.o-pt%qۑO5O٬^?B* /ea.$ o߮P(\]]G^^^^^^>q)S p'bx֭b:88XUTT;w.//g111u+|||˻0\\\\]][[[EttٳgSRRl#bqqqqQQٳDlVWW'%%X[[XmѱyfDbkk ˓g'Bg_bA0~򭱑s3>uq۷?='* O=:kBLF'Q?m;J216U$01?f5B8e R<WWturSMFG|6=6dBS"J|gqqqyy95ԤxիW;9PYYm۶đ#GZZZB蕯\x^LJJD"]bI{{-[nY[[KqUכ7KT#vN]<֚YZ~Ki9tū9\ \6<Ϙo$+!AZ.-#<`r׳!Q\q=Ju3= EʅEyӢj~N-$Їrjȿk׮SiAAX,vtta bccbqII ͞9s&fO^\\={fآv544HRz.Lק2}鄐(&B<==rssoܸ1i$BEQss~N'L49 ;vggۆ Zm;Tn͢!RY?퐘ߺ/Ce%gGsĚ 9ǝԩzP))\VjiWk:2e%> aymǏϜ:V]L˱` /\z#Jڐ)"j뛙`DI,bQZm/ tcՊ9)3 !㣃 +;WtAt9 RDW͊ UVYK'0C3쬭-!VfBmT8T*݆2b:0FX]]=Ѓ\]]355H$...nkjjP^^Dœ|~yyN(J/O?FͥTbLfԁoT*BvT-WPv"P:%U./KL <@fKy|?r44^usyV&BHicjWz3Bq?]eWu+475/x/:}𛞛;9&̂nU^NV&V3>:8b'u G~B i;:PuMC/՚kmOթTjBEQ^|=!_o_?=)6lJvVX,&TVV:tH5VKikkOwBwuK$B!G?X,BEQjƆ9V_ ,KGG!̬N foY?Ks뤄h[8X]Lw9ȏǡL8~Ɯ?~Xp%}7j$#MơɈ#.^HjooommusscUXXX^^ȴыZ-{ߊIElmjjoS]:ZiB?ZjBVH2uj-3ߪ35zk"Cn/@@<T`gY(Wٟ]]rBIeB]y0UXDV(Tt#!U+ en& zS݁yOf[LwK0%6F[0+)'Kg9sDEENC9J_ݹ\X,noocƍ{*blFyh:~ ,Eꟻ{VVViii? ZV&%F~8w)JNGyK:E`WQs 'k.  
smS[%RcGxHdLN!#g**<=@֌ @aȸ1~Oַdd{y8/?R;?kqloDQ08Yz,yn;EeOO%IK( B`bbBwD԰li =ww = 5[(zֹt7|_\BHbJtvmܻؕRh o PyП3iaJ\o?SgӜm'Ą0%]g+zI^NcerEVnIx 2;Cdxje?_nU5z{Ւ|~]3Sw땶@o[>Wʔ[ΔO;tс&??, $??_.d'Evuߦ+:Y3¤Ri~~Fd^III/lUaee4MŷV 3rƞ+TTT(JSSӇrB.ߺ6F*Wi\ /UPU+5"1 ~<;ˋM$jb_4gbH|3|q_ܸ׶vJYmlVDsc.徴zæG}odQkԸHzx?1_/^X0#XTyy8ERkg_JtuSiRU״C=Vkkkjj*3J:rHqqi&Lp۝XXX9ɓ .U)DBHddmUѷ7VVV655ѽK\Bz|[QdddFFƵkBBBY*y<^HHb92''̙3?8=!--B[ZZl2eʔ`z~WWWZZ5MDDD@@C~I8s[m5ᲂ] iM.F,tWKΗv:%a!`<^zg;wZ&WVgR3nmcsk_{K--L[:Oow=Q3Rrzk/%=8{!{<>Sxߙ믿s{хS'Or;۾Ja.־59^dɎ;sssJeeeeWWa3gNcccVVVmmF)++twwg꟝eCCs_$n0ggS>}zӦM~~~---,kw4y䒒oU&UUUM8ry晽{VTT?~ĉfffZBN3ECY"3>{c;p0~׫M/5(Z' ުaUxeCmL8=T-w7Ç333o;EŊ+Fұ_=4f'O3g!~Æ /ҳX,[[۠άvȑ Bի{v#H$9uuu B xxx?ٙYa߾}- O?3^199JV[[[3&**믿noow%N+((ήH$,+**O8oܸq!WW{._Nܣ ,,,`8W\MJI`Ga kyeBgQ 0%  !  !  !  ! E/2Ǚ>0AbU~u>vPfV"l6h/Dh% B Bpd6c*ABK B B B B B  .J{p7(R*kawXR! >R)pPw(kkk\. ZFYYYݟ4fZ)+J%DGxwIMuSbMRkԅY!NEw6®8(c4X@t:LXXGUYzBHHL&(N!AXPS+ѯ9[9x|Z.j)$Y@ؤ aEs/+H뒶㺁?jXQ khho-a,6ŻץV&ccs36QcBilj mnbfQS'l%Yغy¢?Vl܄6tJ;I/X[&jrb iy5L흼FO(ι( UJ+[֖Zig.܅6&2ŗgdR;GQV)ĭ  V-nJ`nmi\^pCܤըu{kCavj^3WȤ>_{rp!DW&\ZS@ik }B(߯pݷ&I+}Bz.2XP,vD&3gsJk[gŦױwom$rY~Zjosli("js5kkyJ5 -l "PXV)\}q}wBEHCM1ψommj!R)q&pFVv U6py*yOKxඨu>CEܑuk !  !  !  !  !  ! `ta*xht:T*ɴZJB a)NԤX,f٨aC.#&gV8:YYYr`AK %vvOt$K.>5vKP-0!tlByH┉,-͎<!ǐ!~ nN,%s7nmx{[Idy۶w;yH_qcmjMYyS::%x_&5\IϜ7gĘp+-qc#|K UO/#:m{o^Q)\ȼVfN@_nhim?wڴYp<БĿH%7LFqeɬz0:`fϘL_6A a._+))/ D?vqqlnif@.B[x 2gٷ6iI$Rw7ᤳSw? ּu,-LnJB0!L$j/quubx2\L&7jFZJQp8QcBD3l6T :tVZ.9!$(p]_< 60_PK b{Pk4*:++Ռ?C;ѥ^L'8:0!`_[$m6֛չvR[Somc"jet:Dr a2j͒3lm̜beey;It/)lmlmľӎhrJ_o#&`ݑ[>5UR,VCCӎ]kh'%;4alLtZohuǁ&OLϿV}r`(2׭8pG2 !jjj,,,X,EQpaΎZx !  !  !  !  !  !  !  !  !  ! _|1.....ap ~;ypK.ч5lڴĉLիWO>/<zUu~VdSLgTzKzB5vXAZZZ?ݸqCVӏYY'|wav^a؜ԉ'֮]BC?(((kW2kRf0x ylݺu֭+W6'2@_-wwwBV~z_J B%IB\nDDwPI$*Pw! 
`8cj⪯!̙3h222z]Chh(G*RBryyy9U0EGGٳ˜9s;vL*\ֳ/R&W\hoo666!!!g\RRgff677T*kk렠y慅JJLLxbIII{{;EQ~~~ƍ1c/N>钒*++e2رc/_nooO|鄄NzťWr_a˖- oo !݆;v,33K ۇΝ;.!rȑ#gϞJVVVAAA- ~1B=^Qk֬a;w}]OʐMMM.\رc'|nS*ZO?MLLd߿?55u֟}'N\pa=zHE_~޽^RRG}^ClHo2MMM>SO=aPf͚fPx>bFmcǎmrW]C!f=&ɡjzfvnJFts8Wv aL (*88ڵk BMͼCBHTT0f#UC o;62""HҤ_wBS'\̹pZ cC˗/b÷lBIKKzBVt=efQhh9s^{ʤiӦ]vBlmmG ͛ /H$Ǐϟ?/?{w}i 9sudƬe˖^zc**,,_$/3}ݗ+ N b3O:{}+t ַh~sOV?ް03>x.͘6anfjeif!,0wlT[[[R_DʱG8q8PBHff&ݮC266W;w~@YSxx!JyEB굃i֬Y|g}pB !&Lccju|5EOAkf"H~?~yGto;r9y*ԙԼ"Ì%s۳sgO}gΞR-;?/kGGGQQI999L1u#GBju-(gggSSӞ+Ϝ9sܸq̱OC???AIIIϥ= &NYߦxᶪe2]!}___:\#h38؄B!}9;;(+t!QvW1aE=tFΛ?_ycOJ R(oNIYyUHJ_#zf.!$;opennr9j&8hWcǎݾ};s:xƌC?HOOg3ah4 k}5knn57&ofnjjj8993AKKKoO/2QYIգm@6Ys\kkkHD"Q_j󭭭{]DBd7w9;;p%񴷳w8hVlRn/]4[ 0)(*6i|ꥴ5ޱpqI9!$hc-ZDmI>yn.Z0$(IT[WOyrO#FӧƍZ/_~=3WmqhsOf=)zo{ieiYʼnSpYfRFy: kQW-,,۫[0/aggܜ[0fɁ.ѝP!}yfn>~Κ^*:0|=1;{Ԡ0/WVWTK;!{羣mť._z=VQqT0s3SoOw:uuŖf~??gxF{#;MB0/7.uϦ;x,1NvErYjog3q Vq{ye'ihj_hP{EqNOO;w.#ZZYYyĉ6+++P@a^Fp8_~m_ԟL&w:c`=VNg#Mg!/ePBL&[\.a+Vkcc뮃mmk+KfB3ZF$"'NڬOUՄ`No=ݜO() \lHye5!D`<&пC_3+]iujtH'GsnD[<KW2㩓cnNnAq(3SAFV^TTT ̝~䙖V[3446=@)+aXg_&dѸJ\%%j}X픉 ˉ L:[o󳱱DgϞK a!!!<OT9sBQT!|k֬QT/,,;wL&&jժ۾#G._|׮]Jr͚5!!!:0ٳgYnHLL[pJ*--=t<|`@c&:qℵc[[5~~~=o&^~KFFFD" k֬0&mxxxؼ[ .Deoiee/ϟ?ER9rD ,[_~]xZ[[kjjyEyyyGRxb41D4yիW/[lUUU+,,$XYY1_u/Wj_|Eppp[[h@5 Vwl[TTQlSR^)sWaej5Iq1v7,[2o^~/;=}:}&$$_5wSWϧ^k# IO[뙹*?\Uu/t%G>zAZ/$ON8L!_WP2:ph{;u~mB+S.\=z(1!s >_GLL 3}"##9ZMXXݻ._\^^b&M70W^y%>>liiQ*AAAuǯzԒv6mmm8uq=@p8_~/JB3(MMM]]]nj3;L 7lpgKRkke˖1ƏO?޽;++K$988?~666w=߾}ݻ\BOYgkkI1?͛7?iB,T`a`a`a`a`a`a`a`a`apKYYY\\{w}G?]vm\\X,kUV͛7oUсRSSqts [ZYY>3t8@0fZcƌ¥SSS3k,TAd2믿^VV6eoo着Çgeemذa\Ǐ?~<.-BȱcBpv^{mҥtI``7|sС{l8Rxxo[-Pw cӧO/\)?yRRR?Xk׮sJ/ˋ۰ay\\\MM 1&7|sg~wkkk)_!99W^?~||c=~^ܹsgggXbڴij~zj=ŋLJߟ|iӦ-[l b+t`7|9sf͚+2Gt;vxGNO=?s\\ܙ3g u:ݢE,X@Z1\^VVs8QFz$BG}4k֬)S'%%ӝk֬|G<== x 6ͬpر?<88EEE+**o{!ÑyHHԩSYUI7޽[*fff wfX/@Vwygƌhpppj9s<㝝Ǐw?3g޶\\\>>k={z)S0999"h}8_> V}}=!ѱ[9]~#|/^Ƞ];rX,fFgϞ}o‚^!99ܹs/\@x.[PPs kΑ۷o@@@@jjjjj*nBO>34qÆ r|Ϟ=?@ 
ŭ\9POIIIYYY=ܓO>IX[[o.]8qmO-[UQIII,X@̛7o?ԩS\n`ff6a,,,&L58;;D"&Bf̘&ݑÖL&#l211!HҾ6!ӧBnܸq'332uTd"\.WT0%-ꫯQVϞ=N_I||7 @kOg5k+ˏ־?҇dᬗ{ n}348a!}eY/<8.Ap:%چExUVא{ ~8122+((oĒj' B>d23!L,?ꫯJ$2ZM2{{{ ,zP(t8}O/looj|}}SRRrrr.@a5*]Ml77Ҳh1wsCVhHνG/^v/#kn\ UUfd枿t?j~CDXPGߌ3$ݻwd2OO>M #-Rշ>#BaNN΀>SN1%BRO>_oB/x_~BlxAѣ~-V 7Z{YT[WxcX3gN}}O?dii#@7;RVWWx[ffv6ť^fЅnn<.8;45 `XX%/-3ʸ?}jVOkҙ *!)dJaAa)jOJJڰaCAAwiiijjjhhh_5o.Z>--FzݻB᯿ĩ,X?cMMWqqqvvOss3!D =zT$x999'Onoo7H͚5+!!a͚5 b߾}Gk}:u*00pҤINzK.ۓ&Md'N())yw2ťxVVVlM~Kbz0SQRV53~iG.+nd/ϢXZj+ !#}'&:Ԓ|"ݣ7 !3M1m'_+#}'ƍuU5e'O4wԭ[pVSKBzfΑ17M>,!AZ.+;d,[덛#P;.2]v>}믿>>>|Illw/ݷo_ppp!|رϟG_$@u>λ;KO:GY`hw|aš:Bsk4-u{'.]ɸbbFݺ_$R{ot&Rڵ.wgd_̚liF \4FiYeڵ,Q[[gd6lS5V}rͫw=RPTd,;%|n S˫[D%aSbǍ8tvҊCG͟acm/ !vOX[CBK+ Jv5f{;=s閰{%Bȯ;zyN$a㶏־ÆmM̓8(3y`xMMźyas᯿?SZb ++7}nP&kmnwtxߜ˽R(JE0##ciy!Y7>*jmkn^:Bd2BJčnj%O:hgkMh5VBRkudJC [kKww:Xedv]¯e䤥 Μ`f*(+rvp744{zB;[*\-FLcNKNN(* `؟޽{0=@_r!̴RQ)rOIfJK+!Ԉ k+ Ĥ]үg3O[D%75 /%Рja=!$$8S"-)춚 )**,\VQf]]*+ޞnB33SOB/WRY-3S !'ϡ6`ۿ7.\`z]]]EEEǏ3f = zV^ho,v56B+A\.M*kh"Ϛ>ĩ2兲'jL蘈`fM8){}1^:N'+Ev6ec*MQ75!*C$jkjtBHD6 8qKiYب0cc~Eŭy+*k"F(U(&jk 4TJ߈'W(erS}Xz9LBDƦ_I?3ʸ;gdVwlF+>v⌬K6vL'ϜpeR\ӶJJ+w92iؘ0FSM`ʟ;7_Fټyg}fccSO&%%=3H~W2okkc5/[n9r+ űc,--*^D/vssHLL|w꫈\0aҭن)ill趈BΞ=n:pmmm6mK&N3ϟ| ?LyG_y啽{._SEo@ xϟ?x5BHPPPxxx?Rzoqqq?ov!LP޽366&̛7gaV(,,]zʕ+ |a?0aK&Bz6wBRiM$0Nw9oooKKKHD{{{_vMѰT:;yw[[[y<@6 gΜɼ w…'5+//OV=2lƌt#iӦS:1211opm!t:Gꕫ+󸵵sѢE=lootAAAt3H$skkk~ljݹpqqhnn{*!nnnQFEEE]x'7nܘ1c†Н`@aEA_2G̈́n{iIHT>ryϳ>KHHHLLꖷWJ>鼥P( ,Yp… :gK/"~k-###??t"NccccaaQUU-y1+/-))9p@]]m7BӺ:cccChf^٧=+Š^W6mڷ~p9\?0lƌdLݻe2=dG2%˗/駱ͩSN?~<o =x p7BHiiiyyyhh((N81Cx .H$ԩS {?~FN̑tG gOJJڰaCAAwiiijjjhhYիW_reÆ uuu(jܹ /_裏ϟokkvG}v scǎʈK"{ƍJ{('W0aBBBw}j*͡=,^xݯ*=GJ4**j˖-/̙3RSS^}U\~?0}WKJJŋmllVXr;occqƭ[^p!!!"$$䩧brpp7oޜDϘo̟?V\\\llkgj>Í7nٲ7dywM6_̙322b͛7;88,^?553qD\~?*3yݪPw$3y`‚b3 Qvvv`a`a`W1!.NfzFnYE%j0"W*=wL1zt_qi'z~M5lCxBLH?o 3R%jg_iI.Y8f\U9y#;ۢEgl޶'2̕"kn a!"Q[Uu-b|\@ƄQL#VWYYY!boo3kƤ I+*+=x6EQ~MAU>h+iydȟ>5œy 3/פ3!UB\=͎VBtM)*A a>^v&'BHNnXQW@?uv6ujnn=j~a !$,$pϛ5ATe&>Go4*vlVoL:{¢XƆdө= FӧƍZ/_~=3)Λ6e|pԫLaHPV',Y8鎤(*v\dxhK 
s+;(EFtlK Eoز#G|ttϿQ&B쇷OW/di7Pc0$ JttE= k櫞^&mݶWTE/dޯ(-h5lkS[}-+/,qqv\`VRrjAQ)%_~IRΚ1),tg+6֖ ?B&1X栗[ZG̛h{v;J]%%!,48g[˴Ǎ8tvx񡣉eULr9*(OwWEApk@-++W-Ma9Bl,P]0T }!V7ENq#W,n/+:u&QޞnN'SJJ+W3 J&+ckmD?usul¯e䤥 Μ`f*(-bn.΄GG;BHN^+KiY%.[lv/Z4Z-EQڥ]XS!R(W?,*2Jk=ézBALVV^ŔTTU2ľSH? HKJ+hr+Y)d(WNIS݅V%p#&v6j n%~^QcBGu+o&CkP#;;%|KMF<^8Ln1O*R:ӵںkȸ蹳6NN-*)7!2ҏx:Λ/-EBL݂Z6+;LDij:BF^9gDydn妦&JOWB[eP(033uwuš\UQUR[*1q9^ť.*%WN u%=ۢںFBѭؘ&=Dq^l !Xo1]ΐ[vjRm^7a|H?Vgl빎L $8S-/HȈ.reBR^Qp ޖL&) bJ Jgzy2UM"T Y-Mcgg_>:ofDWW'BؘY\Sw!Gc'bXNv jVwwgBHB|~'˥]]SVVՈDm=ijnV)Uf>jWYUâHDX0H}C!ݭ J:*fZZZ޺dGN`y>ϯ^~=[(rؾ#l nyB\@ )JCu} u{`ȱ`Qԭ}K0TPB B B B B J%&+++0EQ@ t:P!FMM Ƅ H`B B B B B B A/^72Z8ox a808  ?3{V=”pm{47,&:㡮a6X-aK kZDZ[xBzm]@޸7B[?WPrTJYO(_{;KLMMd܂SbҳfL577;w!RP>jʫE7xh bwkBE,,Ə\uo%s,V(C/Q~&DT*#*j^liiCn#m~|qiԚ̬Gx>^nE 0ERI!d&JMQeuV:9񪅵Zi J|O>}]2h4FC?}/ed$@?:w攏>VwG ?eX sQ[bRNoQpШر֖Z1bFNss⒊=T*'Z[M<#gFT<%LPlmV]#(bl;;' J8;.]8fONmdc-Y0+;M;l)yz㲇1͉rTJ"ttHR/\%DK\؉3:ՌY4x0\ˎ}+x0ssc EgQ7] P(8hD"rTʋ>N/rt(ZF\$MHJwj2L[*(B^noyyF !$8hEccF53hںF6J]H`g'{Z- !rˆo,rtMRKK{3yY7_+p)6`ww()L9l㖝rR[۰i뮚=_m{Bn#]و79:`KXںi@?7W[<KW2㩓cnNx{ĎVs\\x6&} =pjb?Q3ҍaX;LR766u m{$>"Z8oJ/w* rTfU"`H<1&&*|#ӧM?m5GL^jFQsedm;)*T<`yÇ 3C00@00@00@00@00@00@00@00@00@00@00@00@00 `quqhfn*HZgUTf`A!rsgO M)+,(*7:2 CvGxx33S}od7 _r1 gZrQQ0 n R)ih`IXH&<8)5c* aᡣ JsCF89lvLtIdjb΃e.knfiO>Fh훻)(*[;lx{ LLkj!^ZƔlߴ11Eڪn!li._;x&o҄2Y:)jkt3k 9}B?ڹ-V*+x_++KvVL&﹦ dGefO3omƖ3)jjC !>>OUT7rodGEYTBh4ZmLh'6[yFj aFBbQB65:Oʪqm}5!ɑRU%d kFFVx>5 n#CGX_{N04$03+y,Emr7|VKεv6v79߼dGN`-aEjrV_Tp%B(B0T"j sr?t4R^YU[8{$J%wx$i{GgHp@qI%"S&߿LprwvH::%xkBo/hnK{މ!s6= ;}"J=vLPX'01#ӧN6yK׮eҫXON~5zQ}}SPߴBK+RBktȈ6qG}CaF.Wʗv-,ӆ\?>$ŧMRV}! 
.pse&w(jJ0DPB B B B B B B B B B B B B BpPZlF5ZDWsO[[Y$>5 0<.BQ!r8>G/;T*BDE2X!ጉ 0gs8R+]rͫw>\\RwhtJ$KsK[m]= a-|Ua]Nqt4!cmjm͟ruB/>>"Ʀfm@OZ=sD s3JEqqv\`VRrjAQ)%_~I˜\`٣F546a t:o{,;njmWT ʘc"\Ξ Bф$o/a Ǔd2BJずB?jz9_uUhl5s\w..nhj>pb99:B JZDMM"= `(i}KW7iuZ[QᄐKW2!׮glܳ.I]^fLJJNMEnnNbL#|<v@MeťJr̩S4:͌i;E>,0IDATc#% lvxhZ]S)*+]rk T Q::$mgp:;U!m{Nx%lU|tj3peR\ӶrC[:/'K&۵Y6qG𲶶"^HcjbX 3Jؼc61NB[dXPEa렞`(2׭8oHըZ[=Dq8k+ V"j%L\An[YYp9ޙ](3Ίꪆ J:(pSRCZnjuK&5u\[[;aT*>#,ҶgBEo6<!Y`Rwh =B!BPvˣ RIPte:|dxB!=ԊEzhzLB@;_X;!B!zs9J%J%Y:!B!z2_ϼW(upR<B!BHBB!G#B!Ѥd]@ ͂%Y_Tj1$0r|c.M%lX\~AP/ю!}! RdX$[HxX \&~Z96D񤍊2Yu3VoJb&%9V#-dj䥷r~ڊ>e &^ !]BOoW^KU+>%VݗWC2(d}iU1i ^JƄY[9/2ӳIep.BՑ} /;K_7ǴU_CUL%O>N{Y]窵`bB39U}VpϫRmt:[N! |1ޢ ,^/q#o 4|>|N;܍f ?a|j.#  Z%Fc 6 !W.P|Ш0 LYb 8U*TDO~' , J.gϱW*4AI>iOHif+X)ܘIQ4b/疿*bOr34@ICKEFÑ̑ z'`t=>S}ղ|1׶~X=N`D@G{99**y"*Y6Kx ޝ4RSαcڱw!/Y`aŊcżvȟ'']B[b2^zU ܷu^~ oeQNMw9YY~~_)zx>m4c0-:w=U0৬:ȟ(5ua=G#j,~'֡: _~cu*$>7Ac׺&6!Uf˶ֆL ɤ"ˑ}s. s*vř؉actAm+_31WzDh-o*#74˚ XˑWE-P3Ɠ4>B#pUpe.tIg1W'{K&9f+a4%M--=@,-6 u`f i=fDeav֘@QVHNƲo'ka >m Ihlynjeh/%;aEZubȊB̛ɨ)s1$#z|?m ]Z ) Zr iTŒU -c-׊OHJJ :sUE?xtZpZ ʰŵ8 ǔμ)xKy,>$dsX(4z9Lmon_O1!Jaa[1ciqWv`R5kxjxZmSq4W w 3SQ+ei^ ua~/9\{ЅWŸ~)ur( gPb Ee-i:\c;#06 9_TqJet9Y[ #gM!60Bx& rrԃB-X8<a8`>ǘzP̎&AYcYEyC"qHO>ߣVqY8YTofHrp2f-r.ԁ_Kp#UBt P%v>7n^У n^o<*|Up[rtwH=5`sm"'c}z3z݋ 2*=ź)(/'٧Yy]K֬2wx #GpnO j*=\vA@BAI yr)< A O#5v}ߪpx,;\'%BZs5lዬ7q1(FOGyS=KۘC:#-»}V«)c3}]OOhYPǙZLUH1̼w9kvęeV?> ÁZS?q]yj)ظ )s7i~+XikwK1ysR:Y˒(\#%ddZ)cH31(j)l+l%c?d ٖ@iIfsB놞ul4 ccbͧbԘ Cj]IrLqc0ZqI3ERd1d,ln,b0dž7.;Ush0go)2`)1MOF]V[+rH~D Js.ܳGoUUfItٕ:M3nzo ~n *"Bt9+pRM +UۏJW= ؜m5-:XqR#b+|QFA/JxZoE^ﮤ":Úld#tv+Iwu);$K#d'k` QB1+RQ!OX+YӋ6c50%@w @ɓLTo/O !.m?ɟJo!99/N ȳ7(.\q ܮnId0;OՂ-k jٵ2nߕLU`cD 7E%gʤ~<@(P^lg cO>[x둷ɺm%t(映d3lgYmfKO&{Wgy<~9a;fn$,l;s_&Ò',!+'3K vpe;Fo܃Q7;vrVH9M5{2e6)TP|#s=$|ta.$ZKJl9y8?g[9%#K;Y\8CC=o/M&IK-16.2`>)6A@=.cRb*&:l8 CZs`^3o(Be i3eik'~a!YZ_8~FUbA >t!W/~V L=%VgQ$(A$y΅ޤœЛf$4-?&Le!Y\x< uuYoU5jqg ,lUvxQ7XBv1E0=x ѫT:@E`j*zOD`nRc֎l̤jd˜p f@%D"&D;OЇ{7ܠ%PSFCxu7m0bXOgjeWz Q$L]U_e0 0a8٬ֳk".*^V͒_I< }Uh 
[B(eRfcY{;,Z.m(/h 3G2Jn3qʮvD϶nnuOC3m@5Ͼ9g/ )aPq6!R686ywUY4kO8׳v{ǝ%7>u4]t7 `]<1Y49?lx%n楶Žpz.B~Կk{f/Ub5UC$lfvfs:Lm VQH0iw`2k]\;eJ1<032@;y$O GEY@Kn*(F}-fcwذYmجf8r35,5&Ʒ<N!,dmv\jq!dm>i1OH 1!ؐn߹ H/u/Ԓ5:y;v|.rnV)k\Mz aD@ogZNa'57'E|6 :mEϥ9gCUDþ-_)(SR:^N( GcGZ/@8q*蔑KVpo.oLl̻q NNcî%xt^OBp# n+-L P3hxAgED%_޶-:L9;x1,5 csN.`ͼv=/xԴ@E"s{Y.EH$l-v3I?k\V0Y=Z]+uOrm_ilU}!)lY5/D:ɱ1$^V)0/_odZ ͂e:6=h<_Bre<2 lۖE~%%-4$>FuAdn6#v_T2cz8@ЪA":7PU̬w~^J͟ҕ,`~;@%<֯~_-˹{r۩/e;jLmABo~3wzCv@IQfLPstbs*QJIѤtSp!P?K#0[j$!ܜnQƈ5LuJ@%3;"Uu]2[ ڱ%2wHt807Un2v^mXmnD6,w1wbw~2^)u$"/ 2 ٵ;_j@5;_]FE̽w 6WZU;2{9um6 ㆑81sZÐ]= 2,|5Isg7:g7Řajt~O}\VnP6u+.TRS9ِR J %^0DϖF%;`JNYU q)$h2b4ޒ ,^f[i ~+s r,l\5| MK<:Vn[>?DTX0 lg;`X#ڭН{k\Ts!4D/o D5|Ϗ@a۴[FM4_ s?"ElHp)<`u-[N18`=/s./o=(ԨcY:-BxOmѩ@O* =cJGs\(Hƪo`g6RolCpٛW:ȴԃϲ__hFhRf6lK@-E_Ɔq@EPdnNʃ摢\ Wi=j8 uΜ/Uϙꎅ^xO98.\*${AE `l g6>8^ #XضTtDCϓܵJ_F9"I?&*Q Ph92}5,G4w  yXDq!.zB#0&5ng[]C9^C?mjlQSچj ú.tK1$3-PTϠf&0[]Uo]nIoCZft\vx:.wRsPU@]wCy9#L5њjv5_R3Mh8=bH\I6{<M0"Q쬂Lk~>CKx RCp#,_k^iܮavbpK7%f5}- %־ Ĭ_emj8o6nOL@߸\g>,f¼l!|IIЂG{ؽ[.d攴-%3DRL*@19 =`\Gvi)ڶHa$#V)>/^\!RHЂ3 ɘgToSŒ~Htl\MSQ$왬.m}(ers7^d sbcn>Gcxc]j*Y􅯬"~A (ICD?;#l:ӻ;z`DJqJ~eG bF IDATYSJ)uE?]GLt{ kᷟd{ O?Y.n9?Ishs26V^[ΎӼʥOU3.B1PdWJy/ǖ?f+d MP%/H6gМ[rHH˴qhg` Yo$si;~Ǫx /0zS6pFOL#5 \If9xrְd03{L;zAЀaL%wpٷy afd)wUujs:7u1id9[6%NCIQ $Y'/}[L$a!@_;aʑ$QHNfV $L?_L4++2-lY:y 4>lsDT::a`_$G7@E+:Be$9IO^:Rb.?6`%\+V(ɵחKcFR>Yظq*)XWdaٲoN+6\1egtLX20xρЌp,j-u&yU/a:ɌaUjZ9#[^ۏ,5e b/Q0OtbxS,[A}Ez5)&cا6BIh~SgS߽yJ-3w "9/Vx3?m)?lM@wX[Âsw4i W\/lx Y8/(<5bm4_k$O5FRC꬯xW{maBxhF"R4oCC鍅N4qLe,_sO;^uqQ3hHZKTd;y)\=¢Ø6s}dΓ͖=ra,l1-ޣ~nVC|B^ jc4+q3 3lh<;~+ۯڟ?k?i6MjY|7+/8Gѡ<~DW<Ȅ_|j>x7vZS5q7;N=>N#W\P d=fVv3m{6@t y|emu0 !.B!]#/ ƻFUU;;׻J KC2:YGsKNp{OWo9˫S-Bh2Z/ExFRuh] =B!BPs<1evSb:ToB!2ԼH|a%peU5ݤMbbٷ`6 =B!K 6BM=!Hͯ~ݔn#ۄB\bSt7ʐ!5BOzzB!=!B!D&G!BѣIB!BhR@DEEEw7A![H:a4 B!B B!B.!szB!=!B!D&G!BѣIB!BhzB!=!B!D&G!BѣIB!BhzB!=!B!D&G!BѣIB!BhzB!=!B!D&G!Bѣ)d8Hv\ӛE'a(qYHL2Qo'x?儳N i|SÐa8[pDG Ԫo :}-Ob K"qh,6gGaD2C:bpW8s n݀tptOnc^aF .+E5mB!+tMiwcm"_I`*h/NJʡ_tCX8[Z8g9G$x+|Rw㪽KF98e^Þǒ]7y4E᷐+ C<9AI܀ ]B!B+놷՟VŐ0uT} 
z#GȾ(⬷>43{;ݍJ>jOq.bxBvJN40%S0B6`-l dnjJb.b2ѴKAC1D!BљpNO=էbFwr0@)JJO C9w}BCBO#E Gm$5%_Ak\qj9S@h=.吴*TT]w(NkDxbNELB!tx{~jI[Y`D)9pۏ 0a jVF (z=6)P00y*om: !hQ\9TW3YoRǝksŧr?Gm*"¿K !B!^+'FvaqxB{;Cc@kb|f*b8+:5ɡMEo;@m7-+x Fs8SΆ CЂZV;W7'EB!B!eB|T͙ThDbEG5'ZڒcԍHDUvH|8mpu""T;gKā_-a7ѩTUơDpc5<_QX71 ŏ:6|JmtQ#:B!Bkͪ{v![&!Bt )+&B!$!B!z4 =B!BMBB!G#B!$!B!z4 =B!BMBB!G#B!$!B!z4 =B!BMBB!G#B!$!B!z4 =B!BMBB!G#B!$!B!z4 =B!BMBB!G#B!$!B!z4 =B!BMBB!G'?߈^']1(@= ;}wM!:#Yw3K,G{">h@]w7R!$GI=H%"vn2SE &ǓeB!Уw&c Լ0Wub!o>sa!^Mن[ʰLk 1Qa(#.?䓃N )غr(B!|!BK*{ގɹ;K?fͬ?ĤZȈ!(_z@ ?||Oƿ(&0NS|S?@!.Q;Pӑ9! 4!;YX L#ՄB!ɕ1Mgy QDGT34of7> U ig _pqsD(Z,nCA>7@r{|+Et׃t=;Fᎄ‡=~ۏy\\o -B!]c{ fԽClf'~;8!!&7 [{_ἰgPjPз7z3!5IHU6D9^!lܡbpJB!W>8˱+ BWo8[RUyo.kɱ3Ζ˫g$Dxa6yRM![\'uc?C?ZsY}"e?z`$N95+߆wfOG ILG }z$W !Bt^kVKEwCžfw7A![\ۄB!\ۄB![+1\LsW9 >+',6wC7Pk!B\Mռ46n:-$OeW6Ok!BF |RvRpr@̽JEKEUwSk oB!d{zᬂws,z8[[4Z! zB!U)c7(ܛp">xcp OG/  F8 g쩪'(6j޴VS|m NaA.(B! UN$[‰/wa:,n ޛǧ't*LzOF1 wo0Jȟ< !]zB!:]=v~Se`Ji^<IJB0)9I~I96UK$cلl/*B]NK]?Jl:/920%*gl;~%.pDc0H#Bщܾz2$B¸ ?8wmF70G$}v~y-a&=w(jZ7cmB!}wٵ=Ǐlx=w6xܬ)KR(~?ӻ $!B|ZdV`q1qY}psN7wқ_O }ۇ&6? 1! !B\:|uO(VI_O *[;n Ol e@TgqQG^K⊐9=B!k /wܢ# iMDOq->Ш!G+IBB!ķ]p1DƘˋDP@ ˏz^Tj9RŁ=,(cr?v,0IB!hGU=_[I5 9Y^Ԙz4܋m_9fh,C^O &/h$G!M^m;ż=&EP]Ui3d e3 +ՌR#؄~zB!ڤ[՝ChX50q5,)9>ͫŭԲ达6J.H6!BvhB/C;@J~"~Ǐ2.HУ#HBB!DL孝ЇJ?'{ F)!zr;B![ņ1EMKSj?b/Gk !B|!lzB!!n6!B!D&==Bt* CI=,vsG]wK!%GΠ!~n-Xm9GC}rdn>߾kuw7T! 'Tnq#q-^g9C{|~Tgwm1p(?b<ã-oko_>`o{5+·WQrOGV|)Dk bJ|߆]o?ܟt6l#BqE]У'yhn>R> uKso bi'*e$e/xGǠ=zWW5Fw'Vr^rVJ@S6[Fb76\(B$ FMB!]У0n2wƸwXۼ!S4d7[.O/JW#cyҘt>lunص eIOrJ\D9o/~ԻyxnBq. 0C8cB|~ ƧwT&mm쫹uonܛ^1+ZM[YT1 u(ce3p**>$"z9soFild[' GỸl ۼ2+źQ a3.hW-n.;F -%9l?Qu2 łap|[yF}9,Kf1eU,[{%VmPBua1Y=N l%$ߝ>.o$a@dldN`axDO 26m:Nv E-8r3w#\̘8X""""_ec10 G:K&3^|GݼaNn-ٶcW\fcPͺ}-ZQU̜3SoRd|RxA<Yy@n]PQy]K05 <9^V>?Q掉kzp#&$Ս׮fΡ.R|+4% IDAT{tVBeG9cbtn"K.p%+aPbI wGF:,cpD_p㢊mdsm-#<4 |7_ACs+>:w{3g`닽wԕx*ع=ôE}1G0H9=A/ga+zZ9mŊ- .oeb~ޏoW?c<NCuޞ#XSG Y-p/ `Uyݐ>V` d 1(E:l Awr챣)9pMXHYNkXK7'ZEё:;̫Â@\ ͶLr\929d6E PuqMDk8iu30wNTAF,"X1ɳpbggDN~!^xg1*vob_*FϚK/-`jZ\ H%? 
y=e*$dسs>t F3}̭Q[" LzA|0i`;"&MK!!pCnI{='?<ä'(y ;0cA>پj W=pg#>-#y@كT%Tm謂ax%;&"""ֽ'VѪ )pY$,;9}!zg ٶ|#<$xr;jG:FOjK[T ;NȒt7+\=1?dT񉿭2MS r2 !'Zp0ul͏:6agXCDDDDovW\&M+Aķo-=_>;b(0t؉yCsW{8|V8n;z8|tIzD'IOO>,bہ+Ǚ1y(+e"y^ ="""UսDbyiM*}%XQkS%""""BG+tx:ֳDDDDzD-Y-"""""zDDDDD$)􈈈HLӜΘNe"4$Md\bKD>7=Rlp.dJ6ͲEDDDDn {N:?0,\j2b"􈈈t"&^b7DFID[rg/@DDDDn"pm-ֱOGDDD ~ifS0CzD4gw.BԉNgmj%hp[H%V}A!BO Y<|JkuhΏbӷT'%/#""rROc-kNbD6Rԋ?Kcӡ| z^ ¬Л䧕nwjS6b;3z36VF yy,Z5 my$p׫[ɛЗq%wUwd:]UˣJhU**ti󇁻#"^+z8";އ+>h&\w LLO?pwr\/Μ{<;"p3./bs :JRʫPt]- 3*1Y>9zzDDDDD$iNĔs|ﰍHm_M˸X v8X:%oӱ9]?}F$""roq&G/>h೚&j9Ӿr :zaV-'EU3kZ=#"1¤6JK .PBbQHK6c,~&DԴ^$'Zyzx2 aѶ<"""7 oa!//e_fAQV&J(fׅt@oF\j~,D9͕lӹ@a>7z"""\uE*|n Cӹdn¤V+!xZϸxtJ+N^s ~z#V|J$3ŏYyxzCZ7\'^M| B"""r9`DKNM'a#14-,U7{LGl!sG0l- 8Jq} Jm|$qؼKuvd&$5pJ[Õ!a^~/Bs<[8ʥ6:4QDD$toPE% dp6{ &Y\J!; 1]qiؒQGT u9DdsQmn_ f(' T~m]xm>i7~;[q:StcSڹ HO SRZ?=qPb/\#<ðOc~ 1$""r?[03P7<)pHĕNC8˴Yn-ex`8s<| '3uhYv0m.Nץ0d,vkN5$ME["XȞ1xv|:]kY,[]L c(gbobGJ7OmMg1_ WekVδgx˟[9bV֭V:~\S\֫3RxL#,|a yy{KomL&wGÅ+c4 -7 ;)Si(?M@/)Ǟq8Ի#"""ңqFi dⴀt㤚c(|x6\t ׀$=@-\'PW(Eam#j &fLrֱcVWT-)ukˬd򬘾Rf:[+4xӜ& $ss/p4~PQz;Rksb"!P/Pa rqÎ0՘D :JqR>i?J<qR(`B^Nǃg1GfVbۿnYO8js@3g e&9P¡0EsQp*w5OvmyzNOwKL]40ު5/](Z>|_O:Ç`z,YKÉf+xy fo6Cjh&[=ڶ?!AӦ0՝rg)yʎr . 
6-BpFpdc^}d1ud(bH@H#:#(n['""""UF֒OY'g7Vq,C^ CrARؒ2yt00,muL4 WGqw&>N͚Awr/yLN T?pJc Mi8I;.ou^JP{Y#[:[ A 4ٶ yg""r4yɹn|A>:Tᡚ,^R4Vh571g_zKٹYdxs`Q _{+74s" axvm2w[wr/#xzi4óӍZwUᩙcd4JٰN_2xꘖam53eAħ~5\j jFxsw= p@k "ɑgB=hWa寿5P+WD<'8xd !FZJbF?D[ֶ&=Q$"|J|>.dK9 PC4}ymǕFidŦoeש`OJ_&GDD~m"rV9$&ү^YJ[-TЋHMf 4|'?wLtE$Pߗ3AЛ񰲶yy,Z5 my$p׫[ɛЗq%G-_~'J*?7<7^%lQYeUVY.L=V PZ֫}5M8>^YE3z|(LH`b"|is]zp?D8 v4.qy~{ C'>8/c(7G[IwxtPYeUVY,4GӻwBcJ#r.LVCMkV<}Zٽ ?5qm t%渆3~7L >#MxLAqZz[HlaVbE<{;N 9a\^wFowMO7EDD;U4/s9}KoyV;P@rva gj\)=+nu\ 1 ~LIߑ/m3dGW"""#ѓ==5#""_%CZ@DbJC9wƿM%TٹvN3{vyB+D-L9G?m#f[K!Hn~7EGDbɑg3[RY5oZ96[ So<Ȼm478Ŧoeש`OJ_&GDD~m"#,ek,(jd/|H K䎟V9$&ү& ^n%oB_}Iϧ޹fsf4qû\ ="#0z4&8XM?ˮPz8D}|2tt4=4EDDbĝ< 520ocC?G`k֋8"4`%9Ĺ8 aZIOKy4NDD *Zil ڦ?Zԋ,+ֆ yi' a-M z'%h s܌07_%-d "őD^i#}Ziޒ䜾 7<#&;"""ֿ֟!"ر]otDDDDzzzDDDDD$)􈈈HLS#"""""1MKV n Ǎ /; 8x!NtEDDD?XxtB!.r`R+h 1UW71?ױӞűlxWoY) f7XQeyr,)<:g> SH'1ªz""""۬.澸c a \_p3{ɷyj@,qx["yOLgȁdڀy>p;V{>C,}yÃwJF,v.}uKF"y_Du[ `>29\C%EDDD+t[OM//|T IDATfIBGY|Vó/L~2m'6Ob$D7[9K^[,-vɌ%(tְ{*PӞ]>|" _B^(k^9m¢%ɾC1NњMA'pi yvlkYt?'{BI!+O:óg.m| R< <%o40?B0:(!!E:{DDDDzD o30 n G.eC>[VQmg;jU_V&J(Jn7h  X ( 娣h>*&pk7RJgԄ뺿,d-p)k6s2m(ڑϤCjR1GF&k.<ť7ַ:2-\#PW/58a> } { d2iRNCI 28llwgHCDDDDtp#&6!@7e% *`PvƩTW3IIi LS^Ʊe?p;\ Zg>~8f c@*SyG7t6$.g0!v^H$o\xw:$pa@sif9Qn2,5d*-{ s1 r4rt 3J=X0t阍WEDDD;u[1^P4gPzO\/T^L;nO2|.[f0EsQC;"Ydg&[0_Ƕ׿;BQ0=NwÇhdZn6`>Zឋ?<}Y &=C^7-⣨,Lr$^I%'e G CF޵s 95~2%aӦxE<%{yݯx}TҗٖCVdӠ9P{](1 mG6p-?d1udjFCuBNC̠ 6N]F]=;!;@$B68!PVISF>#1ƺ\34_j*^·O srW==""""=ݾY~?1A. 
y/,0cF;D= F/Civ_QL7aӰ'7ԍF| {9LpLF cP`V^Ey~nf,x([D)3&(Uxy?EFj1X8~ @M[qy̡IᏀ/O_;M<':X7 bw۰%aOJanSvU(}fbc9떭rz!#0x۷_>.ڂP00cF_yLs-Q9^~mbczyDDDDzP~ӷt;DSV0i+`k'lMaTOHcu1c<bVaP^\ lvлh1GEٻ \u3sP K7 ^}/Mx}IۏRm$3wlə]XU—'sӎ|DDDDMׄcX +V:3 B$vh g0)7c?JۀLu7R-#,d|iͨ K6S'm 3 ;V08} Υ'!E:ͦ-Tx}e1lZW|Wv>&hEDDDRdꓛ TFϙ[۸emc§k3&&lgeg;̝0'CåkoU[,md&=;E>M~ ʠr4NqG$7Okpd:q`*_5!=XWDDDĭ׭?}k_OC$6Y238ڿwuJ߱]oUEDDDz^ oB϶fԆh&""""ݦKD}l[_{1""""_M#"""""1MGDDDDDbB4i ="""""zDDDDD$)􈈈HLS#"""""1MGDDDDDbB4i ="""""zDDDDD$)􈈈HLS#"""""1MGDDDDDbB4i ="""""zDDDDD$)􈈈HLS#"""""1|al濼k77sl>ZyG'J?Ϥskyu#w>?Zdekxc'Փ'\kʣw:DDDDD䞺COfO`5ΚQ+"p]}9q{EDDDD{J<+:˪0^zQc"wo)"""""==74tL:Bb垟k%]3zF'2̕DY'*{{b:3F$q0`|Tfeо"\!{֓L"hwzl>迃&"""""z BfHaF#'w>@XdålX~-I'IeME{nsg-d.+Jc~Gyu1P9r6#X9%iHWCO2mM(|[F3^ǎ9~uk% VSGR\r-KLBF^9g; zuQȊAv߇&nm[``/Vk__:j|Nb<h =# |sf& ׿HerWgSX07xoyǎps\=f]ފ P^'+sF0O ;;fX_ul;{L̎JX`p:!TWK`]=;!&װ1F$(;M+4xhdN/鱥0ȝb`K8ȱ Q O+r4oɗz"^vhdbgYvw!$k/t=>¿" v@&""""WȂF<Yuo kjvo(1{4͇y}O&?~Gl[sDZOYgR9~D{MkXΌ xi:}xMv%ڞdƜE6pglwvuֿ֟!"ر]otDDDDzD4i ="""""zDDDDD$)􈈈HLS#"""""1MGDDDDDbB4i ="""""zDDDDD$)􈈈HLS#"""""1MGDDDDDbB4i ="""""zDDDDD$)􈈈HLS#"""""1MGDDDDDbB4i ="""""zDDDDD$=݀/̒*][L9ZN3Cy4;ّ"6XN0 _Lṵwo*"""""ROMYȞ`jӞ煑I^{7Eeh[pAٰCi*Ū au1utFO#U'XT8 G&7e-5+hv_22l#Àp GۇY3ȴA4E}_ɲG3fhvzJضi_ӈuTSm}(CO"ΛLM #0ͣ8Fb^~kWq2#pBdێwy/ (cE#[R|]kuV]1ӟd;%1|lu5a[3S8o"I տdY~bKf͢]^,wF֕51rsǦCI)8=EyCiYv(d,-XW^O_/vHw9=l]j-xD ,2ۻبCQo5l `ld'N@j0#,tis5WJ7ҽ?RhTjzB{*bА NLle Lz:Hvuּ ձҗ?@$I'♞K~VO$o\yyg(@?mqj,O}JnG: dgx%>h>.XFp4 ys` RP|J4 ʪ  H81zAK.h<(ySr ;/elq*]|3wH$IB=cx')8FNw/Skt5W1 JKy*jgV`;} Q-Ky 8WdNY̏הLN;Z"yW: 'HwC?%NN*b+.v:Uu$I ۢSUoa 73"TgiE޶UJE:󋶩!xsg[]ZIUXHvEdP/Iַptt}$R[:~X2((}$Bn!~NRLda|fEȜ2Jmv^f^)/+I$3=9)+lX;H%Dtv!3-fj.Gv[Ĭ&CMDft̅ct:$?RHfK ;$QuI*IܘqLd֔(ɀ/z$I˰[\\:Wl| f~>D-3n)剚(bDGZCGiFחE Qme,2ze9@>zMn# dNiLtOn/MA1?&^nj!QKPU>{vֳL09?yY-IOcjnl#ۃYI$I %3! 3k4EsÉ幃H!O]¬8_ͧ`t⥩Fe10rd,Q5 ( زn#up%T@wv/YSIDAT^|e/ V~⛜>_4cxtj'tkxqfvˢ53.rcfkf߹ϿW %I!6gz^ϧ˱yXf,fdwkΖ;8‰y̛~u[d"!A'|rl'! 
=ǩ>\'LHt&u](c sW3:y]n'G/ \d&?Icyթl?B@F_C$IƆ}\Ö:G#dO(xun8A+;{XFI :I.nNuPx9m(!ΦD Gd%wX r8uZ,M{j>',]Sy }w@y)2*ggۀֿItۼ;\M$I4 {z$qhĻW|W2{^hdZ8̤dv9ďrK+/Sobν?L"rYX3 ˯6•b˻1ks+arf7΁(etcn!I$ C>z2s&SVZ|߭%!̉  ,O<.3PX:5%w[9Ar]Icț4sףXWq͔I: |3F;!V֖sQQ yq)inT?yLE8L]|r0cI ecIjw$I4| m#J-@˹a^۹$@;#f+֔Cϧxw/͟s7jtm9#.>>t*z|S/Gvsmz6԰tٓS>:LGє1D[زq˫+Y< b[8ǾS5o`ku\KU,Z9]g?'`t I$i~{^!H$ aM$I#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I PEwpf s@ŶV><ݧ; ?f֭K[51,Shȯwq+y~uu/_$I$VvvRA:2{WPO#z$IH>s|j3L\Mi:BS?i5'9$I4ԆoAќd unNnL2cYE̩CUxFۚxdJVM̘h@g]u{i,:nKk*y0tصq"];=$mhh$Jovt(=ɣnk_*zɋ=~ n:a`9\O;cÿ6QY;gb[+fNjKA$I a= XVޚm/dE#@^®7k1 긞))-"f`?$IoaAAAI9 gRnь tI_h2c77r 9"|^-$;IYX3yT+I(ueL|~f~,e)2)(k&{\?>۽(23'l'~ie<M}(I$۰.a{c/=OVgNE)?G Aq&\Ɛ;95s$8q^i;t转\=&!wB)O~ I&ݧz'/ o\X̃7Αwp%'fS8־}1հ"rPV=v/Ɏ/W':E+3cP2zIN%I6lA ٥,*O{f.O}j`D#uM{zH 6Ka F iG|f6e7`zz-xz~@g8dP[z߶< ] ;@[k'vS+tSu֝ $IC=I`'4C$Iwy$I$G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$Ij4zF1x;H!+/cθh3FF菟($U-I$7L+ ƣѼ229<8efL$Zh+I$i 聞3\$nLdqOyɼv k2%h8G0$$I4Tj L<=)l'0\lyYH3~ǧJ 8syl,#m1U3})f'Y<}|uZ_Ý)I$԰]}5v@^1R@;''A,IQX )Y\l2K*y8}w~K;-{oT`An!B ""N ZClj d oȲy,H~0Jޔoct(N:{#hAIb1+Ek< #d~I6Jy ֶ~ ݎ=MlvyaF~^獗&~c'HwCtog/qrR s_q)յөju[K$Iҗ31p_O" ^b-NX6h P?=g;9'Ԃ(W7Eg]>@S>b%<< d;Iu]c" k3#9eK=o߽cS|?_귄$I$hkddI9A+ 'YL-"z=fұLP;GW,2z[ZSωTx\i;t转\=&!wB)O~ I~VlA0 Xb rF;YY\I&ܽM$I m@ϙL$7|o邂(37foc+ukp͓p-[wY} vmklu++ZlxV? 
bWަ~ٷٰ#Ǘcicٴ g$I$}__|`!Ğz$ICb/o$I/fH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IREz!{R)U߭p4QT7ɳ18‡z$IzC=9gsuK[cK6ī-/q</'157ʕT+Gw`W˥є_3'3& Wz>w~,^3{O{ͿFڧXJYb=~4)cG3}@i.9'G*Io|'Ō I\"yN&R5}-Ϝ8:֭cwX*W2g\Ƞd*V"c3md˩~ʖɊw1n[)X<{Er_R--45L[EYXD=$IC⛍BJ3k F>LA6Ov>u;8KcDJYXE:7$hF_p 0xf5QWI_CS$IpdW05&ve>h[64Hh?( LKVn!t8;x=\nȞH7-uj# dwK;*I$}o0FS6{2^hwbolb,\Vˬ(\Ius] lyc"+cM\N6ΎF_%bbt.Nl|q+Mn%}nNnLv e2yS6Ϻ6,$I~4LePp%+ r;͝4K)Oyi I4$_Jh;MD]&IqR) I>F$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IPBm:kIENDB`buildbot-3.4.0/master/docs/tutorial/_images/runtests-success.png000066400000000000000000001115551413250514000250620ustar00rootroot00000000000000PNG  IHDR=sBIT|d IDATx}\Tu0w 2`]Xbnֵmf׺Wmm۫uwn[ٯtI R4T QGaF`B(~9n6~v!B!|r!z}MB!1=!B!NzB!!B!D&G!BѫIB!BjnBSz :M!==B!KxDOA!BqݑMqѭlUnCe;=ݜbBBoAvzB!l7sGBAPt\uul:P2>2M!B^h"j>^ӣZwtx; =B!BB9rJeO7[( ux} =B!BBe7~rϏNQgA!B!IB!BjzB!Y]C5Z?4 FMHcl\Х K023v76Ч/!?Z V OkXIϞ}0Sz\Q7sI?VP5I5ѱ_b;|9^@ALä âeEL|kk,Rp%BqtCƫxyO[nUxzgmx M1LSx^X|_ǻ"Tُe5n*&&:=w>/k$8}rSgsZk W=ΔW9zA'W^Ëc3B3rM|1 ,Y7q.nitzt ^/^ DFp#>Ff!ZЍ==n jUCH2L^ӁV妒j\]ˠ$zdu+ѮbEfÙ<֎]3Nⶕ9͘~i{R( Ԅ|>(tPo#=6s ѡF^A ધr^|$; '1JqM]X{kVh >N߁eX}+ȗq ͩznPxٵn?OzIqS/~}3:>{gxR)zDjUud}ujE6JL23͆yV Do(P(4h_#4ͱikȅג/jsvU!{6r -ؽǓFl`Cf&yf;^bHNKosRՑlldiAǘLZhb<Y{>05_0b']f?Gv' L^#30Gz<1|9Mӱ,y@Ƣ_j罪(ڽm9m! 
L-gMEL:s8[21Y;14n oWO2dohHl]0w+3sDHCi43(wl1CzaBxݠڳn| rB^Izn(g9ZtBA[نv4){p9=m `}$p᳕[Rt5{Uk;d4F$z ^X0Tc ~֮|;l[_y1MWco-U%ZePV+2ޟ!_Bҵ%l0˴!V=r}c|rU}HA;eU /nw*ro?476W}4^X0ԁ:MiYs噧fp(*fye/tMP<(SHъoY|&'h $48d,{f֭4Đa&ߔϖwMg-MP]VfZA vK!漍,,dYQOJ6<;HK23jmfm(cHI`7SXMtBhH7D%mDRRbj휫* |'h $$ƣՀjRHVE$WŁlgA3KgVh1k?bWI 2dgsy.G[4pxNv;/hilMto橥`FLNЧeEJvdy-4M=~d-K?ȣpdi!}ƞ苕21#_%dh)d[~cu IfozR/Cɷhz$'ŒOF՚19A; bb0ɵ%2vpc!&)bct]8,+YJaO\\w(r;OeRelU`:P魶h3+W搛8%6]g!|'`K2Lm37+u:/g*]T7`v0=w}nZ *"BXt9+rPυ*yUZfHGN6gxjXsN,b OFky򧑼q A8ڟT-U‚d-Uv˱5>`5弾KE)jXooDD Xoqz]N**CZ]̴X?L|]LdBa":?% Xк1yyf~:[퇄c\#lfUᶐ,~=|idkȘOگg0|;ӎECPmgII{j)9K&k,Qî;1\61.`ι0/\[Zl-Ϭ>7@cCl+e8٬=cp0ģ 6eYQk3&9e'8b$Gl0y5l2qR,s ̒ɡ P7h񳲹nڟ:멪^EM:o=Kh.Fqw?!:%~ P?X Z>j85Tǘ4[G3(86qz_ AAI ^kPBC}zvn.z@B%\U;gu xwwIX62E?\YJ'Μ ^<^ncլ]f٩N|XA~UŊufvU2z3Rqvwc+V U(F-yHu p5lVMٵ`1/+f /fE惤N~JEc\!bS)i3e]p~6k 㕗w4 ~QʬY>`z@;Y#TneWy[;hcMOUg[ c Pçә>g_慜^00(4AJ)rL2;)( ='Y_;zvx[050.mr,7uF;j.ZToP[aR[ ]f?==v6}J= 1 r!Qzb`b5 b:-6($5۱ؼp ϵ!N2BOB& n'kܼR',3'yT矛 q4|_KZmV+V i' 2 K -jin^3Y}8w}$|T21Y[}yxHLH 6w.dmz5s:Fj\1"1ꢿ]-qxQ.‡.ƇͷSW\k[:6[rCⰲ󴇻of_%?9%>1C{mP0%/旟T1.>Ը`ƥD]quxSq2fC)|QJQPrGpA[5iUa5Η 0yjڨBB='0W擌ǬؓƂ Wzɜ*Y#f_ٖ/msq))hLKaJÔGͱ#cv P%tH%S7/7 & 6esj]燸'1Ff}u{՗>?$cge=ܼNBiFά'9gCt/4w5=i*YݾbÒm4"Xd/o7fKK_jƼqf7/{!3y ˜:m"Ђ͜͜OPIy!tv*}!m@ ϯJfsoPzJcwU^V|.j烲ٮT9]vvsTcbw+ ޓov.73y[ᅄ>nFelěؠ6? V>`WU.I Bt;%Tc=Ga2N BAC8K^0%V^/elVdT4u+^Ԏ -yG:Wn*k{aJ^]-l2żIZR]ۉ9RH{%^efv>g;6ko w<jv鋙w|n $wqowWspC-{6㇓8Y3kzÐ]]= ҏ,|Isg̦7~gba`nt~O뵽\RnP4y+.TRS9'vѐR J %^0DϚF9`LNYU q* $h4`Ї ,Yn[q*3s)sl\-| MK<;Vn[>I?$>G(uXNeӬ²dtۭ9Fwe {Wn/l4o}7pa6vukфDcPP7މOקЬpz`V%:,%*KlP@;uJ'j@= w1k7Cp1CUTdv*0gxl Jʔg를l W4)7ak9E8=7\T*u)|kWCTH$UkW¹b<ytD@o|PЉ鉾J*v%XNS1\4ߎx#DrF`3%  lnc#t讒Ѓ?x480˵"Km{0=&6)\|E1U*L(xY텮km&nf/RlXkEi<8>):i~f @/ss}zWw,($G9ePtW^sPl $Rί|xQǁ3ku)jD(Yx߱mge-'{!Ĺj/XT3sD. 
m?Mf%T9+ByGzLcT55ְ^0)8Q<5NRB\ EFpaFT5ngS]C9j^Cr FuGKkQ,?XrC>9鑀@5x7O?oUvvUAYw1~&q,i3kJrYp6\ Ki:GCU>t]m.L 1q4ƨٹԩgp[ŐIԁm~`G܇Yi3|s3GX䷼ڸ]m7|5=CᲗonZW2J}/xqlh@qVsjy(B^;{Ǔg!34P{\,Y)iqSKfCr:kSm]C(F5 R4 wk}0jkkBZW7G^k1^fTﹺSƒ~(x1o\CSQ$l.m}(eͺrs5^d: sbcn^{cxa^j*Y􅯬"~a (HE D7+8#l:ӻ;zȔ0ܕ<eGBpB IDAT b59R[XA]v?/e*r~Pz7Aa\T,y *G  E3@l(!X%9<#C| eɆSl>VɁcS8H{NMj9X9YɎ}XVt .*\!ը]{mXbx6-q#O&)19CSp*+x=c&,?7FXZ(g+(8]GaȤY0(Ã5-Gɳ\fs[H}j<+WvL4 ,^8i3ι2=7{ǖf+d͋#P5;Wu$0xd9$$cxcCQ3\V9q<gu~\)Bw8c&o"9d{;zNL%cm/Lh"SIu mkqwrs+sJOAh~WgS?y 3{ "9VNR~~7>Ԁ:d{9˶4-х1h҂4BG\q[ax#·4>'Evj8OI~gͤYbɮeÄDkiφ diC8*Xq>xAKҍGౣhY,S_~hpyB gJrN}g:qA^;鳇@U1+Ǧ?μ֫+'FJ>^7??կqO,inJkM'm\/NG1][{+ R$qnRvdZ{@鎭urnV>ΫȔa PYrɏxG¾Zy6.HsCxɱd:Sdz ;Y3pakxemc2G,#'gzd.;O,ƤtfN0Rrr&oP?q3I,|F|7EFr2k2ghyH΄[h~P˶-3giZ74nؼnч@&$ĖK'/8_ꒈ0`hu<@8O6[hb$Z}fVĴx&~~ u_zmPY̹ۘ'9-aC?mlేџK̀(^~u5mt>LmtEI؟n"DPWYy~9C'\A&lUͧӶ6pUƃ|Z!>n{$E\8^cۯsAi/[ ڷ=ѭ7֘ѧ/TSW׽( ??yΏ|EP5=mZ*(d :Mɝdt᳎DrTY1#>"r2̀ VZ!dݵD;^1rf Odn*SGʍv;@NBy'B\ =A0&O$Ţl09pF&ss̕/K%Pʙ)th3Pw{ra=ԷnB!BQ@DW|݋|1w ڢ8JUj Vf_|Ӎb萁s6;:uvW;w9iGԙb;?@_suBAMLn 8'28:ji\+H8y*w4MFX%f\вܹJN9>NJ/ !B!.xn@m 3-z>셜:ΘԖnd"TEql;48[5%v|CoAyN= 2 #*.ºĎe@ ᪶rVzis8Ǐ2@ۡB!Bt--H?ַz B!=FJ !B!z5 =B!B^MBB!W#B!$!B!z5 =B!B^MBB!W#B!$!B!z5 =B!B^MBB!W#B!$!B!z5 =B!B^MBB!W#B!$!B!z5 =B!B^MBB!W#B!$!B!z5 =B!B^MBB!W#B!ծP aO7kB\#I'rk1 ?hN,c#_h{|O$S~̟[@B!wW6Xw%.}o:7F [ߎ"H D]{%Ɨ:;.j(>{fEMRzDG~g*Q;gqwhvDžxB!zȕ =r,',J\ՠ)l;yCuVvuyz?9p&ry,^/,4m#W !B.lG|6K[vt1h6~ɤ&I= xMsӲsDtB!*WYnJCAoH%QȗGk.iovQ'L 1:] &4 ulr ^!B+Y]y4Ňecԩ7ge;01Lxw2)%GTEY=aa[{B\G!j vSYkm%z{b(c>=GK;#&I&ZQ̺E[͌V"y%'Hk!B.UUY?4/ b!$?Dfz͇6NHs`lB\B!RDlէ^Uȍ6mY%K7WǕD ?r8"IA %$!Bta>BKX/'NmhEdƯ%̨Z> mB!uwٵ =חllx#v6RϜx%x]|RADV0vfAыIB!TZfV`v1qEk>cHNw҇PϷ } ˋ:6?K1!MB!:|sϣ8VA?w ?ysW3vțϩ㐽 (##szB!:?‚2{c,'.}})+X8ztxZIB!hc4C2HK԰`ˏ@JJkܒB!hGPlSenWKTl0%q-_W!B(4<4.=!e#B=zft#E !B!z5˨F꘡g1},z]B!7 =B\.GՔsd><ԗo)O,GTtCB!OW8`#o$N(|h/_nݏ~wP6.m]<OL[|tu͍wX>:z(͎Քsn?1Ek њ>!h|%oW&q&0,[ !B\qW.<~Ro7o)]A%y7[1|C|ӼBgķ+!{_57Q}Jmdlֹ`6,6o'uFf`cel~`,zy+=k|ov%ĀF}EfWwG-nV N<ҺfjG&nе̂a`X jƼv-'ټM6םbٌؼz+ȑݮ+) nWv&=!|G8t)Q4jV!,ߘ״{`6r6\ X34WCFQ3 O`p̂#k4 
bs3s kz<꺤ǰb`^j,i\4g3.+m8<#ɈxY$X0 =`+Nd3|@{pSǶ1=DFf֓3Cfi Vyy~s[ߩ|GdlUJK[;VoMּtåcD1q Gf~:[>ShU{C>yV` bֲ3eVrxj]vHyWp݃ט&# "M\BMZDDDA1=:73{s< 7]tghKcjˑyw˅ qdo1&!HH+ϼYՁkr#Z digѲX^8?%kmZđɸy"h`n.|J""""]x2\jV6 ,FDN,+PhEӨ/=L%.FH#/bʌ>[}̉w>dsSx0w}D#Y6^MUl2:[g44qwl!U;uKPp0l\_ $weDDDDy]8i<'xȟCoHXN9RVY89Ksi6N̽zkbdTMR01od4uUYy~*@{C~.LL&|wCl%NW1}BϷ3jjn#ΈtY~+!012#'ҳc_c1sxwʩg7)W}Qq,JLW{.*veGȣ~.N#}LG1x22[XI43d.عCg~B<zD5[6YY~.#"""#nxbDVH)EDDDDDB5k =""""w?:IwDDBM>!Q#3ߐNGDDDFf;aSGwFDћV(>"rO#"""""qMGDDDDDB5"{vx%Y>9R{w-#և/xyI"F3#Y3 p~8j҃?6?5;yKd C{aaKY^$Va 1>n1H/DߡbBkǝγ2y0^< XG=/ыIOKD ƺc^<#""PSKe ="= xHj s’_A}{AUCoϒ|WLNj/gu$7v1{2q޽E9׃zvzx;k܏_<Ϡ;-"";Y=U*)ɲ:^]#0|ڃ1ǟx|l?<ƾ=~0bMyfUeaN,V'^z9R$V~?әpJ.ўE~7DDD;;t]Ů׻ r~y<6y$y渾zjL􈈈HҔ"@]^_~S,kweL|y [>Xo־Nv?>y+Wy=9^ .{_?ƿl _gsH:~m&aV|9Wܔ~Mi/:y)Ե_ :i8|C#}sæ7sgM=B! G>{C0?(@K7ԽMDY{"lgREA3=a~L{ΫԆxۚʚ",ǼSȬ/;WD΋{XXrCK.v܌p_"iCOtO0mmk[qIoO#"ԝW3?8%z>L83VZSXY ey~ dX?:4.] aeH(J챏I:/ x C9A'>(Ҷmmk;n5ofoytm-u,9igMAIA«s w-SVxeirh_w^<&xO|?`h /t17G 4_k9_|'=ƀuz|7Bbi#mpM\d`^wWEDD~? Z"myW,.Rp[;V+{_9{7Y:U+vu~ˌ?{'祈Wdϱuox6G+ %j%=WKוʒW%X'=,ye6]k`jVCY5 O\lY<4J+dƴǙq/xY~Sv}^۟g/- qxs %n/ h)A+oZc ޫg}""".=m tk) g6n\ |sG> `㢿.S[4)g9tPq2sd*[YD~=P5Y%ݲgeZ$}i 3bk<9{9H.mQww?|/y9F庂JLMLqC葮R{y8`9~?*ug\do1 'u~Awq$5iRULYx$5zBOQon,8=k,J`W1+V :G`V69j;Bpxm;X O.^Ĭ ^Vt*3/龖X =r֭֮֞MzPޞybr -ɒCYl#lXs6EKsƝ^1M Ag}$Rs^bO>3OeP_.brxnwwތo"(x_`G'םR_>v/?Vv5Wܔ~Mi/:}ìjg0=SOE a?ސ 'H=0/!,lK)+ &BOGj{`c@SқQC stCO̸ڬeֲs{%ޚjNlJ`F1澟J* 7\7& G9XpNcp %m$xm%e;rmW3S> Xq;EHt3{spx21I98z%}SYѱNy. 
0czuz{'׭cuve k􈈈åB}4<V̑4pt,mʼ&NfQGyu- ?@1&3l@ f=[`"s Uh1&VGEg$ci0Mclc̀HǸ3bFob(O|w 'td'?s:=_:="""qK;nO{'WNwX=Kpc˹IZ51 mx{i;E}X&v-¬= bl;e澟Gqa NަhSVÜY㙷4(Y6dfMp:Z`5~sUL A-_}w56_9W+߽ġL*>NtǨg_s1iׇBgx_+v޵7]OCVrbD]BDDD)|f,zEDDD '2z ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDB5k ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDB5k ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDB5kFwW@e>CSdfg{g]AC%aq0F5e;p1Zy4ε>Ohpǜc'lYSyyr,}6wE9H%1̜\oaTEiu۬n-_BфTBU{X/gx(\5 vYUU4ҏDUHe~fN,8sVUy,<~Nz8#"""m{ApNu[pMGF 9u@MזQoa'jQ!FBtn7i"l&t{%Y[r` lyu4Q~ <[7)2vR M_bCAg f[IdbR2Iژll\9ʫo.ou3}L2[J\'T6U 4gAzN`zn0ܓHc]H328lRчNN.5"ذ:ݒg-!DBSs{&-&7s+[SMjXpHgB V~iعϚs`2ؓYjkM:w˅# ;'Tw%pd4 ii8,x5P8уҀ32*<r1ʩۼr_1&7cX-Iz愈t. =fGI'imY,s_ORN'7 2zb.[Wn{nN2V0p 3rap"a#ؽ?<xn1}7̍Np /}d:vrrx;o(A3$md,]BOboL yu L_<|X6{vpT0gx-ͥ'&-*JVͩ;|5| ӛ(_PWfN@-aӾTraU x9P3Ojhq1pOQV<+T˩YfZyDDDDYQ,ݕy8YIǔ IKr޻ݡb]nu-="q'J6a`&"""yEczDB5k ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDB5k ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDB5k ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDBȃbI_c\,#ptwDDDDAFwW@$%cxNuwuDDDDI]z+7Pd[^wwc`!8mC)Lƞ^ϫ̛J[p~e!r+C%m+槥,fc&Of\Od疽]oGG4Q=|0%7u<EYAwq3T{ܜpDc,LM;^]6F0*IG712n =""""]CO>5[XE{偯o{na؝oTTi{cq3sR ȟyu}73'YF/_B+˙""""{ =͜'QK(sH4EW dv\E-|]fT&ff1^W! ̛Mڪr6m9I.1v5DA*`Rwr_\s}A# Zr49|{ &"""ҥ{[f;oo,*b XǰY1L M3&1%SS*acuicr9> g l)acM3F8=@?;xgIɹ>>q;_Y%Ksl 5竏m/GۈX(H/czw>eu(_{>L2°pM6[KB[G{D 75lj=[[#vwGLnw=Jr2m?ٞcmD""""6A_7(Ǿ[LJDy.x30xdymtf'2<\?Yߙrv2cы4Qq5\y^diI8}[WNJ{9>LLH-=""""]K3*|5ko7 [Zz ^)b\l[뼒PKg9: a9Y=ݰ>_7i9)̹8F0 Od`a@V-=""""{;c~ce'!fflF"OA_9̜Q3f3=nw>>Ez4<39%L3!,ғp0.+AfosH[ra[4q]%;Qr%%Y Kڋ.8TJ~晷Y~|"@z6jtB'KI+Cr6?#>=4p[Is$HVH1嗺"qɒϓ>ި3~z^|>c65+`z?Gcj""""ҥ#PK<#"""""qMGDDDDDB5k ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDB5k ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDB5k ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDL,.Rp/ql>RyWÓ/żZZY)y7mx^ze2&vCDDDDD3t-cL&3 05ѻB#jjnt9?T&pUTu3v [M ;^!"""""ҍsI˵I;Zx/Yw.juoafdFi6ް)`Aـ9wtFșFf^j*Wrst=fSW'2o8e\JvvͰۋr3 odۙ'Ȏr%`b^{uzIxqɧhL+֝lH'{qRfpd`SghKcr͹;fOnj$H̛y{XHϝʜD,!c2uz܋ǖObG㋁oyAc`i7u2Ze?#wq툯CEM~22?M=Q;tb! 
»Y݄ݓςK5nM̄[npnm"""""CzzfS}:n>Z%e8 1qQޟ/ m4f?@܎Q|X)`\D6b1Pl{s1"M| 5!"""""1嗺"`Uz """m{L<zDDDDD$)􈈈H\S#"""""qMGDDDDDB5k ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDB5k ="""""zDDDDD$)􈈈H\S#"""""qMGDDDDDB5k ="""""zDDDDD$)􈈈H\Sftw>ZO0=iH ͔/d+iel~P _ZBb^-?-S\S3b۔q_wOa~M֮M8'[FHΖσ,($r\_S??ҌclDQAp‚Om;~*Y6Do|*ݿ}m֭jǂgYFeF|؉nj Lz֖DDDDD>'ⴥ =C>+gd4pxrQ:ÕD"=kfb IދdJzz2pÀ*|bF`2w]ծ u38 ̲T7orv2cы4Qq ܊Г矧ʇU>UTsvHS%^0&ۓOјQGLfsego5Hk\dH 4pz75h{ۭ5rZ[Y12y:1P;a Mfӎc~>p3!H7l>\Gf\V"~&"""":Χ(#\Y-,:Gf+UcV SBYf]84-l>SYX%'h6տbәcmύ'@oajBw*bo}5&bͪj*g2?2@lB֯Ō+qh Jy1:LV^Jm$wwYn;At3xT['I$2A(Hh$Xv.%U-㓣ɻ幃H1+7:'W4̞M]4X ők'x^ 5>xj)A'젱fXMDYSd+HT~yŝmdϣOmdKm;4?#/;~^9{&~5g(I$3=ͮ_v%&ws?7T3'7ENv5LWt';xK!pr??"éNP3{E@o61!}׶}II`Znزcp^>I)|{h{ Uא$IFQ=Wo/9˩yl)?OΙ<1NN⤸衇ӗXX%E$ų(#d? $lBO &p_bӂz^X3 |#{ntK˟}7c0z\45$IfTGO}2x_[E&n"no.K z+j.y%mI@jfjA)3 nZ^U3-Ck7J,+Ͻ}5$IQjTGO,oC*\= R8iK1gzլ+$p,GmRLOYD@*X_7)tyzrʦ#HTXX^H"欮cU2f!I$nzyۘZˆm8?ʶ}UJ% |:A"ldLg}A]/zim69s:֬}zoŔLO];Ī6=gߎKl&?G5re"jd9p5Α?5kH$Iۏ~{Nt!BqOyfr+G2Ԕdu|}'e1$ww~}~$IҨ4:go}΃kܯ-g- g鸘$qH!˞ZGm4mgr^M|`p(ډ'سX9+=U eWLlxf)E'Dla떷HdQn=ue]nl>Xxj^?m`~rJkxb6զ> EU~m;XWOM[ORUKInM{WI$IFgͅ]PIEi*YlZL̯aFb8~Ywٞ,"FsupV`[KѷySY%,GcOR'Z߳Y4<.h&<jo7T>hE8B$IFod.`Ѽ"gSd6_:@zb!cgÿw_+԰Gez %?=rx[8h4 ԅ;;4ͷh4 pŵ7 ɏ01ɟZ_̳/rx[+'?l[A$I Q=լ|'w~oMAExnN ϔuG @O9ѯ#@On>I?b* q?K(.bߖE$IF叓AEe,_xKEЕ s+Lȍf_OLR L?n s "vRdS25"?4"zrL$ibAu3oO<6۽(29WKMڮޤI̝{(I$ߨ.~Ğ> 0GW=vďp"'8ʦfRLOR-dZoxãGgE<8uPD]b7ΑJ\jN%+R<9W־'8x XH y)yd5ON헫 ZuK3u<9)]N}$/k$IҨ\Jr旳l6W{ ^^}k~NnkE#4n{D?|l֬z'6U1`IK8{h~ʵt6H:E^+Y%kyfI@W{ˢ+{';2+X:~\yNn7;ñ)\5f p5}O^o`9d I$hy!uGz$I#ft.o$IdH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IBmF8</|.7GYx '~e%_-8mˣ?w$I1 K͗[e y<4=w̘j{>XI$I#lFm2,1uSZ}NTN!vmG4!t`DF+I$i$[ADf6s\0 \n#$'|u9őV-*/؟|qv V- t9=g/Y}ųYT=fvr>NŚ,f ]&>DK?ԇ)WFaNF:WceLɉBM{^{Q$IjTF|P$bwɚ܂ 7:I[PQ9((Ęt3> RM!ξ{RR?[s+^uCoQrBn\!6+VNq~)Map%>>df~N"Oy>K[C6gɀɅtY#u}gy3/T2q M>w#hNn`:M{"MM:9$I$}76zH'Hd9ӳ䵎W}p'аKo|n4u;C׿f )sw_}SSx嬪Mms#mL\$IQzTo빾+q&0w4֡z]jl 3OEQnrüv)KVh@@\"=-aNC+65ˉk7;[q_}$IuFoOGsc*) 
tcs2͌lc&?©tWB'躵ts$:IZ7|(YqNOrVH}ͣ؂`RƑ?l>M$Im@8L!?zh놢(7f`+d'#rG7ɻ6BZW'NXT.'i|MZ'{{#?$I#=ys/S{-/c`ؾr~j/j1:'g~k(" +k|iU}mP%I"̫0jGY\4qTp=xz[OpI\(SgˋL$iLzvWZp*%%TEl"ʶ)W;~XWeG"%It=-bNN8VS=*8b5퀙w *J${SyÕ?aqMY: = Q01 ]{pxfKxj6\:GŕJ:d"Lܸ}Is~ m3/Of3=$I#,om9:؟\V,eN^Gɒ̉:Z9؜aڊ:NH$~gyHɩ$p*]F*N% 7lGDspu}' +lm=Q>'{wD ֯XSp%{][8t/kl7ck-pu,/1m<.ɫ9#I4~4g<҃F,arʊ\N9>hYJyGz$I#;o3yH`l$ImᏓJfH$u2z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IP3z$I$#I$)ԌI$IfH$I 5G$IR=$IB$IjF$IPp6IENDB`buildbot-3.4.0/master/docs/tutorial/docker.rst000066400000000000000000000122761413250514000214140ustar00rootroot00000000000000.. _first-run-docker-label: ============================== First Buildbot run with Docker ============================== .. note:: Docker can be tricky to get working correctly if you haven't used it before. If you're having trouble, first determine whether it is a Buildbot issue or a Docker issue by running: .. code-block:: bash docker run ubuntu:20.04 apt-get update If that fails, look for help with your Docker install. On the other hand, if that succeeds, then you may have better luck getting help from members of the Buildbot community. Docker_ is a tool that makes building and deploying custom environments a breeze. It uses lightweight linux containers (LXC) and performs quickly, making it a great instrument for the testing community. The next section includes a Docker pre-flight check. If it takes more that 3 minutes to get the 'Success' message for you, try the Buildbot pip-based :ref:`first run ` instead. .. _Docker: https://www.docker.com Current Docker dependencies --------------------------- * Linux system, with at least kernel 3.8 and AUFS support. For example, Standard Ubuntu, Debian and Arch systems. * Packages: lxc, iptables, ca-certificates, and bzip2 packages. * Local clock on time or slightly in the future for proper SSL communication. 
* This tutorial uses docker-compose to run a master, a worker, and a postgresql database server Installation ------------ * Use the `Docker installation instructions `_ for your operating system. * Make sure you install docker-compose. As root or inside a virtualenv, run: .. code-block:: bash pip install docker-compose * Test docker is happy in your environment: .. code-block:: bash sudo docker run -i busybox /bin/echo Success Building and running Buildbot ----------------------------- .. code-block:: bash # clone the example repository git clone --depth 1 https://github.com/buildbot/buildbot-docker-example-config # Build the Buildbot container (it will take a few minutes to download packages) cd buildbot-docker-example-config/simple docker-compose up You should now be able to go to http://localhost:8010 and see a web page similar to: .. image:: _images/index.png :alt: index page Click on "Builds" at the left to open the submenu and then `Builders `_ to see that the worker you just started has connected to the master: .. image:: _images/builders.png :alt: builder runtests is active. Overview of the docker-compose configuration -------------------------------------------- This docker-compose configuration is made as a basis for what you would put in production - Separated containers for each component - A solid database backend with postgresql - A buildbot master that exposes its configuration to the docker host - A buildbot worker that can be cloned in order to add additional power - Containers are linked together so that the only port exposed to external is the web server - The default master container is based on Alpine linux for minimal footprint - The default worker container is based on more widely known Ubuntu distribution, as this is the container you want to customize. 
- Download the config from a tarball accessible via a web server Playing with your Buildbot containers ------------------------------------- If you've come this far, you have a Buildbot environment that you can freely experiment with. In order to modify the configuration, you need to fork the project on github https://github.com/buildbot/buildbot-docker-example-config Then you can clone your own fork, and start the docker-compose again. To modify your config, edit the master.cfg file, commit your changes, and push to your fork. You can use the command buildbot check-config in order to make sure the config is valid before the push. You will need to change ``docker-compose.yml`` the variable ``BUILDBOT_CONFIG_URL`` in order to point to your github fork. The ``BUILDBOT_CONFIG_URL`` may point to a ``.tar.gz`` file accessible from HTTP. Several git servers like github can generate that tarball automatically from the master branch of a git repository If the ``BUILDBOT_CONFIG_URL`` does not end with ``.tar.gz``, it is considered to be the URL to a ``master.cfg`` file accessible from HTTP. Customize your Worker container ------------------------------- It is advised to customize you worker container in order to suit your project's build dependencies and need. An example DockerFile is available which the buildbot community uses for its own CI purposes: https://github.com/buildbot/metabbotcfg/blob/nine/docker/metaworker/Dockerfile Multi-master ------------ A multi-master environment can be setup using the ``multimaster/docker-compose.yml`` file in the example repository .. code-block:: bash # Build the Buildbot container (it will take a few minutes to download packages) cd buildbot-docker-example-config/simple docker-compose up -d docker-compose scale buildbot=4 Going forward ------------- You've got a taste now, but you're probably curious for more. Let's step it up a little in the second tutorial by changing the configuration and doing an actual build. 
Continue on to :ref:`quick-tour-label`. buildbot-3.4.0/master/docs/tutorial/firstrun.rst000066400000000000000000000212511413250514000220120ustar00rootroot00000000000000.. _first-run-label: ========= First Run ========= Goal ---- This tutorial will take you from zero to running your first buildbot master and worker as quickly as possible, without changing the default configuration. This tutorial is all about instant gratification and the five minute experience: in five minutes we want to convince you that this project works, and that you should seriously consider spending time learning the system. In this tutorial no configuration or code changes are done. This tutorial assumes that you are running Unix, but might be adaptable to Windows. Thanks to virtualenv_, installing buildbot in a standalone environment is very easy. For those more familiar with Docker_, there also exists a :ref:`docker version of these instructions `. You should be able to cut and paste each shell block from this tutorial directly into a terminal. Simple introduction to BuildBot ------------------------------- Before trying to run BuildBot it's helpful to know what BuildBot is. BuildBot is a continuous integration framework written in Python. It consists of a master daemon and potentially many worker daemons that usually run on other machines. The master daemon runs a web server that allows the end user to start new builds and to control the behaviour of the BuildBot instance. The master also distributes builds to the workers. The worker daemons connect to the master daemon and execute builds whenever master tells them to do so. In this tutorial we will run a single master and a single worker on the same machine. A more thorough explanation can be found in the :ref:`manual section ` of the Buildbot documentation. .. _Docker: https://docker.com .. _getting-code-label: Getting ready ------------- There are many ways to get the code on your machine. 
We will use the easiest one: via ``pip`` in a virtualenv_. It has the advantage of not polluting your operating system, as everything will be contained in the virtualenv. To make this work, you will need the following installed: * Python_ and the development packages for it * virtualenv_ .. _Python: https://www.python.org/ .. _virtualenv: https://pypi.python.org/pypi/virtualenv Preferably, use your distribution package manager to install these. You will also need a working Internet connection, as virtualenv and pip will need to download other projects from the Internet. The master and builder daemons will need to be able to connect to ``github.com`` via HTTPS to fetch the repo we're testing; if you need to use a proxy for this ensure that either the ``HTTPS_PROXY`` or ``ALL_PROXY`` environment variable is set to your proxy, e.g., by executing ``export HTTPS_PROXY=http://localhost:9080`` in the shell before starting each daemon. .. note:: Buildbot does not require root access. Run the commands in this tutorial as a normal, unprivileged user. Creating a master ----------------- The first necessary step is to create a virtualenv for our master. We will also use a separate directory to demonstrate the distinction between a master and worker: On Python 2: .. code-block:: bash mkdir -p ~/buildbot-test/master cd ~/buildbot-test/master On Python 3: .. code-block:: bash python3 -m venv sandbox source sandbox/bin/activate Next, we need to install several build dependencies to make sure we can install buildbot and its supporting packages. These build dependencies are: * GCC build tools (``gcc`` for RHEL/CentOS/Fedora based distributions, or ``build-essential`` for Ubuntu/Debian based distributions). * Python development library (``python3-devel`` for RHEL/CentOS/Fedora based distributions, or ``python3-dev`` for Ubuntu/Debian based distributions). 
* OpenSSL development library (``openssl-devel`` for RHEL/CentOS/Fedora based distributions, or ``libssl-dev`` for Ubuntu/Debian based distributions). * `libffi` development library (``libffi-devel`` for RHEL/CentOS/Fedora based distributions, or ``libffi-dev`` for Ubuntu/Debian based distributions). Install these build dependencies: .. code-block:: bash # if in Ubuntu/Debian based distributions: sudo apt-get install build-essential python3-dev libssl-dev libffi-dev # if in RHEL/CentOS/Fedora based distributions: sudo yum install gcc python3-devel openssl-devel libffi-devel or refer to your distribution's documentation on how to install these packages. Now that we are ready, we need to install buildbot: .. code-block:: bash pip install --upgrade pip pip install 'buildbot[bundle]' Now that buildbot is installed, it's time to create the master: .. code-block:: bash buildbot create-master master Buildbot's activity is controlled by a configuration file. Buildbot by default uses configuration from file at ``master.cfg``. Buildbot comes with a sample configuration file named ``master.cfg.sample``. We will use the sample configuration file unchanged: .. code-block:: bash mv master/master.cfg.sample master/master.cfg Finally, start the master: .. code-block:: bash buildbot start master You will now see some log information from the master in this terminal. It should end with lines like these: .. code-block:: none 2014-11-01 15:52:55+0100 [-] BuildMaster is running The buildmaster appears to have (re)started correctly. From now on, feel free to visit the web status page running on the port 8010: http://localhost:8010/ Our master now needs (at least) a worker to execute its commands. For that, head on to the next section! Creating a worker ----------------- The worker will be executing the commands sent by the master. In this tutorial, we are using the buildbot/hello-world project as an example. 
As a consequence of this, your worker will need access to the git_ command in order to checkout some code. Be sure that it is installed, or the builds will fail. Same as we did for our master, we will create a virtualenv for our worker next to the other one. It would however be completely ok to do this on another computer - as long as the *worker* computer is able to connect to the *master* one: .. code-block:: bash mkdir -p ~/buildbot-test/worker cd ~/buildbot-test/worker On Python 2: .. code-block:: bash virtualenv sandbox source sandbox/bin/activate On Python 3: .. code-block:: bash python3 -m venv sandbox source sandbox/bin/activate Install the ``buildbot-worker`` command: .. code-block:: bash pip install --upgrade pip pip install buildbot-worker # required for `runtests` build pip install setuptools-trial Now, create the worker: .. code-block:: bash buildbot-worker create-worker worker localhost example-worker pass .. note:: If you decided to create this from another computer, you should replace ``localhost`` with the name of the computer where your master is running. The username (``example-worker``), and password (``pass``) should be the same as those in :file:`master/master.cfg`; verify this is the case by looking at the section for ``c['workers']``: .. code-block:: bash cat ../bb-master/master/master.cfg And finally, start the worker: .. code-block:: bash buildbot-worker start worker Check the worker's output. It should end with lines like these: .. code-block:: none 2014-11-01 15:56:51+0100 [-] Connecting to localhost:9989 2014-11-01 15:56:51+0100 [Broker,client] message from master: attached The worker appears to have (re)started correctly. Meanwhile, from the other terminal, in the master log (:file:`twisted.log` in the master directory), you should see lines like these: .. 
code-block:: none 2014-11-01 15:56:51+0100 [Broker,1,127.0.0.1] worker 'example-worker' attaching from IPv4Address(TCP, '127.0.0.1', 54015) 2014-11-01 15:56:51+0100 [Broker,1,127.0.0.1] Got workerinfo from 'example-worker' 2014-11-01 15:56:51+0100 [-] bot attached You should now be able to go to http://localhost:8010, where you will see a web page similar to: .. image:: _images/index.png :alt: index page Click on "Builds" at the left to open the submenu and then `Builders `_ to see that the worker you just started (identified by the green bubble) has connected to the master: .. image:: _images/builders.png :alt: builder runtests is active. Your master is now quietly waiting for new commits to hello-world. This doesn't happen very often though. In the next section, we'll see how to manually start a build. We just wanted to get you to dip your toes in the water. It's easy to take your first steps, but this is about as far as we can go without touching the configuration. You've got a taste now, but you're probably curious for more. Let's step it up a little in the second tutorial by changing the configuration and doing an actual build. Continue on to :ref:`quick-tour-label`. .. _git: https://git-scm.com/ buildbot-3.4.0/master/docs/tutorial/fiveminutes.rst000066400000000000000000000440151413250514000224770ustar00rootroot00000000000000.. _fiveminutes: =================================================== Buildbot in 5 minutes - a user-contributed tutorial =================================================== (Ok, maybe 10.) Buildbot is really an excellent piece of software, however it can be a bit confusing for a newcomer (like me when I first started looking at it). Typically, at first sight, it looks like a bunch of complicated concepts that make no sense and whose relationships with each other are unclear. After some time and some reread, it all slowly starts to be more and more meaningful, until you finally say "oh!" and things start to make sense. 
Once you get there, you realize that the documentation is great, but only if you already know what it's about. This is what happened to me, at least. Here, I'm going to (try to) explain things in a way that would have helped me more as a newcomer. The approach I'm taking is more or less the reverse of that used by the documentation. That is, I'm going to start from the components that do the actual work (the builders) and go up the chain to the change sources. I hope purists will forgive this unorthodoxy. Here I'm trying to only clarify the concepts and not go into the details of each object or property; the documentation explains those quite well. Installation ------------ I won't cover the installation; both Buildbot master and worker are available as packages for the major distributions, and in any case the instructions in the official documentation are fine. This document will refer to Buildbot 0.8.5 which was current at the time of writing, but hopefully the concepts are not too different in future versions. All the code shown is of course python code, and has to be included in the master.cfg configuration file. We won't cover basic things such as how to define the workers, project names, or other administrative information that is contained in that file; for that, again the official documentation is fine. Builders: the workhorses ------------------------ Since Buildbot is a tool whose goal is the automation of software builds, it makes sense to me to start from where we tell Buildbot how to build our software: the `builder` (or builders, since there can be more than one). Simply put, a builder is an element that is in charge of performing some action or sequence of actions, normally something related to building software (for example, checking out the source, or ``make all``), but it can also run arbitrary commands. A builder is configured with a list of workers that it can use to carry out its task. 
The other fundamental piece of information that a builder needs is, of course, the list of things it has to do (which will normally run on the chosen worker). In Buildbot, this list of things is represented as a ``BuildFactory`` object, which is essentially a sequence of steps, each one defining a certain operation or command. Enough talk, let's see an example. For this example, we are going to assume that our super software project can be built using a simple ``make all``, and there is another target ``make packages`` that creates rpm, deb and tgz packages of the binaries. In the real world things are usually more complex (for example there may be a ``configure`` step, or multiple targets), but the concepts are the same; it will just be a matter of adding more steps to a builder, or creating multiple builders, although sometimes the resulting builders can be quite complex. So to perform a manual build of our project, we would type the following on the command line (assuming we are at the root of the local copy of the repository): .. code-block:: bash $ make clean # clean remnants of previous builds ... $ svn update ... $ make all ... $ make packages ... # optional but included in the example: copy packages to some central machine $ scp packages/*.rpm packages/*.deb packages/*.tgz someuser@somehost:/repository ... Here we're assuming the repository is SVN, but again the concepts are the same with git, mercurial or any other VCS. Now, to automate this, we create a builder where each step is one of the commands we typed above. 
A step can be a shell command object, or a dedicated object that checks out the source code (there are various types for different repositories, see the docs for more info), or yet something else:: from buildbot.plugins import steps, util # first, let's create the individual step objects # step 1: make clean; this fails if the worker has no local copy, but # is harmless and will only happen the first time makeclean = steps.ShellCommand(name="make clean", command=["make", "clean"], description="make clean") # step 2: svn update (here updates trunk, see the docs for more # on how to update a branch, or make it more generic). checkout = steps.SVN(baseURL='svn://myrepo/projects/coolproject/trunk', mode="update", username="foo", password="bar", haltOnFailure=True) # step 3: make all makeall = steps.ShellCommand(name="make all", command=["make", "all"], haltOnFailure=True, description="make all") # step 4: make packages makepackages = steps.ShellCommand(name="make packages", command=["make", "packages"], haltOnFailure=True, description="make packages") # step 5: upload packages to central server. This needs passwordless ssh # from the worker to the server (set it up in advance as part of the worker setup) uploadpackages = steps.ShellCommand( name="upload packages", description="upload packages", command="scp packages/*.rpm packages/*.deb packages/*.tgz someuser@somehost:/repository", haltOnFailure=True) # create the build factory and add the steps to it f_simplebuild = util.BuildFactory() f_simplebuild.addStep(makeclean) f_simplebuild.addStep(checkout) f_simplebuild.addStep(makeall) f_simplebuild.addStep(makepackages) f_simplebuild.addStep(uploadpackages) # finally, declare the list of builders. In this case, we only have one builder c['builders'] = [ util.BuilderConfig(name="simplebuild", workernames=['worker1', 'worker2', 'worker3'], factory=f_simplebuild) ] So our builder is called ``simplebuild`` and can run on either of ``worker1``, ``worker2`` or ``worker3``. 
If our repository has other branches besides trunk, we could create another one or more builders to build them; in this example, only the checkout step would be different, in that it would need to check out the specific branch. Depending on how exactly those branches have to be built, the shell commands may be recycled, or new ones would have to be created if they are different in the branch. You get the idea. The important thing is that all the builders be named differently and all be added to the ``c['builders']`` value (as can be seen above, it is a list of ``BuilderConfig`` objects). Of course the type and number of steps will vary depending on the goal; for example, to just check that a commit doesn't break the build, we could include just up to the ``make all`` step. Or we could have a builder that performs a more thorough test by also doing ``make test`` or other targets. You get the idea. Note that at each step except the very first we use ``haltOnFailure=True`` because it would not make sense to execute a step if the previous one failed (ok, it wouldn't be needed for the last step, but it's harmless and protects us if one day we add another step after it). Schedulers ---------- Now this is all nice and dandy, but who tells the builder (or builders) to run, and when? This is the job of the `scheduler` which is a fancy name for an element that waits for some event to happen, and when it does, based on that information, decides whether and when to run a builder (and which one or ones). There can be more than one scheduler. I'm being purposely vague here because the possibilities are almost endless and highly dependent on the actual setup, build purposes, source repository layout and other elements. So a scheduler needs to be configured with two main pieces of information: on one hand, which events to react to, and on the other hand, which builder or builders to trigger when those events are detected. 
(It's more complex than that, but if you understand this, you can get the rest of the details from the docs). A simple type of scheduler may be a periodic scheduler that runs a certain builder (or builders) when a configurable amount of time has passed. In our example, that's how we would trigger a build every hour:: from buildbot.plugins import schedulers # define the periodic scheduler hourlyscheduler = schedulers.Periodic(name="hourly", builderNames=["simplebuild"], periodicBuildTimer=3600) # define the available schedulers c['schedulers'] = [hourlyscheduler] That's it. Every hour this ``hourly`` scheduler will run the ``simplebuild`` builder. If we have more than one builder that we want to run every hour, we can just add them to the ``builderNames`` list when defining the scheduler. Or since multiple schedulers are allowed, other schedulers can be defined and added to ``c['schedulers']`` in the same way. Other types of schedulers exist; in particular, there are schedulers that can be more dynamic than the periodic one. The typical dynamic scheduler is one that learns about changes in a source repository (generally because some developer checks in some change) and triggers one or more builders in response to those changes. Let's assume for now that the scheduler "magically" learns about changes in the repository (more about this later); here's how we would define it:: from buildbot.plugins import schedulers # define the dynamic scheduler trunkchanged = schedulers.SingleBranchScheduler(name="trunkchanged", change_filter=util.ChangeFilter(branch=None), treeStableTimer=300, builderNames=["simplebuild"]) # define the available schedulers c['schedulers'] = [trunkchanged] This scheduler receives changes happening to the repository, and among all of them, pays attention to those happening in "trunk" (that's what ``branch=None`` means). In other words, it filters the changes to react only to those it's interested in. 
When such changes are detected, and the tree has been quiet for 5 minutes (300 seconds), it runs the ``simplebuild`` builder. The ``treeStableTimer`` helps in those situations where commits tend to happen in bursts, which would otherwise result in multiple build requests queuing up. What if we want to act on two branches (say, trunk and 7.2)? First, we create two builders, one for each branch, and then we create two dynamic schedulers:: from buildbot.plugins import schedulers # define the dynamic scheduler for trunk trunkchanged = schedulers.SingleBranchScheduler(name="trunkchanged", change_filter=util.ChangeFilter(branch=None), treeStableTimer=300, builderNames=["simplebuild-trunk"]) # define the dynamic scheduler for the 7.2 branch branch72changed = schedulers.SingleBranchScheduler( name="branch72changed", change_filter=util.ChangeFilter(branch='branches/7.2'), treeStableTimer=300, builderNames=["simplebuild-72"]) # define the available schedulers c['schedulers'] = [trunkchanged, branch72changed] The syntax of the change filter is VCS-dependent (above is for SVN), but again, once the idea is clear, the documentation has all the details. Another feature of the scheduler is that it can be told which changes, within those it's paying attention to, are important and which are not. For example, there may be a documentation directory in the branch the scheduler is watching, but changes under that directory should not trigger a build of the binary. This finer filtering is implemented by means of the ``fileIsImportant`` argument to the scheduler (full details in the docs and - alas - in the sources). Change sources -------------- Earlier, we said that a dynamic scheduler "magically" learns about changes; the final piece of the puzzle is `change sources`, which are precisely the elements in Buildbot whose task is to detect changes in a repository and communicate them to the schedulers. 
Note that periodic schedulers don't need a change source since they only depend on elapsed time; dynamic schedulers, on the other hand, do need a change source. A change source is generally configured with information about a source repository (which is where changes happen). A change source can watch changes at different levels in the hierarchy of the repository, so for example, it is possible to watch the whole repository or a subset of it, or just a single branch. This determines the extent of the information that is passed down to the schedulers. There are many ways a change source can learn about changes; it can periodically poll the repository for changes, or the VCS can be configured (for example through hook scripts triggered by commits) to push changes into the change source. While these two methods are probably the most common, they are not the only possibilities. It is possible, for example, to have a change source detect changes by parsing an email sent to a mailing list when a commit happens. Yet other methods exist and the manual again has the details. To complete our example, here's a change source that polls a SVN repository every 2 minutes:: from buildbot.plugins import changes, util svnpoller = changes.SVNPoller(repourl="svn://myrepo/projects/coolproject", svnuser="foo", svnpasswd="bar", pollinterval=120, split_file=util.svn.split_file_branches) c['change_source'] = svnpoller This poller watches the whole "coolproject" section of the repository, so it will detect changes in all the branches. We could have said:: repourl = "svn://myrepo/projects/coolproject/trunk" or:: repourl = "svn://myrepo/projects/coolproject/branches/7.2" to watch only a specific branch. To watch another project, you need to create another change source, and you need to filter changes by project. 
For instance, when you add a change source watching project 'superproject' to the above example, you need to change the original scheduler from:: trunkchanged = schedulers.SingleBranchScheduler( name="trunkchanged", change_filter=filter.ChangeFilter(branch=None), # ... ) to e.g.:: trunkchanged = schedulers.SingleBranchScheduler( name="trunkchanged", change_filter=filter.ChangeFilter(project="coolproject", branch=None), # ... ) otherwise, coolproject will be built when there's a change in superproject. Since we're watching more than one branch, we need a method to tell in which branch the change occurred when we detect one. This is what the ``split_file`` argument does, it takes a callable that Buildbot will call to do the job. The split_file_branches function, which comes with Buildbot, is designed for exactly this purpose so that's what the example above uses. And of course this is all SVN-specific, but there are pollers for all the popular VCSs. Note that if you have many projects, branches, and builders, it probably pays not to hardcode all the schedulers and builders in the configuration, but generate them dynamically starting from the list of all projects, branches, targets, etc, and using loops to generate all possible combinations (or only the needed ones, depending on the specific setup), as explained in the documentation chapter about :doc:`../manual/customization`. Reporters --------- Now that the basics are in place, let's go back to the builders, which is where the real work happens. `Reporters` are simply the means Buildbot uses to inform the world about what's happening, that is, how builders are doing. There are many reporters: a mail notifier, an IRC notifier, and others. They are described fairly well in the manual. 
One thing I've found useful is the ability to pass a domain name as the lookup argument to a ``mailNotifier``, which allows you to take an unqualified username as it appears in the SVN change and create a valid email address by appending the given domain name to it:: from buildbot.plugins import reporter # if jsmith commits a change, an email for the build is sent to jsmith@example.org notifier = reporter.MailNotifier(fromaddr="buildbot@example.org", sendToInterestedUsers=True, lookup="example.org") c['reporters'].append(notifier) The mail notifier can be customized at will by means of the ``messageFormatter`` argument, which is a class that Buildbot calls to format the body of the email, and to which it makes available lots of information about the build. For more details, look into the :ref:`Reporters` section of the Buildbot manual. Conclusion ---------- Please note that this article has just scratched the surface; given the complexity of the task of build automation, the possibilities are almost endless. So there's much much more to say about Buildbot. Hopefully this has been a gentle introduction before reading the official manual. Had I found an explanation as the one above when I was approaching Buildbot, I'd have had to read the manual just once, rather than multiple times. I hope this can help someone else. (Thanks to Davide Brini for permission to include this tutorial, derived from one he originally posted at http://backreference.org .) buildbot-3.4.0/master/docs/tutorial/further.rst000066400000000000000000000002341413250514000216130ustar00rootroot00000000000000Further Reading =============== See the following user-contributed tutorials for other highlights and ideas: .. toctree:: :maxdepth: 2 fiveminutes buildbot-3.4.0/master/docs/tutorial/index.rst000066400000000000000000000001671413250514000212500ustar00rootroot00000000000000Buildbot Tutorial ================= Contents: .. 
toctree:: :maxdepth: 2 firstrun tour docker further buildbot-3.4.0/master/docs/tutorial/tour.rst000066400000000000000000000272211413250514000211320ustar00rootroot00000000000000.. _quick-tour-label: ============ A Quick Tour ============ Goal ---- This tutorial will expand on the :ref:`first-run-label` tutorial by taking a quick tour around some of the features of buildbot that are hinted at in the comments in the sample configuration. We will simply change parts of the default configuration and explain the activated features. As a part of this tutorial, we will make buildbot do a few actual builds. This section will teach you how to: - make simple configuration changes and activate them - deal with configuration errors - force builds - enable and control the IRC bot - add a 'try' scheduler The First Build --------------- On the `Builders `_ page, click on the runtests link. You'll see a builder page, and a blue "force" button that will bring up the following dialog box: .. image:: _images/force-build.png :alt: force a build. Click *Start Build* - there's no need to fill in any of the fields in this case. Next, click on `view in waterfall `_. You will now see that a successful test run has happened: .. image:: _images/runtests-success.png :alt: an successful test run happened. This simple process is essentially the whole purpose of the Buildbot project. The information about what actions are executed for a certain build are defined in things called :ref:`builders `. The information about when a certain builder should launch a build are defined in things called :ref:`schedulers `. In fact, the blue "force" button that was pushed in this example activated a scheduler too. Setting Project Name and URL ---------------------------- Let's start simple by looking at where you would customize the buildbot's project name and URL. We continue where we left off in the :ref:`first-run-label` tutorial. 
Open a new terminal, go to the directory you created master in, activate the same virtualenv instance you created before, and open the master configuration file with an editor (here ``$EDITOR`` is your editor of choice like vim, gedit, or emacs): .. code-block:: bash cd ~/buildbot-test/master source sandbox/bin/activate $EDITOR master/master.cfg Now, look for the section marked *PROJECT IDENTITY* which reads:: ####### PROJECT IDENTITY # the 'title' string will appear at the top of this buildbot installation's # home pages (linked to the 'titleURL'). c['title'] = "Hello World CI" c['titleURL'] = "https://buildbot.github.io/hello-world/" If you want, you can change either of these links to anything you want so that you can see what happens when you change them. After making a change, go to the terminal and type: .. code-block:: bash buildbot reconfig master You will see a handful of lines of output from the master log, much like this: .. code-block:: none 2011-12-04 10:11:09-0600 [-] loading configuration from /path/to/buildbot/master.cfg 2011-12-04 10:11:09-0600 [-] configuration update started 2011-12-04 10:11:09-0600 [-] builder runtests is unchanged 2011-12-04 10:11:09-0600 [-] removing IStatusReceiver <...> 2011-12-04 10:11:09-0600 [-] (TCP Port 8010 Closed) 2011-12-04 10:11:09-0600 [-] Stopping factory <...> 2011-12-04 10:11:09-0600 [-] adding IStatusReceiver <...> 2011-12-04 10:11:09-0600 [-] RotateLogSite starting on 8010 2011-12-04 10:11:09-0600 [-] Starting factory <...> 2011-12-04 10:11:09-0600 [-] Setting up http.log rotating 10 files of 10000000 bytes each 2011-12-04 10:11:09-0600 [-] WebStatus using (/path/to/buildbot/public_html) 2011-12-04 10:11:09-0600 [-] removing 0 old schedulers, updating 0, and adding 0 2011-12-04 10:11:09-0600 [-] adding 1 new changesources, removing 1 2011-12-04 10:11:09-0600 [-] gitpoller: using workdir '/path/to/buildbot/gitpoller-workdir' 2011-12-04 10:11:09-0600 [-] GitPoller repository already exists 2011-12-04 
10:11:09-0600 [-] configuration update complete Reconfiguration appears to have completed successfully. The important lines are the ones telling you that the new configuration is being loaded (at the top) and that the update is complete (at the bottom). Now, if you go back to `the waterfall page `_, you will see that the project's name is whatever you may have changed it to, and when you click on the URL of the project name at the bottom of the page, it should take you to the link you put in the configuration. Configuration Errors -------------------- It is very common to make a mistake when configuring buildbot, so you might as well see now what happens in that case and what you can do to fix the error. Open up the config again and introduce a syntax error by removing the first single quote in the two lines you changed before, so they read: .. Format a `none` since this is not a valid Python code .. code-block:: none c[title'] = "Hello World CI" c[titleURL'] = "https://buildbot.github.io/hello-world/" This creates a Python ``SyntaxError``. Now go ahead and reconfig the master: .. code-block:: bash buildbot reconfig master This time, the output looks like: .. 
code-block:: none 2015-08-14 18:40:46+0000 [-] beginning configuration update 2015-08-14 18:40:46+0000 [-] Loading configuration from '/data/buildbot/master/master.cfg' 2015-08-14 18:40:46+0000 [-] error while parsing config file: Traceback (most recent call last): File "/usr/local/lib/python2.7/dist-packages/buildbot/master.py", line 265, in reconfig d = self.doReconfig() File "/usr/local/lib/python2.7/dist-packages/twisted/internet/defer.py", line 1274, in unwindGenerator return _inlineCallbacks(None, gen, Deferred()) File "/usr/local/lib/python2.7/dist-packages/twisted/internet/defer.py", line 1128, in _inlineCallbacks result = g.send(result) File "/usr/local/lib/python2.7/dist-packages/buildbot/master.py", line 289, in doReconfig self.configFileName) --- --- File "/usr/local/lib/python2.7/dist-packages/buildbot/config.py", line 156, in loadConfig exec f in localDict exceptions.SyntaxError: EOL while scanning string literal (master.cfg, line 103) 2015-08-14 18:40:46+0000 [-] error while parsing config file: EOL while scanning string literal (master.cfg, line 103) (traceback in logfile) 2015-08-14 18:40:46+0000 [-] reconfig aborted without making any changes Reconfiguration failed. Please inspect the master.cfg file for errors, correct them, then try 'buildbot reconfig' again. This time, it's clear that there was a mistake in the configuration. Luckily, the Buildbot master will ignore the wrong configuration and keep running with the previous configuration. The message is clear enough, so open the configuration again, fix the error, and reconfig the master. Enabling the IRC Bot -------------------- Buildbot includes an IRC bot that you can tell to join a channel to control and report on the status of buildbot. .. note:: Security Note Please note that any user having access to your IRC channel, or can send a private message to the bot, will be able to create or stop builds :bug:`3377`. 
First, start an IRC client of your choice, connect to irc.freenode.net and join an empty channel. In this example we will use ``#buildbot-test``, so go join that channel. (*Note: please do not join the main buildbot channel!*) Edit :file:`master.cfg` and look for the *BUILDBOT SERVICES* section. At the end of that section add the lines:: c['services'].append(reporters.IRC(host="irc.freenode.net", nick="bbtest", channels=["#buildbot-test"])) The reconfigure the master and type: .. code-block:: bash grep -i irc master/twistd.log The log output should contain a line like this: .. code-block:: none 2016-11-13 15:53:06+0100 [-] Starting factory <...> 2016-11-13 15:53:19+0100 [IrcStatusBot,client] <...>: I have joined #buildbot-test You should see the bot now joining in your IRC client. In your IRC channel, type: .. code-block:: none bbtest: commands to get a list of the commands the bot supports. Let's tell the bot to notify on certain events. To learn on which EVENTS we can notify, type: .. code-block:: none bbtest: help notify Now, let's set some event notifications: .. code-block:: irc <@lsblakk> bbtest: notify on started finished failure < bbtest> The following events are being notified: ['started', 'failure', 'finished'] Now, go back to the web interface and force another build. Alternatively, ask the bot to force a build: .. code-block:: irc <@lsblakk> bbtest: force build --codebase= runtests < bbtest> build #1 of runtests started < bbtest> Hey! build runtests #1 is complete: Success [finished] You can also see the new builds in the web interface. .. image:: _images/irc-testrun.png :alt: a successful test run from IRC happened. The full documentation is available at :bb:reporter:`IRC`. Setting Authorized Web Users ---------------------------- The default configuration allows everyone to perform any task, like creating or stopping builds via the web interface. 
To restrict this to a user, look for:: c['www'] = dict(port=8010, plugins=dict(waterfall_view={}, console_view={})) and append:: c['www']['authz'] = util.Authz( allowRules = [ util.AnyEndpointMatcher(role="admins") ], roleMatchers = [ util.RolesFromUsername(roles=['admins'], usernames=['Alice']) ] ) c['www']['auth'] = util.UserPasswordAuth([('Alice','Password1')]) For more details, see :ref:`Web-Authentication`. Adding a 'try' scheduler ------------------------ Buildbot includes a way for developers to submit patches for testing without committing them to the source code control system. (This is really handy for projects that support several operating systems or architectures.) To set this up, add the following lines to master.cfg:: from buildbot.scheduler import Try_Userpass c['schedulers'] = [] c['schedulers'].append(Try_Userpass( name='try', builderNames=['runtests'], port=5555, userpass=[('sampleuser','samplepass')])) Then you can submit changes using the :bb:cmdline:`try` command. Let's try this out by making a one-line change to hello-world, say, to make it trace the tree by default: .. code-block:: bash git clone https://github.com/buildbot/hello-world.git hello-world-git cd hello-world-git/hello $EDITOR __init__.py # change 'return "hello " + who' on line 6 to 'return "greets " + who' Then run buildbot's ``try`` command as follows: .. code-block:: bash cd ~/buildbot-test/master source sandbox/bin/activate buildbot try --connect=pb --master=127.0.0.1:5555 \ --username=sampleuser --passwd=samplepass --vc=git This will do ``git diff`` for you and send the resulting patch to the server for build and test against the latest sources from Git. Now go back to the `waterfall `_ page, click on the runtests link, and scroll down. You should see that another build has been started with your change (and stdout for the tests should be chock-full of parse trees as a result). The "Reason" for the job will be listed as "'try' job", and the blamelist will be empty. 
To make yourself show up as the author of the change, use the ``--who=emailaddr`` option on ``buildbot try`` to pass your email address. To make a description of the change show up, use the ``--properties=comment="this is a comment"`` option on ``buildbot try``. To use ssh instead of a private username/password database, see :bb:sched:`Try_Jobdir`. buildbot-3.4.0/master/setup.cfg000066400000000000000000000000431413250514000164260ustar00rootroot00000000000000[aliases] test = trial -m buildbot buildbot-3.4.0/master/setup.py000077500000000000000000000544541413250514000163410ustar00rootroot00000000000000#!/usr/bin/env python # # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Standard setup script. """ from setuptools import setup # isort:skip import glob import inspect import os import pkg_resources import sys from distutils.command.install_data import install_data from distutils.command.sdist import sdist from pkg_resources import parse_version from buildbot import version BUILDING_WHEEL = bool("bdist_wheel" in sys.argv) def include(d, e): """Generate a pair of (directory, file-list) for installation. 
'd' -- A directory 'e' -- A glob pattern""" return (d, [f for f in glob.glob('{}/{}'.format(d, e)) if os.path.isfile(f)]) def include_statics(d): r = [] for root, ds, fs in os.walk(d): r.append((root, [os.path.join(root, f) for f in fs])) return r class install_data_twisted(install_data): """make sure data files are installed in package. this is evil. copied from Twisted/setup.py. """ def finalize_options(self): self.set_undefined_options('install', ('install_lib', 'install_dir'), ) super().finalize_options() def run(self): super().run() # ensure there's a buildbot/VERSION file fn = os.path.join(self.install_dir, 'buildbot', 'VERSION') open(fn, 'w').write(version) self.outfiles.append(fn) class our_sdist(sdist): def make_release_tree(self, base_dir, files): sdist.make_release_tree(self, base_dir, files) # ensure there's a buildbot/VERSION file fn = os.path.join(base_dir, 'buildbot', 'VERSION') open(fn, 'w').write(version) # ensure that NEWS has a copy of the latest release notes, with the # proper version substituted src_fn = os.path.join('docs', 'relnotes/index.rst') with open(src_fn) as f: src = f.read() src = src.replace('|version|', version) dst_fn = os.path.join(base_dir, 'NEWS') with open(dst_fn, 'w') as f: f.write(src) def define_plugin_entry(name, module_name): """ helper to produce lines suitable for setup.py's entry_points """ if isinstance(name, tuple): entry, name = name else: entry = name return '{} = {}:{}'.format(entry, module_name, name) def concat_dicts(*dicts): result = dict() for d in dicts: result.update(d) return result def define_plugin_entries(groups): """ helper to all groups for plugins """ result = dict() for group, modules in groups: tempo = [] for module_name, names in modules: tempo.extend([define_plugin_entry(name, module_name) for name in names]) result[group] = tempo return result __file__ = inspect.getframeinfo(inspect.currentframe()).filename with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as long_d_f: 
long_description = long_d_f.read() setup_args = { 'name': "buildbot", 'version': version, 'description': "The Continuous Integration Framework", 'long_description': long_description, 'author': "Brian Warner", 'author_email': "warner-buildbot@lothar.com", 'maintainer': "Dustin J. Mitchell", 'maintainer_email': "dustin@v.igoro.us", 'url': "http://buildbot.net/", 'classifiers': [ 'Development Status :: 5 - Production/Stable', 'Environment :: No Input/Output (Daemon)', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)', 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Testing', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], 'packages': [ "buildbot", "buildbot.configurators", "buildbot.worker", "buildbot.worker.protocols", "buildbot.changes", "buildbot.clients", "buildbot.data", "buildbot.db", "buildbot.db.migrations.versions", "buildbot.db.types", "buildbot.machine", "buildbot.monkeypatches", "buildbot.mq", "buildbot.plugins", "buildbot.process", "buildbot.process.users", "buildbot.reporters", "buildbot.reporters.generators", "buildbot.schedulers", "buildbot.scripts", "buildbot.secrets", "buildbot.secrets.providers", "buildbot.statistics", "buildbot.statistics.storage_backends", "buildbot.steps", "buildbot.steps.package", "buildbot.steps.package.deb", "buildbot.steps.package.rpm", "buildbot.steps.source", "buildbot.util", "buildbot.wamp", "buildbot.www", "buildbot.www.hooks", "buildbot.www.authz", ] + ([] if BUILDING_WHEEL else [ # skip tests for wheels (save 50% of the archive) "buildbot.test", "buildbot.test.util", "buildbot.test.fake", "buildbot.test.fakedb", "buildbot.test.fuzz", "buildbot.test.integration", "buildbot.test.integration.interop", "buildbot.test.regressions", "buildbot.test.unit", ]), 'data_files': [ 
include("buildbot/reporters/templates", "*.txt"), ("buildbot/db/migrations", [ "buildbot/db/migrations/alembic.ini", ]), include("buildbot/db/migrations/versions", "*.py"), ("buildbot/scripts", [ "buildbot/scripts/sample.cfg", "buildbot/scripts/buildbot_tac.tmpl", ]), include("buildbot/spec", "*.raml"), include("buildbot/spec/types", "*.raml"), include("buildbot/test/unit/test_templates_dir", "*.html"), include("buildbot/test/unit/test_templates_dir/plugin", "*.*"), include("buildbot/test/integration/pki", "*.*"), include("buildbot/test/integration/pki/ca", "*.*"), ] + include_statics("buildbot/www/static"), 'cmdclass': {'install_data': install_data_twisted, 'sdist': our_sdist}, 'entry_points': concat_dicts(define_plugin_entries([ ('buildbot.changes', [ ('buildbot.changes.mail', [ 'MaildirSource', 'CVSMaildirSource', 'SVNCommitEmailMaildirSource', 'BzrLaunchpadEmailMaildirSource']), ('buildbot.changes.bitbucket', ['BitbucketPullrequestPoller']), ('buildbot.changes.github', ['GitHubPullrequestPoller']), ('buildbot.changes.gerritchangesource', [ 'GerritChangeSource', 'GerritEventLogPoller']), ('buildbot.changes.gitpoller', ['GitPoller']), ('buildbot.changes.hgpoller', ['HgPoller']), ('buildbot.changes.p4poller', ['P4Source']), ('buildbot.changes.pb', ['PBChangeSource']), ('buildbot.changes.svnpoller', ['SVNPoller']) ]), ('buildbot.schedulers', [ ('buildbot.schedulers.basic', [ 'SingleBranchScheduler', 'AnyBranchScheduler']), ('buildbot.schedulers.dependent', ['Dependent']), ('buildbot.schedulers.triggerable', ['Triggerable']), ('buildbot.schedulers.forcesched', ['ForceScheduler']), ('buildbot.schedulers.timed', [ 'Periodic', 'Nightly', 'NightlyTriggerable']), ('buildbot.schedulers.trysched', [ 'Try_Jobdir', 'Try_Userpass']) ]), ('buildbot.secrets', [ ('buildbot.secrets.providers.file', ['SecretInAFile']), ('buildbot.secrets.providers.passwordstore', ['SecretInPass']), ('buildbot.secrets.providers.vault', ['HashiCorpVaultSecretProvider']), 
('buildbot.secrets.providers.vault_hvac', [ 'HashiCorpVaultKvSecretProvider', 'VaultAuthenticatorToken', 'VaultAuthenticatorApprole']) ]), ('buildbot.worker', [ ('buildbot.worker.base', ['Worker']), ('buildbot.worker.ec2', ['EC2LatentWorker']), ('buildbot.worker.libvirt', ['LibVirtWorker']), ('buildbot.worker.openstack', ['OpenStackLatentWorker']), ('buildbot.worker.docker', ['DockerLatentWorker']), ('buildbot.worker.kubernetes', ['KubeLatentWorker']), ('buildbot.worker.local', ['LocalWorker']), ]), ('buildbot.machine', [ ('buildbot.machine.base', ['Machine']), ]), ('buildbot.steps', [ ('buildbot.process.buildstep', ['BuildStep']), ('buildbot.steps.cmake', ['CMake']), ('buildbot.steps.cppcheck', ['Cppcheck']), ('buildbot.steps.gitdiffinfo', ['GitDiffInfo']), ('buildbot.steps.http', [ 'HTTPStep', 'POST', 'GET', 'PUT', 'DELETE', 'HEAD', 'OPTIONS']), ('buildbot.steps.master', [ 'MasterShellCommand', 'SetProperty', 'SetProperties', 'LogRenderable', "Assert"]), ('buildbot.steps.maxq', ['MaxQ']), ('buildbot.steps.mswin', ['Robocopy']), ('buildbot.steps.package.deb.lintian', ['DebLintian']), ('buildbot.steps.package.deb.pbuilder', [ 'DebPbuilder', 'DebCowbuilder', 'UbuPbuilder', 'UbuCowbuilder']), ('buildbot.steps.package.rpm.mock', [ 'Mock', 'MockBuildSRPM', 'MockRebuild']), ('buildbot.steps.package.rpm.rpmbuild', ['RpmBuild']), ('buildbot.steps.package.rpm.rpmlint', ['RpmLint']), ('buildbot.steps.python', [ 'BuildEPYDoc', 'PyFlakes', 'PyLint', 'Sphinx']), ('buildbot.steps.python_twisted', [ 'HLint', 'Trial', 'RemovePYCs']), ('buildbot.steps.shell', [ 'ShellCommand', 'TreeSize', 'SetPropertyFromCommand', 'Configure', 'WarningCountingShellCommand', 'Compile', 'Test', 'PerlModuleTest']), ('buildbot.steps.shellsequence', ['ShellSequence']), ('buildbot.steps.source.bzr', ['Bzr']), ('buildbot.steps.source.cvs', ['CVS']), ('buildbot.steps.source.darcs', ['Darcs']), ('buildbot.steps.source.gerrit', ['Gerrit']), ('buildbot.steps.source.git', ['Git', 'GitCommit', 'GitPush', 
'GitTag']), ('buildbot.steps.source.github', ['GitHub']), ('buildbot.steps.source.gitlab', ['GitLab']), ('buildbot.steps.source.mercurial', ['Mercurial']), ('buildbot.steps.source.mtn', ['Monotone']), ('buildbot.steps.source.p4', ['P4']), ('buildbot.steps.source.repo', ['Repo']), ('buildbot.steps.source.svn', ['SVN']), ('buildbot.steps.subunit', ['SubunitShellCommand']), ('buildbot.steps.transfer', [ 'FileUpload', 'DirectoryUpload', 'MultipleFileUpload', 'FileDownload', 'StringDownload', 'JSONStringDownload', 'JSONPropertiesDownload']), ('buildbot.steps.trigger', ['Trigger']), ('buildbot.steps.vstudio', [ 'VC6', 'VC7', 'VS2003', 'VC8', 'VS2005', 'VCExpress9', 'VC9', 'VS2008', 'VC10', 'VS2010', 'VC11', 'VS2012', 'VC12', 'VS2013', 'VC14', 'VS2015', 'VC141', 'VS2017', 'MsBuild4', 'MsBuild', 'MsBuild12', 'MsBuild14', 'MsBuild141']), ('buildbot.steps.worker', [ 'SetPropertiesFromEnv', 'FileExists', 'CopyDirectory', 'RemoveDirectory', 'MakeDirectory']), ]), ('buildbot.reporters', [ ('buildbot.reporters.generators.build', [ 'BuildStatusGenerator', 'BuildStartEndStatusGenerator' ]), ('buildbot.reporters.generators.buildrequest', [ 'BuildRequestGenerator' ]), ('buildbot.reporters.generators.buildset', ['BuildSetStatusGenerator']), ('buildbot.reporters.generators.worker', ['WorkerMissingGenerator']), ('buildbot.reporters.mail', ['MailNotifier']), ('buildbot.reporters.pushjet', ['PushjetNotifier']), ('buildbot.reporters.pushover', ['PushoverNotifier']), ('buildbot.reporters.message', [ 'MessageFormatter', 'MessageFormatterEmpty', 'MessageFormatterFunction', 'MessageFormatterMissingWorker', 'MessageFormatterRenderable', ]), ('buildbot.reporters.gerrit', ['GerritStatusPush']), ('buildbot.reporters.gerrit_verify_status', ['GerritVerifyStatusPush']), ('buildbot.reporters.http', ['HttpStatusPush']), ('buildbot.reporters.github', ['GitHubStatusPush', 'GitHubCommentPush']), ('buildbot.reporters.gitlab', ['GitLabStatusPush']), ('buildbot.reporters.bitbucketserver', [ 
'BitbucketServerStatusPush', 'BitbucketServerCoreAPIStatusPush', 'BitbucketServerPRCommentPush' ]), ('buildbot.reporters.bitbucket', ['BitbucketStatusPush']), ('buildbot.reporters.irc', ['IRC']), ('buildbot.reporters.telegram', ['TelegramBot']), ('buildbot.reporters.zulip', ['ZulipStatusPush']), ]), ('buildbot.util', [ # Connection seems to be a way too generic name, though ('buildbot.worker.libvirt', ['Connection']), ('buildbot.changes.filter', ['ChangeFilter']), ('buildbot.changes.gerritchangesource', ['GerritChangeFilter']), ('buildbot.changes.svnpoller', [ ('svn.split_file_projects_branches', 'split_file_projects_branches'), ('svn.split_file_branches', 'split_file_branches'), ('svn.split_file_alwaystrunk', 'split_file_alwaystrunk')]), ('buildbot.configurators.janitor', ['JanitorConfigurator']), ('buildbot.config', ['BuilderConfig']), ('buildbot.locks', [ 'MasterLock', 'WorkerLock', ]), ('buildbot.manhole', [ 'AuthorizedKeysManhole', 'PasswordManhole', 'TelnetManhole']), ('buildbot.process.builder', [ 'enforceChosenWorker', ]), ('buildbot.process.factory', [ 'BuildFactory', 'GNUAutoconf', 'CPAN', 'Distutils', 'Trial', 'BasicBuildFactory', 'QuickBuildFactory', 'BasicSVN']), ('buildbot.process.logobserver', ['LogLineObserver']), ('buildbot.process.properties', [ 'FlattenList', 'Interpolate', 'Property', 'Transform', 'WithProperties', 'renderer', 'Secret']), ('buildbot.process.users.manual', [ 'CommandlineUserManager']), ('buildbot.revlinks', ['RevlinkMatch']), ('buildbot.reporters.utils', ['URLForBuild']), ('buildbot.schedulers.canceller', ['OldBuildCanceller']), ('buildbot.schedulers.canceller_buildset', ['FailingBuildsetCanceller']), ('buildbot.schedulers.forcesched', [ 'AnyPropertyParameter', 'BooleanParameter', 'ChoiceStringParameter', 'CodebaseParameter', 'FileParameter', 'FixedParameter', 'InheritBuildParameter', 'IntParameter', 'NestedParameter', 'ParameterGroup', 'PatchParameter', 'StringParameter', 'TextParameter', 'UserNameParameter', 
'WorkerChoiceParameter', ]), ('buildbot.process.results', [ 'Results', 'SUCCESS', 'WARNINGS', 'FAILURE', 'SKIPPED', 'EXCEPTION', 'RETRY', 'CANCELLED']), ('buildbot.steps.source.repo', [ ('repo.DownloadsFromChangeSource', 'RepoDownloadsFromChangeSource'), ('repo.DownloadsFromProperties', 'RepoDownloadsFromProperties')]), ('buildbot.steps.shellsequence', ['ShellArg']), ('buildbot.util.kubeclientservice', [ 'KubeHardcodedConfig', 'KubeCtlProxyConfigLoader', 'KubeInClusterConfigLoader' ]), ('buildbot.util.ssfilter', ['SourceStampFilter']), ('buildbot.www.avatar', ['AvatarGravatar', 'AvatarGitHub']), ('buildbot.www.auth', [ 'UserPasswordAuth', 'HTPasswdAuth', 'RemoteUserAuth', 'CustomAuth']), ('buildbot.www.ldapuserinfo', ['LdapUserInfo']), ('buildbot.www.oauth2', [ 'GoogleAuth', 'GitHubAuth', 'GitLabAuth', 'BitbucketAuth']), ('buildbot.db.dbconfig', [ 'DbConfig']), ('buildbot.www.authz', [ 'Authz', 'fnmatchStrMatcher', 'reStrMatcher']), ('buildbot.www.authz.roles', [ 'RolesFromEmails', 'RolesFromGroups', 'RolesFromOwner', 'RolesFromUsername', 'RolesFromDomain']), ('buildbot.www.authz.endpointmatchers', [ 'AnyEndpointMatcher', 'StopBuildEndpointMatcher', 'ForceBuildEndpointMatcher', 'RebuildBuildEndpointMatcher', 'AnyControlEndpointMatcher', 'EnableSchedulerEndpointMatcher' ]), ]), ('buildbot.webhooks', [ ('buildbot.www.hooks.base', ['base']), ('buildbot.www.hooks.bitbucket', ['bitbucket']), ('buildbot.www.hooks.github', ['github']), ('buildbot.www.hooks.gitlab', ['gitlab']), ('buildbot.www.hooks.gitorious', ['gitorious']), ('buildbot.www.hooks.poller', ['poller']), ('buildbot.www.hooks.bitbucketcloud', ['bitbucketcloud']), ('buildbot.www.hooks.bitbucketserver', ['bitbucketserver']) ]) ]), { 'console_scripts': [ 'buildbot=buildbot.scripts.runner:run', ]} ) } # set zip_safe to false to force Windows installs to always unpack eggs # into directories, which seems to work better -- # see http://buildbot.net/trac/ticket/907 if sys.platform == "win32": setup_args['zip_safe'] 
= False setup_args['entry_points']['console_scripts'].append( 'buildbot_windows_service=buildbot.scripts.windows_service:HandleCommandLine' ) py_36 = sys.version_info[0] > 3 or ( sys.version_info[0] == 3 and sys.version_info[1] >= 6) if not py_36: raise RuntimeError("Buildbot master requires at least Python-3.6") # pip<1.4 doesn't have the --pre flag, and will thus attempt to install alpha # and beta versions of Buildbot. Prevent that from happening. VERSION_MSG = """ This is a pre-release version of Buildbot, which can only be installed with pip-1.4 or later Try installing the latest stable version of Buildbot instead: pip install buildbot==0.8.12 See https://pypi.python.org/pypi/buildbot to verify the current stable version. """ if 'a' in version or 'b' in version: try: pip_dist = pkg_resources.get_distribution('pip') except pkg_resources.DistributionNotFound: pip_dist = None if pip_dist: if parse_version(pip_dist.version) < parse_version('1.4'): raise RuntimeError(VERSION_MSG) twisted_ver = ">= 17.9.0" autobahn_ver = ">= 0.16.0" txaio_ver = ">= 2.2.2" bundle_version = version.split("-")[0] # dependencies setup_args['install_requires'] = [ 'setuptools >= 8.0', 'Twisted ' + twisted_ver, 'Jinja2 >= 2.1', # required for tests, but Twisted requires this anyway 'zope.interface >= 4.1.1', 'sqlalchemy >= 1.3.0, < 1.5', 'alembic >= 1.6.0', 'python-dateutil>=1.5', 'txaio ' + txaio_ver, 'autobahn ' + autobahn_ver, 'PyJWT', 'pyyaml' ] # buildbot_windows_service needs pywin32 if sys.platform == "win32": setup_args['install_requires'].append('pywin32') # Unit test dependencies. test_deps = [ # http client libraries 'treq', 'txrequests', # pypugjs required for custom templates tests 'pypugjs', # boto3 and moto required for running EC2 tests 'boto3', 'moto', 'mock>=2.0.0', 'parameterized', ] if sys.platform != 'win32': test_deps += [ # LZ4 fails to build on Windows: # https://github.com/steeve/python-lz4/issues/27 # lz4 required for log compression tests. 
'lz4', ] setup_args['tests_require'] = test_deps setup_args['extras_require'] = { 'test': [ 'setuptools_trial', 'isort', # spellcheck introduced in version 1.4.0 'pylint<1.7.0', 'pyenchant', 'flake8~=3.9.2', ] + test_deps, 'bundle': [ "buildbot-www=={0}".format(bundle_version), "buildbot-worker=={0}".format(bundle_version), "buildbot-waterfall-view=={0}".format(bundle_version), "buildbot-console-view=={0}".format(bundle_version), "buildbot-grid-view=={0}".format(bundle_version), ], 'tls': [ 'Twisted[tls] ' + twisted_ver, # There are bugs with extras inside extras: # # so we explicitly include Twisted[tls] dependencies. 'pyopenssl >= 16.0.0', 'service_identity', 'idna >= 0.6', ], 'docs': [ 'docutils>=0.16.0', 'sphinx>=3.2.0', 'sphinx-rtd-theme>=0.5', 'sphinxcontrib-spelling', 'sphinxcontrib-websupport', 'pyenchant', 'sphinx-jinja', 'towncrier', ], } if '--help-commands' in sys.argv or 'trial' in sys.argv or 'test' in sys.argv: setup_args['setup_requires'] = [ 'setuptools_trial', ] if os.getenv('NO_INSTALL_REQS'): setup_args['install_requires'] = None setup_args['extras_require'] = None if __name__ == '__main__': setup(**setup_args) # Local Variables: # fill-column: 71 # End: buildbot-3.4.0/master/tox.ini000066400000000000000000000004711413250514000161250ustar00rootroot00000000000000# Tox (http://tox.testrun.org/) is a tool for running tests # in multiple virtualenvs. This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. 
[tox] envlist = py26, py27 [testenv] commands = python setup.py test buildbot-3.4.0/newsfragments/000077500000000000000000000000001413250514000162005ustar00rootroot00000000000000buildbot-3.4.0/newsfragments/README.txt000066400000000000000000000016041413250514000176770ustar00rootroot00000000000000This is the directory for news fragments used by towncrier: https://github.com/hawkowl/towncrier You create a news fragment in this directory when you make a change, and the file gets removed from this directory when the news is published. towncrier has a few standard types of news fragments, signified by the file extension. These are: .feature: Signifying a new feature. .bugfix: Signifying a bug fix. .doc: Signifying a documentation improvement. .removal: Signifying a deprecation or removal of public API. The core of the filename can be the fixed issue number of any unique text relative to your work. Buildbot project does not require a tracking ticket to be made for each contribution even if this is appreciated. Please point to the trac bug using syntax: (:bug:`NNN`) Please point to the github bug using syntax: (:issue:`NNN`) please point to classes using syntax: `HttpStatusPush`. buildbot-3.4.0/pkg/000077500000000000000000000000001413250514000140765ustar00rootroot00000000000000buildbot-3.4.0/pkg/README000066400000000000000000000024141413250514000147570ustar00rootroot00000000000000 Buildbot: build/test automation http://buildbot.net Brian Warner Dustin J. Mitchell Buildbot is a continuous integration system designed to automate the build/test cycle. By automatically rebuilding and testing the tree each time something has changed, build problems are pinpointed quickly, before other developers are inconvenienced by the failure. Features * Buildbot is easy to set up, but very extensible and customizable. 
It supports arbitrary build processes, and is not limited to common build processes for particular languages (e.g., autotools or ant) * Buildbot supports building and testing on a variety of platforms. Developers, who do not have the facilities to test their changes everywhere before committing, will know shortly afterwards whether they have broken the build or not. * Buildbot has minimal requirements for workers: using virtualenv, only a Python installation is required. * Workers can be run behind a NAT firewall and communicate with the master * Buildbot has a variety of status-reporting tools to get information about builds in front of developers in a timely manner. Buildbot-pkg:: This package contains utilities and common code for building and testing www plugins. buildbot-3.4.0/pkg/buildbot_pkg.py000066400000000000000000000205341413250514000171210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # Method to add build step taken from here # https://seasonofcode.com/posts/how-to-add-custom-build-steps-and-commands-to-setuppy.html import datetime import os import re import subprocess import sys from pkg_resources import parse_version from subprocess import PIPE from subprocess import STDOUT from subprocess import Popen import setuptools.command.build_py import setuptools.command.egg_info from setuptools import setup import distutils.cmd # isort:skip old_listdir = os.listdir def listdir(path): # patch listdir to avoid looking into node_modules l = old_listdir(path) if "node_modules" in l: l.remove("node_modules") return l os.listdir = listdir def check_output(cmd, shell): """Version of check_output which does not throw error""" popen = subprocess.Popen(cmd, shell=shell, stdout=subprocess.PIPE) out = popen.communicate()[0].strip() if not isinstance(out, str): out = out.decode(sys.stdout.encoding) return out def gitDescribeToPep440(version): # git describe produce version in the form: v0.9.8-20-gf0f45ca # where 20 is the number of commit since last release, and gf0f45ca is the short commit id preceded by 'g' # we parse this a transform into a pep440 release version 0.9.9.dev20 (increment last digit and add dev before 20) VERSION_MATCH = re.compile(r'(?P\d+)\.(?P\d+)\.(?P\d+)(\.post(?P\d+))?(-(?P\d+))?(-g(?P.+))?') v = VERSION_MATCH.search(version) if v: major = int(v.group('major')) minor = int(v.group('minor')) patch = int(v.group('patch')) if v.group('dev'): patch += 1 dev = int(v.group('dev')) return "{}.{}.{}-dev{}".format(major, minor, patch, dev) if v.group('post'): return "{}.{}.{}.post{}".format(major, minor, patch, v.group('post')) return "{}.{}.{}".format(major, minor, patch) return v def mTimeVersion(init_file): cwd = os.path.dirname(os.path.abspath(init_file)) m = 0 for root, dirs, files in os.walk(cwd): for f in files: m = max(os.path.getmtime(os.path.join(root, f)), m) d = datetime.datetime.utcfromtimestamp(m) 
return d.strftime("%Y.%m.%d") def getVersionFromArchiveId(git_archive_id='1634372192 (HEAD -> master, tag: v3.4.0)'): """ Extract the tag if a source is from git archive. When source is exported via `git archive`, the git_archive_id init value is modified and placeholders are expanded to the "archived" revision: %ct: committer date, UNIX timestamp %d: ref names, like the --decorate option of git-log See man gitattributes(5) and git-log(1) (PRETTY FORMATS) for more details. """ # mangle the magic string to make sure it is not replaced by git archive if not git_archive_id.startswith('$For''mat:'): # source was modified by git archive, try to parse the version from # the value of git_archive_id match = re.search(r'tag:\s*v([^,)]+)', git_archive_id) if match: # archived revision is tagged, use the tag return gitDescribeToPep440(match.group(1)) # archived revision is not tagged, use the commit date tstamp = git_archive_id.strip().split()[0] d = datetime.datetime.utcfromtimestamp(int(tstamp)) return d.strftime('%Y.%m.%d') return None def getVersion(init_file): """ Return BUILDBOT_VERSION environment variable, content of VERSION file, git tag or 'latest' """ try: return os.environ['BUILDBOT_VERSION'] except KeyError: pass try: cwd = os.path.dirname(os.path.abspath(init_file)) fn = os.path.join(cwd, 'VERSION') with open(fn) as f: return f.read().strip() except IOError: pass version = getVersionFromArchiveId() if version is not None: return version try: p = Popen(['git', 'describe', '--tags', '--always'], stdout=PIPE, stderr=STDOUT, cwd=cwd) out = p.communicate()[0] if (not p.returncode) and out: v = gitDescribeToPep440(str(out)) if v: return v except OSError: pass try: # if we really can't find the version, we use the date of modification of the most recent file # docker hub builds cannot use git describe return mTimeVersion(init_file) except Exception: # bummer. 
lets report something return "latest" # JS build strategy: # # Obviously, building javascript with setuptools is not really something supported initially # # The goal of this hack are: # - override the distutils command to insert our js build # - has very small setup.py # # from buildbot_pkg import setup_www # # setup_www( # ... # packages=["buildbot_myplugin"] # ) # # We need to override the first command done, so that source tree is populated very soon, # as well as version is found from git tree or "VERSION" file # # This supports following setup.py commands: # # - develop, via egg_info # - install, via egg_info # - sdist, via egg_info # - bdist_wheel, via build # This is why we override both egg_info and build, and the first run build # the js. class BuildJsCommand(distutils.cmd.Command): """A custom command to run JS build.""" description = 'run JS build' already_run = False def initialize_options(self): """Set default values for options.""" def finalize_options(self): """Post-process options.""" def run(self): """Run command.""" if self.already_run: return package = self.distribution.packages[0] if os.path.exists("webpack.config.js"): shell = bool(os.name == 'nt') yarn_program = None for program in ["yarnpkg", "yarn"]: try: yarn_version = check_output([program, "--version"], shell=shell) if yarn_version != "": yarn_program = program break except subprocess.CalledProcessError: pass assert yarn_program is not None, "need nodejs and yarn installed in current PATH" yarn_bin = check_output([yarn_program, "bin"], shell=shell).strip() commands = [ [yarn_program, 'install', '--pure-lockfile'], [yarn_program, 'run', 'build'], ] for command in commands: self.announce('Running command: {}'.format(str(" ".join(command))), level=distutils.log.INFO) subprocess.check_call(command, shell=shell) self.copy_tree(os.path.join(package, 'static'), os.path.join( "build", "lib", package, "static")) with open(os.path.join("build", "lib", package, "VERSION"), "w") as f: 
f.write(self.distribution.metadata.version) with open(os.path.join(package, "VERSION"), "w") as f: f.write(self.distribution.metadata.version) self.already_run = True class BuildPyCommand(setuptools.command.build_py.build_py): """Custom build command.""" def run(self): self.run_command('build_js') super().run() class EggInfoCommand(setuptools.command.egg_info.egg_info): """Custom egginfo command.""" def run(self): self.run_command('build_js') super().run() def setup_www_plugin(**kw): package = kw['packages'][0] if 'version' not in kw: kw['version'] = getVersion(os.path.join(package, "__init__.py")) setup(cmdclass=dict( egg_info=EggInfoCommand, build_py=BuildPyCommand, build_js=BuildJsCommand), **kw) buildbot-3.4.0/pkg/setup.cfg000066400000000000000000000000001413250514000157050ustar00rootroot00000000000000buildbot-3.4.0/pkg/setup.py000077500000000000000000000022521413250514000156140ustar00rootroot00000000000000#!/usr/bin/env python # # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from setuptools import setup import buildbot_pkg setup( name='buildbot-pkg', version=buildbot_pkg.getVersion("."), description='Buildbot packaging tools', author=u'Pierre Tardy', author_email=u'tardyp@gmail.com', url='http://buildbot.net/', py_modules=['buildbot_pkg'], install_requires=[ "setuptools >= 21.2.1", ], classifiers=[ 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)' ], ) buildbot-3.4.0/pkg/test_buildbot_pkg.py000066400000000000000000000066021413250514000201600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import shutil import sys from subprocess import call from subprocess import check_call from textwrap import dedent from twisted.trial import unittest class BuildbotWWWPkg(unittest.TestCase): pkgName = "buildbot_www" pkgPaths = ["www", "base"] epName = "base" loadTestScript = dedent(""" import pkg_resources import re apps = {} for ep in pkg_resources.iter_entry_points('buildbot.www'): apps[ep.name] = ep.load() print(apps["%(epName)s"]) assert("scripts.js" in apps["%(epName)s"].resource.listNames()) assert(re.match(r'\d+\.\d+\.\d+', apps["%(epName)s"].version) is not None) assert(apps["%(epName)s"].description is not None) """) @property def path(self): return os.path.abspath(os.path.join(os.path.dirname(__file__), "..", *self.pkgPaths)) def rmtree(self, d): if os.path.isdir(d): shutil.rmtree(d) def setUp(self): call("pip uninstall -y " + self.pkgName, shell=True) self.rmtree(os.path.join(self.path, "build")) self.rmtree(os.path.join(self.path, "dist")) self.rmtree(os.path.join(self.path, "static")) def run_setup(self, cmd): check_call([sys.executable, 'setup.py', cmd], cwd=self.path) def check_correct_installation(self): # assert we can import buildbot_www # and that it has an endpoint with resource containing file "script.js" check_call([ sys.executable, '-c', self.loadTestScript % dict(epName=self.epName)]) def test_install(self): self.run_setup("install") self.check_correct_installation() def test_wheel(self): self.run_setup("bdist_wheel") check_call("pip install dist/*.whl", shell=True, cwd=self.path) self.check_correct_installation() def test_develop(self): self.run_setup("develop") self.check_correct_installation() def test_develop_via_pip(self): check_call("pip install -e .", shell=True, cwd=self.path) self.check_correct_installation() def test_sdist(self): self.run_setup("sdist") check_call("pip install dist/*.tar.gz", shell=True, cwd=self.path) self.check_correct_installation() class 
BuildbotConsolePkg(BuildbotWWWPkg): pkgName = "buildbot-console-view" pkgPaths = ["www", "console_view"] epName = "console_view" class BuildbotWaterfallPkg(BuildbotWWWPkg): pkgName = "buildbot-waterfall-view" pkgPaths = ["www", "waterfall_view"] epName = "waterfall_view" class BuildbotCodeparameterPkg(BuildbotWWWPkg): pkgName = "buildbot-codeparameter" pkgPaths = ["www", "codeparameter"] epName = "codeparameter" buildbot-3.4.0/pyinstaller/000077500000000000000000000000001413250514000156635ustar00rootroot00000000000000buildbot-3.4.0/pyinstaller/buildbot-worker.py000066400000000000000000000002041413250514000213440ustar00rootroot00000000000000from __future__ import absolute_import from __future__ import print_function from buildbot_worker.scripts.runner import run run() buildbot-3.4.0/pyinstaller/buildbot-worker.spec000066400000000000000000000015771413250514000216640ustar00rootroot00000000000000# -*- mode: python -*- block_cipher = None a = Analysis(['buildbot-worker.py'], pathex=[], binaries=[], datas=[], hiddenimports=["buildbot_worker", "buildbot_worker.scripts.create_worker", "buildbot_worker.scripts.start", "buildbot_worker.scripts.stop", "buildbot_worker.scripts.restart", "buildbot_worker.bot"], hookspath=[], runtime_hooks=[], excludes=[], win_no_prefer_redirects=False, win_private_assemblies=False, cipher=block_cipher) pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher) exe = EXE(pyz, a.scripts, a.binaries, a.zipfiles, a.datas, name='buildbot-worker', debug=False, strip=False, upx=True, runtime_tmpdir=None, console=True ) buildbot-3.4.0/pyproject.toml000066400000000000000000000015741413250514000162400ustar00rootroot00000000000000[tool.towncrier] package = "buildbot" package_dir = "master" directory = "newsfragments" filename = "master/docs/relnotes/index.rst" template = "master/docs/relnotes.rst.jinja" title_format = "{name} ``{version}`` ( ``{project_date}`` )" [[tool.towncrier.section]] path = "" [[tool.towncrier.type]] directory = "feature" name = 
"Features" showcontent = true [[tool.towncrier.type]] directory = "bugfix" name = "Bug fixes" showcontent = true [[tool.towncrier.type]] directory = "doc" name = "Improved Documentation" showcontent = true [[tool.towncrier.type]] directory = "removal" name = "Deprecations and Removals" showcontent = true [[tool.towncrier.type]] directory = "misc" name = "Misc" showcontent = false buildbot-3.4.0/readthedocs.yaml000066400000000000000000000002011413250514000164570ustar00rootroot00000000000000version: 2 build: image: latest python: version: 3.6 install: - requirements: requirements-readthedocs.txt buildbot-3.4.0/requirements-ci.txt000066400000000000000000000035541413250514000172010ustar00rootroot00000000000000alabaster==0.7.12 appdirs==1.4.4 asn1crypto==1.4.0 astroid==2.4.2; attrs==21.2.0 autobahn==20.12.3; python_version < "3.7" # pyup: ignore autobahn==21.3.1; python_version >= "3.7" Automat==20.2.0 Babel==2.9.1 backports.functools-lru-cache==1.6.4 boto==2.49.0 boto3==1.18.1 botocore==1.21.1 cffi==1.14.6 click==8.0.1 configparser==5.0.2 constantly==15.1.0 cookies==2.2.1 cryptography==3.4.7 decorator==5.0.9 dicttoxml==1.7.4 docker==5.0.0 docutils==0.17.1 flake8==3.9.2 funcsigs==1.0.2 future==0.18.2 graphql-core==3.1.5 hvac==0.10.11 idna==2.10 # pyup: ignore (conflicts with moto on master) imagesize==1.2.0 incremental==17.5.0 ipaddress==1.0.23 isort==4.3.21 # pyup: ignore (until https://github.com/PyCQA/pylint/pull/3725 is merged) Jinja2==3.0.1 jmespath==0.10.0 jsonref==0.2 lazy-object-proxy==1.4.1 # pyup: ignore (required by astroid) ldap3==2.9.1 lz4==3.1.3 markdown2==2.4.0 MarkupSafe==2.0.1 mccabe==0.6.1 mock==4.0.3 moto==2.1.0 olefile==0.46 packaging==21.0 parameterized==0.8.1 pathlib2==2.3.6 pbr==5.6.0 pep8==1.7.1 Pillow==8.3.2 pyaml==20.4.0 ruamel.yaml==0.17.16 pyasn1==0.4.8 pyasn1-modules==0.2.8 pycodestyle==2.7.0 pycparser==2.20 pyenchant==3.2.1 pyflakes==2.3.1 pypugjs==5.9.9 PyJWT==2.1.0 pylint==2.6.0; pyOpenSSL==20.0.1 pyparsing==2.4.7 python-dateutil==2.8.2 
python-subunit==1.4.0 pytz==2021.1 PyYAML==5.4.1 requests==2.26.0 s3transfer==0.5.0 scandir==1.10.0 service-identity==21.1.0 setuptools-trial==0.6.0 singledispatch==3.6.2 six==1.16.0 snowballstemmer==2.1.0 SQLAlchemy==1.4.22 sqlparse==0.4.2 termcolor==1.1.0 toml==0.10.2 towncrier==21.3.0 treq==21.5.0 Twisted==21.2.0 txaio==21.2.1 txrequests==0.9.6 unidiff==0.6.0 webcolors==1.11.1 Werkzeug==2.0.1 wrapt==1.12.1 xmltodict==0.12.0 zope.interface==5.4.0 coverage==5.5 codecov==2.1.11 -e master -e worker -e pkg # we install buildbot www from pypi to avoid the slow nodejs build at each test buildbot-www==3.2.0 buildbot-3.4.0/requirements-cidb.txt000066400000000000000000000000711413250514000174760ustar00rootroot00000000000000psycopg2-binary==2.9.1 mysqlclient==2.0.3 pg8000==1.16.6 buildbot-3.4.0/requirements-cidocs.txt000066400000000000000000000002321413250514000200400ustar00rootroot00000000000000Sphinx==4.1.1 sphinx-jinja==1.1.0 sphinx-rtd-theme==0.5.2 sphinxcontrib-spelling==7.2.1 sphinxcontrib-websupport==1.2.4 Pygments==2.9.0 towncrier==21.3.0 buildbot-3.4.0/requirements-ciworker.txt000066400000000000000000000007731413250514000204330ustar00rootroot00000000000000attrs==21.2.0 Automat==20.2.0 constantly==15.1.0 funcsigs==1.0.2 future==0.18.2 hyperlink==21.0.0 idna==2.10 # pyup: ignore (conflicts with moto on master) incremental==17.5.0 # pin mock, because 4.x no longer supports Python 3.5 mock==3.0.5 # pyup: ignore pbr==5.6.0 # pin PyHamcrest, because 2.x no longer supports Python 2.7 PyHamcrest==1.9.0 # pyup: ignore six==1.16.0 Twisted==21.2.0; python_version >= "3.6" Twisted==20.3.0; python_version < "3.0" # pyup: ignore zope.interface==5.4.0 -e worker buildbot-3.4.0/requirements-master-docker-extras.txt000066400000000000000000000001031413250514000226350ustar00rootroot00000000000000requests==2.26.0 psycopg2==2.9.1 txrequests==0.9.6 pycairo==1.20.1 buildbot-3.4.0/requirements-minimal.txt000066400000000000000000000001761413250514000202310ustar00rootroot00000000000000# 
Requirements list for minimal local development testing -e master[tls,test,docs] -e worker[test] -e pkg buildbot_www==3.2.0 buildbot-3.4.0/requirements-readthedocs.txt000066400000000000000000000001731413250514000210650ustar00rootroot00000000000000# Requirements list for building documentation on ReadTheDocs. # Install master with the docs dependencies -e master[docs]buildbot-3.4.0/smokes/000077500000000000000000000000001413250514000146165ustar00rootroot00000000000000buildbot-3.4.0/smokes/e2e/000077500000000000000000000000001413250514000152715ustar00rootroot00000000000000buildbot-3.4.0/smokes/e2e/about.scenarios.ts000066400000000000000000000011031413250514000207330ustar00rootroot00000000000000// test goal: checks the capability to navigate on about web page // to use previous and next link import { AboutPage } from './pages/about'; describe('', function() { let about = null; beforeEach(() => about = new AboutPage('runtests')); describe('check about page', () => it('should navigate to the about page, check the default elements inside', async () => { await about.goAbout(); await about.checkBuildbotTitle(); await about.checkConfigTitle(); await about.checkAPIDescriptionTitle(); }) ); }); buildbot-3.4.0/smokes/e2e/buildsnavigation.scenarios.ts000066400000000000000000000056051413250514000231760ustar00rootroot00000000000000// test goal: checks the capability to navigate in a dedicated build // to use previous and next link import { BuilderPage } from './pages/builder'; import { HomePage } from './pages/home'; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from './utils'; describe('previousnextlink', function() { let builder = null; beforeEach(function() { builder = new BuilderPage('runtests', 'force'); }); afterEach(async () => { const homePage = new HomePage(); await homePage.waitAllBuildsFinished(); }); it('has afterEach working', function() { }); it('should navigate in the builds history by using the previous next 
links', async () => { await builder.go(); const lastbuild = await builder.getLastFinishedBuildNumber(); // Build #1 let force = await builder.goForce(); await force.clickStartButtonAndWaitRedirectToBuild(); await builder.go(); await builder.waitBuildFinished(lastbuild + 1); // Build #2 force = await builder.goForce(); await force.clickStartButtonAndWaitRedirectToBuild(); await builder.go(); await builder.waitBuildFinished(lastbuild + 2); await builder.goBuild(+lastbuild + 2); const lastBuildURL = await browser.getCurrentUrl(); let previousButton = builder.getPreviousButton(); await bbrowser.wait(EC.elementToBeClickable(previousButton), "previous button not clickable"); await previousButton.click() expect(await browser.getCurrentUrl()).not.toMatch(lastBuildURL); let nextButton = builder.getNextButton(); await bbrowser.wait(EC.elementToBeClickable(nextButton), "next button not clickable"); await nextButton.click(); expect(await browser.getCurrentUrl()).toMatch(lastBuildURL); }); }); describe('forceandstop', function() { let builder = null; beforeEach(function() { builder = new BuilderPage('slowruntests', 'force'); }); it('should create a build with a dedicated reason and stop it during execution', async () => { let force = await builder.goForce(); await force.clickStartButtonAndWaitRedirectToBuild(); expect(await browser.getCurrentUrl()).toMatch("/builders/\[1-9]/builds/\[1-9]"); let stopButton = builder.getStopButton(); await bbrowser.wait(EC.elementToBeClickable(stopButton), "stop button not clickable"); await stopButton.click(); const buildStatusIsCancelled = async () => { let elements = await element.all(By.css('.bb-build-result.results_CANCELLED')); return (elements.length !== 0); }; await bbrowser.wait(buildStatusIsCancelled, "build could not be cancelled"); }); }); buildbot-3.4.0/smokes/e2e/dashboard.scenarios.ts000066400000000000000000000023401413250514000215540ustar00rootroot00000000000000// test goal: checks the the number of element present in home page // 
to test this part: two different builds need to be started import { BuilderPage } from './pages/builder'; import { DashboardPage } from './pages/dashboard'; import { HomePage } from './pages/home'; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; describe('dashboard page', function() { let builder = null; let home = null; let dashboard = null; beforeEach(async () => { builder = new BuilderPage('runtests', 'force'); dashboard = new DashboardPage(); home = new HomePage(); await builder.goBuildersList(); }); afterEach(done => browser.manage().logs().get('browser').then(function(browserLog) { // uncomment when following bug is fixed https://crbug.com/902918 // console.log(browserLog); // expect(browserLog.length).toEqual(0); return done(); }) ); it('should go to the dashboard page and see no error', async () => { let force = await builder.goForce(); await force.clickStartButtonAndWaitRedirectToBuild(); await home.waitAllBuildsFinished(); await dashboard.go(); }); }); buildbot-3.4.0/smokes/e2e/home.scenarios.ts000066400000000000000000000023011413250514000205520ustar00rootroot00000000000000// test goal: checks the the number of element present in home page // to test this part: two different builds need to be started import { BuilderPage } from './pages/builder'; import { HomePage } from './pages/home'; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; describe('home page', function() { let builder = null; let home = null; beforeEach(async () => { builder = new BuilderPage('runtests', 'force'); home = new HomePage(); await home.loginUser("my@email.com", "mypass"); }); afterEach(async () => await home.logOut()); it('should go to the home page and check if panel with builder name exists', async () => { const builderName = { "0" : "runtests" }; await builder.go(); const buildnumber = await builder.getLastFinishedBuildNumber(); let force = await builder.goForce(); await force.clickStartButtonAndWaitRedirectToBuild(); 
await builder.go(); await builder.waitBuildFinished(buildnumber + 1); await home.go(); const panel0 = home.getPanel().first(); expect(await panel0.getText()).toContain(builderName[0]); }); }); buildbot-3.4.0/smokes/e2e/hook.scenarios.ts000066400000000000000000000023161413250514000205700ustar00rootroot00000000000000import { browser, by, element, ExpectedConditions as EC } from 'protractor'; import { post } from 'request'; import { ConsolePage } from './pages/console'; import { BuilderPage } from './pages/builder'; import { HomePage } from './pages/home'; import { testPageUrl } from './pages/base'; describe('change hook', function() { let builder = null; beforeEach(function() { builder = new BuilderPage('runtests1', 'force'); }); afterEach(async () => { const homePage = new HomePage(); await homePage.waitAllBuildsFinished(); }); it('should create a build', async () => { await builder.go(); let lastbuild = await builder.getLastFinishedBuildNumber(); await post(`${testPageUrl}/change_hook/base`).form({ comments:'sd', project:'pyflakes', repository:'git://github.com/buildbot/hello-world.git', author:'foo ', committer:'foo ', revision: 'HEAD', branch:'master' }); await builder.waitBuildFinished(lastbuild + 1); let result = await builder.getBuildResult(lastbuild + 1); expect(result).toEqual("SUCCESS"); }); }); buildbot-3.4.0/smokes/e2e/pages/000077500000000000000000000000001413250514000163705ustar00rootroot00000000000000buildbot-3.4.0/smokes/e2e/pages/about.ts000066400000000000000000000023601413250514000200530ustar00rootroot00000000000000// this file will contains the different generic functions which // will be called by the different tests import { BasePage } from "./base"; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from '../utils'; export class AboutPage extends BasePage { constructor(builder) { super(); this.builder = builder; } async goAbout() { await bbrowser.get('#/about'); await 
bbrowser.wait(EC.urlContains('#/about'), "URL does not contain #/about"); } async checkBuildbotTitle() { const aboutTitle = element.all(By.css('h2')).first(); const title:string = await aboutTitle.getText(); expect(title).toContain('About this'); expect(title).toContain('buildbot'); } async checkConfigTitle() { const configurationTitle = element.all(By.css('h2')).get(1); const title:string = await configurationTitle.getText(); expect(title).toContain('Configuration'); } async checkAPIDescriptionTitle() { const dependenciesTitle = element.all(By.css('h2')).get(2); const dependenciesText:string = await dependenciesTitle.getText(); expect(dependenciesText).toContain('API description'); } } buildbot-3.4.0/smokes/e2e/pages/base.ts000066400000000000000000000017401413250514000176540ustar00rootroot00000000000000// this file will contains the different generic functions which // will be called by the different tests // inspired by this methodology // http://www.lindstromhenrik.com/using-protractor-with.jsscript/ import { bbrowser } from '../utils'; export const testPageUrl = 'http://localhost:8011' export class BasePage { // accessors for elements that all pages have (menu, login, etc) constructor() {} async logOut() { await element(By.css('.navbar-right a.dropdown-toggle')).click(); await element(By.linkText('Logout')).click(); const anonymousButton = element.all(By.css('.dropdown')).first(); expect(await anonymousButton.getText()).toContain("Anonymous"); } async loginUser(user, password) { await bbrowser.get(`http://${user}:${password}@localhost:8011/auth/login`); const anonymousButton = element.all(By.css('.dropdown')).first(); expect(await anonymousButton.getText()).not.toContain("Anonymous"); } } buildbot-3.4.0/smokes/e2e/pages/builder.ts000066400000000000000000000127751413250514000204020ustar00rootroot00000000000000// this file will contains the different generic functions which // will be called by the different tests import { BasePage } from "./base"; import { 
ForcePage } from './force'; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from '../utils'; export class BuilderPage extends BasePage { builder: string; forceName: string; constructor(builder, forcename) { super(); this.builder = builder; this.forceName=forcename; } async goBuildersList() { await bbrowser.get('#/builders'); await bbrowser.wait(EC.urlContains('#/builders'), "URL does not contain #/builders"); } async go() { await bbrowser.get('#/builders'); await bbrowser.wait(EC.urlContains('#/builders'), "URL does not contain #/builders"); let localBuilder = element(By.linkText(this.builder)); await bbrowser.wait(EC.elementToBeClickable(localBuilder), "local builder not clickable"); await localBuilder.click(); const isBuilderPage = async () => { let url = await browser.getCurrentUrl(); return (new RegExp("#/builders/[0-9]+$")).test(url); }; await bbrowser.wait(isBuilderPage, "Did not got to builder page"); } async goForce() { await this.go(); var forceButton = element.all(By.buttonText(this.forceName)).first(); await bbrowser.wait(EC.elementToBeClickable(forceButton), "force button not clickable"); await forceButton.click(); return new ForcePage(); } async goBuild(buildRef) { await this.go(); const matchLink = async (elem) => { return await elem.getText() == buildRef.toString(); }; var buildLink = element.all(By.css('.bb-buildid-link')) .filter(matchLink) .first(); await bbrowser.wait(EC.elementToBeClickable(buildLink), "build link not clickable"); await buildLink.click(); } async getLastFinishedBuildNumber() { await browser.actions().mouseMove(element(by.css('.navbar-brand'))).perform(); var buildLinks = element.all(By.css('.bb-buildid-link')); let finishedBuildCss = 'span.badge-status.results_SUCCESS, ' + 'span.badge-status.results_WARNINGS, ' + 'span.badge-status.results_FAILURE, ' + 'span.badge-status.results_SKIPPED, ' + 'span.badge-status.results_EXCEPTION, ' + 'span.badge-status.results_RETRY, ' + 
'span.badge-status.results_CANCELLED '; let elements = await buildLinks.all(By.css(finishedBuildCss)); if (elements.length === 0) { return 0; } return +await elements[0].getText(); } async getBuildResult(buildNumber) { const matchElement = async (elem) => { return await elem.getText() == buildNumber.toString(); }; var buildLink = element.all(By.css('.bb-buildid-link')).filter(matchElement); if (await buildLink.count() == 0) { return "NOT FOUND"; } var resultTypes = [ ['.badge-status.results_SUCCESS', "SUCCESS"], ['.badge-status.results_WARNINGS', "WARNINGS"], ['.badge-status.results_FAILURE', "FAILURE"], ['.badge-status.results_SKIPPED', "SKIPPED"], ['.badge-status.results_EXCEPTION', "EXCEPTION"], ['.badge-status.results_RETRY', "RETRY"], ['.badge-status.results_CANCELLED', "CANCELLED"] ]; for (let i = 0; i < resultTypes.length; i++) { var answer = buildLink.all(By.css(resultTypes[i][0])); if (await answer.count() > 0) { return resultTypes[i][1]; } } return "NOT FOUND"; } async waitBuildFinished(reference) { const self = this; async function buildCountIncrement() { let currentBuildCount = await self.getLastFinishedBuildNumber(); return currentBuildCount == reference; } await bbrowser.wait(buildCountIncrement, "Build count did not increment"); } async waitGoToBuild(expected_buildnumber) { const isInBuild = async () => { let buildUrl = await browser.getCurrentUrl(); const split = buildUrl.split("/"); const builds_part = split[split.length-2]; const number = +split[split.length-1]; if (builds_part !== "builds") { return false; } if (number !== expected_buildnumber) { return false; } return true; } await bbrowser.wait(isInBuild, "Did not get into build"); } getStopButton() { return element(By.buttonText('Stop')); } getPreviousButton() { return element(By.partialLinkText('Previous')); } getNextButton() { return element(By.partialLinkText('Next')); } getRebuildButton() { return element(By.buttonText('Rebuild')); } async checkBuilderURL() { const builderLink = 
element.all(By.linkText(this.builder)); expect(await builderLink.count()).toBeGreaterThan(0); } } buildbot-3.4.0/smokes/e2e/pages/console.ts000066400000000000000000000011721413250514000204030ustar00rootroot00000000000000// this file contains the different generic functions which // will be called by the different tests import { BasePage } from "./base"; import { by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from '../utils'; export class ConsolePage extends BasePage { constructor() { super(); } async go() { await bbrowser.get('#/console'); await bbrowser.wait(EC.urlContains('#/console'), "URL does not contain #/console"); } async countSuccess() { return await element.all(By.css('.badge-status.results_SUCCESS')).count(); } } buildbot-3.4.0/smokes/e2e/pages/dashboard.ts000066400000000000000000000014101413250514000206630ustar00rootroot00000000000000// this file will contains the different generic functions which // will be called by the different tests import { BasePage } from "./base"; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from '../utils'; export class DashboardPage extends BasePage { constructor() { super(); } async go() { await bbrowser.get('#/mydashboard'); await bbrowser.wait(EC.urlContains('#/mydashboard'), "URL does not contain #/mydashboard"); var buildLink = element.all(By.linkText("runtests/1")).first(); await bbrowser.wait(EC.elementToBeClickable(buildLink), "runtests/1 link not clickable"); await buildLink.click(); } } buildbot-3.4.0/smokes/e2e/pages/force.ts000066400000000000000000000046561413250514000200510ustar00rootroot00000000000000// this file will contains the different generic functions which // will be called by the different tests import { BasePage } from "./base"; import { ExpectedConditions as EC } from 'protractor'; import { bbrowser } from '../utils'; export class ForcePage extends BasePage { constructor() { super(); } async setInputText(cssLabel, 
value) { const setInputValue = element(By.css(`forcefield label[for=${cssLabel}] + div input`)); await setInputValue.clear(); await setInputValue.sendKeys(value); const inputValue = await setInputValue.getAttribute('value'); expect(inputValue).toBe(value); } async setReason(reason) { await this.setInputText("reason", reason); } async setYourName(yourName) { await this.setInputText("username", yourName); } async setProjectName(projectName) { await this.setInputText("project", projectName); } async setBranchName(branchName) { await this.setInputText("branch", branchName); } async setRepo(repo) { await this.setInputText("repository", repo); } async setRevisionName(RevisionName) { await this.setInputText("revision", RevisionName); } async clickStartButton() { let button = this.getStartButton(); await bbrowser.wait(EC.elementToBeClickable(button), "start button not clickable"); await button.click(); } async clickStartButtonAndWaitRedirectToBuild() { let previousUrl = await browser.getCurrentUrl(); await this.clickStartButton(); await bbrowser.wait(EC.not(EC.urlIs(previousUrl)), "failed to create a buildrequest"); await bbrowser.wait(EC.not(EC.urlContains('redirect_to_build=true')), "failed to create a build"); } async clickCancelWholeQueue() { let button = this.getCancelWholeQueue(); await bbrowser.wait(EC.elementToBeClickable(button), "cancel whole queue button not clickable"); await button.click(); } getStartButton() { return element(By.buttonText('Start Build')); } getCancelButton() { return element(By.buttonText('Cancel')); } getCancelWholeQueue() { return element(By.buttonText('Cancel whole queue')); } getStopButton() { return element(By.buttonText('Stop')); } } buildbot-3.4.0/smokes/e2e/pages/home.ts000066400000000000000000000031151413250514000176700ustar00rootroot00000000000000// this file will contains the different generic functions which // will be called by the different tests import { BasePage } from "./base"; import { by, element, ExpectedConditions as EC } 
from 'protractor'; import { bbrowser } from '../utils'; export class HomePage extends BasePage { constructor(){ super(); } async go() { await bbrowser.get('#/'); await bbrowser.wait(EC.urlContains('#/'), "URL does not contain #/"); } getPanel() { return element.all(By.css(".panel-title")); } getAnonymousButton() { const anonymousButton = element(By.css('[ng-class="loginCollapsed ? \'\':\'open\'"')); return anonymousButton; } getLoginButton() { return element(By.buttonText('Login')); } async setUserText(value) { const setUserValue = element.all(By.css('[ng-model="username"]')); await setUserValue.clear(); await setUserValue.sendKeys(value); } async setPasswordText(value) { const setPasswordValue = element.all(By.css('[ng-model="password"]')); await setPasswordValue.clear(); await setPasswordValue.sendKeys(value); } async waitAllBuildsFinished() { await this.go(); const self = this; const noRunningBuilds = async () => { let text = await element.all(By.css("h4")).getText(); text = text.join(" "); return text.toLowerCase().indexOf("0 builds running") >= 0; } await bbrowser.wait(noRunningBuilds, "Builds are still running"); } } buildbot-3.4.0/smokes/e2e/pages/pendingbuildrequests.ts000066400000000000000000000013001413250514000231720ustar00rootroot00000000000000// this file will contains the different generic functions which // will be called by the different tests import { BasePage } from "./base"; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from '../utils'; export class PendingBuildrequestsPage extends BasePage { constructor() { super(); } async go() { await bbrowser.get('#/pendingbuildrequests'); await bbrowser.wait(EC.urlContains('#/pendingbuildrequests'), "URL does not contain #/pendingbuildrequests"); } getAllBuildrequestRows() { return element.all(By.css("td .badge-status")).all(By.xpath('../../..')); } } 
buildbot-3.4.0/smokes/e2e/pages/settings.ts000066400000000000000000000075551413250514000206140ustar00rootroot00000000000000// this file contains the different generic functions which // will be called by the different tests import { BasePage } from "./base"; import { by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from '../utils'; export class SettingsPage extends BasePage { builder: string; constructor(builder) { super(); this.builder = builder; } async goSettings() { await bbrowser.get('#/settings'); await bbrowser.wait(EC.urlContains('#/settings'), "URL does not contain #/settings"); } getItem(group, name) { return element(By.css(`form[name='${group}'] [name='${name}']`)); } async changeScallingFactor(scallingVar) { const scallingFactorForm = this.getItem("Waterfall", "scaling_waterfall"); await scallingFactorForm.clear(); await scallingFactorForm.sendKeys(scallingVar); } async checkScallingFactor(scallingVar) { const scallingFactor = this.getItem("Waterfall", "scaling_waterfall"); expect(await scallingFactor.getAttribute('value')).toEqual(scallingVar); } async changeColumnWidth(columnVar) { const columnWidthForm = this.getItem("Waterfall", "min_column_width_waterfall"); await columnWidthForm.clear(); await columnWidthForm.sendKeys(columnVar); } async checkColumnWidth(columnVar) { const columnWidthForm = this.getItem("Waterfall", "min_column_width_waterfall"); expect(await columnWidthForm.getAttribute('value')).toEqual(columnVar); } async changeLazyLoadingLimit(lazyLoadingLimit) { const lazyLoadingLimitForm = this.getItem("Waterfall", "lazy_limit_waterfall"); await lazyLoadingLimitForm.clear(); await lazyLoadingLimitForm.sendKeys(lazyLoadingLimit); } async checkLazyLoadingLimit(lazyLoadingLimit) { const lazyLoadingLimitForm = this.getItem("Waterfall", "lazy_limit_waterfall"); expect(await lazyLoadingLimitForm.getAttribute('value')).toEqual(lazyLoadingLimit); } async changeIdleTime(idleTimeVar) { const idleTimeForm = 
this.getItem("Waterfall", "idle_threshold_waterfall"); await idleTimeForm.clear(); await idleTimeForm.sendKeys(idleTimeVar); } async checkIdleTime(idleTimeVar) { const idleTimeForm = this.getItem("Waterfall", "idle_threshold_waterfall"); expect(await idleTimeForm.getAttribute('value')).toEqual(idleTimeVar); } async changeMaxBuild(maxBuildVar) { const maxBuildForm = this.getItem("Console", "buildLimit"); await maxBuildForm.clear() await maxBuildForm.sendKeys(maxBuildVar); } async checkMaxBuild(maxBuildVar) { const maxBuildForm = this.getItem("Console", "buildLimit"); expect(await maxBuildForm.getAttribute('value')).toEqual(maxBuildVar); } async changeMaxRecentsBuilders(maxBuildersVar) { const maxBuilderForm = this.getItem("Console", "changeLimit"); await maxBuilderForm.clear(); await maxBuilderForm.sendKeys(maxBuildersVar); } async checkMaxRecentsBuilders(maxBuildersVar) { const maxBuilderForm = this.getItem("Console", "changeLimit"); expect(await maxBuilderForm.getAttribute('value')).toEqual(maxBuildersVar); } async changeShowWorkerBuilders(showWorkerBuildersVar) { const showWorkerBuildersForm = this.getItem("Workers", "showWorkerBuilders"); const checked = await showWorkerBuildersForm.isSelected(); if (checked !== showWorkerBuildersVar) { await showWorkerBuildersForm.click(); } } async checkShowWorkerBuilders(showWorkerBuildersVar) { const showWorkerBuildersForm = this.getItem("Workers", "showWorkerBuilders"); const isSelected = await showWorkerBuildersForm.isSelected(); expect(isSelected).toEqual(showWorkerBuildersVar); } } buildbot-3.4.0/smokes/e2e/pages/waterfall.ts000066400000000000000000000056211413250514000207250ustar00rootroot00000000000000// this file contains the different generic functions which // will be called by the different tests import { BasePage } from "./base"; import { browser, by, element, ExpectedConditions as EC, By } from 'protractor'; import { bbrowser } from '../utils'; export class WaterfallPage extends BasePage { builder: string; 
constructor(builder: string) { super(); this.builder = builder; } async go() { await bbrowser.get('#/waterfall'); await bbrowser.wait(EC.urlContains('#/waterfall'), "URL does not contain #/waterfall"); await bbrowser.wait(EC.elementToBeClickable($("div.waterfall")), "waterfall is not clickable"); } async checkBuilder() { const currentUrl = await browser.getCurrentUrl(); expect(currentUrl).toContain("builders/"); } async checkBuildResult() { const firstLinkInPopup = element.all(By.css('.modal-dialog a')).first(); await bbrowser.wait(EC.elementToBeClickable(firstLinkInPopup), "first link in popup not clickable"); await firstLinkInPopup.click(); const currentUrl = await browser.getCurrentUrl(); expect(currentUrl).toContain("builders/"); expect(currentUrl).toContain("builds/"); } async goBuild() { const buildList = element.all(By.css('text.id')).last(); await bbrowser.wait(EC.elementToBeClickable(buildList), "build list not clickable"); await buildList.click(); } async goBuildAndClose() { await this.goBuild(); const popupClose = element.all(By.css('i.fa-times')).first(); await bbrowser.wait(EC.elementToBeClickable(popupClose), "popup close not clickable"); await popupClose.click(); const dialogIsPresent = await $('modal-dialog').isPresent(); expect(dialogIsPresent).toBeFalsy(); } async goBuildAndCheck() { await this.goBuild(); await this.checkBuildResult(); } async goBuilderAndCheck(builderRef) { let localBuilder = element(By.linkText(this.builder)); await bbrowser.wait(EC.elementToBeClickable(localBuilder), "local builder not clickable"); await localBuilder.click(); await this.checkBuilder(); } async goTagAndCheckUrl() { const firstTag = element.all(By.binding('tag')).first(); await bbrowser.wait(EC.elementToBeClickable(firstTag), "first tag close not clickable"); await firstTag.click(); expect(browser.getCurrentUrl()).toContain(firstTag.getText()); } async goUrlAndCheckTag() { await bbrowser.get('#/waterfall?tags=runt'); const selectedTag = 
element(by.className('label-success')); expect(await selectedTag.getText()).toContain('runt'); } } buildbot-3.4.0/smokes/e2e/pages/worker.ts000066400000000000000000000024601413250514000202530ustar00rootroot00000000000000// this file will contains the different generic functions which // will be called by the different tests import { BuilderPage } from './builder'; import { BasePage } from "./base"; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from '../utils'; export class WorkerPage extends BasePage { builder: string; constructor(builder) { super(); this.builder = builder; } async goWorker() { await bbrowser.get('#/workers'); await bbrowser.wait(EC.urlContains('#/workers'), "URL does not contain #/workers"); } async checkWorkerPage() { expect(await browser.getCurrentUrl()).toContain('#/worker'); } async checkHrefPresent() { const hrefRef = element.all(By.css('a')); const hrefRefText = await hrefRef.getText(); expect(hrefRefText).toContain('slowruntests'); expect(hrefRefText).toContain('runtests'); } async goBuilderLink(builderName) { let builderLink = element(By.linkText(builderName)); await bbrowser.wait(EC.elementToBeClickable(builderLink), "link for " + builderName + " not clickable"); await builderLink.click(); return new BuilderPage(builderName, 'Force'); } } buildbot-3.4.0/smokes/e2e/pendingbuildrequests.scenarios.ts000066400000000000000000000035721413250514000240750ustar00rootroot00000000000000// test goal: checks the capability to navigate in a dedicated build // to use previous and next link import { HomePage } from './pages/home'; import { PendingBuildrequestsPage } from './pages/pendingbuildrequests'; import { BuilderPage } from './pages/builder'; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from './utils'; describe('pending build requests', function() { let builder = null; let pendingBuildrequests = null; beforeEach(async () => { builder = new 
BuilderPage('slowruntests', 'force'); pendingBuildrequests = new PendingBuildrequestsPage(); await builder.goBuildersList(); }); afterEach(async () => { const homePage = new HomePage(); await homePage.waitAllBuildsFinished(); }); it('shows', async () => { let force = await builder.goForce(); await force.clickStartButton(); await builder.goForce(); await force.clickStartButton(); // hopefully we'll see at least one buildrequest by the time we get to // the pending build requests page await pendingBuildrequests.go(); const isBulidrequestsVisible = async () => { let count = await pendingBuildrequests.getAllBuildrequestRows().count(); return count > 0; }; await bbrowser.wait(isBulidrequestsVisible, "did not find buildrequests"); const br = pendingBuildrequests.getAllBuildrequestRows().first(); expect(await br.element(By.css('td:nth-child(2) a')).getText()).toMatch('slowruntests'); // kill remaining builds await builder.go(); await force.clickCancelWholeQueue(); await bbrowser.wait(EC.alertIsPresent(), "did not find confirmation alert"); await browser.switchTo().alert().accept(); }); }); buildbot-3.4.0/smokes/e2e/reason_force.scenarios.ts000066400000000000000000000035051413250514000222760ustar00rootroot00000000000000// test goal: checks the capability to define a reason and to cancel/start the build import { HomePage } from './pages/home'; import { BuilderPage } from './pages/builder'; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from './utils'; describe('force and cancel', function() { let builder = null; beforeEach(async () => { builder = new BuilderPage('runtests', 'force'); await builder.goBuildersList(); }); afterEach(async () => { const homePage = new HomePage(); await homePage.waitAllBuildsFinished(); }); it('should create a build', async () => { await builder.go(); let lastbuild = await builder.getLastFinishedBuildNumber(); let force = await builder.goForce(); await 
force.clickStartButtonAndWaitRedirectToBuild(); await builder.go(); await builder.waitBuildFinished(lastbuild + 1); }); it('should create a build with a dedicated reason and cancel it', async () => { await builder.go(); let force = await builder.goForce(); let cancelButton = force.getCancelButton(); await bbrowser.wait(EC.elementToBeClickable(cancelButton), "cancel button not clickable"); await cancelButton.click(); }); it('should create a build with a dedicated reason and Start it', async () => { await builder.go(); let force = await builder.goForce(); await force.setReason("New Test Reason"); await force.setYourName("user@example.com"); await force.setProjectName("BBOT9"); await force.setBranchName("Gerrit Branch"); await force.setRepo("http//name.com"); await force.setRevisionName("12345"); await force.clickStartButtonAndWaitRedirectToBuild(); }); }); buildbot-3.4.0/smokes/e2e/rebuilds.scenarios.ts000066400000000000000000000025501413250514000214410ustar00rootroot00000000000000// test goal: checks the capability to navigate in a dedicated build // to use previous and next link import { HomePage } from './pages/home'; import { BuilderPage } from './pages/builder'; import { browser, by, element, ExpectedConditions as EC } from 'protractor'; import { bbrowser } from './utils'; describe('rebuilds', function() { let builder = null; beforeEach(async () => { builder = new BuilderPage('runtests', 'force'); await builder.goBuildersList(); }); afterEach(async () => { const homePage = new HomePage(); await homePage.waitAllBuildsFinished(); }); it('should navigate to a dedicated build and to use the rebuild button', async () => { await builder.go(); const lastbuild: number = await builder.getLastFinishedBuildNumber(); let force = await builder.goForce(); await force.clickStartButtonAndWaitRedirectToBuild(); await builder.go(); await builder.waitBuildFinished(lastbuild + 1); await builder.goBuild(lastbuild + 1); await browser.getCurrentUrl(); let rebuildButton = 
builder.getRebuildButton(); await bbrowser.wait(EC.elementToBeClickable(rebuildButton), "rebuild button not clickable"); await rebuildButton.click(); await builder.waitGoToBuild(lastbuild + 2); }); }); buildbot-3.4.0/smokes/e2e/settings.scenarios.ts000066400000000000000000000055121413250514000214710ustar00rootroot00000000000000// test goal: checks the capability to navigate in a dedicated build // to use previous and next link import { BuilderPage } from './pages/builder'; import { WaterfallPage } from './pages/waterfall'; import { SettingsPage } from './pages/settings'; describe('manage settings', function() { let builder = null; let waterfall = null; let settings = null; beforeEach(function() { builder = new BuilderPage('runtests', 'force'); waterfall = new WaterfallPage('runtests'); settings = new SettingsPage('runtests'); return builder.goBuildersList(); }); describe('waterfall', () => { const scalingFactor = '10'; it('change the "scalling factor" and check it', async () => { await settings.goSettings(); await settings.changeScallingFactor(scalingFactor); await waterfall.go(); await settings.goSettings(); await settings.checkScallingFactor(scalingFactor); }) const scalingWidth = '450'; it('change the "minimum column width" and check it', async () => { await settings.goSettings(); await settings.changeColumnWidth(scalingWidth); await waterfall.go(); await settings.goSettings(); await settings.checkColumnWidth(scalingWidth); }) const lazyLoadingLimit = '30'; it('change the "lazy loading limit" and check it', async () => { await settings.goSettings(); await settings.changeLazyLoadingLimit(lazyLoadingLimit); await waterfall.go(); await settings.goSettings(); await settings.checkLazyLoadingLimit(lazyLoadingLimit); }) const idleTimeThreshold = '15'; it('change the "idle time threshold" and check it', async () => { await settings.goSettings(); await settings.changeIdleTime(idleTimeThreshold); await waterfall.go(); await settings.goSettings(); await 
settings.checkIdleTime(idleTimeThreshold); }) }); describe('console', () => { const buildsToFetch = '130'; it('change the "number of builds to fetch" and check it', async () => { await settings.goSettings(); await settings.changeMaxBuild(buildsToFetch); await waterfall.go(); await settings.goSettings(); await settings.checkMaxBuild(buildsToFetch); }) const changesToFetch='45'; it('change the "number of changes to fetch" and check it', async () => { await settings.goSettings(); await settings.changeMaxRecentsBuilders(changesToFetch); await waterfall.go(); await settings.goSettings(); await settings.checkMaxRecentsBuilders(changesToFetch); }) }); }); buildbot-3.4.0/smokes/e2e/utils.ts000066400000000000000000000046311413250514000170050ustar00rootroot00000000000000// this defines a wrapper for protractor.browser which intercepts various calls from the tests // and adds additional logging. This allows much better understanding of test failures without // trying to debug the tests themselves import { browser } from 'protractor'; export class BBBrowser { static defaultTimeoutMs: int = 100000; // 0 - nothing, 1 - one-line messages, 2 - timings, 3 - full stack traces static debugCallLogLevel: int = 2; static debugErrorLogLevel: int = 3; printErrorIfNeeded(e: Error) { if (BBBrowser.debugErrorLogLevel > 0) { if (BBBrowser.debugErrorLogLevel > 2) { console.trace(); } console.log(`Got exception ${e}`); } } printTimingsIfNeeded(startTimeMs: number) { if (BBBrowser.debugCallLogLevel > 1) { const endTimeMs = new Date().getTime(); const durationS = (endTimeMs - startTimeMs) / 1000.0; console.log(` ... 
Took ${durationS} s`); } } printEntryMessageIfNeeded(functionName: string, params: string) { if (BBBrowser.debugCallLogLevel > 0) { if (BBBrowser.debugCallLogLevel > 2) { console.trace(); } console.log(`${functionName}(${params})`); } } async wait(condition: Function, message: string) { this.printEntryMessageIfNeeded('bbrowser.wait', message); let start = new Date().getTime() try { await browser.wait(condition, BBBrowser.defaultTimeoutMs, message); this.printTimingsIfNeeded(start); if (BBBrowser.debugCallLogLevel > 2) { console.trace(); } } catch (e) { this.printTimingsIfNeeded(start); this.printErrorIfNeeded(e); throw e; } } async get(url: string) { this.printEntryMessageIfNeeded('bbrowser.get', url); let start = new Date().getTime() try { await browser.get(url); this.printTimingsIfNeeded(start); if (BBBrowser.debugCallLogLevel > 2) { console.trace(); } } catch (e) { this.printTimingsIfNeeded(start); this.printErrorIfNeeded(); throw e; } } }; const bbrowser = new BBBrowser(); export { bbrowser }; buildbot-3.4.0/smokes/e2e/waterfall.scenarios.ts000066400000000000000000000041761413250514000216170ustar00rootroot00000000000000// test goal: checks the capability to navigate in a dedicated build // to use previous and next link import { BuilderPage } from './pages/builder'; import { WaterfallPage } from './pages/waterfall'; import { HomePage } from './pages/home'; import { SettingsPage } from './pages/settings'; describe('waterfall', function() { let builder = null; let waterfall = null; beforeEach(async () => { builder = new BuilderPage('runtests', 'force'); waterfall = new WaterfallPage('runtests'); const settings = new SettingsPage('runtests'); await settings.goSettings(); await settings.changeScallingFactor('10'); await settings.changeColumnWidth('45'); }); afterEach(async () => { const homePage = new HomePage(); await homePage.waitAllBuildsFinished(); }); const createBuildAndWaitForFinish = async () => { await builder.go(); const lastbuildid = await 
builder.getLastFinishedBuildNumber(); let force = await builder.goForce(); await force.clickStartButtonAndWaitRedirectToBuild(); await builder.go(); await builder.waitBuildFinished(lastbuildid + 1); }; it('can go to builder page via hyperlink', async () => { await createBuildAndWaitForFinish(); await waterfall.go(); await waterfall.goBuilderAndCheck('runtests'); }); it('can go to build page via hyperlink in build modal dialog', async () => { await createBuildAndWaitForFinish(); await waterfall.go(); await waterfall.goBuildAndCheck(); }); it('can open build modal dialog and close it', async () => { await createBuildAndWaitForFinish(); await waterfall.go(); await waterfall.goBuildAndClose(); }); it('does url change once tag clicked', async () => { await createBuildAndWaitForFinish(); await waterfall.go(); await waterfall.goTagAndCheckUrl(); }); it('is tag clicked when url contains tag', async () => { await createBuildAndWaitForFinish(); await waterfall.go(); await waterfall.goUrlAndCheckTag(); }); }); buildbot-3.4.0/smokes/e2e/worker.scenarios.ts000066400000000000000000000022321413250514000211360ustar00rootroot00000000000000// test goal: checks the capability to navigate on about web page // to use previous and next link import { WorkerPage } from './pages/worker'; import { SettingsPage } from './pages/settings'; describe('worker', function() { let worker = null; let builder = null; let settings = null; beforeEach(function() { worker = new WorkerPage('runtests'); settings = new SettingsPage('runtests'); }); const navigateAndCheckBuilderLink = async (builderName) => { await settings.goSettings(); await settings.changeShowWorkerBuilders(true); await settings.checkShowWorkerBuilders(true); await worker.goWorker(); await worker.checkWorkerPage(); await worker.checkHrefPresent(); builder = await worker.goBuilderLink(builderName); await builder.checkBuilderURL(); } it('should navigate to the worker page, check the one slowruntests link', async () => { await 
navigateAndCheckBuilderLink("slowruntests"); }); it('should navigate to the worker page, check the one runtests link', async () => { await navigateAndCheckBuilderLink("runtests"); }); }); buildbot-3.4.0/smokes/master.cfg000066400000000000000000000100431413250514000165700ustar00rootroot00000000000000# -*- python -*- # ex: set filetype=python: from buildbot.plugins import * NUM_BUILDERS = 2 c = BuildmasterConfig = {} ####### WORKERS c['workers'] = [worker.Worker("example-worker", "pass")] c['protocols'] = {'pb': {'port': 9989}} ####### CHANGESOURCES c['change_source'] = [] c['change_source'].append(changes.GitPoller( 'https://github.com/buildbot/hello-world.git', # the buildbot clone of pyflakes workdir='gitpoller-workdir', branch='master', pollinterval=300)) ####### SCHEDULERS c['schedulers'] = [] c['schedulers'].append(schedulers.SingleBranchScheduler( name="all", change_filter=util.ChangeFilter(branch='master'), treeStableTimer=None, builderNames=["runtests" + str(i) for i in range(NUM_BUILDERS)])) c['schedulers'].append(schedulers.ForceScheduler( name="force", builderNames=["runtests", "slowruntests"])) c['schedulers'].append(schedulers.ForceScheduler( name="custom", builderNames=["runtests"], buttonName="Start Custom Build", codebases = [util.CodebaseParameter( codebase='', project=None, branch=util.ChoiceStringParameter( name="branch", label="Branch", strict=False, choices=["master", "dev"], autopopulate={ 'master': { 'build_name': 'master', }, 'dev': { 'build_name': 'dev', } } ))], properties=[ util.StringParameter( name="build_name", label="Name of the Build release.", default="")])) ####### BUILDERS factory = util.BuildFactory() factory.addStep(steps.Git(repourl='https://github.com/buildbot/hello-world.git', mode='incremental')) factory.addStep(steps.ShellCommand(command=["trial", "hello"], env={"PYTHONPATH": "."})) slowfactory = util.BuildFactory() slowfactory.addStep(steps.Git(repourl='https://github.com/buildbot/hello-world.git', mode='incremental')) 
slowfactory.addStep(steps.ShellCommand(command=["trial", "hello"], env={"PYTHONPATH": "."})) slowfactory.addStep(steps.ShellCommand(command=["sleep", "10"])) c['builders'] = [] c['builders'].append( util.BuilderConfig(name="runtests", tags=['runt'], workernames=["example-worker"], factory=factory)) c['builders'].append( util.BuilderConfig(name="slowruntests", tags=['slow', 'runt'], workernames=["example-worker"], factory=slowfactory)) for i in range(NUM_BUILDERS): c['builders'].append( util.BuilderConfig(name="runtests" + str(i), tags=[str(i), 'runt'], workernames=["example-worker"], factory=factory)) ####### PROJECT IDENTITY c['title'] = "Pyflakes" c['titleURL'] = "https://launchpad.net/pyflakes" c['buildbotURL'] = "http://localhost:8011/" # we're not using the default port so that it would not accidentally conflict # with any development instances of buildbot on developer machines c['www'] = dict(port=8011, # graphql={}, change_hook_dialects={'base': True}, plugins=dict(waterfall_view={}, console_view={}, grid_view={}, badges={}), ui_default_config={'Builders.buildFetchLimit': 201}) c['buildbotNetUsageData'] = None ####### DB URL c['db'] = { 'db_url': "sqlite:///state.sqlite", } authz = util.Authz( allowRules=[ ], roleMatchers=[ util.RolesFromEmails(admins=["my@email.com"]) ] ) auth=util.UserPasswordAuth({'my@email.com': b'mypass'}) c['www']['auth'] = auth c['www']['authz'] = authz # in order to share this snippet in the doc, we load mydashboard.py using exec exec(open("mydashboard.py").read()) buildbot-3.4.0/smokes/mydashboard.py000066400000000000000000000042621413250514000174710ustar00rootroot00000000000000 import os from flask import Flask from flask import render_template from buildbot.process.results import statusToString mydashboardapp = Flask('test', root_path=os.path.dirname(__file__)) # this allows to work on the template without having to restart Buildbot mydashboardapp.config['TEMPLATES_AUTO_RELOAD'] = True @mydashboardapp.route("/index.html") def 
main(): # This code fetches build data from the data api, and give it to the # template builders = mydashboardapp.buildbot_api.dataGet("/builders") builds = mydashboardapp.buildbot_api.dataGet("/builds", limit=20) # properties are actually not used in the template example, but this is # how you get more properties for build in builds: build['properties'] = mydashboardapp.buildbot_api.dataGet( ("builds", build['buildid'], "properties")) build['results_text'] = statusToString(build['results']) graph_data = [ {'x': 1, 'y': 100}, {'x': 2, 'y': 200}, {'x': 3, 'y': 300}, {'x': 4, 'y': 0}, {'x': 5, 'y': 100}, {'x': 6, 'y': 200}, {'x': 7, 'y': 300}, {'x': 8, 'y': 0}, {'x': 9, 'y': 100}, {'x': 10, 'y': 200}, ] # mydashboard.html is a template inside the template directory return render_template('mydashboard.html', builders=builders, builds=builds, graph_data=graph_data) # Here we assume c['www']['plugins'] has already be created earlier. # Please see the web server documentation to understand how to configure # the other parts. # This is a list of dashboards, you can create several c['www']['plugins']['wsgi_dashboards'] = [ { 'name': 'mydashboard', # as used in URLs 'caption': 'My Dashboard', # Title displayed in the UI' 'app': mydashboardapp, # priority of the dashboard in the left menu (lower is higher in the # menu) 'order': 5, # An available icon list can be found at http://fontawesome.io/icons/. Double-check # the buildbot about dashboard for the installed version of Font Awesome as the # published icons may include more recently additions. 
'icon': 'area-chart' } ] buildbot-3.4.0/smokes/package.json000066400000000000000000000007771413250514000171170ustar00rootroot00000000000000{ "name": "smokes", "version": "1.0.0", "description": "smoke tests for buildbot with protractor", "main": "index.js", "dependencies": { "jasmine-spec-reporter": "^4.2.1", "protractor": "^5.4.2", "request": "^2.88.0", "ts-node": "~4.1.0", "tslint": "~5.9.1", "typescript": "^2.5.3" }, "devDependencies": {}, "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, "author": "", "license": "ISC" } buildbot-3.4.0/smokes/protractor-headless.conf.js000066400000000000000000000042651413250514000220740ustar00rootroot00000000000000const { SpecReporter } = require('jasmine-spec-reporter'); exports.config = { // when running tests on a heavily loaded maching (which is the case on e.g. CI server), // Buildbot master sometimes takes a lot of time to respond to certain queries during tests. // The following timeout is increased to avoid test instabilities in such cases allScriptsTimeout: 30000, specs: [ 'e2e/*.scenarios.ts' ], SELENIUM_PROMISE_MANAGER: false, localSeleniumStandaloneOpts: { // undocumented option to pass the stdio output of selenium webdriver to // console stdio: "inherit", }, capabilities: { 'browserName': 'chrome', 'chromeOptions': { 'args': [ '--headless', '--window-size=1200,1024', '--disable-dev-shm-usage', '--disable-gpu', '--no-sandbox', '--user-agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) Chrome/56.0.2924.87"', ] } }, baseUrl: 'http://localhost:8011', framework: 'jasmine', jasmineNodeOpts: { // jasmine requires that whole test is completed within // defaultTimeoutInterval. If we accidentally exceed this timeout, // jasmine will not stop the execution of the test method, but will // simply start afterEach() callback and fail the test. The test code // will likely fail too as the page was pulled from under its feet. // The test error messages will make the cause of the failure very // confusing. 
Thus we increase the timeout value to one that hopefully // will never be exceeded. defaultTimeoutInterval: 1000000, print: function() {} }, onPrepare() { jasmine.getEnv().addReporter(new SpecReporter({ spec: { displayFailed: true, displayDuration: true, displayStacktrace: true }, summary: { displayFailed: true, displayStacktrace: true } })); require('ts-node').register({ project: './tsconfig.ee.json' }); } }; buildbot-3.4.0/smokes/protractor.conf.js000066400000000000000000000042461413250514000203050ustar00rootroot00000000000000const { SpecReporter } = require('jasmine-spec-reporter'); exports.config = { // when running tests on a heavily loaded maching (which is the case on e.g. CI server), // Buildbot master sometimes takes a lot of time to respond to certain queries during tests. // The following timeout is increased to avoid test instabilities in such cases allScriptsTimeout: 30000, specs: [ 'e2e/*.scenarios.ts' ], SELENIUM_PROMISE_MANAGER: false, localSeleniumStandaloneOpts: { // undocumented option to pass the stdio output of selenium webdriver to // console stdio: "inherit", }, capabilities: { 'browserName': 'chrome', chromeOptions: { // minimal supported browser size for tests // if smaller we start need to scroll for clicking buttons args: [ '--window-size=1200,1024', '--user-agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) Chrome/56.0.2924.87"', ] } }, baseUrl: 'http://localhost:8011', framework: 'jasmine', jasmineNodeOpts: { // jasmine requires that whole test is completed within // defaultTimeoutInterval. If we accidentally exceed this timeout, // jasmine will not stop the execution of the test method, but will // simply start afterEach() callback and fail the test. The test code // will likely fail too as the page was pulled from under its feet. // The test error messages will make the cause of the failure very // confusing. Thus we increase the timeout value to one that hopefully // will never be exceeded. 
defaultTimeoutInterval: 1000000, print: function() {} }, onPrepare() { jasmine.getEnv().addReporter(new SpecReporter({ spec: { displayFailed: true, displayDuration: true, displayStacktrace: true }, summary: { displayFailed: true, displayStacktrace: true } })); require('ts-node').register({ project: './tsconfig.ee.json' }); } }; buildbot-3.4.0/smokes/run.sh000077500000000000000000000036371413250514000157720ustar00rootroot00000000000000#!/bin/bash set -e set -v cd `dirname $0` YARN=$(which yarnpkg || which yarn) if [ $? -ne 0 ]; then echo "Neither yarnpkg nor yarn is available" exit 1 fi echo "Using ${YARN} as yarn" function finish { # uncomment for debug in kube # for i in `seq 1000` # do # echo please debug me! # sleep 60 # done set +e kill %1 buildbot stop workdir buildbot-worker stop workdir/worker rm -rf workdir } trap finish EXIT rm -rf workdir buildbot create-master workdir ln -s ../templates ../mydashboard.py ../master.cfg workdir buildbot-worker create-worker workdir/worker localhost example-worker pass buildbot checkconfig workdir # on docker buildbot might be a little bit slower to start, so sleep another 20s in case of start to slow. buildbot start workdir || sleep 20 buildbot-worker start workdir/worker cat workdir/twistd.log & # CI mode: use preinstalled protractor with xvfb-run if [ -f /usr/bin/protractor ]; then PROTRACTOR=/usr/bin/protractor else ${YARN} install --pure-lockfile ../common/smokedist-download-compatible-chromedriver.py \ ./node_modules/protractor/bin/webdriver-manager \ google-chrome \ chromium-browser \ /Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome \ chromium PROTRACTOR=./node_modules/protractor/bin/protractor fi if [ -f /usr/bin/xvfb-run ] && [[ ! 
-n "$SMOKES_DONT_USE_XVFB" ]] ; then xvfb-run --server-args="-screen 0 1024x768x24" $PROTRACTOR protractor-headless.conf.js else # manual mode: install locally ${YARN} install ../common/smokedist-download-compatible-chromedriver.py \ ./node_modules/protractor/bin/webdriver-manager \ google-chrome \ chromium-browser \ /Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome \ chromium ./node_modules/protractor/bin/protractor protractor.conf.js fi buildbot-3.4.0/smokes/templates/000077500000000000000000000000001413250514000166145ustar00rootroot00000000000000buildbot-3.4.0/smokes/templates/mydashboard.html000066400000000000000000000063071413250514000220050ustar00rootroot00000000000000
{% for builder in builders %} {% endfor %} {% for build in builds %} {% for builder in builders %} {% endfor %} {% endfor %}
{{builder.name}}
{% if build.builderid == builder.builderid %} {{build.number}} {% endif %}
buildbot-3.4.0/smokes/tsconfig.ee.json000066400000000000000000000004401413250514000177130ustar00rootroot00000000000000{ "extends": "./tsconfig.json", "compilerOptions": { "outDir": "./out-tsc/e2e", "sourceMap": true, "removeComments": false, "baseUrl": "./", "module": "commonjs", "target": "es2017", "types": [ "jasmine", "jasminewd2", "node" ] } } buildbot-3.4.0/smokes/tsconfig.json000066400000000000000000000006031413250514000173240ustar00rootroot00000000000000{ "compileOnSave": false, "compilerOptions": { "outDir": "./dist/out-tsc", "sourceMap": true, "declaration": false, "moduleResolution": "node", "emitDecoratorMetadata": true, "experimentalDecorators": true, "target": "es6", "typeRoots": [ "node_modules/@types" ], "lib": [ "es2017", "dom", "es2015.promise" ] } } buildbot-3.4.0/smokes/yarn.lock000066400000000000000000001417031413250514000164470ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 "@types/q@^0.0.32": version "0.0.32" resolved "https://registry.yarnpkg.com/@types/q/-/q-0.0.32.tgz#bd284e57c84f1325da702babfc82a5328190c0c5" integrity sha1-vShOV8hPEyXacCur/IKlMoGQwMU= "@types/selenium-webdriver@^3.0.0": version "3.0.16" resolved "https://registry.yarnpkg.com/@types/selenium-webdriver/-/selenium-webdriver-3.0.16.tgz#50a4755f8e33edacd9c406729e9b930d2451902a" integrity sha512-lMC2G0ItF2xv4UCiwbJGbnJlIuUixHrioOhNGHSCsYCJ8l4t9hMCUimCytvFv7qy6AfSzRxhRHoGa+UqaqwyeA== "@types/strip-bom@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/strip-bom/-/strip-bom-3.0.0.tgz#14a8ec3956c2e81edb7520790aecf21c290aebd2" integrity sha1-FKjsOVbC6B7bdSB5CuzyHCkK69I= "@types/strip-json-comments@0.0.30": version "0.0.30" resolved "https://registry.yarnpkg.com/@types/strip-json-comments/-/strip-json-comments-0.0.30.tgz#9aa30c04db212a9a0649d6ae6fd50accc40748a1" integrity sha512-7NQmHra/JILCd1QqpSzl8+mJRc8ZHz3uDm8YV1Ks9IhK0epEiTw8aIErbvH9PI+6XbqhyIQy3462nEsn7UVzjQ== adm-zip@^0.4.9: version "0.4.13" 
resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.4.13.tgz#597e2f8cc3672151e1307d3e95cddbc75672314a" integrity sha512-fERNJX8sOXfel6qCBCMPvZLzENBEhZTzKqg6vrOW5pvoEaQuJhRU4ndTAh6lHOxn1I6jnz2NHra56ZODM751uw== agent-base@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.3.0.tgz#8165f01c436009bccad0b1d122f05ed770efc6ee" integrity sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg== dependencies: es6-promisify "^5.0.0" ajv@^6.5.5: version "6.10.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== dependencies: fast-deep-equal "^2.0.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" 
integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= arrify@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" integrity sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0= asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.22.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" js-tokens "^3.0.2" balanced-match@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" blocking-proxy@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/blocking-proxy/-/blocking-proxy-1.0.1.tgz#81d6fd1fe13a4c0d6957df7f91b75e98dac40cb2" integrity sha512-KE8NFMZr3mN2E0HcvCgRtX7DjhiIQrwle+nSVJVC/yqFb9+xznHl2ZcoBp2L9qzkI4t4cBFJ1efXF8Dwi132RA== dependencies: minimist "^1.2.0" brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" browserstack@^1.5.1: version "1.5.3" resolved "https://registry.yarnpkg.com/browserstack/-/browserstack-1.5.3.tgz#93ab48799a12ef99dbd074dd595410ddb196a7ac" integrity sha512-AO+mECXsW4QcqC9bxwM29O7qWa7bJT94uBFzeb5brylIQwawuEziwq20dPYbins95GlWzOawgyDNdjYAo32EKg== dependencies: https-proxy-agent "^2.2.1" buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== builtin-modules@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" integrity sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8= caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= chalk@^1.1.1, 
chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" chalk@^2.3.0: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colors@1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63" integrity sha1-FopHAXVran9RoSzgyXv6KMCE7WM= combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@^2.12.1: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== concat-map@0.0.1: version "0.0.1" resolved 
"https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" debug@^3.1.0: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" del@^2.2.0: version "2.2.2" resolved "https://registry.yarnpkg.com/del/-/del-2.2.2.tgz#c12c981d067846c84bcaf862cff930d907ffd1a8" integrity sha1-wSyYHQZ4RshLyvhiz/kw2Qf/0ag= dependencies: globby "^5.0.0" is-path-cwd "^1.0.0" is-path-in-cwd "^1.0.0" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" rimraf "^2.2.8" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= diff@^3.1.0, diff@^3.2.0: version "3.5.0" resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== 
ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" es-abstract@^1.5.1: version "1.16.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.16.0.tgz#d3a26dc9c3283ac9750dca569586e976d9dcc06d" integrity sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg== dependencies: es-to-primitive "^1.2.0" function-bind "^1.1.1" has "^1.0.3" has-symbols "^1.0.0" is-callable "^1.1.4" is-regex "^1.0.4" object-inspect "^1.6.0" object-keys "^1.1.1" string.prototype.trimleft "^2.1.0" string.prototype.trimright "^2.1.0" es-to-primitive@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" es6-promise@^4.0.3: version "4.2.8" resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a" integrity sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w== es6-promisify@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-5.0.0.tgz#5109d62f3e56ea967c4b63505aef08291c8a5203" integrity sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM= dependencies: es6-promise "^4.0.3" escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity 
sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== exit@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity 
sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob@^7.0.3, glob@^7.0.6, glob@^7.1.1, glob@^7.1.3: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" globby@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/globby/-/globby-5.0.0.tgz#ebd84667ca0dbb330b99bcfc68eac2bc54370e0d" integrity sha1-69hGZ8oNuzMLmbz8aOrCvFQ3Dg0= dependencies: array-union "^1.0.1" arrify "^1.0.0" glob "^7.0.3" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.0: version "5.1.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" integrity 
sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: ajv "^6.5.5" har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-symbols@^1.0.0, has-symbols@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has@^1.0.1, has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" homedir-polyfill@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-proxy-agent@^2.2.1: version "2.2.4" resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz#4ee7a737abd92678a293d9b34a1af4d0d08c787b" integrity sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg== dependencies: agent-base 
"^4.3.0" debug "^3.1.0" immediate@~3.0.5: version "3.0.6" resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b" integrity sha1-nbHb0Pr43m++D13V5Wu2BigN5ps= inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== ini@^1.3.4: version "1.3.7" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84" integrity sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ== is-callable@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== is-date-object@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= is-path-cwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" integrity sha1-0iXsIxMuie3Tj9p2dHLmLmXxEG0= is-path-in-cwd@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.1.tgz#5ac48b345ef675339bd6c7a48a912110b241cf52" integrity sha512-FjV1RTW48E7CWM7eE/J2NJvAEEVektecDBVBE5Hh3nM1Jd0kvhHtX68Pr3xsDf857xt3Y4AkwVULK1Vku62aaQ== dependencies: is-path-inside "^1.0.0" is-path-inside@^1.0.0: version "1.0.1" resolved 
"https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.1.tgz#8ef5b7de50437a3fdca6b4e865ef7aa55cb48036" integrity sha1-jvW33lBDej/cprToZe96pVy0gDY= dependencies: path-is-inside "^1.0.1" is-regex@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= dependencies: has "^1.0.1" is-symbol@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== dependencies: has-symbols "^1.0.1" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= jasmine-core@~2.8.0: version "2.8.0" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-2.8.0.tgz#bcc979ae1f9fd05701e45e52e65d3a5d63f1a24e" integrity sha1-vMl5rh+f0FcB5F5S5l06XWPxok4= jasmine-spec-reporter@^4.2.1: version "4.2.1" resolved "https://registry.yarnpkg.com/jasmine-spec-reporter/-/jasmine-spec-reporter-4.2.1.tgz#1d632aec0341670ad324f92ba84b4b32b35e9e22" integrity sha512-FZBoZu7VE5nR7Nilzy+Np8KuVIOxF4oXDPDknehCYBDE080EnlPu0afdZNmpGDBRCUBv3mj5qgqCRmk6W/K8vg== dependencies: colors "1.1.2" jasmine@2.8.0: version "2.8.0" resolved "https://registry.yarnpkg.com/jasmine/-/jasmine-2.8.0.tgz#6b089c0a11576b1f16df11b80146d91d4e8b8a3e" integrity sha1-awicChFXax8W3xG4AUbZHU6Lij4= dependencies: exit 
"^0.1.2" glob "^7.0.6" jasmine-core "~2.8.0" jasminewd2@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/jasminewd2/-/jasminewd2-2.2.0.tgz#e37cf0b17f199cce23bea71b2039395246b4ec4e" integrity sha1-43zwsX8ZnM4jvqcbIDk5Uka07E4= js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@^3.7.0: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" jszip@^3.1.3: version "3.7.1" resolved 
"https://registry.yarnpkg.com/jszip/-/jszip-3.7.1.tgz#bd63401221c15625a1228c556ca8a68da6fda3d9" integrity sha512-ghL0tz1XG9ZEmRMcEN2vt7xabrDdqHHeykgARpmZ0BiIctWxM47Vt63ZO2dnp4QYt/xJVLLy5Zv1l/xRdh2byg== dependencies: lie "~3.3.0" pako "~1.0.2" readable-stream "~2.3.6" set-immediate-shim "~1.0.1" lie@~3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/lie/-/lie-3.3.0.tgz#dcf82dee545f46074daf200c7c1c5a08e0f40f6a" integrity sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ== dependencies: immediate "~3.0.5" make-error@^1.1.1: version "1.3.5" resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.5.tgz#efe4e81f6db28cadd605c70f29c831b58ef776c8" integrity sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g== mime-db@1.42.0: version "1.42.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.42.0.tgz#3e252907b4c7adb906597b4b65636272cf9e7bac" integrity sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ== mime-types@^2.1.12, mime-types@~2.1.19: version "2.1.25" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.25.tgz#39772d46621f93e2a80a856c53b86a62156a6437" integrity sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg== dependencies: mime-db "1.42.0" minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@0.0.8: version "0.0.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= minimist@^1.2.0: version "1.2.0" resolved 
"https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= mkdirp@^0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= dependencies: minimist "0.0.8" ms@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-inspect@^1.6.0: version "1.7.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved 
"https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= dependencies: define-properties "^1.1.2" es-abstract "^1.5.1" once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" optimist@~0.6.0: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" os-tmpdir@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= pako@~1.0.2: version "1.0.11" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-is-inside@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= path-parse@^1.0.6: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== 
performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== protractor@^5.4.2: version "5.4.2" resolved "https://registry.yarnpkg.com/protractor/-/protractor-5.4.2.tgz#329efe37f48b2141ab9467799be2d4d12eb48c13" integrity sha512-zlIj64Cr6IOWP7RwxVeD8O4UskLYPoyIcg0HboWJL9T79F1F0VWtKkGTr/9GN6BKL+/Q/GmM7C9kFVCfDbP5sA== dependencies: "@types/q" "^0.0.32" "@types/selenium-webdriver" "^3.0.0" blocking-proxy "^1.0.0" browserstack "^1.5.1" chalk "^1.1.3" glob "^7.0.3" jasmine "2.8.0" jasminewd2 "^2.1.0" optimist "~0.6.0" q "1.4.1" saucelabs "^1.5.0" selenium-webdriver "3.6.0" source-map-support "~0.4.0" webdriver-js-extender "2.1.0" webdriver-manager "^12.0.6" psl@^1.1.24: version "1.4.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.4.0.tgz#5dd26156cdb69fa1fdb8ab1991667d3f80ced7c2" integrity sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw== punycode@^1.4.1: version "1.4.1" resolved 
"https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== q@1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/q/-/q-1.4.1.tgz#55705bcd93c5f3673530c2c2cbc0c2b3addc286e" integrity sha1-VXBbzZPF82c1MMLCy8DCs63cKG4= q@^1.4.1: version "1.5.1" resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== readable-stream@~2.3.6: version "2.3.7" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" request@^2.87.0, request@^2.88.0: version "2.88.0" resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.0" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign "~0.9.0" performance-now 
"^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.4.3" tunnel-agent "^0.6.0" uuid "^3.3.2" resolve@^1.3.2: version "1.12.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.2.tgz#08b12496d9aa8659c75f534a8f05f0d892fff594" integrity sha512-cAVTI2VLHWYsGOirfeYVVQ7ZDejtQ9fp4YhYckWDEkFfqbVjaT11iM8k6xSAfGFMM+gDpZjMnFssPu8we+mqFw== dependencies: path-parse "^1.0.6" rimraf@^2.2.8, rimraf@^2.5.2, rimraf@^2.5.4: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" safe-buffer@^5.0.1, safe-buffer@^5.1.2: version "5.2.0" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg== safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== saucelabs@^1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/saucelabs/-/saucelabs-1.5.0.tgz#9405a73c360d449b232839919a86c396d379fd9d" integrity sha512-jlX3FGdWvYf4Q3LFfFWS1QvPg3IGCGWxIc8QBFdPTbpTJnt/v17FHXYVAn7C8sHf1yUXo2c7yIM0isDryfYtHQ== dependencies: https-proxy-agent "^2.2.1" sax@>=0.6.0: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity 
sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== selenium-webdriver@3.6.0, selenium-webdriver@^3.0.1: version "3.6.0" resolved "https://registry.yarnpkg.com/selenium-webdriver/-/selenium-webdriver-3.6.0.tgz#2ba87a1662c020b8988c981ae62cb2a01298eafc" integrity sha512-WH7Aldse+2P5bbFBO4Gle/nuQOdVwpHMTL6raL3uuBj/vPG07k6uzt3aiahu352ONBr5xXh0hDlM3LhtXPOC4Q== dependencies: jszip "^3.1.3" rimraf "^2.5.4" tmp "0.0.30" xml2js "^0.4.17" semver@^5.3.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== set-immediate-shim@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" integrity sha1-SysbJ+uAip+NzEgaWOXlb1mfP2E= source-map-support@^0.5.0: version "0.5.16" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-support@~0.4.0: version "0.4.18" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f" integrity sha512-try0/JqxPLF9nOjvSta7tVondkP5dwgyLDjVoyMDlmjugT2lRZ1OfsrYTkCd2hkDnJTKRbO/Rl3orm8vlsUzbA== dependencies: source-map "^0.5.6" source-map@^0.5.6: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity 
sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" string.prototype.trimleft@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string.prototype.trimright@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58" integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-bom@^3.0.0: 
version "3.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= strip-json-comments@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" tmp@0.0.30: version "0.0.30" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.30.tgz#72419d4a8be7d6ce75148fd8b324e593a711c2ed" integrity sha1-ckGdSovn1s51FI/YsyTlk6cRwu0= dependencies: os-tmpdir "~1.0.1" tough-cookie@~2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: psl "^1.1.24" punycode "^1.4.1" ts-node@~4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-4.1.0.tgz#36d9529c7b90bb993306c408cd07f7743de20712" integrity sha512-xcZH12oVg9PShKhy3UHyDmuDLV3y7iKwX25aMVPt1SIXSuAfWkFiGPEkg+th8R4YKW/QCxDoW7lJdb15lx6QWg== dependencies: arrify "^1.0.0" chalk "^2.3.0" diff "^3.1.0" make-error "^1.1.1" minimist "^1.2.0" mkdirp "^0.5.1" source-map-support "^0.5.0" tsconfig "^7.0.0" v8flags "^3.0.0" yn "^2.0.0" tsconfig@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/tsconfig/-/tsconfig-7.0.0.tgz#84538875a4dc216e5c4a5432b3a4dec3d54e91b7" 
integrity sha512-vZXmzPrL+EmC4T/4rVlT2jNVMWCi/O4DIiSj3UHg1OE5kCKbk4mfrXc6dZksLgRM/TZlKnousKH9bbTazUWRRw== dependencies: "@types/strip-bom" "^3.0.0" "@types/strip-json-comments" "0.0.30" strip-bom "^3.0.0" strip-json-comments "^2.0.0" tslib@^1.8.0, tslib@^1.8.1: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== tslint@~5.9.1: version "5.9.1" resolved "https://registry.yarnpkg.com/tslint/-/tslint-5.9.1.tgz#1255f87a3ff57eb0b0e1f0e610a8b4748046c9ae" integrity sha1-ElX4ej/1frCw4fDmEKi0dIBGya4= dependencies: babel-code-frame "^6.22.0" builtin-modules "^1.1.1" chalk "^2.3.0" commander "^2.12.1" diff "^3.2.0" glob "^7.1.1" js-yaml "^3.7.0" minimatch "^3.0.4" resolve "^1.3.2" semver "^5.3.0" tslib "^1.8.0" tsutils "^2.12.1" tsutils@^2.12.1: version "2.29.0" resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-2.29.0.tgz#32b488501467acbedd4b85498673a0812aca0b99" integrity sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA== dependencies: tslib "^1.8.1" tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= typescript@^2.5.3: version "2.9.2" resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.9.2.tgz#1cbf61d05d6b96269244eb6a3bce4bd914e0f00c" integrity sha512-Gr4p6nFNaoufRIY4NMdpQRNmgxVIGMs4Fcu/ujdYk3nAZqk7supzBE9idmvfZIlH/Cuj//dvi+019qEue9lV0w== uri-js@^4.2.2: version "4.2.2" resolved 
"https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== dependencies: punycode "^2.1.0" util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util.promisify@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== dependencies: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" uuid@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.3.tgz#4568f0216e78760ee1dbf3a4d2cf53e224112866" integrity sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ== v8flags@^3.0.0: version "3.1.3" resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.1.3.tgz#fc9dc23521ca20c5433f81cc4eb9b3033bb105d8" integrity sha512-amh9CCg3ZxkzQ48Mhcb8iX7xpAfYJgePHxWMQCBWECpOSqJUXgY26ncA61UTV0BkPqfhcy6mzwCIoP4ygxpW8w== dependencies: homedir-polyfill "^1.0.1" verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" webdriver-js-extender@2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/webdriver-js-extender/-/webdriver-js-extender-2.1.0.tgz#57d7a93c00db4cc8d556e4d3db4b5db0a80c3bb7" integrity sha512-lcUKrjbBfCK6MNsh7xaY2UAUmZwe+/ib03AjVOpFobX4O7+83BUveSrLfU0Qsyb1DaKJdQRbuU+kM9aZ6QUhiQ== dependencies: "@types/selenium-webdriver" "^3.0.0" selenium-webdriver "^3.0.1" webdriver-manager@^12.0.6: version "12.1.7" resolved 
"https://registry.yarnpkg.com/webdriver-manager/-/webdriver-manager-12.1.7.tgz#ed4eaee8f906b33c146e869b55e850553a1b1162" integrity sha512-XINj6b8CYuUYC93SG3xPkxlyUc3IJbD6Vvo75CVGuG9uzsefDzWQrhz0Lq8vbPxtb4d63CZdYophF8k8Or/YiA== dependencies: adm-zip "^0.4.9" chalk "^1.1.1" del "^2.2.0" glob "^7.0.3" ini "^1.3.4" minimist "^1.2.0" q "^1.4.1" request "^2.87.0" rimraf "^2.5.2" semver "^5.3.0" xml2js "^0.4.17" wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= xml2js@^0.4.17: version "0.4.22" resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.22.tgz#4fa2d846ec803237de86f30aa9b5f70b6600de02" integrity sha512-MWTbxAQqclRSTnehWWe5nMKzI3VmJ8ltiJEco8akcC6j3miOhjjfzKum5sId+CWhfxdOs/1xauYr8/ZDBtQiRw== dependencies: sax ">=0.6.0" util.promisify "~1.0.0" xmlbuilder "~11.0.0" xmlbuilder@~11.0.0: version "11.0.1" resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== yn@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/yn/-/yn-2.0.0.tgz#e5adabc8acf408f6385fc76495684c88e6af689a" integrity sha1-5a2ryKz0CPY4X8dklWhMiOavaJo= buildbot-3.4.0/worker/000077500000000000000000000000001413250514000146265ustar00rootroot00000000000000buildbot-3.4.0/worker/COPYING000066400000000000000000000354221413250514000156670ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing 
it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. 
If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. 
You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. 
If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. 
(This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. 
Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. 
Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. 
Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 
END OF TERMS AND CONDITIONS buildbot-3.4.0/worker/Dockerfile000066400000000000000000000032241413250514000166210ustar00rootroot00000000000000# buildbot/buildbot-worker # please follow docker best practices # https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/ # Provides a base Ubuntu (20.04) image with latest buildbot worker installed # the worker image is not optimized for size, but rather uses ubuntu for wider package availability FROM ubuntu:20.04 MAINTAINER Buildbot maintainers # Last build date - this can be updated whenever there are security updates so # that everything is rebuilt ENV security_updates_as_of 2018-06-15 # This will make apt-get install without question ARG DEBIAN_FRONTEND=noninteractive # Install security updates and required packages RUN apt-get update && \ apt-get -y upgrade && \ apt-get -y install -q \ build-essential \ git \ subversion \ python3-dev \ libffi-dev \ libssl-dev \ python3-setuptools \ python3-pip \ # Test runs produce a great quantity of dead grandchild processes. In a # non-docker environment, these are automatically reaped by init (process 1), # so we need to simulate that here. See https://github.com/Yelp/dumb-init dumb-init \ curl && \ rm -rf /var/lib/apt/lists/* && \ # Install required python packages, and twisted pip3 --no-cache-dir install 'twisted[tls]' && \ pip3 install virtualenv && \ mkdir /buildbot &&\ useradd -ms /bin/bash buildbot COPY . 
/usr/src/buildbot-worker COPY docker/buildbot.tac /buildbot/buildbot.tac RUN pip3 install /usr/src/buildbot-worker && \ chown -R buildbot /buildbot USER buildbot WORKDIR /buildbot CMD ["/usr/bin/dumb-init", "twistd", "--pidfile=", "-ny", "buildbot.tac"]buildbot-3.4.0/worker/MANIFEST.in000066400000000000000000000003161413250514000163640ustar00rootroot00000000000000include MANIFEST.in README COPYING UPGRADING include bin/buildbot-worker include docs/buildbot-worker.1 include contrib/windows/* contrib/os-x/* contrib/init-scripts/* include contrib/zsh/* contrib/bash/* buildbot-3.4.0/worker/Makefile000066400000000000000000000003271413250514000162700ustar00rootroot00000000000000# developer utilities pylint: pylint -j4 --rcfile=../common/pylintrc --disable=super-with-arguments,raise-missing-from buildbot_worker setup.py flake8: flake8 --config=../common/flake8rc buildbot_worker setup.py buildbot-3.4.0/worker/README000066400000000000000000000040371413250514000155120ustar00rootroot00000000000000 Buildbot: build/test automation http://buildbot.net Brian Warner Dustin J. Mitchell Buildbot is a continuous integration system designed to automate the build/test cycle. By automatically rebuilding and testing the tree each time something has changed, build problems are pinpointed quickly, before other developers are inconvenienced by the failure. Features * Buildbot is easy to set up, but very extensible and customizable. It supports arbitrary build processes, and is not limited to common build processes for particular languages (e.g., autotools or ant) * Buildbot supports building and testing on a variety of platforms. Developers, who do not have the facilities to test their changes everywhere before committing, will know shortly afterwards whether they have broken the build or not. * Buildbot has minimal requirements for workers: using virtualenv, only a Python installation is required. 
* Workers can be run behind a NAT firewall and communicate with the master * Buildbot has a variety of status-reporting tools to get information about builds in front of developers in a timely manner. Worker: This package contains only the Buildbot worker implementation. The `buildbot` package contains the buildmaster as well as a complete set of documentation. See http://buildbot.net for more information and for an online version of the Buildbot documentation. Docker Image: Here are the list of configuration variable for the buildbot/buildbot-worker image - BUILDMASTER: the dns or IP address of the master to connect to - BUILDMASTER_PORT: the port of the worker protocol - WORKERNAME: the name of the worker as declared in the master configuration - WORKERPASS: the password of the worker as declared in the master configuration - WORKER_ENVIRONMENT_BLACKLIST: the worker environment variable to remove before starting the worker As the environment variables are accessible from the build, and displayed in the log, it is better to remove secret variables like $WORKERPASS buildbot-3.4.0/worker/UPGRADING000066400000000000000000000003001413250514000160620ustar00rootroot00000000000000For information on ugprading Buildbot, see the section "Upgrading" in the buildbot documentation. This may be found locally in docs/manual/upgrading/index.rst of the buildbot-master package. buildbot-3.4.0/worker/buildbot_worker/000077500000000000000000000000001413250514000200235ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/__init__.py000066400000000000000000000112141413250514000221330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # Keep in sync with master/buildbot/__init__.py # # We can't put this method in utility modules, because they import dependency packages # from __future__ import division from __future__ import print_function import datetime import os import re from subprocess import PIPE from subprocess import STDOUT from subprocess import Popen def gitDescribeToPep440(version): # git describe produce version in the form: v0.9.8-20-gf0f45ca # where 20 is the number of commit since last release, and gf0f45ca is the short commit id # preceded by 'g' # we parse this a transform into a pep440 release version 0.9.9.dev20 (increment last digit # band add dev before 20) VERSION_MATCH = re.compile(r'(?P\d+)\.(?P\d+)\.(?P\d+)(\.post(?P\d+))?(-(?P\d+))?(-g(?P.+))?') # noqa pylint: disable=line-too-long v = VERSION_MATCH.search(version) if v: major = int(v.group('major')) minor = int(v.group('minor')) patch = int(v.group('patch')) if v.group('dev'): patch += 1 dev = int(v.group('dev')) return "{0}.{1}.{2}-dev{3}".format(major, minor, patch, dev) if v.group('post'): return "{0}.{1}.{2}.post{3}".format(major, minor, patch, v.group('post')) return "{0}.{1}.{2}".format(major, minor, patch) return v def mTimeVersion(init_file): cwd = os.path.dirname(os.path.abspath(init_file)) m = 0 for root, dirs, files in os.walk(cwd): for f in files: m = max(os.path.getmtime(os.path.join(root, f)), m) d = datetime.datetime.utcfromtimestamp(m) return d.strftime("%Y.%m.%d") def getVersionFromArchiveId(git_archive_id='1634372192 
(HEAD -> master, tag: v3.4.0)'): """ Extract the tag if a source is from git archive. When source is exported via `git archive`, the git_archive_id init value is modified and placeholders are expanded to the "archived" revision: %ct: committer date, UNIX timestamp %d: ref names, like the --decorate option of git-log See man gitattributes(5) and git-log(1) (PRETTY FORMATS) for more details. """ # mangle the magic string to make sure it is not replaced by git archive if not git_archive_id.startswith('$For''mat:'): # source was modified by git archive, try to parse the version from # the value of git_archive_id match = re.search(r'tag:\s*v([^,)]+)', git_archive_id) if match: # archived revision is tagged, use the tag return gitDescribeToPep440(match.group(1)) # archived revision is not tagged, use the commit date tstamp = git_archive_id.strip().split()[0] d = datetime.datetime.utcfromtimestamp(int(tstamp)) return d.strftime('%Y.%m.%d') return None def getVersion(init_file): """ Return BUILDBOT_VERSION environment variable, content of VERSION file, git tag or 'latest' """ try: return os.environ['BUILDBOT_VERSION'] except KeyError: pass try: cwd = os.path.dirname(os.path.abspath(init_file)) fn = os.path.join(cwd, 'VERSION') with open(fn) as f: return f.read().strip() except IOError: pass version = getVersionFromArchiveId() if version is not None: return version try: p = Popen(['git', 'describe', '--tags', '--always'], stdout=PIPE, stderr=STDOUT, cwd=cwd) out = p.communicate()[0] if (not p.returncode) and out: v = gitDescribeToPep440(str(out)) if v: return v except OSError: pass try: # if we really can't find the version, we use the date of modification of the most # recent file # docker hub builds cannot use git describe return mTimeVersion(init_file) except Exception: # bummer. 
lets report something return "latest" version = getVersion(__file__) __version__ = version buildbot-3.4.0/worker/buildbot_worker/backports/000077500000000000000000000000001413250514000220135ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/backports/__init__.py000066400000000000000000000016171413250514000241310ustar00rootroot00000000000000# coding=utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members try: from twisted.trial.unittest import SynchronousTestCase except ImportError: from twisted.trial.unittest import TestCase as SynchronousTestCase __all__ = ['SynchronousTestCase'] buildbot-3.4.0/worker/buildbot_worker/base.py000066400000000000000000000375051413250514000213210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import multiprocessing import os.path import shutil import socket import sys from twisted.application import service from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from twisted.spread import pb import buildbot_worker from buildbot_worker import monkeypatches from buildbot_worker.commands import base from buildbot_worker.commands import registry from buildbot_worker.compat import bytes2unicode from buildbot_worker.pbutil import decode class UnknownCommand(pb.Error): pass class WorkerForBuilderBase(service.Service): """This is the local representation of a single Builder: it handles a single kind of build (like an all-warnings build). It has a name and a home directory. The rest of its behavior is determined by the master. """ stopCommandOnShutdown = True # remote is a ref to the Builder object on the master side, and is set # when they attach. We use it to detect when the connection to the master # is severed. remote = None # .command points to a WorkerCommand instance, and is set while the step # is running. We use it to implement the stopBuild method. 
command = None # .command_ref is a ref to the master-side BuildStep object, and is set # when the step is started command_ref = None bf = None def __init__(self, name): # service.Service.__init__(self) # Service has no __init__ method self.setName(name) def __repr__(self): return "".format(self.name, id(self)) @defer.inlineCallbacks def setServiceParent(self, parent): yield service.Service.setServiceParent(self, parent) self.bot = self.parent # note that self.parent will go away when the buildmaster's config # file changes and this Builder is removed (possibly because it has # been changed, so the Builder will be re-added again in a moment). # This may occur during a build, while a step is running. def setBuilddir(self, builddir): assert self.parent self.builddir = builddir self.basedir = os.path.join(bytes2unicode(self.bot.basedir), bytes2unicode(self.builddir)) if not os.path.isdir(self.basedir): os.makedirs(self.basedir) def stopService(self): service.Service.stopService(self) if self.stopCommandOnShutdown: self.stopCommand() def activity(self): bot = self.parent if bot: bworker = bot.parent if bworker and self.bf: bf = bworker.bf bf.activity() def remote_setMaster(self, remote): self.remote = remote self.remote.notifyOnDisconnect(self.lostRemote) def remote_print(self, message): log.msg("WorkerForBuilder.remote_print({0}): message from master: {1}".format( self.name, message)) def lostRemote(self, remote): log.msg("lost remote") self.remote = None def lostRemoteStep(self, remotestep): log.msg("lost remote step") self.command_ref = None if self.stopCommandOnShutdown: self.stopCommand() # the following are Commands that can be invoked by the master-side # Builder def remote_startBuild(self): """This is invoked before the first step of any new build is run. 
It doesn't do much, but masters call it so it's still here.""" def remote_startCommand(self, command_ref, stepId, command, args): """ This gets invoked by L{buildbot.process.step.RemoteCommand.start}, as part of various master-side BuildSteps, to start various commands that actually do the build. I return nothing. Eventually I will call .commandComplete() to notify the master-side RemoteCommand that I'm done. """ stepId = decode(stepId) command = decode(command) args = decode(args) self.activity() if self.command: log.msg("leftover command, dropping it") self.stopCommand() try: factory = registry.getFactory(command) except KeyError: raise UnknownCommand(u"unrecognized WorkerCommand '{0}'".format(command)) self.protocol_args_setup(command, args) self.command = factory(self, stepId, args) log.msg(u" startCommand:{0} [id {1}]".format(command, stepId)) self.command_ref = command_ref self.protocol_notify_on_disconnect() d = self.command.doStart() d.addCallback(lambda res: None) d.addBoth(self.commandComplete) return None def remote_interruptCommand(self, stepId, why): """Halt the current step.""" log.msg("asked to interrupt current command: {0}".format(why)) self.activity() if not self.command: # TODO: just log it, a race could result in their interrupting a # command that wasn't actually running log.msg(" .. but none was running") return self.command.doInterrupt() def stopCommand(self): """Make any currently-running command die, with no further status output. This is used when the worker is shutting down or the connection to the master has been lost. Interrupt the command, silence it, and then forget about it.""" if not self.command: return log.msg("stopCommand: halting current command {0}".format(self.command)) self.command.doInterrupt() # shut up! and die! self.command = None # forget you! 
# sendUpdate is invoked by the Commands we spawn def sendUpdate(self, data): """This sends the status update to the master-side L{buildbot.process.step.RemoteCommand} object, giving it a sequence number in the process. It adds the update to a queue, and asks the master to acknowledge the update so it can be removed from that queue.""" if not self.running: # .running comes from service.Service, and says whether the # service is running or not. If we aren't running, don't send any # status messages. return # the update[1]=0 comes from the leftover 'updateNum', which the # master still expects to receive. Provide it to avoid significant # interoperability issues between new workers and old masters. if self.command_ref: update = [data, 0] updates = [update] d = self.protocol_update(updates) d.addCallback(self.ackUpdate) d.addErrback(self._ackFailed, "WorkerForBuilder.sendUpdate") def ackUpdate(self, acknum): self.activity() # update the "last activity" timer def ackComplete(self, dummy): self.activity() # update the "last activity" timer def _ackFailed(self, why, where): log.msg("WorkerForBuilder._ackFailed:", where) log.err(why) # we don't really care # this is fired by the Deferred attached to each Command def commandComplete(self, failure): if failure: log.msg("WorkerForBuilder.commandFailed", self.command) log.err(failure) # failure, if present, is a failure.Failure. To send it across # the wire, we must turn it into a pb.CopyableFailure. 
failure = pb.CopyableFailure(failure) failure.unsafeTracebacks = True else: # failure is None log.msg("WorkerForBuilder.commandComplete", self.command) self.command = None if not self.running: log.msg(" but we weren't running, quitting silently") return if self.command_ref: d = self.protocol_complete(failure) d.addCallback(self.ackComplete) d.addErrback(self._ackFailed, "sendComplete") self.command_ref = None class BotBase(service.MultiService): """I represent the worker-side bot.""" name = "bot" WorkerForBuilder = WorkerForBuilderBase os_release_file = "/etc/os-release" def __init__(self, basedir, unicode_encoding=None, delete_leftover_dirs=False): service.MultiService.__init__(self) self.basedir = basedir self.numcpus = None self.unicode_encoding = unicode_encoding or sys.getfilesystemencoding( ) or 'ascii' self.delete_leftover_dirs = delete_leftover_dirs self.builders = {} # for testing purposes def setOsReleaseFile(self, os_release_file): self.os_release_file = os_release_file def startService(self): assert os.path.isdir(self.basedir) service.MultiService.startService(self) def remote_getCommands(self): commands = { n: base.command_version for n in registry.getAllCommandNames() } return commands @defer.inlineCallbacks def remote_setBuilderList(self, wanted): retval = {} wanted_names = {name for (name, builddir) in wanted} wanted_dirs = {builddir for (name, builddir) in wanted} wanted_dirs.add('info') for (name, builddir) in wanted: b = self.builders.get(name, None) if b: if b.builddir != builddir: log.msg("changing builddir for builder {0} from {1} to {2}".format( name, b.builddir, builddir)) b.setBuilddir(builddir) else: b = self.WorkerForBuilder(name) b.unicode_encoding = self.unicode_encoding b.setServiceParent(self) b.setBuilddir(builddir) self.builders[name] = b retval[name] = b # disown any builders no longer desired to_remove = list(set(self.builders.keys()) - wanted_names) if to_remove: yield defer.gatherResults([ 
defer.maybeDeferred(self.builders[name].disownServiceParent) for name in to_remove]) # and *then* remove them from the builder list for name in to_remove: del self.builders[name] # finally warn about any leftover dirs for dir in os.listdir(self.basedir): if os.path.isdir(os.path.join(self.basedir, dir)): if dir not in wanted_dirs: if self.delete_leftover_dirs: log.msg("Deleting directory '{0}' that is not being " "used by the buildmaster".format(dir)) try: shutil.rmtree(dir) except OSError as e: log.msg("Cannot remove directory '{0}': " "{1}".format(dir, e)) else: log.msg("I have a leftover directory '{0}' that is not " "being used by the buildmaster: you can delete " "it now".format(dir)) defer.returnValue(retval) def remote_print(self, message): log.msg("message from master:", message) @staticmethod def _read_os_release(os_release_file, props): if not os.path.exists(os_release_file): return with open(os_release_file, "r") as fin: for line in fin: line = line.strip("\r\n") # as per man page: Lines beginning with "#" shall be ignored as comments. if len(line) == 0 or line.startswith('#'): continue # parse key-values key, value = line.split("=", 1) if value: key = 'os_{}'.format(key.lower()) props[key] = value.strip('"') def remote_getWorkerInfo(self): """This command retrieves data from the files in WORKERDIR/info/* and sends the contents to the buildmaster. These are used to describe the worker and its configuration, and should be created and maintained by the worker administrator. They will be retrieved each time the master-worker connection is established. 
""" files = {} basedir = os.path.join(self.basedir, "info") if os.path.isdir(basedir): for f in os.listdir(basedir): filename = os.path.join(basedir, f) if os.path.isfile(filename): with open(filename, "r") as fin: files[f] = fin.read() self._read_os_release(self.os_release_file, files) if not self.numcpus: try: self.numcpus = multiprocessing.cpu_count() except NotImplementedError: log.msg("warning: could not detect the number of CPUs for " "this worker. Assuming 1 CPU.") self.numcpus = 1 files['environ'] = os.environ.copy() files['system'] = os.name files['basedir'] = self.basedir files['numcpus'] = self.numcpus files['version'] = self.remote_getVersion() files['worker_commands'] = self.remote_getCommands() return files def remote_getVersion(self): """Send our version back to the Master""" return buildbot_worker.version def remote_shutdown(self): log.msg("worker shutting down on command from master") # there's no good way to learn that the PB response has been delivered, # so we'll just wait a bit, in hopes the master hears back. Masters are # resilient to workers dropping their connections, so there is no harm # if this timeout is too short. 
reactor.callLater(0.2, reactor.stop) class WorkerBase(service.MultiService): def __init__(self, name, basedir, bot_class, umask=None, unicode_encoding=None, delete_leftover_dirs=False): service.MultiService.__init__(self) self.name = name bot = bot_class(basedir, unicode_encoding=unicode_encoding, delete_leftover_dirs=delete_leftover_dirs) bot.setServiceParent(self) self.bot = bot self.umask = umask self.basedir = basedir def startService(self): # first, apply all monkeypatches monkeypatches.patch_all() log.msg("Starting Worker -- version: {0}".format(buildbot_worker.version)) if self.umask is not None: os.umask(self.umask) self.recordHostname(self.basedir) service.MultiService.startService(self) def recordHostname(self, basedir): "Record my hostname in twistd.hostname, for user convenience" log.msg("recording hostname in twistd.hostname") filename = os.path.join(basedir, "twistd.hostname") try: hostname = os.uname()[1] # only on unix except AttributeError: # this tends to fail on non-connected hosts, e.g., laptops # on planes hostname = socket.getfqdn() try: with open(filename, "w") as f: f.write("{0}\n".format(hostname)) except Exception: log.msg("failed - ignoring") buildbot-3.4.0/worker/buildbot_worker/bot.py000066400000000000000000000016101413250514000211570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from buildbot_worker.null import LocalWorker from buildbot_worker.pb import Worker __all__ = ['Worker', 'LocalWorker'] buildbot-3.4.0/worker/buildbot_worker/commands/000077500000000000000000000000001413250514000216245ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/commands/__init__.py000066400000000000000000000000001413250514000237230ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/commands/base.py000066400000000000000000000214441413250514000231150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from zope.interface import implementer from buildbot_worker import util from buildbot_worker.exceptions import AbandonChain from buildbot_worker.interfaces import IWorkerCommand # The following identifier should be updated each time this file is changed command_version = "3.1" # version history: # >=1.17: commands are interruptable # >=1.28: Arch understands 'revision', added Bazaar # >=1.33: Source classes understand 'retry' # >=1.39: Source classes correctly handle changes in branch (except Git) # Darcs accepts 'revision' (now all do but Git) (well, and P4Sync) # Arch/Baz should accept 'build-config' # >=1.51: (release 0.7.3) # >= 2.1: SlaveShellCommand now accepts 'initial_stdin', 'keep_stdin_open', # and 'logfiles'. It now sends 'log' messages in addition to # stdout/stdin/header/rc. It acquired writeStdin/closeStdin methods, # but these are not remotely callable yet. # (not externally visible: ShellCommandPP has writeStdin/closeStdin. 
# ShellCommand accepts new arguments (logfiles=, initialStdin=, # keepStdinOpen=) and no longer accepts stdin=) # (release 0.7.4) # >= 2.2: added monotone, uploadFile, and downloadFile (release 0.7.5) # >= 2.3: added bzr (release 0.7.6) # >= 2.4: Git understands 'revision' and branches # >= 2.5: workaround added for remote 'hg clone --rev REV' when hg<0.9.2 # >= 2.6: added uploadDirectory # >= 2.7: added usePTY option to SlaveShellCommand # >= 2.8: added username and password args to SVN class # >= 2.9: add depth arg to SVN class # >= 2.10: CVS can handle 'extra_options' and 'export_options' # >= 2.11: Arch, Bazaar, and Monotone removed # >= 2.12: SlaveShellCommand no longer accepts 'keep_stdin_open' # >= 2.13: SlaveFileUploadCommand supports option 'keepstamp' # >= 2.14: RemoveDirectory can delete multiple directories # >= 2.15: 'interruptSignal' option is added to SlaveShellCommand # >= 2.16: 'sigtermTime' option is added to SlaveShellCommand # >= 2.16: runprocess supports obfuscation via tuples (#1748) # >= 2.16: listdir command added to read a directory # >= 3.0: new buildbot-worker package: # * worker-side usePTY configuration (usePTY='slave-config') support # dropped, # * remote method getSlaveInfo() renamed to getWorkerInfo(). # * "slavesrc" command argument renamed to "workersrc" in uploadFile and # uploadDirectory commands. # * "slavedest" command argument renamed to "workerdest" in downloadFile # command. # >= 3.1: rmfile command added to remove a file @implementer(IWorkerCommand) class Command(object): """This class defines one command that can be invoked by the build master. The command is executed on the worker side, and always sends back a completion message when it finishes. It may also send intermediate status as it runs (by calling builder.sendStatus). Some commands can be interrupted (either by the build master or a local timeout), in which case the step is expected to complete normally with a status message that indicates an error occurred. 
These commands are used by BuildSteps on the master side. Each kind of BuildStep uses a single Command. The worker must implement all the Commands required by the set of BuildSteps used for any given build: this is checked at startup time. All Commands are constructed with the same signature: c = CommandClass(builder, stepid, args) where 'builder' is the parent WorkerForBuilder object, and 'args' is a dict that is interpreted per-command. The setup(args) method is available for setup, and is run from __init__. Mandatory args can be declared by listing them in the requiredArgs property. They will be checked before calling the setup(args) method. The Command is started with start(). This method must be implemented in a subclass, and it should return a Deferred. When your step is done, you should fire the Deferred (the results are not used). If the command is interrupted, it should fire the Deferred anyway. While the command runs. it may send status messages back to the buildmaster by calling self.sendStatus(statusdict). The statusdict is interpreted by the master-side BuildStep however it likes. A separate completion message is sent when the deferred fires, which indicates that the Command has finished, but does not carry any status data. If the Command needs to return an exit code of some sort, that should be sent as a regular status message before the deferred is fired . Once builder.commandComplete has been run, no more status messages may be sent. If interrupt() is called, the Command should attempt to shut down as quickly as possible. Child processes should be killed, new ones should not be started. The Command should send some kind of error status update, then complete as usual by firing the Deferred. .interrupted should be set by interrupt(), and can be tested to avoid sending multiple error status messages. If .running is False, the bot is shutting down (or has otherwise lost the connection to the master), and should not send any status messages. 
This is checked in Command.sendStatus . """ # builder methods: # sendStatus(dict) (zero or more) # commandComplete() or commandInterrupted() (one, at end) requiredArgs = [] debug = False interrupted = False # set by Builder, cleared on shutdown or when the Deferred fires running = False _reactor = reactor def __init__(self, builder, stepId, args): self.builder = builder self.stepId = stepId # just for logging self.args = args self.startTime = None missingArgs = [arg for arg in self.requiredArgs if arg not in args] if missingArgs: raise ValueError("{0} is missing args: {1}".format( self.__class__.__name__, ", ".join(missingArgs))) self.setup(args) def setup(self, args): """Override this in a subclass to extract items from the args dict.""" def doStart(self): self.running = True self.startTime = util.now(self._reactor) d = defer.maybeDeferred(self.start) def commandComplete(res): self.sendStatus( {"elapsed": util.now(self._reactor) - self.startTime}) self.running = False return res d.addBoth(commandComplete) return d def start(self): """Start the command. This method should return a Deferred that will fire when the command has completed. The Deferred's argument will be ignored. This method should be overridden by subclasses.""" raise NotImplementedError("You must implement this in a subclass") def sendStatus(self, status): """Send a status update to the master.""" if self.debug: log.msg("sendStatus", status) if not self.running: log.msg("would sendStatus but not .running") return self.builder.sendUpdate(status) def doInterrupt(self): self.running = False self.interrupt() def interrupt(self): """Override this in a subclass to allow commands to be interrupted. 
May be called multiple times, test and set self.interrupted=True if this matters.""" # utility methods, mostly used by WorkerShellCommand and the like def _abandonOnFailure(self, rc): if not isinstance(rc, int): log.msg("weird, _abandonOnFailure was given rc={0} ({1})".format( rc, type(rc))) assert isinstance(rc, int) if rc != 0: raise AbandonChain(rc) return rc def _sendRC(self, res): self.sendStatus({'rc': 0}) def _checkAbandoned(self, why): log.msg("_checkAbandoned", why) why.trap(AbandonChain) log.msg(" abandoning chain", why.value) self.sendStatus({'rc': why.value.args[0]}) return None buildbot-3.4.0/worker/buildbot_worker/commands/fs.py000066400000000000000000000234341413250514000226140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import glob import os import shutil import sys from twisted.internet import defer from twisted.internet import threads from twisted.python import log from twisted.python import runtime from buildbot_worker import runprocess from buildbot_worker.commands import base from buildbot_worker.commands import utils class MakeDirectory(base.Command): header = "mkdir" # args['dir'] is relative to Builder directory, and is required. 
requiredArgs = ['dir'] def start(self): dirname = os.path.join(self.builder.basedir, self.args['dir']) try: if not os.path.isdir(dirname): os.makedirs(dirname) self.sendStatus({'rc': 0}) except OSError as e: log.msg("MakeDirectory {0} failed: {1}".format(dirname, e)) self.sendStatus( {'header': '{0}: {1}: {2}'.format(self.header, e.strerror, dirname)}) self.sendStatus({'rc': e.errno}) class RemoveDirectory(base.Command): header = "rmdir" # args['dir'] is relative to Builder directory, and is required. requiredArgs = ['dir'] def setup(self, args): self.logEnviron = args.get('logEnviron', True) @defer.inlineCallbacks def start(self): args = self.args dirnames = args['dir'] self.timeout = args.get('timeout', 120) self.maxTime = args.get('maxTime', None) self.rc = 0 if isinstance(dirnames, list): assert dirnames for dirname in dirnames: res = yield self.removeSingleDir(dirname) # Even if single removal of single file/dir consider it as # failure of whole command, but continue removing other files # Send 'rc' to master to handle failure cases if res != 0: self.rc = res else: self.rc = yield self.removeSingleDir(dirnames) self.sendStatus({'rc': self.rc}) def removeSingleDir(self, dirname): self.dir = os.path.join(self.builder.basedir, dirname) if runtime.platformType != "posix": d = threads.deferToThread(utils.rmdirRecursive, self.dir) def cb(_): return 0 # rc=0 def eb(f): self.sendStatus( {'header': 'exception from rmdirRecursive\n' + f.getTraceback()}) return -1 # rc=-1 d.addCallbacks(cb, eb) else: d = self._clobber(None) return d @defer.inlineCallbacks def _clobber(self, dummy, chmodDone=False): command = ["rm", "-rf", self.dir] c = runprocess.RunProcess(self.builder, command, self.builder.basedir, sendRC=0, timeout=self.timeout, maxTime=self.maxTime, logEnviron=self.logEnviron, usePTY=False) self.command = c # sendRC=0 means the rm command will send stdout/stderr to the # master, but not the rc=0 when it finishes. 
That job is left to # _sendRC rc = yield c.start() # The rm -rf may fail if there is a left-over subdir with chmod 000 # permissions. So if we get a failure, we attempt to chmod suitable # permissions and re-try the rm -rf. if not chmodDone: rc = yield self._tryChmod(rc) defer.returnValue(rc) @defer.inlineCallbacks def _tryChmod(self, rc): assert isinstance(rc, int) if rc == 0: defer.returnValue(0) return # pragma: no cover # Attempt a recursive chmod and re-try the rm -rf after. command = ["chmod", "-Rf", "u+rwx", os.path.join(self.builder.basedir, self.dir)] if sys.platform.startswith('freebsd'): # Work around a broken 'chmod -R' on FreeBSD (it tries to recurse into a # directory for which it doesn't have permission, before changing that # permission) by running 'find' instead command = ["find", os.path.join(self.builder.basedir, self.dir), '-exec', 'chmod', 'u+rwx', '{}', ';'] c = runprocess.RunProcess(self.builder, command, self.builder.basedir, sendRC=0, timeout=self.timeout, maxTime=self.maxTime, logEnviron=self.logEnviron, usePTY=False) self.command = c rc = yield c.start() rc = yield self._clobber(rc, True) defer.returnValue(rc) class CopyDirectory(base.Command): header = "cpdir" # args['todir'] and args['fromdir'] are relative to Builder directory, and # are required. 
requiredArgs = ['todir', 'fromdir'] def setup(self, args): self.logEnviron = args.get('logEnviron', True) def start(self): args = self.args fromdir = os.path.join(self.builder.basedir, self.args['fromdir']) todir = os.path.join(self.builder.basedir, self.args['todir']) self.timeout = args.get('timeout', 120) self.maxTime = args.get('maxTime', None) if runtime.platformType != "posix": d = threads.deferToThread(shutil.copytree, fromdir, todir) def cb(_): return 0 # rc=0 def eb(f): self.sendStatus( {'header': 'exception from copytree\n' + f.getTraceback()}) return -1 # rc=-1 d.addCallbacks(cb, eb) @d.addCallback def send_rc(rc): self.sendStatus({'rc': rc}) else: if not os.path.exists(os.path.dirname(todir)): os.makedirs(os.path.dirname(todir)) if os.path.exists(todir): # I don't think this happens, but just in case.. log.msg(("cp target '{0}' already exists -- cp will not do what you think!" ).format(todir)) command = ['cp', '-R', '-P', '-p', '-v', fromdir, todir] c = runprocess.RunProcess(self.builder, command, self.builder.basedir, sendRC=False, timeout=self.timeout, maxTime=self.maxTime, logEnviron=self.logEnviron, usePTY=False) self.command = c d = c.start() d.addCallback(self._abandonOnFailure) d.addCallbacks(self._sendRC, self._checkAbandoned) return d class StatFile(base.Command): header = "stat" # args['file'] is relative to Builder directory, and is required. requireArgs = ['file'] def start(self): filename = os.path.join( self.builder.basedir, self.args.get('workdir', ''), self.args['file']) try: stat = os.stat(filename) self.sendStatus({'stat': tuple(stat)}) self.sendStatus({'rc': 0}) except OSError as e: log.msg("StatFile {0} failed: {1}".format(filename, e)) self.sendStatus( {'header': '{0}: {1}: {2}'.format(self.header, e.strerror, filename)}) self.sendStatus({'rc': e.errno}) class GlobPath(base.Command): header = "glob" # args['path'] is relative to Builder directory, and is required. 
requiredArgs = ['path'] def start(self): pathname = os.path.join(self.builder.basedir, self.args['path']) try: # recursive matching is only support in python3.5+ if sys.version_info[:2] >= (3, 5): files = glob.glob(pathname, recursive=True) else: files = glob.glob(pathname) self.sendStatus({'files': files}) self.sendStatus({'rc': 0}) except OSError as e: log.msg("GlobPath {0} failed: {1}".format(pathname, e)) self.sendStatus( {'header': '{0}: {1}: {2}'.format(self.header, e.strerror, pathname)}) self.sendStatus({'rc': e.errno}) class ListDir(base.Command): header = "listdir" # args['dir'] is relative to Builder directory, and is required. requireArgs = ['dir'] def start(self): dirname = os.path.join(self.builder.basedir, self.args['dir']) try: files = os.listdir(dirname) self.sendStatus({'files': files}) self.sendStatus({'rc': 0}) except OSError as e: log.msg("ListDir {0} failed: {1}".format(dirname, e)) self.sendStatus( {'header': '{0}: {1}: {2}'.format(self.header, e.strerror, dirname)}) self.sendStatus({'rc': e.errno}) class RemoveFile(base.Command): header = "rmfile" # args['path'] is relative to Builder directory, and is required. requiredArgs = ['path'] def start(self): pathname = os.path.join(self.builder.basedir, self.args['path']) try: os.remove(pathname) self.sendStatus({'rc': 0}) except OSError as e: log.msg("remove file {0} failed: {1}".format(pathname, e)) self.sendStatus( {'header': '{0}: {1}: {2}'.format(self.header, e.strerror, pathname)}) self.sendStatus({'rc': e.errno}) buildbot-3.4.0/worker/buildbot_worker/commands/registry.py000066400000000000000000000034171413250514000240530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import buildbot_worker.commands.fs import buildbot_worker.commands.shell import buildbot_worker.commands.transfer commandRegistry = { # command name : fully qualified factory (callable) "shell": buildbot_worker.commands.shell.WorkerShellCommand, "uploadFile": buildbot_worker.commands.transfer.WorkerFileUploadCommand, "uploadDirectory": buildbot_worker.commands.transfer.WorkerDirectoryUploadCommand, "downloadFile": buildbot_worker.commands.transfer.WorkerFileDownloadCommand, "mkdir": buildbot_worker.commands.fs.MakeDirectory, "rmdir": buildbot_worker.commands.fs.RemoveDirectory, "cpdir": buildbot_worker.commands.fs.CopyDirectory, "stat": buildbot_worker.commands.fs.StatFile, "glob": buildbot_worker.commands.fs.GlobPath, "listdir": buildbot_worker.commands.fs.ListDir, "rmfile": buildbot_worker.commands.fs.RemoveFile } def getFactory(command): factory = commandRegistry[command] return factory def getAllCommandNames(): return list(commandRegistry) buildbot-3.4.0/worker/buildbot_worker/commands/shell.py000066400000000000000000000041621413250514000233100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import os from buildbot_worker import runprocess from buildbot_worker.commands import base class WorkerShellCommand(base.Command): requiredArgs = ['workdir', 'command'] def start(self): args = self.args workdir = os.path.join(self.builder.basedir, args['workdir']) c = runprocess.RunProcess( self.builder, args['command'], workdir, environ=args.get('env'), timeout=args.get('timeout', None), maxTime=args.get('maxTime', None), sigtermTime=args.get('sigtermTime', None), sendStdout=args.get('want_stdout', True), sendStderr=args.get('want_stderr', True), sendRC=True, initialStdin=args.get('initial_stdin'), logfiles=args.get('logfiles', {}), usePTY=args.get('usePTY', False), logEnviron=args.get('logEnviron', True), ) if args.get('interruptSignal'): c.interruptSignal = args['interruptSignal'] c._reactor = self._reactor self.command = c d = self.command.start() return d def interrupt(self): self.interrupted = True self.command.kill("command interrupted") def writeStdin(self, data): self.command.writeStdin(data) def closeStdin(self): self.command.closeStdin() buildbot-3.4.0/worker/buildbot_worker/commands/transfer.py000066400000000000000000000320231413250514000240220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import os import tarfile import tempfile from twisted.internet import defer from twisted.python import log from buildbot_worker.commands.base import Command class TransferCommand(Command): def finished(self, res): if self.debug: log.msg('finished: stderr={0!r}, rc={1!r}'.format(self.stderr, self.rc)) # don't use self.sendStatus here, since we may no longer be running # if we have been interrupted upd = {'rc': self.rc} if self.stderr: upd['stderr'] = self.stderr self.builder.sendUpdate(upd) return res def interrupt(self): if self.debug: log.msg('interrupted') if self.interrupted: return self.rc = 1 self.interrupted = True # now we wait for the next trip around the loop. It abandon the file # when it sees self.interrupted set. 
class WorkerFileUploadCommand(TransferCommand): """ Upload a file from worker to build master Arguments: - ['workdir']: base directory to use - ['workersrc']: name of the worker-side file to read from - ['writer']: RemoteReference to a buildbot_worker.protocols.base.FileWriterProxy object - ['maxsize']: max size (in bytes) of file to write - ['blocksize']: max size for each data block - ['keepstamp']: whether to preserve file modified and accessed times """ debug = False requiredArgs = ['workdir', 'workersrc', 'writer', 'blocksize'] def setup(self, args): self.workdir = args['workdir'] self.filename = args['workersrc'] self.writer = args['writer'] self.remaining = args['maxsize'] self.blocksize = args['blocksize'] self.keepstamp = args.get('keepstamp', False) self.stderr = None self.rc = 0 self.fp = None def start(self): if self.debug: log.msg('WorkerFileUploadCommand started') # Open file self.path = os.path.join(self.builder.basedir, self.workdir, os.path.expanduser(self.filename)) access_time = None modified_time = None try: if self.keepstamp: access_time = os.path.getatime(self.path) modified_time = os.path.getmtime(self.path) self.fp = open(self.path, 'rb') if self.debug: log.msg("Opened '{0}' for upload".format(self.path)) except Exception: self.fp = None self.stderr = "Cannot open file '{0}' for upload".format(self.path) self.rc = 1 if self.debug: log.msg("Cannot open file '{0}' for upload".format(self.path)) self.sendStatus({'header': "sending {0}\n".format(self.path)}) d = defer.Deferred() self._reactor.callLater(0, self._loop, d) @defer.inlineCallbacks def _close_ok(res): if self.fp: self.fp.close() self.fp = None yield self.builder.protocol_update_upload_file_close(self.writer) if self.keepstamp: yield self.builder.protocol_update_upload_file_utime(self.writer, access_time, modified_time) def _close_err(f): self.rc = 1 if self.fp: self.fp.close() self.fp = None # call remote's close(), but keep the existing failure d1 = 
self.builder.protocol_update_upload_file_close(self.writer) def eb(f2): log.msg("ignoring error from remote close():") log.err(f2) d1.addErrback(eb) d1.addBoth(lambda _: f) # always return _loop failure return d1 d.addCallbacks(_close_ok, _close_err) d.addBoth(self.finished) return d def _loop(self, fire_when_done): d = defer.maybeDeferred(self._writeBlock) def _done(finished): if finished: fire_when_done.callback(None) else: self._loop(fire_when_done) def _err(why): fire_when_done.errback(why) d.addCallbacks(_done, _err) return None def _writeBlock(self): """Write a block of data to the remote writer""" if self.interrupted or self.fp is None: if self.debug: log.msg('WorkerFileUploadCommand._writeBlock(): end') return True length = self.blocksize if self.remaining is not None and length > self.remaining: length = self.remaining if length <= 0: if self.stderr is None: self.stderr = 'Maximum filesize reached, truncating file \'{0}\''.format( self.path) self.rc = 1 data = '' else: data = self.fp.read(length) if self.debug: log.msg('WorkerFileUploadCommand._writeBlock(): ' + 'allowed={0} readlen={1}'.format(length, len(data))) if not data: log.msg("EOF: callRemote(close)") return True if self.remaining is not None: self.remaining = self.remaining - len(data) assert self.remaining >= 0 d = self.do_protocol_write(data) d.addCallback(lambda res: False) return d def do_protocol_write(self, data): return self.builder.protocol_update_upload_file_write(self.writer, data) class WorkerDirectoryUploadCommand(WorkerFileUploadCommand): debug = False requiredArgs = ['workdir', 'workersrc', 'writer', 'blocksize'] def setup(self, args): self.workdir = args['workdir'] self.dirname = args['workersrc'] self.writer = args['writer'] self.remaining = args['maxsize'] self.blocksize = args['blocksize'] self.compress = args['compress'] self.stderr = None self.rc = 0 def start(self): if self.debug: log.msg('WorkerDirectoryUploadCommand started') self.path = os.path.join(self.builder.basedir, 
self.workdir, os.path.expanduser(self.dirname)) if self.debug: log.msg("path: {0!r}".format(self.path)) # Create temporary archive fd, self.tarname = tempfile.mkstemp() self.fp = os.fdopen(fd, "rb+") if self.compress == 'bz2': mode = 'w|bz2' elif self.compress == 'gz': mode = 'w|gz' else: mode = 'w' # TODO: Use 'with' when depending on Python 2.7 # Not possible with older versions: # exceptions.AttributeError: 'TarFile' object has no attribute '__exit__' archive = tarfile.open(mode=mode, fileobj=self.fp) archive.add(self.path, '') archive.close() # Transfer it self.fp.seek(0) self.sendStatus({'header': "sending {0}\n".format(self.path)}) d = defer.Deferred() self._reactor.callLater(0, self._loop, d) def unpack(res): d1 = self.builder.protocol_update_upload_directory(self.writer) def unpack_err(f): self.rc = 1 return f d1.addErrback(unpack_err) d1.addCallback(lambda ignored: res) return d1 d.addCallback(unpack) d.addBoth(self.finished) return d def finished(self, res): self.fp.close() self.fp = None os.remove(self.tarname) return TransferCommand.finished(self, res) def do_protocol_write(self, data): return self.builder.protocol_update_upload_directory_write(self.writer, data) class WorkerFileDownloadCommand(TransferCommand): """ Download a file from master to worker Arguments: - ['workdir']: base directory to use - ['workerdest']: name of the worker-side file to be created - ['reader']: RemoteReference to a buildbot_worker.protocols.base.FileReaderProxy object - ['maxsize']: max size (in bytes) of file to write - ['blocksize']: max size for each data block - ['mode']: access mode for the new file """ debug = False requiredArgs = ['workdir', 'workerdest', 'reader', 'blocksize'] def setup(self, args): self.workdir = args['workdir'] self.filename = args['workerdest'] self.reader = args['reader'] self.bytes_remaining = args['maxsize'] self.blocksize = args['blocksize'] self.mode = args['mode'] self.stderr = None self.rc = 0 self.fp = None def start(self): if self.debug: 
log.msg('WorkerFileDownloadCommand starting') # Open file self.path = os.path.join(self.builder.basedir, self.workdir, os.path.expanduser(self.filename)) dirname = os.path.dirname(self.path) if not os.path.exists(dirname): os.makedirs(dirname) try: self.fp = open(self.path, 'wb') if self.debug: log.msg("Opened '{0}' for download".format(self.path)) if self.mode is not None: # note: there is a brief window during which the new file # will have the worker's default (umask) mode before we # set the new one. Don't use this mode= feature to keep files # private: use the worker's umask for that instead. (it # is possible to call os.umask() before and after the open() # call, but cleaning up from exceptions properly is more of a # nuisance that way). os.chmod(self.path, self.mode) except IOError: # TODO: this still needs cleanup if self.fp: self.fp.close() self.fp = None self.stderr = "Cannot open file '{0}' for download".format(self.path) self.rc = 1 if self.debug: log.msg("Cannot open file '{0}' for download".format(self.path)) d = defer.Deferred() self._reactor.callLater(0, self._loop, d) def _close(res): # close the file, but pass through any errors from _loop d1 = self.builder.protocol_update_read_file_close(self.reader) d1.addErrback(log.err, 'while trying to close reader') d1.addCallback(lambda ignored: res) return d1 d.addBoth(_close) d.addBoth(self.finished) return d def _loop(self, fire_when_done): d = defer.maybeDeferred(self._readBlock) def _done(finished): if finished: fire_when_done.callback(None) else: self._loop(fire_when_done) def _err(why): fire_when_done.errback(why) d.addCallbacks(_done, _err) return None def _readBlock(self): """Read a block of data from the remote reader.""" if self.interrupted or self.fp is None: if self.debug: log.msg('WorkerFileDownloadCommand._readBlock(): end') return True length = self.blocksize if self.bytes_remaining is not None and length > self.bytes_remaining: length = self.bytes_remaining if length <= 0: if self.stderr is 
None: self.stderr = "Maximum filesize reached, truncating file '{0}'".format( self.path) self.rc = 1 return True else: d = self.builder.protocol_update_read_file(self.reader, length) d.addCallback(self._writeData) return d def _writeData(self, data): if self.debug: log.msg('WorkerFileDownloadCommand._readBlock(): readlen=%d' % len(data)) if not data: return True if self.bytes_remaining is not None: self.bytes_remaining = self.bytes_remaining - len(data) assert self.bytes_remaining >= 0 self.fp.write(data) return False def finished(self, res): if self.fp: self.fp.close() self.fp = None return TransferCommand.finished(self, res) buildbot-3.4.0/worker/buildbot_worker/commands/utils.py000066400000000000000000000100321413250514000233320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from future.utils import text_type import os from twisted.python import log from twisted.python import runtime from twisted.python.procutils import which def getCommand(name): possibles = which(name) if not possibles: raise RuntimeError("Couldn't find executable for '{0}'".format(name)) # # Under windows, if there is more than one executable "thing" # that matches (e.g. 
*.bat, *.cmd and *.exe), we not just use # the first in alphabet (*.bat/*.cmd) if there is a *.exe. # e.g. under MSysGit/Windows, there is both a git.cmd and a # git.exe on path, but we want the git.exe, since the git.cmd # does not seem to work properly with regard to errors raised # and caught in buildbot worker command (vcs.py) # if runtime.platformType == 'win32' and len(possibles) > 1: possibles_exe = which(name + ".exe") if possibles_exe: return possibles_exe[0] return possibles[0] # this just keeps pyflakes happy on non-Windows systems if runtime.platformType != 'win32': WindowsError = RuntimeError if runtime.platformType == 'win32': # pragma: no cover def rmdirRecursive(dir): """This is a replacement for shutil.rmtree that works better under windows. Thanks to Bear at the OSAF for the code.""" if not os.path.exists(dir): return if os.path.islink(dir) or os.path.isfile(dir): os.remove(dir) return # Verify the directory is read/write/execute for the current user os.chmod(dir, 0o700) # os.listdir below only returns a list of unicode filenames if the parameter is unicode # Thus, if a non-unicode-named dir contains a unicode filename, that filename will get # garbled. # So force dir to be unicode. if not isinstance(dir, text_type): try: dir = text_type(dir, "utf-8") except UnicodeDecodeError: log.err("rmdirRecursive: decoding from UTF-8 failed (ignoring)") try: list = os.listdir(dir) except WindowsError as e: msg = ("rmdirRecursive: unable to listdir {0} ({1}). 
Trying to " "remove like a dir".format(dir, e.strerror.decode('mbcs'))) log.msg(msg.encode('utf-8')) os.rmdir(dir) return for name in list: full_name = os.path.join(dir, name) # on Windows, if we don't have write permission we can't remove # the file/directory either, so turn that on if os.name == 'nt': if not os.access(full_name, os.W_OK): # I think this is now redundant, but I don't have an NT # machine to test on, so I'm going to leave it in place # -warner os.chmod(full_name, 0o600) if os.path.islink(full_name): os.remove(full_name) # as suggested in bug #792 elif os.path.isdir(full_name): rmdirRecursive(full_name) else: if os.path.isfile(full_name): os.chmod(full_name, 0o700) os.remove(full_name) os.rmdir(dir) else: # use rmtree on POSIX import shutil rmdirRecursive = shutil.rmtree buildbot-3.4.0/worker/buildbot_worker/compat.py000066400000000000000000000056371413250514000216730ustar00rootroot00000000000000# coding=utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Helpers for handling compatibility differences between Python 2 and Python 3. """ from __future__ import absolute_import from __future__ import print_function from future.utils import text_type if str != bytes: # On Python 3 and higher, str and bytes # are not equivalent. We must use StringIO for # doing io on native strings. 
from io import StringIO as NativeStringIO else: # On Python 2 and older, str and bytes # are equivalent. We must use BytesIO for # doing io on native strings. from io import BytesIO as NativeStringIO def bytes2NativeString(x, encoding='utf-8'): """ Convert C{bytes} to a native C{str}. On Python 3 and higher, str and bytes are not equivalent. In this case, decode the bytes, and return a native string. On Python 2 and lower, str and bytes are equivalent. In this case, just just return the native string. @param x: a string of type C{bytes} @param encoding: an optional codec, default: 'utf-8' @return: a string of type C{str} """ if isinstance(x, bytes) and str != bytes: return x.decode(encoding) return x def unicode2bytes(x, encoding='utf-8', errors='strict'): """ Convert a unicode string to C{bytes}. @param x: a unicode string, of type C{unicode} on Python 2, or C{str} on Python 3. @param encoding: an optional codec, default: 'utf-8' @param errors: error handling scheme, default 'strict' @return: a string of type C{bytes} """ if isinstance(x, text_type): x = x.encode(encoding, errors) return x def bytes2unicode(x, encoding='utf-8', errors='strict'): """ Convert a C{bytes} to a unicode string. @param x: a unicode string, of type C{unicode} on Python 2, or C{str} on Python 3. @param encoding: an optional codec, default: 'utf-8' @param errors: error handling scheme, default 'strict' @return: a unicode string of type C{unicode} on Python 2, or C{str} on Python 3. """ if isinstance(x, (text_type, type(None))): return x return text_type(x, encoding, errors) __all__ = [ "NativeStringIO", "bytes2NativeString", "bytes2unicode", "unicode2bytes" ] buildbot-3.4.0/worker/buildbot_worker/exceptions.py000066400000000000000000000023321413250514000225560ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function class AbandonChain(Exception): """A series of chained steps can raise this exception to indicate that one of the intermediate RunProcesses has failed, such that there is no point in running the remainder. The first argument to the exception is the 'rc' - the non-zero exit code of the failing ShellCommand. The second is an optional error message.""" def __repr__(self): return "".format(self.args[0]) buildbot-3.4.0/worker/buildbot_worker/interfaces.py000066400000000000000000000071311413250514000225220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # disable pylint warnings triggered by interface definitions # pylint: disable=no-self-argument # pylint: disable=no-method-argument # pylint: disable=inherit-non-class from __future__ import absolute_import from __future__ import print_function from zope.interface import Interface class IWorkerCommand(Interface): """This interface is implemented by all of the worker's Command subclasses. It specifies how the worker can start, interrupt, and query the various Commands running on behalf of the buildmaster.""" def __init__(builder, stepId, args): """Create the Command. 'builder' is a reference to the parent buildbot_worker.base.WorkerForBuilderBase instance, which will be used to send status updates (by calling builder.sendStatus). 'stepId' is a random string which helps correlate worker logs with the master. 'args' is a dict of arguments that comes from the master-side BuildStep, with contents that are specific to the individual Command subclass. This method is not intended to be subclassed.""" def setup(args): """This method is provided for subclasses to override, to extract parameters from the 'args' dictionary. The default implementation does nothing. It will be called from __init__""" def start(): """Begin the command, and return a Deferred. While the command runs, it should send status updates to the master-side BuildStep by calling self.sendStatus(status). The 'status' argument is typically a dict with keys like 'stdout', 'stderr', and 'rc'. When the step completes, it should fire the Deferred (the results are not used). If an exception occurs during execution, it may also errback the deferred, however any reasonable errors should be trapped and indicated with a non-zero 'rc' status rather than raising an exception. Exceptions should indicate problems within the buildbot itself, not problems in the project being tested. 
""" def interrupt(): """This is called to tell the Command that the build is being stopped and therefore the command should be terminated as quickly as possible. The command may continue to send status updates, up to and including an 'rc' end-of-command update (which should indicate an error condition). The Command's deferred should still be fired when the command has finally completed. If the build is being stopped because the worker it shutting down or because the connection to the buildmaster has been lost, the status updates will simply be discarded. The Command does not need to be aware of this. Child shell processes should be killed. Simple ShellCommand classes can just insert a header line indicating that the process will be killed, then os.kill() the child.""" buildbot-3.4.0/worker/buildbot_worker/invalid_utf8.bugfix000066400000000000000000000001611413250514000236230ustar00rootroot00000000000000- Fixed escaping of invalid UTF-8 sequences in log files that are being watched by the worker (:issue:`4744`). buildbot-3.4.0/worker/buildbot_worker/monkeypatches/000077500000000000000000000000001413250514000226755ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/monkeypatches/__init__.py000066400000000000000000000015271413250514000250130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members def patch_all(for_tests=False): if for_tests: from buildbot_worker.monkeypatches import testcase_assert testcase_assert.patch() buildbot-3.4.0/worker/buildbot_worker/monkeypatches/testcase_assert.py000066400000000000000000000043211413250514000264430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from future.utils import string_types import re import unittest def _assertRaisesRegexp(self, expected_exception, expected_regexp, callable_obj, *args, **kwds): """ Asserts that the message in a raised exception matches a regexp. This is a simple clone of unittest.TestCase.assertRaisesRegexp() method introduced in python 2.7. The goal for this function is to behave exactly as assertRaisesRegexp() in standard library. 
""" exception = None try: callable_obj(*args, **kwds) except expected_exception as ex: # let unexpected exceptions pass through exception = ex if exception is None: self.fail("{0} not raised".format(str(expected_exception.__name__))) if isinstance(expected_regexp, string_types): expected_regexp = re.compile(expected_regexp) if not expected_regexp.search(str(exception)): self.fail('"{0}" does not match "{1}"'.format( expected_regexp.pattern, str(exception))) def patch(): hasAssertRaisesRegexp = getattr(unittest.TestCase, "assertRaisesRegexp", None) hasAssertRaisesRegex = getattr(unittest.TestCase, "assertRaisesRegex", None) if not hasAssertRaisesRegexp: # Python 2.6 unittest.TestCase.assertRaisesRegexp = _assertRaisesRegexp if not hasAssertRaisesRegex: # Python 2.6 and Python 2.7 unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp buildbot-3.4.0/worker/buildbot_worker/null.py000066400000000000000000000042031413250514000213460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from twisted.internet import defer from buildbot_worker.base import BotBase from buildbot_worker.base import WorkerBase from buildbot_worker.pb import WorkerForBuilderPbLike class WorkerForBuilderNull(WorkerForBuilderPbLike): pass class BotNull(BotBase): WorkerForBuilder = WorkerForBuilderNull class LocalWorker(WorkerBase): def __init__(self, name, basedir, umask=None, unicode_encoding=None, delete_leftover_dirs=False): super().__init__(name, basedir, BotNull, umask=umask, unicode_encoding=unicode_encoding, delete_leftover_dirs=delete_leftover_dirs) @defer.inlineCallbacks def startService(self): # importing here to avoid dependency on buildbot master package from buildbot.worker.protocols.null import Connection yield WorkerBase.startService(self) self.workername = self.name conn = Connection(self) # I don't have a master property, but my parent has. master = self.parent.master res = yield master.workers.newConnection(conn, self.name) if res: yield self.parent.attached(conn) # detached() will be called automatically on connection disconnection which is # invoked from the master side when the AbstarctWorker.stopService() is called. buildbot-3.4.0/worker/buildbot_worker/pb.py000066400000000000000000000331171413250514000210030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import os.path import signal from twisted.application.internet import ClientService from twisted.application.internet import backoffPolicy from twisted.cred import credentials from twisted.internet import defer from twisted.internet import reactor from twisted.internet import task from twisted.internet.endpoints import clientFromString from twisted.python import log from twisted.spread import pb from buildbot_worker import util from buildbot_worker.base import BotBase from buildbot_worker.base import WorkerBase from buildbot_worker.base import WorkerForBuilderBase from buildbot_worker.compat import unicode2bytes from buildbot_worker.pbutil import AutoLoginPBFactory from buildbot_worker.tunnel import HTTPTunnelEndpoint class UnknownCommand(pb.Error): pass class WorkerForBuilderPbLike(WorkerForBuilderBase): def protocol_args_setup(self, command, args): pass # Returns a Deferred def protocol_update(self, updates): return self.command_ref.callRemote("update", updates) def protocol_notify_on_disconnect(self): self.command_ref.notifyOnDisconnect(self.lostRemoteStep) # Returns a Deferred def protocol_complete(self, failure): self.command_ref.dontNotifyOnDisconnect(self.lostRemoteStep) return self.command_ref.callRemote("complete", failure) # Returns a Deferred def protocol_update_upload_file_close(self, writer): return writer.callRemote("close") # Returns a Deferred def protocol_update_upload_file_utime(self, writer, access_time, modified_time): return writer.callRemote("utime", (access_time, modified_time)) # Returns a Deferred def protocol_update_upload_file_write(self, writer, data): return writer.callRemote('write', data) # Returns a Deferred def 
protocol_update_upload_directory(self, writer): return writer.callRemote("unpack") # Returns a Deferred def protocol_update_upload_directory_write(self, writer, data): return writer.callRemote('write', data) # Returns a Deferred def protocol_update_read_file_close(self, reader): return reader.callRemote('close') # Returns a Deferred def protocol_update_read_file(self, reader, length): return reader.callRemote('read', length) class WorkerForBuilderPb(WorkerForBuilderPbLike, pb.Referenceable): pass class BotPb(BotBase, pb.Referenceable): WorkerForBuilder = WorkerForBuilderPb class BotFactory(AutoLoginPBFactory): """The protocol factory for the worker. This class implements the optional applicative keepalives, on top of AutoLoginPBFactory. 'keepaliveInterval' serves two purposes. The first is to keep the connection alive: it guarantees that there will be at least some traffic once every 'keepaliveInterval' seconds, which may help keep an interposed NAT gateway from dropping the address mapping because it thinks the connection has been abandoned. This also gives the operating system a chance to notice that the master has gone away, and inform us of such (although this could take several minutes). buildmaster host, port and maxDelay are accepted for backwards compatibility only. """ keepaliveInterval = None # None = do not use keepalives keepaliveTimer = None perspective = None _reactor = reactor def __init__(self, buildmaster_host, port, keepaliveInterval, maxDelay): AutoLoginPBFactory.__init__(self) self.keepaliveInterval = keepaliveInterval self.keepalive_lock = defer.DeferredLock() self._shutting_down = False # notified when shutdown is complete. 
self._shutdown_notifier = util.Notifier() self._active_keepalives = 0 def gotPerspective(self, perspective): log.msg("Connected to buildmaster; worker is ready") AutoLoginPBFactory.gotPerspective(self, perspective) self.perspective = perspective try: perspective.broker.transport.setTcpKeepAlive(1) except Exception: log.msg("unable to set SO_KEEPALIVE") if not self.keepaliveInterval: self.keepaliveInterval = 10 * 60 self.activity() if self.keepaliveInterval: log.msg("sending application-level keepalives every {0} seconds".format( self.keepaliveInterval)) self.startTimers() def startTimers(self): assert self.keepaliveInterval assert not self.keepaliveTimer @defer.inlineCallbacks def doKeepalive(): self._active_keepalives += 1 self.keepaliveTimer = None self.startTimers() yield self.keepalive_lock.acquire() self.currentKeepaliveWaiter = defer.Deferred() # Send the keepalive request. If an error occurs # was already dropped, so just log and ignore. log.msg("sending app-level keepalive") try: details = yield self.perspective.callRemote("keepalive") log.msg("Master replied to keepalive, everything's fine") self.currentKeepaliveWaiter.callback(details) self.currentKeepaliveWaiter = None except (pb.PBConnectionLost, pb.DeadReferenceError): log.msg("connection already shut down when attempting keepalive") except Exception as e: log.err(e, "error sending keepalive") finally: self.keepalive_lock.release() self._active_keepalives -= 1 self._checkNotifyShutdown() self.keepaliveTimer = self._reactor.callLater(self.keepaliveInterval, doKeepalive) def _checkNotifyShutdown(self): if self._active_keepalives == 0 and self._shutting_down and \ self._shutdown_notifier is not None: self._shutdown_notifier.notify(None) self._shutdown_notifier = None def stopTimers(self): self._shutting_down = True if self.keepaliveTimer: # by cancelling the timer we are guaranteed that doKeepalive() won't be called again, # as there's no interruption point between doKeepalive() beginning and call to # 
startTimers() self.keepaliveTimer.cancel() self.keepaliveTimer = None self._checkNotifyShutdown() def activity(self, res=None): """Subclass or monkey-patch this method to be alerted whenever there is active communication between the master and worker.""" def stopFactory(self): self.stopTimers() AutoLoginPBFactory.stopFactory(self) @defer.inlineCallbacks def waitForCompleteShutdown(self): # This function waits for a complete shutdown to happen. It's fired when all keepalives # have been finished and there are no pending ones. if self._shutdown_notifier is not None: yield self._shutdown_notifier.wait() class Worker(WorkerBase): """The service class to be instantiated from buildbot.tac to just pass a connection string, set buildmaster_host and port to None, and use connection_string. maxdelay is deprecated in favor of using twisted's backoffPolicy. """ def __init__(self, buildmaster_host, port, name, passwd, basedir, keepalive, usePTY=None, keepaliveTimeout=None, umask=None, maxdelay=None, numcpus=None, unicode_encoding=None, useTls=None, allow_shutdown=None, maxRetries=None, connection_string=None, delete_leftover_dirs=False, proxy_connection_string=None): assert usePTY is None, "worker-side usePTY is not supported anymore" assert (connection_string is None or (buildmaster_host, port) == (None, None)), ( "If you want to supply a connection string, " "then set host and port to None") bot_class = BotPb WorkerBase.__init__( self, name, basedir, bot_class, umask=umask, unicode_encoding=unicode_encoding, delete_leftover_dirs=delete_leftover_dirs) if keepalive == 0: keepalive = None name = unicode2bytes(name, self.bot.unicode_encoding) passwd = unicode2bytes(passwd, self.bot.unicode_encoding) self.numcpus = numcpus self.shutdown_loop = None if allow_shutdown == 'signal': if not hasattr(signal, 'SIGHUP'): raise ValueError("Can't install signal handler") elif allow_shutdown == 'file': self.shutdown_file = os.path.join(basedir, 'shutdown.stamp') self.shutdown_mtime = 0 
self.allow_shutdown = allow_shutdown bf = self.bf = BotFactory(buildmaster_host, port, keepalive, maxdelay) bf.startLogin( credentials.UsernamePassword(name, passwd), client=self.bot) def get_connection_string(host, port): if useTls: connection_type = 'tls' else: connection_type = 'tcp' return '{}:host={}:port={}'.format( connection_type, host.replace(':', r'\:'), # escape ipv6 addresses port) assert not (proxy_connection_string and connection_string), ( "If you want to use HTTP tunneling, then supply build master " "host and port rather than a connection string") if proxy_connection_string: log.msg("Using HTTP tunnel to connect through proxy") proxy_endpoint = clientFromString(reactor, proxy_connection_string) endpoint = HTTPTunnelEndpoint(buildmaster_host, port, proxy_endpoint) if useTls: from twisted.internet.endpoints import wrapClientTLS from twisted.internet.ssl import optionsForClientTLS contextFactory = optionsForClientTLS(hostname=buildmaster_host) endpoint = wrapClientTLS(contextFactory, endpoint) else: if connection_string is None: connection_string = get_connection_string(buildmaster_host, port) endpoint = clientFromString(reactor, connection_string) def policy(attempt): if maxRetries and attempt >= maxRetries: reactor.stop() return backoffPolicy()(attempt) pb_service = ClientService(endpoint, bf, retryPolicy=policy) self.addService(pb_service) def startService(self): WorkerBase.startService(self) if self.allow_shutdown == 'signal': log.msg("Setting up SIGHUP handler to initiate shutdown") signal.signal(signal.SIGHUP, self._handleSIGHUP) elif self.allow_shutdown == 'file': log.msg("Watching {0}'s mtime to initiate shutdown".format( self.shutdown_file)) if os.path.exists(self.shutdown_file): self.shutdown_mtime = os.path.getmtime(self.shutdown_file) self.shutdown_loop = loop = task.LoopingCall(self._checkShutdownFile) loop.start(interval=10) @defer.inlineCallbacks def stopService(self): if self.shutdown_loop: self.shutdown_loop.stop() self.shutdown_loop 
= None yield WorkerBase.stopService(self) yield self.bf.waitForCompleteShutdown() def _handleSIGHUP(self, *args): log.msg("Initiating shutdown because we got SIGHUP") return self.gracefulShutdown() def _checkShutdownFile(self): if os.path.exists(self.shutdown_file) and \ os.path.getmtime(self.shutdown_file) > self.shutdown_mtime: log.msg("Initiating shutdown because {0} was touched".format( self.shutdown_file)) self.gracefulShutdown() # In case the shutdown fails, update our mtime so we don't keep # trying to shutdown over and over again. # We do want to be able to try again later if the master is # restarted, so we'll keep monitoring the mtime. self.shutdown_mtime = os.path.getmtime(self.shutdown_file) def gracefulShutdown(self): """Start shutting down""" if not self.bf.perspective: log.msg("No active connection, shutting down NOW") reactor.stop() return None log.msg( "Telling the master we want to shutdown after any running builds are finished") d = self.bf.perspective.callRemote("shutdown") def _shutdownfailed(err): if err.check(AttributeError): log.msg( "Master does not support worker initiated shutdown. Upgrade master to 0.8.3" "or later to use this feature.") else: log.msg('callRemote("shutdown") failed') log.err(err) d.addErrback(_shutdownfailed) return d buildbot-3.4.0/worker/buildbot_worker/pbutil.py000066400000000000000000000107071413250514000217010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """Base classes handy for use with PB clients. """ from __future__ import absolute_import from __future__ import print_function from future.utils import iteritems from twisted.cred import error from twisted.internet import reactor from twisted.python import log from twisted.spread import pb from twisted.spread.pb import PBClientFactory from buildbot_worker.compat import bytes2unicode class AutoLoginPBFactory(PBClientFactory): """Factory for PB brokers that are managed through a ClientService. Upon reconnect issued by ClientService this factory will re-login. Instead of using f.getRootObject (which gives a Deferred that can only be fired once), override the gotRootObject method. GR -> yes in case a user would use that to be notified of root object appearances, it wouldn't work. But getRootObject() can itself be used as much as one wants. Instead of using the f.login (which is also one-shot), call f.startLogin() with the credentials and client, and override the gotPerspective method. gotRootObject and gotPerspective will be called each time the object is received (once per successful connection attempt). If an authorization error occurs, failedToGetPerspective() will be invoked. 
""" def clientConnectionMade(self, broker): PBClientFactory.clientConnectionMade(self, broker) self.doLogin(self._root, broker) self.gotRootObject(self._root) def login(self, *args): raise RuntimeError("login is one-shot: use startLogin instead") def startLogin(self, credentials, client=None): self._credentials = credentials self._client = client def doLogin(self, root, broker): d = self._cbSendUsername(root, self._credentials.username, self._credentials.password, self._client) d.addCallbacks(self.gotPerspective, self.failedToGetPerspective, errbackArgs=(broker,)) return d # methods to override def gotPerspective(self, perspective): """The remote avatar or perspective (obtained each time this factory connects) is now available.""" def gotRootObject(self, root): """The remote root object (obtained each time this factory connects) is now available. This method will be called each time the connection is established and the object reference is retrieved.""" def failedToGetPerspective(self, why, broker): """The login process failed, most likely because of an authorization failure (bad password), but it is also possible that we lost the new connection before we managed to send our credentials. """ log.msg("ReconnectingPBClientFactory.failedToGetPerspective") # put something useful in the logs if why.check(pb.PBConnectionLost): log.msg("we lost the brand-new connection") # fall through elif why.check(error.UnauthorizedLogin): log.msg("unauthorized login; check worker name and password") # fall through else: log.err(why, 'While trying to connect:') reactor.stop() return # lose the current connection, which will trigger a retry broker.transport.loseConnection() def decode(data, encoding='utf-8', errors='strict'): """We need to convert a dictionary where keys and values are bytes, to unicode strings. This happens when a Python 2 master sends a dictionary back to a Python 3 worker. 
""" data_type = type(data) if data_type == bytes: return bytes2unicode(data, encoding, errors) if data_type in (dict, list, tuple): if data_type == dict: data = iteritems(data) return data_type(map(decode, data)) return data buildbot-3.4.0/worker/buildbot_worker/runprocess.py000066400000000000000000001111011413250514000225730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ Support for running 'shell commands' """ from __future__ import absolute_import from __future__ import print_function from future.builtins import range from future.utils import PY3 from future.utils import iteritems from future.utils import string_types from future.utils import text_type import os import pprint import re import signal import stat import subprocess import sys import traceback from codecs import getincrementaldecoder from collections import deque from tempfile import NamedTemporaryFile from twisted.internet import defer from twisted.internet import error from twisted.internet import protocol from twisted.internet import reactor from twisted.internet import task from twisted.python import failure from twisted.python import log from twisted.python import runtime from twisted.python.win32 import quoteArguments from buildbot_worker import util from buildbot_worker.compat import bytes2NativeString from buildbot_worker.compat import bytes2unicode from buildbot_worker.compat import unicode2bytes from buildbot_worker.exceptions import AbandonChain if runtime.platformType == 'posix': from twisted.internet.process import Process def win32_batch_quote(cmd_list, unicode_encoding='utf-8'): # Quote cmd_list to a string that is suitable for inclusion in a # Windows batch file. This is not quite the same as quoting it for the # shell, as cmd.exe doesn't support the %% escape in interactive mode. def escape_arg(arg): arg = bytes2NativeString(arg, unicode_encoding) arg = quoteArguments([arg]) # escape shell special characters arg = re.sub(r'[@()^"<>&|]', r'^\g<0>', arg) # prevent variable expansion return arg.replace('%', '%%') return ' '.join(map(escape_arg, cmd_list)) def shell_quote(cmd_list, unicode_encoding='utf-8'): # attempt to quote cmd_list such that a shell will properly re-interpret # it. 
The pipes module is only available on UNIX; also, the quote # function is undocumented (although it looks like it will be documented # soon: http://bugs.python.org/issue9723). Finally, it has a nasty bug # in some versions where an empty string is not quoted. # # So: # - use pipes.quote on UNIX, handling '' as a special case # - use our own custom function on Windows if isinstance(cmd_list, bytes): cmd_list = bytes2unicode(cmd_list, unicode_encoding) if runtime.platformType == 'win32': return win32_batch_quote(cmd_list, unicode_encoding) # only available on unix import pipes # pylint: disable=import-outside-toplevel def quote(e): if not e: return u'""' e = bytes2unicode(e, unicode_encoding) return pipes.quote(e) return u" ".join([quote(e) for e in cmd_list]) class LogFileWatcher(object): POLL_INTERVAL = 2 def __init__(self, command, name, logfile, follow=False, poll=True): self.command = command self.name = name self.logfile = logfile decoderFactory = getincrementaldecoder( self.command.builder.unicode_encoding) self.logDecode = decoderFactory(errors='replace') log.msg("LogFileWatcher created to watch {0}".format(logfile)) # we are created before the ShellCommand starts. If the logfile we're # supposed to be watching already exists, record its size and # ctime/mtime so we can tell when it starts to change. 
self.old_logfile_stats = self.statFile() self.started = False # follow the file, only sending back lines # added since we started watching self.follow = follow # every 2 seconds we check on the file again self.poller = task.LoopingCall(self.poll) if poll else None def start(self): self.poller.start(self.POLL_INTERVAL).addErrback(self._cleanupPoll) def _cleanupPoll(self, err): log.err(err, msg="Polling error") self.poller = None def stop(self): self.poll() if self.poller is not None: self.poller.stop() if self.started: self.f.close() def statFile(self): if os.path.exists(self.logfile): s = os.stat(self.logfile) return (s[stat.ST_CTIME], s[stat.ST_MTIME], s[stat.ST_SIZE]) return None def poll(self): if not self.started: s = self.statFile() if s == self.old_logfile_stats: return # not started yet if not s: # the file was there, but now it's deleted. Forget about the # initial state, clearly the process has deleted the logfile # in preparation for creating a new one. self.old_logfile_stats = None return # no file to work with self.f = open(self.logfile, "rb") # if we only want new lines, seek to # where we stat'd so we only find new # lines if self.follow: self.f.seek(s[2], 0) self.started = True # Mac OS X and Linux differ in behaviour when reading from a file that has previously # reached EOF. On Linux, any new data that has been appended to the file will be returned. # On Mac OS X, the empty string will always be returned. Seeking to the current position # in the file resets the EOF flag on Mac OS X and will allow future reads to work as # intended. 
self.f.seek(self.f.tell(), 0) while True: data = self.f.read(10000) if not data: return decodedData = self.logDecode.decode(data) self.command.addLogfile(self.name, decodedData) if runtime.platformType == 'posix': class ProcGroupProcess(Process): """Simple subclass of Process to also make the spawned process a process group leader, so we can kill all members of the process group.""" def _setupChild(self, *args, **kwargs): Process._setupChild(self, *args, **kwargs) # this will cause the child to be the leader of its own process group; # it's also spelled setpgrp() on BSD, but this spelling seems to work # everywhere os.setpgid(0, 0) class RunProcessPP(protocol.ProcessProtocol): debug = False def __init__(self, command): self.command = command self.pending_stdin = b"" self.stdin_finished = False self.killed = False decoderFactory = getincrementaldecoder( self.command.builder.unicode_encoding) self.stdoutDecode = decoderFactory(errors='replace') self.stderrDecode = decoderFactory(errors='replace') def setStdin(self, data): assert not self.connected self.pending_stdin = data def connectionMade(self): if self.debug: log.msg("RunProcessPP.connectionMade") if self.command.useProcGroup: if self.debug: log.msg(" recording pid {0} as subprocess pgid".format( self.transport.pid)) self.transport.pgid = self.transport.pid if self.pending_stdin: if self.debug: log.msg(" writing to stdin") self.transport.write(self.pending_stdin) if self.debug: log.msg(" closing stdin") self.transport.closeStdin() def outReceived(self, data): if self.debug: log.msg("RunProcessPP.outReceived") decodedData = self.stdoutDecode.decode(data) self.command.addStdout(decodedData) def errReceived(self, data): if self.debug: log.msg("RunProcessPP.errReceived") decodedData = self.stderrDecode.decode(data) self.command.addStderr(decodedData) def processEnded(self, status_object): if self.debug: log.msg("RunProcessPP.processEnded", status_object) # status_object is a Failure wrapped around an # 
error.ProcessTerminated or and error.ProcessDone. # requires twisted >= 1.0.4 to overcome a bug in process.py sig = status_object.value.signal rc = status_object.value.exitCode # sometimes, even when we kill a process, GetExitCodeProcess will still return # a zero exit status. So we force it. See # http://stackoverflow.com/questions/2061735/42-passed-to-terminateprocess-sometimes-getexitcodeprocess-returns-0 if self.killed and rc == 0: log.msg( "process was killed, but exited with status 0; faking a failure") # windows returns '1' even for signalled failures, while POSIX # returns -1 if runtime.platformType == 'win32': rc = 1 else: rc = -1 self.command.finished(sig, rc) class RunProcess(object): """ This is a helper class, used by worker commands to run programs in a child shell. """ BACKUP_TIMEOUT = 5 interruptSignal = "KILL" CHUNK_LIMIT = 128 * 1024 # Don't send any data until at least BUFFER_SIZE bytes have been collected # or BUFFER_TIMEOUT elapsed BUFFER_SIZE = 64 * 1024 BUFFER_TIMEOUT = 5 # For sending elapsed time: startTime = None elapsedTime = None # For scheduling future events _reactor = reactor # I wish we had easy access to CLOCK_MONOTONIC in Python: # http://www.opengroup.org/onlinepubs/000095399/functions/clock_getres.html # Then changes to the system clock during a run wouldn't effect the "elapsed # time" results. def __init__(self, builder, command, workdir, environ=None, sendStdout=True, sendStderr=True, sendRC=True, timeout=None, maxTime=None, sigtermTime=None, initialStdin=None, keepStdout=False, keepStderr=False, logEnviron=True, logfiles=None, usePTY=False, useProcGroup=True): """ @param keepStdout: if True, we keep a copy of all the stdout text that we've seen. This copy is available in self.stdout, which can be read after the command has finished. @param keepStderr: same, for stderr @param usePTY: true to use a PTY, false to not use a PTY. 
@param useProcGroup: (default True) use a process group for non-PTY process invocations """ if logfiles is None: logfiles = {} self.builder = builder if isinstance(command, list): def obfus(w): if (isinstance(w, tuple) and len(w) == 3 and w[0] == 'obfuscated'): return util.Obfuscated(w[1], w[2]) return w command = [obfus(w) for w in command] # We need to take unicode commands and arguments and encode them using # the appropriate encoding for the worker. This is mostly platform # specific, but can be overridden in the worker's buildbot.tac file. # # Encoding the command line here ensures that the called executables # receive arguments as bytestrings encoded with an appropriate # platform-specific encoding. It also plays nicely with twisted's # spawnProcess which checks that arguments are regular strings or # unicode strings that can be encoded as ascii (which generates a # warning). def to_bytes(cmd): if isinstance(cmd, (tuple, list)): for i, a in enumerate(cmd): if isinstance(a, text_type): cmd[i] = a.encode(self.builder.unicode_encoding) elif isinstance(cmd, text_type): cmd = cmd.encode(self.builder.unicode_encoding) return cmd self.command = to_bytes(util.Obfuscated.get_real(command)) self.fake_command = to_bytes(util.Obfuscated.get_fake(command)) self.sendStdout = sendStdout self.sendStderr = sendStderr self.sendRC = sendRC self.logfiles = logfiles self.workdir = workdir self.process = None if not os.path.exists(workdir): os.makedirs(workdir) if environ: for key, v in iteritems(environ): if isinstance(v, list): # Need to do os.pathsep translation. We could either do that # by replacing all incoming ':'s with os.pathsep, or by # accepting lists. I like lists better. # If it's not a string, treat it as a sequence to be # turned in to a string. 
environ[key] = os.pathsep.join(environ[key]) if "PYTHONPATH" in environ: environ['PYTHONPATH'] += os.pathsep + "${PYTHONPATH}" # do substitution on variable values matching pattern: ${name} p = re.compile(r'\${([0-9a-zA-Z_]*)}') def subst(match): return os.environ.get(match.group(1), "") newenv = {} for key in os.environ: # setting a key to None will delete it from the worker # environment if key not in environ or environ[key] is not None: newenv[key] = os.environ[key] for key, v in iteritems(environ): if v is not None: if not isinstance(v, string_types): raise RuntimeError("'env' values must be strings or " "lists; key '{0}' is incorrect".format(key)) newenv[key] = p.sub(subst, v) self.environ = newenv else: # not environ self.environ = os.environ.copy() self.initialStdin = to_bytes(initialStdin) self.logEnviron = logEnviron self.timeout = timeout self.ioTimeoutTimer = None self.sigtermTime = sigtermTime self.maxTime = maxTime self.maxTimeoutTimer = None self.killTimer = None self.keepStdout = keepStdout self.keepStderr = keepStderr self.buffered = deque() self.buflen = 0 self.sendBuffersTimer = None assert usePTY in (True, False), \ "Unexpected usePTY argument value: {!r}. Expected boolean.".format( usePTY) self.usePTY = usePTY # usePTY=True is a convenience for cleaning up all children and # grandchildren of a hung command. Fall back to usePTY=False on systems # and in situations where ptys cause problems. PTYs are posix-only, # and for .closeStdin to matter, we must use a pipe, not a PTY if runtime.platformType != "posix" or initialStdin is not None: if self.usePTY: self.sendStatus( {'header': "WARNING: disabling usePTY for this command"}) self.usePTY = False # use an explicit process group on POSIX, noting that usePTY always implies # a process group. 
if runtime.platformType != 'posix': useProcGroup = False elif self.usePTY: useProcGroup = True self.useProcGroup = useProcGroup self.logFileWatchers = [] for name, filevalue in self.logfiles.items(): filename = filevalue follow = False # check for a dictionary of options # filename is required, others are optional if isinstance(filevalue, dict): filename = filevalue['filename'] follow = filevalue.get('follow', False) w = LogFileWatcher(self, name, os.path.join(self.workdir, filename), follow=follow) self.logFileWatchers.append(w) def __repr__(self): return "<{0} '{1}'>".format(self.__class__.__name__, self.fake_command) def sendStatus(self, status): self.builder.sendUpdate(status) def start(self): # return a Deferred which fires (with the exit code) when the command # completes if self.keepStdout: self.stdout = "" if self.keepStderr: self.stderr = "" self.deferred = defer.Deferred() try: self._startCommand() except Exception as e: log.err(failure.Failure(), "error in RunProcess._startCommand") self._addToBuffers('stderr', "error in RunProcess._startCommand (%s)\n" % str(e)) self._addToBuffers('stderr', traceback.format_exc()) self._sendBuffers() # pretend it was a shell error self.deferred.errback(AbandonChain(-1, 'Got exception (%s)' % str(e))) return self.deferred def _startCommand(self): # ensure workdir exists if not os.path.isdir(self.workdir): os.makedirs(self.workdir) log.msg("RunProcess._startCommand") self.pp = RunProcessPP(self) self.using_comspec = False self.command = unicode2bytes(self.command, encoding=self.builder.unicode_encoding) if isinstance(self.command, bytes): if runtime.platformType == 'win32': # allow %COMSPEC% to have args argv = os.environ['COMSPEC'].split() if '/c' not in argv: argv += ['/c'] argv += [self.command] self.using_comspec = True else: # for posix, use /bin/sh. 
for other non-posix, well, doesn't # hurt to try argv = [b'/bin/sh', b'-c', self.command] display = self.fake_command else: # On windows, CreateProcess requires an absolute path to the executable. # When we call spawnProcess below, we pass argv[0] as the executable. # So, for .exe's that we have absolute paths to, we can call directly # Otherwise, we should run under COMSPEC (usually cmd.exe) to # handle path searching, etc. if (runtime.platformType == 'win32' and not (bytes2unicode(self.command[0], self.builder.unicode_encoding).lower().endswith(".exe") and os.path.isabs(self.command[0]))): # allow %COMSPEC% to have args argv = os.environ['COMSPEC'].split() if '/c' not in argv: argv += ['/c'] argv += list(self.command) self.using_comspec = True else: argv = self.command # Attempt to format this for use by a shell, although the process # isn't perfect display = shell_quote(self.fake_command, self.builder.unicode_encoding) display = bytes2unicode(display, self.builder.unicode_encoding) # $PWD usually indicates the current directory; spawnProcess may not # update this value, though, so we set it explicitly here. This causes # weird problems (bug #456) on msys, though.. 
if not self.environ.get('MACHTYPE', None) == 'i686-pc-msys': self.environ['PWD'] = os.path.abspath(self.workdir) # self.stdin is handled in RunProcessPP.connectionMade log.msg(u" " + display) self._addToBuffers(u'header', display + u"\n") # then comes the secondary information msg = u" in dir {0}".format(self.workdir) if self.timeout: if self.timeout == 1: unit = u"sec" else: unit = u"secs" msg += u" (timeout {0} {1})".format(self.timeout, unit) if self.maxTime: if self.maxTime == 1: unit = u"sec" else: unit = u"secs" msg += u" (maxTime {0} {1})".format(self.maxTime, unit) log.msg(u" " + msg) self._addToBuffers(u'header', msg + u"\n") msg = " watching logfiles {0}".format(self.logfiles) log.msg(" " + msg) self._addToBuffers('header', msg + u"\n") # then the obfuscated command array for resolving unambiguity msg = u" argv: {0}".format(self.fake_command) log.msg(u" " + msg) self._addToBuffers('header', msg + u"\n") # then the environment, since it sometimes causes problems if self.logEnviron: msg = u" environment:\n" env_names = sorted(self.environ.keys()) for name in env_names: msg += u" {0}={1}\n".format(bytes2unicode(name, encoding=self.builder.unicode_encoding), bytes2unicode(self.environ[name], encoding=self.builder.unicode_encoding)) log.msg(u" environment:\n{0}".format(pprint.pformat(self.environ))) self._addToBuffers(u'header', msg) if self.initialStdin: msg = u" writing {0} bytes to stdin".format(len(self.initialStdin)) log.msg(u" " + msg) self._addToBuffers(u'header', msg + u"\n") msg = u" using PTY: {0}".format(bool(self.usePTY)) log.msg(u" " + msg) self._addToBuffers(u'header', msg + u"\n") # put data into stdin and close it, if necessary. 
This will be # buffered until connectionMade is called if self.initialStdin: self.pp.setStdin(self.initialStdin) self.startTime = util.now(self._reactor) # start the process self.process = self._spawnProcess( self.pp, argv[0], argv, self.environ, self.workdir, usePTY=self.usePTY) # set up timeouts if self.timeout: self.ioTimeoutTimer = self._reactor.callLater( self.timeout, self.doTimeout) if self.maxTime: self.maxTimeoutTimer = self._reactor.callLater( self.maxTime, self.doMaxTimeout) for w in self.logFileWatchers: w.start() def _spawnProcess(self, processProtocol, executable, args=(), env=None, path=None, uid=None, gid=None, usePTY=False, childFDs=None): """private implementation of reactor.spawnProcess, to allow use of L{ProcGroupProcess}""" if env is None: env = {} # use the ProcGroupProcess class, if available if runtime.platformType == 'posix': if self.useProcGroup and not usePTY: return ProcGroupProcess(reactor, executable, args, env, path, processProtocol, uid, gid, childFDs) # fall back if self.using_comspec: return self._spawnAsBatch(processProtocol, executable, args, env, path, usePTY=usePTY) return reactor.spawnProcess(processProtocol, executable, args, env, path, usePTY=usePTY) def _spawnAsBatch(self, processProtocol, executable, args, env, path, usePTY): """A cheat that routes around the impedance mismatch between twisted and cmd.exe with respect to escaping quotes""" # NamedTemporaryFile differs in PY2 and PY3. # In PY2, it needs encoded str and its encoding cannot be specified. # In PY3, it needs str which is unicode and its encoding can be specified. if PY3: tf = NamedTemporaryFile(mode='w+', dir='.', suffix=".bat", delete=False, encoding=self.builder.unicode_encoding) else: tf = NamedTemporaryFile(mode='w+', dir='.', suffix=".bat", delete=False) # echo off hides this cheat from the log files. 
tf.write(u"@echo off\n") if isinstance(self.command, (string_types, bytes)): tf.write(bytes2NativeString(self.command, self.builder.unicode_encoding)) else: tf.write(win32_batch_quote(self.command, self.builder.unicode_encoding)) tf.close() argv = os.environ['COMSPEC'].split() # allow %COMSPEC% to have args if '/c' not in argv: argv += ['/c'] argv += [tf.name] def unlink_temp(result): os.unlink(tf.name) return result self.deferred.addBoth(unlink_temp) return reactor.spawnProcess(processProtocol, executable, argv, env, path, usePTY=usePTY) def _chunkForSend(self, data): """ limit the chunks that we send over PB to 128k, since it has a hardwired string-size limit of 640k. """ LIMIT = self.CHUNK_LIMIT for i in range(0, len(data), LIMIT): yield data[i:i + LIMIT] def _collapseMsg(self, msg): """ Take msg, which is a dictionary of lists of output chunks, and concatenate all the chunks into a single string """ retval = {} for logname in msg: data = u"" for m in msg[logname]: m = bytes2unicode(m, self.builder.unicode_encoding) data += m if isinstance(logname, tuple) and logname[0] == 'log': retval['log'] = (logname[1], data) else: retval[logname] = data return retval def _sendMessage(self, msg): """ Collapse and send msg to the master """ if not msg: return msg = self._collapseMsg(msg) self.sendStatus(msg) def _bufferTimeout(self): self.sendBuffersTimer = None self._sendBuffers() def _sendBuffers(self): """ Send all the content in our buffers. """ msg_size = 0 lastlog = None logdata = [] while self.buffered: # Grab the next bits from the buffer logname, data = self.buffered.popleft() # If this log is different than the last one, then we have to send # out the message so far. This is because the message is # transferred as a dictionary, which makes the ordering of keys # unspecified, and makes it impossible to interleave data from # different logs. A future enhancement could be to change the # master to support a list of (logname, data) tuples instead of a # dictionary. 
# On our first pass through this loop lastlog is None if lastlog is None: lastlog = logname elif logname != lastlog: self._sendMessage({lastlog: logdata}) msg_size = 0 lastlog = logname logdata = [] # Chunkify the log data to make sure we're not sending more than # CHUNK_LIMIT at a time for chunk in self._chunkForSend(data): if not chunk: continue logdata.append(chunk) msg_size += len(chunk) if msg_size >= self.CHUNK_LIMIT: # We've gone beyond the chunk limit, so send out our # message. At worst this results in a message slightly # larger than (2*CHUNK_LIMIT)-1 self._sendMessage({logname: logdata}) logdata = [] msg_size = 0 self.buflen = 0 if logdata: self._sendMessage({logname: logdata}) if self.sendBuffersTimer: if self.sendBuffersTimer.active(): self.sendBuffersTimer.cancel() self.sendBuffersTimer = None def _addToBuffers(self, logname, data): """ Add data to the buffer for logname Start a timer to send the buffers if BUFFER_TIMEOUT elapses. If adding data causes the buffer size to grow beyond BUFFER_SIZE, then the buffers will be sent. 
""" n = len(data) self.buflen += n self.buffered.append((logname, data)) if self.buflen > self.BUFFER_SIZE: self._sendBuffers() elif not self.sendBuffersTimer: self.sendBuffersTimer = self._reactor.callLater( self.BUFFER_TIMEOUT, self._bufferTimeout) def addStdout(self, data): if self.sendStdout: self._addToBuffers('stdout', data) if self.keepStdout: self.stdout += data if self.ioTimeoutTimer: self.ioTimeoutTimer.reset(self.timeout) def addStderr(self, data): if self.sendStderr: self._addToBuffers('stderr', data) if self.keepStderr: self.stderr += data if self.ioTimeoutTimer: self.ioTimeoutTimer.reset(self.timeout) def addLogfile(self, name, data): self._addToBuffers(('log', name), data) if self.ioTimeoutTimer: self.ioTimeoutTimer.reset(self.timeout) def finished(self, sig, rc): self.elapsedTime = util.now(self._reactor) - self.startTime log.msg("command finished with signal {0}, exit code {1}, elapsedTime: {2:0.6f}".format( sig, rc, self.elapsedTime)) for w in self.logFileWatchers: # this will send the final updates w.stop() self._sendBuffers() if sig is not None: rc = -1 if self.sendRC: if sig is not None: self.sendStatus( {'header': "process killed by signal {0}\n".format(sig)}) self.sendStatus({'rc': rc}) self.sendStatus({'header': "elapsedTime={0:0.6f}\n".format(self.elapsedTime)}) self._cancelTimers() d = self.deferred self.deferred = None if d: d.callback(rc) else: log.msg("Hey, command {0} finished twice".format(self)) def failed(self, why): self._sendBuffers() log.msg("RunProcess.failed: command failed: {0}".format(why)) self._cancelTimers() d = self.deferred self.deferred = None if d: d.errback(why) else: log.msg("Hey, command {0} finished twice".format(self)) def doTimeout(self): self.ioTimeoutTimer = None msg = ( "command timed out: {0} seconds without output running {1}".format( self.timeout, self.fake_command)) self.kill(msg) def doMaxTimeout(self): self.maxTimeoutTimer = None msg = "command timed out: {0} seconds elapsed running {1}".format( 
self.maxTime, self.fake_command) self.kill(msg) def isDead(self): if self.process.pid is None: return True pid = int(self.process.pid) try: os.kill(pid, 0) except OSError: return True # dead return False # alive def checkProcess(self): self.sigtermTimer = None if not self.isDead(): hit = self.sendSig(self.interruptSignal) else: hit = 1 self.cleanUp(hit) def cleanUp(self, hit): if not hit: log.msg("signalProcess/os.kill failed both times") if runtime.platformType == "posix": # we only do this under posix because the win32eventreactor # blocks here until the process has terminated, while closing # stderr. This is weird. self.pp.transport.loseConnection() if self.deferred: # finished ought to be called momentarily. Just in case it doesn't, # set a timer which will abandon the command. self.killTimer = self._reactor.callLater(self.BACKUP_TIMEOUT, self.doBackupTimeout) def sendSig(self, interruptSignal): hit = 0 # try signalling the process group if not hit and self.useProcGroup and runtime.platformType == "posix": sig = getattr(signal, "SIG" + interruptSignal, None) if sig is None: log.msg("signal module is missing SIG{0}".format(interruptSignal)) elif not hasattr(os, "kill"): log.msg("os module is missing the 'kill' function") elif self.process.pgid is None: log.msg("self.process has no pgid") else: log.msg("trying to kill process group {0}".format( self.process.pgid)) try: os.killpg(self.process.pgid, sig) log.msg(" signal {0} sent successfully".format(sig)) self.process.pgid = None hit = 1 except OSError: log.msg('failed to kill process group (ignored): {0}'.format( (sys.exc_info()[1]))) # probably no-such-process, maybe because there is no process # group elif runtime.platformType == "win32": if interruptSignal is None: log.msg("interruptSignal==None, only pretending to kill child") elif self.process.pid is not None: if interruptSignal == "TERM": log.msg("using TASKKILL PID /T to kill pid {0}".format( self.process.pid)) subprocess.check_call( "TASKKILL /PID {0} 
/T".format(self.process.pid)) log.msg("taskkill'd pid {0}".format(self.process.pid)) hit = 1 elif interruptSignal == "KILL": log.msg("using TASKKILL PID /F /T to kill pid {0}".format( self.process.pid)) subprocess.check_call( "TASKKILL /F /PID {0} /T".format(self.process.pid)) log.msg("taskkill'd pid {0}".format(self.process.pid)) hit = 1 # try signalling the process itself (works on Windows too, sorta) if not hit: try: log.msg("trying process.signalProcess('{0}')".format( interruptSignal)) self.process.signalProcess(interruptSignal) log.msg(" signal {0} sent successfully".format(interruptSignal)) hit = 1 except OSError: log.err("from process.signalProcess:") # could be no-such-process, because they finished very recently except error.ProcessExitedAlready: log.msg("Process exited already - can't kill") # the process has already exited, and likely finished() has # been called already or will be called shortly return hit def kill(self, msg): # This may be called by the timeout, or when the user has decided to # abort this build. self._sendBuffers() self._cancelTimers() msg += ", attempting to kill" log.msg(msg) self.sendStatus({'header': "\n" + msg + "\n"}) # let the PP know that we are killing it, so that it can ensure that # the exit status comes out right self.pp.killed = True sendSigterm = self.sigtermTime is not None if sendSigterm: self.sendSig("TERM") self.sigtermTimer = self._reactor.callLater( self.sigtermTime, self.checkProcess) else: hit = self.sendSig(self.interruptSignal) self.cleanUp(hit) def doBackupTimeout(self): log.msg("we tried to kill the process, and it wouldn't die.." 
" finish anyway") self.killTimer = None signalName = "SIG" + self.interruptSignal self.sendStatus({'header': signalName + " failed to kill process\n"}) if self.sendRC: self.sendStatus({'header': "using fake rc=-1\n"}) self.sendStatus({'rc': -1}) self.failed(RuntimeError(signalName + " failed to kill process")) def _cancelTimers(self): for timerName in ('ioTimeoutTimer', 'killTimer', 'maxTimeoutTimer', 'sendBuffersTimer', 'sigtermTimer'): timer = getattr(self, timerName, None) if timer: timer.cancel() setattr(self, timerName, None) buildbot-3.4.0/worker/buildbot_worker/scripts/000077500000000000000000000000001413250514000215125ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/scripts/__init__.py000066400000000000000000000000001413250514000236110ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/scripts/base.py000066400000000000000000000026371413250514000230060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # This module is left for backward compatibility of old-named worker API. # It should never be imported by Buildbot. 
import os def isWorkerDir(dir): def print_error(error_message): print("{0}\ninvalid worker directory '{1}'".format(error_message, dir)) buildbot_tac = os.path.join(dir, "buildbot.tac") try: with open(buildbot_tac) as f: contents = f.read() except IOError as exception: print_error("error reading '{0}': {1}".format( buildbot_tac, exception.strerror)) return False if "Application('buildbot-worker')" not in contents: print_error("unexpected content in '{0}'".format(buildbot_tac)) return False return True buildbot-3.4.0/worker/buildbot_worker/scripts/create_worker.py000066400000000000000000000162331413250514000247250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import division from __future__ import print_function import os workerTACTemplate = [ """ import os from buildbot_worker.bot import Worker from twisted.application import service basedir = %(basedir)r rotateLength = %(log-size)d maxRotatedFiles = %(log-count)s # if this is a relocatable tac file, get the directory containing the TAC if basedir == '.': import os.path basedir = os.path.abspath(os.path.dirname(__file__)) # note: this line is matched against to check that this is a worker # directory; do not edit it. 
application = service.Application('buildbot-worker') """, """ from twisted.python.logfile import LogFile from twisted.python.log import ILogObserver, FileLogObserver logfile = LogFile.fromFullPath( os.path.join(basedir, "twistd.log"), rotateLength=rotateLength, maxRotatedFiles=maxRotatedFiles) application.setComponent(ILogObserver, FileLogObserver(logfile).emit) """, """ buildmaster_host = %(host)r port = %(port)d workername = %(name)r passwd = %(passwd)r keepalive = %(keepalive)d umask = %(umask)s maxdelay = %(maxdelay)d numcpus = %(numcpus)s allow_shutdown = %(allow-shutdown)s maxretries = %(maxretries)s use_tls = %(use-tls)s delete_leftover_dirs = %(delete-leftover-dirs)s proxy_connection_string = %(proxy-connection-string)s s = Worker(buildmaster_host, port, workername, passwd, basedir, keepalive, umask=umask, maxdelay=maxdelay, numcpus=numcpus, allow_shutdown=allow_shutdown, maxRetries=maxretries, useTls=use_tls, delete_leftover_dirs=delete_leftover_dirs, proxy_connection_string=proxy_connection_string) s.setServiceParent(application) """] class CreateWorkerError(Exception): """ Raised on errors while setting up worker directory. """ def _makeBaseDir(basedir, quiet): """ Make worker base directory if needed. @param basedir: worker base directory relative path @param quiet: if True, don't print info messages @raise CreateWorkerError: on error making base directory """ if os.path.exists(basedir): if not quiet: print("updating existing installation") return if not quiet: print("mkdir", basedir) try: os.mkdir(basedir) except OSError as exception: raise CreateWorkerError("error creating directory {0}: {1}".format( basedir, exception.strerror)) def _makeBuildbotTac(basedir, tac_file_contents, quiet): """ Create buildbot.tac file. If buildbot.tac file already exists with different contents, create buildbot.tac.new instead. 
@param basedir: worker base directory relative path @param tac_file_contents: contents of buildbot.tac file to write @param quiet: if True, don't print info messages @raise CreateWorkerError: on error reading or writing tac file """ tacfile = os.path.join(basedir, "buildbot.tac") if os.path.exists(tacfile): try: with open(tacfile, "rt") as f: oldcontents = f.read() except IOError as exception: raise CreateWorkerError("error reading {0}: {1}".format( tacfile, exception.strerror)) if oldcontents == tac_file_contents: if not quiet: print("buildbot.tac already exists and is correct") return if not quiet: print("not touching existing buildbot.tac") print("creating buildbot.tac.new instead") tacfile = os.path.join(basedir, "buildbot.tac.new") try: with open(tacfile, "wt") as f: f.write(tac_file_contents) os.chmod(tacfile, 0o600) except IOError as exception: raise CreateWorkerError("could not write {0}: {1}".format( tacfile, exception.strerror)) def _makeInfoFiles(basedir, quiet): """ Create info/* files inside basedir. 
@param basedir: worker base directory relative path @param quiet: if True, don't print info messages @raise CreateWorkerError: on error making info directory or writing info files """ def createFile(path, file, contents): filepath = os.path.join(path, file) if os.path.exists(filepath): return False if not quiet: print("Creating {0}, you need to edit it appropriately.".format( os.path.join("info", file))) try: open(filepath, "wt").write(contents) except IOError as exception: raise CreateWorkerError("could not write {0}: {1}".format( filepath, exception.strerror)) return True path = os.path.join(basedir, "info") if not os.path.exists(path): if not quiet: print("mkdir", path) try: os.mkdir(path) except OSError as exception: raise CreateWorkerError("error creating directory {0}: {1}".format( path, exception.strerror)) # create 'info/admin' file created = createFile(path, "admin", "Your Name Here \n") # create 'info/host' file created = createFile(path, "host", "Please put a description of this build host here\n") access_uri = os.path.join(path, "access_uri") if not os.path.exists(access_uri): if not quiet: print("Not creating {0} - add it if you wish".format( os.path.join("info", "access_uri"))) if created and not quiet: print("Please edit the files in {0} appropriately.".format(path)) def createWorker(config): basedir = config['basedir'] quiet = config['quiet'] if config['relocatable']: config['basedir'] = '.' 
asd = config['allow-shutdown'] if asd: config['allow-shutdown'] = repr(asd) if config['no-logrotate']: workerTAC = "".join([workerTACTemplate[0]] + workerTACTemplate[2:]) else: workerTAC = "".join(workerTACTemplate) contents = workerTAC % config try: _makeBaseDir(basedir, quiet) _makeBuildbotTac(basedir, contents, quiet) _makeInfoFiles(basedir, quiet) except CreateWorkerError as exception: print("{0}\nfailed to configure worker in {1}".format( exception, config['basedir'])) return 1 if not quiet: print("worker configured in {0}".format(basedir)) return 0 buildbot-3.4.0/worker/buildbot_worker/scripts/logwatcher.py000066400000000000000000000075261413250514000242350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import platform from twisted.internet import defer from twisted.internet import error from twisted.internet import protocol from twisted.internet import reactor from twisted.protocols.basic import LineOnlyReceiver from twisted.python.failure import Failure from buildbot_worker.compat import unicode2bytes class FakeTransport(object): disconnecting = False class WorkerTimeoutError(Exception): pass class TailProcess(protocol.ProcessProtocol): def outReceived(self, data): self.lw.dataReceived(unicode2bytes(data)) def errReceived(self, data): print("ERR: '{0}'".format(data)) class LogWatcher(LineOnlyReceiver): POLL_INTERVAL = 0.1 TIMEOUT_DELAY = 10.0 delimiter = unicode2bytes(os.linesep) def __init__(self, logfile): self.logfile = logfile self.in_reconfig = False self.transport = FakeTransport() self.pp = TailProcess() self.pp.lw = self self.timer = None def start(self): # If the log file doesn't exist, create it now. if not os.path.exists(self.logfile): open(self.logfile, 'a').close() # return a Deferred that fires when the start process has # finished. It errbacks with TimeoutError if the finish line has not # been seen within 10 seconds, and with ReconfigError if the error # line was seen. If the logfile could not be opened, it errbacks with # an IOError. 
if platform.system().lower() == 'sunos' and os.path.exists('/usr/xpg4/bin/tail'): tailBin = "/usr/xpg4/bin/tail" elif platform.system().lower() == 'haiku' and os.path.exists('/bin/tail'): tailBin = "/bin/tail" else: tailBin = "/usr/bin/tail" self.p = reactor.spawnProcess(self.pp, tailBin, ("tail", "-f", "-n", "0", self.logfile), env=os.environ, ) self.running = True d = defer.maybeDeferred(self._start) return d def _start(self): self.d = defer.Deferred() self.timer = reactor.callLater(self.TIMEOUT_DELAY, self.timeout) return self.d def timeout(self): self.timer = None e = WorkerTimeoutError() self.finished(Failure(e)) def finished(self, results): try: self.p.signalProcess("KILL") except error.ProcessExitedAlready: pass if self.timer: self.timer.cancel() self.timer = None self.running = False self.in_reconfig = False self.d.callback(results) def lineReceived(self, line): if not self.running: return None if b"Log opened." in line: self.in_reconfig = True if b"loading configuration from" in line: self.in_reconfig = True if self.in_reconfig: print(line) if b"message from master: attached" in line: return self.finished("buildbot-worker") return None buildbot-3.4.0/worker/buildbot_worker/scripts/restart.py000066400000000000000000000025471413250514000235600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # This module is left for backward compatibility of old-named worker API. # It should never be imported by Buildbot. from buildbot_worker.scripts import base from buildbot_worker.scripts import start from buildbot_worker.scripts import stop def restart(config): quiet = config['quiet'] basedir = config['basedir'] if not base.isWorkerDir(basedir): return 1 try: stop.stopWorker(basedir, quiet) except stop.WorkerNotRunning: if not quiet: print("no old worker process found to stop") if not quiet: print("now restarting worker process..") return start.startWorker(basedir, quiet, config['nodaemon']) buildbot-3.4.0/worker/buildbot_worker/scripts/runner.py000066400000000000000000000230061413250514000233760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # N.B.: don't import anything that might pull in a reactor yet. Some of our # subcommands want to load modules that need the gtk reactor. from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import re import sys import textwrap from twisted.python import log from twisted.python import reflect from twisted.python import usage # the create/start/stop commands should all be run as the same user, # preferably a separate 'buildbot' account. 
# Note that the terms 'options' and 'config' are used interchangeably here - in # fact, they are interchanged several times. Caveat legator. class MakerBase(usage.Options): optFlags = [ ['help', 'h', "Display this message"], ["quiet", "q", "Do not emit the commands being run"], ] longdesc = textwrap.dedent(""" Operates upon the specified (or the current directory, if not specified). """) # on tab completion, suggest directories as first argument if hasattr(usage, 'Completions'): # only set completion suggestion if running with # twisted version (>=11.1.0) that supports it compData = usage.Completions( extraActions=[usage.CompleteDirs(descr="worker base directory")]) opt_h = usage.Options.opt_help def parseArgs(self, *args): if args: self['basedir'] = args[0] else: # Use the current directory if no basedir was specified. self['basedir'] = os.getcwd() if len(args) > 1: raise usage.UsageError("I wasn't expecting so many arguments") def postOptions(self): self['basedir'] = os.path.abspath(self['basedir']) class StartOptions(MakerBase): subcommandFunction = "buildbot_worker.scripts.start.startCommand" optFlags = [ ['quiet', 'q', "Don't display startup log messages"], ['nodaemon', None, "Don't daemonize (stay in foreground)"], ] def getSynopsis(self): return "Usage: buildbot-worker start []" class StopOptions(MakerBase): subcommandFunction = "buildbot_worker.scripts.stop.stop" def getSynopsis(self): return "Usage: buildbot-worker stop []" class RestartOptions(MakerBase): subcommandFunction = "buildbot_worker.scripts.restart.restart" optFlags = [ ['quiet', 'q', "Don't display startup log messages"], ['nodaemon', None, "Don't daemonize (stay in foreground)"], ] def getSynopsis(self): return "Usage: buildbot-worker restart []" class CreateWorkerOptions(MakerBase): subcommandFunction = "buildbot_worker.scripts.create_worker.createWorker" optFlags = [ ["force", "f", "Re-use an existing directory"], ["relocatable", "r", "Create a relocatable buildbot.tac"], ["no-logrotate", 
"n", "Do not permit buildmaster rotate logs by itself"], ['use-tls', None, "Uses TLS to connect to master"], ['delete-leftover-dirs', None, 'Delete folders that are not required by the master on connection'], ] optParameters = [ ["keepalive", "k", 600, "Interval at which keepalives should be sent (in seconds)"], ["umask", None, "None", "controls permissions of generated files. " "Use --umask=0o22 to be world-readable"], ["maxdelay", None, 300, "Maximum time between connection attempts"], ["maxretries", None, 'None', "Maximum number of retries before worker shutdown"], ["numcpus", None, "None", "Number of available cpus to use on a build. "], ["log-size", "s", "10000000", "size at which to rotate twisted log files"], ["log-count", "l", "10", "limit the number of kept old twisted log files " "(None for unlimited)"], ["allow-shutdown", "a", None, "Allows the worker to initiate a graceful shutdown. One of " "'signal' or 'file'"], ["proxy-connection-string", None, None, "Address of HTTP proxy to tunnel through"] ] longdesc = textwrap.dedent(""" This command creates a buildbot worker directory and buildbot.tac file. The bot will use the and arguments to authenticate itself when connecting to the master. All commands are run in a build-specific subdirectory of . is a string of the form 'hostname[:port]', and specifies where the buildmaster can be reached. port defaults to 9989. The appropriate values for , , and should be provided to you by the buildmaster administrator. You must choose yourself. """) def validateMasterArgument(self, master_arg): """ Parse the argument. 
@param master_arg: the argument to parse @return: tuple of master's host and port @raise UsageError: on errors parsing the argument """ if master_arg[:5] == "http:": raise usage.UsageError(" is not a URL - do not use URL") if master_arg.startswith("[") and "]" in master_arg: # detect ipv6 address with format [2001:1:2:3:4::1]:4321 master, port_tmp = master_arg.split("]") master = master[1:] if ":" not in port_tmp: port = 9989 else: port = port_tmp.split(":")[1] elif ":" not in master_arg: master = master_arg port = 9989 else: try: master, port = master_arg.split(":") except ValueError: raise usage.UsageError(("invalid argument '{}', " "if it is an ipv6 address, it must be enclosed by []" ).format(master_arg)) if not master: raise usage.UsageError("invalid argument '{}'".format( master_arg)) try: port = int(port) except ValueError: raise usage.UsageError("invalid master port '{}', " "needs to be a number".format(port)) return master, port def getSynopsis(self): return "Usage: buildbot-worker create-worker " \ "[options] " def parseArgs(self, *args): if len(args) != 4: raise usage.UsageError("incorrect number of arguments") basedir, master, name, passwd = args self['basedir'] = basedir self['host'], self['port'] = self.validateMasterArgument(master) self['name'] = name self['passwd'] = passwd def postOptions(self): MakerBase.postOptions(self) # check and convert numeric parameters for argument in ["keepalive", "maxdelay", "log-size"]: try: self[argument] = int(self[argument]) except ValueError: raise usage.UsageError("{} parameter needs to be a number".format( argument)) for argument in ["log-count", "maxretries", "umask", "numcpus"]: if not re.match(r'^(0o)?\d+$', self[argument]) and \ self[argument] != 'None': raise usage.UsageError("{} parameter needs to be a number" " or None".format(argument)) if self['allow-shutdown'] not in [None, 'signal', 'file']: raise usage.UsageError("allow-shutdown needs to be one of" " 'signal' or 'file'") class Options(usage.Options): 
synopsis = "Usage: buildbot-worker [command options]" subCommands = [ # the following are all admin commands ['create-worker', None, CreateWorkerOptions, "Create and populate a directory for a new worker"], ['start', None, StartOptions, "Start a worker"], ['stop', None, StopOptions, "Stop a worker"], ['restart', None, RestartOptions, "Restart a worker"], ] def opt_version(self): import buildbot_worker # pylint: disable=import-outside-toplevel print("worker version: {}".format(buildbot_worker.version)) usage.Options.opt_version(self) def opt_verbose(self): log.startLogging(sys.stderr) def postOptions(self): if not hasattr(self, 'subOptions'): raise usage.UsageError("must specify a command") def run(): config = Options() try: config.parseOptions() except usage.error as e: print("{}: {}".format(sys.argv[0], e)) print() c = getattr(config, 'subOptions', config) print(str(c)) sys.exit(1) subconfig = config.subOptions subcommandFunction = reflect.namedObject(subconfig.subcommandFunction) sys.exit(subcommandFunction(subconfig)) buildbot-3.4.0/worker/buildbot_worker/scripts/start.py000066400000000000000000000117501413250514000232250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import sys import time from buildbot_worker.scripts import base from buildbot_worker.util import rewrap class Follower(object): def follow(self): from twisted.internet import reactor from buildbot_worker.scripts.logwatcher import LogWatcher self.rc = 0 print("Following twistd.log until startup finished..") lw = LogWatcher("twistd.log") d = lw.start() d.addCallbacks(self._success, self._failure) reactor.run() return self.rc def _success(self, processtype): from twisted.internet import reactor print("The {0} appears to have (re)started correctly.".format(processtype)) self.rc = 0 reactor.stop() def _failure(self, why): from twisted.internet import reactor from buildbot_worker.scripts.logwatcher import WorkerTimeoutError if why.check(WorkerTimeoutError): print(rewrap("""\ The worker took more than 10 seconds to start and/or connect to the buildmaster, so we were unable to confirm that it started and connected correctly. Please 'tail twistd.log' and look for a line that says 'message from master: attached' to verify correct startup. If you see a bunch of messages like 'will retry in 6 seconds', your worker might not have the correct hostname or portnumber for the buildmaster, or the buildmaster might not be running. If you see messages like 'Failure: twisted.cred.error.UnauthorizedLogin' then your worker might be using the wrong botname or password. Please correct these problems and then restart the worker. """)) else: print(rewrap("""\ Unable to confirm that the worker started correctly. You may need to stop it, fix the config file, and restart. """)) print(why) self.rc = 1 reactor.stop() def startCommand(config): basedir = config['basedir'] if not base.isWorkerDir(basedir): return 1 return startWorker(basedir, config['quiet'], config['nodaemon']) def startWorker(basedir, quiet, nodaemon): """ Start worker process. 
Fork and start twisted application described in basedir buildbot.tac file. Print it's log messages to stdout for a while and try to figure out if start was successful. If quiet or nodaemon parameters are True, or we are running on a win32 system, will not fork and log will not be printed to stdout. @param basedir: worker's basedir path @param quiet: don't display startup log messages @param nodaemon: don't daemonize (stay in foreground) @return: 0 if worker was successfully started, 1 if we are not sure that worker started successfully """ os.chdir(basedir) if quiet or nodaemon: return launch(nodaemon) # we probably can't do this os.fork under windows from twisted.python.runtime import platformType if platformType == "win32": return launch(nodaemon) # fork a child to launch the daemon, while the parent process tails the # logfile if os.fork(): # this is the parent rc = Follower().follow() return rc # this is the child: give the logfile-watching parent a chance to start # watching it before we start the daemon time.sleep(0.2) launch(nodaemon) return None def launch(nodaemon): sys.path.insert(0, os.path.abspath(os.getcwd())) # see if we can launch the application without actually having to # spawn twistd, since spawning processes correctly is a real hassle # on windows. from twisted.python.runtime import platformType from twisted.scripts.twistd import run argv = ["twistd", "--no_save", "--logfile=twistd.log", # windows doesn't use the same default "--python=buildbot.tac"] if nodaemon: argv.extend(["--nodaemon"]) if platformType != 'win32': # windows doesn't use pidfile option. argv.extend(["--pidfile="]) sys.argv = argv run() buildbot-3.4.0/worker/buildbot_worker/scripts/stop.py000066400000000000000000000047521413250514000230610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import signal import time from buildbot_worker.scripts import base class WorkerNotRunning(Exception): """ raised when trying to stop worker process that is not running """ def stopWorker(basedir, quiet, signame="TERM"): """ Stop worker process by sending it a signal. Using the specified basedir path, read worker process's pid file and try to terminate that process with specified signal. @param basedir: worker's basedir path @param quite: if False, don't print any messages to stdout @param signame: signal to send to the worker process @raise WorkerNotRunning: if worker pid file is not found """ os.chdir(basedir) try: f = open("twistd.pid", "rt") except IOError: raise WorkerNotRunning() pid = int(f.read().strip()) signum = getattr(signal, "SIG" + signame) timer = 0 try: os.kill(pid, signum) except OSError as e: if e.errno != 3: raise time.sleep(0.1) while timer < 10: # poll once per second until twistd.pid goes away, up to 10 seconds try: os.kill(pid, 0) except OSError: if not quiet: print("worker process {0} is dead".format(pid)) return 0 timer += 1 time.sleep(1) if not quiet: print("never saw process go away") return 1 def stop(config, signame="TERM"): quiet = config['quiet'] basedir = config['basedir'] if not base.isWorkerDir(basedir): return 1 try: return stopWorker(basedir, quiet, signame) except WorkerNotRunning: if not quiet: print("worker not running") return 0 
buildbot-3.4.0/worker/buildbot_worker/scripts/windows_service.py000077500000000000000000000610771413250514000253140ustar00rootroot00000000000000# pylint: disable=import-outside-toplevel # Runs the build-bot as a Windows service. # To use: # * Install and configure buildbot as per normal (ie, running # 'setup.py install' from the source directory). # # * Configure any number of build-bot directories (workers or masters), as # per the buildbot instructions. Test these directories normally by # using the (possibly modified) "buildbot.bat" file and ensure everything # is working as expected. # # * Install the buildbot service. Execute the command: # % buildbot_worker_windows_service # To see installation options. You probably want to specify: # + --username and --password options to specify the user to run the # + --startup auto to have the service start at boot time. # # For example: # % buildbot_worker_windows_service --user mark --password secret \ # --startup auto install # Alternatively, you could execute: # % buildbot_worker_windows_service install # to install the service with default options, then use Control Panel # to configure it. # # * Start the service specifying the name of all buildbot directories as # service args. This can be done one of 2 ways: # - Execute the command: # % buildbot_worker_windows_service start "dir_name1" "dir_name2" # or: # - Start Control Panel->Administrative Tools->Services # - Locate the previously installed buildbot service. # - Open the "properties" for the service. # - Enter the directory names into the "Start Parameters" textbox. The # directory names must be fully qualified, and surrounded in quotes if # they include spaces. # - Press the "Start"button. # Note that the service will automatically use the previously specified # directories if no arguments are specified. 
This means the directories # need only be specified when the directories to use have changed (and # therefore also the first time buildbot is configured) # # * The service should now be running. You should check the Windows # event log. If all goes well, you should see some information messages # telling you the buildbot has successfully started. # # * If you change the buildbot configuration, you must restart the service. # There is currently no way to ask a running buildbot to reload the # config. You can restart by executing: # % buildbot_worker_windows_service restart # # Troubleshooting: # * Check the Windows event log for any errors. # * Check the "twistd.log" file in your buildbot directories - once each # bot has been started it just writes to this log as normal. # * Try executing: # % buildbot_worker_windows_service debug # This will execute the buildbot service in "debug" mode, and allow you to # see all messages etc generated. If the service works in debug mode but # not as a real service, the error probably relates to the environment or # permissions of the user configured to run the service (debug mode runs as # the currently logged in user, not the service user) # * Ensure you have the latest pywin32 build available, at least version 206. # Written by Mark Hammond, 2006. from __future__ import absolute_import from __future__ import division from __future__ import print_function from future.builtins import range import os import sys import threading from contextlib import contextmanager import pywintypes import servicemanager import win32api import win32con import win32event import win32file import win32pipe import win32process import win32security import win32service import win32serviceutil import winerror # Are we running in a py2exe environment? 
is_frozen = hasattr(sys, "frozen") # Taken from the Zope service support - each "child" is run as a sub-process # (trying to run multiple twisted apps in the same process is likely to screw # stdout redirection etc). # Note that unlike the Zope service, we do *not* attempt to detect a failed # client and perform restarts - buildbot itself does a good job # at reconnecting, and Windows itself provides restart semantics should # everything go pear-shaped. # We execute a new thread that captures the tail of the output from our child # process. If the child fails, it is written to the event log. # This process is unconditional, and the output is never written to disk # (except obviously via the event log entry) # Size of the blocks we read from the child process's output. CHILDCAPTURE_BLOCK_SIZE = 80 # The number of BLOCKSIZE blocks we keep as process output. CHILDCAPTURE_MAX_BLOCKS = 200 class BBService(win32serviceutil.ServiceFramework): _svc_name_ = 'BuildBot' _svc_display_name_ = _svc_name_ _svc_description_ = 'Manages local buildbot workers and masters - ' \ 'see https://buildbot.net' def __init__(self, args): win32serviceutil.ServiceFramework.__init__(self, args) # Create an event which we will use to wait on. The "service stop" # request will set this event. # * We must make it inheritable so we can pass it to the child # process via the cmd-line # * Must be manual reset so each child process and our service # all get woken from a single set of the event. sa = win32security.SECURITY_ATTRIBUTES() sa.bInheritHandle = True self.hWaitStop = win32event.CreateEvent(sa, True, False, None) self.args = args self.dirs = None self.runner_prefix = None # Patch up the service messages file in a frozen exe. # (We use the py2exe option that magically bundles the .pyd files # into the .zip file - so servicemanager.pyd doesn't exist.) 
if is_frozen and servicemanager.RunningAsService(): msg_file = os.path.join(os.path.dirname(sys.executable), "buildbot.msg") if os.path.isfile(msg_file): servicemanager.Initialize("BuildBot", msg_file) else: self.warning("Strange - '{0}' does not exist".format(msg_file)) def _checkConfig(self): # Locate our child process runner (but only when run from source) if not is_frozen: # Running from source python_exe = os.path.join(sys.prefix, "python.exe") if not os.path.isfile(python_exe): # for ppl who build Python itself from source. python_exe = os.path.join(sys.prefix, "PCBuild", "python.exe") if not os.path.isfile(python_exe): # virtualenv support python_exe = os.path.join(sys.prefix, "Scripts", "python.exe") if not os.path.isfile(python_exe): self.error("Can not find python.exe to spawn subprocess") return False me = __file__ if me.endswith(".pyc") or me.endswith(".pyo"): me = me[:-1] self.runner_prefix = '"{0}" "{1}"'.format(python_exe, me) else: # Running from a py2exe built executable - our child process is # us (but with the funky cmdline args!) self.runner_prefix = '"' + sys.executable + '"' # Now our arg processing - this may be better handled by a # twisted/buildbot style config file - but as of time of writing, # MarkH is clueless about such things! # Note that the "arguments" you type into Control Panel for the # service do *not* persist - they apply only when you click "start" # on the service. When started by Windows, args are never presented. # Thus, it is the responsibility of the service to persist any args. # so, when args are presented, we save them as a "custom option". If # they are not presented, we load them from the option. 
self.dirs = [] if len(self.args) > 1: dir_string = os.pathsep.join(self.args[1:]) save_dirs = True else: dir_string = win32serviceutil.GetServiceCustomOption(self, "directories") save_dirs = False if not dir_string: self.error("You must specify the buildbot directories as " "parameters to the service.\nStopping the service.") return False dirs = dir_string.split(os.pathsep) for d in dirs: d = os.path.abspath(d) sentinal = os.path.join(d, "buildbot.tac") if os.path.isfile(sentinal): self.dirs.append(d) else: msg = "Directory '{0}' is not a buildbot dir - ignoring".format(d) self.warning(msg) if not self.dirs: self.error("No valid buildbot directories were specified.\n" "Stopping the service.") return False if save_dirs: dir_string = os.pathsep.join(self.dirs) win32serviceutil.SetServiceCustomOption(self, "directories", dir_string) return True def SvcStop(self): # Tell the SCM we are starting the stop process. self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # Set the stop event - the main loop takes care of termination. win32event.SetEvent(self.hWaitStop) # SvcStop only gets triggered when the user explicitly stops (or restarts) # the service. To shut the service down cleanly when Windows is shutting # down, we also need to hook SvcShutdown. SvcShutdown = SvcStop def SvcDoRun(self): if not self._checkConfig(): # stopped status set by caller. 
return self.logmsg(servicemanager.PYS_SERVICE_STARTED) child_infos = [] for bbdir in self.dirs: self.info("Starting BuildBot in directory '{0}'".format(bbdir)) # hWaitStop is the Handle and the command needs the int associated # to that Handle hstop = int(self.hWaitStop) cmd = '{} --spawn {} start --nodaemon {}'.format(self.runner_prefix, hstop, bbdir) h, t, output = self.createProcess(cmd) child_infos.append((bbdir, h, t, output)) while child_infos: handles = [self.hWaitStop] + [i[1] for i in child_infos] rc = win32event.WaitForMultipleObjects(handles, 0, # bWaitAll win32event.INFINITE) if rc == win32event.WAIT_OBJECT_0: # user sent a stop service request break # A child process died. For now, just log the output # and forget the process. index = rc - win32event.WAIT_OBJECT_0 - 1 bbdir, dead_handle, dead_thread, output_blocks = \ child_infos[index] status = win32process.GetExitCodeProcess(dead_handle) output = "".join(output_blocks) if not output: output = ("The child process generated no output. " "Please check the twistd.log file in the " "indicated directory.") self.warning("BuildBot for directory {0!r} terminated with " "exit code {1}.\n{2}".format(bbdir, status, output)) del child_infos[index] if not child_infos: self.warning("All BuildBot child processes have " "terminated. Service stopping.") # Either no child processes left, or stop event set. self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # The child processes should have also seen our stop signal # so wait for them to terminate. for bbdir, h, t, output in child_infos: for i in range(10): # 30 seconds to shutdown... self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) rc = win32event.WaitForSingleObject(h, 3000) if rc == win32event.WAIT_OBJECT_0: break # Process terminated - no need to try harder. 
if rc == win32event.WAIT_OBJECT_0: break self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # If necessary, kill it if win32process.GetExitCodeProcess(h) == win32con.STILL_ACTIVE: self.warning("BuildBot process at {0!r} failed to terminate - " "killing it".format(bbdir)) win32api.TerminateProcess(h, 3) self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # Wait for the redirect thread - it should have died as the remote # process terminated. # As we are shutting down, we do the join with a little more care, # reporting progress as we wait (even though we never will ) for i in range(5): t.join(1) self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) if not t.is_alive(): break else: self.warning("Redirect thread did not stop!") # All done. self.logmsg(servicemanager.PYS_SERVICE_STOPPED) # # Error reporting/logging functions. # def logmsg(self, event): # log a service event using servicemanager.LogMsg try: servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, event, (self._svc_name_, " ({0})".format(self._svc_display_name_))) except win32api.error as details: # Failed to write a log entry - most likely problem is # that the event log is full. We don't want this to kill us try: print("FAILED to write INFO event", event, ":", details) except IOError: # No valid stdout! Ignore it. pass def _dolog(self, func, msg): try: func(msg) except win32api.error as details: # Failed to write a log entry - most likely problem is # that the event log is full. We don't want this to kill us try: print("FAILED to write event log entry:", details) print(msg) except IOError: pass def info(self, s): self._dolog(servicemanager.LogInfoMsg, s) def warning(self, s): self._dolog(servicemanager.LogWarningMsg, s) def error(self, s): self._dolog(servicemanager.LogErrorMsg, s) # Functions that spawn a child process, redirecting any output. 
# Although buildbot itself does this, it is very handy to debug issues # such as ImportErrors that happen before buildbot has redirected. def createProcess(self, cmd): hInputRead, hInputWriteTemp = self.newPipe() hOutReadTemp, hOutWrite = self.newPipe() pid = win32api.GetCurrentProcess() # This one is duplicated as inheritable. hErrWrite = win32api.DuplicateHandle(pid, hOutWrite, pid, 0, 1, win32con.DUPLICATE_SAME_ACCESS) # These are non-inheritable duplicates. hOutRead = self.dup(hOutReadTemp) hInputWrite = self.dup(hInputWriteTemp) # dup() closed hOutReadTemp, hInputWriteTemp si = win32process.STARTUPINFO() si.hStdInput = hInputRead si.hStdOutput = hOutWrite si.hStdError = hErrWrite si.dwFlags = win32process.STARTF_USESTDHANDLES | \ win32process.STARTF_USESHOWWINDOW si.wShowWindow = win32con.SW_HIDE # pass True to allow handles to be inherited. Inheritance is # problematic in general, but should work in the controlled # circumstances of a service process. create_flags = win32process.CREATE_NEW_CONSOLE # info is (hProcess, hThread, pid, tid) info = win32process.CreateProcess(None, cmd, None, None, True, create_flags, None, None, si) # (NOTE: these really aren't necessary for Python - they are closed # as soon as they are collected) hOutWrite.Close() hErrWrite.Close() hInputRead.Close() # We don't use stdin hInputWrite.Close() # start a thread collecting output blocks = [] t = threading.Thread(target=self.redirectCaptureThread, args=(hOutRead, blocks)) t.start() return info[0], t, blocks def redirectCaptureThread(self, handle, captured_blocks): # One of these running per child process we are watching. It # handles both stdout and stderr on a single handle. The read data is # never referenced until the thread dies - so no need for locks # around self.captured_blocks. 
# self.info("Redirect thread starting") while True: try: ec, data = win32file.ReadFile(handle, CHILDCAPTURE_BLOCK_SIZE) except pywintypes.error as err: # ERROR_BROKEN_PIPE means the child process closed the # handle - ie, it terminated. if err.winerror != winerror.ERROR_BROKEN_PIPE: self.warning("Error reading output from process: {0}".format(err)) break captured_blocks.append(data) del captured_blocks[CHILDCAPTURE_MAX_BLOCKS:] handle.Close() # self.info("Redirect capture thread terminating") def newPipe(self): sa = win32security.SECURITY_ATTRIBUTES() sa.bInheritHandle = True return win32pipe.CreatePipe(sa, 0) def dup(self, pipe): # create a duplicate handle that is not inherited, so that # it can be closed in the parent. close the original pipe in # the process. pid = win32api.GetCurrentProcess() dup = win32api.DuplicateHandle(pid, pipe, pid, 0, 0, win32con.DUPLICATE_SAME_ACCESS) pipe.Close() return dup # Service registration and startup def RegisterWithFirewall(exe_name, description): # Register our executable as an exception with Windows Firewall. # taken from http://msdn.microsoft.com/library/default.asp?url=\ # /library/en-us/ics/ics/wf_adding_an_application.asp from win32com.client import Dispatch # Scope NET_FW_SCOPE_ALL = 0 # IP Version - ANY is the only allowable setting for now NET_FW_IP_VERSION_ANY = 2 fwMgr = Dispatch("HNetCfg.FwMgr") # Get the current profile for the local firewall policy. profile = fwMgr.LocalPolicy.CurrentProfile app = Dispatch("HNetCfg.FwAuthorizedApplication") app.ProcessImageFileName = exe_name app.Name = description app.Scope = NET_FW_SCOPE_ALL # Use either Scope or RemoteAddresses, but not both # app.RemoteAddresses = "*" app.IpVersion = NET_FW_IP_VERSION_ANY app.Enabled = True # Use this line if you want to add the app, but disabled. 
# app.Enabled = False profile.AuthorizedApplications.Add(app) @contextmanager def GetLocalSecurityPolicyHandle(systemName, desiredAccess): # Context manager for GetPolicyHandle policyHandle = win32security.GetPolicyHandle(systemName, desiredAccess) yield policyHandle win32security.LsaClose(policyHandle) def ConfigureLogOnAsAServicePolicy(accountName): # Modifies LocalSecurityPolicy to allow run buildbot as specified user # You can do it manually by running "secpol.msc" # Open Local Policies > User Rights Assignment > Log on as a service # Add User or Group... # # Args: # accountName(str): fully qualified string in the domain_name\user_name format. # use ".\user_name" format for local account SE_SERVICE_LOGON_RIGHT = "SeServiceLogonRight" try: if "\\" not in accountName or accountName.startswith(".\\"): computerName = os.environ['COMPUTERNAME'] if not computerName: computerName = win32api.GetComputerName() if not computerName: print("error: Cannot determine computer name") return accountName = "{}\\{}".format(computerName, accountName.lstrip(".\\")) account = win32security.LookupAccountName(None, accountName) accountSid = account[0] sid = win32security.ConvertSidToStringSid(accountSid) except win32api.error as err: print("error {} ({}): {}".format(err.winerror, err.funcname, err.strerror)) return with GetLocalSecurityPolicyHandle('', win32security.POLICY_ALL_ACCESS) as policy: win32security.LsaAddAccountRights(policy, accountSid, [SE_SERVICE_LOGON_RIGHT]) # verify if policy was really modified with GetLocalSecurityPolicyHandle('', win32security.POLICY_ALL_ACCESS) as policy: try: privileges = win32security.LsaEnumerateAccountRights(policy, accountSid) except win32api.error as err: # If no account rights are found or if the function fails for any other reason, # the function returns throws winerror.ERROR_FILE_NOT_FOUND or any other print("error {} ({}): {}".format(err.winerror, err.funcname, err.strerror)) privileges = [] if SE_SERVICE_LOGON_RIGHT in privileges: 
print("Account {}({}) has granted {} privilege.".format(accountName, sid, SE_SERVICE_LOGON_RIGHT)) else: print(("error: Account {}({}) does not have {} privilege." ).format(accountName, sid, SE_SERVICE_LOGON_RIGHT)) # A custom install function. def CustomInstall(opts): # Register this process with the Windows Firewaall import pythoncom try: RegisterWithFirewall(sys.executable, "BuildBot") except pythoncom.com_error as why: print("FAILED to register with the Windows firewall") print(why) for opt, val in opts: if opt == '--username': userName = val ConfigureLogOnAsAServicePolicy(userName) # Magic code to allow shutdown. Note that this code is executed in # the *child* process, by way of the service process executing us with # special cmdline args (which includes the service stop handle!) def _RunChild(runfn): del sys.argv[1] # The --spawn arg. # Create a new thread that just waits for the event to be signalled. t = threading.Thread(target=_WaitForShutdown, args=(int(sys.argv[1]), ) ) del sys.argv[1] # The stop handle # This child process will be sent a console handler notification as # users log off, or as the system shuts down. We want to ignore these # signals as the service parent is responsible for our shutdown. def ConsoleHandler(what): # We can ignore *everything* - ctrl+c will never be sent as this # process is never attached to a console the user can press the # key in! return True win32api.SetConsoleCtrlHandler(ConsoleHandler, True) t.setDaemon(True) # we don't want to wait for this to stop! 
t.start() if hasattr(sys, "frozen"): # py2exe sets this env vars that may screw our child process - reset del os.environ["PYTHONPATH"] # Start the buildbot/buildbot-worker app runfn() print("Service child process terminating normally.") def _WaitForShutdown(h): win32event.WaitForSingleObject(h, win32event.INFINITE) print("Shutdown requested") from twisted.internet import reactor reactor.callLater(0, reactor.stop) def DetermineRunner(bbdir): '''Checks if the given directory is a buildbot worker or a master and returns the appropriate run function.''' try: import buildbot_worker.scripts.runner tacfile = os.path.join(bbdir, 'buildbot.tac') if os.path.exists(tacfile): with open(tacfile, 'r') as f: contents = f.read() if 'import Worker' in contents: return buildbot_worker.scripts.runner.run except ImportError: # Use the default pass import buildbot.scripts.runner return buildbot.scripts.runner.run # This function is also called by the py2exe startup code. def HandleCommandLine(): if len(sys.argv) > 1 and sys.argv[1] == "--spawn": # Special command-line created by the service to execute the # child-process. # First arg is the handle to wait on # Fourth arg is the config directory to use for the buildbot worker _RunChild(DetermineRunner(sys.argv[5])) else: win32serviceutil.HandleCommandLine(BBService, customOptionHandler=CustomInstall) if __name__ == '__main__': HandleCommandLine() buildbot-3.4.0/worker/buildbot_worker/test/000077500000000000000000000000001413250514000210025ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/test/__init__.py000066400000000000000000000040551413250514000231170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sys import twisted from twisted.trial import unittest from buildbot_worker import monkeypatches # apply the same patches the worker does when it starts monkeypatches.patch_all(for_tests=True) def add_debugging_monkeypatches(): """ DO NOT CALL THIS DIRECTLY This adds a few "harmless" monkeypatches which make it easier to debug failing tests. """ from twisted.application.service import Service old_startService = Service.startService old_stopService = Service.stopService def startService(self): assert not self.running return old_startService(self) def stopService(self): assert self.running return old_stopService(self) Service.startService = startService Service.stopService = stopService # versions of Twisted before 9.0.0 did not have a UnitTest.patch that worked # on Python-2.7 if twisted.version.major <= 9 and sys.version_info[:2] == (2, 7): def nopatch(self, *args): raise unittest.SkipTest('unittest.TestCase.patch is not available') unittest.TestCase.patch = nopatch add_debugging_monkeypatches() __all__ = [] # import mock so we bail out early if it's not installed try: import mock [mock] except ImportError: raise ImportError("Buildbot tests require the 'mock' module; " "try 'pip install mock'") 
buildbot-3.4.0/worker/buildbot_worker/test/fake/000077500000000000000000000000001413250514000217105ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/test/fake/__init__.py000066400000000000000000000000001413250514000240070ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/test/fake/remote.py000066400000000000000000000025421413250514000235600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # This module is left for backward compatibility of old-named worker API. # It should never be imported by Buildbot. from twisted.internet import defer class FakeRemote(object): """ Wrap a local object to make it look like it's remote """ def __init__(self, original, method_prefix="remote_"): self.original = original self.method_prefix = method_prefix def callRemote(self, meth, *args, **kwargs): fn = getattr(self.original, self.method_prefix + meth) return defer.maybeDeferred(fn, *args, **kwargs) def notifyOnDisconnect(self, what): pass def dontNotifyOnDisconnect(self, what): pass buildbot-3.4.0/worker/buildbot_worker/test/fake/runprocess.py000066400000000000000000000165751413250514000245030ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from twisted.internet import defer from twisted.python import failure class Expect(object): """ An expected instantiation of RunProcess. Usually used within a RunProcess expect invocation: rp.expect( Expect("echo", "bar", usePTY=False) + { 'stdout' : 'hello!!' } + { 'rc' : 13 } + 13 # for a callback with rc=13; or + Failure(..), # for a failure Expect(..) + .. , ... ) Note that the default values are accepted for all keyword arguments if they are not omitted. 
""" def __init__(self, command, workdir, **kwargs): self.kwargs = dict(command=command, workdir=workdir) self.kwargs.update(kwargs) self.result = None self.status_updates = [] def __add__(self, other): if isinstance(other, dict): self.status_updates.append(other) elif isinstance(other, int): self.result = ('c', other) elif isinstance(other, failure.Failure): self.result = ('e', other) else: raise ValueError("invalid expectation '{0!r}'".format(other)) return self def __str__(self): other_kwargs = self.kwargs.copy() del other_kwargs['command'] del other_kwargs['workdir'] return "Command: {0}\n workdir: {1}\n kwargs: {2}\n result: {3}\n".format( self.kwargs['command'], self.kwargs['workdir'], other_kwargs, self.result) class FakeRunProcess(object): """ A fake version of L{buildbot_worker.runprocess.RunProcess} which will simulate running external processes without actually running them (which is very fragile in tests!) This class is first programmed with the set of instances that are expected, and with their expected results. It will raise an AssertionError if the expected behavior is not seen. Note that this handles sendStderr/sendStdout and keepStderr/keepStdout properly. """ @classmethod def expect(cls, *expectations): """ Set the expectations for this test run """ cls._expectations = list(expectations) # list the first expectation last, so we can pop it cls._expectations.reverse() @classmethod def test_done(cls): """ Indicate that this test is finished; if any expected instantiations have not taken place, this will raise the appropriate AssertionError. 
""" if cls._expectations: raise AssertionError(("{0} expected instances not created" ).format(len(cls._expectations))) del cls._expectations def __init__(self, builder, command, workdir, **kwargs): kwargs['command'] = command kwargs['workdir'] = workdir # the default values for the constructor kwargs; if we got a default # value in **kwargs and didn't expect anything, well count that as OK default_values = dict(environ=None, sendStdout=True, sendStderr=True, sendRC=True, timeout=None, maxTime=None, sigtermTime=None, initialStdin=None, keepStdout=False, keepStderr=False, logEnviron=True, logfiles={}, usePTY=False) if not self._expectations: raise AssertionError("unexpected instantiation: {0}".format(kwargs)) exp = self._exp = self._expectations.pop() if exp.kwargs != kwargs: msg = [] # pylint: disable=consider-iterating-dictionary for key in sorted(list(set(exp.kwargs.keys()) | set(kwargs.keys()))): if key not in exp.kwargs: if key in default_values: if default_values[key] == kwargs[key]: continue # default values are expected msg.append('{0}: expected default ({1!r}),\n got {2!r}'.format( key, default_values[key], kwargs[key])) else: msg.append('{0}: unexpected arg, value = {1!r}'.format(key, kwargs[key])) elif key not in kwargs: msg.append('{0}: did not get expected arg'.format(key)) elif exp.kwargs[key] != kwargs[key]: msg.append('{0}: expected {1!r},\n got {2!r}'.format(key, exp.kwargs[key], kwargs[key])) if msg: msg.insert( 0, 'did not get expected __init__ arguments for\n {0}'.format( " ".join(map(repr, kwargs.get('command', ['unknown command']))))) self._expectations[:] = [] # don't expect any more instances, since we're failing raise AssertionError("\n".join(msg)) self._builder = builder self.stdout = '' self.stderr = '' def start(self): # figure out the stdio-related parameters keepStdout = self._exp.kwargs.get('keepStdout', False) keepStderr = self._exp.kwargs.get('keepStderr', False) sendStdout = self._exp.kwargs.get('sendStdout', True) sendStderr = 
self._exp.kwargs.get('sendStderr', True) if keepStdout: self.stdout = '' if keepStderr: self.stderr = '' finish_immediately = True # send the updates, accounting for the stdio parameters for upd in self._exp.status_updates: if 'stdout' in upd: if keepStdout: self.stdout += upd['stdout'] if not sendStdout: del upd['stdout'] if 'stderr' in upd: if keepStderr: self.stderr += upd['stderr'] if not sendStderr: del upd['stderr'] if 'wait' in upd: finish_immediately = False continue # don't send this update if not upd: continue self._builder.sendUpdate(upd) d = self.run_deferred = defer.Deferred() if finish_immediately: self._finished() return d def _finished(self): if self._exp.result[0] == 'e': self.run_deferred.errback(self._exp.result[1]) else: self.run_deferred.callback(self._exp.result[1]) def kill(self, reason): self._builder.sendUpdate({'hdr': 'killing'}) self._builder.sendUpdate({'rc': -1}) self.run_deferred.callback(-1) buildbot-3.4.0/worker/buildbot_worker/test/fake/workerforbuilder.py000066400000000000000000000046451413250514000256620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import division from __future__ import print_function import pprint class FakeWorkerForBuilder(object): """ Simulates a WorkerForBuilder, but just records the updates from sendUpdate in its updates attribute. Call show() to get a pretty-printed string showing the updates. Set debug to True to show updates as they happen. """ debug = False def __init__(self, basedir="/workerbuilder/basedir"): self.updates = [] self.basedir = basedir self.unicode_encoding = 'utf-8' def sendUpdate(self, data): if self.debug: print("FakeWorkerForBuilder.sendUpdate", data) self.updates.append(data) def show(self): return pprint.pformat(self.updates) # Returns a Deferred def protocol_update_upload_file_close(self, writer): return writer.callRemote("close") # Returns a Deferred def protocol_update_upload_file_utime(self, writer, access_time, modified_time): return writer.callRemote("utime", (access_time, modified_time)) # Returns a Deferred def protocol_update_upload_file_write(self, writer, data): return writer.callRemote('write', data) # Returns a Deferred def protocol_update_upload_directory(self, writer): return writer.callRemote("unpack") # Returns a Deferred def protocol_update_upload_directory_write(self, writer, data): return writer.callRemote('write', data) # Returns a Deferred def protocol_update_read_file_close(self, reader): return reader.callRemote('close') # Returns a Deferred def protocol_update_read_file(self, reader, length): return reader.callRemote('read', length) buildbot-3.4.0/worker/buildbot_worker/test/test_extra_coverage.py000066400000000000000000000021001413250514000254020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # this file imports a number of source files that are not # included in the coverage because none of the tests import # them; this results in a more accurate total coverage percent. from __future__ import absolute_import from __future__ import print_function from buildbot_worker.scripts import logwatcher modules = [] # for the benefit of pyflakes modules.extend([logwatcher]) buildbot-3.4.0/worker/buildbot_worker/test/test_util_hangcheck.py000066400000000000000000000165311413250514000253710ustar00rootroot00000000000000""" Tests for `buildbot_worker.util._hangcheck`. 
""" from __future__ import absolute_import from __future__ import print_function from twisted.internet import defer from twisted.internet import reactor from twisted.internet.endpoints import TCP4ClientEndpoint from twisted.internet.endpoints import TCP4ServerEndpoint from twisted.internet.error import ConnectionDone from twisted.internet.protocol import Protocol from twisted.internet.task import Clock from twisted.python.failure import Failure from twisted.spread.pb import PBClientFactory from twisted.trial.unittest import TestCase from twisted.web.server import Site from twisted.web.static import Data from ..backports import SynchronousTestCase from ..util import HangCheckFactory from ..util._hangcheck import HangCheckProtocol try: from twisted.internet.testing import AccumulatingProtocol from twisted.internet.testing import StringTransport except ImportError: from twisted.test.proto_helpers import AccumulatingProtocol from twisted.test.proto_helpers import StringTransport def assert_clock_idle(case, clock): """ Assert that the given clock doesn't have any pending delayed calls. """ case.assertEqual( clock.getDelayedCalls(), [], ) class HangCheckTests(SynchronousTestCase): """ Tests for HangCheckProtocol. """ def test_disconnects(self): """ When connecting to a server that doesn't send any data, the protocol disconnects after the timeout. """ clock = Clock() protocol = HangCheckProtocol(Protocol(), reactor=clock) transport = StringTransport() transport.protocol = protocol protocol.makeConnection(transport) clock.advance(120) self.assertTrue(transport.disconnecting) assert_clock_idle(self, clock) def test_transport(self): """ The transport passed to the underlying protocol is the underlying transport. 
""" clock = Clock() wrapped_protocol = Protocol() protocol = HangCheckProtocol(wrapped_protocol, reactor=clock) transport = StringTransport() transport.protocol = protocol protocol.makeConnection(transport) self.assertIdentical(wrapped_protocol.transport, transport) def test_forwards_data(self): """ Data received by the protocol gets passed to the wrapped protocol. """ clock = Clock() wrapped_protocol = AccumulatingProtocol() protocol = HangCheckProtocol(wrapped_protocol, reactor=clock) transport = StringTransport() transport.protocol = protocol protocol.makeConnection(transport) protocol.dataReceived(b'some-data') self.assertEqual(wrapped_protocol.data, b"some-data") def test_data_cancels_timeout(self): """ When data is received, the timeout is canceled """ clock = Clock() protocol = HangCheckProtocol(Protocol(), reactor=clock) transport = StringTransport() transport.protocol = protocol protocol.makeConnection(transport) protocol.dataReceived(b'some-data') assert_clock_idle(self, clock) def test_calls_callback(self): """ When connecting to a server that doesn't send any data, the protocol calls the hung callback. """ results = [] clock = Clock() protocol = HangCheckProtocol( Protocol(), hung_callback=lambda: results.append(True), reactor=clock, ) transport = StringTransport() transport.protocol = protocol protocol.makeConnection(transport) clock.advance(120) self.assertEqual(results, [True]) assert_clock_idle(self, clock) def test_disconnect_forwarded(self): """ If the connection is closed, the underlying protocol is informed. 
""" clock = Clock() wrapped_protocol = AccumulatingProtocol() protocol = HangCheckProtocol(wrapped_protocol, reactor=clock) transport = StringTransport() transport.protocol = protocol protocol.makeConnection(transport) reason = ConnectionDone("Bye.") protocol.connectionLost( Failure(reason) ) self.assertTrue(wrapped_protocol.closed) self.assertEqual( wrapped_protocol.closedReason.value, reason, ) def test_disconnect_cancels_timeout(self): """ If the connection is closed, the hang check is cancelled. """ clock = Clock() protocol = HangCheckProtocol( Protocol(), reactor=clock, ) transport = StringTransport() transport.protocol = protocol protocol.makeConnection(transport) protocol.connectionLost( Failure(ConnectionDone("Bye.")) ) assert_clock_idle(self, clock) def test_data_and_disconnect(self): """ If the connection receives data and then is closed, no error results. """ clock = Clock() protocol = HangCheckProtocol( Protocol(), reactor=clock, ) transport = StringTransport() transport.protocol = protocol protocol.makeConnection(transport) protocol.dataReceived(b"some-data") protocol.connectionLost( Failure(ConnectionDone("Bye.")) ) assert_clock_idle(self, clock) @defer.inlineCallbacks def connected_server_and_client(case, server_factory, client_factory): """ Create a server and client connected to that server. :param case: The test case that will handle cleanup. :param IProtocolFactory server_factory: The factory for the server protocol. :param IProtocolFactory client_factory: The factory for the client protocol. :return: A deferred that fires when the client has connected. .. todo: Figure out what a sensible value to return is. The existing caller doesn't use the return value. 
""" try: listening_port = yield TCP4ServerEndpoint(reactor, 0).listen(server_factory) case.addCleanup(listening_port.stopListening) endpoint = TCP4ClientEndpoint(reactor, '127.0.0.1', listening_port.getHost().port) yield endpoint.connect(client_factory) except Exception as e: f = Failure(e) # we can't use `e` from the lambda itself case.addCleanup(lambda: f) class EndToEndHangCheckTests(TestCase): """ End to end test for HangCheckProtocol. """ @defer.inlineCallbacks def test_http(self): """ When connecting to a HTTP server, a PB connection times out. """ result = defer.Deferred() site = Site(Data("", "text/plain")) client = HangCheckFactory( PBClientFactory(), lambda: result.callback(None)) self.patch(HangCheckProtocol, '_HUNG_CONNECTION_TIMEOUT', 0.1) d_connected = connected_server_and_client( self, site, client, ) def cancel_all(): result.cancel() d_connected.cancel() timer = reactor.callLater(2, cancel_all) try: yield result except defer.CancelledError: raise Exception('Timeout did not happen') finally: d_connected.cancel() timer.cancel() buildbot-3.4.0/worker/buildbot_worker/test/unit/000077500000000000000000000000001413250514000217615ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/test/unit/__init__.py000066400000000000000000000000001413250514000240600ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/test/unit/runprocess-scripts.py000066400000000000000000000062271413250514000262320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # This file contains scripts run by the test_runprocess tests. Note that since # this code runs in a different Python interpreter, it does not necessarily # have access to any of the Buildbot source. Functions here should be kept # very simple! from __future__ import absolute_import from __future__ import print_function import os import select import signal import sys import time # utils def write_pidfile(pidfile): pidfile_tmp = pidfile + "~" f = open(pidfile_tmp, "w") f.write(str(os.getpid())) f.close() os.rename(pidfile_tmp, pidfile) def sleep_forever(): signal.alarm(110) # die after 110 seconds while True: time.sleep(10) def wait_for_parent_death(orig_parent_pid): while True: ppid = os.getppid() if ppid != orig_parent_pid: return # on some systems, getppid will keep returning # a dead pid, so check it for liveness try: os.kill(ppid, 0) except OSError: # Probably ENOSUCH return script_fns = {} def script(fn): script_fns[fn.__name__] = fn return fn # scripts @script def write_pidfile_and_sleep(): pidfile = sys.argv[2] write_pidfile(pidfile) sleep_forever() @script def spawn_child(): parent_pidfile, child_pidfile = sys.argv[2:] if os.fork() == 0: write_pidfile(child_pidfile) else: write_pidfile(parent_pidfile) sleep_forever() @script def double_fork(): # when using a PTY, the child process will get SIGHUP when the # parent process exits, so ignore that. 
signal.signal(signal.SIGHUP, signal.SIG_IGN) parent_pidfile, child_pidfile = sys.argv[2:] parent_pid = os.getpid() if os.fork() == 0: wait_for_parent_death(parent_pid) write_pidfile(child_pidfile) sleep_forever() else: write_pidfile(parent_pidfile) sys.exit(0) @script def assert_stdin_closed(): # EOF counts as readable data, so we should see stdin in the readable list, # although it may not appear immediately, and select may return early bail_at = time.time() + 10 while True: r, w, x = select.select([0], [], [], 0.01) if r == [0]: return # success! if time.time() > bail_at: assert False # failure :( # make sure this process dies if necessary if not hasattr(signal, 'alarm'): signal.alarm = lambda t: None signal.alarm(110) # die after 110 seconds # dispatcher script_fns[sys.argv[1]]() buildbot-3.4.0/worker/buildbot_worker/test/unit/test_bot.py000066400000000000000000000343071413250514000241650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from future.builtins import range import multiprocessing import os import shutil import mock from twisted.internet import defer from twisted.internet import reactor from twisted.internet import task from twisted.python import failure from twisted.python import log from twisted.trial import unittest import buildbot_worker from buildbot_worker import base from buildbot_worker import pb from buildbot_worker.commands.base import Command from buildbot_worker.test.fake.remote import FakeRemote from buildbot_worker.test.fake.runprocess import Expect from buildbot_worker.test.util import command class TestBot(unittest.TestCase): def setUp(self): self.basedir = os.path.abspath("basedir") if os.path.exists(self.basedir): shutil.rmtree(self.basedir) os.makedirs(self.basedir) # create test-release-file with open("{}/test-release-file".format(self.basedir), "w") as fout: fout.write( """ # unit test release file OS_NAME="Test" VERSION="1.0" ID=test ID_LIKE=generic PRETTY_NAME="Test 1.0 Generic" VERSION_ID="1" """ ) self.real_bot = base.BotBase(self.basedir, False) self.real_bot.setOsReleaseFile("{}/test-release-file".format(self.basedir)) self.real_bot.startService() self.bot = FakeRemote(self.real_bot) @defer.inlineCallbacks def tearDown(self): if self.real_bot and self.real_bot.running: yield self.real_bot.stopService() if os.path.exists(self.basedir): shutil.rmtree(self.basedir) @defer.inlineCallbacks def test_getCommands(self): cmds = yield self.bot.callRemote("getCommands") # just check that 'shell' is present.. 
self.assertTrue('shell' in cmds) @defer.inlineCallbacks def test_getVersion(self): vers = yield self.bot.callRemote("getVersion") self.assertEqual(vers, buildbot_worker.version) @defer.inlineCallbacks def test_getWorkerInfo(self): infodir = os.path.join(self.basedir, "info") os.makedirs(infodir) with open(os.path.join(infodir, "admin"), "w") as f: f.write("testy!") with open(os.path.join(infodir, "foo"), "w") as f: f.write("bar") with open(os.path.join(infodir, "environ"), "w") as f: f.write("something else") info = yield self.bot.callRemote("getWorkerInfo") # remove any os_ fields as they are dependent on the test environment info = {k: v for k, v in info.items() if not k.startswith("os_")} self.assertEqual(info, dict( admin='testy!', foo='bar', environ=os.environ, system=os.name, basedir=self.basedir, worker_commands=self.real_bot.remote_getCommands(), version=self.real_bot.remote_getVersion(), numcpus=multiprocessing.cpu_count())) @defer.inlineCallbacks def test_getWorkerInfo_nodir(self): info = yield self.bot.callRemote("getWorkerInfo") info = {k: v for k, v in info.items() if not k.startswith("os_")} self.assertEqual(set(info.keys()), set( ['environ', 'system', 'numcpus', 'basedir', 'worker_commands', 'version'])) @defer.inlineCallbacks def test_setBuilderList_empty(self): builders = yield self.bot.callRemote("setBuilderList", []) self.assertEqual(builders, {}) @defer.inlineCallbacks def test_setBuilderList_single(self): builders = yield self.bot.callRemote("setBuilderList", [('mybld', 'myblddir')]) self.assertEqual(list(builders), ['mybld']) self.assertTrue( os.path.exists(os.path.join(self.basedir, 'myblddir'))) # note that we test the WorkerForBuilder instance below @defer.inlineCallbacks def test_setBuilderList_updates(self): workerforbuilders = {} builders = yield self.bot.callRemote("setBuilderList", [ ('mybld', 'myblddir')]) self.assertEqual(list(builders), ['mybld']) self.assertTrue( os.path.exists(os.path.join(self.basedir, 'myblddir'))) 
workerforbuilders['my'] = builders['mybld'] builders = yield self.bot.callRemote("setBuilderList", [ ('mybld', 'myblddir'), ('yourbld', 'yourblddir')]) self.assertEqual( sorted(builders.keys()), sorted(['mybld', 'yourbld'])) self.assertTrue( os.path.exists(os.path.join(self.basedir, 'myblddir'))) self.assertTrue( os.path.exists(os.path.join(self.basedir, 'yourblddir'))) # 'my' should still be the same WorkerForBuilder object self.assertEqual( id(workerforbuilders['my']), id(builders['mybld'])) workerforbuilders['your'] = builders['yourbld'] self.assertTrue(repr(workerforbuilders['your']).startswith( "= (3, 5): if sys.platform == 'win32': filename = 'test\\testdir\\test.txt' else: filename = 'test/testdir/test.txt' self.assertEqual( self.get_updates()[0]['files'], [os.path.join(self.basedir, filename)]) else: self.assertEqual( self.get_updates()[0]['files'], []) self.assertIn({'rc': 0}, self.get_updates(), self.builder.show()) class TestListDir(CommandTestMixin, unittest.TestCase): def setUp(self): self.setUpCommand() def tearDown(self): self.tearDownCommand() @defer.inlineCallbacks def test_non_existent(self): self.make_command(fs.ListDir, dict(dir='no-such-dir'), True) yield self.run_command() self.assertIn({'rc': errno.ENOENT}, self.get_updates(), self.builder.show()) @defer.inlineCallbacks def test_dir(self): self.make_command(fs.ListDir, dict( dir='workdir', ), True) workdir = os.path.join(self.basedir, 'workdir') with open(os.path.join(workdir, 'file1'), "w"): pass with open(os.path.join(workdir, 'file2'), "w"): pass yield self.run_command() def any(items): # not a builtin on python-2.4 for i in items: if i: return True return None self.assertIn({'rc': 0}, self.get_updates(), self.builder.show()) self.assertTrue(any([ 'files' in upd and sorted(upd['files']) == ['file1', 'file2'] for upd in self.get_updates()]), self.builder.show()) class TestRemoveFile(CommandTestMixin, unittest.TestCase): def setUp(self): self.setUpCommand() def tearDown(self): 
self.tearDownCommand() @defer.inlineCallbacks def test_simple(self): workdir = os.path.join(self.basedir, 'workdir') self.file1_path = os.path.join(workdir, 'file1') self.make_command(fs.RemoveFile, dict( path=self.file1_path, ), True) with open(os.path.join(workdir, 'file1'), "w"): pass yield self.run_command() self.assertFalse( os.path.exists(self.file1_path)) self.assertIn({'rc': 0}, # this may ignore a 'header' : '..', which is OK self.get_updates(), self.builder.show()) @defer.inlineCallbacks def test_simple_exception(self): workdir = os.path.join(self.basedir, 'workdir') self.file2_path = os.path.join(workdir, 'file2') def fail(src, dest): raise RuntimeError("oh noes") self.make_command(fs.RemoveFile, dict( path=self.file2_path ), True) yield self.run_command() self.assertIn({'rc': 2}, self.get_updates(), self.builder.show()) buildbot-3.4.0/worker/buildbot_worker/test/unit/test_commands_registry.py000066400000000000000000000027421413250514000271300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from twisted.trial import unittest from buildbot_worker.commands import registry from buildbot_worker.commands import shell class Registry(unittest.TestCase): def test_getFactory(self): factory = registry.getFactory('shell') self.assertEqual(factory, shell.WorkerShellCommand) def test_getFactory_KeyError(self): with self.assertRaises(KeyError): registry.getFactory('nosuchcommand') def test_getAllCommandNames(self): self.assertTrue('shell' in registry.getAllCommandNames()) def test_all_commands_exist(self): # if this doesn't raise a KeyError, then we're good for n in registry.getAllCommandNames(): registry.getFactory(n) buildbot-3.4.0/worker/buildbot_worker/test/unit/test_commands_shell.py000066400000000000000000000035531413250514000263700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from twisted.internet import defer from twisted.trial import unittest from buildbot_worker.commands import shell from buildbot_worker.test.fake.runprocess import Expect from buildbot_worker.test.util.command import CommandTestMixin class TestWorkerShellCommand(CommandTestMixin, unittest.TestCase): def setUp(self): self.setUpCommand() def tearDown(self): self.tearDownCommand() @defer.inlineCallbacks def test_simple(self): self.make_command(shell.WorkerShellCommand, dict( command=['echo', 'hello'], workdir='workdir', )) self.patch_runprocess( Expect(['echo', 'hello'], self.basedir_workdir) + {'hdr': 'headers'} + {'stdout': 'hello\n'} + {'rc': 0} + 0, ) yield self.run_command() # note that WorkerShellCommand does not add any extra updates of it own self.assertUpdates( [{'hdr': 'headers'}, {'stdout': 'hello\n'}, {'rc': 0}], self.builder.show()) # TODO: test all functionality that WorkerShellCommand adds atop RunProcess buildbot-3.4.0/worker/buildbot_worker/test/unit/test_commands_transfer.py000066400000000000000000000367141413250514000271120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import io import os import shutil import sys import tarfile from twisted.internet import defer from twisted.internet import reactor from twisted.python import failure from twisted.python import runtime from twisted.trial import unittest from buildbot_worker.commands import transfer from buildbot_worker.test.fake.remote import FakeRemote from buildbot_worker.test.util.command import CommandTestMixin class FakeMasterMethods(object): # a fake to represent any of: # - FileWriter # - FileDirectoryWriter # - FileReader def __init__(self, add_update): self.add_update = add_update self.delay_write = False self.count_writes = False self.keep_data = False self.write_out_of_space_at = None self.delay_read = False self.count_reads = False self.unpack_fail = False self.written = False self.read = False self.data = b'' def remote_write(self, data): if self.write_out_of_space_at is not None: self.write_out_of_space_at -= len(data) if self.write_out_of_space_at <= 0: f = failure.Failure(RuntimeError("out of space")) return defer.fail(f) if self.count_writes: self.add_update('write {0}'.format(len(data))) elif not self.written: self.add_update('write(s)') self.written = True if self.keep_data: self.data += data if self.delay_write: d = defer.Deferred() reactor.callLater(0.01, d.callback, None) return d return None def remote_read(self, length): if self.count_reads: self.add_update('read {0}'.format(length)) elif not self.read: self.add_update('read(s)') self.read = True if not self.data: return '' _slice, self.data = self.data[:length], self.data[length:] if self.delay_read: d = defer.Deferred() reactor.callLater(0.01, d.callback, _slice) return d return _slice def remote_unpack(self): self.add_update('unpack') if self.unpack_fail: return defer.fail(failure.Failure(RuntimeError("out of space"))) return None def remote_utime(self, accessed_modified): 
self.add_update('utime - {0}'.format(accessed_modified[0])) def remote_close(self): self.add_update('close') class TestUploadFile(CommandTestMixin, unittest.TestCase): def setUp(self): self.setUpCommand() self.fakemaster = FakeMasterMethods(self.add_update) # write 180 bytes of data to upload self.datadir = os.path.join(self.basedir, 'workdir') if os.path.exists(self.datadir): shutil.rmtree(self.datadir) os.makedirs(self.datadir) self.datafile = os.path.join(self.datadir, 'data') # note: use of 'wb' here ensures newlines aren't translated on the # upload with open(self.datafile, mode="wb") as f: f.write(b"this is some data\n" * 10) def tearDown(self): self.tearDownCommand() if os.path.exists(self.datadir): shutil.rmtree(self.datadir) @defer.inlineCallbacks def test_simple(self): self.fakemaster.count_writes = True # get actual byte counts self.make_command(transfer.WorkerFileUploadCommand, dict( workdir='workdir', workersrc='data', writer=FakeRemote(self.fakemaster), maxsize=1000, blocksize=64, keepstamp=False, )) yield self.run_command() self.assertUpdates([ {'header': 'sending {0}\n'.format(self.datafile)}, 'write 64', 'write 64', 'write 52', 'close', {'rc': 0} ]) @defer.inlineCallbacks def test_truncated(self): self.fakemaster.count_writes = True # get actual byte counts self.make_command(transfer.WorkerFileUploadCommand, dict( workdir='workdir', workersrc='data', writer=FakeRemote(self.fakemaster), maxsize=100, blocksize=64, keepstamp=False, )) yield self.run_command() self.assertUpdates([ {'header': 'sending {0}\n'.format(self.datafile)}, 'write 64', 'write 36', 'close', {'rc': 1, 'stderr': "Maximum filesize reached, truncating file '{0}'".format(self.datafile)} ]) @defer.inlineCallbacks def test_missing(self): self.make_command(transfer.WorkerFileUploadCommand, dict( workdir='workdir', workersrc='data-nosuch', writer=FakeRemote(self.fakemaster), maxsize=100, blocksize=64, keepstamp=False, )) yield self.run_command() df = self.datafile + "-nosuch" 
self.assertUpdates([ {'header': 'sending {0}\n'.format(df)}, 'close', {'rc': 1, 'stderr': "Cannot open file '{0}' for upload".format(df)} ]) @defer.inlineCallbacks def test_out_of_space(self): self.fakemaster.write_out_of_space_at = 70 self.fakemaster.count_writes = True # get actual byte counts self.make_command(transfer.WorkerFileUploadCommand, dict( workdir='workdir', workersrc='data', writer=FakeRemote(self.fakemaster), maxsize=1000, blocksize=64, keepstamp=False, )) yield self.assertFailure(self.run_command(), RuntimeError) self.assertUpdates([ {'header': 'sending {0}\n'.format(self.datafile)}, 'write 64', 'close', {'rc': 1} ]) @defer.inlineCallbacks def test_interrupted(self): self.fakemaster.delay_write = True # write very slowly self.make_command(transfer.WorkerFileUploadCommand, dict( workdir='workdir', workersrc='data', writer=FakeRemote(self.fakemaster), maxsize=100, blocksize=2, keepstamp=False, )) d = self.run_command() # wait a jiffy.. interrupt_d = defer.Deferred() reactor.callLater(0.01, interrupt_d.callback, None) # and then interrupt the step def do_interrupt(_): return self.cmd.interrupt() interrupt_d.addCallback(do_interrupt) yield defer.DeferredList([d, interrupt_d]) self.assertUpdates([ {'header': 'sending {0}\n'.format(self.datafile)}, 'write(s)', 'close', {'rc': 1} ]) @defer.inlineCallbacks def test_timestamp(self): self.fakemaster.count_writes = True # get actual byte counts timestamp = (os.path.getatime(self.datafile), os.path.getmtime(self.datafile)) self.make_command(transfer.WorkerFileUploadCommand, dict( workdir='workdir', workersrc='data', writer=FakeRemote(self.fakemaster), maxsize=1000, blocksize=64, keepstamp=True, )) yield self.run_command() self.assertUpdates([ {'header': 'sending {0}\n'.format(self.datafile)}, 'write 64', 'write 64', 'write 52', 'close', 'utime - {0}'.format(timestamp[0]), {'rc': 0} ]) class TestWorkerDirectoryUpload(CommandTestMixin, unittest.TestCase): def setUp(self): self.setUpCommand() self.fakemaster = 
FakeMasterMethods(self.add_update) # write a directory to upload self.datadir = os.path.join(self.basedir, 'workdir', 'data') if os.path.exists(self.datadir): shutil.rmtree(self.datadir) os.makedirs(self.datadir) with open(os.path.join(self.datadir, "aa"), mode="wb") as f: f.write(b"lots of a" * 100) with open(os.path.join(self.datadir, "bb"), mode="wb") as f: f.write(b"and a little b" * 17) def tearDown(self): self.tearDownCommand() if os.path.exists(self.datadir): shutil.rmtree(self.datadir) @defer.inlineCallbacks def test_simple(self, compress=None): self.fakemaster.keep_data = True self.make_command(transfer.WorkerDirectoryUploadCommand, dict( workdir='workdir', workersrc='data', writer=FakeRemote(self.fakemaster), maxsize=None, blocksize=512, compress=compress, )) yield self.run_command() self.assertUpdates([ {'header': 'sending {0}\n'.format(self.datadir)}, 'write(s)', 'unpack', # note no 'close" {'rc': 0} ]) f = io.BytesIO(self.fakemaster.data) a = tarfile.open(fileobj=f, name='check.tar', mode="r") exp_names = ['.', 'aa', 'bb'] got_names = [n.rstrip('/') for n in a.getnames()] # py27 uses '' instead of '.' got_names = sorted([n or '.' 
for n in got_names]) self.assertEqual(got_names, exp_names, "expected archive contents") a.close() f.close() # try it again with bz2 and gzip def test_simple_bz2(self): return self.test_simple('bz2') def test_simple_gz(self): return self.test_simple('gz') # except bz2 can't operate in stream mode on py24 if sys.version_info[:2] <= (2, 4): test_simple_bz2.skip = "bz2 stream decompression not supported on Python-2.4" @defer.inlineCallbacks def test_out_of_space_unpack(self): self.fakemaster.keep_data = True self.fakemaster.unpack_fail = True self.make_command(transfer.WorkerDirectoryUploadCommand, dict( workdir='workdir', workersrc='data', writer=FakeRemote(self.fakemaster), maxsize=None, blocksize=512, compress=None )) yield self.assertFailure(self.run_command(), RuntimeError) self.assertUpdates([ {'header': 'sending {0}\n'.format(self.datadir)}, 'write(s)', 'unpack', {'rc': 1} ]) class TestDownloadFile(CommandTestMixin, unittest.TestCase): def setUp(self): self.setUpCommand() self.fakemaster = FakeMasterMethods(self.add_update) # the command will write to the basedir, so make sure it exists if os.path.exists(self.basedir): shutil.rmtree(self.basedir) os.makedirs(self.basedir) def tearDown(self): self.tearDownCommand() if os.path.exists(self.basedir): shutil.rmtree(self.basedir) @defer.inlineCallbacks def test_simple(self): self.fakemaster.count_reads = True # get actual byte counts self.fakemaster.data = test_data = b'1234' * 13 assert(len(self.fakemaster.data) == 52) self.make_command(transfer.WorkerFileDownloadCommand, dict( workdir='.', workerdest='data', reader=FakeRemote(self.fakemaster), maxsize=None, blocksize=32, mode=0o777, )) yield self.run_command() self.assertUpdates([ 'read 32', 'read 32', 'read 32', 'close', {'rc': 0} ]) datafile = os.path.join(self.basedir, 'data') self.assertTrue(os.path.exists(datafile)) with open(datafile, mode="rb") as f: datafileContent = f.read() self.assertEqual(datafileContent, test_data) if runtime.platformType != 'win32': 
self.assertEqual(os.stat(datafile).st_mode & 0o777, 0o777) @defer.inlineCallbacks def test_mkdir(self): self.fakemaster.data = test_data = b'hi' self.make_command(transfer.WorkerFileDownloadCommand, dict( workdir='workdir', workerdest=os.path.join('subdir', 'data'), reader=FakeRemote(self.fakemaster), maxsize=None, blocksize=32, mode=0o777, )) yield self.run_command() self.assertUpdates([ 'read(s)', 'close', {'rc': 0} ]) datafile = os.path.join(self.basedir, 'workdir', 'subdir', 'data') self.assertTrue(os.path.exists(datafile)) with open(datafile, mode="rb") as f: datafileContent = f.read() self.assertEqual(datafileContent, test_data) @defer.inlineCallbacks def test_failure(self): self.fakemaster.data = 'hi' os.makedirs(os.path.join(self.basedir, 'dir')) self.make_command(transfer.WorkerFileDownloadCommand, dict( workdir='.', workerdest='dir', # but that's a directory! reader=FakeRemote(self.fakemaster), maxsize=None, blocksize=32, mode=0o777, )) yield self.run_command() self.assertUpdates([ 'close', {'rc': 1, 'stderr': "Cannot open file '{0}' for download".format( os.path.join(self.basedir, '.', 'dir'))} ]) @defer.inlineCallbacks def test_truncated(self): self.fakemaster.data = test_data = b'tenchars--' * 10 self.make_command(transfer.WorkerFileDownloadCommand, dict( workdir='.', workerdest='data', reader=FakeRemote(self.fakemaster), maxsize=50, blocksize=32, mode=0o777, )) yield self.run_command() self.assertUpdates([ 'read(s)', 'close', {'rc': 1, 'stderr': "Maximum filesize reached, truncating file '{0}'".format( os.path.join(self.basedir, '.', 'data'))} ]) datafile = os.path.join(self.basedir, 'data') self.assertTrue(os.path.exists(datafile)) with open(datafile, mode="rb") as f: data = f.read() self.assertEqual(data, test_data[:50]) @defer.inlineCallbacks def test_interrupted(self): self.fakemaster.data = b'tenchars--' * 100 # 1k self.fakemaster.delay_read = True # read very slowly self.make_command(transfer.WorkerFileDownloadCommand, dict( workdir='.', 
workerdest='data', reader=FakeRemote(self.fakemaster), maxsize=100, blocksize=2, mode=0o777, )) d = self.run_command() # wait a jiffy.. interrupt_d = defer.Deferred() reactor.callLater(0.01, interrupt_d.callback, None) # and then interrupt the step def do_interrupt(_): return self.cmd.interrupt() interrupt_d.addCallback(do_interrupt) yield defer.DeferredList([d, interrupt_d]) self.assertUpdates([ 'read(s)', 'close', {'rc': 1} ]) buildbot-3.4.0/worker/buildbot_worker/test/unit/test_commands_utils.py000066400000000000000000000123171413250514000264170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import shutil import sys import twisted.python.procutils from twisted.python import runtime from twisted.trial import unittest from buildbot_worker.commands import utils class GetCommand(unittest.TestCase): def setUp(self): # monkey-patch 'which' to return something appropriate self.which_results = {} def which(arg): return self.which_results.get(arg, []) self.patch(twisted.python.procutils, 'which', which) # note that utils.py currently imports which by name, so we # patch it there, too self.patch(utils, 'which', which) def set_which_results(self, results): self.which_results = results def test_getCommand_empty(self): self.set_which_results({ 'xeyes': [], }) with self.assertRaises(RuntimeError): utils.getCommand('xeyes') def test_getCommand_single(self): self.set_which_results({ 'xeyes': ['/usr/bin/xeyes'], }) self.assertEqual(utils.getCommand('xeyes'), '/usr/bin/xeyes') def test_getCommand_multi(self): self.set_which_results({ 'xeyes': ['/usr/bin/xeyes', '/usr/X11/bin/xeyes'], }) self.assertEqual(utils.getCommand('xeyes'), '/usr/bin/xeyes') def test_getCommand_single_exe(self): self.set_which_results({ 'xeyes': ['/usr/bin/xeyes'], # it should not select this option, since only one matched # to begin with 'xeyes.exe': [r'c:\program files\xeyes.exe'], }) self.assertEqual(utils.getCommand('xeyes'), '/usr/bin/xeyes') def test_getCommand_multi_exe(self): self.set_which_results({ 'xeyes': [r'c:\program files\xeyes.com', r'c:\program files\xeyes.exe'], 'xeyes.exe': [r'c:\program files\xeyes.exe'], }) # this one will work out differently depending on platform.. 
if runtime.platformType == 'win32': self.assertEqual( utils.getCommand('xeyes'), r'c:\program files\xeyes.exe') else: self.assertEqual( utils.getCommand('xeyes'), r'c:\program files\xeyes.com') class RmdirRecursive(unittest.TestCase): # this is more complicated than you'd think because Twisted doesn't # rmdir its test directory very well, either.. def setUp(self): self.target = 'testdir' try: if os.path.exists(self.target): shutil.rmtree(self.target) except Exception: # this test will probably fail anyway e = sys.exc_info()[0] raise unittest.SkipTest("could not clean before test: {0}".format(e)) # fill it with some files os.mkdir(os.path.join(self.target)) with open(os.path.join(self.target, "a"), "w"): pass os.mkdir(os.path.join(self.target, "d")) with open(os.path.join(self.target, "d", "a"), "w"): pass os.mkdir(os.path.join(self.target, "d", "d")) with open(os.path.join(self.target, "d", "d", "a"), "w"): pass def tearDown(self): try: if os.path.exists(self.target): shutil.rmtree(self.target) except Exception: print( "\n(target directory was not removed by test, and cleanup failed too)\n") raise def test_rmdirRecursive_easy(self): utils.rmdirRecursive(self.target) self.assertFalse(os.path.exists(self.target)) def test_rmdirRecursive_symlink(self): # this was intended as a regression test for #792, but doesn't seem # to trigger it. It can't hurt to check it, all the same. if runtime.platformType == 'win32': raise unittest.SkipTest("no symlinks on this platform") os.mkdir("noperms") with open("noperms/x", "w"): pass os.chmod("noperms/x", 0) try: os.symlink("../noperms", os.path.join(self.target, "link")) utils.rmdirRecursive(self.target) # that shouldn't delete the target of the symlink self.assertTrue(os.path.exists("noperms")) finally: # even Twisted can't clean this up very well, so try hard to # clean it up ourselves.. 
os.chmod("noperms/x", 0o777) os.unlink("noperms/x") os.rmdir("noperms") self.assertFalse(os.path.exists(self.target)) buildbot-3.4.0/worker/buildbot_worker/test/unit/test_runprocess.py000066400000000000000000000771351413250514000256120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import re import signal import sys import time from mock import Mock from twisted.internet import defer from twisted.internet import reactor from twisted.internet import task from twisted.python import log from twisted.python import runtime from twisted.python import util from twisted.trial import unittest from buildbot_worker import runprocess from buildbot_worker import util as bsutil from buildbot_worker.exceptions import AbandonChain from buildbot_worker.test.fake.workerforbuilder import FakeWorkerForBuilder from buildbot_worker.test.util import compat from buildbot_worker.test.util.misc import BasedirMixin from buildbot_worker.test.util.misc import nl def catCommand(): return [sys.executable, '-c', 'import sys; sys.stdout.write(sys.stdin.read())'] def stdoutCommand(output): return [sys.executable, '-c', 'import sys; sys.stdout.write("{0}\\n")'.format(output)] def stderrCommand(output): return 
[sys.executable, '-c', 'import sys; sys.stderr.write("{0}\\n")'.format(output)] def sleepCommand(dur): return [sys.executable, '-c', 'import time; time.sleep({0})'.format(dur)] def scriptCommand(function, *args): runprocess_scripts = util.sibpath(__file__, 'runprocess-scripts.py') return [sys.executable, runprocess_scripts, function] + list(args) def printArgsCommand(): return [sys.executable, '-c', 'import sys; sys.stdout.write(repr(sys.argv[1:]))'] # windows returns rc 1, because exit status cannot indicate "signalled"; # posix returns rc -1 for "signalled" FATAL_RC = -1 if runtime.platformType == 'win32': FATAL_RC = 1 # We would like to see debugging output in the test.log runprocess.RunProcessPP.debug = True class TestRunProcess(BasedirMixin, unittest.TestCase): def setUp(self): self.setUpBasedir() def tearDown(self): self.tearDownBasedir() def testCommandEncoding(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, u'abcd', self.basedir) self.assertIsInstance(s.command, bytes) self.assertIsInstance(s.fake_command, bytes) def testCommandEncodingList(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, [u'abcd', b'efg'], self.basedir) self.assertIsInstance(s.command[0], bytes) self.assertIsInstance(s.fake_command[0], bytes) def testCommandEncodingObfuscated(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, [bsutil.Obfuscated(u'abcd', u'ABCD')], self.basedir) self.assertIsInstance(s.command[0], bytes) self.assertIsInstance(s.fake_command[0], bytes) @defer.inlineCallbacks def testStart(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir) yield s.start() self.assertTrue({'stdout': nl('hello\n')} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def testNoStdout(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess( b, stdoutCommand('hello'), self.basedir, sendStdout=False) 
yield s.start() self.failIf({'stdout': nl('hello\n')} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def testKeepStdout(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess( b, stdoutCommand('hello'), self.basedir, keepStdout=True) yield s.start() self.assertTrue({'stdout': nl('hello\n')} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) self.assertEqual(s.stdout, nl('hello\n')) @defer.inlineCallbacks def testStderr(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stderrCommand("hello"), self.basedir) yield s.start() self.failIf({'stderr': nl('hello\n')} not in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def testNoStderr(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess( b, stderrCommand("hello"), self.basedir, sendStderr=False) yield s.start() self.failIf({'stderr': nl('hello\n')} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def test_incrementalDecoder(self): b = FakeWorkerForBuilder(self.basedir) b.unicode_encoding = "utf-8" s = runprocess.RunProcess( b, stderrCommand("hello"), self.basedir, sendStderr=True) pp = runprocess.RunProcessPP(s) # u"\N{SNOWMAN} when encoded to utf-8 bytes is b"\xe2\x98\x83" pp.outReceived(b"\xe2") pp.outReceived(b"\x98\x83") pp.errReceived(b"\xe2") pp.errReceived(b"\x98\x83") yield s.start() self.assertTrue({'stderr': u"\N{SNOWMAN}"} in b.updates) self.assertTrue({'stdout': u"\N{SNOWMAN}"} in b.updates) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def testInvalidUTF8(self): b = FakeWorkerForBuilder(self.basedir) b.unicode_encoding = "utf-8" s = runprocess.RunProcess( b, stderrCommand("hello"), self.basedir, sendStderr=True) pp = runprocess.RunProcessPP(s) INVALID_UTF8 = b"\xff" with self.assertRaises(UnicodeDecodeError): INVALID_UTF8.decode('utf-8') 
pp.outReceived(INVALID_UTF8) yield s.start() stdout = [up['stdout'] for up in b.updates if 'stdout' in up][0] # On Python < 2.7 bytes is used, on Python >= 2.7 unicode self.assertIn(stdout, (b'\xef\xbf\xbd', u'\ufffd')) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def testKeepStderr(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess( b, stderrCommand("hello"), self.basedir, keepStderr=True) yield s.start() self.assertTrue({'stderr': nl('hello\n')} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) self.assertEqual(s.stderr, nl('hello\n')) @defer.inlineCallbacks def testStringCommand(self): b = FakeWorkerForBuilder(self.basedir) # careful! This command must execute the same on windows and UNIX s = runprocess.RunProcess(b, 'echo hello', self.basedir) yield s.start() self.assertTrue({'stdout': nl('hello\n')} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) def testObfuscatedCommand(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, [('obfuscated', 'abcd', 'ABCD')], self.basedir) self.assertEqual(s.command, [b'abcd']) self.assertEqual(s.fake_command, [b'ABCD']) @defer.inlineCallbacks def testMultiWordStringCommand(self): b = FakeWorkerForBuilder(self.basedir) # careful! 
This command must execute the same on windows and UNIX s = runprocess.RunProcess(b, 'echo Happy Days and Jubilation', self.basedir) # no quoting occurs exp = nl('Happy Days and Jubilation\n') yield s.start() self.assertTrue({'stdout': exp} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def testInitialStdinUnicode(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess( b, catCommand(), self.basedir, initialStdin=u'hello') yield s.start() self.assertTrue({'stdout': nl('hello')} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def testMultiWordStringCommandQuotes(self): b = FakeWorkerForBuilder(self.basedir) # careful! This command must execute the same on windows and UNIX s = runprocess.RunProcess(b, 'echo "Happy Days and Jubilation"', self.basedir) if runtime.platformType == "win32": # echo doesn't parse out the quotes, so they come through in the # output exp = nl('"Happy Days and Jubilation"\n') else: exp = nl('Happy Days and Jubilation\n') yield s.start() self.assertTrue({'stdout': exp} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def testTrickyArguments(self): # make sure non-trivial arguments are passed verbatim b = FakeWorkerForBuilder(self.basedir) args = [ 'Happy Days and Jubilation', # spaces r'''!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~''', # special characters '%PATH%', # Windows variable expansions # Expansions get an argument of their own, because the Windows # shell doesn't treat % as special unless it surrounds a # variable name. 
] s = runprocess.RunProcess(b, printArgsCommand() + args, self.basedir) yield s.start() self.assertTrue({'stdout': nl(repr(args))} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks @compat.skipUnlessPlatformIs("win32") def testPipeString(self): b = FakeWorkerForBuilder(self.basedir) # this is highly contrived, but it proves the point. cmd = sys.executable + \ ' -c "import sys; sys.stdout.write(\'b\\na\\n\')" | sort' s = runprocess.RunProcess(b, cmd, self.basedir) yield s.start() self.assertTrue({'stdout': nl('a\nb\n')} in b.updates, b.show()) self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def testCommandTimeout(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, sleepCommand(10), self.basedir, timeout=5) clock = task.Clock() s._reactor = clock d = s.start() clock.advance(6) yield d self.assertTrue( {'stdout': nl('hello\n')} not in b.updates, b.show()) self.assertTrue({'rc': FATAL_RC} in b.updates, b.show()) @defer.inlineCallbacks def testCommandMaxTime(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, sleepCommand(10), self.basedir, maxTime=5) clock = task.Clock() s._reactor = clock d = s.start() clock.advance(6) # should knock out maxTime yield d self.assertTrue( {'stdout': nl('hello\n')} not in b.updates, b.show()) self.assertTrue({'rc': FATAL_RC} in b.updates, b.show()) @compat.skipUnlessPlatformIs("posix") @defer.inlineCallbacks def test_stdin_closed(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, scriptCommand('assert_stdin_closed'), self.basedir, # if usePTY=True, stdin is never closed usePTY=False, logEnviron=False) yield s.start() self.assertTrue({'rc': 0} in b.updates, b.show()) @defer.inlineCallbacks def test_startCommand_exception(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, ['whatever'], self.basedir) # set up to cause an exception in _startCommand def _startCommand(*args, 
**kwargs): raise RuntimeError('error message') s._startCommand = _startCommand try: yield s.start() except AbandonChain: pass stderr = [] # Here we're checking that the exception starting up the command # actually gets propagated back to the master in stderr. for u in b.updates: if 'stderr' in u: stderr.append(u['stderr']) stderr = ''.join(stderr) self.assertTrue(stderr.startswith('error in RunProcess._startCommand (error message)')) yield self.flushLoggedErrors() @defer.inlineCallbacks def testLogEnviron(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir, environ={"FOO": "BAR"}) yield s.start() headers = "".join([list(update.values())[0] for update in b.updates if list(update) == ["header"]]) self.assertTrue("FOO=BAR" in headers, "got:\n" + headers) @defer.inlineCallbacks def testNoLogEnviron(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir, environ={"FOO": "BAR"}, logEnviron=False) yield s.start() headers = "".join([list(update.values())[0] for update in b.updates if list(update) == ["header"]]) self.assertTrue("FOO=BAR" not in headers, "got:\n" + headers) @defer.inlineCallbacks def testEnvironExpandVar(self): b = FakeWorkerForBuilder(self.basedir) environ = {"EXPND": "-${PATH}-", "DOESNT_EXPAND": "-${---}-", "DOESNT_FIND": "-${DOESNT_EXISTS}-"} s = runprocess.RunProcess( b, stdoutCommand('hello'), self.basedir, environ=environ) yield s.start() headers = "".join([list(update.values())[0] for update in b.updates if list(update) == ["header"]]) self.assertTrue("EXPND=-$" not in headers, "got:\n" + headers) self.assertTrue("DOESNT_FIND=--" in headers, "got:\n" + headers) self.assertTrue( "DOESNT_EXPAND=-${---}-" in headers, "got:\n" + headers) @defer.inlineCallbacks def testUnsetEnvironVar(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir, environ={"PATH": None}) yield s.start() 
headers = "".join([list(update.values())[0] for update in b.updates if list(update) == ["header"]]) self.assertFalse( re.match('\bPATH=', headers), "got:\n" + headers) @defer.inlineCallbacks def testEnvironPythonPath(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir, environ={"PYTHONPATH": 'a'}) yield s.start() headers = "".join([list(update.values())[0] for update in b.updates if list(update) == ["header"]]) self.assertFalse(re.match('\bPYTHONPATH=a{0}'.format(os.pathsep), headers), "got:\n" + headers) @defer.inlineCallbacks def testEnvironArray(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir, environ={"FOO": ['a', 'b']}) yield s.start() headers = "".join([list(update.values())[0] for update in b.updates if list(update) == ["header"]]) self.assertFalse(re.match('\bFOO=a{0}b\b'.format(os.pathsep), headers), "got:\n" + headers) def testEnvironInt(self): b = FakeWorkerForBuilder(self.basedir) with self.assertRaises(RuntimeError): runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir, environ={"BUILD_NUMBER": 13}) def _test_spawnAsBatch(self, cmd, comspec): def spawnProcess(processProtocol, executable, args=(), env=None, path=None, uid=None, gid=None, usePTY=False, childFDs=None): self.assertTrue(args[0].lower().endswith("cmd.exe"), "{0} is not cmd.exe".format(args[0])) self.patch(runprocess.reactor, "spawnProcess", spawnProcess) tempEnviron = os.environ.copy() if 'COMSPEC' not in tempEnviron: tempEnviron['COMSPEC'] = comspec self.patch(os, "environ", tempEnviron) b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, cmd, self.basedir) s.pp = runprocess.RunProcessPP(s) s.deferred = defer.Deferred() d = s._spawnAsBatch(s.pp, s.command, "args", tempEnviron, "path", False) return d def test_spawnAsBatchCommandString(self): return self._test_spawnAsBatch("dir c:/", "cmd.exe") def test_spawnAsBatchCommandList(self): return 
self._test_spawnAsBatch(stdoutCommand('hello'), "cmd.exe /c") def test_spawnAsBatchCommandWithNonAscii(self): return self._test_spawnAsBatch(u"echo \u6211", "cmd.exe") def test_spawnAsBatchCommandListWithNonAscii(self): return self._test_spawnAsBatch(['echo', u"\u6211"], "cmd.exe /c") class TestPOSIXKilling(BasedirMixin, unittest.TestCase): if runtime.platformType != "posix": skip = "not a POSIX platform" def setUp(self): self.pidfiles = [] self.setUpBasedir() def tearDown(self): # make sure all of the subprocesses are dead for pidfile in self.pidfiles: if not os.path.exists(pidfile): continue with open(pidfile) as f: pid = f.read() if not pid: return pid = int(pid) try: os.kill(pid, signal.SIGKILL) except OSError: pass # and clean up leftover pidfiles for pidfile in self.pidfiles: if os.path.exists(pidfile): os.unlink(pidfile) self.tearDownBasedir() def newPidfile(self): pidfile = os.path.abspath("test-{0}.pid".format(len(self.pidfiles))) if os.path.exists(pidfile): os.unlink(pidfile) self.pidfiles.append(pidfile) return pidfile def waitForPidfile(self, pidfile): # wait for a pidfile, and return the pid via a Deferred until = time.time() + 10 d = defer.Deferred() def poll(): if reactor.seconds() > until: d.errback(RuntimeError( "pidfile {0} never appeared".format(pidfile))) return if os.path.exists(pidfile): try: with open(pidfile) as f: pid = int(f.read()) except (IOError, TypeError, ValueError): pid = None if pid is not None: d.callback(pid) return reactor.callLater(0.01, poll) poll() # poll right away return d def assertAlive(self, pid): try: os.kill(pid, 0) except OSError: self.fail("pid {0} still alive".format(pid)) def assertDead(self, pid, timeout=5): log.msg("checking pid {0!r}".format(pid)) def check(): try: os.kill(pid, 0) except OSError: return True # dead return False # alive # check immediately if check(): return # poll every 100'th of a second; this allows us to test for # processes that have been killed, but where the signal hasn't # been delivered 
yet until = time.time() + timeout while time.time() < until: time.sleep(0.01) if check(): return self.fail("pid {0} still alive after {1}s".format(pid, timeout)) # tests def test_simple_interruptSignal(self): return self.test_simple('TERM') def test_simple(self, interruptSignal=None): # test a simple process that just sleeps waiting to die pidfile = self.newPidfile() self.pid = None b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, scriptCommand( 'write_pidfile_and_sleep', pidfile), self.basedir) if interruptSignal is not None: s.interruptSignal = interruptSignal runproc_d = s.start() pidfile_d = self.waitForPidfile(pidfile) def check_alive(pid): self.pid = pid # for use in check_dead # test that the process is still alive self.assertAlive(pid) # and tell the RunProcess object to kill it s.kill("diaf") pidfile_d.addCallback(check_alive) def check_dead(_): self.assertDead(self.pid) runproc_d.addCallback(check_dead) return defer.gatherResults([pidfile_d, runproc_d]) def test_sigterm(self, interruptSignal=None): # Tests that the process will receive SIGTERM if sigtermTimeout # is not None pidfile = self.newPidfile() self.pid = None b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, scriptCommand( 'write_pidfile_and_sleep', pidfile), self.basedir, sigtermTime=1) runproc_d = s.start() pidfile_d = self.waitForPidfile(pidfile) self.receivedSIGTERM = False def check_alive(pid): # Create a mock process that will check if we receive SIGTERM mock_process = Mock(wraps=s.process) mock_process.pgid = None # Skips over group SIGTERM mock_process.pid = pid process = s.process def _mock_signalProcess(sig): if sig == "TERM": self.receivedSIGTERM = True process.signalProcess(sig) mock_process.signalProcess = _mock_signalProcess s.process = mock_process self.pid = pid # for use in check_dead # test that the process is still alive self.assertAlive(pid) # and tell the RunProcess object to kill it s.kill("diaf") pidfile_d.addCallback(check_alive) def 
check_dead(_): self.assertEqual(self.receivedSIGTERM, True) self.assertDead(self.pid) runproc_d.addCallback(check_dead) return defer.gatherResults([pidfile_d, runproc_d]) def test_pgroup_usePTY(self): return self.do_test_pgroup(usePTY=True) def test_pgroup_no_usePTY(self): return self.do_test_pgroup(usePTY=False) def test_pgroup_no_usePTY_no_pgroup(self): # note that this configuration is not *used*, but that it is # still supported, and correctly fails to kill the child process return self.do_test_pgroup(usePTY=False, useProcGroup=False, expectChildSurvival=True) @defer.inlineCallbacks def do_test_pgroup(self, usePTY, useProcGroup=True, expectChildSurvival=False): # test that a process group gets killed parent_pidfile = self.newPidfile() self.parent_pid = None child_pidfile = self.newPidfile() self.child_pid = None b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, scriptCommand( 'spawn_child', parent_pidfile, child_pidfile), self.basedir, usePTY=usePTY, useProcGroup=useProcGroup) runproc_d = s.start() # wait for both processes to start up, then call s.kill parent_pidfile_d = self.waitForPidfile(parent_pidfile) child_pidfile_d = self.waitForPidfile(child_pidfile) pidfiles_d = defer.gatherResults([parent_pidfile_d, child_pidfile_d]) def got_pids(pids): self.parent_pid, self.child_pid = pids pidfiles_d.addCallback(got_pids) def kill(_): s.kill("diaf") pidfiles_d.addCallback(kill) # check that both processes are dead after RunProcess is done yield defer.gatherResults([pidfiles_d, runproc_d]) self.assertDead(self.parent_pid) if expectChildSurvival: self.assertAlive(self.child_pid) else: self.assertDead(self.child_pid) def test_double_fork_usePTY(self): return self.do_test_double_fork(usePTY=True) def test_double_fork_no_usePTY(self): return self.do_test_double_fork(usePTY=False) def test_double_fork_no_usePTY_no_pgroup(self): # note that this configuration is not *used*, but that it is # still supported, and correctly fails to kill the child process 
return self.do_test_double_fork(usePTY=False, useProcGroup=False, expectChildSurvival=True) @defer.inlineCallbacks def do_test_double_fork(self, usePTY, useProcGroup=True, expectChildSurvival=False): # when a spawned process spawns another process, and then dies itself # (either intentionally or accidentally), we should be able to clean up # the child. parent_pidfile = self.newPidfile() self.parent_pid = None child_pidfile = self.newPidfile() self.child_pid = None b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, scriptCommand( 'double_fork', parent_pidfile, child_pidfile), self.basedir, usePTY=usePTY, useProcGroup=useProcGroup) runproc_d = s.start() # wait for both processes to start up, then call s.kill parent_pidfile_d = self.waitForPidfile(parent_pidfile) child_pidfile_d = self.waitForPidfile(child_pidfile) pidfiles_d = defer.gatherResults([parent_pidfile_d, child_pidfile_d]) def got_pids(pids): self.parent_pid, self.child_pid = pids pidfiles_d.addCallback(got_pids) def kill(_): s.kill("diaf") pidfiles_d.addCallback(kill) # check that both processes are dead after RunProcess is done yield defer.gatherResults([pidfiles_d, runproc_d]) self.assertDead(self.parent_pid) if expectChildSurvival: self.assertAlive(self.child_pid) else: self.assertDead(self.child_pid) class TestLogging(BasedirMixin, unittest.TestCase): def setUp(self): self.setUpBasedir() def tearDown(self): self.tearDownBasedir() def testSendStatus(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir) s.sendStatus({'stdout': nl('hello\n')}) self.assertEqual(b.updates, [{'stdout': nl('hello\n')}], b.show()) def testSendBuffered(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir) s._addToBuffers('stdout', 'hello ') s._addToBuffers('stdout', 'world') s._sendBuffers() self.assertEqual(b.updates, [{'stdout': 'hello world'}], b.show()) def testSendBufferedInterleaved(self): 
b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir) s._addToBuffers('stdout', 'hello ') s._addToBuffers('stderr', 'DIEEEEEEE') s._addToBuffers('stdout', 'world') s._sendBuffers() self.assertEqual(b.updates, [ {'stdout': 'hello '}, {'stderr': 'DIEEEEEEE'}, {'stdout': 'world'}, ]) def testSendChunked(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir) data = "x" * int(runprocess.RunProcess.CHUNK_LIMIT * 3 / 2) s._addToBuffers('stdout', data) s._sendBuffers() self.assertEqual(len(b.updates), 2) def testSendNotimeout(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir) data = "x" * (runprocess.RunProcess.BUFFER_SIZE + 1) s._addToBuffers('stdout', data) self.assertEqual(len(b.updates), 1) def testSendLog(self): b = FakeWorkerForBuilder(self.basedir) s = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir) s._addToBuffers(('log', 'stdout'), 'hello ') s._sendBuffers() self.assertEqual(b.updates, [ {'log': ('stdout', 'hello ')}, ]) class TestLogFileWatcher(BasedirMixin, unittest.TestCase): def setUp(self): self.setUpBasedir() def tearDown(self): self.tearDownBasedir() def makeRP(self): b = FakeWorkerForBuilder(self.basedir) rp = runprocess.RunProcess(b, stdoutCommand('hello'), self.basedir) return rp def test_statFile_missing(self): rp = self.makeRP() test_filename = 'test_runprocess_test_statFile_missing.log' if os.path.exists(test_filename): os.remove(test_filename) lf = runprocess.LogFileWatcher(rp, 'test', test_filename, False) self.assertFalse(lf.statFile(), "{} doesn't exist".format(test_filename)) def test_statFile_exists(self): rp = self.makeRP() test_filename = 'test_runprocess_test_statFile_exists.log' try: with open(test_filename, 'w') as f: f.write('hi') lf = runprocess.LogFileWatcher(rp, 'test', test_filename, False) st = lf.statFile() self.assertEqual( st and st[2], 2, 
"statfile.log exists and size is correct") finally: os.remove(test_filename) def test_invalid_utf8(self): # create the log file watcher first rp = self.makeRP() test_filename = 'test_runprocess_test_invalid_utf8.log' try: lf = runprocess.LogFileWatcher(rp, 'test', test_filename, follow=False, poll=False) # now write to the log file INVALID_UTF8 = b'before\xffafter' with open(test_filename, 'wb') as f: f.write(INVALID_UTF8) # the watcher picks up the changed log lf.poll() # flush she buffer rp._sendBuffers() # the log file content was captured and the invalid byte replaced with \ufffd (the # replacement character, often a black diamond with a white question mark) REPLACED = u'before\ufffdafter' self.assertEqual(rp.builder.updates, [{'log': ('test', REPLACED)}]) finally: lf.stop() os.remove(f.name) buildbot-3.4.0/worker/buildbot_worker/test/unit/test_scripts_base.py000066400000000000000000000101441413250514000260530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import os import sys from twisted.trial import unittest from buildbot_worker.compat import NativeStringIO from buildbot_worker.scripts import base from buildbot_worker.test.util import misc class TestIsWorkerDir(misc.FileIOMixin, misc.StdoutAssertionsMixin, unittest.TestCase): """Test buildbot_worker.scripts.base.isWorkerDir()""" def setUp(self): # capture output to stdout self.mocked_stdout = NativeStringIO() self.patch(sys, "stdout", self.mocked_stdout) # generate OS specific relative path to buildbot.tac inside basedir self.tac_file_path = os.path.join("testdir", "buildbot.tac") def assertReadErrorMessage(self, strerror): expected_message = ("error reading '{0}': {1}\n" "invalid worker directory 'testdir'\n".format( self.tac_file_path, strerror)) self.assertEqual(self.mocked_stdout.getvalue(), expected_message, "unexpected error message on stdout") def test_open_error(self): """Test that open() errors are handled.""" # patch open() to raise IOError self.setUpOpenError(1, "open-error", "dummy") # check that isWorkerDir() flags directory as invalid self.assertFalse(base.isWorkerDir("testdir")) # check that correct error message was printed to stdout self.assertReadErrorMessage("open-error") # check that open() was called with correct path self.open.assert_called_once_with(self.tac_file_path) def test_read_error(self): """Test that read() errors on buildbot.tac file are handled.""" # patch open() to return file object that raises IOError on read() self.setUpReadError(1, "read-error", "dummy") # check that isWorkerDir() flags directory as invalid self.assertFalse(base.isWorkerDir("testdir")) # check that correct error message was printed to stdout self.assertReadErrorMessage("read-error") # check that open() was called with correct path self.open.assert_called_once_with(self.tac_file_path) def test_unexpected_tac_contents(self): """Test that unexpected 
contents in buildbot.tac is handled.""" # patch open() to return file with unexpected contents self.setUpOpen("dummy-contents") # check that isWorkerDir() flags directory as invalid self.assertFalse(base.isWorkerDir("testdir")) # check that correct error message was printed to stdout self.assertEqual(self.mocked_stdout.getvalue(), "unexpected content in '{0}'\n".format(self.tac_file_path) + "invalid worker directory 'testdir'\n", "unexpected error message on stdout") # check that open() was called with correct path self.open.assert_called_once_with(self.tac_file_path) def test_workerdir_good(self): """Test checking valid worker directory.""" # patch open() to return file with valid worker tac contents self.setUpOpen("Application('buildbot-worker')") # check that isWorkerDir() flags directory as good self.assertTrue(base.isWorkerDir("testdir")) # check that open() was called with correct path self.open.assert_called_once_with(self.tac_file_path) buildbot-3.4.0/worker/buildbot_worker/test/unit/test_scripts_create_worker.py000066400000000000000000000730161413250514000300040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import os import mock from twisted.trial import unittest from buildbot_worker.scripts import create_worker from buildbot_worker.test.util import misc def _regexp_path(name, *names): """ Join two or more path components and create a regexp that will match that path. """ return os.path.join(name, *names).replace("\\", "\\\\") class TestMakeBaseDir(misc.StdoutAssertionsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.create_worker._makeBaseDir() """ def setUp(self): # capture stdout self.setUpStdoutAssertions() # patch os.mkdir() to do nothing self.mkdir = mock.Mock() self.patch(os, "mkdir", self.mkdir) def testBasedirExists(self): """ test calling _makeBaseDir() on existing base directory """ self.patch(os.path, "exists", mock.Mock(return_value=True)) # call _makeBaseDir() create_worker._makeBaseDir("dummy", False) # check that correct message was printed to stdout self.assertStdoutEqual("updating existing installation\n") # check that os.mkdir was not called self.assertFalse(self.mkdir.called, "unexpected call to os.mkdir()") def testBasedirExistsQuiet(self): """ test calling _makeBaseDir() on existing base directory with quiet flag enabled """ self.patch(os.path, "exists", mock.Mock(return_value=True)) # call _makeBaseDir() create_worker._makeBaseDir("dummy", True) # check that nothing was printed to stdout self.assertWasQuiet() # check that os.mkdir was not called self.assertFalse(self.mkdir.called, "unexpected call to os.mkdir()") def testBasedirCreated(self): """ test creating new base directory with _makeBaseDir() """ self.patch(os.path, "exists", mock.Mock(return_value=False)) # call _makeBaseDir() create_worker._makeBaseDir("dummy", False) # check that os.mkdir() was called with correct path self.mkdir.assert_called_once_with("dummy") # check that correct message was printed to stdout self.assertStdoutEqual("mkdir dummy\n") def 
testBasedirCreatedQuiet(self): """ test creating new base directory with _makeBaseDir() and quiet flag enabled """ self.patch(os.path, "exists", mock.Mock(return_value=False)) # call _makeBaseDir() create_worker._makeBaseDir("dummy", True) # check that os.mkdir() was called with correct path self.mkdir.assert_called_once_with("dummy") # check that nothing was printed to stdout self.assertWasQuiet() def testMkdirError(self): """ test that _makeBaseDir() handles error creating directory correctly """ self.patch(os.path, "exists", mock.Mock(return_value=False)) # patch os.mkdir() to raise an exception self.patch(os, "mkdir", mock.Mock(side_effect=OSError(0, "dummy-error"))) # check that correct exception was raised with self.assertRaisesRegex(create_worker.CreateWorkerError, "error creating directory dummy: dummy-error"): create_worker._makeBaseDir("dummy", False) class TestMakeBuildbotTac(misc.StdoutAssertionsMixin, misc.FileIOMixin, unittest.TestCase): """ Test buildbot_worker.scripts.create_worker._makeBuildbotTac() """ def setUp(self): # capture stdout self.setUpStdoutAssertions() # patch os.chmod() to do nothing self.chmod = mock.Mock() self.patch(os, "chmod", self.chmod) # generate OS specific relative path to buildbot.tac inside basedir self.tac_file_path = _regexp_path("bdir", "buildbot.tac") def testTacOpenError(self): """ test that _makeBuildbotTac() handles open() errors on buildbot.tac """ self.patch(os.path, "exists", mock.Mock(return_value=True)) # patch open() to raise exception self.setUpOpenError() # call _makeBuildbotTac() and check that correct exception is raised expected_message = "error reading {0}: dummy-msg".format(self.tac_file_path) with self.assertRaisesRegex(create_worker.CreateWorkerError, expected_message): create_worker._makeBuildbotTac("bdir", "contents", False) def testTacReadError(self): """ test that _makeBuildbotTac() handles read() errors on buildbot.tac """ self.patch(os.path, "exists", mock.Mock(return_value=True)) # patch read() 
to raise exception self.setUpReadError() # call _makeBuildbotTac() and check that correct exception is raised expected_message = "error reading {0}: dummy-msg".format(self.tac_file_path) with self.assertRaisesRegex(create_worker.CreateWorkerError, expected_message): create_worker._makeBuildbotTac("bdir", "contents", False) def testTacWriteError(self): """ test that _makeBuildbotTac() handles write() errors on buildbot.tac """ self.patch(os.path, "exists", mock.Mock(return_value=False)) # patch write() to raise exception self.setUpWriteError(0) # call _makeBuildbotTac() and check that correct exception is raised expected_message = "could not write {0}: dummy-msg".format(self.tac_file_path) with self.assertRaisesRegex(create_worker.CreateWorkerError, expected_message): create_worker._makeBuildbotTac("bdir", "contents", False) def checkTacFileCorrect(self, quiet): """ Utility function to test calling _makeBuildbotTac() on base directory with existing buildbot.tac file, which does not need to be changed. @param quiet: the value of 'quiet' argument for _makeBuildbotTac() """ # set-up mocks to simulate buildbot.tac file in the basedir self.patch(os.path, "exists", mock.Mock(return_value=True)) self.setUpOpen("test-tac-contents") # call _makeBuildbotTac() create_worker._makeBuildbotTac("bdir", "test-tac-contents", quiet) # check that write() was not called self.assertFalse(self.fileobj.write.called, "unexpected write() call") # check output to stdout if quiet: self.assertWasQuiet() else: self.assertStdoutEqual( "buildbot.tac already exists and is correct\n") def testTacFileCorrect(self): """ call _makeBuildbotTac() on base directory which contains a buildbot.tac file, which does not need to be changed """ self.checkTacFileCorrect(False) def testTacFileCorrectQuiet(self): """ call _makeBuildbotTac() on base directory which contains a buildbot.tac file, which does not need to be changed. 
Check that quite flag works """ self.checkTacFileCorrect(True) def checkDiffTacFile(self, quiet): """ Utility function to test calling _makeBuildbotTac() on base directory with a buildbot.tac file, with does needs to be changed. @param quiet: the value of 'quiet' argument for _makeBuildbotTac() """ # set-up mocks to simulate buildbot.tac file in basedir self.patch(os.path, "exists", mock.Mock(return_value=True)) self.setUpOpen("old-tac-contents") # call _makeBuildbotTac() create_worker._makeBuildbotTac("bdir", "new-tac-contents", quiet) # check that buildbot.tac.new file was created with expected contents tac_file_path = os.path.join("bdir", "buildbot.tac") self.open.assert_has_calls([mock.call(tac_file_path, "rt"), mock.call(tac_file_path + ".new", "wt")]) self.fileobj.write.assert_called_once_with("new-tac-contents") self.chmod.assert_called_once_with(tac_file_path + ".new", 0o600) # check output to stdout if quiet: self.assertWasQuiet() else: self.assertStdoutEqual("not touching existing buildbot.tac\n" "creating buildbot.tac.new instead\n") def testDiffTacFile(self): """ call _makeBuildbotTac() on base directory which contains a buildbot.tac file, with does needs to be changed. """ self.checkDiffTacFile(False) def testDiffTacFileQuiet(self): """ call _makeBuildbotTac() on base directory which contains a buildbot.tac file, with does needs to be changed. 
Check that quite flag works """ self.checkDiffTacFile(True) def testNoTacFile(self): """ call _makeBuildbotTac() on base directory with no buildbot.tac file """ self.patch(os.path, "exists", mock.Mock(return_value=False)) # capture calls to open() and write() self.setUpOpen() # call _makeBuildbotTac() create_worker._makeBuildbotTac("bdir", "test-tac-contents", False) # check that buildbot.tac file was created with expected contents tac_file_path = os.path.join("bdir", "buildbot.tac") self.open.assert_called_once_with(tac_file_path, "wt") self.fileobj.write.assert_called_once_with("test-tac-contents") self.chmod.assert_called_once_with(tac_file_path, 0o600) class TestMakeInfoFiles(misc.StdoutAssertionsMixin, misc.FileIOMixin, unittest.TestCase): """ Test buildbot_worker.scripts.create_worker._makeInfoFiles() """ def setUp(self): # capture stdout self.setUpStdoutAssertions() def checkMkdirError(self, quiet): """ Utility function to test _makeInfoFiles() when os.mkdir() fails. Patch os.mkdir() to raise an exception, and check that _makeInfoFiles() handles mkdir errors correctly. 
@param quiet: the value of 'quiet' argument for _makeInfoFiles() """ self.patch(os.path, "exists", mock.Mock(return_value=False)) # patch os.mkdir() to raise an exception self.patch(os, "mkdir", mock.Mock(side_effect=OSError(0, "err-msg"))) # call _makeInfoFiles() and check that correct exception is raised with self.assertRaisesRegex(create_worker.CreateWorkerError, "error creating directory {}: err-msg".format( _regexp_path("bdir", "info"))): create_worker._makeInfoFiles("bdir", quiet) # check output to stdout if quiet: self.assertWasQuiet() else: self.assertStdoutEqual("mkdir {0}\n".format(os.path.join("bdir", "info"))) def testMkdirError(self): """ test _makeInfoFiles() when os.mkdir() fails """ self.checkMkdirError(False) def testMkdirErrorQuiet(self): """ test _makeInfoFiles() when os.mkdir() fails and quiet flag is enabled """ self.checkMkdirError(True) def checkIOError(self, error_type, quiet): """ Utility function to test _makeInfoFiles() when open() or write() fails. Patch file IO functions to raise an exception, and check that _makeInfoFiles() handles file IO errors correctly. 
@param error_type: type of error to emulate, 'open' - patch open() to fail 'write' - patch write() to fail @param quiet: the value of 'quiet' argument for _makeInfoFiles() """ # patch os.path.exists() to simulate that 'info' directory exists # but not 'admin' or 'host' files self.patch(os.path, "exists", lambda path: path.endswith("info")) # set-up requested IO error if error_type == "open": self.setUpOpenError(strerror="info-err-msg") elif error_type == "write": self.setUpWriteError(strerror="info-err-msg") else: self.fail("unexpected error_type '{0}'".format(error_type)) # call _makeInfoFiles() and check that correct exception is raised with self.assertRaisesRegex(create_worker.CreateWorkerError, "could not write {0}: info-err-msg".format( _regexp_path("bdir", "info", "admin"))): create_worker._makeInfoFiles("bdir", quiet) # check output to stdout if quiet: self.assertWasQuiet() else: self.assertStdoutEqual( "Creating {}, you need to edit it appropriately.\n".format( os.path.join("info", "admin"))) def testOpenError(self): """ test _makeInfoFiles() when open() fails """ self.checkIOError("open", False) def testOpenErrorQuiet(self): """ test _makeInfoFiles() when open() fails and quiet flag is enabled """ self.checkIOError("open", True) def testWriteError(self): """ test _makeInfoFiles() when write() fails """ self.checkIOError("write", False) def testWriteErrorQuiet(self): """ test _makeInfoFiles() when write() fails and quiet flag is enabled """ self.checkIOError("write", True) def checkCreatedSuccessfully(self, quiet): """ Utility function to test _makeInfoFiles() when called on base directory that does not have 'info' sub-directory. 
@param quiet: the value of 'quiet' argument for _makeInfoFiles() """ # patch os.path.exists() to report the no dirs/files exists self.patch(os.path, "exists", mock.Mock(return_value=False)) # patch os.mkdir() to do nothing mkdir_mock = mock.Mock() self.patch(os, "mkdir", mkdir_mock) # capture calls to open() and write() self.setUpOpen() # call _makeInfoFiles() create_worker._makeInfoFiles("bdir", quiet) # check calls to os.mkdir() info_path = os.path.join("bdir", "info") mkdir_mock.assert_called_once_with(info_path) # check open() calls self.open.assert_has_calls( [mock.call(os.path.join(info_path, "admin"), "wt"), mock.call(os.path.join(info_path, "host"), "wt")]) # check write() calls self.fileobj.write.assert_has_calls( [mock.call("Your Name Here \n"), mock.call("Please put a description of this build host here\n")]) # check output to stdout if quiet: self.assertWasQuiet() else: self.assertStdoutEqual( ("mkdir {}\n" "Creating {}, you need to edit it appropriately.\n" "Creating {}, you need to edit it appropriately.\n" "Not creating {} - add it if you wish\n" "Please edit the files in {} appropriately.\n").format( info_path, os.path.join("info", "admin"), os.path.join("info", "host"), os.path.join("info", "access_uri"), info_path)) def testCreatedSuccessfully(self): """ test calling _makeInfoFiles() on basedir without 'info' directory """ self.checkCreatedSuccessfully(False) def testCreatedSuccessfullyQuiet(self): """ test calling _makeInfoFiles() on basedir without 'info' directory and quiet flag is enabled """ self.checkCreatedSuccessfully(True) def testInfoDirExists(self): """ test calling _makeInfoFiles() on basedir with fully populated 'info' directory """ self.patch(os.path, "exists", mock.Mock(return_value=True)) create_worker._makeInfoFiles("bdir", False) # there should be no messages to stdout self.assertWasQuiet() class TestCreateWorker(misc.StdoutAssertionsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.create_worker.createWorker() """ # 
default options and required arguments options = { # flags "no-logrotate": False, "relocatable": False, "quiet": False, "use-tls": False, "delete-leftover-dirs": False, # options "basedir": "bdir", "allow-shutdown": None, "umask": None, "log-size": 16, "log-count": 8, "keepalive": 4, "maxdelay": 2, "numcpus": None, "maxretries": None, "proxy-connection-string": None, # arguments "host": "masterhost", "port": 1234, "name": "workername", "passwd": "orange" } def setUp(self): # capture stdout self.setUpStdoutAssertions() def setUpMakeFunctions(self, exception=None): """ patch create_worker._make*() functions with a mocks @param exception: if not None, the mocks will raise this exception. """ self._makeBaseDir = mock.Mock(side_effect=exception) self.patch(create_worker, "_makeBaseDir", self._makeBaseDir) self._makeBuildbotTac = mock.Mock(side_effect=exception) self.patch(create_worker, "_makeBuildbotTac", self._makeBuildbotTac) self._makeInfoFiles = mock.Mock(side_effect=exception) self.patch(create_worker, "_makeInfoFiles", self._makeInfoFiles) def assertMakeFunctionsCalls(self, basedir, tac_contents, quiet): """ assert that create_worker._make*() were called with specified arguments """ self._makeBaseDir.assert_called_once_with(basedir, quiet) self._makeBuildbotTac.assert_called_once_with(basedir, tac_contents, quiet) self._makeInfoFiles.assert_called_once_with(basedir, quiet) def testCreateError(self): """ test that errors while creating worker directory are handled correctly by createWorker() """ # patch _make*() functions to raise an exception self.setUpMakeFunctions(create_worker.CreateWorkerError("err-msg")) # call createWorker() and check that we get error exit code self.assertEqual(create_worker.createWorker(self.options), 1, "unexpected exit code") # check that correct error message was printed on stdout self.assertStdoutEqual("err-msg\n" "failed to configure worker in bdir\n") def testMinArgs(self): """ test calling createWorker() with only required 
arguments """ # patch _make*() functions to do nothing self.setUpMakeFunctions() # call createWorker() and check that we get success exit code self.assertEqual(create_worker.createWorker(self.options), 0, "unexpected exit code") # check _make*() functions were called with correct arguments expected_tac_contents = \ "".join(create_worker.workerTACTemplate) % self.options self.assertMakeFunctionsCalls(self.options["basedir"], expected_tac_contents, self.options["quiet"]) # check that correct info message was printed self.assertStdoutEqual("worker configured in bdir\n") def assertTACFileContents(self, options): """ Check that TAC file generated with provided options is valid Python script and does typical for TAC file logic. """ # pylint: disable=import-outside-toplevel # import modules for mocking import twisted.application.service import twisted.python.logfile import buildbot_worker.bot # mock service.Application class application_mock = mock.Mock() application_class_mock = mock.Mock(return_value=application_mock) self.patch(twisted.application.service, "Application", application_class_mock) # mock logging stuff logfile_mock = mock.Mock() self.patch(twisted.python.logfile.LogFile, "fromFullPath", logfile_mock) # mock Worker class worker_mock = mock.Mock() worker_class_mock = mock.Mock(return_value=worker_mock) self.patch(buildbot_worker.bot, "Worker", worker_class_mock) expected_tac_contents = \ "".join(create_worker.workerTACTemplate) % options # Executed .tac file with mocked functions with side effect. # This will raise exception if .tac file is not valid Python file. 
glb = {} exec(expected_tac_contents, glb, glb) # only one Application must be created in .tac application_class_mock.assert_called_once_with("buildbot-worker") # check that Worker created with passed options worker_class_mock.assert_called_once_with( options["host"], options["port"], options["name"], options["passwd"], options["basedir"], options["keepalive"], umask=options["umask"], numcpus=options["numcpus"], maxdelay=options["maxdelay"], allow_shutdown=options["allow-shutdown"], maxRetries=options["maxretries"], useTls=options["use-tls"], delete_leftover_dirs=options["delete-leftover-dirs"], proxy_connection_string=options["proxy-connection-string"], ) # check that Worker instance attached to application self.assertEqual(worker_mock.method_calls, [mock.call.setServiceParent(application_mock)]) # .tac file must define global variable "application", instance of # Application self.assertTrue('application' in glb, ".tac file doesn't define \"application\" variable") self.assertTrue(glb['application'] is application_mock, "defined \"application\" variable in .tac file is not " "Application instance") def testDefaultTACContents(self): """ test that with default options generated TAC file is valid. """ self.assertTACFileContents(self.options) def testBackslashInBasedir(self): """ test that using backslash (typical for Windows platform) in basedir won't break generated TAC file. """ p = mock.patch.dict( self.options, {"basedir": r"C:\buildbot-worker dir\\"}) p.start() try: self.assertTACFileContents(self.options) finally: p.stop() def testQuotesInBasedir(self): """ test that using quotes in basedir won't break generated TAC file. """ p = mock.patch.dict(self.options, {"basedir": r"Buildbot's \"dir"}) p.start() try: self.assertTACFileContents(self.options) finally: p.stop() def testDoubleQuotesInBasedir(self): """ test that using double quotes at begin and end of basedir won't break generated TAC file. 
""" p = mock.patch.dict(self.options, {"basedir": r"\"\"Buildbot''"}) p.start() try: self.assertTACFileContents(self.options) finally: p.stop() def testSpecialCharactersInOptions(self): """ test that using special characters in options strings won't break generated TAC file. """ test_string = ("\"\" & | ^ # @ \\& \\| \\^ \\# \\@ \\n" " \x07 \" \\\" ' \\' ''") p = mock.patch.dict(self.options, { "basedir": test_string, "host": test_string, "passwd": test_string, "name": test_string, }) p.start() try: self.assertTACFileContents(self.options) finally: p.stop() def testNoLogRotate(self): """ test that when --no-logrotate options is used, correct tac file is generated. """ options = self.options.copy() options["no-logrotate"] = True # patch _make*() functions to do nothing self.setUpMakeFunctions() # call createWorker() and check that we get success exit code self.assertEqual(create_worker.createWorker(options), 0, "unexpected exit code") # check _make*() functions were called with correct arguments expected_tac_contents = (create_worker.workerTACTemplate[0] + create_worker.workerTACTemplate[2]) % options self.assertMakeFunctionsCalls(self.options["basedir"], expected_tac_contents, self.options["quiet"]) # check that correct info message was printed self.assertStdoutEqual("worker configured in bdir\n") def testUseTLS(self): """ test that when --use-tls options is used, correct connection_string is generated """ options = self.options.copy() options["use-tls"] = True # patch _make*() functions to do nothing self.setUpMakeFunctions() # call createWorker() and check that we get success exit code self.assertEqual(create_worker.createWorker(options), 0, "unexpected exit code") # check _make*() functions were called with correct arguments expected_tac_contents = ("".join(create_worker.workerTACTemplate)) % options self.assertMakeFunctionsCalls(self.options["basedir"], expected_tac_contents, self.options["quiet"]) # check that correct info message was printed 
self.assertStdoutEqual("worker configured in bdir\n") def testWithOpts(self): """ test calling createWorker() with --relocatable and --allow-shutdown options specified. """ options = self.options.copy() options["relocatable"] = True options["allow-shutdown"] = "signal" # patch _make*() functions to do nothing self.setUpMakeFunctions() # call createWorker() and check that we get success exit code self.assertEqual(create_worker.createWorker(options), 0, "unexpected exit code") # check _make*() functions were called with correct arguments options["allow-shutdown"] = "'signal'" expected_tac_contents = \ "".join(create_worker.workerTACTemplate) % options self.assertMakeFunctionsCalls(self.options["basedir"], expected_tac_contents, options["quiet"]) # check that correct info message was printed self.assertStdoutEqual("worker configured in bdir\n") def testQuiet(self): """ test calling createWorker() with --quiet flag """ options = self.options.copy() options["quiet"] = True # patch _make*() functions to do nothing self.setUpMakeFunctions() # call createWorker() and check that we get success exit code self.assertEqual(create_worker.createWorker(options), 0, "unexpected exit code") # check _make*() functions were called with correct arguments expected_tac_contents = \ "".join(create_worker.workerTACTemplate) % options self.assertMakeFunctionsCalls(options["basedir"], expected_tac_contents, options["quiet"]) # there should be no output on stdout self.assertWasQuiet() def testDeleteLeftoverDirs(self): """ test calling createWorker() with --delete-leftover-dirs flag """ options = self.options.copy() options["delete-leftover-dirs"] = True # patch _make*() functions to do nothing self.setUpMakeFunctions() # call createWorker() and check that we get success exit code self.assertEqual(create_worker.createWorker(options), 0, "unexpected exit code") # check _make*() functions were called with correct arguments expected_tac_contents = ("".join(create_worker.workerTACTemplate)) % 
options self.assertMakeFunctionsCalls(self.options["basedir"], expected_tac_contents, self.options["quiet"]) # check that correct info message was printed self.assertStdoutEqual("worker configured in bdir\n") buildbot-3.4.0/worker/buildbot_worker/test/unit/test_scripts_restart.py000066400000000000000000000070041413250514000266260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import mock from twisted.trial import unittest from buildbot_worker.scripts import restart from buildbot_worker.scripts import start from buildbot_worker.scripts import stop from buildbot_worker.test.util import misc class TestRestart(misc.IsWorkerDirMixin, misc.StdoutAssertionsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.restart.restart() """ config = {"basedir": "dummy", "nodaemon": False, "quiet": False} def setUp(self): self.setUpStdoutAssertions() # patch start.startWorker() to do nothing self.startWorker = mock.Mock() self.patch(start, "startWorker", self.startWorker) def test_bad_basedir(self): """ test calling restart() with invalid basedir path """ # patch isWorkerDir() to fail self.setupUpIsWorkerDir(False) # call startCommand() and check that correct exit code is returned self.assertEqual(restart.restart(self.config), 1, "unexpected exit code") # check that isWorkerDir was called with correct argument self.isWorkerDir.assert_called_once_with(self.config["basedir"]) def test_no_worker_running(self): """ test calling restart() when no worker is running """ # patch basedir check to always succeed self.setupUpIsWorkerDir(True) # patch stopWorker() to raise an exception mock_stopWorker = mock.Mock(side_effect=stop.WorkerNotRunning()) self.patch(stop, "stopWorker", mock_stopWorker) # check that restart() calls startWorker() and outputs correct messages restart.restart(self.config) self.startWorker.assert_called_once_with(self.config["basedir"], self.config["quiet"], self.config["nodaemon"]) self.assertStdoutEqual("no old worker process found to stop\n" "now restarting worker process..\n") def test_restart(self): """ test calling restart() when worker is running """ # patch basedir check to always succeed self.setupUpIsWorkerDir(True) # patch stopWorker() to do nothing mock_stopWorker = mock.Mock() self.patch(stop, "stopWorker", 
mock_stopWorker) # check that restart() calls startWorker() and outputs correct messages restart.restart(self.config) self.startWorker.assert_called_once_with(self.config["basedir"], self.config["quiet"], self.config["nodaemon"]) self.assertStdoutEqual("now restarting worker process..\n") buildbot-3.4.0/worker/buildbot_worker/test/unit/test_scripts_runner.py000066400000000000000000000405671413250514000264660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import os import sys import mock from twisted.python import log from twisted.python import usage from twisted.trial import unittest from buildbot_worker.scripts import runner from buildbot_worker.test.util import misc class OptionsMixin(object): def assertOptions(self, opts, exp): got = {k: opts[k] for k in exp} if got != exp: msg = [] for k in exp: if opts[k] != exp[k]: msg.append(" {0}: expected {1!r}, got {2!r}".format( k, exp[k], opts[k])) self.fail("did not get expected options\n" + ("\n".join(msg))) class BaseDirTestsMixin(object): """ Common tests for Options classes with 'basedir' parameter """ GETCWD_PATH = "test-dir" ABSPATH_PREFIX = "test-prefix-" MY_BASEDIR = "my-basedir" # the options class to instantiate for test cases options_class = None def setUp(self): self.patch(os, "getcwd", lambda: self.GETCWD_PATH) self.patch(os.path, "abspath", lambda path: self.ABSPATH_PREFIX + path) def parse(self, *args): assert self.options_class is not None opts = self.options_class() opts.parseOptions(args) return opts def test_defaults(self): opts = self.parse() self.assertEqual(opts["basedir"], self.ABSPATH_PREFIX + self.GETCWD_PATH, "unexpected basedir path") def test_basedir_arg(self): opts = self.parse(self.MY_BASEDIR) self.assertEqual(opts["basedir"], self.ABSPATH_PREFIX + self.MY_BASEDIR, "unexpected basedir path") def test_too_many_args(self): with self.assertRaisesRegex(usage.UsageError, "I wasn't expecting so many arguments"): self.parse("arg1", "arg2") class TestMakerBase(BaseDirTestsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.runner.MakerBase class. """ options_class = runner.MakerBase class TestStopOptions(BaseDirTestsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.runner.StopOptions class. 
""" options_class = runner.StopOptions def test_synopsis(self): opts = runner.StopOptions() self.assertIn('buildbot-worker stop', opts.getSynopsis()) class TestStartOptions(OptionsMixin, BaseDirTestsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.runner.StartOptions class. """ options_class = runner.StartOptions def test_synopsis(self): opts = runner.StartOptions() self.assertIn('buildbot-worker start', opts.getSynopsis()) def test_all_args(self): opts = self.parse("--quiet", "--nodaemon", self.MY_BASEDIR) self.assertOptions(opts, dict(quiet=True, nodaemon=True, basedir=self.ABSPATH_PREFIX + self.MY_BASEDIR)) class TestRestartOptions(OptionsMixin, BaseDirTestsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.runner.RestartOptions class. """ options_class = runner.RestartOptions def test_synopsis(self): opts = runner.RestartOptions() self.assertIn('buildbot-worker restart', opts.getSynopsis()) def test_all_args(self): opts = self.parse("--quiet", "--nodaemon", self.MY_BASEDIR) self.assertOptions(opts, dict(quiet=True, nodaemon=True, basedir=self.ABSPATH_PREFIX + self.MY_BASEDIR)) class TestCreateWorkerOptions(OptionsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.runner.CreateWorkerOptions class. 
""" req_args = ["bdir", "mstr:5678", "name", "pswd"] def parse(self, *args): opts = runner.CreateWorkerOptions() opts.parseOptions(args) return opts def test_defaults(self): with self.assertRaisesRegex(usage.UsageError, "incorrect number of arguments"): self.parse() def test_synopsis(self): opts = runner.CreateWorkerOptions() self.assertIn('buildbot-worker create-worker', opts.getSynopsis()) def test_min_args(self): # patch runner.MakerBase.postOptions() so that 'basedir' # argument will not be converted to absolute path self.patch(runner.MakerBase, "postOptions", mock.Mock()) self.assertOptions(self.parse(*self.req_args), dict(basedir="bdir", host="mstr", port=5678, name="name", passwd="pswd")) def test_all_args(self): # patch runner.MakerBase.postOptions() so that 'basedir' # argument will not be converted to absolute path self.patch(runner.MakerBase, "postOptions", mock.Mock()) opts = self.parse("--force", "--relocatable", "--no-logrotate", "--keepalive=4", "--umask=0o22", "--maxdelay=3", "--numcpus=4", "--log-size=2", "--log-count=1", "--allow-shutdown=file", *self.req_args) self.assertOptions(opts, {"force": True, "relocatable": True, "no-logrotate": True, "umask": "0o22", "maxdelay": 3, "numcpus": "4", "log-size": 2, "log-count": "1", "allow-shutdown": "file", "basedir": "bdir", "host": "mstr", "port": 5678, "name": "name", "passwd": "pswd"}) def test_master_url(self): with self.assertRaisesRegex(usage.UsageError, " is not a URL - do not use URL"): self.parse("a", "http://b.c", "d", "e") def test_inv_keepalive(self): with self.assertRaisesRegex(usage.UsageError, "keepalive parameter needs to be a number"): self.parse("--keepalive=X", *self.req_args) def test_inv_maxdelay(self): with self.assertRaisesRegex(usage.UsageError, "maxdelay parameter needs to be a number"): self.parse("--maxdelay=X", *self.req_args) def test_inv_log_size(self): with self.assertRaisesRegex(usage.UsageError, "log-size parameter needs to be a number"): self.parse("--log-size=X", 
*self.req_args) def test_inv_log_count(self): with self.assertRaisesRegex(usage.UsageError, "log-count parameter needs to be a number or None"): self.parse("--log-count=X", *self.req_args) def test_inv_numcpus(self): with self.assertRaisesRegex(usage.UsageError, "numcpus parameter needs to be a number or None"): self.parse("--numcpus=X", *self.req_args) def test_inv_umask(self): with self.assertRaisesRegex(usage.UsageError, "umask parameter needs to be a number or None"): self.parse("--umask=X", *self.req_args) def test_inv_allow_shutdown(self): with self.assertRaisesRegex(usage.UsageError, "allow-shutdown needs to be one of 'signal' or 'file'"): self.parse("--allow-shutdown=X", *self.req_args) def test_too_few_args(self): with self.assertRaisesRegex(usage.UsageError, "incorrect number of arguments"): self.parse("arg1", "arg2") def test_too_many_args(self): with self.assertRaisesRegex(usage.UsageError, "incorrect number of arguments"): self.parse("extra_arg", *self.req_args) def test_validateMasterArgument_no_port(self): """ test calling CreateWorkerOptions.validateMasterArgument() on argument without port specified. """ opts = runner.CreateWorkerOptions() self.assertEqual(opts.validateMasterArgument("mstrhost"), ("mstrhost", 9989), "incorrect master host and/or port") def test_validateMasterArgument_empty_master(self): """ test calling CreateWorkerOptions.validateMasterArgument() on without host part specified. 
""" opts = runner.CreateWorkerOptions() with self.assertRaisesRegex(usage.UsageError, "invalid argument ':1234'"): opts.validateMasterArgument(":1234") def test_validateMasterArgument_inv_port(self): """ test calling CreateWorkerOptions.validateMasterArgument() on without with unparsable port part """ opts = runner.CreateWorkerOptions() with self.assertRaisesRegex(usage.UsageError, "invalid master port 'apple', " "needs to be a number"): opts.validateMasterArgument("host:apple") def test_validateMasterArgument_ok(self): """ test calling CreateWorkerOptions.validateMasterArgument() on with host and port parts specified. """ opts = runner.CreateWorkerOptions() self.assertEqual(opts.validateMasterArgument("mstrhost:4321"), ("mstrhost", 4321), "incorrect master host and/or port") def test_validateMasterArgument_ipv4(self): """ test calling CreateWorkerOptions.validateMasterArgument() on with ipv4 host specified. """ opts = runner.CreateWorkerOptions() self.assertEqual(opts.validateMasterArgument("192.168.0.0"), ("192.168.0.0", 9989), "incorrect master host and/or port") def test_validateMasterArgument_ipv4_port(self): """ test calling CreateWorkerOptions.validateMasterArgument() on with ipv4 host and port parts specified. """ opts = runner.CreateWorkerOptions() self.assertEqual(opts.validateMasterArgument("192.168.0.0:4321"), ("192.168.0.0", 4321), "incorrect master host and/or port") def test_validateMasterArgument_ipv6(self): """ test calling CreateWorkerOptions.validateMasterArgument() on with ipv6 host specified. """ opts = runner.CreateWorkerOptions() self.assertEqual(opts.validateMasterArgument("[2001:1:2:3:4::1]"), ("2001:1:2:3:4::1", 9989), "incorrect master host and/or port") def test_validateMasterArgument_ipv6_port(self): """ test calling CreateWorkerOptions.validateMasterArgument() on with ipv6 host and port parts specified. 
""" opts = runner.CreateWorkerOptions() self.assertEqual(opts.validateMasterArgument("[2001:1:2:3:4::1]:4321"), ("2001:1:2:3:4::1", 4321), "incorrect master host and/or port") def test_validateMasterArgument_ipv6_no_bracket(self): """ test calling CreateWorkerOptions.validateMasterArgument() on with ipv6 without [] specified. """ opts = runner.CreateWorkerOptions() with self.assertRaisesRegex(usage.UsageError, r"invalid argument '2001:1:2:3:4::1:4321', " r"if it is an ipv6 address, it must be enclosed by \[\]"): opts.validateMasterArgument("2001:1:2:3:4::1:4321") class TestOptions(misc.StdoutAssertionsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.runner.Options class. """ def setUp(self): self.setUpStdoutAssertions() def parse(self, *args): opts = runner.Options() opts.parseOptions(args) return opts def test_defaults(self): with self.assertRaisesRegex(usage.UsageError, "must specify a command"): self.parse() def test_version(self): exception = self.assertRaises(SystemExit, self.parse, '--version') self.assertEqual(exception.code, 0, "unexpected exit code") self.assertInStdout('worker version:') def test_verbose(self): self.patch(log, 'startLogging', mock.Mock()) with self.assertRaises(usage.UsageError): self.parse("--verbose") log.startLogging.assert_called_once_with(sys.stderr) # used by TestRun.test_run_good to patch in a callback functionPlaceholder = None class TestRun(misc.StdoutAssertionsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.runner.run() """ def setUp(self): self.setUpStdoutAssertions() class TestSubCommand(usage.Options): subcommandFunction = __name__ + ".functionPlaceholder" optFlags = [["test-opt", None, None]] class TestOptions(usage.Options): """ Option class that emulates usage error. The 'suboptions' flag enables emulation of usage error in a sub-option. 
""" optFlags = [["suboptions", None, None]] def postOptions(self): if self["suboptions"]: self.subOptions = "SubOptionUsage" raise usage.UsageError("usage-error-message") def __str__(self): return "GeneralUsage" def test_run_good(self): """ Test successful invocation of worker command. """ self.patch(sys, "argv", ["command", 'test', '--test-opt']) # patch runner module to use our test subcommand class self.patch(runner.Options, 'subCommands', [['test', None, self.TestSubCommand, None]]) # trace calls to subcommand function subcommand_func = mock.Mock(return_value=42) self.patch(sys.modules[__name__], "functionPlaceholder", subcommand_func) # check that subcommand function called with correct arguments # and that it's return value is used as exit code exception = self.assertRaises(SystemExit, runner.run) subcommand_func.assert_called_once_with({'test-opt': 1}) self.assertEqual(exception.code, 42, "unexpected exit code") def test_run_bad_noargs(self): """ Test handling of invalid command line arguments. """ self.patch(sys, "argv", ["command"]) # patch runner module to use test Options class self.patch(runner, "Options", self.TestOptions) exception = self.assertRaises(SystemExit, runner.run) self.assertEqual(exception.code, 1, "unexpected exit code") self.assertStdoutEqual("command: usage-error-message\n\n" "GeneralUsage\n", "unexpected error message on stdout") def test_run_bad_suboption(self): """ Test handling of invalid command line arguments in a suboption. 
""" self.patch(sys, "argv", ["command", "--suboptions"]) # patch runner module to use test Options class self.patch(runner, "Options", self.TestOptions) exception = self.assertRaises(SystemExit, runner.run) self.assertEqual(exception.code, 1, "unexpected exit code") # check that we get error message for a sub-option self.assertStdoutEqual("command: usage-error-message\n\n" "SubOptionUsage\n", "unexpected error message on stdout") buildbot-3.4.0/worker/buildbot_worker/test/unit/test_scripts_start.py000066400000000000000000000046401413250514000263020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import mock from twisted.trial import unittest from buildbot_worker.scripts import start from buildbot_worker.test.util import misc class TestStartCommand(unittest.TestCase, misc.IsWorkerDirMixin): """ Test buildbot_worker.scripts.startup.startCommand() """ def test_start_command_bad_basedir(self): """ test calling startCommand() with invalid basedir path """ # patch isWorkerDir() to fail self.setupUpIsWorkerDir(False) # call startCommand() and check that correct exit code is returned config = {"basedir": "dummy"} self.assertEqual(start.startCommand(config), 1, "unexpected exit code") # check that isWorkerDir was called with correct argument self.isWorkerDir.assert_called_once_with("dummy") def test_start_command_good(self): """ test successful startCommand() call """ # patch basedir check to always succeed self.setupUpIsWorkerDir(True) # patch startWorker() to do nothing mocked_startWorker = mock.Mock(return_value=0) self.patch(start, "startWorker", mocked_startWorker) config = {"basedir": "dummy", "nodaemon": False, "quiet": False} self.assertEqual(start.startCommand(config), 0, "unexpected exit code") # check that isWorkerDir() and startWorker() were called # with correct argument self.isWorkerDir.assert_called_once_with("dummy") mocked_startWorker.assert_called_once_with(config["basedir"], config["quiet"], config["nodaemon"]) buildbot-3.4.0/worker/buildbot_worker/test/unit/test_scripts_stop.py000066400000000000000000000141361413250514000261330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import errno import os import signal import time import mock from twisted.trial import unittest from buildbot_worker.scripts import stop from buildbot_worker.test.util import compat from buildbot_worker.test.util import misc class TestStopWorker(misc.FileIOMixin, misc.StdoutAssertionsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.stop.stopWorker() """ PID = 9876 def setUp(self): self.setUpStdoutAssertions() # patch os.chdir() to do nothing self.patch(os, "chdir", mock.Mock()) def test_no_pid_file(self): """ test calling stopWorker() when no pid file is present """ # patch open() to raise 'file not found' exception self.setUpOpenError(2) # check that stop() raises WorkerNotRunning exception with self.assertRaises(stop.WorkerNotRunning): stop.stopWorker(None, False) @compat.skipUnlessPlatformIs("posix") def test_successful_stop(self): """ test stopWorker() on a successful worker stop """ def emulated_kill(pid, sig): if sig == 0: # when probed if a signal can be send to the process # emulate that it is dead with 'No such process' error raise OSError(errno.ESRCH, "dummy") # patch open() to return a pid file self.setUpOpen(str(self.PID)) # patch os.kill to emulate successful kill mocked_kill = mock.Mock(side_effect=emulated_kill) self.patch(os, "kill", mocked_kill) # don't waste time self.patch(time, "sleep", mock.Mock()) # check that stopWorker() sends expected signal to right PID # and print 
correct message to stdout exit_code = stop.stopWorker(None, False) self.assertEqual(exit_code, 0) mocked_kill.assert_has_calls([mock.call(self.PID, signal.SIGTERM), mock.call(self.PID, 0)]) self.assertStdoutEqual("worker process {0} is dead\n".format(self.PID)) @compat.skipUnlessPlatformIs("posix") def test_stop_timeout(self): """ test stopWorker() when stop timeouts """ # patch open() to return a pid file self.setUpOpen(str(self.PID)) # patch os.kill to emulate successful kill mocked_kill = mock.Mock() self.patch(os, "kill", mocked_kill) # don't waste time self.patch(time, "sleep", mock.Mock()) # check that stopWorker() sends expected signal to right PID # and print correct message to stdout exit_code = stop.stopWorker(None, False) self.assertEqual(exit_code, 1) mocked_kill.assert_has_calls([mock.call(self.PID, signal.SIGTERM), mock.call(self.PID, 0)]) self.assertStdoutEqual("never saw process go away\n") class TestStop(misc.IsWorkerDirMixin, misc.StdoutAssertionsMixin, unittest.TestCase): """ Test buildbot_worker.scripts.stop.stop() """ config = {"basedir": "dummy", "quiet": False} def test_bad_basedir(self): """ test calling stop() with invalid basedir path """ # patch isWorkerDir() to fail self.setupUpIsWorkerDir(False) # call startCommand() and check that correct exit code is returned self.assertEqual(stop.stop(self.config), 1, "unexpected exit code") # check that isWorkerDir was called with correct argument self.isWorkerDir.assert_called_once_with(self.config["basedir"]) def test_no_worker_running(self): """ test calling stop() when no worker is running """ self.setUpStdoutAssertions() # patch basedir check to always succeed self.setupUpIsWorkerDir(True) # patch stopWorker() to raise an exception mock_stopWorker = mock.Mock(side_effect=stop.WorkerNotRunning()) self.patch(stop, "stopWorker", mock_stopWorker) exit_code = stop.stop(self.config) self.assertEqual(exit_code, 0) self.assertStdoutEqual("worker not running\n") def test_successful_stop(self): """ test 
calling stop() when worker is running """ # patch basedir check to always succeed self.setupUpIsWorkerDir(True) # patch stopWorker() to do nothing mock_stopWorker = mock.Mock(return_value=0) self.patch(stop, "stopWorker", mock_stopWorker) exit_code = stop.stop(self.config) self.assertEqual(exit_code, 0) mock_stopWorker.assert_called_once_with(self.config["basedir"], self.config["quiet"], "TERM") def test_failed_stop(self): """ test failing stop() """ # patch basedir check to always succeed self.setupUpIsWorkerDir(True) # patch stopWorker() to do nothing mock_stopWorker = mock.Mock(return_value=17) self.patch(stop, "stopWorker", mock_stopWorker) exit_code = stop.stop(self.config) self.assertEqual(exit_code, 17) mock_stopWorker.assert_called_once_with(self.config["basedir"], self.config["quiet"], "TERM") buildbot-3.4.0/worker/buildbot_worker/test/unit/test_util.py000066400000000000000000000123351413250514000243530ustar00rootroot00000000000000# coding: utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from twisted.trial import unittest from buildbot_worker import util class remove_userpassword(unittest.TestCase): def assertUrl(self, real_url, expected_url): new_url = util.remove_userpassword(real_url) self.assertEqual(expected_url, new_url) def test_url_with_no_user_and_password(self): self.assertUrl('http://myurl.com/myrepo', 'http://myurl.com/myrepo') def test_url_with_user_and_password(self): self.assertUrl( 'http://myuser:mypass@myurl.com/myrepo', 'http://myurl.com/myrepo') def test_another_url_with_no_user_and_password(self): self.assertUrl( 'http://myurl2.com/myrepo2', 'http://myurl2.com/myrepo2') def test_another_url_with_user_and_password(self): self.assertUrl( 'http://myuser2:mypass2@myurl2.com/myrepo2', 'http://myurl2.com/myrepo2') def test_with_different_protocol_without_user_and_password(self): self.assertUrl('ssh://myurl3.com/myrepo3', 'ssh://myurl3.com/myrepo3') def test_with_different_protocol_with_user_and_password(self): self.assertUrl( 'ssh://myuser3:mypass3@myurl3.com/myrepo3', 'ssh://myurl3.com/myrepo3') def test_file_path(self): self.assertUrl('/home/me/repos/my-repo', '/home/me/repos/my-repo') def test_file_path_with_at_sign(self): self.assertUrl('/var/repos/speci@l', '/var/repos/speci@l') def test_win32file_path(self): self.assertUrl('c:\\repos\\my-repo', 'c:\\repos\\my-repo') class TestObfuscated(unittest.TestCase): def testSimple(self): c = util.Obfuscated('real', '****') self.assertEqual(str(c), '****') self.assertEqual(repr(c), "'****'") def testObfuscatedCommand(self): cmd = ['echo', util.Obfuscated('password', '*******')] cmd_bytes = [b'echo', util.Obfuscated(b'password', b'*******')] cmd_unicode = [u'echo', util.Obfuscated(u'password', u'привет')] self.assertEqual( ['echo', 'password'], util.Obfuscated.get_real(cmd)) self.assertEqual( ['echo', '*******'], util.Obfuscated.get_fake(cmd)) self.assertEqual( [b'echo', 
b'password'], util.Obfuscated.get_real(cmd_bytes)) self.assertEqual( [b'echo', b'*******'], util.Obfuscated.get_fake(cmd_bytes)) self.assertEqual( [u'echo', u'password'], util.Obfuscated.get_real(cmd_unicode)) self.assertEqual( [u'echo', u'привет'], util.Obfuscated.get_fake(cmd_unicode)) def testObfuscatedNonString(self): cmd = ['echo', 1] cmd_bytes = [b'echo', 2] cmd_unicode = [u'привет', 3] self.assertEqual(['echo', '1'], util.Obfuscated.get_real(cmd)) self.assertEqual([b'echo', '2'], util.Obfuscated.get_fake(cmd_bytes)) self.assertEqual([u'привет', u'3'], util.Obfuscated.get_fake(cmd_unicode)) def testObfuscatedNonList(self): cmd = 1 self.assertEqual(1, util.Obfuscated.get_real(cmd)) self.assertEqual(1, util.Obfuscated.get_fake(cmd)) class TestRewrap(unittest.TestCase): def test_main(self): tests = [ ("", "", None), ("\n", "\n", None), ("\n ", "\n", None), (" \n", "\n", None), (" \n ", "\n", None), (""" multiline with indent """, "\nmultiline with indent", None), ("""\ multiline with indent """, "multiline with indent\n", None), ("""\ multiline with indent """, "multiline with indent\n", None), ("""\ multiline with indent and formatting """, "multiline with indent\n and\n formatting\n", None), ("""\ multiline with indent and wrapping and formatting """, "multiline with\nindent and\nwrapping\n and\n formatting\n", 15), ] for text, expected, width in tests: self.assertEqual(util.rewrap(text, width=width), expected) buildbot-3.4.0/worker/buildbot_worker/test/util/000077500000000000000000000000001413250514000217575ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/test/util/__init__.py000066400000000000000000000000001413250514000240560ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/test/util/command.py000066400000000000000000000117531413250514000237560ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import os import shutil import buildbot_worker.runprocess from buildbot_worker.commands import utils from buildbot_worker.test.fake import runprocess from buildbot_worker.test.fake import workerforbuilder class CommandTestMixin(object): """ Support for testing Command subclasses. """ def setUpCommand(self): """ Get things ready to test a Command Sets: self.basedir -- the basedir (an abs path) self.basedir_workdir -- os.path.join(self.basedir, 'workdir') self.basedir_source -- os.path.join(self.basedir, 'source') """ self.basedir = os.path.abspath('basedir') self.basedir_workdir = os.path.join(self.basedir, 'workdir') self.basedir_source = os.path.join(self.basedir, 'source') # clean up the basedir unconditionally if os.path.exists(self.basedir): shutil.rmtree(self.basedir) def tearDownCommand(self): """ Call this from the tearDown method to clean up any leftover workdirs and do any additional cleanup required. 
""" # clean up the basedir unconditionally if os.path.exists(self.basedir): shutil.rmtree(self.basedir) # finish up the runprocess if hasattr(self, 'runprocess_patched') and self.runprocess_patched: runprocess.FakeRunProcess.test_done() def make_command(self, cmdclass, args, makedirs=False): """ Create a new command object, creating the necessary arguments. The cmdclass argument is the Command class, and args is the args dict to pass to its constructor. This always creates the WorkerForBuilder with a basedir (self.basedir). If makedirs is true, it will create the basedir and a workdir directory inside (named 'workdir'). The resulting command is returned, but as a side-effect, the following attributes are set: self.cmd -- the command self.builder -- the (fake) WorkerForBuilder """ # set up the workdir and basedir if makedirs: basedir_abs = os.path.abspath(os.path.join(self.basedir)) workdir_abs = os.path.abspath( os.path.join(self.basedir, 'workdir')) if os.path.exists(basedir_abs): shutil.rmtree(basedir_abs) os.makedirs(workdir_abs) b = self.builder = workerforbuilder.FakeWorkerForBuilder( basedir=self.basedir) self.cmd = cmdclass(b, 'fake-stepid', args) return self.cmd def run_command(self): """ Run the command created by make_command. Returns a deferred that will fire on success or failure. """ return self.cmd.doStart() def get_updates(self): """ Return the updates made so far """ return self.builder.updates def assertUpdates(self, updates, msg=None): """ Asserts that self.get_updates() matches updates, ignoring elapsed time data """ my_updates = [] for update in self.get_updates(): try: if "elapsed" in update: continue except Exception: pass my_updates.append(update) self.assertEqual(my_updates, updates, msg) def add_update(self, upd): self.builder.updates.append(upd) def patch_runprocess(self, *expectations): """ Patch a fake RunProcess class in, and set the given expectations. 
""" self.patch( buildbot_worker.runprocess, 'RunProcess', runprocess.FakeRunProcess) buildbot_worker.runprocess.RunProcess.expect(*expectations) self.runprocess_patched = True def patch_getCommand(self, name, result): """ Patch utils.getCommand to return RESULT for NAME """ old_getCommand = utils.getCommand def new_getCommand(n): if n == name: return result return old_getCommand(n) self.patch(utils, 'getCommand', new_getCommand) def clean_environ(self): """ Temporarily clean out os.environ to { 'PWD' : '.' } """ self.patch(os, 'environ', {'PWD': '.'}) buildbot-3.4.0/worker/buildbot_worker/test/util/compat.py000066400000000000000000000020521413250514000236130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # This module is left for backward compatibility of old-named worker API. # It should never be imported by Buildbot. from twisted.python import runtime def skipUnlessPlatformIs(platform): def closure(test): if runtime.platformType != platform: test.skip = "not a {0} platform".format(platform) return test return closure buildbot-3.4.0/worker/buildbot_worker/test/util/misc.py000066400000000000000000000176011413250514000232710ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # We cannot use the builtins module here from Python-Future. # We need to use the native __builtin__ module on Python 2, # and builtins module on Python 3, because we need to override # the actual native open method. from __future__ import absolute_import from __future__ import print_function from future.utils import PY3 from future.utils import string_types import errno import os import re import shutil import sys from io import BytesIO from io import StringIO import mock from twisted.python import log from buildbot_worker.scripts import base try: # Python 2 import __builtin__ as builtins except ImportError: # Python 3 import builtins def nl(s): """Convert the given string to the native newline format, assuming it is already in normal UNIX newline format (\n). 
Use this to create the appropriate expectation in an assertEqual""" if not isinstance(s, string_types): return s return s.replace('\n', os.linesep) class BasedirMixin(object): """Mix this in and call setUpBasedir and tearDownBasedir to set up a clean basedir with a name given in self.basedir.""" def setUpBasedir(self): self.basedir = "test-basedir" if os.path.exists(self.basedir): shutil.rmtree(self.basedir) def tearDownBasedir(self): if os.path.exists(self.basedir): shutil.rmtree(self.basedir) class IsWorkerDirMixin(object): """ Mixin for setting up mocked base.isWorkerDir() function """ def setupUpIsWorkerDir(self, return_value): self.isWorkerDir = mock.Mock(return_value=return_value) self.patch(base, "isWorkerDir", self.isWorkerDir) class PatcherMixin(object): """ Mix this in to get a few special-cased patching methods """ def patch_os_uname(self, replacement): # twisted's 'patch' doesn't handle the case where an attribute # doesn't exist.. if hasattr(os, 'uname'): self.patch(os, 'uname', replacement) else: def cleanup(): del os.uname self.addCleanup(cleanup) os.uname = replacement class FileIOMixin(object): """ Mixin for patching open(), read() and write() to simulate successful I/O operations and various I/O errors. """ def setUpOpen(self, file_contents="dummy-contents"): """ patch open() to return file object with provided contents. @param file_contents: contents that will be returned by file object's read() method """ # Use mock.mock_open() to create a substitute for # open(). fakeOpen = mock.mock_open(read_data=file_contents) # When fakeOpen() is called, it returns a Mock # that has these methods: read(), write(), __enter__(), __exit__(). # read() will always return the value of the 'file_contents variable. 
self.fileobj = fakeOpen() # patch open() to always return our Mock file object self.open = mock.Mock(return_value=self.fileobj) self.patch(builtins, "open", self.open) def setUpOpenError(self, errno=errno.ENOENT, strerror="dummy-msg", filename="dummy-file"): """ patch open() to raise IOError @param errno: exception's errno value @param strerror: exception's strerror value @param filename: exception's filename value """ # Use mock.mock_open() to create a substitute for # open(). fakeOpen = mock.mock_open() # Add side_effect so that calling fakeOpen() will always # raise an IOError. fakeOpen.side_effect = IOError(errno, strerror, filename) self.open = fakeOpen self.patch(builtins, "open", self.open) def setUpReadError(self, errno=errno.EIO, strerror="dummy-msg", filename="dummy-file"): """ patch open() to return a file object that will raise IOError on read() @param errno: exception's errno value @param strerror: exception's strerror value @param filename: exception's filename value """ # Use mock.mock_open() to create a substitute for # open(). fakeOpen = mock.mock_open() # When fakeOpen() is called, it returns a Mock # that has these methods: read(), write(), __enter__(), __exit__(). self.fileobj = fakeOpen() # Add side_effect so that calling read() will always # raise an IOError. self.fileobj.read.side_effect = IOError(errno, strerror, filename) # patch open() to always return our Mock file object self.open = mock.Mock(return_value=self.fileobj) self.patch(builtins, "open", self.open) def setUpWriteError(self, errno=errno.ENOSPC, strerror="dummy-msg", filename="dummy-file"): """ patch open() to return a file object that will raise IOError on write() @param errno: exception's errno value @param strerror: exception's strerror value @param filename: exception's filename value """ # Use mock.mock_open() to create a substitute for # open(). 
fakeOpen = mock.mock_open() # When fakeOpen() is called, it returns a Mock # that has these methods: read(), write(), __enter__(), __exit__(). self.fileobj = fakeOpen() # Add side_effect so that calling write() will always # raise an IOError. self.fileobj.write.side_effect = IOError(errno, strerror, filename) # patch open() to always return our Mock file object self.open = mock.Mock(return_value=self.fileobj) self.patch(builtins, "open", self.open) class LoggingMixin(object): def setUpLogging(self): self._logEvents = [] log.addObserver(self._logEvents.append) self.addCleanup(log.removeObserver, self._logEvents.append) def assertLogged(self, *args): for regexp in args: r = re.compile(regexp) for event in self._logEvents: msg = log.textFromEventDict(event) if msg is not None and r.search(msg): return self.fail( "{0!r} not matched in log output.\n{1} ".format(regexp, self._logEvents)) def assertWasQuiet(self): self.assertEqual(self._logEvents, []) class StdoutAssertionsMixin(object): """ Mix this in to be able to assert on stdout during the test """ def setUpStdoutAssertions(self): # # sys.stdout is implemented differently # in Python 2 and Python 3, so we need to # override it differently. # In Python 2, sys.stdout is a byte stream. # In Python 3, sys.stdout is a text stream. if PY3: self.stdout = StringIO() else: self.stdout = BytesIO() self.patch(sys, 'stdout', self.stdout) def assertWasQuiet(self): self.assertEqual(self.stdout.getvalue(), '') def assertInStdout(self, exp): self.assertIn(exp, self.stdout.getvalue()) def assertStdoutEqual(self, exp, msg=None): self.assertEqual(exp, self.stdout.getvalue(), msg) def getStdout(self): return self.stdout.getvalue().strip() buildbot-3.4.0/worker/buildbot_worker/test/util/sourcecommand.py000066400000000000000000000064751413250514000252040ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function from buildbot_worker import runprocess from buildbot_worker.test.util import command class SourceCommandTestMixin(command.CommandTestMixin): """ Support for testing Source Commands; an extension of CommandTestMixin """ def make_command(self, cmdclass, args, makedirs=False, initial_sourcedata=''): """ Same as the parent class method, but this also adds some source-specific patches: * writeSourcedata - writes to self.sourcedata (self is the TestCase) * readSourcedata - reads from self.sourcedata * doClobber - invokes RunProcess(['clobber', DIRECTORY]) * doCopy - invokes RunProcess(['copy', cmd.srcdir, cmd.workdir]) """ cmd = command.CommandTestMixin.make_command(self, cmdclass, args, makedirs) # note that these patches are to an *instance*, not a class, so there # is no need to use self.patch() to reverse them self.sourcedata = initial_sourcedata def readSourcedata(): if self.sourcedata is None: raise IOError("File not found") return self.sourcedata cmd.readSourcedata = readSourcedata def writeSourcedata(res): self.sourcedata = cmd.sourcedata return res cmd.writeSourcedata = writeSourcedata # patch out a bunch of actions with invocations of RunProcess that will # end up being Expect-able by the tests. 
def doClobber(_, dirname): r = runprocess.RunProcess(self.builder, ['clobber', dirname], self.builder.basedir) return r.start() cmd.doClobber = doClobber def doCopy(_): r = runprocess.RunProcess(self.builder, ['copy', cmd.srcdir, cmd.workdir], self.builder.basedir) return r.start() cmd.doCopy = doCopy def setFileContents(filename, contents): r = runprocess.RunProcess(self.builder, ['setFileContents', filename, contents], self.builder.basedir) return r.start() cmd.setFileContents = setFileContents def check_sourcedata(self, _, expected_sourcedata): """ Assert that the sourcedata (from the patched functions - see make_command) is correct. Use this as a deferred callback. """ self.assertEqual(self.sourcedata, expected_sourcedata) return _ buildbot-3.4.0/worker/buildbot_worker/tunnel.py000066400000000000000000000113051413250514000217020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # Parts of this code were copied from Twisted Python. # Copyright (c) Twisted Matrix Laboratories. # from twisted.internet import defer from twisted.internet import interfaces from twisted.internet import protocol from zope.interface import implementer class HTTPTunnelClient(protocol.Protocol): """ This protocol handles the HTTP communication with the proxy server and subsequent creation of the tunnel. 
Once the tunnel is established, all incoming communication is forwarded directly to the wrapped protocol. """ def __init__(self, connectedDeferred): # this gets set once the tunnel is ready self._proxyWrappedProtocol = None self._connectedDeferred = connectedDeferred def connectionMade(self): request = "CONNECT {}:{} HTTP/1.1\r\n\r\n".format( self.factory.host, self.factory.port) self.transport.write(request.encode()) def connectionLost(self, reason): if self._proxyWrappedProtocol: # Proxy connectionLost to the wrapped protocol self._proxyWrappedProtocol.connectionLost(reason) def dataReceived(self, data): if self._proxyWrappedProtocol is not None: # If tunnel is already established, proxy dataReceived() # calls to the wrapped protocol return self._proxyWrappedProtocol.dataReceived(data) # process data from the proxy server _, status, _ = data.split(b"\r\n")[0].split(b" ", 2) if status != b"200": return self.transport.loseConnection() self._proxyWrappedProtocol = ( self.factory._proxyWrappedFactory.buildProtocol( self.transport.getPeer())) self._proxyWrappedProtocol.makeConnection(self.transport) self._connectedDeferred.callback(self._proxyWrappedProtocol) # forward all traffic directly to the wrapped protocol self.transport.protocol = self._proxyWrappedProtocol # In case the server sent some data together with its response, # forward those to the wrapped protocol. remaining_data = data.split(b"\r\n\r\n", 2)[1] if remaining_data: return self._proxyWrappedProtocol.dataReceived(remaining_data) return None class HTTPTunnelFactory(protocol.ClientFactory): """The protocol factory for the HTTP tunnel. It is used as a wrapper for BotFactory, which can hence be shielded from all the proxy business. 
""" protocol = HTTPTunnelClient def __init__(self, host, port, wrappedFactory): self.host = host self.port = port self._proxyWrappedFactory = wrappedFactory self._onConnection = defer.Deferred() def doStart(self): super().doStart() # forward start notifications through to the wrapped factory. self._proxyWrappedFactory.doStart() def doStop(self): # forward stop notifications through to the wrapped factory. self._proxyWrappedFactory.doStop() super().doStop() def buildProtocol(self, addr): proto = self.protocol(self._onConnection) proto.factory = self return proto def clientConnectionFailed(self, connector, reason): if not self._onConnection.called: self._onConnection.errback(reason) @implementer(interfaces.IStreamClientEndpoint) class HTTPTunnelEndpoint(object): """This handles the connection to buildbot master on given 'host' and 'port' through the proxy server given as 'proxyEndpoint'. """ def __init__(self, host, port, proxyEndpoint): self.host = host self.port = port self.proxyEndpoint = proxyEndpoint def connect(self, protocolFactory): """Connect to remote server through an HTTP tunnel.""" tunnel = HTTPTunnelFactory(self.host, self.port, protocolFactory) d = self.proxyEndpoint.connect(tunnel) # once tunnel connection is established, # defer the subsequent server connection d.addCallback(lambda result: tunnel._onConnection) return d buildbot-3.4.0/worker/buildbot_worker/util/000077500000000000000000000000001413250514000210005ustar00rootroot00000000000000buildbot-3.4.0/worker/buildbot_worker/util/__init__.py000066400000000000000000000067751413250514000231300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from future.utils import text_type import itertools import textwrap import time from ._hangcheck import HangCheckFactory from ._notifier import Notifier __all__ = [ "remove_userpassword", "now", "Obfuscated", "rewrap", "HangCheckFactory", "Notifier", ] def remove_userpassword(url): if '@' not in url: return url if '://' not in url: return url # urlparse would've been nice, but doesn't support ssh... sigh (protocol, repo_url) = url.split('://') repo_url = repo_url.split('@')[-1] return protocol + '://' + repo_url def now(_reactor=None): if _reactor and hasattr(_reactor, "seconds"): return _reactor.seconds() return time.time() class Obfuscated(object): """An obfuscated string in a command""" def __init__(self, real, fake): self.real = real self.fake = fake def __str__(self): return self.fake def __repr__(self): return repr(self.fake) def __eq__(self, other): return other.__class__ is self.__class__ and \ other.real == self.real and \ other.fake == self.fake @staticmethod def to_text(s): if isinstance(s, (text_type, bytes)): return s return str(s) @staticmethod def get_real(command): rv = command if isinstance(command, list): rv = [] for elt in command: if isinstance(elt, Obfuscated): rv.append(elt.real) else: rv.append(Obfuscated.to_text(elt)) return rv @staticmethod def get_fake(command): rv = command if isinstance(command, list): rv = [] for elt in command: if isinstance(elt, Obfuscated): rv.append(elt.fake) else: rv.append(Obfuscated.to_text(elt)) return rv def rewrap(text, 
width=None): """ Rewrap text for output to the console. Removes common indentation and rewraps paragraphs according to the console width. Line feeds between paragraphs preserved. Formatting of paragraphs that starts with additional indentation preserved. """ if width is None: width = 80 # Remove common indentation. text = textwrap.dedent(text) def needs_wrapping(line): # Line always non-empty. return not line[0].isspace() # Split text by lines and group lines that comprise paragraphs. wrapped_text = "" for do_wrap, lines in itertools.groupby(text.splitlines(True), key=needs_wrapping): paragraph = ''.join(lines) if do_wrap: paragraph = textwrap.fill(paragraph, width) wrapped_text += paragraph return wrapped_text buildbot-3.4.0/worker/buildbot_worker/util/_hangcheck.py000066400000000000000000000101351413250514000234240ustar00rootroot00000000000000""" Protocol wrapper that will detect hung connections. In particular, since PB expects the server to talk first and HTTP expects the client to talk first, when a PB client talks to an HTTP server, neither side will talk, leading to a hung connection. This wrapper will disconnect in that case, and inform the caller. """ from __future__ import absolute_import from __future__ import print_function from twisted.internet.interfaces import IProtocol from twisted.internet.interfaces import IProtocolFactory from twisted.python.components import proxyForInterface def _noop(): pass class HangCheckProtocol( proxyForInterface(IProtocol, '_wrapped_protocol'), object, ): """ Wrap a protocol, so the underlying connection will disconnect if the other end doesn't send data within a given timeout. """ transport = None _hungConnectionTimer = None # hung connections wait for a relatively long time, since a busy master may # take a while to get back to us. _HUNG_CONNECTION_TIMEOUT = 120 def __init__(self, wrapped_protocol, hung_callback=_noop, reactor=None): """ :param IProtocol wrapped_protocol: The protocol to wrap. 
:param hung_callback: Called when the connection has hung. :type hung_callback: callable taking no arguments. :param IReactorTime reactor: The reactor to use to schedule the hang check. """ if reactor is None: from twisted.internet import reactor self._wrapped_protocol = wrapped_protocol self._reactor = reactor self._hung_callback = hung_callback def makeConnection(self, transport): # Note that we don't wrap the transport for the protocol, # because we only care about noticing data received, not # sent. self.transport = transport super(HangCheckProtocol, self).makeConnection(transport) self._startHungConnectionTimer() def dataReceived(self, data): self._stopHungConnectionTimer() super(HangCheckProtocol, self).dataReceived(data) def connectionLost(self, reason): self._stopHungConnectionTimer() super(HangCheckProtocol, self).connectionLost(reason) def _startHungConnectionTimer(self): """ Start a timer to detect if the connection is hung. """ def hungConnection(): self._hung_callback() self._hungConnectionTimer = None self.transport.loseConnection() self._hungConnectionTimer = self._reactor.callLater( self._HUNG_CONNECTION_TIMEOUT, hungConnection) def _stopHungConnectionTimer(self): """ Cancel the hang check timer, since we have received data or been closed. """ if self._hungConnectionTimer: self._hungConnectionTimer.cancel() self._hungConnectionTimer = None class HangCheckFactory( proxyForInterface(IProtocolFactory, '_wrapped_factory'), object, ): """ Wrap a protocol factory, so the underlying connection will disconnect if the other end doesn't send data within a given timeout. """ def __init__(self, wrapped_factory, hung_callback): """ :param IProtocolFactory wrapped_factory: The factory to wrap. :param hung_callback: Called when the connection has hung. :type hung_callback: callable taking no arguments. 
""" self._wrapped_factory = wrapped_factory self._hung_callback = hung_callback def buildProtocol(self, addr): protocol = self._wrapped_factory.buildProtocol(addr) return HangCheckProtocol(protocol, hung_callback=self._hung_callback) # This is used as a ClientFactory, which doesn't have a specific interface, so forward the # additional methods. def startedConnecting(self, connector): self._wrapped_factory.startedConnecting(connector) def clientConnectionFailed(self, connector, reason): self._wrapped_factory.clientConnectionFailed(connector, reason) def clientConnectionLost(self, connector, reason): self._wrapped_factory.clientConnectionLost(connector, reason) buildbot-3.4.0/worker/buildbot_worker/util/_notifier.py000066400000000000000000000030261413250514000233310ustar00rootroot00000000000000# Copyright Buildbot Team Members # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
from twisted.internet.defer import Deferred class Notifier: # this is a copy of buildbot.util.Notifier def __init__(self): self._waiters = [] def wait(self): d = Deferred() self._waiters.append(d) return d def notify(self, result): waiters, self._waiters = self._waiters, [] for waiter in waiters: waiter.callback(result) def __bool__(self): return bool(self._waiters) buildbot-3.4.0/worker/docker/000077500000000000000000000000001413250514000160755ustar00rootroot00000000000000buildbot-3.4.0/worker/docker/buildbot.tac000066400000000000000000000025301413250514000203720ustar00rootroot00000000000000import fnmatch import os import sys from twisted.application import service from twisted.python.log import FileLogObserver from twisted.python.log import ILogObserver from buildbot_worker.bot import Worker # setup worker basedir = os.environ.get("BUILDBOT_BASEDIR", os.path.abspath(os.path.dirname(__file__))) application = service.Application('buildbot-worker') application.setComponent(ILogObserver, FileLogObserver(sys.stdout).emit) # and worker on the same process! 
buildmaster_host = os.environ.get("BUILDMASTER", 'localhost') port = int(os.environ.get("BUILDMASTER_PORT", 9989)) workername = os.environ.get("WORKERNAME", 'docker') passwd = os.environ.get("WORKERPASS") # delete the password from the environ so that it is not leaked in the log blacklist = os.environ.get("WORKER_ENVIRONMENT_BLACKLIST", "WORKERPASS").split() for name in list(os.environ.keys()): for toremove in blacklist: if fnmatch.fnmatch(name, toremove): del os.environ[name] keepalive = 600 umask = None maxdelay = 300 allow_shutdown = None maxretries = 10 delete_leftover_dirs = False s = Worker(buildmaster_host, port, workername, passwd, basedir, keepalive, umask=umask, maxdelay=maxdelay, allow_shutdown=allow_shutdown, maxRetries=maxretries, delete_leftover_dirs=delete_leftover_dirs) s.setServiceParent(application) buildbot-3.4.0/worker/docs/000077500000000000000000000000001413250514000155565ustar00rootroot00000000000000buildbot-3.4.0/worker/docs/buildbot-worker.1000066400000000000000000000073471413250514000207660ustar00rootroot00000000000000.\" This file is part of Buildbot. Buildbot is free software: you can .\" redistribute it and/or modify it under the terms of the GNU General Public .\" License as published by the Free Software Foundation, version 2. .\" .\" This program is distributed in the hope that it will be useful, but WITHOUT .\" ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS .\" FOR A PARTICULAR PURPOSE. See the GNU General Public License for more .\" details. .\" .\" You should have received a copy of the GNU General Public License along with .\" this program; if not, write to the Free Software Foundation, Inc., 51 .\" Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
.\" .\" Copyright Buildbot Team Members .TH BUILDBOT-WORKER "1" "August 2010" "Buildbot" "User Commands" .SH NAME buildbot-worker \- a tool for managing buildbot worker instances .SH SYNOPSIS .PP .B buildbot-worker [ .BR "global options" ] .I command [ .BR "command options" ] .PP .B buildbot-worker create-worker [ .BR \-q | \-\-quiet ] [ .BR \-f | \-\-force ] [ .BR \-r | \-\-relocatable ] [ .BR \-n | \-\-no-logrotate ] [ .BR \-k | \-\-keepalive .I TIME ] [ .BR --usepty {0|1} ] [ .BR \-\-umask .I UMASK ] [ .BR \-s | \-\-log-size .I SIZE ] [ .BR \-l | \-\-log-count .I COUNT ] [ .BR \-\-delete\-leftover\-dirs ] [ .BR \-\-verbose ] .I PATH .I MASTER .I USERNAME .I PASSWORD .PP .B buildbot-worker [ .BR \-\-verbose ] { .BR start | stop | restart } [ .I PATH ] .PP .B buildbot-worker [ .BR \-\-verbose ] { .BR \-\-help | \-\-version } .PP .B buildbot-worker .I command .BR \-h | \-\-help .SH DESCRIPTION .\" Putting a newline after each sentence can generate better output. The `buildbot-worker' command-line tool can be used to start or stop a Buildbot worker or create a new worker instance. .SH OPTIONS .SS Commands .TP .BR create-worker Create and populate a directory for a new worker .TP .BR start Start a worker .TP .BR stop Stop a worker .TP .BR restart Restart a worker .SS Global options .TP .BR \-h | \-\-help Print the list of available commands and global options. All subsequent commands are ignored. .TP .BR --version Print twistd and buildbot-worker version. All subsequent commands are ignored. .TP .BR --verbose Verbose output. .SS create-worker command options .TP .BR \-f | \-\-force Re-use an existing directory. .TP .BR \-h | \-\-help Show help for current command and exit. All subsequent commands are ignored. .TP .BR \-k | \-\-keepalive Send keepalive requests to buildmaster every .I TIME seconds. Default value is 600 seconds. .TP .BR \-l | \-\-log-count Limit the number of kept old twisted log files to .IR COUNT . All files are kept by default. 
.TP .BR \-q | \-\-quiet Do not emit the commands being run. .TP .BR \-r | \-\-relocatable Create a relocatable buildbot.tac. .TP .BR \-n | \-\-no-logrotate Do not permit worker rotate logs by itself. .TP .BR \-s | \-\-log-size Set size at which twisted lof file is rotated to .I SIZE bytes. Default value is 1000000 bytes. .TP .BR \-\-umask Set umask for files created by worker. Default value is 077 which means only owner can access the files. See .BR umask (2) for more details. .TP .BR \-\-usepty Set whether child processes should be run in a pty (0 means do not run in a pty). Default value is 0. .TP .BR \-\-delete\-leftover\-dirs Set to remove unexpected directories in worker base directory. .TP .I PATH Path to worker base directory. .TP .I MASTER Set the host and port of buildbot master to attach to in form .IR HOST:PORT . This should be provided by buildmaster administrator. .TP .I USERNAME Worker name to connect with. This should be provided by buildmaster administrator. .TP .I PASSWORD Worker password to connect with. This should be provided by buildmaster administrator. .SH "SEE ALSO" .BR buildbot (1), .BR umask (2), buildbot-3.4.0/worker/setup.cfg000066400000000000000000000001041413250514000164420ustar00rootroot00000000000000[aliases] test = trial -m buildbot_worker [bdist_wheel] universal=1 buildbot-3.4.0/worker/setup.py000077500000000000000000000136521413250514000163520ustar00rootroot00000000000000#!/usr/bin/env python # # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Standard setup script. """ from __future__ import absolute_import from __future__ import print_function import os import sys from buildbot_worker import version try: # If setuptools is installed, then we'll add setuptools-specific arguments # to the setup args. import setuptools from setuptools import setup from setuptools.command.sdist import sdist from distutils.command.install_data import install_data except ImportError: setuptools = None from distutils.command.sdist import sdist from distutils.core import setup BUILDING_WHEEL = bool("bdist_wheel" in sys.argv) class our_install_data(install_data): def finalize_options(self): self.set_undefined_options('install', ('install_lib', 'install_dir'), ) install_data.finalize_options(self) def run(self): install_data.run(self) # ensure there's a buildbot_worker/VERSION file fn = os.path.join(self.install_dir, 'buildbot_worker', 'VERSION') with open(fn, 'w') as f: f.write(version) self.outfiles.append(fn) class our_sdist(sdist): def make_release_tree(self, base_dir, files): sdist.make_release_tree(self, base_dir, files) # ensure there's a buildbot_worker/VERSION file fn = os.path.join(base_dir, 'buildbot_worker', 'VERSION') open(fn, 'w').write(version) # ensure that NEWS has a copy of the latest release notes, copied from # the master tree, with the proper version substituted src_fn = os.path.join('..', 'master', 'docs', 'relnotes/index.rst') with open(src_fn) as f: src = f.read() src = src.replace('|version|', version) dst_fn = os.path.join(base_dir, 'NEWS') with open(dst_fn, 'w') as f: f.write(src) setup_args = { 'name': "buildbot-worker", 'version': version, 'description': "Buildbot Worker Daemon", 'long_description': "See the 'buildbot' package for details", 'author': 
"Brian Warner", 'author_email': "warner-buildbot@lothar.com", 'maintainer': "Dustin J. Mitchell", 'maintainer_email': "dustin@v.igoro.us", 'url': "http://buildbot.net/", 'classifiers': [ 'Development Status :: 5 - Production/Stable', 'Environment :: No Input/Output (Daemon)', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)', 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Testing', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], 'packages': [ "buildbot_worker", "buildbot_worker.util", "buildbot_worker.backports", "buildbot_worker.commands", "buildbot_worker.scripts", "buildbot_worker.monkeypatches", ] + ([] if BUILDING_WHEEL else [ # skip tests for wheels (save 40% of the archive) "buildbot_worker.test", "buildbot_worker.test.fake", "buildbot_worker.test.unit", "buildbot_worker.test.util", ]), # mention data_files, even if empty, so install_data is called and # VERSION gets copied 'data_files': [("buildbot_worker", [])], 'package_data': { '': [ 'VERSION', ] }, 'cmdclass': { 'install_data': our_install_data, 'sdist': our_sdist }, 'entry_points': { 'console_scripts': [ 'buildbot-worker=buildbot_worker.scripts.runner:run', ]} } # set zip_safe to false to force Windows installs to always unpack eggs # into directories, which seems to work better -- # see http://buildbot.net/trac/ticket/907 if sys.platform == "win32": setup_args['zip_safe'] = False setup_args['entry_points']['console_scripts'].append( 'buildbot_worker_windows_service=buildbot_worker.scripts.windows_service:HandleCommandLine', # noqa pylint: disable=line-too-long ) twisted_ver = ">= 17.9.0" if setuptools is not None: 
setup_args['install_requires'] = [ 'twisted ' + twisted_ver, 'future', ] # buildbot_worker_windows_service needs pywin32 if sys.platform == "win32": setup_args['install_requires'].append('pywin32') # Unit test hard dependencies. test_deps = [ 'mock', ] setup_args['tests_require'] = test_deps setup_args['extras_require'] = { 'test': [ 'pep8', # spellcheck introduced in version 1.4.0 'pylint>=1.4.0', 'pyenchant', 'flake8~=3.9.0', ] + test_deps, } if '--help-commands' in sys.argv or 'trial' in sys.argv or 'test' in sys.argv: setup_args['setup_requires'] = [ 'setuptools_trial', ] if os.getenv('NO_INSTALL_REQS'): setup_args['install_requires'] = None setup_args['extras_require'] = None setup(**setup_args) buildbot-3.4.0/worker/tox.ini000066400000000000000000000005571413250514000161500ustar00rootroot00000000000000# Tox (http://tox.testrun.org/) is a tool for running tests # in multiple virtualenvs. This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. [tox] envlist = py24, py25, py26, py27, py35, py36, py37 [testenv] deps = setuptools_trial commands = python setup.py test buildbot-3.4.0/www/000077500000000000000000000000001413250514000141415ustar00rootroot00000000000000buildbot-3.4.0/www/README.txt000066400000000000000000000015641413250514000156450ustar00rootroot00000000000000# About # This directory contains the components that comprise the Buildbot web interface. The core interface is defined in `www/base`, with other plugins in sibling directories. # Connection to Python # The setup.py script in each directory is designed to create wheel packages containing pre-built Angular files. This means that installing the buildbot-www package from PyPI gets all of the code required to run the Buildbot UI, without any requirement for Node.js or any NPM install. The ordinary 'python setup.py sdist' and 'python setup.py install' commands will work just as expected. 
# For Python Hackers # If you're finding yourself facing errors due to buildbot_www not being installed, try running `make prebuilt_frontend` in the root directory; this will install prebuilt versions of each of these distributions, based on the latest commits to the upstream master. buildbot-3.4.0/www/badges/000077500000000000000000000000001413250514000153665ustar00rootroot00000000000000buildbot-3.4.0/www/badges/buildbot_badges/000077500000000000000000000000001413250514000204775ustar00rootroot00000000000000buildbot-3.4.0/www/badges/buildbot_badges/__init__.py000066400000000000000000000120571413250514000226150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from xml.sax.saxutils import escape import jinja2 from klein import Klein from twisted.internet import defer import cairocffi as cairo import cairosvg from buildbot.process.results import Results from buildbot.util import bytes2unicode from buildbot.www.plugin import Application class Api: app = Klein() default = { # note that these defaults are documented in configuration/www.rst "left_pad": 5, "left_text": "Build Status", "left_color": "#555", "right_pad": 5, "border_radius": 5, "style": "plastic", "template_name": "{style}.svg.j2", "font_face": "DejaVu Sans", "font_size": 11, "color_scheme": { "exception": "#007ec6", # blue "failure": "#e05d44", # red "retry": "#007ec6", # blue "running": "#007ec6", # blue "skipped": "a4a61d", # yellowgreen "success": "#4c1", # brightgreen "unknown": "#9f9f9f", # lightgrey "warnings": "#dfb317" # yellow } } def __init__(self, ep): self.ep = ep self.env = jinja2.Environment(loader=jinja2.ChoiceLoader([ jinja2.PackageLoader('buildbot_badges'), jinja2.FileSystemLoader('templates') ])) def makeConfiguration(self, request): config = {} config.update(self.default) for k, v in self.ep.config.items(): if k == 'color_scheme': config[k].update(v) else: config[k] = v for k, v in request.args.items(): k = bytes2unicode(k) config[k] = escape(bytes2unicode(v[0])) return config @app.route("/.png", methods=['GET']) @defer.inlineCallbacks def getPng(self, request, builder): svg = yield self.getSvg(request, builder) request.setHeader('content-type', 'image/png') return cairosvg.svg2png(svg) @app.route("/.svg", methods=['GET']) @defer.inlineCallbacks def getSvg(self, request, builder): config = self.makeConfiguration(request) request.setHeader('content-type', 'image/svg+xml') request.setHeader('cache-control', 'no-cache') # get the last build for that builder using the data api last_build = yield self.ep.master.data.get( ("builders", builder, "builds"), limit=1, order=['-number']) # get the status text 
corresponding to results code results_txt = "unknown" if last_build: results = last_build[0]['results'] complete = last_build[0]['complete'] if not complete: results_txt = "running" elif results >= 0 and results < len(Results): results_txt = Results[results] svgdata = self.makesvg(results_txt, results_txt, left_text=config['left_text'], config=config) return svgdata def textwidth(self, text, config): """Calculates the width of the specified text. """ surface = cairo.SVGSurface(None, 1280, 200) ctx = cairo.Context(surface) ctx.select_font_face(config['font_face'], cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL) ctx.set_font_size(int(config['font_size'])) return ctx.text_extents(text)[4] def makesvg(self, right_text, status=None, left_text=None, left_color=None, config=None): """Renders an SVG from the template, using the specified data """ right_color = config['color_scheme'].get(status, "#9f9f9f") # Grey left_text = left_text or config['left_text'] left_color = left_color or config['left_color'] left = { "color": left_color, "text": left_text, "width": self.textwidth(left_text, config) } right = { "color": right_color, "text": right_text, "width": self.textwidth(right_text, config) } template = self.env.get_template(config['template_name'].format(**config)) return template.render(left=left, right=right, config=config) # create the interface for the setuptools entry point ep = Application(__name__, "Buildbot badges", ui=False) ep.resource = Api(ep).app.resource() 
buildbot-3.4.0/www/badges/buildbot_badges/static/000077500000000000000000000000001413250514000217665ustar00rootroot00000000000000buildbot-3.4.0/www/badges/buildbot_badges/static/.placeholder000066400000000000000000000000001413250514000242370ustar00rootroot00000000000000buildbot-3.4.0/www/badges/buildbot_badges/templates/000077500000000000000000000000001413250514000224755ustar00rootroot00000000000000buildbot-3.4.0/www/badges/buildbot_badges/templates/badgeio.svg.j2000066400000000000000000000022741413250514000251270ustar00rootroot00000000000000 {{ left.text }} {{ left.text }} {{ right.text }} {{ right.text }} buildbot-3.4.0/www/badges/buildbot_badges/templates/flat-square.svg.j2000066400000000000000000000013121413250514000257510ustar00rootroot00000000000000 {{ left.text }} {{ right.text }} buildbot-3.4.0/www/badges/buildbot_badges/templates/flat.svg.j2000066400000000000000000000027101413250514000244560ustar00rootroot00000000000000 {{ left.text }} {{ left.text }} {{ right.text }} {{ right.text }} buildbot-3.4.0/www/badges/buildbot_badges/templates/plastic.svg.j2000066400000000000000000000030461413250514000251720ustar00rootroot00000000000000 {{ left.text }} {{ left.text }} {{ right.text }} {{ right.text }} buildbot-3.4.0/www/badges/setup.cfg000066400000000000000000000000001413250514000171750ustar00rootroot00000000000000buildbot-3.4.0/www/badges/setup.py000066400000000000000000000032241413250514000171010ustar00rootroot00000000000000#!/usr/bin/env python # # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members try: from buildbot_pkg import setup_www_plugin except ImportError: import sys print('Please install buildbot_pkg module in order to install that ' 'package, or use the pre-build .whl modules available on pypi', file=sys.stderr) sys.exit(1) setup_www_plugin( name='buildbot-badges', description='Buildbot badges', author=u'Buildbot Team Members', author_email=u'users@buildbot.net', url='http://buildbot.net/', packages=['buildbot_badges'], install_requires=[ 'klein', 'CairoSVG', 'cairocffi', 'Jinja2' ], package_data={ '': [ # dist is required by buildbot_pkg 'VERSION', 'templates/*.svg.j2', 'static/.placeholder' ], }, entry_points=""" [buildbot.www] badges = buildbot_badges:ep """, classifiers=[ 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)' ], ) buildbot-3.4.0/www/badges/yarn.lock000066400000000000000000000001261413250514000172100ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 buildbot-3.4.0/www/base/000077500000000000000000000000001413250514000150535ustar00rootroot00000000000000buildbot-3.4.0/www/base/buildbot_www/000077500000000000000000000000001413250514000175635ustar00rootroot00000000000000buildbot-3.4.0/www/base/buildbot_www/__init__.py000066400000000000000000000015171413250514000217000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.www.plugin import Application # create the interface for the setuptools entry point ep = Application(__name__, "Buildbot UI") buildbot-3.4.0/www/base/karma.conf.js000066400000000000000000000003651413250514000174340ustar00rootroot00000000000000const common = require('buildbot-build-common'); module.exports = function karmaConfig (config) { common.createTemplateKarmaConfig(config, { testRoot: 'src/tests.webpack.js', webpack: require('./webpack.config') }); }; buildbot-3.4.0/www/base/package.json000066400000000000000000000026331413250514000173450ustar00rootroot00000000000000{ "name": "buildbot-www", "plugin_name": "buildbot-www", "private": true, "main": "buildbot_www/static/scripts.js", "style": "buildbot_www/static/styles.css", "scripts": { "build": "rimraf buildbot_www/static && webpack --bail --progress --profile --env prod", "build-dev": "rimraf buildbot_www/static && webpack --bail --progress --profile --env dev", "dev": "webpack --bail --progress --profile --watch --env dev", "test": "karma start", "test-watch": "karma start --auto-watch --no-single-run" }, "devDependencies": { "angular-mocks": "^1.7.9", "buildbot-build-common": "link:../build_common", "copy-webpack-plugin": "^5.0.3", "lodash": "^4.17.21", "pug-cli": "^1.0.0-alpha6", "rimraf": "^2.6.3", "webpack-shell-plugin": "^0.5.0" }, "dependencies": { "@uirouter/angularjs": "^1.0.22", "angular": "^1.8.0", "angular-animate": "^1.7.9", "angular-bootstrap-multiselect": "^1.1.11", "angular-recursion": "^1.0.5", "angular-ui-bootstrap": "^2.5.6", "bootstrap": "^3.1.1", "buildbot-data-js": "link:../data_module", "d3": "^3.5.17", "font-awesome": "^4.7.0", "guanlecoja-ui": "link:../guanlecoja-ui", 
"moment": "^2.24.0", "outdated-browser-rework": "^2.8.0", "popper.js": "^1.15.0" } } buildbot-3.4.0/www/base/postcss.config.js000066400000000000000000000001711413250514000203520ustar00rootroot00000000000000module.exports = { plugins: { autoprefixer: { browsers: ['last 2 versions'] }, }, }; buildbot-3.4.0/www/base/setup.cfg000066400000000000000000000000001413250514000166620ustar00rootroot00000000000000buildbot-3.4.0/www/base/setup.py000066400000000000000000000031271413250514000165700ustar00rootroot00000000000000#!/usr/bin/env python # # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members try: from buildbot_pkg import setup_www_plugin except ImportError: import sys print('Please install buildbot_pkg module in order to install that ' 'package, or use the pre-build .whl modules available on pypi', file=sys.stderr) sys.exit(1) setup_www_plugin( name='buildbot-www', description='Buildbot UI', author=u'Pierre Tardy', author_email=u'tardyp@gmail.com', setup_requires=['buildbot_pkg'], install_requires=['buildbot'], url='http://buildbot.net/', packages=['buildbot_www'], package_data={ '': [ 'VERSION', 'static/*', 'static/img/*', 'static/fonts/*', ] }, entry_points=""" [buildbot.www] base = buildbot_www:ep """, classifiers=[ 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)' ], ) buildbot-3.4.0/www/base/src/000077500000000000000000000000001413250514000156425ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/000077500000000000000000000000001413250514000164225ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/about/000077500000000000000000000000001413250514000175345ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/about/about.controller.js000066400000000000000000000004451413250514000233710ustar00rootroot00000000000000class About { constructor($scope, config, restService) { $scope.config = config; restService.get('application.spec').then(specs => $scope.specs = specs['specs']); } } angular.module('app') .controller('aboutController', ['$scope', 'config', 'restService', About]); buildbot-3.4.0/www/base/src/app/about/about.route.js000066400000000000000000000014411413250514000223410ustar00rootroot00000000000000class AboutState { constructor($stateProvider, glMenuServiceProvider) { // Name of the state const name = 'about'; // Menu configuration glMenuServiceProvider.addGroup({ name, caption: 'About', icon: 'info-circle', order: 99 }); // Configuration const cfg = { group: name, caption: 'About' }; // Register new state const state = { controller: `${name}Controller`, template: 
require('./about.tpl.jade'), name, url: '/about', data: cfg }; $stateProvider.state(state); } } angular.module('app') .config(['$stateProvider', 'glMenuServiceProvider', AboutState]); buildbot-3.4.0/www/base/src/app/about/about.tpl.jade000066400000000000000000000020431413250514000222700ustar00rootroot00000000000000.container .row .well h2 img.nut-spin(src="img/icon.svg", width="64px") |  About this  a(href="http://buildbot.net") buildbot |  running for  a(ng-href="{{config.titleURL}}") {{config.title}} .row .col-sm-12 ul li(ng-repeat="v in config.versions") {{v[0]}} version: {{v[1]}} .row .well h2 Configuration | buildbot-www is configured using rawdata(data='config') .row .well h2 API description ul.list-group li.list-group-item(ng-repeat='spec in specs |orderBy:"path"') b(ng-click="show_detail = ! show_detail", ng-init='show_detail=0') /{{spec.path}}: | {{spec.type_spec.fields.length}} fields dl.dl-horizontal(ng-show='show_detail') span(ng-repeat='field in spec.type_spec.fields |orderBy:"path"') dt {{field.name}} dd {{field.type}} span(ng-show="field.type=='list'") {{field.type_spec}} buildbot-3.4.0/www/base/src/app/app.browserwarning.notranspile.js000066400000000000000000000014161413250514000251470ustar00rootroot00000000000000// this file is not transpiled and included direcly to index.html because we must show the browser // warning even on ancient browsers. 
The browser list here must correspond to the browser list in // babel config which is located at www/build_common/src/webpack.js outdatedBrowserRework({ browserSupport: { 'Chrome': 56, // Includes Chrome for mobile devices 'Chromium': 56, // same as Chrome, but needs to be listed explicitly // (https://github.com/mikemaccana/outdated-browser-rework/issues/49) 'Edge': 13, 'Safari': 10, 'Mobile Safari': 10, 'Firefox': 52, 'Opera': 43, // uses Chrome 56 internally 'IE': false }, requireChromeOnAndroid: false, isUnknownBrowserOK: true, }); buildbot-3.4.0/www/base/src/app/app.module.js000066400000000000000000000105771413250514000210360ustar00rootroot00000000000000window.$ = window.jQuery = require('jquery'); /* make jquery available as global variable for plugins */ import 'angular'; import '@uirouter/angularjs'; import 'angular-animate'; import 'angular-bootstrap-multiselect'; import 'angular-recursion'; import 'angular-ui-bootstrap'; import 'guanlecoja-ui'; import 'buildbot-data-js'; angular.module('app', [ 'buildbot_config', 'ngAnimate', 'ui.bootstrap', 'ui.router', 'RecursionHelper', 'guanlecoja.ui', 'bbData', 'btorfs.multiselect', ]); // require common module first because it declares a new module other files will need require('./common/common.module.js'); require('./about/about.controller.js'); require('./about/about.route.js'); require('./app.route.js'); require('./app.run.js'); require('./builders/builder/builder.controller.js'); require('./builders/builder/builder.route.js'); require('./builders/builders.controller.js'); require('./builders/builders.route.js'); require('./builders/buildrequest/buildrequest.controller.js'); require('./builders/buildrequest/buildrequest.route.js'); require('./builders/buildrequest/forcedialog/forcedialog.config.js'); require('./builders/buildrequest/forcedialog/forcedialog.controller.js'); require('./builders/builds/build.controller.js'); require('./builders/builds/build.route.js'); require('./builders/log/log.controller.js'); 
require('./builders/log/log.route.js'); require('./builders/log/logviewer/logpreview.directive.js'); require('./builders/log/logviewer/logviewer.directive.js'); require('./builders/log/logviewer/scrollviewport.directive.js'); require('./builders/services/findbuilds.factory.js'); require('./builders/services/timeout.factory.js'); require('./builders/step/step.controller.js'); require('./builders/step/step.route.js'); require('./buildrequests/pendingbuildrequests.controller.js'); require('./buildrequests/pendingbuildrequests.route.js'); require('./changes/changebuilds/changebuilds.controller.js'); require('./changes/changebuilds/changebuilds.route.js'); require('./changes/changes.controller.js'); require('./changes/changes.route.js'); require('./common/common.constant.js'); require('./common/directives/basefield/basefield.directive.js'); require('./common/directives/buildrequestsummary/buildrequestsummary.directive.js'); require('./common/directives/builds/buildstable.directive.js'); require('./common/directives/buildsticker/buildsticker.directive.js'); require('./common/directives/buildsummary/buildsummary.directive.js'); require('./common/directives/changedetails/changedetails.directive.js'); require('./common/directives/changelist/changelist.directive.js'); require('./common/directives/connectionstatus/connectionstatus.directive.js'); require('./common/directives/forcefields/forcefields.directive.js'); require('./common/directives/lineplot/lineplot.directive.js'); require('./common/directives/loginbar/loginbar.directive.js'); require('./common/directives/properties/properties.directive.js'); require('./common/directives/rawdata/rawdata.directive.js'); require('./common/directives/windowtitle/windowtitle.directive.js'); require('./common/filters/encodeURI.filter.js'); require('./common/filters/moment/moment.constant.js'); require('./common/filters/moment/moment.filter.js'); require('./common/filters/publicFields.filter.js'); 
require('./common/filters/limitStringLength.filter.js'); require('./common/services/ansicodes/ansicodes.service.js'); require('./common/services/buildercache/buildercache.service.js'); require('./common/services/datagrouper/datagrouper.service.js'); require('./common/services/favicon/favicon.service.js'); require('./common/services/results/results.service.js'); require('./common/services/settings/settings.service.js'); require('./d3/d3.service.js'); require('./home/home.controller.js'); require('./home/home.route.js'); require('./masters/master/master.route.js'); require('./masters/masters.controller.js'); require('./masters/masters.route.js'); require('./schedulers/schedulers.controller.js'); require('./schedulers/schedulers.route.js'); require('./settings/settings.controller.js'); require('./settings/settings.route.js'); require('./workers/worker/worker.route.js'); require('./workers/workeraction.dialog.js'); require('./workers/workers.controller.js'); require('./workers/workers.route.js'); require('../img/favicon.ico'); require('../img/icon.png'); require('../img/icon.svg'); require('../img/icon16.svg'); require('../img/nobody.png'); buildbot-3.4.0/www/base/src/app/app.route.js000077500000000000000000000024261413250514000207040ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Route { constructor($urlRouterProvider, glMenuServiceProvider, config) { let apptitle; $urlRouterProvider.otherwise(config.default_page || '/'); // the app title needs to be < 18 chars else the UI looks bad // we try to find best option // Note that we warn about too long title in master/buildbot/config.py. // Adjust that code if the maximum length changes. 
let max_title_len = 18; if (config.title != null) { apptitle = `Buildbot: ${config.title}`; if (apptitle.length > max_title_len) { apptitle = config.title; } if (apptitle.length > max_title_len) { apptitle = "Buildbot"; } } else { apptitle = "Buildbot"; } glMenuServiceProvider.setAppTitle(apptitle); } } // all states config are in the modules angular.module('app') .config(['$urlRouterProvider', 'glMenuServiceProvider', 'config', Route]) .config(['$locationProvider', function($locationProvider) { $locationProvider.hashPrefix(''); }]); buildbot-3.4.0/www/base/src/app/app.run.js000066400000000000000000000076111413250514000203500ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class RouteChangeListener { constructor($rootScope, $log, config, glNotificationService) { // fire an event related to the current route $rootScope.$on('$routeChangeSuccess', (event, currentRoute, priorRoute) => $rootScope.$broadcast(`${currentRoute.controller}$routeChangeSuccess`, currentRoute, priorRoute) ); if (config.on_load_warning != null) { setTimeout(() => glNotificationService.notify({msg:config.on_load_warning}) , 500); } } } // FIXME hack to reload the window if the websocket is disconnected // We should fix properly in dataModule using :bug:`3462`, but after initial nine release // fix in dataModule is much harder, as when reconnection is detected, we should // reload all watched collections, take care of sending the proper events, etc class ReconnectingListener { constructor($rootScope, $log, socketService, $interval, $http, $window, $timeout) { let reconnecting = false; let hasBeenConnected = false; // first poll for an initial connected socket // we cannot really use events, as we are not doing this inside dataModule var interval = $interval(function() { if 
(socketService.socket != null) { if ((socketService.socket.readyState === 1) && !hasBeenConnected) { $interval.cancel(interval); interval = null; hasBeenConnected = true; socketService.socket.onclose = function(evt) { // ignore if we are navigating away from buildbot // see https://github.com/buildbot/buildbot/issues/3306 if (evt.code <= 1001) { // CLOSE_GOING_AWAY or CLOSE_NORMAL return; } reconnecting = true; $rootScope.$apply(() => // send event to connectionstatus directive $rootScope.$broadcast("mq.lost_connection") ); reloadWhenReady(); }; } } } , 1000); // following code do the polling of reconnection, and eventually // reload the document, when we managed to get the index page // we avoid to do that polling if the tab is hidden $window.document.addEventListener("visibilitychange", function() { if (!$window.document.hidden && reconnecting) { reloadWhenReady(); } }); var reloadWhenReady = function() { // if the window/tab is hidden, we stop the polling // if browser does not support visibility api, this will just always poll if ($window.document.hidden) { return; } $http.get($window.document.location.href).then(function() { // send event to connectionstatus directive $rootScope.$broadcast("mq.restored_connection"); // wait one second before actually reload to let user to see message $timeout((() => $window.document.location.reload()), 1000); } , () => // error callback: if we cannot connect, we will retry in 3 seconds $timeout(reloadWhenReady, 3000) ); }; } } angular.module('app') .run(['$rootScope', '$log', 'config', 'glNotificationService', RouteChangeListener]) .run(['$rootScope', '$log', 'socketService', '$interval', '$http', '$window', '$timeout', ReconnectingListener]); 
buildbot-3.4.0/www/base/src/app/builders/000077500000000000000000000000001413250514000202335ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/builders/builder/000077500000000000000000000000001413250514000216615ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/builders/builder/builder.controller.js000066400000000000000000000170401413250514000260310ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class BuilderController { constructor($rootScope, $scope, dataService, $stateParams, resultsService, glBreadcrumbService, $state, glTopbarContextualActionsService, $q, $window) { // make resultsService utilities available in the template _.mixin($scope, resultsService); const data = dataService.open().closeOnDestroy($scope); const builderid = $stateParams.builder; $scope.forceschedulers = []; $scope.is_cancelling = false; // Clear breadcrumb and contextual action buttons on destroy const clearGl = function () { glBreadcrumbService.setBreadcrumb([]); glTopbarContextualActionsService.setContextualActions([]); }; $scope.$on('$destroy', clearGl); data.getBuilders(builderid).onNew = function(builder) { $window.document.title = $state.current.data.pageTitle({ builder: builder['name']}); $scope.builder = builder; const breadcrumb = [{ caption: "Builders", sref: "builders" } , { caption: builder.name, sref: `builder({builder:${builder.builderid}})` } ]; // reinstall breadcrumb when coming back from forcesched $scope.$on('$stateChangeSuccess', () => glBreadcrumbService.setBreadcrumb(breadcrumb)); glBreadcrumbService.setBreadcrumb(breadcrumb); const doCancel = function() { if ($scope.is_cancelling) { return; } if (!window.confirm("Are you sure you want to cancel all builds?")) { return; } $scope.is_cancelling = true; refreshContextMenu(); 
const success = function(res) { $scope.is_cancelling = false; refreshContextMenu(); }; const failure = function(why) { $scope.is_cancelling = false; $scope.error = `Cannot cancel: ${why.error.message}`; refreshContextMenu(); }; const dl = []; $scope.buildrequests.forEach(function(buildrequest) { if (!buildrequest.claimed) { dl.push(buildrequest.control('cancel')); } }); $scope.builds.forEach(function(build) { if (!build.complete) { dl.push(build.control('stop')); } }); $q.when(dl).then(success, failure); }; var refreshContextMenu = function() { if ($scope.$$destroyed) { return; } const actions = [ ]; let canStop = false; $scope.builds.forEach(function(build) { if (!build.complete) { canStop = true; } }); $scope.buildrequests.forEach(function(buildrequest) { if (!buildrequest.claimed) { canStop = true; } }); if (canStop) { if ($scope.is_cancelling) { actions.push({ caption: "Cancelling...", icon: "spinner fa-spin", action: doCancel }); } else { actions.push({ caption: "Cancel whole queue", extra_class: "btn-danger", icon: "stop", action: doCancel }); } } _.forEach($scope.forceschedulers, sch => actions.push({ caption: sch.button_name, extra_class: "btn-primary", action() { return $state.go("builder.forcebuilder", {scheduler:sch.name}); } }) ); glTopbarContextualActionsService.setContextualActions(actions); }; builder.getForceschedulers({order:'name'}).onChange = function(forceschedulers) { $scope.forceschedulers = forceschedulers; refreshContextMenu(); // reinstall contextual actions when coming back from forcesched $scope.$on('$stateChangeSuccess', () => refreshContextMenu()); }; $scope.numbuilds = 200; if ($stateParams.numbuilds != null) { $scope.numbuilds = +$stateParams.numbuilds; } $scope.builds = builder.getBuilds({ property: ["owners", "workername"], limit: $scope.numbuilds, order: '-number' }); $scope.buildrequests = builder.getBuildrequests({claimed:false}); $scope.buildrequests.onNew = buildrequest => data.getBuildsets(buildrequest.buildsetid).onNew = 
buildset => buildset.getProperties().onNew = properties => buildrequest.properties = properties ; $scope.builds.onChange = function() { refreshContextMenu(); if ($scope.builds.length === 0) { return; } $scope.successful_builds = []; $scope.success_ratio = []; const max_started = $scope.builds[0].started_at; const min_started = $scope.builds[$scope.builds.length-1].started_at; const threshold = (max_started - min_started)/30; // build 30 success ratio points let last_started = max_started; let cur_success = 0; let num_builds = 0; $scope.builds.forEach(function(b) { if (b.complete_at !== null) { num_builds +=1; if (b.results === 0) { cur_success +=1; b.duration = b.complete_at - b.started_at; $scope.successful_builds.push(b); } // we walk backward? The logic is reversed to avoid another sort if ((last_started - b.started_at) > threshold) { $scope.success_ratio.push({date:last_started, success_ratio: (100 * cur_success) / num_builds}); last_started = b.started_at; num_builds = 0; cur_success = 0; } } }); }; $scope.buildrequests.onChange = refreshContextMenu; }; } } angular.module('app') .controller('builderController', ['$rootScope', '$scope', 'dataService', '$stateParams', 'resultsService', 'glBreadcrumbService', '$state', 'glTopbarContextualActionsService', '$q', '$window', BuilderController]); buildbot-3.4.0/www/base/src/app/builders/builder/builder.route.js000066400000000000000000000012031413250514000247760ustar00rootroot00000000000000class BuilderState { constructor($stateProvider) { // Name of the state const name = 'builder'; // Configuration const cfg = { tabid: 'builders', pageTitle: _.template("Buildbot: builder <%= builder %>") }; // Register new state const state = { controller: `${name}Controller`, template: require('./builder.tpl.jade'), name, url: '/builders/:builder?numbuilds', data: cfg }; $stateProvider.state(state); } } angular.module('app') .config(['$stateProvider', BuilderState]); 
buildbot-3.4.0/www/base/src/app/builders/builder/builder.tpl.jade000066400000000000000000000031031413250514000247270ustar00rootroot00000000000000.container .row(ng-show='builder.description') h4 Description: | {{ builder.description }} uib-tabset uib-tab(heading="Build requests") span(ng-hide='buildrequests.length>0') | None table.table.table-hover.table-striped.table-condensed(ng-show='buildrequests.length>0') tr td(width='100px') # td(width='150px') Submitted At td(width='150px') Owners td Properties tr(ng-repeat='br in buildrequests | orderBy:"-submitted_at"', ng-if="br.claimed==false" ) td a(ui-sref="buildrequest({buildrequest:br.buildrequestid})") span.badge-status {{br.buildrequestid}} td span(title="{{br.submitted_at | dateformat:'LLL'}}") | {{br.submitted_at | timeago }} td span(ng-if="br.properties.owners === undefined") {{br.properties.owner[0]}} span(ng-repeat="owner in br.properties.owners[0]") {{owner}} td uib-tab(heading="Build times") line-plot(data="successful_builds", xattr="started_at", yattr="duration", xunit="timestamp", yunit="seconds", width=800, height=200) uib-tab(heading="Success Rate") line-plot(data="success_ratio", xattr="date", yattr="success_ratio", xunit="timestamp", yunit="percent", width=800, height=200) builds-table(builder="builder", builds="builds") a.btn.btn-default(ui-sref='builder({builder: builder.builderid, numbuilds: numbuilds + 100})', ng-if="builds.length>=numbuilds") | more buildbot-3.4.0/www/base/src/app/builders/builders.controller.js000066400000000000000000000166271413250514000246000ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Builders { constructor($scope, $log, dataService, resultsService, bbSettingsService, $stateParams, $location, 
dataGrouperService, $rootScope, $filter, glBreadcrumbService, glTopbarContextualActionsService) { const breadcrumb = [{ caption: "Builders", sref: "builders" } ]; const setupGl = function () { glBreadcrumbService.setBreadcrumb(breadcrumb); glTopbarContextualActionsService.setContextualActions([]); }; $scope.$on('$stateChangeSuccess', setupGl); setupGl(); // Clear breadcrumb and contextual action buttons on destroy const clearGl = function () { glBreadcrumbService.setBreadcrumb([]); glTopbarContextualActionsService.setContextualActions([]); }; $scope.$on('$destroy', clearGl); // make resultsService utilities available in the template _.mixin($scope, resultsService); $scope.connected2class = function(worker) { if (worker.connected_to.length > 0) { return "worker_CONNECTED"; } else { return "worker_DISCONNECTED"; } }; $scope.hasActiveMaster = function(builder) { let active = false; if ((builder.masterids == null)) { return false; } for (let mid of Array.from(builder.masterids)) { const m = $scope.masters.get(mid); if ((m != null) && m.active) { active = true; } } if (builder.tags.includes('_virtual_')) { active = true; } return active; }; $scope.settings = bbSettingsService.getSettingsGroup("Builders"); $scope.$watch('settings', () => { bbSettingsService.save(); }, true); const buildFetchLimit = $scope.settings.buildFetchLimit.value; $scope.page_size = $scope.settings.page_size.value; $scope.currentPage = 1; const updateTagsFilterFromLocation = function() { $scope.tags_filter = $location.search()["tags"]; if ($scope.tags_filter == null) { $scope.tags_filter = []; } if (!angular.isArray($scope.tags_filter)) { return $scope.tags_filter = [$scope.tags_filter]; } }; updateTagsFilterFromLocation(); $scope.$watch("tags_filter", function(tags, old) { if (old != null) { $location.search("tags", tags); } } , true); $rootScope.$on('$locationChangeSuccess', updateTagsFilterFromLocation); $scope.isBuilderFiltered = function(builder, index) { // filter out inactive builders let 
tag; if (!$scope.settings.show_old_builders.value && !$scope.hasActiveMaster(builder)) { return false; } const pluses = _.filter($scope.tags_filter, tag => tag.indexOf("+") === 0); const minuses = _.filter($scope.tags_filter, tag => tag.indexOf("-") === 0); // First enforce that we have no tag marked '-' for (tag of Array.from(minuses)) { if (builder.tags.indexOf(tag.slice(1)) >= 0) { return false; } } // if only minuses or no filter if ($scope.tags_filter.length === minuses.length) { return true; } // Then enforce that we have all the tags marked '+' for (tag of Array.from(pluses)) { if (builder.tags.indexOf(tag.slice(1)) < 0) { return false; } } // Then enforce that we have at least one of the tag (marked '+' or not) for (tag of Array.from($scope.tags_filter)) { if (tag.indexOf("+") === 0) { tag = tag.slice(1); } if (builder.tags.indexOf(tag) >= 0) { return true; } } return false; }; $scope.isTagFiltered = tag => ($scope.tags_filter.length === 0) || ($scope.tags_filter.indexOf(tag) >= 0) || ($scope.tags_filter.indexOf(`+${tag}`) >= 0) || ($scope.tags_filter.indexOf(`-${tag}`) >= 0) ; $scope.toggleTag = function(tag) { if (tag.indexOf('+') === 0) { tag = tag.slice(1); } if (tag.indexOf('-') === 0) { tag = tag.slice(1); } const i = $scope.tags_filter.indexOf(tag); const iplus = $scope.tags_filter.indexOf(`+${tag}`); const iminus = $scope.tags_filter.indexOf(`-${tag}`); if ((i < 0) && (iplus < 0) && (iminus < 0)) { return $scope.tags_filter.push(`+${tag}`); } else if (iplus >= 0) { $scope.tags_filter.splice(iplus, 1); return $scope.tags_filter.push(`-${tag}`); } else if (iminus >= 0) { $scope.tags_filter.splice(iminus, 1); return $scope.tags_filter.push(tag); } else { return $scope.tags_filter.splice(i, 1); } }; const data = dataService.open().closeOnDestroy($scope); // as there is usually lots of builders, its better to get the overall // list of workers, masters, and builds and then associate by builder $scope.builders = data.getBuilders(); $scope.masters = 
data.getMasters(); const workers = data.getWorkers(); let builds = null; const requeryBuilds = function() { $scope.builders.forEach(builder => builder.builds = []); const filteredBuilds = $filter('filter')($scope.builders, $scope.isBuilderFiltered) || []; let builderIds = filteredBuilds.map(builder => builder.builderid); if (builderIds.length === $scope.builders.length) { builderIds = []; } builds = data.getBuilds({limit: buildFetchLimit, order: '-started_at', builderid__eq: builderIds}); dataGrouperService.groupBy($scope.builders, workers, 'builderid', 'workers', 'configured_on'); dataGrouperService.groupBy($scope.builders, builds, 'builderid', 'builds'); }; if ($scope.tags_filter.length === 0) { requeryBuilds(); } else { $scope.$watch("builders.$resolved", function(resolved) { if (resolved) { requeryBuilds(); } }); } $scope.searchQuery = ''; $scope.$watch("tags_filter", function() { if (builds && $scope.builders.$resolved) { builds.close(); requeryBuilds(); } } , true); } } angular.module('app') .controller('buildersController', ['$scope', '$log', 'dataService', 'resultsService', 'bbSettingsService', '$stateParams', '$location', 'dataGrouperService', '$rootScope', '$filter', 'glBreadcrumbService', 'glTopbarContextualActionsService', Builders]); buildbot-3.4.0/www/base/src/app/builders/builders.route.js000066400000000000000000000034611413250514000235430ustar00rootroot00000000000000class BuildersState { constructor($stateProvider, glMenuServiceProvider, bbSettingsServiceProvider) { // Name of the state const name = 'builders'; // Menu configuration glMenuServiceProvider.addGroup({ name: "builds", caption: 'Builds', icon: 'cogs', order: 10 }); // Configuration const cfg = { group: "builds", caption: 'Builders' }; // Register new state const state = { controller: `${name}Controller`, template: require('./builders.tpl.jade'), name, url: '/builders?tags', data: cfg, reloadOnSearch: false }; $stateProvider.state(state); bbSettingsServiceProvider.addSettingsGroup({ 
name:'Builders', caption: 'Builders page related settings', items:[{ type:'bool', name:'show_old_builders', caption:'Show old builders', default_value: false } , { type:'bool', name:'show_workers_name', caption:'Show workers name', default_value: false } , { type:'integer', name:'buildFetchLimit', caption:'Maximum number of builds to fetch', default_value: 200 } , { type:'integer', name:'page_size', caption:'Number of builders to show per page', default_value: 100 } ]}); } } angular.module('app') .config(['$stateProvider', 'glMenuServiceProvider', 'bbSettingsServiceProvider', BuildersState]); buildbot-3.4.0/www/base/src/app/builders/builders.tpl.jade000066400000000000000000000105401413250514000234670ustar00rootroot00000000000000.container .row form(role="search" style="width:150px") input(type="text" ng-model="searchQuery" class="form-control" placeholder="Search for builders") table.table.table-hover.table-striped.table-condensed tr th Builder Name th Builds th span(ng-init="help=false", ng-click="help=!help") i.fa.fa-question-circle.clickable(style="position:relative") .popover.bottom.anim-popover(ng-if="help", style="display:block;min-width:600px;left:-300px;top:30px") h5.popover-title | Tags filtering .popover-content p b pre +{tag} | all tags with '+' must be present in the builder tags p b pre -{tag} | no tags with '-' must be present in the builder tags p b pre {tag} | at least one of the filtered tag should be present p url bar is updated with you filter configuration, so you can bookmark your filters! 
.arrow span(ng-show="tags_filter.length==0") Tags span(ng-show="tags_filter.length < 5", ng-repeat="tag in tags_filter") span.label.label-success(ng-click="toggleTag(tag)") | {{ tag }} |   span(ng-show="tags_filter.length >= 5") span.label.label-success | {{ tags_filter.length }} tags span(ng-show="tags_filter.length > 0") span.label.clickable.label-danger(ng-click="tags_filter = []") x th(style="width:20%px;") Workers ul(uib-pagination, total-items="(builders | filter: {name: searchQuery} | filter: isBuilderFiltered).length", ng-model="currentPage", class="pagination-sm", boundary-link-numbers="true", max-size=10, items-per-page="page_size") tr(ng-repeat='builder in builders | filter: {name: searchQuery} | filter: isBuilderFiltered | orderBy: "name" | limitTo: page_size :(currentPage-1)*page_size') td(style="width:200px") a(ui-sref='builder({builder: builder.builderid})') | {{ builder.name }} td a(ng-repeat="build in builder.builds | orderBy: '-buildid' | limitTo: '15' ", ui-sref="build({builder: builder.builderid, build: build.number})") script(type="text/ng-template" id="buildsummarytooltip") buildsummary(buildid="build.buildid" type="tooltip") span.badge-status(uib-tooltip-template="'buildsummarytooltip'" tooltip-class="buildsummarytooltipstyle" tooltip-placement="auto left-bottom" tooltip-popup-delay="400" tooltip-popup-close-delay="400" ng-class="results2class(build, 'pulse')") | {{ build.number }} td(style="width:20%;") span(ng-repeat="tag in builder.tags") span.label.clickable(ng-click="toggleTag(tag)", ng-class="isTagFiltered(tag) ? 
'label-success': 'label-default'") | {{ tag }} |   td(style="width:20%;") span(data-ng-repeat="worker in builder.workers") a(ui-sref='worker({worker: worker.workerid})') span.badge-status(title="{{worker.name}}" ng-class="connected2class(worker, 'pulse')" ng-if="!settings.show_workers_name.value") .badge-inactive {{worker.workerid}} .badge-active {{worker.name}} span.badge-status(title="{{worker.name}}" ng-class="connected2class(worker, 'pulse')" ng-if="settings.show_workers_name.value") | {{ worker.name }} .row .form-group label.checkbox-inline input(type="checkbox" name="{{settings.show_old_builders.name}}" ng-model="settings.show_old_builders.value") | {{settings.show_old_builders.caption}} buildbot-3.4.0/www/base/src/app/builders/buildrequest/000077500000000000000000000000001413250514000227435ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/builders/buildrequest/buildrequest.controller.js000066400000000000000000000101251413250514000301720ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class BuildrequestController { constructor($scope, dataService, $stateParams, findBuilds, glBreadcrumbService, glTopbarContextualActionsService, publicFieldsFilter) { $scope.is_cancelling = false; $scope.$watch("buildrequest.claimed", function(n, o) { if (n) { // if it is unclaimed, then claimed, we need to try again findBuilds($scope, $scope.buildrequest.buildrequestid, $stateParams.redirect_to_build); // when a build is discovered, force the tab to go to that build const savedNew = $scope.builds.onNew; $scope.builds.onNew = function(build) { build.active = true; savedNew(build); }; } }); const doCancel = function() { $scope.is_cancelling = true; refreshContextMenu(); const success = function(res) {}; // refresh is done via complete event const 
failure = function(why) { $scope.is_cancelling = false; $scope.error = `Cannot cancel: ${why.error.message}`; refreshContextMenu(); }; $scope.buildrequest.control('cancel').then(success, failure); }; var refreshContextMenu = function() { const actions = []; if (($scope.buildrequest == null)) { return; } if (!$scope.buildrequest.complete) { if ($scope.is_cancelling) { actions.push({ caption: "Cancelling...", icon: "spinner fa-spin", action: doCancel }); } else { actions.push({ caption: "Cancel", extra_class: "btn-default", action: doCancel }); } } glTopbarContextualActionsService.setContextualActions(actions); }; $scope.$watch('buildrequest.complete', refreshContextMenu); // Clear breadcrumb and contextual action buttons on destroy const clearGl = function () { glBreadcrumbService.setBreadcrumb([]); glTopbarContextualActionsService.setContextualActions([]); }; $scope.$on('$destroy', clearGl); const data = dataService.open().closeOnDestroy($scope); data.getBuildrequests($stateParams.buildrequest).onNew = function(buildrequest) { $scope.buildrequest = buildrequest; $scope.raw_buildrequest = publicFieldsFilter(buildrequest); data.getBuilders(buildrequest.builderid).onNew = function(builder) { $scope.builder = builder; const breadcrumb = [{ caption: builder.name, sref: `builder({builder:${buildrequest.builderid}})` } , {caption: "buildrequests"} , { caption: buildrequest.buildrequestid, sref: `buildrequest({buildrequest:${buildrequest.buildrequestid}})` } ]; glBreadcrumbService.setBreadcrumb(breadcrumb); }; data.getBuildsets(buildrequest.buildsetid).onNew = function(buildset) { $scope.buildset = publicFieldsFilter(buildset); buildset.getProperties().onNew = properties => $scope.properties = publicFieldsFilter(properties); }; }; } } angular.module('app') .controller('buildrequestController', ['$scope', 'dataService', '$stateParams', 'findBuilds', 'glBreadcrumbService', 'glTopbarContextualActionsService', 'publicFieldsFilter', BuildrequestController]); 
buildbot-3.4.0/www/base/src/app/builders/buildrequest/buildrequest.controller.spec.js000066400000000000000000000072511413250514000311310ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ beforeEach(angular.mock.module('app')); describe('buildrequest controller', function() { let $httpBackend, $rootScope, $scope, $stateParams, createController; let dataService = ($scope = ($httpBackend = ($rootScope = null))); let $timeout = (createController = ($stateParams = null)); let goneto = null; // override "$state" beforeEach(angular.mock.module(function($provide) { $provide.value("$state", {go(...args) { return goneto = args; }}); $provide.value("$stateParams", {buildrequest: 1}); }) ); const injected = function($injector) { $rootScope = $injector.get('$rootScope'); $scope = $rootScope.$new(); $timeout = $injector.get('$timeout'); $stateParams = $injector.get('$stateParams'); const $controller = $injector.get('$controller'); const $q = $injector.get('$q'); dataService = $injector.get('dataService'); createController = () => $controller('buildrequestController', {$scope}) ; }; beforeEach(inject(injected)); it('should query for buildrequest', function() { dataService.when('buildsets/1/properties', [{a: ['a','b']}]); dataService.when('buildrequests/1', [{buildrequestid: 1, builderid: 1, buildsetid: 1}]); dataService.when('builders/1', [{builderid: 1}]); dataService.when('buildsets/1', [{buildsetid: 1}]); const controller = createController(); $timeout.flush(); expect(dataService.get).toHaveBeenCalledWith('buildrequests', 1, jasmine.any(Object)); dataService.when('builds', {buildrequestid: 1}, [{buildid: 1, buildrequestid: 1}, {buildid: 2, buildrequestid: 1}]); $scope.$apply(() => $scope.buildrequest.claimed = true); $timeout.flush(); expect($scope.builds[0]).toBeDefined(); }); it('should query for builds 
again if first query returns 0', function() { dataService.when('buildsets/1/properties', [{a: ['a','b']}]); dataService.when('builders', [{builderid: 1}]); dataService.when('buildrequests/1', [{buildrequestid: 1, builderid: 1, buildsetid: 1}]); dataService.when('builders/1', [{builderid: 1}]); dataService.when('buildsets/1', [{buildsetid: 1}]); const controller = createController(); dataService.when('builds', {buildrequestid: 1}, []); $timeout.flush(); $scope.$apply(() => $scope.buildrequest.claimed = true); $timeout.flush(); expect($scope.builds.length).toBe(0); // simulate new builds from event stream $scope.builds.from([{buildid: 1, buildrequestid: 1}, {buildid: 2, buildrequestid: 1}]); $timeout.flush(); expect($scope.builds.length).toBe(2); }); it('should go to build page if build started', function() { dataService.when('buildsets/1/properties', [{a: ['a','b']}]); dataService.when('buildrequests/1', [{buildrequestid: 1, builderid: 3, buildsetid: 1}]); dataService.when('builders/3', [{builderid: 3}]); dataService.when('buildsets/1', [{buildsetid: 1}]); $stateParams.redirect_to_build = 1; const controller = createController(); $timeout.flush(); dataService.when('builds', {buildrequestid: 1}, [{buildid: 1, builderid: 3, number: 1, buildrequestid: 1}]); $scope.$apply(() => $scope.buildrequest.claimed = true); $timeout.flush(); expect(goneto).toEqual(['build', { builder : 3, build : 1 }]); }); }); buildbot-3.4.0/www/base/src/app/builders/buildrequest/buildrequest.route.js000066400000000000000000000010101413250514000271360ustar00rootroot00000000000000class BuildRequestState { constructor($stateProvider) { // Name of the state const name = 'buildrequest'; // Register new state const state = { controller: `${name}Controller`, template: require('./buildrequest.tpl.jade'), name, data: {}, url: '/buildrequests/:buildrequest?redirect_to_build' }; $stateProvider.state(state); } } angular.module('app') .config(['$stateProvider', BuildRequestState]); 
buildbot-3.4.0/www/base/src/app/builders/buildrequest/buildrequest.tpl.jade000066400000000000000000000011161413250514000270750ustar00rootroot00000000000000.container .row uib-tabset(justified="left") uib-tab(heading="build {{build.number}}", active="build.active", ng-repeat="build in builds") buildsummary(build="build") uib-tab(heading="properties") properties(properties="properties") uib-tab(heading="Debug") h4 Buildrequest rawdata(data="raw_buildrequest") h4 Buildset rawdata(data="buildset") h4 Builder rawdata(data="builder") h4 Builds rawdata(data="builds") buildbot-3.4.0/www/base/src/app/builders/buildrequest/forcedialog/000077500000000000000000000000001413250514000252215ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/builders/buildrequest/forcedialog/forcedialog.config.js000066400000000000000000000034711413250514000313060ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class ForceDialogState { constructor($stateProvider) { $stateProvider.state("builder.forcebuilder", { url: "/force/:scheduler", onEnter: ['$stateParams', '$state', '$uibModal', function($stateParams, $state, $uibModal) { const modal = {}; modal.modal = $uibModal.open({ template: require('./forcedialog.tpl.jade'), controller: 'forceDialogController', windowClass: 'modal-xlg', resolve: { builderid() { return $stateParams.builder; }, schedulerid() { return $stateParams.scheduler; }, modal() { return modal; } } }); // We exit the state if the dialog is closed or dismissed const goBuild = function(result) { const [ buildsetid, brids ] = Array.from(result); const buildernames = _.keys(brids); if (buildernames.length === 1) { return $state.go("buildrequest", { buildrequest: brids[buildernames[0]], redirect_to_build: true } ); } }; const goUp = result => $state.go("^"); 
return modal.modal.result.then(goBuild, goUp); }] } ); } } angular.module('app') .config(['$stateProvider', ForceDialogState]); buildbot-3.4.0/www/base/src/app/builders/buildrequest/forcedialog/forcedialog.controller.js000066400000000000000000000075151413250514000322270ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class forceDialog { constructor($scope, config, $state, modal, schedulerid, $rootScope, builderid, dataService) { dataService.getForceschedulers(schedulerid, {subscribe: false}).onChange = function(schedulers) { const scheduler = schedulers[0]; const all_fields_by_name = {}; // prepare default values var prepareFields = fields => { for (let field of Array.from(fields)) { all_fields_by_name[field.fullName] = field; // give a reference of other fields to easily implement // autopopulate field.all_fields_by_name = all_fields_by_name; field.errors = ''; field.haserrors = false; if (field.fields != null) { prepareFields(field.fields); } else { field.value = field.default; // if field type is username, then we just hide the field // the backend will fill the value automatically if (field.type === 'username') { field.type = "text"; const { user } = config; if (user.email != null) { field.type = "text"; field.hide = true; } } } } }; prepareFields(scheduler.all_fields); angular.extend($scope, { rootfield: { type: 'nested', layout: 'simple', fields: scheduler.all_fields, columns: 1 }, sch: scheduler, startDisabled: false, ok() { if ($scope.startDisabled == true) { // prevent multiple executions of scheduler return null; }; $scope.startDisabled = true; const params = {builderid}; for (let name in all_fields_by_name) { const field = 
all_fields_by_name[name]; params[name] = field.value; } return scheduler.control('force', params) .then(res => modal.modal.close(res.result), function(err) { $scope.startDisabled = false; if (err === null) { return; } if (err.error.code === -32602) { for (let k in err.error.message) { const v = err.error.message[k]; all_fields_by_name[k].errors = v; all_fields_by_name[k].haserrors = true; } } else { $scope.error = err.error.message; } }); }, cancel() { return modal.modal.dismiss(); } } ); }; } } angular.module('app') .controller('forceDialogController', ['$scope', 'config', '$state', 'modal', 'schedulerid', '$rootScope', 'builderid', 'dataService', forceDialog]); buildbot-3.4.0/www/base/src/app/builders/buildrequest/forcedialog/forcedialog.spec.js000066400000000000000000000031231413250514000307650ustar00rootroot00000000000000beforeEach(angular.mock.module('app')); describe('buildrequest controller', function() { let $httpBackend, $rootScope, $scope, $stateParams, $timeout, modal; let dataService = ($scope = ($rootScope = null)); let createController = ($stateParams = (modal = ($httpBackend = ($timeout = null)))); const injected = function($injector) { $rootScope = $injector.get('$rootScope'); $scope = $rootScope.$new(); $timeout = $injector.get('$timeout'); const $controller = $injector.get('$controller'); const $q = $injector.get('$q'); dataService = $injector.get('dataService'); $httpBackend = $injector.get('$httpBackend'); modal = {}; createController = () => $controller('forceDialogController', { $scope, builderid: 1, schedulerid: 'forcesched', modal } ) ; }; beforeEach(inject(injected)); it('should query for forcecheduler', function() { dataService.when('forceschedulers/forcesched', [{all_fields:[{'foo': 'int'}]}]); const controller = createController(); $rootScope.$apply(); }); it('should call forcecheduler control api when ok', function() { dataService.when('forceschedulers/forcesched', [{name: "forcesched", all_fields:[{'foo': 'int'}]}]); const controller = 
createController(); $timeout.flush(); $httpBackend.when('POST', 'api/v2/forceschedulers/forcesched') .respond("{}"); $scope.ok(); $rootScope.$apply(); }); }); buildbot-3.4.0/www/base/src/app/builders/buildrequest/forcedialog/forcedialog.tpl.jade000066400000000000000000000010711413250514000311210ustar00rootroot00000000000000.modal-content form .modal-body // put the header in the body in order to correctly display error popup h4 {{sch.label}} hr div.form-horizontal .alert.alert-danger(ng-show="error") {{error}} .alert.alert-danger(ng-show="!sch.enabled") SCHEDULER DISABLED forcefield(field="rootfield" ng-if="rootfield") .modal-footer button.btn.btn-default(ng-click="cancel()") Cancel button.btn.btn-primary(type="submit", ng-click="ok()", ng-disabled="!sch.enabled || startDisabled") Start Build buildbot-3.4.0/www/base/src/app/builders/builds/000077500000000000000000000000001413250514000215155ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/builders/builds/build.controller.js000066400000000000000000000166051413250514000253440ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class BuildController { constructor($rootScope, $scope, $location, $stateParams, $state, faviconService, dataService, dataUtilsService, publicFieldsFilter, glBreadcrumbService, glTopbarContextualActionsService, resultsService, $window) { _.mixin($scope, resultsService); const builderid = _.parseInt($stateParams.builder); const buildnumber = _.parseInt($stateParams.build); $scope.last_build = true; $scope.is_stopping = false; $scope.is_rebuilding = false; const doRebuild = function() { $scope.is_rebuilding = true; refreshContextMenu(); const success = function(res) { const brid = _.values(res.result[1])[0]; 
$state.go("buildrequest", { buildrequest: brid, redirect_to_build: true }); }; const failure = function(why) { $scope.is_rebuilding = false; $scope.error = `Cannot rebuild: ${why.error.message}`; refreshContextMenu(); }; $scope.build.control('rebuild').then(success, failure); }; const doStop = function() { $scope.is_stopping = true; refreshContextMenu(); const success = res => null; const failure = function(why) { $scope.is_stopping = false; $scope.error = `Cannot Stop: ${why.error.message}`; refreshContextMenu(); }; $scope.build.control('stop').then(success, failure); }; var refreshContextMenu = function() { const actions = []; if (($scope.build == null)) { return; } faviconService.setFavIcon($scope.build); if ($scope.build.complete) { if ($scope.is_rebuilding) { actions.push({ caption: "Rebuilding...", icon: "spinner fa-spin", action: doRebuild }); } else { actions.push({ caption: "Rebuild", extra_class: "btn-default", action: doRebuild }); } } else { if ($scope.is_stopping) { actions.push({ caption: "Stopping...", icon: "spinner fa-spin", action: doStop }); } else { actions.push({ caption: "Stop", extra_class: "btn-default", action: doStop }); } } glTopbarContextualActionsService.setContextualActions(actions); }; $scope.$watch('build.complete', refreshContextMenu); // Clear breadcrumb and contextual action buttons on destroy const clearGl = function () { glBreadcrumbService.setBreadcrumb([]); glTopbarContextualActionsService.setContextualActions([]); }; $scope.$on('$destroy', clearGl); const data = dataService.open().closeOnDestroy($scope); data.getBuilders(builderid).onChange = function(builders) { let builder; $scope.builder = (builder = builders[0]); $window.document.title = $state.current.data.pageTitle({ builder: builder['name'], build: buildnumber}); // get the build plus the previous and next // note that this registers to the updates for all the builds for that builder // need to see how that scales builder.getBuilds({number__lt: buildnumber + 2, limit: 
3, order: '-number'}).onChange = function(builds) { $scope.prevbuild = null; $scope.nextbuild = null; let build = null; for (let b of Array.from(builds)) { if (b.number === (buildnumber - 1)) { $scope.prevbuild = b; } if (b.number === buildnumber) { $scope.build = (build = b); } if (b.number === (buildnumber + 1)) { $scope.nextbuild = b; $scope.last_build = false; } } if (!build) { $state.go('build', {builder: builderid, build: builds[0].number}); return; } const breadcrumb = [{ caption: "Builders", sref: "builders" } , { caption: builder.name, sref: `builder({builder:${builderid}})` } , { caption: build.number, sref: `build({build:${buildnumber}})` } ]; glBreadcrumbService.setBreadcrumb(breadcrumb); var unwatch = $scope.$watch('nextbuild.number', function(n, o) { if (n != null) { $scope.last_build = false; unwatch(); } }); $scope.responsibles = {}; build.getProperties().onNew = function(properties) { $scope.properties = properties; var owner = properties.owner[0]; if (properties.scheduler[0] === 'force' && owner.match(/^.+\<.+\@.+\..+\>.*$/)) { var name = owner.split(new RegExp('<|>'))[0]; var email = owner.split(new RegExp('<|>'))[1]; $scope.responsibles[name] = email; } }; $scope.changes = build.getChanges(); $scope.changes.onNew = change => $scope.responsibles[change.author_name] = change.author_email; data.getWorkers(build.workerid).onNew = worker => $scope.worker = publicFieldsFilter(worker); data.getBuildrequests(build.buildrequestid).onNew = function(buildrequest) { $scope.buildrequest = buildrequest; data.getBuildsets(buildrequest.buildsetid).onNew = function(buildset) { $scope.buildset = buildset; if (buildset.parent_buildid) { data.getBuilds(buildset.parent_buildid).onNew = build => $scope.parent_build = build; } }; }; }; }; } } angular.module('app') .controller('buildController', ['$rootScope', '$scope', '$location', '$stateParams', '$state', 'faviconService', 'dataService', 'dataUtilsService', 'publicFieldsFilter', 'glBreadcrumbService', 
'glTopbarContextualActionsService', 'resultsService', '$window', BuildController]); buildbot-3.4.0/www/base/src/app/builders/builds/build.route.js000066400000000000000000000036251413250514000243150ustar00rootroot00000000000000class BuildState { constructor($stateProvider, bbSettingsServiceProvider) { // Name of the state const name = 'build'; // Register new state const state = { controller: `${name}Controller`, template: require('./build.tpl.jade'), name, url: '/builders/:builder/builds/:build', data: { pageTitle: _.template("Buildbot: builder <%= builder %> build <%= build %>") } }; $stateProvider.state(state); bbSettingsServiceProvider.addSettingsGroup({ name:'LogPreview', caption: 'LogPreview related settings', items:[{ type:'integer', name:'loadlines', caption:'Initial number of lines to load', default_value: 40 } , { type:'integer', name:'maxlines', caption:'Maximum number of lines to show', default_value: 40 } , { type:'text', name:'expand_logs', caption:'Expand logs with these names (use ; as separator)', default_value: 'summary' } ]}); bbSettingsServiceProvider.addSettingsGroup({ name:'Build', caption: 'Build page related settings', items:[{ type:'integer', name:'trigger_step_page_size', caption:'Number of builds to show per page in trigger step', default_value: 20 }, { type:'bool', name:'show_urls', caption:'Always show URLs in step', default_value: true } ]}); } } angular.module('app') .config(['$stateProvider', 'bbSettingsServiceProvider', BuildState]); buildbot-3.4.0/www/base/src/app/builders/builds/build.tpl.jade000066400000000000000000000044401413250514000242410ustar00rootroot00000000000000.container .alert.alert-danger(ng-show="error") {{error}} nav ul.pager li.previous(ng-class="{'disabled': build.number == 1}") a(ng-if="build.number > 1 ", ui-sref="build({build:prevbuild.number})") span.badge-status(ng-class="results2class(prevbuild, 'pulse')") ← span.nomobile  Previous span(ng-if="build.number == 1") ← span.nomobile  Previous 
li(ng-if="build.complete" title="{{ build.complete_at | dateformat:'LLL' }}") Finished {{ build.complete_at | timeago }} li.next(ng-class="{'disabled': last_build}") a(ng-if="!last_build", ui-sref="build({build:nextbuild.number})") span.nomobile Next  span.badge-status(ng-class="results2class(nextbuild, 'pulse')") → span(ng-if="last_build") span.nomobile Next  span → .row uib-tabset uib-tab(heading="Build steps") buildsummary(ng-if="build", build="build", parentbuild="parent_build", parentrelationship="buildset.parent_relationship") uib-tab(heading="Build Properties") properties(properties="properties") uib-tab(heading="Worker: {{worker.name}}") table.table.table-hover.table-striped.table-condensed tbody tr td.text-left name td.text-center {{worker.name}} tr(ng-repeat="(name, value) in worker.workerinfo") td.text-left {{ name }} td.text-right {{ value }} uib-tab(heading="Responsible Users") ul.list-group li.list-group-item(ng-repeat="(author, email) in responsibles") .change-avatar img(ng-src="avatar?email={{email | encodeURI}}") a(ng-href="mailto:{{email}}") | {{ author }} uib-tab(heading="Changes") changelist(changes="changes") uib-tab(heading="Debug") h4 a(ui-sref="buildrequest({buildrequest:buildrequest.buildrequestid})") | Buildrequest: rawdata(data="buildrequest") h4 Buildset: rawdata(data="buildset") buildbot-3.4.0/www/base/src/app/builders/log/000077500000000000000000000000001413250514000210145ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/builders/log/log.controller.js000066400000000000000000000052161413250514000243210ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class LogController { constructor($scope, dataService, dataUtilsService, $stateParams, glBreadcrumbService, faviconService) { const data = 
dataService.open().closeOnDestroy($scope); $scope.jumpToLine = "end"; if ($stateParams.jump_to_line != null) { $scope.jumpToLine = $stateParams.jump_to_line; } const builderid = dataUtilsService.numberOrString($stateParams.builder); const buildnumber = dataUtilsService.numberOrString($stateParams.build); const stepnumber = dataUtilsService.numberOrString($stateParams.step); const slug = $stateParams.log; // Clear breadcrumb on destroy $scope.$on('$destroy', () => glBreadcrumbService.setBreadcrumb([])); data.getBuilders(builderid).onNew = function(builder) { $scope.builder = (builder = builder); builder.getBuilds(buildnumber).onNew = function(build) { $scope.build = build; build.getSteps(stepnumber).onNew = function(step) { $scope.step = step; faviconService.setFavIcon(step); step.getLogs(slug).onNew = function(log) { $scope.log = log; glBreadcrumbService.setBreadcrumb([{ caption: "Builders", sref: "builders" } , { caption: builder.name, sref: `builder({builder:${builder.builderid}})` } , { caption: build.number, sref: `build({builder:${builder.builderid}, build:${build.number}})` } , {caption: step.name} , { caption: log.name, sref: `log({builder:${builder.builderid}, build:${build.number}, step:${step.number}, log:'${log.slug}'})` } ]); }; }; }; }; } } angular.module('app') .controller('logController', ['$scope', 'dataService', 'dataUtilsService', '$stateParams', 'glBreadcrumbService', 'faviconService', LogController]); buildbot-3.4.0/www/base/src/app/builders/log/log.route.js000066400000000000000000000012231413250514000232660ustar00rootroot00000000000000class LogState { constructor($stateProvider) { // Name of the state const name = 'log'; // Configuration const cfg = { tabid: 'builders', pageTitle: _.template("Buildbot: log: <%= log %>") }; // Register new state const state = { controller: `${name}Controller`, template: require('./log.tpl.jade'), name, url: '/builders/:builder/builds/:build/steps/:step/logs/:log?jump_to_line', data: cfg }; 
$stateProvider.state(state); } } angular.module('app') .config(['$stateProvider', LogState]); buildbot-3.4.0/www/base/src/app/builders/log/log.tpl.jade000066400000000000000000000000601413250514000232140ustar00rootroot00000000000000logviewer(log="log", jump-to-line="jumpToLine") buildbot-3.4.0/www/base/src/app/builders/log/logviewer/000077500000000000000000000000001413250514000230175ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/builders/log/logviewer/logpreview.directive.js000066400000000000000000000155561413250514000275310ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Logpreview { constructor($sce, restService, ansicodesService, bbSettingsService) { return { replace: true, transclude: true, restrict: 'E', scope: { log: "<", fulldisplay: "<", buildnumber: "<", builderid: "<", step: "<" }, template: require('./logpreview.tpl.jade'), controllerAs: "logpreview", bindToController: true, controller: ["$scope", function ($scope) { this.$onInit = function () { let loadLines; this.settings = bbSettingsService.getSettingsGroup("LogPreview"); let pendingRequest = null; $scope.$on('$destroy', function () { if (pendingRequest) { pendingRequest.cancel(); } }); const loading = $sce.trustAs($sce.HTML, "..."); let unwatchLog = null; let unwatchLines = null; $scope.$watch("logpreview.fulldisplay", (n, o) => { // Cancel previous requests and stop fetching new lines first if (pendingRequest) { pendingRequest.cancel(); } if (unwatchLines) { unwatchLines(); } // Start fetching lines when the preview is visible. 
if (n) { unwatchLog = $scope.$watch("logpreview.log", fetchLog); } }); var fetchLog = (n, o) => { this.log.lines = []; if ((n == null)) { return; } unwatchLog(); if (unwatchLines) { unwatchLines(); } if (this.log.type === 'h') { pendingRequest = restService.get(`logs/${this.log.logid}/contents`); pendingRequest.then(content => { this.log.content = $sce.trustAs($sce.HTML, content.logchunks[0].content); }); } else { unwatchLines = $scope.$watch("logpreview.log.num_lines", loadLines); } }; loadLines = num_lines => { let limit, offset; if (this.log.lines.length === 0) { // initial load. only load the last few lines offset = this.log.num_lines - this.settings.loadlines.value; limit = this.settings.loadlines.value; if (offset < 0) { offset = 0; limit = this.log.num_lines; } } else { // The last element of the line is the last line loaded // This might be actually a loading marker offset = this.log.lines[this.log.lines.length - 1].number + 1; limit = this.log.num_lines - offset; // if log is advancing very fast no need to load too much lines if (limit > this.settings.maxlines.value) { offset = this.log.num_lines - this.settings.maxlines.value; limit = this.settings.maxlines.value; } } if (limit === 0) { return; } // this acts as a marker of the last loaded element // note that several elements can be loading at the same time // as we follow the log updates const loading_element = { content: loading, number: (offset + limit) - 1 }; this.log.lines.push(loading_element); pendingRequest = restService.get(`logs/${this.log.logid}/contents`, { offset, limit }); pendingRequest.then(content => { ({ content } = content.logchunks[0]); const lines = content.split("\n"); // there is a trailing '\n' generates an empty line in the end if (lines.length > 1) { lines.pop(); } let number = offset; // remove the loading element this.log.lines.splice(this.log.lines.indexOf(loading_element), 1); for (let line of Array.from(lines)) { let logclass = "o"; if ((line.length > 0) && (this.log.type 
=== 's')) { logclass = line[0]; line = line.slice(1); } // we just push the lines in the end, and will apply sort eventually this.log.lines.push({ content: $sce.trustAs($sce.HTML, ansicodesService.ansi2html(line)), class: `log_${logclass}`, number }); number += 1; } this.log.lines.sort((a, b) => a.number - b.number); this.log.lines.splice(0, this.log.lines.length - this.settings.maxlines.value); }); }; } }], link(scope, elm, attr) { ansicodesService.injectStyle(); } }; } } angular.module('app') .directive('logpreview', ['$sce', 'restService', 'ansicodesService', 'bbSettingsService', Logpreview]); buildbot-3.4.0/www/base/src/app/builders/log/logviewer/logpreview.less000066400000000000000000000000451413250514000260710ustar00rootroot00000000000000.logpreview { margin-top:20px; } buildbot-3.4.0/www/base/src/app/builders/log/logviewer/logpreview.tpl.jade000066400000000000000000000023741413250514000266330ustar00rootroot00000000000000div.logpreview.panel(ng-class="logpreview.log.name=='err.html' && 'panel-danger' || 'panel-default'") div.panel-heading .panel-title .flex-row div.flex-grow-3(ng-click="logpreview.fulldisplay=!logpreview.fulldisplay") i.fa.fa-chevron-circle-right.rotate(ng-class="{'fa-rotate-90':logpreview.fulldisplay}") | #{' '} {{logpreview.log.name}} div.flex-grow-1 div.pull-right a(ui-sref="log({builder:logpreview.builderid, build:logpreview.buildnumber, step: logpreview.step.number, log:logpreview.log.slug})") | #{' '}view all {{logpreview.log.num_lines}} line{{logpreview.log.num_lines > 1?'s':''}} | #{' '} a.btn.btn-default.btn-xs(ng-href="api/v2/logs/{{logpreview.log.logid}}/raw", description="download log") i.fa.fa-download | #{' '} download div(ng-show="logpreview.fulldisplay") pre.select-content.log(ng-show="logpreview.log.type!='h'") .logline(ng-repeat="line in logpreview.log.lines") span.no-wrap(data-linenumber-content="{{line.number}}", class="{{line.class}}", ng-bind-html="line.content") div.panel-body(ng-if="logpreview.log.type=='h'", 
ng-bind-html="logpreview.log.content") buildbot-3.4.0/www/base/src/app/builders/log/logviewer/logviewer.directive.js000066400000000000000000000133061413250514000273400ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // logviewer. This directive uses jquery for simplicity class Logviewer { constructor($log, $window, $timeout, $sce, $q, dataService, restService, ansicodesService) { $window = angular.element($window); const directive = function() { let self; let data = null; return self = { toggleAutoScroll() { if (self.scope.jumpToLine === "end") { self.scope.jumpToLine = "none"; return self.scope.scroll_position = null; } else { self.scope.jumpToLine = "end"; return self.scope.scroll_position = self.scope.log.num_lines; } }, setHeight(elm) { const height = $window.height() - elm.offset().top; return elm.css({height: height + "px"}); }, updateLog() { var unwatch = self.scope.$watch("log", function(n, o) { if (n != null) { unwatch(); const { log } = self.scope; self.scope.raw_url = `api/v2/logs/${log.logid}/raw`; if (log.type === 'h') { restService.get(`logs/${log.logid}/contents`).then(content => self.scope.content = $sce.trustAs($sce.HTML, content.logchunks[0].content)); } } }); return self.scope.$watch("log.num_lines", function(n, o) { if (self.scope.jumpToLine === "end") { self.scope.scroll_position = n; } else if (self.scope.jumpToLine !== "none") { self.scope.scroll_position = self.scope.jumpToLine; } }); }, lines: { get(index, count) { const { log } = self.scope; if (index < 0) { count += index; index = 0; } if (count === 0) { return $q.when([]); } if (self.requests == null) { self.requests = {}; } const requestId = `${index}_${count}`; if ((self.requests[requestId] == null)) { 
self.requests[requestId] = $q(resolve => restService.get(`logs/${log.logid}/contents`, {offset:index, limit:count}).then(function(content) { content = content.logchunks; const ret = []; if (content.length === 0) { resolve(ret); return; } let offset = index; const lines = content[0].content.split("\n"); // there is a trailing '\n' generates an empty line in the end if (lines.length > 1) { lines.pop(); } for (let line of Array.from(lines)) { let logclass = "o"; if ((line.length > 0) && (self.scope.log.type === 's')) { logclass = line[0]; line = line.slice(1); } ret.push({ content: ansicodesService.ansi2html(line), class: `log_${logclass}` }); offset += 1; } resolve(ret); }) ); } return self.requests[requestId]; } }, // controller is called first and need to setup the scope for ui-scroll to find lines controller($scope) { $scope.lines = self.lines; self.scope = $scope; data = dataService.open().closeOnDestroy($scope); return self.updateLog(); }, link(scope, elm, attr) { elm = elm.children("pre"); self.setHeight(elm); self.elm = elm; self.raw = elm[0]; $window.resize(() => self.setHeight(elm)); } }; }; return { replace: true, transclude: true, restrict: 'E', scope: {log:"=", jumpToLine:"="}, template: require('./logviewer.tpl.jade'), controller: ["$scope", function($scope) { const self = directive(); $scope.logviewer = self; return self.controller($scope); } ], link(scope, elm, attr) { ansicodesService.injectStyle(); scope.logviewer.link(scope, elm, attr); } }; } } angular.module('app') .directive('logviewer', ['$log', '$window', '$timeout', '$sce', '$q', 'dataService', 'restService', 'ansicodesService', Logviewer]); buildbot-3.4.0/www/base/src/app/builders/log/logviewer/logviewer.less000066400000000000000000000012511413250514000257110ustar00rootroot00000000000000.logcontainer { width:100%; padding:0px; margin-top:-20px; pre { margin:0px; } } pre.log { div { height: 18px; } .linenumber { display: inline-block; width: 50px; } // It is much more difficult to do 
autoscroll if line wrap is enabled. // For now we just disable it. There is space for some smarter algorithm .no-wrap { white-space: pre; word-wrap: normal; } margin-top: 0px; margin-bottom: 0px; overflow: auto; } .logoptions { margin-top: 0px; position: absolute; right: 17px; } .loading { margin-top: -40px; position: absolute; right: 17px; z-index:10000; } buildbot-3.4.0/www/base/src/app/builders/log/logviewer/logviewer.tpl.jade000066400000000000000000000022401413250514000264430ustar00rootroot00000000000000.container.logcontainer .row.logoptions a.btn.btn-default(title="scroll to end", ng-click="logviewer.toggleAutoScroll()", ng-class="{active: jumpToLine=='end'}") i.fa.fa-angle-double-down a.btn.btn-default(download="{{log.name}}", title="download log as file", ng-href="{{raw_url}}") i.fa.fa-download a.btn.btn-default(title="load all data for use with browser search tool", aria-pressed="loadAll", ng-class="{active: loadAll}", ng-click="loadAll = !loadAll") i.fa(ng-class="loadAll && isLoading ? 
'fa-spin fa-spinner': 'fa-search'") pre.row.log(ng-show="log.type!='h'", scroll-viewport) span(ng-if="log.type=='s' || log.type=='t'") div(scroll="line in lines", scroll-position="scroll_position", load-all="loadAll", is-loading="isLoading", total-size="log.num_lines", style="height:18px") span.no-wrap(data-linenumber-content="{{::$index}}", class="{{::line.class}}") {{::line.content}} div.panel(ng-if="log.type=='h'", ng-class="log.name=='err.html' && 'panel-danger' || 'panel-default'") div.panel-heading h4.panel-title {{log.name}} div.panel-body(ng-bind-html="content") buildbot-3.4.0/www/base/src/app/builders/log/logviewer/scrollviewport.directive.js000066400000000000000000000352421413250514000304360ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ /* based on https://github.com/Hill30/NGScroller (MIT license) ui.scroll is a good directive for infinite scrolling. Its inner working makes it not very adapted to viewing log: This scroll directive uses ui.scroll base, but replace the whole DOM manipulation code - Can directly scroll to arbitrary position - Don't remove out-of-sight DOM. Eventually this will result in huge dom, so please make sure to use bind-once childs. This however has the advantage on only loading each line once. - Support line count, and adapt scroll bar appropriately - Can follow the end of stream, via updating the scroll-position attribute - row height is fixed (or you cannot make geometric calculation to determine the positions of arbitrary elements) This directive uses JQuery for DOM manipulation Performance considerations: Having to deal with huge logs is not uncommon thing we buildbot, we need to deal with them as fast as possible. 
AngularJS does a lot of things with the DOM, and is not as fast as we can do. This is why using angularJS's linker is avoided. We rather use lodash template(), that is configured to simulate angularjs 1.3 "bindonce" templating. With this technic, we can load 20k lines log in 2 seconds. */ class ScrollViewport { constructor($log) { return { controller: [ '$scope', '$element', function(scope, element) { this.viewport = element; return this; } ] }; } } class Scroll { constructor($log, $injector, $rootScope, $timeout, $window) { return { require: ['?^scrollViewport'], transclude: 'element', priority: 1000, terminal: true, compile(elementTemplate, attr, linker) { return function($scope, element, $attr, controllers) { let loading; const log = $log.debug || $log.log; const match = $attr.scroll.match(/^\s*(\w+)\s+in\s+([\w\.]+)\s*$/); if (!match) { throw new Error("Expected scroll in form of '_item_ in _datasource_'"+ `but got '${$attr.uiScroll}'`); } const itemName = match[1]; const datasourceName = match[2]; const totalSize = null; const isDatasource = datasource => angular.isObject(datasource) && datasource.get && angular.isFunction(datasource.get); var getValueChain = function(targetScope, target) { if (!targetScope) { return null; } const chain = target.match(/^([\w]+)\.(.+)$/); if (!chain || (chain.length !== 3)) { return targetScope[target]; } return getValueChain(targetScope[chain[1]], chain[2]); }; const datasource = getValueChain($scope, datasourceName); if (!isDatasource(datasource)) { throw new Error(`${datasourceName} is not a valid datasource`); } let rowHeight = null; // this directive only works with fixed height rows. 
let viewport = null; // viewport is the parent element which contains the scrolled vieweport let padding = null; // padding is a function which creates padding element of a certain size let isLoading = false; // whether we are fetching data let loadAll = false; // should we load the whole log // Buffer is a sparse array containing list of rows that are already instantiated into dom // or padding. padding have the class .padding, and potentially following buffer elements are // sparsed out. const buffer = []; // Calling linker is the only way I found to get access to the tag name of the template // to prevent the directive scope from pollution a new scope is created and destroyed // right after the repeaterHandler creation is completed const tempScope = $scope.$new(); linker(tempScope, function(template) { const repeaterType = template[0].localName; ({ viewport } = controllers[0]); viewport.css({'overflow-y': 'auto', 'display': 'block'}); rowHeight = template.height(); // Replace angularjs linker by _.template, which is much faster let rowTemplate = `<${repeaterType} style='height:${rowHeight}px;'>` + `${template[0].innerHTML}`; rowTemplate = _.template(rowTemplate, {interpolate: /\{\{::(.+?)\}\}/g} ); linker = (scope, cb) => cb(angular.element(rowTemplate(scope))); padding = function(height) { const result = angular.element(`<${repeaterType} class='padding'>`); result.set_height = function(height) { // we use _height as a cache that holds the height of the padding // using jquery.height() is terribly slow, as it internally re-style the item result._height = height; if (!result._height_changing) { $timeout(function() { result.height(result._height * rowHeight); result._height_changing = false; }); } return result._height_changing = true; }; result.set_height(height); return result; }; return tempScope.$destroy(); }); // init with 1 row 0 size padding buffer[0] = padding(0,0); const parent = angular.element("
"); element.after(parent); parent.append(buffer[0]); const viewportScope = viewport.scope() || $rootScope; if (angular.isDefined(($attr.isLoading))) { loading = function(value) { isLoading = value; viewportScope[$attr.isLoading] = isLoading; if (datasource.loading) { return datasource.loading(value); } }; } else { loading = function(value) { isLoading = value; if (datasource.loading) { return datasource.loading(value); } }; } const insertItem = function(beforePos, pos, item) { // don't overwritte already loaded dom if ((buffer[pos] != null) && (buffer[pos]._height == null)) { return; } const itemScope = {}; itemScope[itemName] = item; itemScope.$index = pos; return linker(itemScope, function(clone) { let afterPadding = 0; if (buffer[beforePos]._height != null) { afterPadding = buffer[beforePos]._height; afterPadding -= ((pos - beforePos) + 1); buffer[beforePos].set_height(pos - beforePos); } buffer[beforePos].after(clone); if (beforePos === pos) { buffer[pos].remove(); buffer[pos] = undefined; } // push after padding next line or deleted it if (buffer[pos] != null) { if ((buffer[pos + 1] != null) || ((pos + 1) === buffer.length)) { buffer[pos].remove(); } else { buffer[pos].set_height(buffer[pos]._height - 1); buffer[pos + 1] = buffer[pos]; } } else if ((pos < (buffer.length - 1)) && (buffer[pos + 1] == null)) { buffer[pos + 1] = padding(afterPadding); clone.after(buffer[pos + 1]); } return buffer[pos] = clone; }); }; // calculate what rows to load given the scroll viewport const updateView = function() { let endIndex, topIndex; if (loadAll) { topIndex = 0; endIndex = buffer.length; } else { topIndex = Math.floor(viewport.scrollTop() / rowHeight); const numIndex = Math.floor(viewport.outerHeight() / rowHeight); topIndex -= numIndex; endIndex = topIndex + (numIndex * 3); if (topIndex > (buffer.length - 1)) { topIndex = buffer.length - 1; } if (topIndex < 0) { topIndex = 0; } if (endIndex > buffer.length) { endIndex = buffer.length; } } loadView(topIndex, endIndex); 
}; // load some lines to the DOM using the data source, making sure it is not already loaded var loadView = function(topIndex, endIndex) { const fetched = b => b._height == null; if (isLoading) { return; } while ((buffer[topIndex] != null) && fetched(buffer[topIndex]) && (topIndex < endIndex)) { topIndex++; } while ((buffer[endIndex - 1] != null) && fetched(buffer[endIndex - 1 ]) && (topIndex < endIndex)) { endIndex--; } if (topIndex === endIndex) { // all is loaded return; } loading(true); let previousElemIndex = findElement(topIndex); datasource.get(topIndex, endIndex - topIndex).then(function(d) { loading(false); for (let item of Array.from(d)) { insertItem(previousElemIndex, topIndex, item); previousElemIndex = topIndex; topIndex++; } $timeout(() => maybeUpdateView()); }); }; // find an element in the buffer, skipping undefined directly to padding element // representing this element var findElement = function(i) { while (i > 0) { if (buffer[i] != null) { return i; } i--; } return 0; }; // Create padding in the end of the buffer const updateTotalSize = function(newSize) { if (newSize > buffer.length) { const lastElementIndex = findElement(buffer.length - 1); const lastElement = buffer[lastElementIndex]; parent.height(newSize*rowHeight); buffer[newSize - 1] = undefined; if (lastElement._height != null) { lastElement.set_height(newSize - lastElementIndex); } return $timeout(() => maybeUpdateView()); } }; var maybeUpdateView = function() { if (!$rootScope.$$phase && !isLoading) { return $timeout(updateView); } }; const setScrollPosition = pos => $timeout(function() { viewport.scrollTop(pos * rowHeight); return maybeUpdateView(); } , 100) ; $(window).bind('resize', maybeUpdateView); viewport.bind('scroll', maybeUpdateView); $scope.$watch($attr.totalSize, n => updateTotalSize(n)); $scope.$watch($attr.scrollPosition, function(n) { if (n != null) { setScrollPosition(n); } }); $scope.$watch($attr.loadAll, function(n) { if (n) { loadAll = true; 
$timeout(maybeUpdateView); } }); $scope.$on('$destroy', function() { $(window).unbind('resize', maybeUpdateView); viewport.unbind('scroll', maybeUpdateView); }); }; } }; } } angular.module('app') .directive('scrollViewport', ['$log', ScrollViewport]) .directive('scroll', ['$log', '$injector', '$rootScope', '$timeout', '$window', Scroll]); buildbot-3.4.0/www/base/src/app/builders/log/logviewer/scrollviewport.spec.js000066400000000000000000000154331413250514000274120ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS202: Simplify dynamic range loops * DS205: Consider reworking code to avoid use of IIFEs * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('page with sidebar', function() { let queries, rootScope, scope, timeout; beforeEach(angular.mock.module("app")); let elmBody = (scope = (rootScope = (queries = (timeout = null)))); let elmContent = null; const padding = pix => ({type: "padding", height: pix}); const elements = (start, end) => ({type: "elements", start, end}); const assertDOM = function(l) { const childs = []; $("div", elmContent).each((i, c) => childs.push(c)); for (var item of Array.from(l)) { var c; if (item.type === "padding") { c = childs.shift(); expect($(c).hasClass("padding")).toBe(true, c.outerHTML); expect($(c).height()).toEqual(item.height, c.outerHTML); } if (item.type === "elements") { for (let i = item.start, { end } = item, asc = item.start <= end; asc ? i <= end : i >= end; asc ? 
i++ : i--) { c = childs.shift(); expect($(c).hasClass("padding")).toBe(false, c.outerHTML); expect(c.innerText).toEqual(i.toString() + "a" + i.toString(), c.outerHTML); } } } }; const printDOM = () => $("div", elmContent).each(function() { if ($(this).hasClass("padding")) { return console.log("padding", $(this).height()); } else { return console.log("row", this.innerText, $(this).height()); } }) ; const scrollTo = function(pos, verifyPos) { if (verifyPos == null) { verifyPos = pos; } // we scroll pos elmBody.scrollTop(pos); // make sure that worked expect(elmBody.scrollTop()).toBe(verifyPos); // as the scroll is automatic, we need to force the event elmBody.trigger("scroll"); timeout.flush(); // make sure it did not changed expect(elmBody.scrollTop()).toBe(verifyPos); }; beforeEach(inject(function($rootScope, $compile, glMenuService, $timeout, $q, $document) { timeout = $timeout; queries = []; rootScope = $rootScope; elmBody = angular.element( '
'+ '
{{::$index}}a{{::item.v}}'+ '
' ); scope = $rootScope.$new(); scope.items = { get(index, num) { queries.push([index, num]); const d = $q.defer(); $timeout(function() { const ret = []; for (let i = 0, end = num - 1, asc = 0 <= end; asc ? i <= end : i >= end; asc ? i++ : i--) { ret.push({v: (index + i)}); } d.resolve(ret); }); return d.promise; } }; $compile(elmBody)(scope)[0]; scope.$digest(); // we need to append to body, so that the element is styled properly, and gets a height elmBody.appendTo("body"); elmContent = $("div", elmBody)[0];})); // ViewPort height is 50, and item height is 10, so a screen should contain 5 item it('should initially load 2 screens', inject(function($timeout) { $timeout.flush(); expect(queries).toEqual([[0,10]]); assertDOM([ elements(0,9), padding(9900) ]); expect(elmBody[0].scrollHeight).toEqual(1000 * 10); }) ); it('if scroll to middle, should load 3 screens', inject(function($timeout) { // initial load $timeout.flush(); scrollTo(600); expect(queries).toEqual([[0,10], [55, 15]]); assertDOM([ elements(0,9), // 100 padding(450), // 550 elements(55,69), // 700 padding(10000 - 700) ]); expect(elmBody[0].scrollHeight).toEqual(1000 * 10); }) ); it('several scroll loads several screens, and paddings are cleaned out', inject(function($timeout) { // initial load $timeout.flush(); scrollTo(600); expect(queries).toEqual([[0,10], [55, 15]]); assertDOM([ elements(0,9), // 100 padding(450), // 550 elements(55,69), // 700 padding(10000 - 700) ]); expect(elmBody[0].scrollHeight).toEqual(1000 * 10); scrollTo(400); expect(queries).toEqual([[0,10], [55, 15], [35, 15]]); assertDOM([ elements(0,9), // 100 padding(250), // 350 elements(35,49), // 500 padding(50), // 550 elements(55,69), // 700 padding(10000 - 700) ]); expect(elmBody[0].scrollHeight).toEqual(1000 * 10); scrollTo(500); expect(queries).toEqual([[0,10], [55, 15], [35, 15], [50, 5]]); assertDOM([ elements(0,9), // 100 padding(250), // 350 elements(35, 69), // 700 padding(10000 - 700) ]); 
expect(elmBody[0].scrollHeight).toEqual(1000 * 10); scrollTo(100); expect(queries).toEqual([[0,10], [55, 15], [35, 15], [50, 5], [10, 10]]); assertDOM([ elements(0,19), // 200 padding(150), // 350 elements(35, 69), // 700 padding(10000 - 700) ]); expect(elmBody[0].scrollHeight).toEqual(1000 * 10); scrollTo(200); expect(queries).toEqual([[0,10], [55, 15], [35, 15], [50, 5], [10, 10], [20, 10]]); assertDOM([ elements(0,29), // 300 padding(50), // 350 elements(35, 69), // 700 padding(10000 - 700) ]); expect(elmBody[0].scrollHeight).toEqual(1000 * 10); scrollTo(300); expect(queries).toEqual([[0,10], [55, 15], [35, 15], [50, 5], [10, 10], [20, 10], [30, 5]]); assertDOM([ elements(0,69), // 700 padding(10000 - 700) ]); expect(elmBody[0].scrollHeight).toEqual(1000 * 10); }) ); it('Scroll to the end', inject(function($timeout) { // initial load $timeout.flush(); scrollTo(10000, 9950); expect(queries).toEqual([[0,10], [990, 10]]); assertDOM([ elements(0,9), // 100 padding(9800), // 9900 elements(990, 999) // 10000 ]);})); }); buildbot-3.4.0/www/base/src/app/builders/services/000077500000000000000000000000001413250514000220565ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/builders/services/findbuilds.factory.js000066400000000000000000000024451413250514000262120ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class FindBuilds { constructor($log, scopeTimeout, dataService, $state, RESULTS) { const find_build = function($scope, buildrequestid, redirect_to_build) { // get the builds that are addressing this buildrequestid const data = dataService.open().closeOnDestroy($scope); $scope.builds = data.getBuilds({buildrequestid}); $scope.builds.onNew = function(build) { if (build.results !== RESULTS.RETRY) { if (redirect_to_build) { $state.go("build", { builder:build.builderid, build:build.number } 
); return; } // we found a candidate build, no need to keep registered to the stream of builds $scope.builds.close(); } }; }; return find_build; } } angular.module('app') .factory('findBuilds', ['$log', 'scopeTimeout', 'dataService', '$state', 'RESULTS', FindBuilds]); buildbot-3.4.0/www/base/src/app/builders/services/timeout.factory.js000066400000000000000000000010421413250514000255450ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class scopeTimeout { constructor($timeout) { return function($scope, fn, delay, invokeApply) { const ret = $timeout(fn, delay, invokeApply); $scope.$on('$destroy', () => $timeout.cancel(ret)); return ret; }; } } angular.module('app') .factory('scopeTimeout', ['$timeout', scopeTimeout]); buildbot-3.4.0/www/base/src/app/builders/step/000077500000000000000000000000001413250514000212065ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/builders/step/step.controller.js000066400000000000000000000042711413250514000247050ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class StepController { constructor($log, $scope, $location, dataService, dataUtilsService, faviconService, $stateParams, glBreadcrumbService, publicFieldsFilter) { const data = dataService.open().closeOnDestroy($scope); const builderid = dataUtilsService.numberOrString($stateParams.builder); const buildnumber = dataUtilsService.numberOrString($stateParams.build); const stepnumber = dataUtilsService.numberOrString($stateParams.step); data.getBuilders(builderid).then(function(builders) { let builder; $scope.builder = (builder = builders[0]); builder.getBuilds(buildnumber).then(function(builds) { let build; $scope.build = (build = 
builds[0]); build.getSteps(stepnumber).then(function(steps) { const step = steps[0]; faviconService.setFavIcon(step); glBreadcrumbService.setBreadcrumb([{ caption: "Builders", sref: "builders" } , { caption: builder.name, sref: `builder({builder:${builder.builderid}})` } , { caption: build.number, sref: `build({builder:${builder.builderid}, build:${build.number}})` } , { caption: step.name, sref: `step({builder:${builder.builderid}, build:${build.number}, step:${step.number}})` } ]); step.loadLogs(); $scope.step = publicFieldsFilter(step); }); }); }); } } angular.module('app') .controller('stepController', ['$log', '$scope', '$location', 'dataService', 'dataUtilsService', 'faviconService', '$stateParams', 'glBreadcrumbService', 'publicFieldsFilter', StepController]); buildbot-3.4.0/www/base/src/app/builders/step/step.route.js000066400000000000000000000011671413250514000236610ustar00rootroot00000000000000class StepState { constructor($stateProvider) { // Name of the state const name = 'step'; // Configuration const cfg = { tabid: 'builders', pageTitle: _.template("Buildbot: build <%= build %> step: <%= step %>") }; // Register new state $stateProvider.state({ controller: `${name}Controller`, template: require('./step.tpl.jade'), name, url: '/builders/:builder/builds/:build/steps/:step', data: cfg }); } } angular.module('app') .config(['$stateProvider', StepState]); buildbot-3.4.0/www/base/src/app/builders/step/step.tpl.jade000066400000000000000000000006401413250514000236040ustar00rootroot00000000000000.container .row .col-sm-6 | status: {{step.state_string}} ul.unstyled li(ng-repeat="log in step.logs") a(ui-sref="log({builder:builder.builderid, build:build.number, step: step.number, log:log.slug})") {{log.name}} ul.unstyled li(ng-repeat="url in step.urls") a(target="_blank", ng-href="{{url.url}}") {{url.name}} .col-sm-6 rawdata(data="step") 
buildbot-3.4.0/www/base/src/app/buildrequests/000077500000000000000000000000001413250514000213155ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/buildrequests/pendingbuildrequests.controller.js000066400000000000000000000024401413250514000302750ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Pendingbuildrequests { constructor($log, $scope, dataService, bbSettingsService, buildersService, restService) { $scope.settings = bbSettingsService.getSettingsGroup("BuildRequests"); const buildrequestFetchLimit = $scope.settings.buildrequestFetchLimit.value; const data = dataService.open().closeOnDestroy($scope); $scope.buildrequests = data.getBuildrequests({limit: buildrequestFetchLimit, order:'-submitted_at', claimed:false}); $scope.properties = {}; $scope.buildrequests.onNew = function(buildrequest) { restService.get(`buildsets/${buildrequest.buildsetid}/properties`).then(function(response) { buildrequest.properties = response.properties[0]; _.assign($scope.properties, response.properties[0]); }); buildrequest.builder = buildersService.getBuilder(buildrequest.builderid); }; } } angular.module('app') .controller('pendingbuildrequestsController', ['$log', '$scope', 'dataService', 'bbSettingsService', 'buildersService', 'restService', Pendingbuildrequests]); buildbot-3.4.0/www/base/src/app/buildrequests/pendingbuildrequests.route.js000066400000000000000000000021221413250514000272450ustar00rootroot00000000000000class PendingBuildRequestsState { constructor($stateProvider, bbSettingsServiceProvider) { // Name of the state const name = 'pendingbuildrequests'; // Configuration const cfg = { group: "builds", caption: 'Pending Buildrequests' }; // Register new state const state = { controller: `${name}Controller`, template: require('./pendingbuildrequests.tpl.jade'), name, url: 
'/pendingbuildrequests', data: cfg }; $stateProvider.state(state); bbSettingsServiceProvider.addSettingsGroup({ name:'BuildRequests', caption: 'Buildrequests page related settings', items:[{ type:'integer', name:'buildrequestFetchLimit', caption:'Maximum number of pending buildrequests to fetch', default_value: 50 } ]}); } } angular.module('app') .config(['$stateProvider', 'bbSettingsServiceProvider', PendingBuildRequestsState]); buildbot-3.4.0/www/base/src/app/buildrequests/pendingbuildrequests.tpl.jade000066400000000000000000000017361413250514000272070ustar00rootroot00000000000000.container .row h4 Pending Buildrequests: span(ng-hide='buildrequests.length>0') | None table.table.table-hover.table-striped.table-condensed.table-bordered(ng-show='buildrequests.length>0') tr td(width='100px') # td(width='150px') Builder td(width='150px') Submitted At td(width='150px') Owner td(ng-repeat="(k,v) in properties") {{k}} tr(ng-repeat='br in buildrequests | orderBy:"-submitted_at"' ) td a(ui-sref="buildrequest({buildrequest:br.buildrequestid})") span.badge-status {{br.buildrequestid}} td a(ui-sref="builder({builder:br.builderid})") span {{br.builder.name}} td span(title="{{br.submitted_at | dateformat:'LLL'}}") | {{br.submitted_at | timeago }} td span {{br.properties.owner[0]}} td(ng-repeat="(k, v) in properties") span {{br.properties[k][0]}} buildbot-3.4.0/www/base/src/app/changes/000077500000000000000000000000001413250514000200325ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/changes/changebuilds/000077500000000000000000000000001413250514000224625ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/changes/changebuilds/changebuilds.controller.js000066400000000000000000000030211413250514000276260ustar00rootroot00000000000000class ChangeBuildsController { constructor($scope, dataService, bbSettingsService, $stateParams, resultsService, $interval, restService) { _.mixin($scope, resultsService); $scope.settings = 
bbSettingsService.getSettingsGroup('ChangeBuilds'); $scope.$watch('settings', () => bbSettingsService.save() , true); const buildsFetchLimit = $scope.settings.buildsFetchLimit.value; const dataAccessor = dataService.open().closeOnDestroy($scope); $scope.builders = dataAccessor.getBuilders(); const changeId = $scope.changeId = $stateParams.changeid; dataAccessor.getChanges(changeId).onNew = function(change) { $scope.change = change; } const getBuildsData = function() { let requestUrl = `changes/${changeId}/builds?property=owners&property=workername`; if (!buildsFetchLimit == '') { requestUrl = `changes/${changeId}/builds?property=owners&property=workername&limit=${buildsFetchLimit}`; } restService.get(requestUrl).then((data) => { $scope.builds = data.builds; }); } getBuildsData(); const stop = $interval(() => { getBuildsData(); }, 5000); $scope.$on('$destroy', () => $interval.cancel(stop)); } } angular.module('app') .controller('changebuildsController', ['$scope', 'dataService', 'bbSettingsService', '$stateParams', 'resultsService', '$interval', 'restService', ChangeBuildsController]); buildbot-3.4.0/www/base/src/app/changes/changebuilds/changebuilds.route.js000066400000000000000000000017701413250514000266120ustar00rootroot00000000000000class ChangeBuildsState { constructor($stateProvider, bbSettingsServiceProvider) { // Name of the state const name = 'changebuilds'; // Configuration const cfg = {} // Register new state const state = { controller: `${name}Controller`, template: require('./changebuilds.tpl.jade'), name, url: '/changes/:changeid', data: cfg, reloadOnSearch: false } $stateProvider.state(state); bbSettingsServiceProvider.addSettingsGroup({ name:'ChangeBuilds', caption: 'ChangeBuilds page related settings', items:[{ type:'integer', name:'buildsFetchLimit', caption:'Maximum number of builds to fetch for the selected change', default_value: '' }] }); } } angular.module('app') .config(['$stateProvider', 'bbSettingsServiceProvider', ChangeBuildsState]); 
buildbot-3.4.0/www/base/src/app/changes/changebuilds/changebuilds.tpl.jade000066400000000000000000000002301413250514000265300ustar00rootroot00000000000000.container .row changedetails(change="change") div(ng-if="builds") builds-table(builds="builds", builders="builders", ng-if="builds") buildbot-3.4.0/www/base/src/app/changes/changes.controller.js000066400000000000000000000013471413250514000241670ustar00rootroot00000000000000class Changes { constructor($log, $scope, dataService, bbSettingsService, $location, $rootScope) { $scope.settings = bbSettingsService.getSettingsGroup("Changes"); $scope.$watch('settings', () => { bbSettingsService.save(); }, true); const changesFetchLimit = $scope.settings.changesFetchLimit.value; const data = dataService.open().closeOnDestroy($scope); // unlike other order, this particular order by changeid is optimised by the backend $scope.changes = data.getChanges({limit: changesFetchLimit, order:'-changeid'}); } } angular.module('app') .controller('changesController', ['$log', '$scope', 'dataService', 'bbSettingsService', '$location', '$rootScope', Changes]); buildbot-3.4.0/www/base/src/app/changes/changes.route.js000066400000000000000000000020021413250514000231270ustar00rootroot00000000000000class ChangesState { constructor($stateProvider, bbSettingsServiceProvider) { // Name of the state const name = 'changes'; // Configuration const cfg = { group: "builds", caption: 'Last Changes' }; // Register new state const state = { controller: `${name}Controller`, template: require('./changes.tpl.jade'), name, url: '/changes?id', data: cfg, reloadOnSearch: false }; $stateProvider.state(state); bbSettingsServiceProvider.addSettingsGroup({ name:'Changes', caption: 'Changes page related settings', items:[{ type:'integer', name:'changesFetchLimit', caption:'Maximum number of changes to fetch', default_value: 50 }]}); } } angular.module('app') .config(['$stateProvider', 'bbSettingsServiceProvider', ChangesState]); 
buildbot-3.4.0/www/base/src/app/changes/changes.tpl.jade000066400000000000000000000000661413250514000230670ustar00rootroot00000000000000.container .row changelist(changes="changes") buildbot-3.4.0/www/base/src/app/common/000077500000000000000000000000001413250514000177125ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/common.constant.js000066400000000000000000000040651413250514000233750ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const invert_constant = function(constant_name, inverted_constant_name) { const inverted = function(original) { return _.invert(original); } angular.module('common').service(inverted_constant_name, [constant_name, inverted]); } class Baseurlws { constructor() { let href = location.href.toString(); if (location.hash !== "") { href = href.replace(location.hash, ""); } if (href[href.length - 1] !== "/") { href = href + "/"; } return href.replace(/^http/, "ws") + "ws"; } } class Plurals { constructor() { return { build: "builds", builder: "builders", buildset: "buildsets", buildrequest: "buildrequests", worker: "workers", master: "masters", change: "changes", step: "steps", log: "logs", logchunk: "logchunks", forcescheduler: "forceschedulers", scheduler: "schedulers", spec: "specs", property: "properties" }; } } invert_constant('PLURALS', 'SINGULARS'); class Results { constructor() { return { SUCCESS: 0, WARNINGS: 1, FAILURE: 2, SKIPPED: 3, EXCEPTION: 4, RETRY: 5, CANCELLED: 6 }; } } invert_constant('RESULTS', 'RESULTS_TEXT'); class ResultsColor { constructor() { return { SUCCESS: '#8d4', WARNINGS: '#fa3', FAILURE: '#e88', SKIPPED: '#AADDEE', EXCEPTION: '#c6c', RETRY: '#ecc', CANCELLED: '#ecc' }; } } angular.module('common') .constant('BASEURLWS', new Baseurlws()) .constant('PLURALS', new Plurals()) .constant('RESULTS', new Results()) 
.constant('RESULTS_COLOR', new ResultsColor()); buildbot-3.4.0/www/base/src/app/common/common.module.js000066400000000000000000000003071413250514000230240ustar00rootroot00000000000000const dependencies = [ 'ui.router', 'RecursionHelper', 'buildbot_config' ]; // Register new module angular.module('common', dependencies); angular.module('app').requires.push('common'); buildbot-3.4.0/www/base/src/app/common/directives/000077500000000000000000000000001413250514000220535ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/basefield/000077500000000000000000000000001413250514000237715ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/basefield/basefield.directive.js000066400000000000000000000034761413250514000302340ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // base widget handling has-error, and error message popups class Basefield { constructor() { return { replace: true, transclude: true, restrict: 'E', scope: true, template: require('./basefield.tpl.jade'), controller: '_basefieldController' }; } } class _basefield { constructor($scope) { // clear error on value change $scope.$watch("field.value", (o,n) => $scope.field.haserrors = false); if ($scope.field.autopopulate) { const all = $scope.field.all_fields_by_name; // when our field change, we update the fields that we are suppose to $scope.$watch("field.value", function(n, o) { const autopopulate = $scope.field.autopopulate[n]; let errors = ""; if (autopopulate != null) { for (let k in autopopulate) { const v = autopopulate[k]; if (all[k] != null) { all[k].value = v; } else { errors += `${k} is not a field name`; } } } if (errors.length>0) { $scope.field.errors = `bad autopopulate configuration: ${errors}`; $scope.field.haserrors = true; 
} }); } } } angular.module('common') .directive('basefield', [Basefield]) .controller('_basefieldController', ['$scope', _basefield]); buildbot-3.4.0/www/base/src/app/common/directives/basefield/basefield.tpl.jade000066400000000000000000000004171413250514000273340ustar00rootroot00000000000000div div.form-group(ng-class="{'has-warning': field.warnings,'has-error': field.haserrors}") div(ng-transclude, uib-popover="{{field.errors}}", popover-title="{{field.label}}", popover-is-open="field.haserrors", popover-trigger="none") buildbot-3.4.0/www/base/src/app/common/directives/buildrequestsummary/000077500000000000000000000000001413250514000262015ustar00rootroot00000000000000buildrequestsummary.directive.js000066400000000000000000000031121413250514000345600ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/buildrequestsummary/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Buildrequestsummary { constructor(RecursionHelper) { return { replace: true, restrict: 'E', scope: {buildrequestid: '=?'}, template: require('./buildrequestsummary.tpl.jade'), compile: RecursionHelper.compile, controller: '_buildrequestsummaryController' }; } } class _buildrequestsummary { constructor($scope, dataService, buildersService, findBuilds, resultsService) { _.mixin($scope, resultsService); $scope.$watch("buildrequest.claimed", function(n, o) { if (n) { // if it is unclaimed, then claimed, we need to try again findBuilds($scope, $scope.buildrequest.buildrequestid); } }); const data = dataService.open().closeOnDestroy($scope); data.getBuildrequests($scope.buildrequestid).onNew = function(buildrequest) { $scope.buildrequest = buildrequest; data.getBuildsets(buildrequest.buildsetid).onNew = buildset => $scope.buildset = buildset; $scope.builder = buildersService.getBuilder(buildrequest.builderid); }; } } 
angular.module('common') .directive('buildrequestsummary', ['RecursionHelper', Buildrequestsummary]) .controller('_buildrequestsummaryController', ['$scope', 'dataService', 'buildersService', 'findBuilds', 'resultsService', _buildrequestsummary]); buildrequestsummary.directive.spec.js000066400000000000000000000054311413250514000355170ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/buildrequestsummary/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ beforeEach(angular.mock.module('app')); describe('buildrequest summary controller', function() { let $q, $rootScope, $timeout, createController; let $scope = ($rootScope = ($q = ($timeout = null))); let goneto = (createController = null); let dataService = null; // override "$state" beforeEach(angular.mock.module(function($provide) { $provide.value("$state", {go(...args) { return goneto = args; }}); }) ); const injected = function($injector) { $rootScope = $injector.get('$rootScope'); $scope = $rootScope.$new(); $scope.buildrequestid = 1; $timeout = $injector.get('$timeout'); const $controller = $injector.get('$controller'); $q = $injector.get('$q'); dataService = $injector.get('dataService'); // stub out the actual backend of mqservice createController = () => $controller('_buildrequestsummaryController', {$scope}) ; }; beforeEach(inject(injected)); it('should get the buildrequest', function() { const buildrequests = [{buildrequestid: 1, builderid: 2, buildsetid: 3}]; dataService.expect('builders', buildrequests); dataService.expect('buildrequests/1', buildrequests); dataService.expect('buildsets/3', buildrequests); expect(dataService.get).not.toHaveBeenCalled(); const controller = createController(); $timeout.flush(); dataService.verifyNoOutstandingExpectation(); expect($scope.buildrequest.buildrequestid).toBe(1); }); it('should query for builds 
again if first query returns 0', function() { const buildrequests = [{buildrequestid: 1, builderid: 2, buildsetid: 3}]; dataService.expect('builders', buildrequests); dataService.expect('buildrequests/1', buildrequests); dataService.expect('buildsets/3', buildrequests); let builds = []; const controller = createController(); $timeout.flush(); dataService.verifyNoOutstandingExpectation(); dataService.expect('builds', {buildrequestid: 1}, builds); $scope.$apply(() => $scope.buildrequest.claimed = true); $timeout.flush(); dataService.verifyNoOutstandingExpectation(); expect($scope.builds.length).toBe(builds.length); builds = [{buildid: 1, buildrequestid: 1}, {buildid: 2, buildrequestid: 1}]; $scope.builds.from(builds); $timeout.flush(); dataService.verifyNoOutstandingExpectation(); expect($scope.builds.length).toBe(builds.length); }); }); buildbot-3.4.0/www/base/src/app/common/directives/buildrequestsummary/buildrequestsummary.tpl.jade000066400000000000000000000023741413250514000337600ustar00rootroot00000000000000div div(ng-repeat="build in builds") buildsummary(buildid="build.buildid", condensed="true") div(ng-if="!builds") .panel.panel-default.results_PENDING(style="margin-bottom:0px;opacity:.7") .panel-heading.no-select .flex-row(ng-if="buildrequest.$resolved") .flex-grow-1 a(ui-sref="builder({builder:buildrequest.builderid})") | {{builder.name}} | #{' '}/ buildrequests /#{' '} a(ui-sref="buildrequest({buildrequest:buildrequest.buildrequestid})") | {{buildrequest.buildrequestid}} | #{' '}| {{buildset.reason}} .flex-grow-1.text-right div(ng-if="!buildrequest.claimed") span | waiting for available worker#{' '} .label.results_PENDING | ... div(ng-if="buildrequest.claimed") span | #{' '}{{buildrequest.state_string}}#{' '} .label(ng-class="results2class(buildrequest)") | {{results2text(buildrequest)}} .flex-row(ng-if="!buildrequest.$resolved") | loading buildrequests details... 
buildbot-3.4.0/www/base/src/app/common/directives/builds/000077500000000000000000000000001413250514000233355ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/builds/buildstable.directive.js000066400000000000000000000012321413250514000301400ustar00rootroot00000000000000class BuildsTable { constructor(RecursionHelper) { return { replace: true, restrict: 'E', scope: {builds: '=?', builder: '=?', builders: '=?'}, template: require('./buildstable.tpl.jade'), controller: '_buildstableController' }; } } class _buildstable { constructor($scope, resultsService) { // make resultsService utilities available in the template _.mixin($scope, resultsService); } } angular.module('common') .directive('buildsTable', ['RecursionHelper', BuildsTable]) .controller('_buildstableController', ['$scope', 'resultsService', _buildstable]); buildbot-3.4.0/www/base/src/app/common/directives/builds/buildstable.tpl.jade000066400000000000000000000036571413250514000272650ustar00rootroot00000000000000.row h4 Builds: span(ng-hide='builds.length>0') | None table.table.table-hover.table-striped.table-condensed(ng-show='builds.length>0') tr td(width='200px', ng-show="builders") Builder td(width='100px') # td(width='150px') Started At td(width='150px') Duration td(width='200px') Owners td(width='150px') Worker td Status tr(ng-repeat='build in builds | orderBy:"-started_at"') td(ng-show="builders") {{ builders.get(build.builderid).name }} td a.bb-buildid-link(ui-sref="build({builder:build.builderid, build:build.number})") script(type="text/ng-template" id="buildsummarytooltip") buildsummary(buildid="build.buildid" type="tooltip") span.badge-status(uib-tooltip-template="'buildsummarytooltip'" tooltip-class="buildsummarytooltipstyle" tooltip-placement="auto left-bottom" tooltip-popup-delay="400" tooltip-popup-close-delay="400" ng-class="results2class(build, 'pulse')") span.badge-inactive | {{build.number}} span.badge-active {{results2text(build)}} td 
span(title="{{build.started_at | dateformat:'LLL'}}") | {{build.started_at | timeago }} td span(ng-show="build.complete", title="{{(build.complete_at - build.started_at)| durationformat:'LLL' }}") | {{(build.complete_at - build.started_at)| duration }} td span(ng-repeat="owner in build.properties.owners[0]") {{owner}} td a(ui-sref='worker({worker: build.workerid})') | {{build.properties.workername[0]}} td ul.list-inline li | {{build.state_string}} buildbot-3.4.0/www/base/src/app/common/directives/buildsticker/000077500000000000000000000000001413250514000245375ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/buildsticker/buildsicker.less000066400000000000000000000002231413250514000277240ustar00rootroot00000000000000 .buildsticker { width: 100%; margin: 0 2px 2px 0; .row { white-space: nowrap; text-overflow: ellipsis; overflow: hidden; } } buildbot-3.4.0/www/base/src/app/common/directives/buildsticker/buildsticker.directive.js000066400000000000000000000032041413250514000315350ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ import moment from 'moment'; class Buildsticker { constructor(RecursionHelper) { return { replace: true, restrict: 'E', scope: {build: '=?', builder: '=?', buildid: '=?'}, template: require('./buildsticker.tpl.jade'), controller: '_buildstickerController' }; } } class _buildsticker { constructor($scope, dataService, buildersService, resultsService, $urlMatcherFactory, $location) { $scope.$watch((() => moment().unix()), () => $scope.now = moment().unix()); // make resultsService utilities available in the template _.mixin($scope, resultsService); const data = dataService.open().closeOnDestroy($scope); $scope.$watch("buildid", function(buildid) { if ((buildid == null)) { return; } 
data.getBuilds(buildid).onNew = build => $scope.build = build; }); $scope.$watch('build', function(build) { if (!$scope.builder && ((build != null ? build.builderid : undefined) != null)) { $scope.builder = buildersService.getBuilder(build.builderid); } }); } } angular.module('common') .directive('buildsticker', ['RecursionHelper', Buildsticker]) .controller('_buildstickerController', ['$scope', 'dataService', 'buildersService', 'resultsService', '$urlMatcherFactory', '$location', _buildsticker]); buildbot-3.4.0/www/base/src/app/common/directives/buildsticker/buildsticker.directive.spec.js000066400000000000000000000066211413250514000324740ustar00rootroot00000000000000beforeEach(angular.mock.module('app')); describe('buildsticker controller', function() { let $compile, $rootScope, $timeout, results, scope; let dataService = (scope = ($compile = (results = ($timeout = ($rootScope = null))))); const injected = function($injector) { $compile = $injector.get('$compile'); $rootScope = $injector.get('$rootScope'); scope = $rootScope.$new(); const $controller = $injector.get('$controller'); const $q = $injector.get('$q'); $timeout = $injector.get('$timeout'); results = $injector.get('RESULTS'); dataService = $injector.get('dataService'); }; beforeEach(inject(injected)); it('directive should generate correct html', function() { const build = {buildid: 3, builderid: 2, number: 1}; dataService.when('builds', [build]); dataService.when('builds/3', [build]); dataService.when('builders', [{builderid: 2}]); dataService.when('builders/2', [{builderid: 2}]); const data = dataService.open(); data.getBuilds(build.buildid).onNew = build => scope.build = build; const element = $compile("")(scope); $timeout.flush(); $rootScope.$digest(); const sticker = element.children().eq(0); const row0 = sticker.children().eq(0); const row1 = sticker.children().eq(1); const resultSpan = row0.children().eq(0); const buildLink = row0.children().eq(1); const durationSpan = row1.children().eq(0); const 
startedSpan = row1.children().eq(1); const stateSpan = row1.children().eq(2); // the link of build should be correct expect(buildLink.attr('href')).toBe('#/builders/2/builds/1'); // pending state scope.build.complete = false; scope.build.started_at = Date.now(); scope.build.results = -1; scope.build.state_string = 'pending'; scope.$apply(); expect(resultSpan.hasClass('results_PENDING')).toBe(true); expect(resultSpan.text()).toBe('...'); expect(durationSpan.hasClass('ng-hide')).toBe(true); expect(startedSpan.hasClass('ng-hide')).toBe(false); expect(stateSpan.text()).toBe('pending'); // success state scope.build.complete = true; scope.build.complete_at = scope.build.started_at + 1; scope.build.results = results.SUCCESS; scope.build.state_string = 'finished'; scope.$apply(); expect(resultSpan.hasClass('results_SUCCESS')).toBe(true); expect(resultSpan.text()).toBe('SUCCESS'); expect(durationSpan.hasClass('ng-hide')).toBe(false); expect(startedSpan.hasClass('ng-hide')).toBe(true); expect(durationSpan.text()).toBe('1 s'); expect(stateSpan.text()).toBe('finished'); // failed state scope.build.complete = true; scope.build.complete_at = scope.build.started_at + 1; scope.build.results = results.FAILURE; scope.build.state_string = 'failed'; scope.$apply(); expect(resultSpan.hasClass('results_FAILURE')).toBe(true); expect(resultSpan.text()).toBe('FAILURE'); expect(durationSpan.hasClass('ng-hide')).toBe(false); expect(startedSpan.hasClass('ng-hide')).toBe(true); expect(durationSpan.text()).toBe('1 s'); expect(stateSpan.text()).toBe('failed'); }); }); buildbot-3.4.0/www/base/src/app/common/directives/buildsticker/buildsticker.tpl.jade000066400000000000000000000011111413250514000306400ustar00rootroot00000000000000.panel.panel-default.buildsticker(ng-class="results2class(build)") .panel-body.no-select .row span.pull-right.label(ng-class="results2class(build)") {{results2text(build)}} a(ui-sref="build({builder:builder.builderid, build:build.number})") | 
{{builder.name}}/{{build.number}} .row span.pull-right(ng-show="build.complete") | {{(build.complete_at - build.started_at)| durationformat:'LLL' }} span.pull-right(ng-show="!build.complete") | {{(now - build.started_at)| durationformat:'LLL' }} span {{build.state_string}} buildbot-3.4.0/www/base/src/app/common/directives/buildsummary/000077500000000000000000000000001413250514000245705ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/buildsummary/buildsummary.directive.js000066400000000000000000000223521413250514000316240ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS104: Avoid inline assignments * DS204: Change includes calls to have a more natural evaluation order * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ import moment from 'moment'; import { memoize } from 'lodash' class Buildsummary { constructor(RecursionHelper) { return { replace: true, restrict: 'E', scope: { buildid: '=?', build: '=?', condensed: '=?', parentbuild: '=?', parentrelationship: '=?' 
}, bindToController: true, template: function (element, attrs) { if (attrs.type === "tooltip") return require('./buildsummarytooltip.tpl.jade'); else return require('./buildsummary.tpl.jade'); }, compile: RecursionHelper.compile, controller: '_buildsummaryController', controllerAs: 'buildsummary' }; } } class _buildsummary { constructor($scope, dataService, resultsService, buildersService, $urlMatcherFactory, $location, $interval, RESULTS, bbSettingsService, config) { const self = this; // make resultsService utilities available in the template _.mixin($scope, resultsService); const buildrequestURLMatchers = [] const buildURLMatchers = [] const baseUrls = config['buildbotURLs'] || [config['buildbotURL']] for (const baseurl of baseUrls) { buildrequestURLMatchers.push($urlMatcherFactory.compile( `${baseurl}#buildrequests/{buildrequestid:[0-9]+}`)) buildURLMatchers.push($urlMatcherFactory.compile( `${baseurl}#builders/{builderid:[0-9]+}/builds/{buildnumber:[0-9]+}`)); } function execMatchers(matchers, url) { for (const matcher of matchers) { const res = matcher.exec(url) if (res) { return res } } return null } this.stepUpdated = function(step) { step.fulldisplay = (step.complete === false) || (step.results > 0); if (step.complete) { step.duration = step.complete_at - step.started_at; } step.other_urls = [] step.buildrequests = [] step.builds = [] if (step.buildrequestsCurrentPage === undefined) { // uib-pagination starts counting at 1... 
step.buildrequestsCurrentPage = 1 } for (let url of step.urls) { let brRes = execMatchers(buildrequestURLMatchers, url.url) if (brRes !== null) { step.buildrequests.push({ buildrequestid: brRes.buildrequestid }) continue } let buildRes = execMatchers(buildURLMatchers, url.url) if (buildRes !== null) { step.builds.push({ builderid: buildRes.builderid, buildnumber: buildRes.buildnumber }) continue } step.other_urls.push(url) } } this.$onInit = function () { // to get an update of the current builds every seconds, we need to update self.now // but we want to stop counting when the scope destroys! const stop = $interval(() => { this.now = moment().unix(); }, 1000); $scope.$on("$destroy", () => $interval.cancel(stop)); $scope.settings = bbSettingsService.getSettingsGroup("LogPreview"); $scope.trigger_step_page_size = bbSettingsService.getSettingsGroup("Build").trigger_step_page_size.value; $scope.show_urls = bbSettingsService.getSettingsGroup("Build").show_urls.value; const NONE = 0; const ONLY_NOT_SUCCESS = 1; const EVERYTHING = 2; let details = EVERYTHING; if (this.condensed) { details = NONE; } this.toggleDetails = () => details = (details + 1) % 3; this.levelOfDetails = function () { switch (details) { case NONE: return "None"; case ONLY_NOT_SUCCESS: return "Problems"; case EVERYTHING: return "All"; } }; this.isStepDisplayed = function (step) { if (details === EVERYTHING) { return !step.hidden; } else if (details === ONLY_NOT_SUCCESS) { return (step.results == null) || (step.results !== RESULTS.SUCCESS); } else if (details === NONE) { return false; } }; this.assignDisplayedStepNumber = function (step) { if (step.number === 0) this.display_count = 0 if (this.isStepDisplayed(step)) step.display_num = (this.display_count)++; return true; }; this.getDisplayedStepCount = function () { return self.steps.filter(this.isStepDisplayed).length; }; this.getBuildProperty = function (property) { const hasProperty = self.properties && self.properties.hasOwnProperty(property); if 
(hasProperty) { return self.properties[property][0]; } else { return null; } }; this.isSummaryLog = log => log.name.toLowerCase() === "summary"; this.expandByName = function (log) { let needle; return (log.num_lines > 0) && (needle = log.name.toLowerCase(), Array.from($scope.settings.expand_logs.value.toLowerCase().split(";")).includes(needle)); }; // Returns the logs, sorted with the "Summary" log first, if it exists in the step's list of logs this.getLogs = function (step) { const summaryLogs = step.logs.filter(log => this.isSummaryLog(log)); const logs = summaryLogs.concat(step.logs.filter(log => !this.isSummaryLog(log))); return logs; }; this.toggleFullDisplay = function () { this.fulldisplay = !this.fulldisplay; if (this.fullDisplay) { details = EVERYTHING; } return Array.from(this.steps).map((step) => (step.fulldisplay = this.fulldisplay)); }; this.closeParentModal = function () { if ('modal' in $scope.$parent) return $scope.$parent.modal.close(); }; const data = dataService.open().closeOnDestroy($scope); $scope.$watch((() => this.buildid), function (buildid) { if ((buildid == null)) { return; } data.getBuilds(buildid).onNew = build => self.build = build; }); $scope.$watch((() => this.build), function (build) { if ((build == null)) { return; } if (self.builder) { return; } self.builder = buildersService.getBuilder(build.builderid); build.getProperties().onNew = function (properties) { self.properties = properties; self.reason = self.getBuildProperty('reason'); }; $scope.$watch((() => details), function (details) { if ((details !== NONE) && (self.steps == null)) { self.steps = build.getSteps(); self.steps.onNew = function (step) { step.logs = step.getLogs(); // onUpdate is only called onUpdate, not onNew // but we need to update our additional needed attributes self.steps.onUpdate(step); }; self.steps.onUpdate = self.stepUpdated } }); }); $scope.$watch((() => this.parentbuild), function (build, o) { if ((build == null)) { return; } self.parentbuilder = 
buildersService.getBuilder(build.builderid); }); } } } angular.module('common') .directive('buildsummary', ['RecursionHelper', Buildsummary]) .controller('_buildsummaryController', ['$scope', 'dataService', 'resultsService', 'buildersService', '$urlMatcherFactory', '$location', '$interval', 'RESULTS', 'bbSettingsService', 'config', _buildsummary]); buildbot-3.4.0/www/base/src/app/common/directives/buildsummary/buildsummary.directive.spec.js000066400000000000000000000147331413250514000325610ustar00rootroot00000000000000beforeEach(angular.mock.module('app')); describe('buildsummary controller', function() { let $compile, $rootScope, $stateParams, baseurl, createController, results, $scope; let dataService = ($scope = ($rootScope = ($compile = null))); let $timeout = (createController = ($stateParams = (results = (baseurl = null)))); const injected = function($injector) { results = $injector.get('RESULTS'); $rootScope = $injector.get('$rootScope'); $scope = $rootScope.$new(); $scope.buildid = 1; $scope.condensed = 0; $timeout = $injector.get('$timeout'); $stateParams = $injector.get('$stateParams'); const $q = $injector.get('$q'); $compile = $injector.get('$compile'); baseurl = $injector.get('config')['buildbotURL']; dataService = $injector.get('dataService'); dataService.when('builds/1', [{buildid: 1, builderid: 1}]); dataService.when('builders', [{builderid: 1}]); dataService.when('builders/1', [{builderid: 1}]); dataService.when('builds/1/steps', [{builderid: 1, stepid: 1, number: 1}]); dataService.when('steps/1/logs', [{stepid: 1, logid: 1}, {stepid: 1, logid: 2}]); }; beforeEach(inject(injected)); it('should provide correct isStepDisplayed when condensed', function() { $scope.condensed = true; const element = $compile("")($scope); $scope.$apply(); const { buildsummary } = element.isolateScope(); expect(buildsummary.isStepDisplayed({results:results.SUCCESS})).toBe(false); expect(buildsummary.isStepDisplayed({results:results.WARNING})).toBe(false); 
expect(buildsummary.isStepDisplayed({results:results.FAILURE})).toBe(false); buildsummary.toggleDetails(); expect(buildsummary.isStepDisplayed({results:results.SUCCESS})).toBe(false); expect(buildsummary.isStepDisplayed({results:results.WARNING})).toBe(true); expect(buildsummary.isStepDisplayed({results:results.FAILURE})).toBe(true); buildsummary.toggleDetails(); expect(buildsummary.isStepDisplayed({results:results.SUCCESS})).toBe(true); expect(buildsummary.isStepDisplayed({results:results.WARNING})).toBe(true); expect(buildsummary.isStepDisplayed({results:results.FAILURE})).toBe(true); buildsummary.toggleDetails(); expect(buildsummary.isStepDisplayed({results:results.SUCCESS})).toBe(false); expect(buildsummary.isStepDisplayed({results:results.WARNING})).toBe(false); expect(buildsummary.isStepDisplayed({results:results.FAILURE})).toBe(false); }); it('should provide correct isStepDisplayed when not condensed', function() { $scope.condensed = 0; const element = $compile("")($scope); $scope.$apply(); const { buildsummary } = element.isolateScope(); expect(buildsummary.isStepDisplayed({results:results.SUCCESS})).toBe(true); expect(buildsummary.isStepDisplayed({results:results.WARNING})).toBe(true); expect(buildsummary.isStepDisplayed({results:results.FAILURE})).toBe(true); buildsummary.toggleDetails(); expect(buildsummary.isStepDisplayed({results:results.SUCCESS})).toBe(false); expect(buildsummary.isStepDisplayed({results:results.WARNING})).toBe(false); expect(buildsummary.isStepDisplayed({results:results.FAILURE})).toBe(false); buildsummary.toggleDetails(); expect(buildsummary.isStepDisplayed({results:results.SUCCESS})).toBe(false); expect(buildsummary.isStepDisplayed({results:results.WARNING})).toBe(true); expect(buildsummary.isStepDisplayed({results:results.FAILURE})).toBe(true); buildsummary.toggleDetails(); }); it('should provide correct isStepDisplayed when details = EVERYTHING and when details = NONE', function() { const element = $compile("")($scope); 
$scope.$apply(); const { buildsummary } = element.isolateScope(); // details = EVERYTHING expect(buildsummary.isStepDisplayed({hidden: true})).toBe(false); expect(buildsummary.isStepDisplayed({hidden: false})).toBe(true); buildsummary.toggleDetails(); // set details = NONE expect(buildsummary.isStepDisplayed({hidden: false, results:results.FAILURE})).toBe(false); }); it('should provide correct getDisplayedStepCount', function() { const element = $compile("")($scope); $scope.$apply(); const { buildsummary } = element.isolateScope(); buildsummary.steps = [{hidden: false}, {hidden: false}]; expect(buildsummary.getDisplayedStepCount()).toEqual(2); buildsummary.steps = [{hidden: true}, {hidden: true}]; expect(buildsummary.getDisplayedStepCount()).toEqual(0); buildsummary.steps = [{hidden: true}, {hidden: false}]; expect(buildsummary.getDisplayedStepCount()).toEqual(1); }); it('assignDisplayedStepNumber should assign correct step display number', function() { const element = $compile("")($scope); $scope.$apply(); const { buildsummary } = element.isolateScope(); var step; step = {number: 0, hidden: true, display_num: null}; expect(buildsummary.assignDisplayedStepNumber(step)).toBe(true); expect(step.display_num).toEqual(null); step = {number: 1, hidden: false, display_num: null}; expect(buildsummary.assignDisplayedStepNumber(step)).toBe(true); expect(step.display_num).toEqual(0); step = {number: 2, hidden: false, display_num: null}; expect(buildsummary.assignDisplayedStepNumber(step)).toBe(true); expect(step.display_num).toEqual(1); step = {number: 3, hidden: false, display_num: null}; expect(buildsummary.assignDisplayedStepNumber(step)).toBe(true); expect(step.display_num).toEqual(2); // reset display_num to zero whenever step.number = 0 step = {number: 0, hidden: false, display_num: null}; expect(buildsummary.assignDisplayedStepNumber(step)).toBe(true); expect(step.display_num).toEqual(0); step = {number: 1, hidden: false, display_num: null}; 
expect(buildsummary.assignDisplayedStepNumber(step)).toBe(true); expect(step.display_num).toEqual(1); }); }); buildbot-3.4.0/www/base/src/app/common/directives/buildsummary/buildsummary.tpl.jade000066400000000000000000000107261413250514000307360ustar00rootroot00000000000000.panel.panel-default(ng-class="results2class(buildsummary.build)", style="margin-bottom:0px") .panel-heading .flex-row(ng-if="buildsummary.build.started_at") .flex-grow-1 .btn.btn-xs.btn-default(ng-click="buildsummary.toggleFullDisplay()", title="Expand all step logs") i.fa.fa-chevron-circle-right.rotate(ng-class="{'fa-rotate-90':buildsummary.fulldisplay}") .btn.btn-xs.btn-default(ng-click="buildsummary.toggleDetails()", title="Show steps according to their importance") i.fa.fa-expand | {{buildsummary.levelOfDetails()}} | #{' '} span(ng-if="buildsummary.prefix") {{buildsummary.prefix}}#{' '} a(ui-sref="build({builder:buildsummary.build.builderid, build:buildsummary.build.number})" ng-click="buildsummary.closeParentModal()") | {{buildsummary.builder.name}}/{{buildsummary.build.number}}#{' '} span(ng-if="buildsummary.reason") | {{buildsummary.reason}} .flex-grow-1.text-right span(ng-show="buildsummary.build.complete") | {{(buildsummary.build.complete_at - buildsummary.build.started_at)| durationformat:'LLL' }} span(ng-show="!buildsummary.build.complete") | {{(buildsummary.now - buildsummary.build.started_at)| durationformat:'LLL' }} span | #{' '}{{buildsummary.build.state_string}}#{' '} .label.bb-build-result(ng-class="results2class(buildsummary.build)") | {{results2text(buildsummary.build)}} span(ng-if="buildsummary.parentbuild") | #{' '} a.label(ng-class="results2class(buildsummary.parentbuild)", ui-sref="build({builder:buildsummary.parentbuilder.builderid, build:buildsummary.parentbuild.number})") | {{buildsummary.parentrelationship}}: | {{buildsummary.parentbuilder.name}}/{{buildsummary.parentbuild.number}} .flex-row(ng-if="!buildsummary.build.started_at") | loading build details... 
ul.list-group li.list-group-item(ng-if="buildsummary.isStepDisplayed(step)" ng-repeat="step in buildsummary.steps | orderBy: ['stepid']") div(ng-click="step.fulldisplay=!step.fulldisplay") span.pull-right(ng-if="step.started_at") span(ng-show="step.complete") | {{ step.duration| durationformat:'LLL' }} span(ng-show="!step.complete") | {{ buildsummary.now - step.started_at| durationformat:'LLL' }} | #{' '}{{step.state_string}} span.badge-status(ng-class="results2class(step, 'pulse')") | {{step.number}} | #{' '} i.fa.fa-chevron-circle-right.rotate(ng-class="{'fa-rotate-90':step.fulldisplay}", ng-if="step.logs.length || step.buildrequests.length || (step.other_urls.length && !show_urls)") | #{' '} {{step.name}} span(ng-if="step.buildrequests.length") #{' '} {{step.builds.length}} builds, {{step.buildrequests.length - step.builds.length}} pending builds ul(ng-if="show_urls") li(ng-repeat="url in step.other_urls") a(ng-href="{{url.url}}", target="_blank") {{url.name}} div.anim-stepdetails(ng-if="step.fulldisplay") ul(ng-if="!show_urls") li(ng-repeat="url in step.other_urls") a(ng-href="{{url.url}}", target="_blank") {{url.name}} ul(ng-if="step.buildrequests.length>trigger_step_page_size",uib-pagination,total-items="step.buildrequests.length", ng-model="step.buildrequestsCurrentPage",class="pagination-sm" boundary-link-numbers="true", max-size=10, items-per-page="trigger_step_page_size", style="margin-left:30px;margin-bottom:0px") ul.list-unstyled li(ng-repeat="br in step.buildrequests | limitTo: trigger_step_page_size :(step.buildrequestsCurrentPage-1)*trigger_step_page_size") buildrequestsummary(style="margin-left:30px;margin-top:8px",buildrequestid='br.buildrequestid') logpreview(ng-repeat="log in buildsummary.getLogs(step)", log="log", fulldisplay="step.logs.length == 1 || buildsummary.expandByName(log)", builderid="buildsummary.build.builderid", buildnumber="buildsummary.build.number", step="step") 
buildbot-3.4.0/www/base/src/app/common/directives/buildsummary/buildsummarytooltip.tpl.jade000066400000000000000000000054561413250514000323550ustar00rootroot00000000000000.panel.panel-default(ng-class="results2class(buildsummary.build)", style="margin-bottom:0px") .panel-heading .flex-row(ng-if="buildsummary.build.started_at") .flex-grow-1 span(ng-if="buildsummary.prefix") {{buildsummary.prefix}}#{' '} span | {{buildsummary.builder.name | limitStringLengthTo:80}} span.badge-status(ng-class="results2class(buildsummary.build)") | {{buildsummary.build.number}}#{' '} span(ng-if="buildsummary.reason") | {{" "}} | {{buildsummary.reason}} .flex-row(ng-if="buildsummary.build.started_at") .flex-grow-1 span(ng-show="buildsummary.build.complete") | {{(buildsummary.build.complete_at - buildsummary.build.started_at)| durationformat:'LLL' }} span(ng-show="!buildsummary.build.complete") | {{(buildsummary.now - buildsummary.build.started_at)| durationformat:'LLL' }} | #{' '}{{buildsummary.build.state_string | limitStringLengthTo:80}} | {{" "}} .label.bb-build-result(ng-class="results2class(buildsummary.build)") | {{results2text(buildsummary.build)}} .flex-row(ng-if="!buildsummary.build.started_at") | loading build details... 
ul.list-group li.list-group-item(ng-repeat="step in buildsummary.steps" ng-if="buildsummary.isStepDisplayed(step) && buildsummary.assignDisplayedStepNumber(step)" + "&& (step.display_num <= 3 || step.display_num >= buildsummary.getDisplayedStepCount()-3)") div.text-left(ng-if="buildsummary.getDisplayedStepCount() > 7 && step.display_num === 3") span.fa-lg ⋮ div.clearfix(ng-if="buildsummary.getDisplayedStepCount() <= 7 || step.display_num !== 3") span.pull-left span.badge-status(ng-class="results2class(step, 'pulse')") | {{step.number}} | {{" "}} span.pull-left | {{step.name | limitStringLengthTo:40}} span(ng-if="step.buildrequests.length") #{' '} {{step.builds.length}} builds, {{step.buildrequests.length - step.builds.length}} pending builds span | {{" ".repeat(10)}} span.pull-right(ng-if="step.started_at") span(ng-show="step.complete") | {{ step.duration| durationformat:'LLL' }} span(ng-show="!step.complete") | {{ buildsummary.now - step.started_at| durationformat:'LLL' }} | #{' '}{{step.state_string | limitStringLengthTo:40}} buildbot-3.4.0/www/base/src/app/common/directives/changedetails/000077500000000000000000000000001413250514000246465ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/changedetails/changedetails.directive.js000066400000000000000000000005471413250514000317620ustar00rootroot00000000000000class Changedetails { constructor() { return { replace: true, restrict: 'E', scope: { change: '=', compact: '=?' 
}, template: require('./changedetails.tpl.jade'), }; } } angular.module('common') .directive('changedetails', [Changedetails]); buildbot-3.4.0/www/base/src/app/common/directives/changedetails/changedetails.tpl.jade000066400000000000000000000043431413250514000310700ustar00rootroot00000000000000div.changedetails(style="width:100%;") div(style="width:100%;", ng-click="change.show_details = !change.show_details") .change-avatar(ng-if="!compact && change.author_email") a(ng-href="mailto:{{change.author_email}}", title="{{change.author_name}}") img(ng-src="avatar?email={{change.author_email | encodeURI}}") a(ng-if="change.revlink", ng-href="{{change.revlink}}", uib-tooltip="{{change.comments}}") | {{ change.comments.split("\n")[0] }} span(ng-if="!change.revlink", uib-tooltip="{{change.comments}}") | {{ change.comments.split("\n")[0] }} span(ng-if="!compact" uib-tooltip="{{change.when_timestamp | dateformat:'LLL'}}") | ({{ change.when_timestamp | timeago }}) i.fa.fa-chevron-circle-right.rotate.clickable(ng-class="{'fa-rotate-90':change.show_details}") div.anim-changedetails(ng-show="change.show_details") table.table.table-striped.table-condensed(ng-show="change.show_details") tr(ng-if="change.category") td Category td {{ change.category }} tr td Author td {{ change.author }} tr td Date td {{ change.when_timestamp | dateformat:'LLL'}} ({{ change.when_timestamp | timeago }}) tr(ng-show="change.codebase") td Codebase td {{ change.codebase }} tr(ng-show="change.repository") td Repository td {{ change.repository }} tr(ng-show="change.branch") td Branch td {{ change.branch }} tr td Revision td {{ change.revision }} tr td Properties td i.fa.fa-chevron-circle-right.rotate.clickable(ng-class="{'fa-rotate-90':change.show_props}", ng-click="change.show_props = !change.show_props") pre(ng-show="change.show_props", style="padding:unset; margin:unset; border:unset; background-color:unset") | {{ change.properties | json }} h5 Comment pre {{ change.comments }} h5 Changed files ul 
li(ng-repeat='file in change.files') {{file}} p(ng-hide="change.files.length") No files buildbot-3.4.0/www/base/src/app/common/directives/changelist/000077500000000000000000000000001413250514000241745ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/changelist/changelist.directive.js000066400000000000000000000017061413250514000306340ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Changelist { constructor() { return { replace: true, restrict: 'E', scope: {changes: '=?'}, template: require('./changelist.tpl.jade'), controller: '_changeListController' }; } } class _changeList { constructor($scope, dataUtilsService) { $scope.expandDetails = () => Array.from($scope.changes).map((change) => (change.show_details = true)) ; $scope.collapseDetails = () => Array.from($scope.changes).map((change) => (change.show_details = false)) ; } } angular.module('common') .directive('changelist', [Changelist]) .controller('_changeListController', ['$scope', 'dataUtilsService', _changeList]); buildbot-3.4.0/www/base/src/app/common/directives/changelist/changelist.tpl.jade000066400000000000000000000014501413250514000277400ustar00rootroot00000000000000.container-fluid .row .navbar.navbar-default .container-fluid .navbar-header .navbar-brand | {{changes.length}} changes .navbar-form.navbar-right .form-group .btn.btn-default(ng-click="collapseDetails()", title="Collapse all") i.fa.fa-minus .btn.btn-default(ng-click="expandDetails()", title="Expand all") i.fa.fa-plus .row ul.list-group li.list-group-item(ng-repeat="change in changes") a(ui-sref="changebuilds({changeid: change.changeid})") | See builds changedetails(change="change") 
buildbot-3.4.0/www/base/src/app/common/directives/connectionstatus/000077500000000000000000000000001413250514000254565ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/connectionstatus/connectionstatus.directive.js000066400000000000000000000023471413250514000334020ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class connectionstatus { constructor(RecursionHelper) { return { replace: true, restrict: 'E', scope: {}, template: require('./connectionstatus.tpl.jade'), compile: RecursionHelper.compile, controller: '_connectionstatusController' }; } } class _connectionstatus { constructor($scope, $timeout) { $scope.alertenabled = false; $scope.connectionlost = false; $scope.$on("mq.lost_connection", function() { $scope.connectionlost = true; return $scope.alertenabled = true; }); $scope.$on("mq.restored_connection", function() { $scope.connectionlost = false; $scope.alertenabled = true; return $timeout(() => $scope.alertenabled = false , 4000); }); } } angular.module('common') .directive('connectionstatus', ['RecursionHelper', connectionstatus]) .controller('_connectionstatusController', ['$scope', '$timeout', _connectionstatus]); buildbot-3.4.0/www/base/src/app/common/directives/connectionstatus/connectionstatus.less000066400000000000000000000003351413250514000317520ustar00rootroot00000000000000.connectionstatus { z-index: 1000; width: 100%; position: absolute; top: 20px; .alert { margin-left: auto; margin-right: auto; width: 300px; text-align: center; } } buildbot-3.4.0/www/base/src/app/common/directives/connectionstatus/connectionstatus.tpl.jade000066400000000000000000000004061413250514000325040ustar00rootroot00000000000000.connectionstatus.anim-fade(ng-show="alertenabled") .alert.alert-warning(ng-show="connectionlost") i.fa.fa-spin.fa-spinner |  Connection Lost. 
Retrying... .alert.alert-success(ng-hide="connectionlost") | Connection restored! buildbot-3.4.0/www/base/src/app/common/directives/forcefields/000077500000000000000000000000001413250514000243405ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/forcefields/boolfield.tpl.jade000066400000000000000000000002631413250514000277230ustar00rootroot00000000000000div.form-group .col-sm-10.col-sm-offset-2 .checkbox label input(type="checkbox", ng-model="field.value") | {{field.label}} buildbot-3.4.0/www/base/src/app/common/directives/forcefields/filefield.tpl.jade000066400000000000000000000010351413250514000277050ustar00rootroot00000000000000basefield label.control-label.col-sm-2(for="{{field.name}}") | {{field.label}} .col-sm-9 textarea.form-control(ng-if="field.safevalue !== false", rows="{{field.rows}}", ng-model="field.safevalue", ng-trim="false") label.control-label(ng-if="field.safevalue === false") {{field.value.length}} bytes file .col-sm-1 input(type="file", id="file-{{field.name}}", style="display:none;", fileread="field.value") label.btn.btn-default.btn-small(for="file-{{field.name}}") i.fa.fa-file buildbot-3.4.0/www/base/src/app/common/directives/forcefields/forcefields.directive.js000066400000000000000000000122711413250514000311430ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // This is the generic plugin-able field implementation // It will create and compile arbitrary field widget, without // parent template to have to know each field type in a big ng-switch // This is done by merging compile and link phasis, so that the template // includes directives whose types depend on the model. 
class Forcefield { constructor($log, $compile, RecursionHelper) { return { replace: true, restrict: 'E', scope: {field:"="}, compile(element, attrs) { return RecursionHelper.compile(element, function(scope, element, attrs) { let t; if (scope.field.type === 'nested') { t = scope.field.layout + "layout"; } else { t = scope.field.type + "field"; } element.html(`<${t}>`).show(); return $compile(element.contents())(scope); }); } }; } } // these directives, combined with "recursive" implement // the template of recursively nested field groups _.each(['verticallayout', 'simplelayout', 'tabslayout'], fieldtype => angular.module('common').directive(fieldtype, () => ({ replace: true, restrict: 'E', template: require(`./${fieldtype}.tpl.jade`), controller: [ "$scope", function($scope) { // filter out hidden fields, and nested params empty of full of hidden fields const filtered = []; for (let f of Array.from($scope.field.fields)) { if (f.hide) { continue; } if (f.type === "nested") { let all_hidden = true; for (let sf of Array.from(f.fields)) { if (!sf.hide) { all_hidden = false; } } if (all_hidden) { continue; } } filtered.push(f); } $scope.field.fields = filtered; return $scope.column_class = `col-sm-${(12 / $scope.field.columns).toString()}`; } ] }) ) ); // defines standard field directives which only have templates _.each([ 'textfield' , 'intfield', 'textareafield', 'listfield', 'boolfield'], fieldtype => angular.module('common').directive(fieldtype, () => ({ replace: false, restrict: 'E', scope: false, template: require(`./${fieldtype}.tpl.jade`), }) ) ); angular.module('common').directive('filefield', () => ({ replace: false, restrict: 'E', scope: false, // the template uses custom file input styling using trick from // https://tympanus.net/codrops/2015/09/15/styling-customizing-file-inputs-smart-way/ // which basically uses label(for="") to capture the click event and the ugly input(type="file") is just hidden template: require('./filefield.tpl.jade'), controller: [ 
"$scope", function($scope) { // If user selects a big file, then the UI will be completely blocked // while browser tries to display it in the textarea // so to avoid that we go through a safe value, and play the double binding game $scope.$watch("field.value", function(value) { if ((value != null ? value.length : undefined) > 10000) { $scope.field.safevalue = false; } else { $scope.field.safevalue = value; } }); $scope.$watch("field.safevalue", function(value) { if ((value != null) && (value !== false)) { $scope.field.value = value; } }); } ] }) ); angular.module('common').directive('fileread', () => ({ scope: { fileread: "=" }, // load the file's text via html5 FileReader API // note that for simplicity, we don't bother supporting older browsers link(scope, element, attributes) { element.bind("change", function(changeEvent) { const reader = new FileReader(); reader.onload = e => scope.$apply(() => scope.fileread = e.target.result) ; return reader.readAsText(changeEvent.target.files[0]); }); } }) ); angular.module('common') .directive('forcefield', ['$log', '$compile', 'RecursionHelper', Forcefield]); buildbot-3.4.0/www/base/src/app/common/directives/forcefields/intfield.tpl.jade000066400000000000000000000002521413250514000275600ustar00rootroot00000000000000basefield label.control-label.col-sm-2(for="{{field.name}}") | {{field.label}} .col-sm-10 input.form-control(type='text', ng-model="field.value") buildbot-3.4.0/www/base/src/app/common/directives/forcefields/listfield.tpl.jade000066400000000000000000000007511413250514000277450ustar00rootroot00000000000000basefield label.control-label.col-sm-2(for="{{field.name}}") | {{field.label}} .col-sm-10 select.form-control(ng-model="field.value", ng-if="!field.multiple", ng-options="v for v in field.choices") select.form-control(ng-model="field.value", multiple, ng-multiple="true", ng-if="field.multiple", ng-options="v for v in field.choices") input.select-editable.form-control(ng-if="!field.strict && !field.multiple", 
type='text', ng-model="field.value") buildbot-3.4.0/www/base/src/app/common/directives/forcefields/nestedfield.tpl.jade000066400000000000000000000007061413250514000302540ustar00rootroot00000000000000div.form-horizontal div(ng-repeat='field in fields', class="{{column_class}}") div(ng-if='field.type === "nested"') .panel.panel-default .panel-heading(ng-if="field.name") | {{field.name}} .panel-body nestedfield(fields="field.fields", columns="field.columns") div(ng-if='field.type!=="nested" && !field.hide') forcefield(field="field") buildbot-3.4.0/www/base/src/app/common/directives/forcefields/simplelayout.tpl.jade000066400000000000000000000002311413250514000305060ustar00rootroot00000000000000div.form-horizontal div(ng-repeat='field in field.fields | filter:{hide:false}', class="{{column_class}}") forcefield(field="field") buildbot-3.4.0/www/base/src/app/common/directives/forcefields/styles.less000066400000000000000000000004311413250514000265510ustar00rootroot00000000000000input.select-editable { /* we put the input on top of the select, with a width of 90% so that the right side of the select if visible */ position:absolute; top:0; border:none; margin:2px; width:90%; height:29px; } buildbot-3.4.0/www/base/src/app/common/directives/forcefields/tabslayout.tpl.jade000066400000000000000000000004061413250514000301520ustar00rootroot00000000000000div uib-tabset(justified="true") uib-tab(ng-repeat='field in field.fields | filter:{hide:false}', heading="{{field.tablabel}}", class="{{column_class}}") forcefield(recursive, field="field") buildbot-3.4.0/www/base/src/app/common/directives/forcefields/textareafield.tpl.jade000066400000000000000000000002671413250514000306110ustar00rootroot00000000000000basefield label.control-label.col-sm-2(for="{{field.name}}") | {{field.label}} .col-sm-10 textarea.form-control(rows="{{field.rows}}", ng-model="field.value") 
buildbot-3.4.0/www/base/src/app/common/directives/forcefields/textfield.tpl.jade000066400000000000000000000003211413250514000277470ustar00rootroot00000000000000basefield label.control-label.col-sm-2(for="{{field.name}}") | {{field.label}} .col-sm-10 input.form-control(type='text', ng-model="field.value", autocomplete="on" id="{{field.name}}") buildbot-3.4.0/www/base/src/app/common/directives/forcefields/verticallayout.tpl.jade000066400000000000000000000004421413250514000310320ustar00rootroot00000000000000.panel.panel-default .panel-heading(ng-if="field.label") | {{field.label}} .panel-body div.form-horizontal div(ng-repeat='cfield in field.fields | filter:{hide:false}', class="{{column_class}}") forcefield(field="cfield") buildbot-3.4.0/www/base/src/app/common/directives/lineplot/000077500000000000000000000000001413250514000237015ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/lineplot/lineplot.directive.js000066400000000000000000000066671413250514000300610ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class LinePlot { constructor(d3Service, $filter) { return { replace: true, restrict: 'E', scope: { data: '=', xattr: '@', yattr: '@', xunit: '@', yunit: '@', width: '@', height: '@' }, template: require('./lineplot.tpl.jade'), link(scope, elem, attrs){ d3Service.get().then(d3 => linkerWithD3(scope, d3, $filter, elem)); } }; } } // an half generic line plot for usage in buildbot views // we try to be generic enough to try and hope this can be reused for other kind of plot // while not over-engineer it too much var linkerWithD3 = function($scope, d3, $filter, elem) { let x, xaccessor, xscaledaccessor, y, yaccessor, yscaledaccessor; const margin = { top: 40, right: 20, bottom: 30, left: 100 }; const width = 
+$scope.width - (margin.left) - (margin.right); const height = +$scope.height - (margin.top) - (margin.bottom); // set the ranges if ($scope.xunit === 'timestamp') { x = d3.time.scale().range([ 0, width ]) .nice(); const { xattr } = $scope; xaccessor = d => new Date(d[xattr]*1000); xscaledaccessor = d => x(new Date(d[xattr]*1000)); } if (($scope.yunit === 'seconds') || ($scope.yunit === 'percent')) { y = d3.scale.linear().range([ height, 0 ]) .nice(); const { yattr } = $scope; yaccessor = d => d[yattr]; yscaledaccessor = d => y(d[yattr]); } // define the line const valueline = d3.svg.line().x(xscaledaccessor).y(yscaledaccessor).interpolate("bundle"); const svg = d3.select(elem[0]).attr('width', width + margin.left + margin.right).attr('height', height + margin.top + margin.bottom); const base_g = svg.append('g').attr('transform', `translate(${margin.left},${margin.top})`); const linepath = base_g.append('path'); const xaxis_g = base_g.append('g').attr("class", "axis"); const yaxis_g = base_g.append('g').attr("class", "axis"); $scope.$watch("data", function(data) { if ((data == null)) { return; } // Scale the range of the data x.domain(d3.extent(data, xaccessor)); y.domain([ d3.min(data, yaccessor), d3.max(data, yaccessor) ]); // Add the valueline path. 
linepath.data([ data ]).attr('class', 'line').attr('d', valueline); // Add the X Axis const xAxis = d3.svg.axis() .scale(x) .ticks(5); xaxis_g.attr('transform', `translate(0,${height})`).call(xAxis); const yAxis = d3.svg.axis() .orient('left') .scale(y) .ticks(3); if ($scope.yunit === 'seconds') { // duration format is defined in moment.filter.coffee yAxis.tickFormat($filter('durationformat')); } // Add the Y Axis yaxis_g.call(yAxis); }); }; angular.module('common') .directive('linePlot', ['d3Service', '$filter', LinePlot]); buildbot-3.4.0/www/base/src/app/common/directives/lineplot/lineplot.tpl.jade000066400000000000000000000000161413250514000271470ustar00rootroot00000000000000svg.bblineplotbuildbot-3.4.0/www/base/src/app/common/directives/lineplot/styles.less000066400000000000000000000004121413250514000261110ustar00rootroot00000000000000 .bblineplot { path { stroke: steelblue; stroke-width: 2; fill: none; } .axis path, .axis line { fill: none; stroke: grey; stroke-width: 1; shape-rendering: crispEdges; } }buildbot-3.4.0/www/base/src/app/common/directives/loginbar/000077500000000000000000000000001413250514000236505ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/loginbar/loginbar.directive.js000066400000000000000000000027051413250514000277640ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Loginbar { constructor() { return { controller: '_loginbarController', replace: true, restrict: 'E', scope: {}, template: require('./loginbar.tpl.jade'), }; } } class AutoLogin { constructor(config) { if ((config.auth != null) && config.auth.autologin && config.user.anonymous && config.auth.oauth2) { window.stop(); document.location = `auth/login?redirect=${document.location.hash.substr(1)}`; } } } class _loginbar { constructor($scope, config, $http, $location) { const baseurl = 
$location.absUrl().split("#")[0]; $scope.username = ""; $scope.password = ""; $scope.loginCollapsed = 1; $scope.config = config; // as the loginbar is never reloaded, we need to update the redirect // when the hash changes $scope.$watch((() => document.location.hash), () => $scope.redirect = document.location.hash.substr(1)); _.assign($scope, config.user); } } angular.module('common') .directive('loginbar', [Loginbar]) .controller('_loginbarController', ['$scope', 'config', '$http', '$location', _loginbar]); angular.module('app') .config(['config', AutoLogin]); buildbot-3.4.0/www/base/src/app/common/directives/loginbar/loginbar.tpl.jade000066400000000000000000000025441413250514000270750ustar00rootroot00000000000000ul.nav.navbar-nav.navbar-right(ng-show="config.auth.name != 'NoAuth'") li.dropdown(ng-show="anonymous", ng-class="loginCollapsed ? '':'open'") a(ng-click="loginCollapsed = !loginCollapsed") | Anonymous b.caret ul.dropdown-menu(uib-dropdown-menu) li a(href="auth/login?redirect={{redirect}}") span(ng-hide="config.auth.oauth2") i.fa.fa-sign-in |  Login span(ng-show="config.auth.oauth2") i.fa(ng-class="config.auth.fa_icon") |  Login with {{config.auth.name}} li.dropdown(uib-dropdown, ng-hide="anonymous") a.dropdown-toggle(uib-dropdown-toggle) img.avatar(ng-if="config.avatar_methods.length" ng-src="avatar?username={{username | encodeURI}}&email={{email | encodeURI}}") span(ng-if="!config.avatar_methods.length") | {{full_name || username}} b.caret ul.dropdown-menu(uib-dropdown-menu) li.dropdown-header(ng-if="full_name || email") i.fa.fa-user span {{ full_name }} {{ email }} li.divider li a(href="auth/logout?redirect={{redirect}}") i.fa.fa-sign-out | Logout buildbot-3.4.0/www/base/src/app/common/directives/properties/000077500000000000000000000000001413250514000242475ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/properties/properties.directive.js000066400000000000000000000020041413250514000307520ustar00rootroot00000000000000class 
Properties { constructor() { return { replace: true, restrict: 'E', scope: {properties: '='}, template: require('./properties.tpl.jade'), controller: '_propertiesController', }; } } function _properties($scope) { $scope.copy = function(value) { value = JSON.stringify(value); if (navigator.clipboard && navigator.clipboard.writeText) { navigator.clipboard.writeText(value); } else { var element = document.createElement('textarea'); element.style = 'position:absolute; width:1px; height:1px; top:-10000px; left:-10000px'; element.value = value; document.body.appendChild(element); element.select(); document.execCommand('copy'); document.body.removeChild(element); } } } angular.module('common') .directive('properties', [Properties]) .controller('_propertiesController', ['$scope', _properties]); buildbot-3.4.0/www/base/src/app/common/directives/properties/properties.tpl.jade000066400000000000000000000010711413250514000300650ustar00rootroot00000000000000table.table.table-hover.table-striped.table-condensed thead tr th.text-left Name th.text-center Value th.text-right Source tbody tr(ng-repeat="(name, value) in properties | publicFields") td.text-left {{ name }} td.text-left pre(style="vertical-align:top; padding:unset; margin:unset; display:inline-block; border:unset; background-color:unset") | {{ value[0] | json }} i.fa.fa-copy.clickable(style="vertical-align:top; padding:0 0.5ex" ng-click="copy(value[0])") td.text-right {{ value[1] }} buildbot-3.4.0/www/base/src/app/common/directives/rawdata/000077500000000000000000000000001413250514000234765ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/rawdata/rawdata.directive.js000066400000000000000000000012211413250514000274300ustar00rootroot00000000000000class Rawdata { constructor(RecursionHelper) { return { replace: true, restrict: 'E', scope: {data:'='}, template: require('./rawdata.tpl.jade'), compile: RecursionHelper.compile, controller: '_rawdataController' }; } } class _rawdata { 
constructor($scope) { $scope.isObject = v => _.isObject(v) && !_.isArray(v); $scope.isArrayOfObjects = v => _.isArray(v) && (v.length > 0) && _.isObject(v[0]); } } angular.module('common') .directive('rawdata', ['RecursionHelper', Rawdata]) .controller('_rawdataController', ['$scope', _rawdata]); buildbot-3.4.0/www/base/src/app/common/directives/rawdata/rawdata.tpl.jade000066400000000000000000000013361413250514000265470ustar00rootroot00000000000000dl.dl-horizontal div(ng-if="v !== undefined", ng-repeat="(k,v) in data._raw_data ? data._raw_data : data") dt {{k}} dd(ng-if="!isObject(v) && !isArrayOfObjects(v)") {{v}}  dd(ng-if="isArrayOfObjects(v)") i.fa.fa-chevron-circle-right.rotate(ng-class="{'fa-rotate-90':expanded}", ng-click="expanded = !expanded") ul(ng-if="expanded") li(ng-repeat="(kk,vv) in v") rawdata(data="vv") span(ng-if="!expanded") {{v}} dd(ng-if="isObject(v)") i.fa.fa-chevron-circle-right.rotate(ng-class="{'fa-rotate-90':expanded}", ng-click="expanded = !expanded") div(ng-if="expanded") rawdata(data="v") buildbot-3.4.0/www/base/src/app/common/directives/windowtitle/000077500000000000000000000000001413250514000244245ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/directives/windowtitle/windowtitle.directive.js000066400000000000000000000025441413250514000313150ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class WindowTitle { constructor($rootScope, $timeout, $stateParams, $window, faviconService) { return { restrict: 'A', link() { const listener = (event, toState) => $timeout(function() { faviconService.setFavIcon(); if (toState.data && toState.data.pageTitle) { if (typeof(toState.data.pageTitle) === "function") { $window.document.title = toState.data.pageTitle($stateParams); } else { $window.document.title = toState.data.pageTitle; } } else if (toState.data && 
toState.data.caption) { $window.document.title = `Buildbot: ${toState.data.caption}`; } else { $window.document.title = 'Buildbot'; } }) ; $rootScope.$on('$stateChangeSuccess', listener); } }; } } angular.module('common') .directive('windowTitle', ['$rootScope', '$timeout', '$stateParams', '$window', 'faviconService', WindowTitle]); buildbot-3.4.0/www/base/src/app/common/filters/000077500000000000000000000000001413250514000213625ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/filters/encodeURI.filter.js000066400000000000000000000003671413250514000250270ustar00rootroot00000000000000var encodeURI = function ($filter) { function encodeURI(input) { return window.encodeURIComponent((input == null) ? "" : input); } return encodeURI; } angular.module('common') .filter('encodeURI', ['$filter', encodeURI]); buildbot-3.4.0/www/base/src/app/common/filters/limitStringLength.filter.js000066400000000000000000000005611413250514000266550ustar00rootroot00000000000000var limitStringLength = function ($filter) { function limitStringLength(input, limit) { var newContent = $filter('limitTo')(input, limit); if(input.length > limit) { newContent += ' ...'; } return newContent; } return limitStringLength; } angular.module('common') .filter('limitStringLengthTo', ['$filter', limitStringLength]); buildbot-3.4.0/www/base/src/app/common/filters/moment/000077500000000000000000000000001413250514000226615ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/filters/moment/moment.constant.js000066400000000000000000000001311413250514000263410ustar00rootroot00000000000000 import * as moment from 'moment'; angular.module("common").constant("MOMENT", moment); buildbot-3.4.0/www/base/src/app/common/filters/moment/moment.filter.js000066400000000000000000000024401413250514000260020ustar00rootroot00000000000000class Timeago { constructor(MOMENT) { return time => MOMENT.unix(time).fromNow(); } } class Duration { constructor(MOMENT) { return time => 
MOMENT.unix(time).from(MOMENT.unix(0),1); } } class Durationformat { constructor(MOMENT) { return function(time) { if (time < 0) return "" const d = MOMENT.duration(time * 1000); const m = MOMENT.utc(d.asMilliseconds()); const days = Math.floor(d.asDays()); if (days) { let plural = ""; if (days > 1) { plural = "s"; } return `${days} day${plural} ` + m.format('H:mm:ss'); } if (d.hours()) { return m.format('H:mm:ss'); } if (d.minutes()) { return m.format('m:ss'); } else { return m.format('s') + " s"; } }; } } class Dateformat { constructor(MOMENT) { return (time, f) => MOMENT.unix(time).format(f); } } angular.module('common') .filter('timeago', ['MOMENT', Timeago]) .filter('duration', ['MOMENT', Duration]) .filter('durationformat', ['MOMENT', Durationformat]) .filter('dateformat', ['MOMENT', Dateformat]); buildbot-3.4.0/www/base/src/app/common/filters/publicFields.filter.js000066400000000000000000000014011413250514000256050ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class PublicFields { constructor() { return function(object) { if ((object == null)) { return object; } if (object._publicfields == null) { object._publicfields = {}; } for (let k in object) { const v = object[k]; if ((k.indexOf('_') !== 0) && object.hasOwnProperty(k)) { object._publicfields[k] = v; } } return object._publicfields; }; } } angular.module('common') .filter('publicFields', [PublicFields]); buildbot-3.4.0/www/base/src/app/common/services/000077500000000000000000000000001413250514000215355ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/services/ansicodes/000077500000000000000000000000001413250514000235055ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/services/ansicodes/ansicodes.service.js000066400000000000000000000150751413250514000274620ustar00rootroot00000000000000/* * decaffeinate 
suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // simple naive (think wrong) implementation of the spec: // https://en.wikipedia.org/wiki/ANSI_escape_code // we only support color modes, and we will just ignore (drop from the log) all others commands // One \x1b[NNm mode will change the class in the log to ansiNN // We support concatenated modes change via syntax like \x1b[1;33m // which is used for 'bright' colors. Previous example, will then convert to class="ansi1 ansi33" // Nested mode will work, e.g \x1b[1m\x1b[33m is equivalent to \x1b[1;33m. // \x1b[39m resets the color to default // This parser does not work across lines // css class will be reset at each new line const ANSI_RE = /^((\d+)(;\d+)*)?([a-zA-Z])/; class ansicodesService { constructor($log) { return { parseAnsiSgr(ansi_entry) { // simple utility to extract ansi sgr (Select Graphic Rendition) codes, // and ignore other codes. 
// Invalid codes are restored let classes = []; const res = ANSI_RE.exec(ansi_entry); if (res) { const mode = res[4]; ansi_entry = ansi_entry.substr(res[0].length); if (mode === 'm') { classes = res[1]; if (classes) { classes = res[1].split(";"); } else { classes = []; } } } else { // illegal code, restore the CSI ansi_entry = `\x1b[${ansi_entry}`; } return [ansi_entry, classes]; }, ansiSgrToCss(ansi_classes, css_classes) { if (ansi_classes.length === 0) { return css_classes; } const fgbg = {'38': 'fg', '48': 'bg'}; if (fgbg.hasOwnProperty(ansi_classes[0])) { if (ansi_classes.length !== 3) { return {}; } if (ansi_classes[1] === '5') { css_classes = { }; // (simplification) always reset color css_classes[fgbg[ansi_classes[0]] + '-' + ansi_classes[2]] = true; } } else { for (let i of Array.from(ansi_classes)) { if ((i === '39') || (i === '0')) { // "color reset" code and "all attributes off" code css_classes = {}; } else { css_classes[i] = true; } } } return css_classes; }, splitAnsiLine(line) { let i; const html_entries = []; let first_entry = true; i = 0; let css_classes = {}; for (let ansi_entry of Array.from(line.split(/\x1b\[/))) { let css_class = ""; if (!first_entry) { let ansi_classes; [ansi_entry, ansi_classes] = Array.from(this.parseAnsiSgr(ansi_entry)); css_classes = this.ansiSgrToCss(ansi_classes, css_classes); css_class = ((() => { const result = []; for (i in css_classes) { const v = css_classes[i]; result.push(`ansi${i}`); } return result; })()).join(' '); } if (ansi_entry.length > 0) { html_entries.push({class:css_class, text:_.escape(ansi_entry)}); } first_entry = false; } return html_entries; }, ansi2html(line) { const entries = this.splitAnsiLine(line); let html = ""; for (let entry of Array.from(entries)) { html += `${entry.text}`; } return html; }, injectStyle() { let node = document.getElementById("ansicolors"); if (node) { return; } node = document.createElement('style'); node.id = "ansicolors"; node.innerHTML = this.generateStyle(); 
document.body.appendChild(node); }, generateStyle() { let i; let ret = ""; // first there are the standard 16 colors const colors = [ '000','800','080','880','008','808','088','ccc', '888','f00','0f0','ff0','00f','f0f','0ff','fff' ]; // 6x6x6 color cube encoded in 3 digits hex form // note the non-linearity is based on this table // http://www.calmar.ws/vim/256-xterm-24bit-rgb-color-chart.html const clr = ['0', '6', '9', 'a', 'd', 'f']; for (let red = 0; red <= 5; red++) { for (let green = 0; green <= 5; green++) { for (let blue = 0; blue <= 5; blue++) { colors.push(clr[red] + clr[green] + clr[blue]); } } } // greyscale ramp encoded in 6 digits hex form for (i = 1; i <= 24; i++) { let c = Math.floor((i*256)/26).toString(16); if (c.length === 1) { c = `0${c}`; } colors.push(c + c + c); } for (i = 0; i < colors.length; i++) { const color = colors[i]; ret += `pre.log .ansifg-${i} { color: #${color}; }\n`; ret += `pre.log .ansibg-${i} { background-color: #${color}; }\n`; } return ret; } }; } } angular.module('common') .factory('ansicodesService', ['$log', ansicodesService]); buildbot-3.4.0/www/base/src/app/common/services/ansicodes/ansicodes.service.spec.js000066400000000000000000000103041413250514000304010ustar00rootroot00000000000000beforeEach(angular.mock.module('app')); describe('ansicode service', function() { let ansicodesService = null; const injected = $injector => ansicodesService = $injector.get('ansicodesService'); beforeEach(inject(injected)); const runTest = function(string, ...expected) { const ret = ansicodesService.parseAnsiSgr(string); expect(ret).toEqual(expected); }; it("test_ansi0m", () => runTest("mfoo", "foo", [])); it("test ansi1m" , () => runTest("33mfoo", "foo", ["33"])); it("test ansi2m" , () => runTest("1;33mfoo", "foo", ["1", "33"])); it("test ansi5m" , () => runTest("1;2;3;4;33mfoo", "foo", ["1", "2", "3", "4", "33"])); it("test ansi_notm" , () => runTest("33xfoo", "foo", [])); it("test ansi_invalid" , () => runTest("<>foo", "\x1b[<>foo", 
[])); it("test ansi_invalid_start_by_semicolon" , () => runTest(";3m", "\x1b[;3m", [])); it('should provide correct split_ansi_line', function() { const ret = ansicodesService.splitAnsiLine("\x1b[36mDEBUG [plugin]: \x1b[39mLoading plugin karma-jasmine."); expect(ret).toEqual([ {class: 'ansi36', text: 'DEBUG [plugin]: '}, {class: '', text: 'Loading plugin karma-jasmine.'}]); }); it('should provide correct split_ansi_line for nested codes', function() { const ret = ansicodesService.splitAnsiLine("\x1b[1m\x1b[36mDEBUG [plugin]: \x1b[39mLoading plugin karma-jasmine."); expect(ret).toEqual([ {class: 'ansi1 ansi36', text: 'DEBUG [plugin]: '}, {class: '', text: 'Loading plugin karma-jasmine.'}]); }); it('should provide correct split_ansi_line for reset codes', function() { // code sequence from protractor const ret = ansicodesService.splitAnsiLine("\x1b[32m.\x1b[0m\x1b[31mF\x1b[0m\x1b[32m.\x1b[39m\x1b[32m.\x1b[0m"); expect(ret).toEqual([ {class: "ansi32", text: "."}, {class: "ansi31", text: "F"}, {class: "ansi32", text: "."}, {class: "ansi32", text: "."}, ]); }); it('should provide correct split_ansi_line for 256 colors', function() { const ret = ansicodesService.splitAnsiLine("\x1b[48;5;71mDEBUG \x1b[38;5;72m[plugin]: \x1b[39mLoading plugin karma-jasmine."); expect(ret).toEqual([ {class: 'ansibg-71', text: 'DEBUG '}, {class: 'ansifg-72', text: '[plugin]: '}, {class: '', text: 'Loading plugin karma-jasmine.'}]); }); it('should provide correct split_ansi_line for joint codes', function() { const ret = ansicodesService.splitAnsiLine("\x1b[1;36mDEBUG [plugin]: \x1b[39mLoading plugin karma-jasmine."); expect(ret).toEqual([ {class: 'ansi1 ansi36', text: 'DEBUG [plugin]: '}, {class: '', text: 'Loading plugin karma-jasmine.'}]); }); it('should provide correct split_ansi_line for unsupported modes', function() { const val = "\x1b[1A\x1b[2KPhantomJS 1.9.8 (Linux 0.0.0)"; const ret = ansicodesService.splitAnsiLine(val); expect(ret).toEqual([ { class: '', text: 'PhantomJS 1.9.8 
(Linux 0.0.0)'}]); }); it('should provide correct ansi2html', function() { const ret = ansicodesService.ansi2html("\x1b[36mDEBUG [plugin]: \x1b[39mLoading plugin karma-jasmine."); expect(ret).toEqual("DEBUG [plugin]: Loading plugin karma-jasmine."); }); it('should provide correct color cube generator', function() { const ret = ansicodesService.generateStyle(); expect(ret).toContain('pre.log .ansibg-232 { background-color: #090909; }'); expect(ret).toContain('pre.log .ansibg-241 { background-color: #626262; }'); expect(ret).toContain('pre.log .ansifg-209 { color: #f96; }'); }); it('should inject generated style only once', function() { const before = document.getElementsByTagName("style").length; ansicodesService.injectStyle(); const after1 = document.getElementsByTagName("style").length; ansicodesService.injectStyle(); const after2 = document.getElementsByTagName("style").length; expect(after1).toEqual(before + 1); expect(after2).toEqual(before + 1); }); }); buildbot-3.4.0/www/base/src/app/common/services/buildercache/000077500000000000000000000000001413250514000241475ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/services/buildercache/buildercache.service.js000066400000000000000000000027541413250514000305660ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // builder data used everywhere in the UI, so we implement a simple cache // TODO this caching mechanism needs to be implemented eventually in data module // Its much more complicated to do this generically, and keep the event mechanism, // this is why we do this temporary workaround // Objects returned by this service cannot use onNew/onUpdate mechanism of data module (as they are shared) class buildersService { constructor($log, dataService) { // we use an always one dataService instance const data = dataService.open(); 
const cache = {}; /* make only one full list of builders. this is much faster than querying builders one by one*/ data.getBuilders().onNew = builder => { let id = builder.builderid if (cache.hasOwnProperty(id)) { _.assign(cache[id], builder) } else { cache[id] = builder } } return { getBuilder(id) { if (cache.hasOwnProperty(id)) { return cache[id] } else { cache[id] = {} return cache[id] } } }; } } angular.module('common') .factory('buildersService', ['$log', 'dataService', buildersService]); buildbot-3.4.0/www/base/src/app/common/services/datagrouper/000077500000000000000000000000001413250514000240525ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/services/datagrouper/datagrouper.service.js000066400000000000000000000065371413250514000303770ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS104: Avoid inline assignments * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // this function is meant to group builds into builders, but is written generically // so that it can group any collection into another collection like a database join class dataGrouperService { constructor() { return { groupBy(collection1, collection2, joinid, attribute, joinlist) { // @param collection1: collection holding the groups // @param collection2: collection that will be split into the collection1 // @param joinid: the id that should be present in both collection items, // and meant to match them // @param attribute: the collection1 item's attribute where to store collection2 groups // @param joinlist: optional attribute of collection2 items if the collection2 // is pointing to several item of collection1 const temp_dict = {}; const { onNew } = collection1; collection1.onNew = function(item) { if (temp_dict.hasOwnProperty(item[joinid])) { 
item[attribute] = temp_dict[item[joinid]]; } onNew(item); }; if (joinlist != null) { collection2.onNew = item => item[joinlist] != null ? item[joinlist].forEach(function(item2) { // the collection1 might not be yet loaded, so we need to store the worker list let group; if (collection1.hasOwnProperty(item2[joinid])) { let base; group = (base = collection1.get(item2[joinid]))[attribute] != null ? base[attribute] : (base[attribute] = []); } else { group = temp_dict[item2[joinid]] != null ? temp_dict[item2[joinid]] : (temp_dict[item2[joinid]] = []); } if (!Array.from(group).includes(item)) { group.push(item); } }) : undefined ; } else { collection2.onNew = function(item) { // the collection1 might not be yet loaded, so we need to store the worker list let group; if (collection1.hasOwnProperty(item[joinid])) { let base; group = (base = collection1.get(item[joinid]))[attribute] != null ? base[attribute] : (base[attribute] = []); } else { group = temp_dict[item[joinid]] != null ? temp_dict[item[joinid]] : (temp_dict[item[joinid]] = []); } group.push(item); }; } } }; } } angular.module('common') .factory('dataGrouperService', [dataGrouperService]); buildbot-3.4.0/www/base/src/app/common/services/favicon/000077500000000000000000000000001413250514000231625ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/services/favicon/favicon.service.js000066400000000000000000000044361413250514000266130ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ /* Favicon service */ class FaviconService { constructor(RESULTS_COLOR, resultsService, $http){ return { setFavIcon(build_or_step){ if ((build_or_step == null)) { // by default, we take the original icon document.getElementById('bbicon').href = "img/icon.png"; return; } 
$http.get("img/icon16.svg").then(function(data) { // if there is a build or step associated to this page // we color the icon with result's color // We the raster the SVG to PNG, so that it can be displayed as favicon let color; ({ data } = data); const canvas = document.createElement('canvas'); canvas.width = (canvas.height = '300'); const ctx = canvas.getContext('2d'); const results_text = resultsService.results2text(build_or_step); if (_.has(RESULTS_COLOR, results_text)) { color = RESULTS_COLOR[results_text]; } else { color = '#E7D100'; } data = data.replace("#8da6d8", color); const DOMURL = window.URL || window.webkitURL || window; const img = new Image; const svg = new Blob([ data ], {type: 'image/svg+xml'}); const url = DOMURL.createObjectURL(svg); img.onload = function() { ctx.drawImage(img, 0, 0); document.getElementById('bbicon').href = canvas.toDataURL(); return DOMURL.revokeObjectURL(url); }; img.crossOrigin = 'Anonymous'; img.src = url; }); } }; } } angular.module('common') .factory('faviconService', ['RESULTS_COLOR', 'resultsService', '$http', FaviconService]); buildbot-3.4.0/www/base/src/app/common/services/results/000077500000000000000000000000001413250514000232365ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/services/results/results.service.js000066400000000000000000000027761413250514000267500ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class resultsService { constructor($log, RESULTS, RESULTS_TEXT) { return { results: RESULTS, resultsTexts: RESULTS_TEXT, results2class(build_or_step, pulse) { let ret = "results_UNKNOWN"; if (build_or_step != null) { if ((build_or_step.results != null) && _.has(RESULTS_TEXT, build_or_step.results)) { ret = `results_${RESULTS_TEXT[build_or_step.results]}`; } if ((build_or_step.complete === false) && (build_or_step.started_at > 0)) { ret = 
'results_PENDING'; if (pulse != null) { ret += ` ${pulse}`; } } } return ret; }, results2text(build_or_step) { let ret = "..."; if (build_or_step != null) { if ((build_or_step.results != null) && _.has(RESULTS_TEXT, build_or_step.results)) { ret = RESULTS_TEXT[build_or_step.results]; } } return ret; } }; } } angular.module('common') .factory('resultsService', ['$log', 'RESULTS', 'RESULTS_TEXT', resultsService]); buildbot-3.4.0/www/base/src/app/common/services/results/results.service.spec.js000066400000000000000000000037321413250514000276720ustar00rootroot00000000000000beforeEach(angular.mock.module('app')); describe('results service', function() { let resultsService = null; const injected = $injector => resultsService = $injector.get('resultsService'); beforeEach(inject(injected)); it('should provide correct results2class', function() { const { results } = resultsService; const results2class = r => resultsService.results2class({results: r}); expect(results2class(results.SUCCESS)).toBe("results_SUCCESS"); expect(results2class(results.RETRY)).toBe("results_RETRY"); expect(results2class(1234)).toBe("results_UNKNOWN"); expect(resultsService.results2class(undefined)).toBe("results_UNKNOWN"); expect(resultsService.results2class({results:undefined})).toBe("results_UNKNOWN"); expect(resultsService.results2class({ results:undefined, complete:false, started_at:undefined })).toBe("results_UNKNOWN"); expect(resultsService.results2class({ results:undefined, complete:false, started_at:10 } , "pulse" )).toBe("results_PENDING pulse"); }); it('should provide correct results2Text', function() { const { results } = resultsService; const results2text = r => resultsService.results2text({results: r}); expect(results2text(results.SUCCESS)).toBe("SUCCESS"); expect(results2text(results.RETRY)).toBe("RETRY"); expect(results2text(1234)).toBe("..."); expect(resultsService.results2text(undefined)).toBe("..."); expect(resultsService.results2text({results:undefined})).toBe("..."); 
expect(resultsService.results2text({ results:undefined, complete:false, started_at:undefined })).toBe("..."); expect(resultsService.results2text({ results:undefined, complete:false, started_at:10 })).toBe("..."); }); }); buildbot-3.4.0/www/base/src/app/common/services/settings/000077500000000000000000000000001413250514000233755ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/common/services/settings/settings.service.js000066400000000000000000000077671413250514000272530ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS206: Consider reworking classes to avoid initClass * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class bbSettings { static initClass() { this.prototype.$get = [ function() { let groupAndSettingName, groupName, item, settingName; const self = this; if (self.ui_default_config != null) { for (let settingSelector in self.ui_default_config) { const v = self.ui_default_config[settingSelector]; groupAndSettingName = settingSelector.split('.'); if (groupAndSettingName.length !== 2) { console.log(`bad setting name ${settingSelector}`); continue; } [groupName, settingName] = Array.from(groupAndSettingName); if ((self.groups[groupName] == null)) { console.log(`bad setting name ${settingSelector}: group does not exist`); continue; } for (item of Array.from(self.groups[groupName].items)) { if ((item.name === settingName) && (item.value === item.default_value)) { item.value = v; } } } } return { getSettingsGroups() { return self.groups; }, getSettingsGroup(group){ const ret = {}; for (item of Array.from(self.groups[group].items)) { ret[item.name] = item; } return ret; }, save() { localStorage.setItem('settings', angular.toJson(self.groups)); return null; }, getSetting(settingSelector) { groupAndSettingName = 
settingSelector.split('.'); groupName = groupAndSettingName[0]; settingName = groupAndSettingName[1]; if (self.groups[groupName] != null) { for (let setting of Array.from(self.groups[groupName].items)) { if (setting.name === settingName) { return setting; } } } else { return undefined; } } }; } ]; } constructor(config) { this.groups = {}; this.ui_default_config = config.ui_default_config; } _mergeNewGroup(oldGroup, newGroup) { if ((newGroup == null)) { return undefined; } if ((oldGroup == null)) { for (let item of Array.from(newGroup.items)) { item.value = item.default_value; } return newGroup; } else { for (let newItem of Array.from(newGroup.items)) { newItem.value = newItem.default_value; for (let oldItem of Array.from(oldGroup.items)) { if ((newItem.name === oldItem.name) && (oldItem.value != null)) { newItem.value = oldItem.value; } } } return newGroup; } } addSettingsGroup(group) { const storageGroups = angular.fromJson(localStorage.getItem('settings')) || {}; if (group.name == null) { throw Error(`Group (with caption : ${group.caption}) must have a correct name property.`); } const newGroup = this._mergeNewGroup(storageGroups[group.name], group); this.groups[newGroup.name] = newGroup; return this.groups; } } bbSettings.initClass(); angular.module('common') .provider('bbSettingsService', ['config', bbSettings]); buildbot-3.4.0/www/base/src/app/common/services/settings/settings.service.spec.js000066400000000000000000000264461413250514000301770ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('settingsService', function() { let bbSettingsServiceProviderRef = ""; beforeEach(angular.mock.module('common', function(bbSettingsServiceProvider) { bbSettingsServiceProviderRef = bbSettingsServiceProvider; bbSettingsServiceProvider.ui_default_config = { 'User.config_overriden': 100 }; 
localStorage.clear(); bbSettingsServiceProvider.addSettingsGroup({ name:'User', caption: 'User related settings', items:[{ type:'bool', name:'checkbox1', default_value: false } , { type:'integer', name:'config_overriden', default_value: 10 } , { type:'choices', name:'radio', default_value: 'radio1', answers: [ { name: 'radio1' }, { name: 'radio2' } ] } ]}); bbSettingsServiceProvider.addSettingsGroup({ name:'Release', caption: 'Release related settings', items:[{ type:'bool', name:'checkbox_release', default_value: false } , { type:'bool', name:'checkbox_release2', default_value: false } , { type:'bool', name:'checkbox_release3', default_value: false } , { type:'choices', name, default_value: 'radio1', answers: [ { name: 'radio1' }, { name: 'radio2' } ] } ]}); }) ); it('should merge groups when old group has values already set', inject(function(bbSettingsService) { localStorage.clear(); const old_group = { name:'Auth', caption: 'Auth related settings', items:[{ type:'bool', name:'radio1', value: true, default_value: false } ] }; const new_group = { name:'Auth', caption: 'Auth related settings', items:[{ type:'bool', name:'radio1', default_value: false } , { type:'bool', name:'radio2', default_value: false } ] }; const group_result = bbSettingsServiceProviderRef._mergeNewGroup(old_group, new_group); expect(group_result).toEqual({ name:'Auth', caption: 'Auth related settings', items:[{ type:'bool', name:'radio1', value: true, default_value: false } , { type:'bool', name:'radio2', value:false, default_value: false } ]});})); it('should merge groups when new group is defined with no items', inject(function(bbSettingsService) { localStorage.clear(); const old_group = { name:'Auth', caption: 'Auth related settings', items:[{ type:'bool', name:'radio1', value: true, default_value: false } ] }; const new_group = { name:'Auth', caption: 'Auth related settings', items:[] }; const group_result = bbSettingsServiceProviderRef._mergeNewGroup(old_group, new_group); 
expect(group_result).toEqual({ name:'Auth', caption: 'Auth related settings', items:[]});})); it('should merge groups when old group is defined with no items', inject(function(bbSettingsService) { localStorage.clear(); const old_group = { name:'System', caption: 'System related settings', items:[] }; const new_group = { name:'System', caption: 'System related settings', items:[{ type:'bool', name:'checkbox_system', default_value: false } , { type:'bool', name:'checkbox_system2', default_value: false } ] }; const group_result = bbSettingsServiceProviderRef._mergeNewGroup(old_group, new_group); expect(group_result).toEqual({ name:'System', caption: 'System related settings', items:[{ type:'bool', name:'checkbox_system', value:false, default_value: false } , { type:'bool', name:'checkbox_system2', value:false, default_value: false } ]});})); it('should merge groups when new group is undefined', inject(function(bbSettingsService) { localStorage.clear(); const old_group = { name:'System', caption: 'System related settings', items:[{ type:'bool', name:'checkbox_system', default_value: false } , { type:'bool', name:'checkbox_system2', default_value: false } ] }; const group_result = bbSettingsServiceProviderRef._mergeNewGroup(old_group, undefined); expect(group_result).toBeUndefined(); }) ); it('should merge groups when old group is undefined', inject(function(bbSettingsService) { localStorage.clear(); const new_group = { name:'Auth', caption: 'Auth related settings', items:[{ type:'bool', name:'radio1', default_value: false } , { type:'bool', name:'radio2', default_value: false } ] }; const group_result = bbSettingsServiceProviderRef._mergeNewGroup(undefined, new_group); expect(group_result).toEqual({ name:'Auth', caption: 'Auth related settings', items:[{ type:'bool', name:'radio1', value: false, default_value: false } , { type:'bool', name:'radio2', value: false, default_value: false } ]});})); it('should not add a group without name', 
inject(function(bbSettingsService) { localStorage.clear(); const group = { caption: 'Auth related settings', items:[{ type:'bool', name:'radio1', default_value: false } , { type:'bool', name:'radio2', default_value: false } ] }; const exceptionRun = function() { let group_result; return group_result = bbSettingsServiceProviderRef.addSettingsGroup(group); }; expect(exceptionRun).toThrow(); }) ); it('should merge groups when new group has item with no default value', inject(function(bbSettingsService) { localStorage.clear(); const old_group = { name:'System', caption: 'System related settings', items:[] }; const new_group = { name:'System', caption: 'System related settings', items:[{ type:'bool', name:'checkbox_system', default_value: false } , { type:'bool', name:'checkbox_system2' } ] }; const group_result = bbSettingsServiceProviderRef._mergeNewGroup(old_group, new_group); expect(group_result).toEqual({ name:'System', caption: 'System related settings', items:[{ type:'bool', name:'checkbox_system', value: false, default_value: false } , { type:'bool', name:'checkbox_system2', value: undefined } ]});})); it('should generate correct settings', inject(function(bbSettingsService) { const groups = bbSettingsService.getSettingsGroups(); expect(groups['Release']).toEqual({ name:'Release', caption: 'Release related settings', items:[{ type:'bool', name:'checkbox_release', value:false, default_value: false } , { type:'bool', name:'checkbox_release2', value: false, default_value: false } , { type:'bool', name:'checkbox_release3', value:false, default_value: false } , { type:'choices', name, default_value: 'radio1', value:'radio1', answers: [ { name: 'radio1' }, { name: 'radio2' } ] } ]});})); it('should return correct setting', inject(function(bbSettingsService) { const userSetting1 = bbSettingsService.getSetting('User.checkbox1'); const userSetting2 = bbSettingsService.getSetting('User.whatever'); const userSetting3 = bbSettingsService.getSetting('UserAA.User_checkbox1'); 
expect(userSetting1).toBeDefined(); expect(userSetting2).toBeUndefined(); expect(userSetting3).toBeUndefined(); }) ); it('should save correct settings', inject(function(bbSettingsService) { const checkbox = bbSettingsService.getSetting('User.checkbox1'); expect(checkbox.value).toBe(false); checkbox.value = true; bbSettingsService.save(); const storageGroups = angular.fromJson(localStorage.getItem('settings')); const storageCheckbox = storageGroups['User'].items[0].value; expect(storageCheckbox).toBeTruthy(); }) ); it('should be overriden by master.cfg', inject(function(bbSettingsService) { let to_override = bbSettingsService.getSetting('User.config_overriden'); expect(to_override.value).toEqual(100); to_override.value = 200; bbSettingsService.save(); const storageGroups = angular.fromJson(localStorage.getItem('settings')); to_override = storageGroups['User'].items[1].value; expect(to_override).toEqual(200); }) ); }); buildbot-3.4.0/www/base/src/app/d3.module.js000066400000000000000000000000001413250514000205400ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/d3/000077500000000000000000000000001413250514000167305ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/d3/d3.service.js000066400000000000000000000012731413250514000212360ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // Load d3 script via jquery // We load those 50kB+ only when needed by plugins // actually, this is loaded when someone is requiring DI of this service class D3 { constructor($document, $q, config, $rootScope) { const d = $q.defer(); import('d3').then(module => { d.resolve(module); }); return { get() { return d.promise; } }; } } angular.module('app') .service('d3Service', ['$document', '$q', 'config', '$rootScope', D3]); 
buildbot-3.4.0/www/base/src/app/home/000077500000000000000000000000001413250514000173525ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/home/home.controller.js000066400000000000000000000030751413250514000230270ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Home { constructor($scope, dataService, config, $location) { $scope.baseurl = $location.absUrl().split("#")[0]; $scope.config = config; const data = dataService.open().closeOnDestroy($scope); $scope.buildsRunning = data.getBuilds({order: '-started_at', complete: false}); $scope.recentBuilds = data.getBuilds({order: '-buildid', complete: true, limit:20}); $scope.builders = data.getBuilders(); $scope.hasBuilds = b => (b.builds != null ? b.builds.length : undefined) > 0; const updateBuilds = function() { const byNumber = (a, b) => a.number - b.number; return $scope.recentBuilds.forEach(function(build) { const builder = $scope.builders.get(build.builderid); if (builder != null) { if (builder.builds == null) { builder.builds = []; } if (builder.builds.indexOf(build) < 0) { builder.builds.push(build); builder.builds.sort(byNumber); } } }); }; $scope.recentBuilds.onChange = updateBuilds; $scope.builders.onChange = updateBuilds; } } angular.module('app') .controller('homeController', ['$scope', 'dataService', 'config', '$location', Home]); buildbot-3.4.0/www/base/src/app/home/home.route.js000066400000000000000000000024311413250514000217750ustar00rootroot00000000000000class HomeState { constructor($stateProvider, glMenuServiceProvider, bbSettingsServiceProvider) { // Name of the state const name = 'home'; // Menu configuration glMenuServiceProvider.addGroup({ name, caption: 'Home', icon: 'home', order: 1 }); const cfg = { group: name, caption: 'Home' }; // Register new state 
$stateProvider.state({ controller: `${name}Controller`, template: require('./home.tpl.jade'), name, url: '/', data: cfg }); bbSettingsServiceProvider.addSettingsGroup({ name:'Home', caption: 'Home page related settings', items:[{ type:'integer', name:'max_recent_builds', caption:'Max recent builds', default_value: 10 } , { type:'integer', name:'max_recent_builders', caption:'Max recent builders', default_value: 10 } ]}); } } angular.module('app') .config(['$stateProvider', 'glMenuServiceProvider', 'bbSettingsServiceProvider', HomeState]); buildbot-3.4.0/www/base/src/app/home/home.tpl.jade000066400000000000000000000020541413250514000217260ustar00rootroot00000000000000.container .row(ng-if="config.buildbotURL != baseurl") .alert.alert-danger Warning: | c['buildbotURL'] is misconfigured to pre {{config.buildbotURL}} | Should be: pre {{baseurl}} .row .col-sm-12 .well h2 Welcome to buildbot h4 {{ buildsRunning.length }} build{{ buildsRunning.length == 1 ? '' : 's' }} running currently ul li.unstyled(ng-repeat="build in buildsRunning | filter:complete:false") buildsticker(build="build") h4 {{ recentBuilds.length }} recent builds .row .col-md-4(ng-repeat="builder in builders | filter:hasBuilds") .panel.panel-primary .panel-heading h4.panel-title a(ui-sref="builder({builder: builder.builderid})") {{ builder.name }} .panel-body span(ng-repeat="build in builder.builds | orderBy:'-number'") buildsticker(build="build", builder="builder") buildbot-3.4.0/www/base/src/app/index.jade000066400000000000000000000002641413250514000203600ustar00rootroot00000000000000extends layout.jade block content gl-page-with-sidebar gl-topbar gl-topbar-contextual-actions loginbar connectionstatus ui-view buildbot-3.4.0/www/base/src/app/layout.jade000066400000000000000000000025351413250514000205710ustar00rootroot00000000000000- var timestamp = process.env.SOURCE_DATE_EPOCH ? 
(parseInt(process.env.SOURCE_DATE_EPOCH) * 1000) : new Date().getTime(); doctype html html.no-js(ng-app="app", xmlns:ng='http://angularjs.org', xmlns:app='ignored') head meta(charset='utf-8') meta(http-equiv='X-UA-Compatible', content='IE=edge,chrome=1') title(window-title) Buildbot meta(name='description', content='Buildbot web UI') meta(name='viewport', content='initial-scale=1, minimum-scale=1, user-scalable=no, maximum-scale=1, width=device-width') link(href='styles.css', rel='stylesheet') link(id='bbicon', href='img/icon.png', rel='icon') link(href='img/icon.svg', title="Buildbot", rel='fluid-icon') div(id='outdated') body(ng-cloak) block content block footer script(src="browser-warning.js") link(href='browser-warning.css', rel='stylesheet') script(src="browser-warning-list.js") script | window.T = {{ custom_templates | tojson }}; script(src="scripts.js?_" + timestamp) | {% for app in config.plugins -%} script(src="{{app}}/scripts.js?_" + timestamp) link(href='{{app}}/styles.css?_' + timestamp, rel='stylesheet') script | angular.module('app').requires.push('{{app}}') | {% endfor %} script | angular.module("buildbot_config", []).constant("config", {{configjson|safe}}) buildbot-3.4.0/www/base/src/app/masters/000077500000000000000000000000001413250514000201005ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/masters/master/000077500000000000000000000000001413250514000213735ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/masters/master/master.route.js000066400000000000000000000001761413250514000243650ustar00rootroot00000000000000// TODO master route ({ buildmaster: { url: '/buildmasters/:buildmaster', tabid: 'buildmasters' } }); buildbot-3.4.0/www/base/src/app/masters/masters.controller.js000066400000000000000000000021761413250514000243040ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ 
class Masters { constructor($scope, dataService, dataGrouperService, resultsService, $stateParams) { _.mixin($scope, resultsService); $scope.maybeHideMaster = function(master) { if ($stateParams.master != null) { return master.masterid !== +$stateParams.master; } return 0; }; const data = dataService.open().closeOnDestroy($scope); $scope.masters = data.getMasters(); $scope.builders = data.getBuilders(); const workers = data.getWorkers(); const builds = data.getBuilds({limit: 100, order: '-started_at'}); dataGrouperService.groupBy($scope.masters, builds, 'masterid', 'builds'); dataGrouperService.groupBy($scope.masters, workers, 'masterid', 'workers', 'connected_to'); } } angular.module('app') .controller('mastersController', ['$scope', 'dataService', 'dataGrouperService', 'resultsService', '$stateParams', Masters]); buildbot-3.4.0/www/base/src/app/masters/masters.route.js000066400000000000000000000015201413250514000232470ustar00rootroot00000000000000class MastersState { constructor($stateProvider) { // Name of the state const name = 'masters'; // Menu configuration const cfg = { group: "builds", caption: 'Build Masters' }; // Register new state $stateProvider.state({ controller: `${name}Controller`, template: require('./masters.tpl.jade'), name, url: '/masters', data: cfg }); // master page is actually same as masters, just filtered $stateProvider.state({ controller: `${name}Controller`, template: require('./masters.tpl.jade'), name: 'master', url: '/masters/:master', data: {}}); } } angular.module('app') .config(['$stateProvider', MastersState]); buildbot-3.4.0/www/base/src/app/masters/masters.tpl.jade000066400000000000000000000033721413250514000232060ustar00rootroot00000000000000.container .row table.table.table-hover.table-striped.table-condensed tr th Active th Name th Recent Builds th Workers th Last Active tr(ng-repeat='master in masters | orderBy: ["-active", "name"]', ng-hide="maybeHideMaster(master)") td i.fa.fa-check.text-success(ng-show="master.active") 
i.fa.fa-times.text-danger(ng-hide="master.active") td {{ master.name}} td a(ng-repeat="build in master.builds | orderBy: '-number' | limitTo: '20' ", ui-sref="build({builder: build.builderid, build: build.number})") script(type="text/ng-template" id="buildsummarytooltip") buildsummary(buildid="build.buildid" type="tooltip") span.badge-status(uib-tooltip-template="'buildsummarytooltip'" tooltip-class="buildsummarytooltipstyle" tooltip-placement="auto left-bottom" tooltip-popup-delay="400" tooltip-popup-close-delay="400" ng-class="results2class(build, 'pulse')") | {{ builders.get(build.builderid).name }}/{{ build.number }} td span(ng-repeat="worker in master.workers | orderBy : 'name'") a(ui-sref='workers({worker: workerid})') span.badge-status.results_SUCCESS | {{ worker.name }} td {{master.last_active | timeago}} buildbot-3.4.0/www/base/src/app/schedulers/000077500000000000000000000000001413250514000205635ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/schedulers/schedulers.controller.js000066400000000000000000000013421413250514000254440ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class schedulers { constructor($log, $scope, $location, dataService) { const data = dataService.open().closeOnDestroy($scope); $scope.schedulers = data.getSchedulers(); $scope.change = function(s) { const newValue = s.enabled; const param = {enabled: newValue}; return dataService.control('schedulers', s.schedulerid, 'enable', param); }; } } angular.module('app') .controller('schedulersController', ['$log', '$scope', '$location', 'dataService', schedulers]); buildbot-3.4.0/www/base/src/app/schedulers/schedulers.route.js000066400000000000000000000010721413250514000244170ustar00rootroot00000000000000class SchedulersState { constructor($stateProvider) { // Name of the state const name = 'schedulers'; // 
Configuration const cfg = { group: "builds", caption: 'Schedulers' }; // Register new state $stateProvider.state({ controller: `${name}Controller`, template: require('./schedulers.tpl.jade'), name, url: '/schedulers', data: cfg }); } } angular.module('app') .config(['$stateProvider', SchedulersState]); buildbot-3.4.0/www/base/src/app/schedulers/schedulers.tpl.jade000066400000000000000000000017251413250514000243540ustar00rootroot00000000000000.container uib-tabset uib-tab(heading="All schedulers") table.table.table-hover.table-striped.table-condensed tr td Enabled td Scheduler Name td Master tr(ng-repeat='scheduler in schedulers | orderBy: ["name"]') td input(type="checkbox", ng-model="scheduler.enabled", ng-change="change(scheduler)") td {{ scheduler.name }} td {{ scheduler.master.name }} uib-tab(heading="All schedulers by master") table.table.table-hover.table-striped.table-condensed tr td Enabled td Scheduler Name td Master tr(ng-repeat='scheduler in schedulers | orderBy: ["master.name", "name"]') td input(type="checkbox", ng-model="scheduler.enabled", ng-change="change(scheduler)") td {{ scheduler.name }} td {{ scheduler.master.name }} buildbot-3.4.0/www/base/src/app/settings/000077500000000000000000000000001413250514000202625ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/settings/settings.controller.js000066400000000000000000000032611413250514000246440ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class SettingsController { constructor($scope, bbSettingsService) { // All settings definition // $scope.settingsGroups = bbSettingsService.getSettingsGroups(); $scope.$watch('settingsGroups', function(newGroups) { bbSettingsService.save(); computeMasterCfgSnippet(); } , true); var computeMasterCfgSnippet = function() { let code = 
"c['www']['ui_default_config'] = { \n"; const object = bbSettingsService.getSettingsGroups(); for (let groupName in object) { const group = object[groupName]; for (let item of Array.from(group.items)) { if ((item.value !== item.default_value) && (item.value !== null)) { let value = JSON.stringify(item.value); if (value === "true") { value = "True"; } if (value === "false") { value = "False"; } code += ` '${groupName}.${item.name}': ${value},\n`; } } } code += "}\n"; return $scope.master_cfg_override_snippet = code; }; computeMasterCfgSnippet(); } } angular.module('app') .controller('settingsController', ['$scope', 'bbSettingsService', SettingsController]); buildbot-3.4.0/www/base/src/app/settings/settings.route.js000066400000000000000000000014471413250514000236230ustar00rootroot00000000000000class Settings { constructor($stateProvider, glMenuServiceProvider) { // Name of the state const name = 'settings'; // Menu configuration glMenuServiceProvider.addGroup({ name, caption: 'Settings', icon: 'sliders', order: 99 }); // Configuration const cfg = { group: name, caption: 'Settings' }; // Register new state const state = { controller: `${name}Controller`, template: require('./settings.tpl.jade'), name, url: '/settings', data: cfg }; $stateProvider.state(state); } } angular.module('app') .config(['$stateProvider', 'glMenuServiceProvider', Settings]); buildbot-3.4.0/www/base/src/app/settings/settings.tpl.jade000066400000000000000000000036221413250514000235500ustar00rootroot00000000000000.container .panel.panel-default(ng-repeat="group in ::settingsGroups") .panel-heading h3.panel-title | {{group.caption}} .panel-body form(name="{{ group.name }}") div(ng-repeat="item in group.items") div.col-md-12(ng-switch="", on="item.type") .form-group(ng-switch-when="bool") label.checkbox-inline input(type="checkbox" name="{{item.name}}" ng-model="item.value" ng-value="item.value") | {{ item.caption }} .form-group(ng-switch-when="choices") div(ng-if="item.caption") label {{ 
item.caption }} label(ng-repeat="answer in item.answers").radio-inline input(type="radio" name="{{group.name + '_' + answer.name}}" ng-model="item.value" ng-value="answer.name") | {{ answer.name }} .form-group(ng-switch-when="integer") label {{ item.caption }} input.form-control(type="number" name="{{item.name}}" ng-model="item.value") .form-group(ng-switch-when="text") label {{ item.caption }} input.form-control(type="text" name="{{item.name}}" ng-model="item.value") .alert.alert-danger(ng-switch-default) bad item type: {{item.type}} should be one of: bool, choices, integer, text .panel.panel-default .panel-heading h3.panel-title | Override defaults for all users .panel-body p To override defaults for all users, put following code in master.cfg pre | {{master_cfg_override_snippet}} buildbot-3.4.0/www/base/src/app/workers/000077500000000000000000000000001413250514000201165ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/workers/worker/000077500000000000000000000000001413250514000214275ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/app/workers/worker/worker.route.js000066400000000000000000000001221413250514000244260ustar00rootroot00000000000000// TODO worker route ({ worker: { url: '/workers/:workerid' } }); buildbot-3.4.0/www/base/src/app/workers/workeraction.dialog.js000066400000000000000000000102431413250514000244210ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class WorkerActionState { constructor($stateProvider, bbSettingsServiceProvider) { const states = [{ name: "worker.actions", multiple: false } , { name: "workers.actions", multiple: true } ]; states.forEach(state => $stateProvider.state(state.name, { url: "/actions", data: { group: null }, onEnter: 
['$stateParams', '$state', '$uibModal', 'dataService', '$q', function($stateParams, $state, $uibModal, dataService, $q) { const modal = {}; modal.modal = $uibModal.open({ template: require('./workeractions.tpl.jade'), controller: 'workerActionsDialogController', windowClass: 'modal-xlg', resolve: { workerid() { return $stateParams.worker; }, schedulerid() { return $stateParams.scheduler; }, multiple() { return state.multiple; }, modal() { return modal; }, workers() { const d = $q.defer(); dataService.getWorkers({subscribe: false}).onChange = function(workers) { workers.then = undefined; // angular will try to call it if it exists d.resolve(workers); }; return d.promise; } } }); const goUp = result => $state.go("^"); return modal.modal.result.then(goUp, goUp); }] } ) ); } } class workerActionsDialog { constructor($scope, config, $state, modal, workerid, multiple, $rootScope, $q, workers) { let worker; let w; $scope.select_options = []; $scope.worker_selection = []; if (!multiple) { worker = workers.get(workerid); $scope.worker_selection.push(worker.name); $scope.stop_disabled = worker.connected_to.length === 0; $scope.pause_disabled = worker.paused; $scope.unpause_disabled = !worker.paused; } else { $scope.stop_disabled = false; $scope.pause_disabled = false; $scope.unpause_disabled = false; } angular.extend($scope, { multiple, worker, select_options: (((() => { const result = []; for (w of Array.from(workers)) { result.push(w.name); } return result; })())), action(a){ const dl = []; workers.forEach(function(w) { if (Array.from($scope.worker_selection).includes(w.name)) { const p = w.control(a, {reason: $scope.reason}); p.catch(function(err) { let msg = `unable to ${a} worker ${w.name}:`; msg += err.error.message; $scope.error = msg; }); dl.push(p); } }); return $q.all(dl).then(res => modal.modal.close(res.result)); }, cancel() { return modal.modal.dismiss(); } } ); } } angular.module('app') .config(['$stateProvider', 'bbSettingsServiceProvider', WorkerActionState]) 
.controller('workerActionsDialogController', ['$scope', 'config', '$state', 'modal', 'workerid', 'multiple', '$rootScope', '$q', 'workers', workerActionsDialog]); buildbot-3.4.0/www/base/src/app/workers/workeractions.tpl.jade000066400000000000000000000022731413250514000244370ustar00rootroot00000000000000.modal-content .modal-header // put the header in the body in order to correctly display error popup h4(ng-show="multiple") .col-sm-4 Worker actions for... .col-sm-8 multiselect(ng-model="worker_selection" options="select_options" show-select-all="true" show-unselect-all="true" show-search="true") h4(ng-if="!multiple") Worker actions for {{worker.name}} hr .modal-body div.form-horizontal .alert.alert-danger(ng-show="error") {{error}} label.control-label.col-sm-2(for="reason") | Reason .col-sm-10 textarea.form-control(rows="15", ng-model="reason") .modal-footer button.btn.btn-default(ng-click="cancel()") Cancel button.btn.btn-primary(ng-click="action('stop')",ng-class="{disabled: stop_disabled}") Graceful Shutdown button.btn.btn-primary(ng-click="action('kill')",ng-class="{disabled: stop_disabled}") Force Shutdown button.btn.btn-primary(ng-click="action('pause')",ng-class="{disabled: pause_disabled}") Pause button.btn.btn-primary(ng-click="action('unpause')",ng-class="{disabled: unpause_disabled}") Unpause buildbot-3.4.0/www/base/src/app/workers/workers.controller.js000066400000000000000000000121631413250514000243350ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Workers { constructor($scope, dataService, bbSettingsService, resultsService, dataGrouperService, $stateParams, $state, glTopbarContextualActionsService, glBreadcrumbService) { let builds; $scope.capitalize = _.capitalize; 
_.mixin($scope, resultsService); $scope.getUniqueBuilders = function(worker) { const builders = {}; const masters = {}; for (let master of Array.from(worker.connected_to)) { masters[master.masterid] = true; } for (let buildermaster of Array.from(worker.configured_on)) { if ((worker.connected_to.length === 0) || masters.hasOwnProperty(buildermaster.masterid)) { const builder = $scope.builders.get(buildermaster.builderid); if (builder != null) { builders[buildermaster.builderid] = builder; } } } return _.values(builders); }; $scope.maybeHideWorker = function(worker) { if ($stateParams.worker != null) { return worker.workerid !== +$stateParams.worker; } if ($scope.settings.show_old_workers.value) { return 0; } return worker.configured_on.length === 0; }; const data = dataService.open().closeOnDestroy($scope); // Clear breadcrumb and contextual action buttons on destroy const clearGl = function() { glTopbarContextualActionsService.setContextualActions([]); glBreadcrumbService.setBreadcrumb([]); }; $scope.$on('$destroy', clearGl); $scope.builders = data.getBuilders(); $scope.masters = data.getMasters(); $scope.workers = data.getWorkers(); $scope.workers.onChange = function(workers) { let worker; const breadcrumb = [{ caption: "Workers", sref: "workers" } ]; const actions = []; if ($stateParams.worker != null) { $scope.worker = (worker = workers.get(+$stateParams.worker)); breadcrumb.push({ caption: worker.name, sref: `worker({worker:${worker.workerid}})` }); actions.push({ caption: "Actions...", extra_class: "btn-default", action() { return $state.go("worker.actions"); } }); } else { actions.push({ caption: "Actions...", extra_class: "btn-default", action() { return $state.go("workers.actions"); } }); } // reinstall breadcrumb when coming back from forcesched const setupGl = function() { glTopbarContextualActionsService.setContextualActions(actions); glBreadcrumbService.setBreadcrumb(breadcrumb); }; $scope.$on('$stateChangeSuccess', setupGl); setupGl(); 
$scope.worker_infos = []; for (worker of Array.from(workers)) { worker.num_connections = worker.connected_to.length; for (let k in worker.workerinfo) { // we only count workerinfo that is at least defined in one worker const v = worker.workerinfo[k]; if ((v != null) && (v !== "") && ($scope.worker_infos.indexOf(k) < 0)) { $scope.worker_infos.push(k); } } } $scope.worker_infos.sort(); }; const byNumber = (a, b) => a.number - b.number; $scope.numbuilds = 200; if ($stateParams.numbuilds != null) { $scope.numbuilds = +$stateParams.numbuilds; } if ($stateParams.worker != null) { $scope.builds = (builds = data.getBuilds({ limit: $scope.numbuilds, workerid: +$stateParams.worker, order: '-started_at', property: ["owners", "workername"]})); } else { builds = data.getBuilds({limit: $scope.numbuilds, order: '-started_at', property: ["owners", "workername"]}); } dataGrouperService.groupBy($scope.workers, builds, 'workerid', 'builds'); $scope.settings = bbSettingsService.getSettingsGroup("Workers"); $scope.$watch('settings', () => { bbSettingsService.save(); }, true); } } angular.module('app') .controller('workersController', ['$scope', 'dataService', 'bbSettingsService', 'resultsService', 'dataGrouperService', '$stateParams', '$state', 'glTopbarContextualActionsService', 'glBreadcrumbService', Workers]); buildbot-3.4.0/www/base/src/app/workers/workers.route.js000066400000000000000000000027151413250514000233120ustar00rootroot00000000000000class WorkersState { constructor($stateProvider, bbSettingsServiceProvider) { // Name of the state const name = 'workers'; // Menu Configuration const cfg = { group: "builds", caption: 'Workers' }; // Register new state $stateProvider.state({ controller: `${name}Controller`, template: require('./workers.tpl.jade'), name, url: '/workers?numbuilds', data: cfg }); // worker page is actually same as worker, just filtered $stateProvider.state({ controller: `${name}Controller`, template: require('./workers.tpl.jade'), name: 'worker', url: 
'/workers/:worker?numbuilds', data: {}}); bbSettingsServiceProvider.addSettingsGroup({ name:'Workers', caption: 'Workers page related settings', items:[{ type:'bool', name:'show_old_workers', caption:'Show old workers', default_value: false } , { type:'bool', name:'showWorkerBuilders', caption:'Show list of builders for each worker (can take a lot of time)', default_value: false } ]}); } } angular.module('app') .config(['$stateProvider', 'bbSettingsServiceProvider', WorkersState]); buildbot-3.4.0/www/base/src/app/workers/workers.tpl.jade000066400000000000000000000063611413250514000232430ustar00rootroot00000000000000.container .row table.table.table-hover.table-striped.table-condensed tr th State th Masters th WorkerName th Recent Builds th(ng-if="settings.showWorkerBuilders.value") Builders th(ng-repeat="info in worker_infos") {{ capitalize(info) }} tr(ng-repeat='worker in workers | orderBy: ["-num_connections", "name"]', ng-hide="maybeHideWorker(worker)") td a(ui-sref='worker.actions({worker: worker.workerid})') i.fa.fa-pause(ng-if="worker.paused", title="paused")   i.fa.fa-stop(ng-if="worker.graceful", title="graceful shutdown") i.fa.fa-smile-o(ng-if="!worker.paused && !worker.graceful && worker.num_connections") td div(ng-hide="worker.num_connections") i.fa.fa-times.text-danger(title="disconnected") div(ng-repeat="masterid in worker.connected_to") a(ui-sref="master({master:masterid.masterid})") span.badge-status.results_SUCCESS(title="{{ masters.get(masterid.masterid).name }}") | {{ masterid.masterid }} td a(ui-sref='worker({worker: worker.workerid})') | {{worker.name}} td a(ng-repeat="build in worker.builds | orderBy: '-buildid' | limitTo: '7' ", ui-sref="build({builder: build.builderid, build: build.number})") script(type="text/ng-template" id="buildsummarytooltip") buildsummary(buildid="build.buildid" type="tooltip") span.badge-status(uib-tooltip-template="'buildsummarytooltip'" tooltip-class="buildsummarytooltipstyle" tooltip-placement="auto left-bottom" 
tooltip-popup-delay="400" tooltip-popup-close-delay="400" ng-class="results2class(build, 'pulse')") | {{ builders.get(build.builderid).name }}/{{ build.number }} td(ng-if="settings.showWorkerBuilders.value") ul.list-inline li(ng-repeat='builder in getUniqueBuilders(worker) | orderBy: ["name"]') a(ui-sref="builder({builder: builder.builderid})") | {{ builder.name }} td(ng-repeat="info in worker_infos") | {{ worker.workerinfo[info] }} .row(ng-hide="builds") .form-group label.checkbox-inline input(type="checkbox" name="{{settings.show_old_workers.name}}" ng-model="settings.show_old_workers.value") | {{settings.show_old_workers.caption}} div(ng-if="builds") builds-table(builds="builds", builders="builders", ng-if="builds") a.btn.btn-default(ui-sref='worker({worker: worker.workerid, numbuilds: numbuilds + 100})', ng-if="builds.length==numbuilds") | more buildbot-3.4.0/www/base/src/icontestpage.html000066400000000000000000000021361413250514000212170ustar00rootroot00000000000000

Test page for Buildbot icons

buildbot-3.4.0/www/base/src/img/000077500000000000000000000000001413250514000164165ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/img/favicon.ico000066400000000000000000000021761413250514000205450ustar00rootroot00000000000000 h(  f8f8f8f8\C4knqqssqpRpo!rJ0f8f8f8f8f8f8f8f8f8b[hlxywxyyxxwwxxuuvutsWh;f8f8f8f8YT`'|p|~}}}}~~}}~~}}||}}[f8f8f8UGEjdsǩf8f8]Yg{f8WQ].xf8wn}um|5y?!0qag͜_NPvSO\{|Й_}y^[ro{`]L=2Dǥ8{Iay;شKC.̞J'>Xj qId3n&Y^n'#BiR8#;xGFP ɑ3B]`O%(+pFXs:5 rzC#F;QfSNԦ_mw_MRݥIZ5Qo#9 B5s5VS;7 6z֪' wFR9uɀ6RZ5|%uRei88,!ߜ\$| QI^rXī^C-Rv~A78,$QcfSzm'od_QL݈<& +f+vGr| \tyΪC(0& =kI @2LS8,bp9MCU R3$ᜱ3<o7!`PYa #;x.O>"0"'l_=o?s\iBig L_$wI³%µBid zT`g <%I弥 >=ȕrx֪BqXHΛLPp`& +2n p [kLo AHkqipI=v&9֧t5cP^Y[ 0uఎ$ LWIVnS'o>u6/;i("V"Srco*6_5>;B:%IXg@O͞gu9phZ!)5S̝BF"b\ca 0,=kk !دHKfT2O<OuR}XRJWBF|țLVTFfV2 EI:^}z:갊QRA!dN&3o*yIZy TW#W(Yâ8ڳVr=A,Z|$ +<WR{T\Qz\гaԅr_Q 0&:4Ջ垶͗ǡɎ y  g}U=;u @:Z!o/znyϕ;#L a-A/mDE@Q59[9a~؋O y 3ԫXA]yJ waiz?8/;?)X1zmrX3^93QJB1vJVJCi͒ ta0f|oءvҲ'po#Y6DK*2)xI+ |,U_V߳` &kT/£̛"խ^U_֩ 9f)DA!\}ǫ?DCnCЉz*/Kb:8 f((xN| ϓ;rC<r CgPԜs}޳JAèTj'C7gdOL}aI)AP5}=|-r뽧?~+T*9 `\1#=ks:A>(iN9 ԹgpM-F0Y:,SpMBãLzԨ!KPJ:5zdHSZc/F`N6lV(%轄jaߞ-TRW]NC]͍56ъbnav;vjVVg`Jph #H˙DvTFx 3Zky+,"A|޳V _tXwlvY,^2gxa)ھ]SQ :;{:JJDT#24cf-+8AgG =~=%pM0"қfdN`cٽmk~ 6yۤT*#-`g3v\lg¶u2p-`Uk+ښVVc0xϪ߽öuQsէj⓳zYXXptƧo<ޑ; Z%Ɨ8~;=1V6/{憚AL܈č#69yJFkc5MWspvj*>ĔQ|-Jصy9o>u&I9IvZ_rX'K{6$每~%s|ʗ=F[G:}0);3 r*WSYMX̦iy *nGQ 8ٳV/9b@rܩWrzbͲH k$)}"j:Un+1wl8"* {1Ǹ*."K HpoUU8+xQiz74F:oq-ݝn | H/"5k1 @hiGR6O<88̍CB#mu۳cG;4FM\!8yC\l{ԥR<4~hu>UV^{;xˁ=M= 򥹇5~ =~=M}:gE7:6 Rsf?4܃[聊"~MbRIJAK's`9:d]K=8SӁ/܂/8l}EL9);|__v n'!ome/wdc7.ɩο,ˏ^M[KTw3BOGXWlMLs%s||o$*9S&,:3*i>M:[DX-f.đ$)NKMYȮ?.;T==dw5lXd1${Vm-L@SlXyn}F2r 9C#8}{vα[6݄qbz6,:Dn?$r9ҧIeZ#u\b9wx*,f9cgzB` f̓,NC~nC+M۳YݍZ)#Cf݋)/,ٟ=O,I#>m"Wb5wˊ6Bބ9^?BiM+H2tX@Ph8 8CT0'm<}TJvTL\p O*%EmF&6f>6H"ۻSg;tb M=v"u`*90d3o DGU$ )nŇhO0̻ɝv,~ad?ڊ-t4wقn#;e JE !P$3-~[tZ9Wއ$V7sxW1HP,wHC$}+ݵHI&Qy9vh8AS+``ZH=bщ1XG=(/y9FԮ蘻!RGQ hA̻q2I
/W(dv`|φ / 3o,G'jW>RrjxVkhktyWL=,A+&$wlvnQJv98X7 4 @/{%w#}FPXb]JE_@֐w,{[feW9a=X£7x Lg{c!1ya7Eo)j;ϤC̸yW3\/X ZcD*w-ljg` f%&XµL5 @v:>mX{+VMw'܈S;IȘ¾aN=tp1q#HV` `֩/A}M!R`[_xdžPidžپ{Q{tRSO;*젼|͌3ॻhi+1//0yir;\AJ` ؖy/v}P83;*ݮ|5cmV_@r+om:O;,jؙhuzYzfETL;oG$y};o QI暪~mQ4uim&-\;y6o_EѶ_EO%1{v;O^*cθ^[;.jWSi\s=BRɟ8σ&x,9 Lb‚?o( LaE;ψ?k1 \O*wq1rsـ^E<-/8hAر{*bh݉ƑS;G-:czc:c8ZMXD8?Q]YAgG*@VV}=eBҧb_e6\溽t4Z:[lmZCo CNht*# IXL1#R '²/n1y멛_y4gX!)DE'{Cy)>q *Kw'vTSLbjhk.f5@W{-EH㲈1c1'Xx|gQ^5U%o"#GQ*)5H loaOGM;cH+Nr5Ԕ~A\}۷9#F>iĤmWLE>|.e%fh )% ˟4/`k a!8)x JM:t--}W6vE4,x[$f"9g.qh=|v.W{MU %I%|M9WSK4.~Q{能6jkbWT\*Ju@%B : I7!EQ!=Z ;Ag #eR|Ix{־CwgO^+6*vpX)Ğ :Qcf߬_:C(iNBŁ1ĹhU/qIAHxrT]m͟If٤䟀ƃ;]Se,im>DXDgׯ3LGom)-}ҙ R#̦jE'AmBHHu=D ~$? ]*ewR?)1DfI2&Mɺzvߜ{=bS_[IM^ 5l5U{흉R?lt) Kfe! ,=A7mpnz_'Zٻ]6}5|i/Z}0cG-[,Ua\1; `AJEceo-.n:)]~,Rv6'K/G-p0A?"$VMo(=!mFY$rX]mo_E8a\rw,@pZGIKdLocRǝLJ (~ʊ0=B!ub#p'H+[6f5S]z",.qb􏟿;sL].wkh_/IB\tw'h,\ٓ:U5@|\ \sEtw4(1ش.7(ehݫ\F߰hJsB* 0ۥEfhq̎ej hO`9+G!zuՂ+?Խ]\FBRvN5a(1 `] ǺT@OTlq x@\\0EXeGk#*y^kfƮ-aC;8`̬JӡTj-).!Mla|u/ݝm@$pGK@BqɿEW[?]??bvg&gXR֭o֗ٷwXDK&-D&lwRU^IUj>"D+CJ}dvM%5sM}77a6ڇ6/i CVBQ4ZgvE|t$ջcɱQʁ* `.B CuWlt3 P_nhxZ4(pXAQC)Ο8oR^Q8z vW<Ƕoj0ـ z|T_91R<^tBu@"Q{ptpn}֯Ml^mMBPo!Rap#6 ;x__D3046X[ IDATKtʒg>F墥.viypXT/U]P!41r_V6|r3%O` G7)|l!-ƇfX/~:R8N]8GIL&&^6¯/Dd sW ? 
wJ<0v_ q!Z Κپ>6 ,U4dM] w.kcC&; #HpdʠȦ6|'Z,ngeM7ppwJeayc 4n{v,Zt!qN]kؤ〠.1|Qͦ6|q; Ro@ C`DVMC4H(DTWA]s \"DX%84Q\|Aꩩb -l_iNBЂrN[a0|0CmfT_n jy-xή-X˟av&EYlZͽ쒺>sD8!ͧU?c4VZRU^HsCCQN_K2'Gh~ .Y sMOP$1PU+J9,[ "H5F0D^aE#vqs\x lv>eoK]!$ܽV+ !m~w:ho:.+"/#+J,fΏZk.;(YĕhE>+uS+;ލ%9K &,.҄eZ*/ċU,+tp /H֨C;zzaEuvsˋ_h/x#,ɯ0pOv5EX\t+@jXذSLuR+;Tp?kkHmҋXpJH*5`QbF!sx eYDP!;mTppk/JW=KwW}V`V:?lbώբ`MuK43yqN/N4wj=HH^ \n\BaҕϊlUzCz=;mwRkЇJ W9, u.ocKXLm%>U\' sՓT vK7wAab8D;#jv965ƎCcw]jwP9fɪtVpazYD&t9SoX3&{c)=8>_KWh [8D#YL~IA㦊Rm1uީT a)Ȍ)n3EKyHf=+:j )C/HJCKa (+A(zCӆx9&4<cpX_<)[xU3.)]R#,E߇Mwȷ[FX2T%]匛Vɝ}=Ak J pe`c.)h)$6OJLN.sXqw'3'ץ2`=lg]iaO35jj E(pJ1OCQRtHXѮW%$KG?nԲ_gCqX~)c LPTN,qmQH@LMylQі `@;pwH-.7jPc ER3gܽu6kB4zq#Ȅ3o0R#u /$$3ocXs<5 y.!?HEQ^T-:@/£.:ܘzRbiR@!ԱqF_cZٽu=8N>݆E,ui`K!?i^Ƙ OR.9 zͨD$LxY&:Uj t5K52`바~X d{ ѱqIVk$~e֯p8`bȨ3{πg JѦ{rk<($ 9G*-j ځo{8q( zp',"/7@"ձq b0|֯L-ۭ`>Df* }@I{e^2HI!k!nwt^1*JYxeWťR` *uX I#s[Ups".度gTw.E]E}i밙EE @,/A!ph ̈xwJ- ڻ:JC$W/}u@ Đ<$6=l#;~6sZ`Yîp-J0sy3\s8- A"Jǝzհ)%,.VW=QSU ez "oq B1XtRg*ƚJE+{N+ B&<T*S?)#>9+n}^p*B5\yUW'$y aYA}H:Δ!O crHy+jxfKw':ep =gW9]/@`G%y*$CYrK'I"v-EA}TvY:196\aSkXD܉F'=D{D] ==oz p<gV,V"ŵ{}XZ_&lsѕ(y9.*(A !Z1wJk~1j 5B$ʠJI)8 Ak)BC/#ܳ;x4zf)+cJ H5~[.Q3!t2wILM:R8}6L}aR:fB%(+O!BRs)m|> ],3!4δ \;ش4RUKޗX_wVc.`ryTKLVSU=m@uw՗R:Dc8(I8R";zS䰢prkQjmj@pKflޙDJhn83_H u7??)*gw ! "v"(.ZsK*6*6a[oo@T|&\"` ]߻JS亿IDcBTg~0'!S( wÌ=jbbբ>-]XL5FPV]IL#*4ěOU?N4CTݓF[}G 2- rM;,nǧ* Gٻs ݦ޻*O9.iod3 $!R5DF?BZciZ/*hٵg6+YǠ)Fs" #()\'&!L{bEI](qE`*2.?*Z%m'|:;zC͕=jTvWvBӲJwo R3$bڼiokr{z-$GVnTjf,8n~F8Ȱnѫa|’ dm-W0jEt}]s}kO\/* bw j EZ^(_Ax0>&rpa VB٤8P憃*bGM- K7<$=:Qit͹j<b dpӃqu$*p"rf%(Bʏ2a}x %ML{+Mdg!\ȢO8);]HS7>-ڪ7I'ep/'* g/Fk#.pAE3a̓~J"9m4y0z\FMT]Lɮ"gRidVv^ǒ6.*V8/e_DKS.;Owhuǘ.cώՔox`]Z!!9|27q.ᑮ+p2Q(-'5EGnooX͇ßVKM|RI:@8ʨ*47Ը*"cLj|RI(&x?ڌ1d[ni݆9,FٷS9:_E~?+JuwwGxd9c{lv?t7"&k1w IT*AJ","£≌I$,2N)率N#Rdj/QD='>mQ1blb{A XǏ? sgcs'BF;5F6nHK߈춀[tz:;Uj ᒵ:-ԭhwy%tuz2px: ;Q&#AcBSꌛ -ٿp-OwK)ul6 ݭv;K vX*'/!WR:#`̋].ի0(,Q>bd瘚ޓ!9zzx^'|4&_%`ū_Ϛex2ߠ͢xCX͵K<@iݮOi( 渆Ѥ̸F"fWQL-{-Jѣ5FȞ#S <\ii K3iػڝHP a#Hw'?* ; %E! 
}ڃ}1ٵ ;L-1 4~,*UkD΢zT4*Ks+L ǯ܃!>f փ[&ŻZ Ne ÉΎʊ6 ,!Ҵ 18zbQlh KSQQg\ȴᓱZ]MeTNJZɬ.e h6qIDATJk!JS TWT-&aɒ}-W-.3`xC7s:Ò?FBwSQ +>Koz1 *"6eox*몵 a#MHL)CGLAFj)>#\ *EbHH\ajl#ٛ9+<}VLlV+*vSce7b0P i&M"z"q:o0#/n=| Ьv-^pY|;vht$\12V moħWM֛m|fʊ6Q^ҢXkAcAxTbr m>Pevsp뛎e@"rvXF8v5z&\BH[5@ꋿGɾXˋd )]mTlcՔmt;V*pbF2̅.[l(I]^ Hz_XA0avSUFMDoJ䜂IݮOpThm:s\}3W$-Mu]JIP("ӎb%z_(E@[ 3m~Sa[DIDuM]صLy,T =wʊ6QS%4*mp,cX@Xr19ݎW*'"X#V,,CmBǒr̟Z(j1γT_q^% mTR^,=sI$Z>4ЄDg/"(ƷNkjwV ځ3%-U밚;H1*0F1rTzLv|1k8;|Aٹ6`QkB T8brNDf:?O@ؕ(v;.‡&ђ\cǒٱҢ$a]p,yDe.Xe/e&@ 6)A0 Q6+5^KtI^1?NBkv85?.7>IDIɰaUj-xB ׳}Z;:+ rXc*N;x&; %bt`hTo~Imǯ+yxBbZ(ryJDhxG-F&vo ~+pcgӾ_t50b\#eM_v[3ʇ/ESAN</Y>L]ۻIjCHB& ҿI.v8`Y2]{Ð9MҤQk%:|0n\tchu$a@%(z#}6m՛!p' p "HDVحTԶ2YӸg1`Vw$ R5DMT|"F4,Ba_C$su:>s@qXׅ[vI&bm/a~+Q0K*ɵGFaEf̧LTS !C @rXo MXڝeɾ\uK'gzw% ! u!|rG7rXAt6F_>ߔd5^ԣRw&#}WL;~#L޳}5eś0w;FRЇ%0+wrXjAdJ)>K~s/ܚw[Q}6j#O=$ ,$aCD*>ɇ<RKt@RW_k­To|]Za|ɷ&Ik!*J8IZ#ƨL’&u<:t4| zayuӄXZ0E XM-$hRgLlV>zkeLݒ$Ic\ *x<ɍZkO%yUh6`jfߪNw6~M.ݵCI"%b,"FF3RS c#jaA?['_.d`)=MG0w6HF,R?$>Є;8u͂fqR)IYU(MJQ_x!IQRoHT7jKK)K5 MbcvoIp gÙ'3c̽3EIs/PKL+zZ#,P_ R-;Զ-&=Vݐa랏q>3=qe_z^,O^o l|*Mgh l 4A=B}v@52׀sUZ#ubMu?[U)`$CϜ?Ŧ }nQ|#|\'ݰG>Ǿ=۸Uxm$F*2H,t3tZUPV1w~8Eb!&~ۍ[EMXFAe>UȮڅE]F[6<;r=~$[vVYAu+{xpGV+3uE7n1 7"CB}FyӏjהAn,gSqmL_;ˮƎ=\ʼnIChP5JpNXo$`97|xh¶ #=ݸU2CBbnL;ݛ{,w,_bƭc1qv?xB*EUeLߤHV}&=/i :*[wPzxpަR jAVet>_!qk2:nzco'}PAVAQhDI_sL;i8]mkzf*bЋ ׂb}$`U8Ei+?e'ö]jzM1P;K㸜bp!ߢ$`UT_XfP})Z}&T,D,ԧ2ݩbH Hx}㿨 od~^a[_H,I1" R HFN}$Aw:< X3'Li bA1@ոy;Nؼꋫ43v  + X-y1&ϰӼo,=֜NʬHX7 Xbƭs7ƭ uȅI*WWűǼErR QyOR\N7%on~?ФCb uEH.* 4n-Gꐿ1吀}K,<ՃG QA}]Dne: nW{zxAByUɭ LqJbr6PWo^!*@j 8 Jxե%7@ /"@zAa*`pu{F'NIENDB`buildbot-3.4.0/www/base/src/img/icon.svg000066400000000000000000000022341413250514000200700ustar00rootroot00000000000000 buildbot-3.4.0/www/base/src/img/icon16.svg000066400000000000000000000007431413250514000202420ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/img/nobody.png000066400000000000000000000043011413250514000204140ustar00rootroot00000000000000PNG  IHDR@@iqsRGBbKGD pHYsodtIME -^6AIDATxZLT>YgAvjAi@OqEŮPbv(%JL6҈LL 
,5M$l܂ PA8{2ɂ38cssCT.]JV.[l[?<RXbnV]v==sP䵶 yE)$!! "( d|rBȳ% ++W^qN!dR*YJKKSSS"Dc2L $HHHzo񮌯'ljjҴt޼ysN{8x1S[[[c0| "sL(Qj҉̀}b|@)%eYX,܆-{{{{v03 P9?&[P՚椾hɒ%Vu)0~ZX0U]]]V5՟U׮]+>!LLL@ZZE@MM);(pEE@cc#2 3/0 ]]]P>]&+ V~cFF 4}bb"3l&m$ DQ$jکVƃ.efgz}ƍ#+**^OΛ""wc˷є JcYYYZ166?88xbxx)a@DY%(ZjPYY;~Ϟ={Xh "quʕ__|sBͯGݝCGt;v$.^Iiivtt8p:zzz_TTc\qƭw "DGG?۰aGgϞm ON2vvv,;m* QQQ|a/***̄7/VXX;-gfgј`0\)^p7@8J5T*捻ȑ#z9vټghh(err2pL{ @bbbѣ fUOOϞx#S, ndRR#..njrr,]n݇N3"Ax##EQ$ZvQ j4>\ s'"""k0 e(++՜PYYlxxŋr_9;J)aY<BR r_ sa:aZaz~:6+ h4?ˬ3CCCA'xA>k'T*ƒOB4=:?00:qb0ABlМYe!d_KKG#vCDDA2-X,l6߼.UVF$ (T0]0L!@)"h4Bdd2+`ѫ]Ot:s@Bo/_|W!p8A yonغu+,N/Ilڴ;}rrrY<'@DP*Cξ5]\._vVVHrr2AZ(~<+D[ AD~8(yyyn3Mm۶6q!I69,, ?oll݌>M&޾} Q?}tnDaW[ &&!!`]]ݷ'xpIdL&w#"T*Xz5 ۼysͩS` Xby]IENDB`buildbot-3.4.0/www/base/src/styles/000077500000000000000000000000001413250514000171655ustar00rootroot00000000000000buildbot-3.4.0/www/base/src/styles/animations.less000066400000000000000000000072601413250514000222240ustar00rootroot00000000000000/* This file contains css3 animation code Please read this, before playing with it: http://www.html5rocks.com/en/tutorials/speed/high-performance-animations/ tl;dr is: " Today transforms are the best properties to animate because the GPU can assist with the heavy lifting, so where you can limit your animations to these, do so. opacity translate rotate scale " */ /* animation for alert boxes. 
*/ .anim-alert { &.ng-leave { transition: all 400ms cubic-bezier(0.250, 0.250, 0.750, 0.750); } &.ng-enter{ transition: all 100ms cubic-bezier(0.250, 0.250, 0.750, 0.750); } &.ng-leave.ng-leave-active, &.ng-enter { opacity: 0; transform: translateY(-70px); -webkit-transform: translateY(-70px); } &.ng-enter.ng-enter-active, &.ng-leave { opacity: 1; transform: translateY(0); -webkit-transform: translateY(0px); } } /* animation for */ .anim-fade { -webkit-transition: .5s linear all; transition: .5s linear all; opacity: 1; &.ng-hide-remove, &.ng-hide-add,.list.ng-hide { display:block!important; } &.ng-hide { opacity: 0; } } /* animation for validation tooltips in force forms */ .anim-popover { &.ng-enter, &.ng-leave{ transition: all 500ms cubic-bezier(0.000, 0.915, 0.480, 0.995); /* The animation preparation code */ } &.ng-enter, &.ng-leave.ng-leave-active { /* The animation code itself */ opacity: 0; transform: translateY(-150px) scale(.2); -webkit-transform: translateY(-150px) scale(.2); } &.ng-leave, &.ng-enter.ng-enter-active { /* The animation code itself */ opacity: 1; -webkit-transform: translateY(0px) scale(1); } } /* steps and change details animation */ .anim-stepdetails, .anim-changedetails { &.ng-hide-add, &.ng-hide-remove{ display:block!important; transition: all 200ms cubic-bezier(0.000, 0.915, 0.480, 0.995); /* The animation preparation code */ } /* The animation code itself */ opacity: 1; transform: translateY(0px); -webkit-transform: translateY(0px); &.ng-hide{ /* The animation code itself */ opacity: 0; transform: translateY(-20px); -webkit-transform: translateY(-20px); } } /* rotate animation for fold-unfold of details */ .rotate { transition: -webkit-transform 100ms; transition: transform 100ms; } /* heart-like pulse animation, used to show live stuff like current builds */ @-webkit-keyframes pulse_animation { 0% { -webkit-transform: scale(0.9); } 30% { -webkit-transform: scale(0.9); } 40% { -webkit-transform: scale(1.08); } 50% { -webkit-transform: 
scale(0.9); } 60% { -webkit-transform: scale(0.9); } 70% { -webkit-transform: scale(1.05); } 80% { -webkit-transform: scale(0.9); } 100% { -webkit-transform: scale(0.9); } } @keyframes pulse_animation { 0% { transform: scale(0.9); } 30% { transform: scale(0.9); } 40% { transform: scale(1.08); } 50% { transform: scale(0.9); } 60% { transform: scale(0.9); } 70% { transform: scale(1.05); } 80% { transform: scale(0.9); } 100% { transform: scale(0.9); } } .pulse { -webkit-animation-name: 'pulse_animation'; -webkit-animation-duration: 1000ms; -webkit-transform-origin:70% 70%; -webkit-animation-iteration-count: infinite; -webkit-animation-timing-function: linear; animation-name: 'pulse_animation'; animation-duration: 1000ms; transform-origin:70% 70%; animation-iteration-count: infinite; animation-timing-function: linear; } .nut-spin { animation: nut-spin .5s infinite linear; } @keyframes nut-spin { 0% { transform: rotate(0deg); } 50% { transform: rotate(20deg); } 100% { transform: rotate(0deg); } } buildbot-3.4.0/www/base/src/styles/colors.less000066400000000000000000000051001413250514000213520ustar00rootroot00000000000000/* This file contain meaningful coloring for status stuff (logs, status values) */ /* logs coloring this theme is based the iterm2 default theme */ pre.log { background-color: #000; color: #c7c7c7; .padding { // zebra pattern background: repeating-linear-gradient(-45deg,rgb(10, 10, 10),rgb(40, 40, 40) 7%, rgb(10,10,10) 10%) } .log_o, .ansi_white, .ansi30 { color: #c7c7c7; } .log_e, .ansi_red, .ansi31 { color: #c91b00; } .ansi_green, .ansi32 { color: #00c200; } .ansi_yellow, .ansi33 { color: #c7c400; } .ansi_blue, .ansi34 { color: #0225c7; } .ansi_magenta, .ansi35 { color: #ca30c7; } .log_h, .ansi_cyan, .ansi36 { color: #00c5c7; } // bright colors .ansi1 { &.ansi30 { color: #ffffff; } &.ansi31 { color: #ff6e67; } &.ansi32 { color: #5ffa68; } &.ansi33 { color: #fffc67; } &.ansi34 { color: #6871ff; } &.ansi35 { color: #f075f0; } &.ansi37 { color: #60fdff; } 
} } .label,.badge-status { &.results_SUCCESS, &.worker_CONNECTED { color: #000; background-color: #8d4; border-color: #4F8530; } &.results_WARNINGS { color: #FFFFFF; background-color: #fa3; border-color: #C29D46; } &.results_FAILURE, &.worker_DISCONNECTED { color: #000; background-color: #e88; border-color: #A77272; } &.results_SKIPPED { color: #000; background: #AADDEE; border-color: #AADDEE; } &.results_EXCEPTION { color: #FFFFFF; background-color: #c6c; border-color: #ACA0B3; } &.results_RETRY { color: #000; background-color: #ecc; border-color: #ACA0B3; } &.results_CANCELLED { color: #000; background-color: #ecc; border-color: #ACA0B3; } &.results_PENDING { color: #000; background-color: #E7D100; border-color: #ACA0B3; } &.results_UNKNOWN { color: #000; background-color: #EEE; border-color: #ACA0B3; } } .panel,.badge-status { &.results_SUCCESS { border-color: #4F8530; } &.results_WARNINGS { border-color: #C29D46; } &.results_FAILURE { border-color: #A77272; } &.results_SKIPPED { border-color: #AADDEE; } &.results_EXCEPTION { border-color: #ACA0B3; } &.results_RETRY { border-color: #ACA0B3; } &.results_CANCELLED { border-color: #ACA0B3; } &.results_PENDING { border-color: #ACA0B3; } &.results_UNKNOWN { border-color: #ACA0B3; } } buildbot-3.4.0/www/base/src/styles/mobile.less000066400000000000000000000013751413250514000213320ustar00rootroot00000000000000/* mobile tweaks Reduce a bit more the font sizes and paddings the make more stuff in the screen, trading a bit of uglyness... 
*/ @media (max-width: 600px) { .navbar .breadcrumb { font-size: 12px !important;; } .container, .container-fluid, .panel-heading, .list-group-item { padding-left: 5px !important; padding-right: 5px !important; } .navbar { .container, .container-fluid { padding-left: 15px !important; padding-right: 15px !important; } } .nav.nav-tabs > li > a { padding: 5px !important; font-size: 12px !important; } .logline { height: 12px !important;; font-size: 10px !important; } .nomobile{ display: None} } buildbot-3.4.0/www/base/src/styles/styles.less000066400000000000000000000115771413250514000214130ustar00rootroot00000000000000@import "~bootstrap/less/bootstrap.less"; @import "~font-awesome/less/font-awesome.less"; @import "../app/**/*.less"; @import "./animations.less"; @import "./colors.less"; @import "./mobile.less"; @import (inline) "~guanlecoja-ui/dist/styles.css"; /* base css for angular, don't display anything until angular is running */ [ng\:cloak], [ng-cloak], [data-ng-cloak], [x-ng-cloak], .ng-cloak, .x-ng-cloak { display: none !important; } // Dashboards which use SVG to draw contents such as waterfall may require // setting body height to 100vh. (100% of viewport height) // e.g. Waterfall uses this, otherwise the contents do not appear. // The controller dynamically adds and removes this class to body // when entering and leaving Waterfall. // // (note: adding height:100vh to a div also would solve this problem // except that causes the dialogs to make the SVG disappear due to // the dialog setting the body to overflow:hidden via modal-open rule.) 
body.hundredpercent { height: 100vh; } /* modal dialog customizations */ @media screen { .modal-xlg { .modal-dialog { width: 90%; } /*Sets the maximum height of the modal body to 80% of the screen height*/ .modal-body { overflow-y: auto; max-height: 80vh; } } /* border-radius looks strange for navbar, on lower screen sizes it is 0 by default */ .navbar-static-top { border-radius: 0; } } /* make the sidebar be on the back of the modals dialog */ .sidebar { z-index: (@zindex-modal - 1) !important; } /* button bar inside the breadcrum, used for forcesched buttons */ .breadcrumb .btn { height: 29px; margin-top: -5px; padding-top: 4px; } .breadcrumb > li + li.pull-right:before { content: " "; padding: 0 2px; } /* status badge */ .badge-status { display: inline-block; min-width: 10px; padding: 3px 7px; font-size: @font-size-small; font-weight: @badge-font-weight; line-height: @badge-line-height; vertical-align: baseline; white-space: nowrap; text-align: center; border-radius: @badge-border-radius; transition: all 100ms cubic-bezier(0.250, 0.250, 0.750, 0.750); .badge-active{ display: none; } &:hover, &:focus { -webkit-transform: scale(1.3); transform: scale(1.3); .badge-active{ display: inline-block; } .badge-inactive{ display: none; } } } .clickable { cursor: pointer } .no-select { -webkit-touch-callout: none; -webkit-user-select: none; -khtml-user-select: none; -moz-user-select: none; -ms-user-select: none; user-select: none; } .select-content { -webkit-touch-callout: text; -webkit-user-select: text; -khtml-user-select: text; -moz-user-select: text; -ms-user-select: text; user-select: text; } /* https://danoc.me/blog/css-prevent-copy/ Firefox will insert newlines if the selection go over a no-select span, so we need to implement them with ::before */ [data-linenumber-content]::before, [data-linenumber-content--before]::before, [data-linenumber-content--after]::after { content: attr(data-linenumber-content); color: #c7c7c7; padding-right: 10px; } img.avatar { 
background-color: #ccc; border-radius: 50%; height: 40px; margin: -10px; width: 40px; } .change-avatar { background-size: 16px 16px; border-radius: 50%; display: inline-block; margin-bottom: -3px; margin-right: 10px; height: 20px; width: 20px; overflow: hidden; img { height: 20px; width: 20px; background-color: #ccc } } .changedetails > .no-select > * { margin-left: 0.3em; margin-right: 0.3em; } li.unstyled{ list-style: none; } .no-margin{ margin: 0px !important; } .mouse-over-only {display: none;} span:hover>.mouse-over-only {display: inline;} /* flex row will make the row grow automatically given the size of the content This will make sure all the inside div will fill all content. This is useful when you want to create a row with one left and one right the level of grow give the importance of the div. The more it has grow the more it will win space when fighting against the other divs You can use it with following jade code: .flex-row .flex-grow-1 left content .flex-grow-1 .pull-right right content */ .flex-row { display: flex; flex-flow: row; /* Then we define how is distributed the remaining space */ justify-content: space-around; .flex-grow-1 { flex-grow: 1 } .flex-grow-2 { flex-grow: 2 } .flex-grow-3 { flex-grow: 3 } .flex-grow-4 { flex-grow: 4 } } /* Buildsummary tooltip class */ .tooltip.buildsummarytooltipstyle .tooltip-inner { border: 1px; padding: 1px; min-width: 600px; color: #262626; } /* glyphicon to font awesome translation, for angular-bootstrap-multiselect */ .glyphicon { font-family: 'FontAwesome'; } .glyphicon-ok:before { content: @fa-var-check; } .glyphicon-remove:before { content: @fa-var-times; } buildbot-3.4.0/www/base/src/tests.webpack.js000066400000000000000000000005141413250514000207550ustar00rootroot00000000000000// This file is an entry point for angular tests // Avoids some weird issues when using webpack + angular. 
import 'angular'; import 'angular-mocks/angular-mocks'; import './app/app.module.js' import '../test/scripts/mocks/config.mock.js' const context = require.context('./', true, /\.spec.js$/); context.keys().forEach(context); buildbot-3.4.0/www/base/test/000077500000000000000000000000001413250514000160325ustar00rootroot00000000000000buildbot-3.4.0/www/base/test/scripts/000077500000000000000000000000001413250514000175215ustar00rootroot00000000000000buildbot-3.4.0/www/base/test/scripts/mocks/000077500000000000000000000000001413250514000206355ustar00rootroot00000000000000buildbot-3.4.0/www/base/test/scripts/mocks/config.mock.js000066400000000000000000000001661413250514000233730ustar00rootroot00000000000000angular.module("buildbot_config", []).constant("config", { title: "foo", buildbotURL: "test.example.com", }); buildbot-3.4.0/www/base/webpack.config.js000066400000000000000000000040561413250514000202760ustar00rootroot00000000000000'use strict'; const common = require('buildbot-build-common'); const env = require('yargs').argv.env; const pkg = require('./package.json'); const WebpackShellPlugin = require('webpack-shell-plugin'); const WebpackCopyPlugin = require('copy-webpack-plugin'); var event = process.env.npm_lifecycle_event; var isTest = event === 'test' || event === 'test-watch'; var isProd = env === 'prod'; module.exports = function() { const outputPath = __dirname + '/buildbot_www/static'; return common.createTemplateWebpackConfig({ entry: { scripts: './src/app/app.module.js', styles: './src/styles/styles.less', }, libraryName: pkg.name, pluginName: pkg.plugin_name, dirname: __dirname, isTest: isTest, isProd: isProd, outputPath: outputPath, extractStyles: true, extraRules: [{ test: /\.(ttf|eot|woff|woff2)(\?v=[0-9]\.[0-9]\.[0-9])?$/, use: 'file-loader' }, { test: /\.(jpe?g|png|svg|ico)(\?v=[0-9]\.[0-9]\.[0-9])?$/, use: [{ loader: 'file-loader', options: { name: '[name].[ext]', outputPath: 'img' } }] }], extraPlugins: [ new WebpackShellPlugin({ 
onBuildEnd:['./node_modules/.bin/pug src/app/index.jade -o buildbot_www/static/'] }), new WebpackCopyPlugin([ { from: './node_modules/outdated-browser-rework/dist/outdated-browser-rework.min.js', to: outputPath + '/browser-warning.js' }, { from: './node_modules/outdated-browser-rework/dist/style.css', to: outputPath + '/browser-warning.css' }, { from: './src/app/app.browserwarning.notranspile.js', to: outputPath + '/browser-warning-list.js' }, ]), ], provideJquery: true, supplyBaseExternals: true, }); }(); buildbot-3.4.0/www/base/yarn.lock000066400000000000000000012011571413250514000167050ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.7.4.tgz#37e864532200cb6b50ee9a4045f5f817840166ab" integrity sha512-+bYbx56j4nYBmpsWtnPUsKW3NdnYxbqyfrP2w9wILBuHzdfIKz9prieZK0DFPyIzkjYVUe4QkusGL07r5pXznQ== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helpers" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" convert-source-map "^1.7.0" debug "^4.1.0" json5 "^2.1.0" lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" "@babel/generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.7.4.tgz#db651e2840ca9aa66f327dcec1dc5f5fa9611369" integrity sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg== dependencies: "@babel/types" "^7.7.4" jsesc "^2.5.1" lodash "^4.17.13" source-map "^0.5.0" 
"@babel/helper-annotate-as-pure@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.4.tgz#bb3faf1e74b74bd547e867e48f551fa6b098b6ce" integrity sha512-2BQmQgECKzYKFPpiycoF9tlb5HA4lrVyAmLLVK177EcQAqjVLciUb2/R+n1boQ9y5ENV3uz2ZqiNw7QMBBw1Og== dependencies: "@babel/types" "^7.7.4" "@babel/helper-builder-binary-assignment-operator-visitor@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.4.tgz#5f73f2b28580e224b5b9bd03146a4015d6217f5f" integrity sha512-Biq/d/WtvfftWZ9Uf39hbPBYDUo986m5Bb4zhkeYDGUllF43D+nUe5M6Vuo6/8JDK/0YX/uBdeoQpyaNhNugZQ== dependencies: "@babel/helper-explode-assignable-expression" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-call-delegate@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.7.4.tgz#621b83e596722b50c0066f9dc37d3232e461b801" integrity sha512-8JH9/B7J7tCYJ2PpWVpw9JhPuEVHztagNVuQAFBVFYluRMlpG7F1CgKEgGeL6KFqcsIa92ZYVj6DSc0XwmN1ZA== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-create-regexp-features-plugin@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.4.tgz#6d5762359fd34f4da1500e4cff9955b5299aaf59" integrity sha512-Mt+jBKaxL0zfOIWrfQpnfYCN7/rS6GKx6CCCfuoqVVd+17R8zNDlzVYmIi9qyb2wOk002NsmSTDymkIygDUH7A== dependencies: "@babel/helper-regex" "^7.4.4" regexpu-core "^4.6.0" "@babel/helper-define-map@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz#2841bf92eb8bd9c906851546fe6b9d45e162f176" integrity sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg== dependencies: "@babel/helper-function-name" 
"^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-explode-assignable-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.4.tgz#fa700878e008d85dc51ba43e9fb835cddfe05c84" integrity sha512-2/SicuFrNSXsZNBxe5UGdLr+HZg+raWBLE9vC98bdYOKX/U6PY0mdGlYUJdtTDPSU0Lw0PNbKKDpwYHJLn2jLg== dependencies: "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz#ab6e041e7135d436d8f0a3eca15de5b67a341a2e" integrity sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ== dependencies: "@babel/helper-get-function-arity" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-get-function-arity@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz#cb46348d2f8808e632f0ab048172130e636005f0" integrity sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA== dependencies: "@babel/types" "^7.7.4" "@babel/helper-hoist-variables@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.4.tgz#612384e3d823fdfaaf9fce31550fe5d4db0f3d12" integrity sha512-wQC4xyvc1Jo/FnLirL6CEgPgPCa8M74tOdjWpRhQYapz5JC7u3NYU1zCVoVAGCE3EaIP9T1A3iW0WLJ+reZlpQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-member-expression-to-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.4.tgz#356438e2569df7321a8326644d4b790d2122cb74" integrity sha512-9KcA1X2E3OjXl/ykfMMInBK+uVdfIVakVe7W7Lg3wfXUNyS3Q1HWLFRwZIjhqiCGbslummPDnmb7vIekS0C1vw== dependencies: "@babel/types" "^7.7.4" 
"@babel/helper-module-imports@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.7.4.tgz#e5a92529f8888bf319a6376abfbd1cebc491ad91" integrity sha512-dGcrX6K9l8258WFjyDLJwuVKxR4XZfU0/vTUgOQYWEnRD8mgr+p4d6fCUMq/ys0h4CCt/S5JhbvtyErjWouAUQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-transforms@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.7.4.tgz#8d7cdb1e1f8ea3d8c38b067345924ac4f8e0879a" integrity sha512-ehGBu4mXrhs0FxAqN8tWkzF8GSIGAiEumu4ONZ/hD9M88uHcD+Yu2ttKfOCgwzoesJOJrtQh7trI5YPbRtMmnA== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-simple-access" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-optimise-call-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.4.tgz#034af31370d2995242aa4df402c3b7794b2dcdf2" integrity sha512-VB7gWZ2fDkSuqW6b1AKXkJWO5NyNI3bFL/kK79/30moK57blr6NbH8xcl2XcKCwOmJosftWunZqfO84IGq3ZZg== dependencies: "@babel/types" "^7.7.4" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.4.tgz#c68c2407350d9af0e061ed6726afb4fff16d0234" integrity sha512-Sk4xmtVdM9sA/jCI80f+KS+Md+ZHIpjuqmYPk1M7F/upHou5e4ReYmExAiu6PVe65BhJPZA2CY9x9k4BqE5klw== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-wrap-function" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-replace-supers@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.7.4.tgz#3c881a6a6a7571275a72d82e6107126ec9e2cdd2" integrity sha512-pP0tfgg9hsZWo5ZboYGuBn/bbYT/hdLPVSS4NMmiRJdwWhP0IznPwN9AE1JwyGsjSPLC364I0Qh5p+EPkGPNpg== dependencies: "@babel/helper-member-expression-to-functions" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-simple-access@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.7.4.tgz#a169a0adb1b5f418cfc19f22586b2ebf58a9a294" integrity sha512-zK7THeEXfan7UlWsG2A6CI/L9jVnI5+xxKZOdej39Y0YtDYKx9raHk5F2EtK9K8DHRTihYwg20ADt9S36GR78A== dependencies: "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-split-export-declaration@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz#57292af60443c4a3622cf74040ddc28e68336fd8" integrity sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug== dependencies: "@babel/types" "^7.7.4" "@babel/helper-validator-identifier@^7.12.11": version "7.12.11" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== 
"@babel/helper-wrap-function@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz#37ab7fed5150e22d9d7266e830072c0cdd8baace" integrity sha512-VsfzZt6wmsocOaVU0OokwrIytHND55yvyT4BPB9AIIgwr8+x7617hetdJTsuGwygN5RC6mxA9EJztTjuwm2ofg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helpers@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.7.4.tgz#62c215b9e6c712dadc15a9a0dcab76c92a940302" integrity sha512-ak5NGZGJ6LV85Q1Zc9gn2n+ayXOizryhjSUBTdu5ih1tlVCJeuQENzc4ItyCVhINVXvIT/ZQ4mheGIsfBkpskg== dependencies: "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/highlight@^7.0.0": version "7.5.0" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" "@babel/parser@^7.6.0", "@babel/parser@^7.9.6": version "7.13.9" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.13.9.tgz#ca34cb95e1c2dd126863a84465ae8ef66114be99" integrity sha512-nEUfRiARCcaVo3ny3ZQjURjHQZUo/JkEw7rLlSZy/psWGnvwXFtPcr6jb7Yb41DVW5LTe6KRq9LGleRNsg1Frw== "@babel/parser@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.7.4.tgz#75ab2d7110c2cf2fa949959afb05fa346d2231bb" integrity sha512-jIwvLO0zCL+O/LmEJQjWA75MQTWwx3c3u2JOTDK5D3/9egrWRRA0/0hk9XXywYnXZVVpzrBYeIQTmhwUaePI9g== "@babel/plugin-proposal-async-generator-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz#0351c5ac0a9e927845fffd5b82af476947b7ce6d" integrity 
sha512-1ypyZvGRXriY/QP668+s8sFr2mqinhkRDMPSQLNghCQE+GAkFtp+wkHVvg2+Hdki8gwP+NFzJBJ/N1BfzCCDEw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-proposal-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.4.tgz#dde64a7f127691758cbfed6cf70de0fa5879d52d" integrity sha512-StH+nGAdO6qDB1l8sZ5UBV8AC3F2VW2I8Vfld73TMKyptMU9DY5YsJAS8U81+vEtxcH3Y/La0wG0btDrhpnhjQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.7.4.tgz#7700a6bfda771d8dc81973249eac416c6b4c697d" integrity sha512-wQvt3akcBTfLU/wYoqm/ws7YOAQKu8EVJEvHip/mzkNtjaclQoCCIqKXFP5/eyfnfbQCDV3OLRIK3mIVyXuZlw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.7.4.tgz#cc57849894a5c774214178c8ab64f6334ec8af71" integrity sha512-rnpnZR3/iWKmiQyJ3LKJpSwLDcX/nSXhdLk4Aq/tXOApIvyu7qoabrige0ylsAJffaUC51WiBu209Q0U+86OWQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.7.4.tgz#ec21e8aeb09ec6711bc0a39ca49520abee1de379" integrity sha512-DyM7U2bnsQerCQ+sejcTNZh8KQEUuC3ufzdnVnSiUv/qoGJp2Z3hanKL18KDhsBT5Wj6a7CMT5mdyCNJsEaA9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.4.tgz#7c239ccaf09470dbe1d453d50057460e84517ebb" integrity sha512-cHgqHgYvffluZk85dJ02vloErm3Y6xtH+2noOBOJ2kXOJH3aVCDnj5eR/lVNlTnYu4hndAPJD3rTFjW3qee0PA== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-async-generators@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.7.4.tgz#331aaf310a10c80c44a66b238b6e49132bd3c889" integrity sha512-Li4+EjSpBgxcsmeEF8IFcfV/+yJGxHXDirDkEoyFjumuwbmfCVHUt0HuowD/iGM7OhIRyXJH9YXxqiH6N815+g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import@^7.2.0", "@babel/plugin-syntax-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.7.4.tgz#29ca3b4415abfe4a5ec381e903862ad1a54c3aec" integrity sha512-jHQW0vbRGvwQNgyVxwDh4yuXu4bH1f5/EICJLAhl1SblLs2CDhrsmCk+v5XLdE9wxtAFRyxx+P//Iw+a5L/tTg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.7.4.tgz#86e63f7d2e22f9e27129ac4e83ea989a382e86cc" integrity sha512-QpGupahTQW1mHRXddMG5srgpHWqRLwJnJZKXTigB9RPFCCGbDGCgBeM/iC82ICXp414WeYx/tD54w7M2qRqTMg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.7.4.tgz#47cf220d19d6d0d7b154304701f468fc1cc6ff46" integrity 
sha512-mObR+r+KZq0XhRVS2BrBKBpr5jqrqzlPvS9C9vuOf5ilSwzloAl7RPWLrgKdWS6IreaVrjHxTjtyqFiOisaCwg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.7.4.tgz#a3e38f59f4b6233867b4a92dcb0ee05b2c334aa6" integrity sha512-4ZSuzWgFxqHRE31Glu+fEr/MirNZOMYmD/0BhBWyLyOOQz/gTAl7QmWm2hX1QxEIXsr2vkdlwxIzTyiYRC4xcQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-top-level-await@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.4.tgz#bd7d8fa7b9fee793a36e4027fd6dd1aa32f946da" integrity sha512-wdsOw0MvkL1UIgiQ/IFr3ETcfv1xb8RMM0H9wbiDyLaJFyiDg5oZvDLCXosIXmFeIlweML5iOBXAkqddkYNizg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-arrow-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.7.4.tgz#76309bd578addd8aee3b379d809c802305a98a12" integrity sha512-zUXy3e8jBNPiffmqkHRNDdZM2r8DWhCB7HhcoyZjiK1TxYEluLHAvQuYnTT+ARqRpabWqy/NHkO6e3MsYB5YfA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.4.tgz#694cbeae6d613a34ef0292713fa42fb45c4470ba" integrity sha512-zpUTZphp5nHokuy8yLlyafxCJ0rSlFoSHypTUWgpdwoDXWQcseaect7cJ8Ppk6nunOM6+5rPMkod4OYKPR5MUg== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.7.4.tgz#d0d9d5c269c78eaea76227ace214b8d01e4d837b" integrity sha512-kqtQzwtKcpPclHYjLK//3lH8OFsCDuDJBaFhVwf8kqdnF6MN4l618UDlcA7TfRs3FayrHj+svYnSX8MC9zmUyQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-block-scoping@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.7.4.tgz#200aad0dcd6bb80372f94d9e628ea062c58bf224" integrity sha512-2VBe9u0G+fDt9B5OV5DQH4KBf5DoiNkwFKOz0TCvBWvdAN2rOykCTkrL+jTLxfCAm76l9Qo5OqL7HBOx2dWggg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" lodash "^4.17.13" "@babel/plugin-transform-classes@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.4.tgz#c92c14be0a1399e15df72667067a8f510c9400ec" integrity sha512-sK1mjWat7K+buWRuImEzjNf68qrKcrddtpQo3swi9j7dUcG6y6R6+Di039QN2bD1dykeswlagupEmpOatFHHUg== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-define-map" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" globals "^11.1.0" "@babel/plugin-transform-computed-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.7.4.tgz#e856c1628d3238ffe12d668eb42559f79a81910d" integrity sha512-bSNsOsZnlpLLyQew35rl4Fma3yKWqK3ImWMSC/Nc+6nGjC9s5NFWAer1YQ899/6s9HxO2zQC1WoFNfkOqRkqRQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-destructuring@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.7.4.tgz#2b713729e5054a1135097b6a67da1b6fe8789267" 
integrity sha512-4jFMXI1Cu2aXbcXXl8Lr6YubCn6Oc7k9lLsu8v61TZh+1jny2BWmdtvY9zSUlLdGUvcy9DMAWyZEOqjsbeg/wA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-dotall-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.4.tgz#f7ccda61118c5b7a2599a72d5e3210884a021e96" integrity sha512-mk0cH1zyMa/XHeb6LOTXTbG7uIJ8Rrjlzu91pUx/KS3JpcgaTDwMS8kM+ar8SLOvlL2Lofi4CGBAjCo3a2x+lw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-duplicate-keys@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.7.4.tgz#3d21731a42e3f598a73835299dd0169c3b90ac91" integrity sha512-g1y4/G6xGWMD85Tlft5XedGaZBCIVN+/P0bs6eabmcPP9egFleMAo65OOjlhcz1njpwagyY3t0nsQC9oTFegJA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-exponentiation-operator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.7.4.tgz#dd30c0191e3a1ba19bcc7e389bdfddc0729d5db9" integrity sha512-MCqiLfCKm6KEA1dglf6Uqq1ElDIZwFuzz1WH5mTf8k2uQSxEJMbOIEh7IZv7uichr7PMfi5YVSrr1vz+ipp7AQ== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-for-of@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.7.4.tgz#248800e3a5e507b1f103d8b4ca998e77c63932bc" integrity sha512-zZ1fD1B8keYtEcKF+M1TROfeHTKnijcVQm0yO/Yu1f7qoDoxEIc/+GX6Go430Bg84eM/xwPFp0+h4EbZg7epAA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-function-name@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.4.tgz#75a6d3303d50db638ff8b5385d12451c865025b1" integrity sha512-E/x09TvjHNhsULs2IusN+aJNRV5zKwxu1cpirZyRPw+FyyIKEHPXTsadj48bVpc1R5Qq1B5ZkzumuFLytnbT6g== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.7.4.tgz#27fe87d2b5017a2a5a34d1c41a6b9f6a6262643e" integrity sha512-X2MSV7LfJFm4aZfxd0yLVFrEXAgPqYoDG53Br/tCKiKYfX0MjVjQeWPIhPHHsCqzwQANq+FLN786fF5rgLS+gw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-member-expression-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.7.4.tgz#aee127f2f3339fc34ce5e3055d7ffbf7aa26f19a" integrity sha512-9VMwMO7i69LHTesL0RdGy93JU6a+qOPuvB4F4d0kR0zyVjJRVJRaoaGjhtki6SzQUu8yen/vxPKN6CWnCUw6bA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-amd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.7.4.tgz#276b3845ca2b228f2995e453adc2e6f54d72fb71" integrity sha512-/542/5LNA18YDtg1F+QHvvUSlxdvjZoD/aldQwkq+E3WCkbEjNSN9zdrOXaSlfg3IfGi22ijzecklF/A7kVZFQ== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-commonjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.4.tgz#bee4386e550446343dd52a571eda47851ff857a3" integrity sha512-k8iVS7Jhc367IcNF53KCwIXtKAH7czev866ThsTgy8CwlXjnKZna2VHwChglzLleYrcHz1eQEIJlGRQxB53nqA== dependencies: "@babel/helper-module-transforms" 
"^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.7.4" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-systemjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.4.tgz#cd98152339d3e763dfe838b7d4273edaf520bb30" integrity sha512-y2c96hmcsUi6LrMqvmNDPBBiGCiQu0aYqpHatVVu6kD4mFEXKjyNxd/drc18XXAf9dv7UXjrZwBVmTTGaGP8iw== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-umd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.4.tgz#1027c355a118de0aae9fee00ad7813c584d9061f" integrity sha512-u2B8TIi0qZI4j8q4C51ktfO7E3cQ0qnaXFI1/OXITordD40tt17g/sXqgNNCcMTcBFKrUPcGDx+TBJuZxLx7tw== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-named-capturing-groups-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.4.tgz#fb3bcc4ee4198e7385805007373d6b6f42c98220" integrity sha512-jBUkiqLKvUWpv9GLSuHUFYdmHg0ujC1JEYoZUfeOOfNydZXp1sXObgyPatpcwjWgsdBGsagWW0cdJpX/DO2jMw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/plugin-transform-new-target@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.7.4.tgz#4a0753d2d60639437be07b592a9e58ee00720167" integrity sha512-CnPRiNtOG1vRodnsyGX37bHQleHE14B9dnnlgSeEs3ek3fHN1A1SScglTCg1sfbe7sRQ2BUcpgpTpWSfMKz3gg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-object-super@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.7.4.tgz#48488937a2d586c0148451bf51af9d7dda567262" integrity sha512-ho+dAEhC2aRnff2JCA0SAK7V2R62zJd/7dmtoe7MHcso4C2mS+vZjn1Pb1pCVZvJs1mgsvv5+7sT+m3Bysb6eg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/plugin-transform-parameters@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.7.4.tgz#da4555c97f39b51ac089d31c7380f03bca4075ce" integrity sha512-VJwhVePWPa0DqE9vcfptaJSzNDKrWU/4FbYCjZERtmqEs05g3UMXnYMZoXja7JAJ7Y7sPZipwm/pGApZt7wHlw== dependencies: "@babel/helper-call-delegate" "^7.7.4" "@babel/helper-get-function-arity" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-property-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.7.4.tgz#2388d6505ef89b266103f450f9167e6bd73f98c2" integrity sha512-MatJhlC4iHsIskWYyawl53KuHrt+kALSADLQQ/HkhTjX954fkxIEh4q5slL4oRAnsm/eDoZ4q0CIZpcqBuxhJQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-regenerator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.4.tgz#d18eac0312a70152d7d914cbed2dc3999601cfc0" integrity sha512-e7MWl5UJvmPEwFJTwkBlPmqixCtr9yAASBqff4ggXTNicZiwbF8Eefzm6NVgfiBp7JdAGItecnctKTgH44q2Jw== dependencies: regenerator-transform "^0.14.0" "@babel/plugin-transform-reserved-words@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.7.4.tgz#6a7cf123ad175bb5c69aec8f6f0770387ed3f1eb" integrity sha512-OrPiUB5s5XvkCO1lS7D8ZtHcswIC57j62acAnJZKqGGnHP+TIc/ljQSrgdX/QyOTdEK5COAhuc820Hi1q2UgLQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-transform-runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.7.4.tgz#51fe458c1c1fa98a8b07934f4ed38b6cd62177a6" integrity sha512-O8kSkS5fP74Ad/8pfsCMGa8sBRdLxYoSReaARRNSz3FbFQj3z/QUvoUmJ28gn9BO93YfnXc3j+Xyaqe8cKDNBQ== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" resolve "^1.8.1" semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.7.4.tgz#74a0a9b2f6d67a684c6fbfd5f0458eb7ba99891e" integrity sha512-q+suddWRfIcnyG5YiDP58sT65AJDZSUhXQDZE3r04AuqD6d/XLaQPPXSBzP2zGerkgBivqtQm9XKGLuHqBID6Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.7.4.tgz#aa673b356fe6b7e70d69b6e33a17fef641008578" integrity sha512-8OSs0FLe5/80cndziPlg4R0K6HcWSM0zyNhHhLsmw/Nc5MaA49cAsnoJ/t/YZf8qkG7fD+UjTRaApVDB526d7Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-sticky-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.7.4.tgz#ffb68c05090c30732076b1285dc1401b404a123c" integrity sha512-Ls2NASyL6qtVe1H1hXts9yuEeONV2TJZmplLONkMPUG158CtmnrzW5Q5teibM5UVOFjG0D3IC5mzXR6pPpUY7A== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.0.0" "@babel/plugin-transform-template-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.7.4.tgz#1eb6411736dd3fe87dbd20cc6668e5121c17d604" integrity sha512-sA+KxLwF3QwGj5abMHkHgshp9+rRz+oY9uoRil4CyLtgEuE/88dpkeWgNk5qKVsJE9iSfly3nvHapdRiIS2wnQ== dependencies: 
"@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-typeof-symbol@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.7.4.tgz#3174626214f2d6de322882e498a38e8371b2140e" integrity sha512-KQPUQ/7mqe2m0B8VecdyaW5XcQYaePyl9R7IsKd+irzj6jvbhoGnRE+M0aNkyAzI07VfUQ9266L5xMARitV3wg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-unicode-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.4.tgz#a3c0f65b117c4c81c5b6484f2a5e7b95346b83ae" integrity sha512-N77UUIV+WCvE+5yHw+oks3m18/umd7y392Zv7mYTpFqHtkpcc+QUz+gLJNTWVlWROIWeLqY0f3OjZxV5TcXnRw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/preset-env@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.7.4.tgz#ccaf309ae8d1ee2409c85a4e2b5e280ceee830f8" integrity sha512-Dg+ciGJjwvC1NIe/DGblMbcGq1HOtKbw8RLl4nIjlfcILKEOkWT/vRqPpumswABEBVudii6dnVwrBtzD7ibm4g== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-proposal-async-generator-functions" "^7.7.4" "@babel/plugin-proposal-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-syntax-top-level-await" "^7.7.4" "@babel/plugin-transform-arrow-functions" "^7.7.4" "@babel/plugin-transform-async-to-generator" "^7.7.4" 
"@babel/plugin-transform-block-scoped-functions" "^7.7.4" "@babel/plugin-transform-block-scoping" "^7.7.4" "@babel/plugin-transform-classes" "^7.7.4" "@babel/plugin-transform-computed-properties" "^7.7.4" "@babel/plugin-transform-destructuring" "^7.7.4" "@babel/plugin-transform-dotall-regex" "^7.7.4" "@babel/plugin-transform-duplicate-keys" "^7.7.4" "@babel/plugin-transform-exponentiation-operator" "^7.7.4" "@babel/plugin-transform-for-of" "^7.7.4" "@babel/plugin-transform-function-name" "^7.7.4" "@babel/plugin-transform-literals" "^7.7.4" "@babel/plugin-transform-member-expression-literals" "^7.7.4" "@babel/plugin-transform-modules-amd" "^7.7.4" "@babel/plugin-transform-modules-commonjs" "^7.7.4" "@babel/plugin-transform-modules-systemjs" "^7.7.4" "@babel/plugin-transform-modules-umd" "^7.7.4" "@babel/plugin-transform-named-capturing-groups-regex" "^7.7.4" "@babel/plugin-transform-new-target" "^7.7.4" "@babel/plugin-transform-object-super" "^7.7.4" "@babel/plugin-transform-parameters" "^7.7.4" "@babel/plugin-transform-property-literals" "^7.7.4" "@babel/plugin-transform-regenerator" "^7.7.4" "@babel/plugin-transform-reserved-words" "^7.7.4" "@babel/plugin-transform-shorthand-properties" "^7.7.4" "@babel/plugin-transform-spread" "^7.7.4" "@babel/plugin-transform-sticky-regex" "^7.7.4" "@babel/plugin-transform-template-literals" "^7.7.4" "@babel/plugin-transform-typeof-symbol" "^7.7.4" "@babel/plugin-transform-unicode-regex" "^7.7.4" "@babel/types" "^7.7.4" browserslist "^4.6.0" core-js-compat "^3.1.1" invariant "^2.2.2" js-levenshtein "^1.1.3" semver "^5.5.0" "@babel/runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.7.4.tgz#b23a856751e4bf099262f867767889c0e3fe175b" integrity sha512-r24eVUUr0QqNZa+qrImUk8fn5SPhHq+IfYvIoIMg0do3GdK9sMdiLKP3GYVVaxpPKORgm8KRKaNTEhAjgIpLMw== dependencies: regenerator-runtime "^0.13.2" "@babel/template@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/template/-/template-7.7.4.tgz#428a7d9eecffe27deac0a98e23bf8e3675d2a77b" integrity sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw== dependencies: "@babel/code-frame" "^7.0.0" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" "@babel/traverse@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.7.4.tgz#9c1e7c60fb679fe4fcfaa42500833333c2058558" integrity sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.13" "@babel/types@^7.6.1", "@babel/types@^7.9.6": version "7.13.0" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.13.0.tgz#74424d2816f0171b4100f0ab34e9a374efdf7f80" integrity sha512-hE+HE8rnG1Z6Wzo+MhaKE5lM5eMx71T4EHJgku2E3xIfaULhDcxiiRxUYgwX8qwP1BBSlag+TdGOt6JAidIZTA== dependencies: "@babel/helper-validator-identifier" "^7.12.11" lodash "^4.17.19" to-fast-properties "^2.0.0" "@babel/types@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.7.4.tgz#516570d539e44ddf308c07569c258ff94fde9193" integrity sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA== dependencies: esutils "^2.0.2" lodash "^4.17.13" to-fast-properties "^2.0.0" "@types/babel-types@*", "@types/babel-types@^7.0.0": version "7.0.9" resolved "https://registry.yarnpkg.com/@types/babel-types/-/babel-types-7.0.9.tgz#01d7b86949f455402a94c788883fe4ba574cad41" integrity sha512-qZLoYeXSTgQuK1h7QQS16hqLGdmqtRmN8w/rl3Au/l5x/zkHx+a4VHrHyBsi1I1vtK2oBHxSzKIu0R5p6spdOA== "@types/babylon@^6.16.2": version "6.16.5" resolved 
"https://registry.yarnpkg.com/@types/babylon/-/babylon-6.16.5.tgz#1c5641db69eb8cdf378edd25b4be7754beeb48b4" integrity sha512-xH2e58elpj1X4ynnKp9qSnWlsRTIs6n3tgLGNfwAGHwePw0mulHQllV34n0T25uYSu1k0hRKkWXF890B1yS47w== dependencies: "@types/babel-types" "*" "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== "@types/glob@^7.1.1": version "7.1.1" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== dependencies: "@types/events" "*" "@types/minimatch" "*" "@types/node" "*" "@types/minimatch@*": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== "@types/node@*": version "12.12.12" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.12.tgz#529bc3e73dbb35dd9e90b0a1c83606a9d3264bdb" integrity sha512-MGuvYJrPU0HUwqF7LqvIj50RZUX23Z+m583KBygKYUZLlZ88n6w28XRNJRJgsHukLEnLz6w6SvxZoLgbr5wLqQ== "@uirouter/angularjs@^1.0.15", "@uirouter/angularjs@^1.0.22": version "1.0.23" resolved "https://registry.yarnpkg.com/@uirouter/angularjs/-/angularjs-1.0.23.tgz#aeec0f96b0c42187c5044ef244ba6ccb75a5d835" integrity sha512-r4hLSw7R3mwXGC5Sq7yxNlBK1sSzQUm/1MzigwwYRHoMO5uKcBPUhxFYx5U7kufP2Xl1165KeZvRsLCh0/Z1ng== dependencies: "@uirouter/core" "6.0.1" "@uirouter/core@6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/@uirouter/core/-/core-6.0.1.tgz#93b02a5d178a7ab7313f34b7b3f019a000d23396" integrity sha512-mHCutiHtDDRKYmrJ92XPKDoSb2bgqaDyUpHdF4hUE+riwgkCvGdBjL8u+VDTcV3slBAk6B0LBIOIajjWkkObbQ== "@webassemblyjs/ast@1.8.5": version 
"1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== dependencies: "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@webassemblyjs/floating-point-hex-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== "@webassemblyjs/helper-api-error@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== "@webassemblyjs/helper-buffer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" integrity sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== "@webassemblyjs/helper-code-frame@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== dependencies: "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/helper-fsm@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" integrity sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== "@webassemblyjs/helper-module-context@1.8.5": 
version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== dependencies: "@webassemblyjs/ast" "1.8.5" mamacro "^0.0.3" "@webassemblyjs/helper-wasm-bytecode@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== "@webassemblyjs/helper-wasm-section@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/ieee754@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" integrity 
sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== "@webassemblyjs/wasm-edit@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" integrity sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/helper-wasm-section" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-opt" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/wasm-gen@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wasm-opt@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" integrity sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wasm-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" integrity sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" 
"@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wast-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/floating-point-hex-parser" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-code-frame" "1.8.5" "@webassemblyjs/helper-fsm" "1.8.5" "@xtuc/long" "4.2.2" "@webassemblyjs/wast-printer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== abbrev@1.0.x: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU= 
accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== dependencies: mime-types "~2.1.24" negotiator "0.6.2" acorn-globals@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-3.1.0.tgz#fd8270f71fbb4996b004fa880ee5d46573a731bf" integrity sha1-/YJw9x+7SZawBPqIDuXUZXOnMb8= dependencies: acorn "^4.0.4" acorn@^3.1.0: version "3.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" integrity sha1-ReN/s56No/JbruP/U2niu18iAXo= acorn@^4.0.4, acorn@~4.0.2: version "4.0.13" resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787" integrity sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c= acorn@^6.2.1: version "6.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.3.0.tgz#0087509119ffa4fc0a0041d1e93a417e68cb856e" integrity sha512-/czfa8BwS88b9gWQVhc8eknunSA2DoJpJyTQkhheIf5E48u1N0R4q/YxxsAeqRrmK9TQ/uYfgLDfZo91UlANIA== acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" integrity sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8= ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: version "3.4.1" resolved 
"https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" integrity sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== ajv@^5.0.0: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= dependencies: co "^4.6.0" fast-deep-equal "^1.0.0" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.3.0" ajv@^6.1.0, ajv@^6.10.2, ajv@^6.5.5: version "6.10.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== dependencies: fast-deep-equal "^2.0.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" align-text@^0.1.1, align-text@^0.1.3: version "0.1.4" resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" integrity sha1-DNkKVhCT810KmSVsIrcGlDP60Rc= dependencies: kind-of "^3.0.2" longest "^1.0.1" repeat-string "^1.5.2" amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" integrity sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU= angular-animate@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-animate/-/angular-animate-1.7.9.tgz#a397f82434c1e7ed5b7a298fa70fc3de989a6785" integrity sha512-fV+AISy/HTzurQH2ngsJg+lLIvfu0ahc1h4AYKauaXVw97rZc2k4iUA1bMstiEyClsdayQX568kjQc1NK+oYhw== angular-bootstrap-multiselect@^1.1.11: version "1.1.11" resolved "https://registry.yarnpkg.com/angular-bootstrap-multiselect/-/angular-bootstrap-multiselect-1.1.11.tgz#28b7dd93cbe40d16035b25a1299107d106330766" integrity sha512-R2e9I+PLNHTxVs2+6e5KUzFJTlRwqX7OJI8YG7RgHLjWh/Kk/atsRT/07sf4yCu4DyzOQ2PhLrI3AZ1PZ9Dg3g== angular-mocks@^1.7.9: version "1.7.9" resolved 
"https://registry.yarnpkg.com/angular-mocks/-/angular-mocks-1.7.9.tgz#0a3b7e28b9a493b4e3010ed2b0f69a68e9b4f79b" integrity sha512-LQRqqiV3sZ7NTHBnNmLT0bXtE5e81t97+hkJ56oU0k3dqKv1s6F+nBWRlOVzqHWPGFOiPS8ZJVdrS8DFzHyNIA== angular-recursion@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/angular-recursion/-/angular-recursion-1.0.5.tgz#cd405428a0bf55faf52eaa7988c1fe69cd930543" integrity sha1-zUBUKKC/Vfr1Lqp5iMH+ac2TBUM= angular-ui-bootstrap@^2.5.6: version "2.5.6" resolved "https://registry.yarnpkg.com/angular-ui-bootstrap/-/angular-ui-bootstrap-2.5.6.tgz#23937322ec641a6fbee16498cc32452aa199e7c5" integrity sha512-yzcHpPMLQl0232nDzm5P4iAFTFQ9dMw0QgFLuKYbDj9M0xJ62z0oudYD/Lvh1pWfRsukiytP4Xj6BHOSrSXP8A== angular@^1.7.9, angular@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/angular/-/angular-1.8.0.tgz#b1ec179887869215cab6dfd0df2e42caa65b1b51" integrity sha512-VdaMx+Qk0Skla7B5gw77a8hzlcOakwF8mjlW13DpIWIDlfqwAbSSLfd8N/qZnzEmQF4jC4iofInd3gE7vL8ZZg== ansi-colors@^3.0.0: version "3.2.4" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== ansi-html@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-regex@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" integrity 
sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.0, ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" anymatch@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== dependencies: micromatch "^3.1.4" normalize-path "^2.1.1" anymatch@~3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" aproba@^1.0.3, aproba@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity 
sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" arr-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-union@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= array-flatten@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= array-flatten@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= array-unique@^0.3.2: version "0.3.2" resolved 
"https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= arraybuffer.slice@~0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675" integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog== asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= asn1.js@^4.0.0: version "4.10.1" resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== dependencies: bn.js "^4.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-never@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/assert-never/-/assert-never-1.2.1.tgz#11f0e363bf146205fb08193b5c7b90f4d1cf44fe" integrity sha512-TaTivMB6pYI1kXwrFlEhLeGfOqoDNdTxjCdwRfFFkEA30Eu+k48W34nlok2EYWJfFFzqaEmichdNM7th6M5HNw== assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assert@^1.1.1: version "1.5.0" resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== dependencies: object-assign "^4.1.1" util "0.10.3" 
assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= async-each@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async-limiter@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== async@1.x: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= async@^2.0.0, async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== dependencies: lodash "^4.17.14" asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= atob@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== autoprefixer@^9.5.1: version "9.7.2" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.7.2.tgz#26cf729fbb709323b40171a874304884dcceffed" integrity sha512-LCAfcdej1182uVvPOZnytbq61AhnOZ/4JelDaJGDeNwewyU1AMaNthcHsyz1NRjTmd2FkurMckLWfkHg3Z//KA== dependencies: browserslist "^4.7.3" caniuse-lite "^1.0.30001010" chalk "^2.4.2" normalize-range "^0.1.2" num2fraction 
"^1.2.2" postcss "^7.0.23" postcss-value-parser "^4.0.2" aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" js-tokens "^3.0.2" babel-generator@^6.18.0: version "6.26.1" resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== dependencies: babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" detect-indent "^4.0.0" jsesc "^1.3.0" lodash "^4.17.4" source-map "^0.5.7" trim-right "^1.0.1" babel-loader@^8.0.5: version "8.0.6" resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb" integrity sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw== dependencies: find-cache-dir "^2.0.0" loader-utils "^1.0.2" mkdirp "^0.5.1" pify "^4.0.1" babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= dependencies: babel-runtime "^6.22.0" babel-plugin-dynamic-import-node@^2.3.0: version "2.3.0" resolved 
"https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== dependencies: object.assign "^4.1.0" babel-runtime@^6.0.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= dependencies: core-js "^2.4.0" regenerator-runtime "^0.11.0" babel-template@^6.16.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= dependencies: babel-runtime "^6.26.0" babel-traverse "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" lodash "^4.17.4" babel-traverse@^6.18.0, babel-traverse@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= dependencies: babel-code-frame "^6.26.0" babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" debug "^2.6.8" globals "^9.18.0" invariant "^2.2.2" lodash "^4.17.4" babel-types@^6.18.0, babel-types@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: babel-runtime "^6.26.0" esutils "^2.0.2" lodash "^4.17.4" to-fast-properties "^1.0.3" babel-walk@3.0.0-canary-5: version "3.0.0-canary-5" resolved "https://registry.yarnpkg.com/babel-walk/-/babel-walk-3.0.0-canary-5.tgz#f66ecd7298357aee44955f235a6ef54219104b11" integrity sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw== dependencies: "@babel/types" 
"^7.9.6" babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" integrity sha1-c5JncZI7Whl0etZmqlzUv5xunOg= base64-js@^1.0.2: version "1.3.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== base64id@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY= base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" class-utils "^0.3.5" component-emitter "^1.2.1" define-property "^1.0.0" isobject "^3.0.1" mixin-deep "^1.2.0" pascalcase "^0.1.1" batch@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved 
"https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" better-assert@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" integrity sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI= dependencies: callsite "1.0.0" big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== big.js@^5.2.2: version "5.2.2" resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^1.0.0: version "1.13.1" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binary-extensions@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c" integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow== blob@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.5.tgz#d680eeef25f8cd91ad533f5b01eed48e64caf683" integrity sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig== bluebird@^3.3.0, bluebird@^3.5.5: version "3.7.1" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.1.tgz#df70e302b471d7473489acf26a93d63b53f874de" integrity sha512-DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg== bn.js@^4.0.0, bn.js@^4.1.0, 
bn.js@^4.1.1, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== body-parser@1.19.0, body-parser@^1.16.1: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== dependencies: bytes "3.1.0" content-type "~1.0.4" debug "2.6.9" depd "~1.1.2" http-errors "1.7.2" iconv-lite "0.4.24" on-finished "~2.3.0" qs "6.7.0" raw-body "2.4.0" type-is "~1.6.17" bonjour@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= dependencies: array-flatten "^2.1.0" deep-equal "^1.0.1" dns-equal "^1.0.0" dns-txt "^2.0.2" multicast-dns "^6.0.1" multicast-dns-service-types "^1.1.0" boolbase@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= bootstrap@^3.1.1: version "3.4.1" resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-3.4.1.tgz#c3a347d419e289ad11f4033e3c4132b87c081d72" integrity sha512-yN5oZVmRCwe5aKwzRj6736nSmKDX7pLYwsXiCj/EYmo16hODaBiT4En5btW/jhBF/seV+XMx3aYwukYC3A49DA== brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" braces@^2.3.1, braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity 
sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" repeat-element "^1.1.2" snapdragon "^0.8.1" snapdragon-node "^2.0.1" split-string "^3.0.2" to-regex "^3.0.1" braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" brorand@^1.0.1, brorand@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== dependencies: buffer-xor "^1.0.3" cipher-base "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.3" inherits "^2.0.1" safe-buffer "^5.0.1" browserify-cipher@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== dependencies: browserify-aes "^1.0.4" browserify-des "^1.0.0" evp_bytestokey "^1.0.0" browserify-des@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== dependencies: cipher-base "^1.0.1" des.js "^1.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" browserify-rsa@^4.0.0: version 
"4.0.1" resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= dependencies: bn.js "^4.1.0" randombytes "^2.0.1" browserify-sign@^4.0.0: version "4.0.4" resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= dependencies: bn.js "^4.1.1" browserify-rsa "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.2" elliptic "^6.0.0" inherits "^2.0.1" parse-asn1 "^5.0.0" browserify-zlib@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== dependencies: pako "~1.0.5" browserslist@^4.6.0, browserslist@^4.7.3: version "4.16.6" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== dependencies: caniuse-lite "^1.0.30001219" colorette "^1.2.2" electron-to-chromium "^1.3.723" escalade "^3.1.1" node-releases "^1.1.71" buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== buffer-alloc@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== dependencies: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" buffer-fill@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== buffer-indexof@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= buffer@^4.3.0: version "4.9.2" resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" isarray "^1.0.0" "buildbot-build-common@link:../build_common": version "1.0.0" dependencies: "@babel/core" "^7.4.3" "@babel/plugin-syntax-dynamic-import" "^7.2.0" "@babel/plugin-transform-runtime" "^7.4.3" "@babel/preset-env" "^7.4.3" "@babel/runtime" "^7.4.3" autoprefixer "^9.5.1" babel-loader "^8.0.5" css-loader "^2.1.1" file-loader "^3.0.1" html-webpack-plugin "^3.2.0" import-glob-loader "^1.1.0" istanbul-instrumenter-loader "^3.0.1" jasmine-core "^3.4.0" karma "^4.1.0" karma-chrome-launcher "^2.2.0" karma-coverage "^1.1.2" karma-jasmine "^2.0.1" karma-sourcemap-loader "^0.3.7" karma-spec-reporter "^0.0.32" karma-webpack "^3.0.5" less "^3.9.0" less-loader "^5.0.0" mini-css-extract-plugin "^0.6.0" node-libs-browser "^2.2.0" null-loader "^1.0.0" postcss-loader "^3.0.0" pug "^3.0.1" raw-loader "^2.0.0" style-loader 
"^0.23.1" webpack "^4.30.0" webpack-cli "^3.3.1" webpack-dev-server "^3.3.1" webpack-fix-style-only-entries "^0.2.1" webpack-shell-plugin "^0.5.0" "buildbot-data-js@link:../data_module": version "3.0.1" dependencies: angular "^1.7.9" builtin-status-codes@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= bytes@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== cacache@^12.0.2, cacache@^12.0.3: version "12.0.3" resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== dependencies: bluebird "^3.5.5" chownr "^1.1.1" figgy-pudding "^3.5.1" glob "^7.1.4" graceful-fs "^4.1.15" infer-owner "^1.0.3" lru-cache "^5.1.1" mississippi "^3.0.0" mkdirp "^0.5.1" move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.3" ssri "^6.0.1" unique-filename "^1.1.1" y18n "^4.0.0" cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" component-emitter "^1.2.1" get-value "^2.0.6" has-value "^1.0.0" isobject "^3.0.1" set-value "^2.0.0" to-object-path "^0.3.0" union-value "^1.0.0" unset-value "^1.0.0" caller-callsite@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= dependencies: callsites "^2.0.0" caller-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= dependencies: caller-callsite "^2.0.0" callsite@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" integrity sha1-KAOY5dZkvXQDi28JBRU+borxvCA= callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= camel-case@3.0.x: version "3.0.0" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" integrity sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= dependencies: no-case "^2.2.0" upper-case "^1.1.1" camelcase-keys@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= dependencies: camelcase "^2.0.0" map-obj "^1.0.0" camelcase@^1.0.2: version "1.2.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" integrity sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk= camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= camelcase@^5.0.0, camelcase@^5.2.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== caniuse-lite@^1.0.30001010, 
caniuse-lite@^1.0.30001219: version "1.0.30001228" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz#bfdc5942cd3326fa51ee0b42fbef4da9d492a7fa" integrity sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A== caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= center-align@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" integrity sha1-qg0yYptu6XIgBBHL1EYckHvCt60= dependencies: align-text "^0.1.3" lazy-cache "^1.0.3" chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" chalk@^1.0.0, chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" character-parser@^2.1.1, character-parser@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" integrity sha1-x84o821LzZdE5f/CxfzeHHMmH8A= dependencies: is-regex "^1.0.3" chokidar@^2.0.2, chokidar@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== dependencies: anymatch 
"^2.0.0" async-each "^1.0.1" braces "^2.3.2" glob-parent "^3.1.0" inherits "^2.0.3" is-binary-path "^1.0.0" is-glob "^4.0.0" normalize-path "^3.0.0" path-is-absolute "^1.0.0" readdirp "^2.2.1" upath "^1.1.1" optionalDependencies: fsevents "^1.2.7" chokidar@^3.0.0: version "3.3.0" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6" integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A== dependencies: anymatch "~3.1.1" braces "~3.0.2" glob-parent "~5.1.0" is-binary-path "~2.1.0" is-glob "~4.0.1" normalize-path "~3.0.0" readdirp "~3.2.0" optionalDependencies: fsevents "~2.1.1" chownr@^1.1.1, chownr@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== chrome-trace-event@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== dependencies: tslib "^1.9.0" cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" define-property "^0.2.5" isobject "^3.0.0" static-extend "^0.1.1" clean-css@4.2.x, 
clean-css@^4.1.11: version "4.2.1" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" integrity sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== dependencies: source-map "~0.6.0" cliui@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" integrity sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE= dependencies: center-align "^0.1.1" right-align "^0.1.1" wordwrap "0.0.2" cliui@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== dependencies: string-width "^2.1.1" strip-ansi "^4.0.0" wrap-ansi "^2.0.0" cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== dependencies: string-width "^3.1.0" strip-ansi "^5.2.0" wrap-ansi "^5.1.0" clone@^2.1.1, clone@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit 
"^1.0.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colorette@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== colors@^1.1.0, colors@^1.1.2: version "1.4.0" resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@2.17.x: version "2.17.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== commander@^2.20.0, commander@^2.8.1: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@~2.19.0: version "2.19.0" resolved 
"https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= component-bind@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E= component-emitter@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== component-inherit@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" integrity sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM= compressible@~2.0.16: version "2.0.17" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.17.tgz#6e8c108a16ad58384a977f3a482ca20bff2f38c1" integrity sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw== dependencies: mime-db ">= 1.40.0 < 2" compression@^1.7.4: version "1.7.4" resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" bytes "3.0.0" compressible "~2.0.16" debug "2.6.9" on-headers "~1.0.2" safe-buffer "5.1.2" 
vary "~1.1.2" concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= concat-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" inherits "^2.0.3" readable-stream "^2.2.2" typedarray "^0.0.6" connect-history-api-fallback@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== connect@^3.6.0: version "3.7.0" resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== dependencies: debug "2.6.9" finalhandler "1.1.2" parseurl "~1.3.3" utils-merge "1.0.1" console-browserify@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= constantinople@^3.0.1, constantinople@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-3.1.2.tgz#d45ed724f57d3d10500017a7d3a889c1381ae647" integrity 
sha512-yePcBqEFhLOqSBtwYOGGS1exHo/s1xjekXiinh4itpNQGCu4KA1euPh1fg07N2wMITZXQkBz75Ntdt1ctGZouw== dependencies: "@types/babel-types" "^7.0.0" "@types/babylon" "^6.16.2" babel-types "^6.26.0" babylon "^6.18.0" constantinople@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-4.0.1.tgz#0def113fa0e4dc8de83331a5cf79c8b325213151" integrity sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw== dependencies: "@babel/parser" "^7.6.0" "@babel/types" "^7.6.1" constants-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= content-disposition@0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== dependencies: safe-buffer "5.1.2" content-type@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.5.0, convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== dependencies: safe-buffer "~5.1.1" cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= cookie@0.3.1: version "0.3.1" resolved 
"https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= cookie@0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== copy-concurrently@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== dependencies: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.0" copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= copy-webpack-plugin@^5.0.3: version "5.0.5" resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-5.0.5.tgz#731df6a837a2ef0f8f8e2345bdfe9b7c62a2da68" integrity sha512-7N68eIoQTyudAuxkfPT7HzGoQ+TsmArN/I3HFwG+lVE3FNzqvZKIiaxtYh4o3BIznioxUvx9j26+Rtsc9htQUQ== dependencies: cacache "^12.0.3" find-cache-dir "^2.1.0" glob-parent "^3.1.0" globby "^7.1.1" is-glob "^4.0.1" loader-utils "^1.2.3" minimatch "^3.0.4" normalize-path "^3.0.0" p-limit "^2.2.1" schema-utils "^1.0.0" serialize-javascript "^2.1.0" webpack-log "^2.0.0" core-js-compat@^3.1.1: version "3.4.2" resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.4.2.tgz#652fa7c54652b7f6586a893e37001df55ea2ac37" integrity sha512-W0Aj+LM3EAxxjD0Kp2o4be8UlnxIZHNupBv2znqrheR4aY2nOn91794k/xoSp+SxqqriiZpTsSwBtZr60cbkwQ== dependencies: browserslist "^4.7.3" semver "^6.3.0" core-js@^2.4.0: version "2.6.12" resolved 
"https://registry.yarnpkg.com/core-js/-/core-js-2.6.12.tgz#d9333dfa7b065e347cc5682219d6f690859cc2ec" integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cosmiconfig@^5.0.0: version "5.2.1" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== dependencies: import-fresh "^2.0.0" is-directory "^0.3.1" js-yaml "^3.13.1" parse-json "^4.0.0" create-ecdh@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== dependencies: bn.js "^4.1.0" elliptic "^6.0.0" create-hash@^1.1.0, create-hash@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== dependencies: cipher-base "^1.0.1" inherits "^2.0.1" md5.js "^1.3.4" ripemd160 "^2.0.1" sha.js "^2.4.0" create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: version "1.1.7" resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== dependencies: cipher-base "^1.0.3" create-hash "^1.1.0" inherits "^2.0.1" ripemd160 "^2.0.0" safe-buffer "^5.0.1" sha.js "^2.4.8" cross-spawn@6.0.5, cross-spawn@^6.0.0: version "6.0.5" resolved 
"https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== dependencies: nice-try "^1.0.4" path-key "^2.0.1" semver "^5.5.0" shebang-command "^1.2.0" which "^1.2.9" crypto-browserify@^3.11.0: version "3.12.0" resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== dependencies: browserify-cipher "^1.0.0" browserify-sign "^4.0.0" create-ecdh "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.0" diffie-hellman "^5.0.0" inherits "^2.0.1" pbkdf2 "^3.0.3" public-encrypt "^4.0.0" randombytes "^2.0.0" randomfill "^1.0.3" css-loader@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-2.1.1.tgz#d8254f72e412bb2238bb44dd674ffbef497333ea" integrity sha512-OcKJU/lt232vl1P9EEDamhoO9iKY3tIjY5GU+XDLblAykTdgs6Ux9P1hTHve8nFKy5KPpOXOsVI/hIwi3841+w== dependencies: camelcase "^5.2.0" icss-utils "^4.1.0" loader-utils "^1.2.3" normalize-path "^3.0.0" postcss "^7.0.14" postcss-modules-extract-imports "^2.0.0" postcss-modules-local-by-default "^2.0.6" postcss-modules-scope "^2.1.0" postcss-modules-values "^2.0.0" postcss-value-parser "^3.3.0" schema-utils "^1.0.0" css-select@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= dependencies: boolbase "~1.0.0" css-what "2.1" domutils "1.5.1" nth-check "~1.0.1" css-what@2.1: version "2.1.3" resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== cssesc@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== currently-unhandled@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= dependencies: array-find-index "^1.0.1" custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha1-XQKkaFCt8bSjF5RqOSj8y1v9BCU= cyclist@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= d3@^3.5.17: version "3.5.17" resolved "https://registry.yarnpkg.com/d3/-/d3-3.5.17.tgz#bc46748004378b21a360c9fc7cf5231790762fb8" integrity sha1-vEZ0gAQ3iyGjYMn8fPUjF5B2L7g= d@1, d@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" type "^1.0.1" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" date-format@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/date-format/-/date-format-2.1.0.tgz#31d5b5ea211cf5fd764cd38baf9d033df7e125cf" integrity sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA== dateformat@^1.0.6: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" integrity sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk= dependencies: 
get-stdin "^4.0.1" meow "^3.3.0" debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^3.1.1, debug@^3.2.5, debug@^3.2.6: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" debug@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== dependencies: ms "2.0.0" decamelize@^1.0.0, decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= decode-uri-component@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= deep-equal@^1.0.1: version "1.1.1" resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== dependencies: is-arguments "^1.0.4" is-date-object "^1.0.1" is-regex "^1.0.4" object-is "^1.0.1" 
object-keys "^1.1.1" regexp.prototype.flags "^1.2.0" deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= default-gateway@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== dependencies: execa "^1.0.0" ip-regex "^2.1.0" define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" define-property@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" isobject "^3.0.1" del@^4.1.1: version "4.1.1" 
resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== dependencies: "@types/glob" "^7.1.1" globby "^6.1.0" is-path-cwd "^2.0.0" is-path-in-cwd "^2.0.0" p-map "^2.0.0" pify "^4.0.1" rimraf "^2.6.3" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= des.js@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== dependencies: inherits "^2.0.1" minimalistic-assert "^1.0.0" destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= detect-indent@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= dependencies: repeating "^2.0.0" detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" 
integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= detect-node@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw= diffie-hellman@^5.0.0: version "5.0.3" resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== dependencies: bn.js "^4.1.0" miller-rabin "^4.0.0" randombytes "^2.0.0" dir-glob@^2.0.0: version "2.2.2" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4" integrity sha512-f9LBi5QWzIW3I6e//uxZoLBlUt9kcp66qo0sSCxL6YZKc75R1c4MFCoe/LaZiBGmgujvQdxc5Bn3QhfyvK5Hsw== dependencies: path-type "^3.0.0" dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= dns-packet@^1.3.1: version "1.3.4" resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.4.tgz#e3455065824a2507ba886c55a89963bb107dec6f" integrity sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA== dependencies: ip "^1.1.0" safe-buffer "^5.0.1" dns-txt@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= dependencies: buffer-indexof "^1.0.0" doctypes@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" integrity 
sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk= dom-converter@^0.2: version "0.2.0" resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" dom-serialize@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" integrity sha1-ViromZ9Evl6jB29UGdzVnrQ6yVs= dependencies: custom-event "~1.0.0" ent "~2.2.0" extend "^3.0.0" void-elements "^2.0.0" dom-serializer@0: version "0.2.2" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== dependencies: domelementtype "^2.0.1" entities "^2.0.0" domain-browser@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== domelementtype@1, domelementtype@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== domelementtype@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d" integrity sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ== domhandler@^2.3.0: version "2.4.2" resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" integrity 
sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== dependencies: domelementtype "1" domutils@1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= dependencies: dom-serializer "0" domelementtype "1" domutils@^1.5.1: version "1.7.0" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== dependencies: dom-serializer "0" domelementtype "1" duplexify@^3.4.2, duplexify@^3.6.0: version "3.7.1" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" inherits "^2.0.1" readable-stream "^2.0.0" stream-shift "^1.0.0" ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.3.723: version "1.3.738" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.738.tgz#aec24b091c82acbfabbdcce08076a703941d17ca" integrity sha512-vCMf4gDOpEylPSLPLSwAEsz+R3ShP02Y3cAKMZvTqule3XcPp7tgc/0ESI7IS6ZeyBlGClE50N53fIOkcIVnpw== elliptic@^6.0.0: version "6.5.4" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" integrity 
sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== dependencies: bn.js "^4.11.9" brorand "^1.1.0" hash.js "^1.0.0" hmac-drbg "^1.0.1" inherits "^2.0.4" minimalistic-assert "^1.0.1" minimalistic-crypto-utils "^1.0.1" emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== emojis-list@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" engine.io-client@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.2.1.tgz#6f54c0475de487158a1a7c77d10178708b6add36" integrity sha512-y5AbkytWeM4jQr7m/koQLc5AxpRKC1hEVUb/s1FUAWEJq5AzJJ4NLvzuKPuxtDi5Mq755WuDvZ6Iv2rXj4PTzw== dependencies: component-emitter "1.2.1" component-inherit "0.0.3" debug "~3.1.0" engine.io-parser "~2.1.1" has-cors "1.1.0" indexof "0.0.1" parseqs "0.0.5" parseuri "0.0.5" ws "~3.3.1" xmlhttprequest-ssl "~1.5.4" yeast "0.1.2" engine.io-parser@~2.1.0, engine.io-parser@~2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6" integrity 
sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA== dependencies: after "0.8.2" arraybuffer.slice "~0.0.7" base64-arraybuffer "0.1.5" blob "0.0.5" has-binary2 "~1.0.2" engine.io@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.2.1.tgz#b60281c35484a70ee0351ea0ebff83ec8c9522a2" integrity sha512-+VlKzHzMhaU+GsCIg4AoXF1UdDFjHHwMmMKqMJNDNLlUlejz58FCy4LBqB2YVJskHGYl06BatYWKP2TVdVXE5w== dependencies: accepts "~1.3.4" base64id "1.0.0" cookie "0.3.1" debug "~3.1.0" engine.io-parser "~2.1.0" ws "~3.3.1" enhanced-resolve@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" tapable "^1.0.0" enhanced-resolve@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" integrity sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== dependencies: graceful-fs "^4.1.2" memory-fs "^0.5.0" tapable "^1.0.0" ent@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= entities@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4" integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw== errno@^0.1.1, errno@^0.1.3, errno@~0.1.7: version 
"0.1.7" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== dependencies: prr "~1.0.1" error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es-abstract@^1.5.1: version "1.16.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.16.0.tgz#d3a26dc9c3283ac9750dca569586e976d9dcc06d" integrity sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg== dependencies: es-to-primitive "^1.2.0" function-bind "^1.1.1" has "^1.0.3" has-symbols "^1.0.0" is-callable "^1.1.4" is-regex "^1.0.4" object-inspect "^1.6.0" object-keys "^1.1.1" string.prototype.trimleft "^2.1.0" string.prototype.trimright "^2.1.0" es-to-primitive@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50: version "0.10.53" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== dependencies: es6-iterator "~2.0.3" es6-symbol "~3.1.3" next-tick "~1.0.0" es6-iterator@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= dependencies: d "1" es5-ext "^0.10.35" 
es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== dependencies: d "^1.0.1" ext "^1.1.2" escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@1.8.x: version "1.8.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" integrity sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg= dependencies: esprima "^2.7.1" estraverse "^1.9.1" esutils "^2.0.2" optionator "^0.8.1" optionalDependencies: source-map "~0.2.0" eslint-scope@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== dependencies: esrecurse "^4.1.0" estraverse "^4.1.1" esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE= esprima@^4.0.0: version "4.0.1" resolved 
"https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esrecurse@^4.1.0: version "4.2.1" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== dependencies: estraverse "^4.1.0" estraverse@^1.9.1: version "1.9.3" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= estraverse@^4.1.0, estraverse@^4.1.1: version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" integrity sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== eventsource@^1.0.7: version "1.0.7" resolved 
"https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== dependencies: original "^1.0.0" evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== dependencies: md5.js "^1.3.4" safe-buffer "^5.1.1" execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== dependencies: cross-spawn "^6.0.0" get-stream "^4.0.0" is-stream "^1.1.0" npm-run-path "^2.0.0" p-finally "^1.0.0" signal-exit "^3.0.0" strip-eof "^1.0.0" expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= dependencies: debug "^2.3.3" define-property "^0.2.5" extend-shallow "^2.0.1" posix-character-classes "^0.1.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= dependencies: homedir-polyfill "^1.0.1" express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== dependencies: accepts "~1.3.7" array-flatten "1.1.1" body-parser "1.19.0" content-disposition "0.5.3" content-type "~1.0.4" 
cookie "0.4.0" cookie-signature "1.0.6" debug "2.6.9" depd "~1.1.2" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" finalhandler "~1.1.2" fresh "0.5.2" merge-descriptors "1.0.1" methods "~1.1.2" on-finished "~2.3.0" parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.5" qs "6.7.0" range-parser "~1.2.1" safe-buffer "5.1.2" send "0.17.1" serve-static "1.14.1" setprototypeof "1.1.1" statuses "~1.5.0" type-is "~1.6.18" utils-merge "1.0.1" vary "~1.1.2" ext@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.2.0.tgz#8dd8d2dd21bcced3045be09621fa0cbf73908ba4" integrity sha512-0ccUQK/9e3NreLFg6K6np8aPyRgwycx+oFGtfx1dSp7Wj00Ozw9r05FgBRlzjf2XBM7LAzwgLyDscRrtSU91hA== dependencies: type "^2.0.0" extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= dependencies: assign-symbols "^1.0.0" is-extendable "^1.0.1" extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== dependencies: array-unique "^0.3.2" define-property "^1.0.0" expand-brackets "^2.1.4" extend-shallow "^2.0.1" fragment-cache "^0.2.1" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" extsprintf@1.3.0: version "1.3.0" 
resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= fast-deep-equal@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= fast-levenshtein@~2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= faye-websocket@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" integrity sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= dependencies: websocket-driver ">=0.5.1" faye-websocket@~0.11.1: version "0.11.3" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.3.tgz#5c0e9a8968e8912c286639fde977a8b209f2508e" integrity sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== dependencies: websocket-driver ">=0.5.1" figgy-pudding@^3.5.1: version "3.5.2" resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" integrity 
sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== file-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-3.0.1.tgz#f8e0ba0b599918b51adfe45d66d1e771ad560faa" integrity sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw== dependencies: loader-utils "^1.0.2" schema-utils "^1.0.0" fill-range@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= dependencies: extend-shallow "^2.0.1" is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range "^2.1.0" fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" finalhandler@1.1.2, finalhandler@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" on-finished "~2.3.0" parseurl "~1.3.3" statuses "~1.5.0" unpipe "~1.0.0" find-cache-dir@^2.0.0, find-cache-dir@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== dependencies: commondir "^1.0.1" make-dir "^2.0.0" pkg-dir "^3.0.0" find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= dependencies: path-exists 
"^2.0.0" pinkie-promise "^2.0.0" find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" findup-sync@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== dependencies: detect-file "^1.0.0" is-glob "^4.0.0" micromatch "^3.0.4" resolve-dir "^1.0.1" flatted@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.1.tgz#69e57caa8f0eacbc281d2e2cb458d46fdb449e08" integrity sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg== flush-write-stream@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== dependencies: inherits "^2.0.3" readable-stream "^2.3.6" follow-redirects@^1.0.0: version "1.13.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.0.tgz#b42e8d93a2a7eea5ed88633676d6597bc8e384db" integrity sha512-aq6gF1BEKje4a9i9+5jimNFIpq4Q1WiwBToeRK5NvZBd/TRsmW8BsJfOEGkr76TbOyPVD3OVDN910EcUNtRYEA== font-awesome@^4.7.0: version "4.7.0" resolved "https://registry.yarnpkg.com/font-awesome/-/font-awesome-4.7.0.tgz#8fa8cf0411a1a31afd07b06d2902bb9fc815a133" integrity sha1-j6jPBBGhoxr9B7BtKQK7n8gVoTM= for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= forever-agent@~0.6.1: version "0.6.1" resolved 
"https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" forwarded@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= dependencies: map-cache "^0.2.2" fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= from2@^2.1.0: version "2.3.0" resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= dependencies: inherits "^2.0.1" readable-stream "^2.0.0" fs-access@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" integrity sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o= dependencies: null-check "^1.0.0" fs-extra@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== dependencies: graceful-fs "^4.1.2" jsonfile "^4.0.0" universalify "^0.1.0" fs-minipass@^1.2.7: version "1.2.7" resolved 
"https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== dependencies: minipass "^2.6.0" fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= dependencies: graceful-fs "^4.1.2" iferr "^0.1.5" imurmurhash "^0.1.4" readable-stream "1 || 2" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.2.7: version "1.2.9" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: nan "^2.12.1" node-pre-gyp "^0.12.0" fsevents@~2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805" integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA== function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" has-unicode "^2.0.0" object-assign "^4.1.0" signal-exit "^3.0.0" string-width "^1.0.1" strip-ansi "^3.0.1" wide-align "^1.1.0" get-caller-file@^1.0.1: version "1.0.3" 
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-caller-file@^2.0.1: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-stdin@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= get-stream@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== dependencies: pump "^3.0.0" get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-parent@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= dependencies: is-glob "^3.1.0" path-dirname "^1.0.0" glob-parent@~5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.0.tgz#5f4c1d1e748d30cd73ad2944b3577a81b081e8c2" integrity sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw== dependencies: is-glob "^4.0.1" glob@^5.0.13, glob@^5.0.15: version "5.0.15" resolved 
"https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" integrity sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E= dependencies: inflight "^1.0.4" inherits "2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" global-modules@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== dependencies: global-prefix "^3.0.0" global-modules@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" is-windows "^1.0.1" resolve-dir "^1.0.0" global-prefix@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= dependencies: expand-tilde "^2.0.2" homedir-polyfill "^1.0.1" ini "^1.3.4" is-windows "^1.0.1" which "^1.2.14" global-prefix@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== dependencies: ini "^1.3.5" kind-of "^6.0.2" which "^1.3.1" globals@^11.1.0: version "11.12.0" resolved 
"https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== globby@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" glob "^7.0.3" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" globby@^7.1.1: version "7.1.1" resolved "https://registry.yarnpkg.com/globby/-/globby-7.1.1.tgz#fb2ccff9401f8600945dfada97440cca972b8680" integrity sha1-+yzP+UAfhgCUXfral0QMypcrhoA= dependencies: array-union "^1.0.1" dir-glob "^2.0.0" glob "^7.1.2" ignore "^3.3.5" pify "^3.0.0" slash "^1.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: version "4.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== "guanlecoja-ui@link:../guanlecoja-ui": version "2.0.0" dependencies: "@uirouter/angularjs" "^1.0.15" angular "^1.7.9" angular-animate "^1.7.9" angular-ui-bootstrap "^2.5.6" jquery "^3.4.0" lodash "^4.17.11" handle-thing@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754" integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ== handlebars@^4.0.1: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" integrity 
sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== dependencies: minimist "^1.2.5" neo-async "^2.6.0" source-map "^0.6.1" wordwrap "^1.0.0" optionalDependencies: uglify-js "^3.1.4" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.0: version "5.1.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: ajv "^6.5.5" har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-binary2@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/has-binary2/-/has-binary2-1.0.3.tgz#7776ac627f3ea77250cfc332dab7ddf5e4f5d11d" integrity sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw== dependencies: isarray "2.0.1" has-cors@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk= has-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-symbols@^1.0.0, has-symbols@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" integrity 
sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= has-value@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= dependencies: get-value "^2.0.3" has-values "^0.1.4" isobject "^2.0.0" has-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: get-value "^2.0.6" has-values "^1.0.0" isobject "^3.0.0" has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= has-values@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= dependencies: is-number "^3.0.0" kind-of "^4.0.0" has@^1.0.1, has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hash-base@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" integrity 
sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== dependencies: inherits "^2.0.3" minimalistic-assert "^1.0.1" he@1.2.x: version "1.2.0" resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== hmac-drbg@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= dependencies: hash.js "^1.0.3" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.1" homedir-polyfill@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== hpack.js@^2.1.6: version "2.1.6" resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= dependencies: inherits "^2.0.1" obuf "^1.0.0" readable-stream "^2.0.1" wbuf "^1.1.0" html-entities@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8= html-minifier@^3.2.3: version "3.5.21" resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== 
dependencies: camel-case "3.0.x" clean-css "4.2.x" commander "2.17.x" he "1.2.x" param-case "2.1.x" relateurl "0.2.x" uglify-js "3.4.x" html-webpack-plugin@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= dependencies: html-minifier "^3.2.3" loader-utils "^0.2.16" lodash "^4.17.3" pretty-error "^2.0.2" tapable "^1.0.0" toposort "^1.0.0" util.promisify "1.0.0" htmlparser2@^3.3.0: version "3.10.1" resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== dependencies: domelementtype "^1.3.1" domhandler "^2.3.0" domutils "^1.5.1" entities "^1.1.1" inherits "^2.0.1" readable-stream "^3.1.1" http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= http-errors@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" http-errors@~1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.0" statuses ">= 1.4.0 < 2" http-errors@~1.7.2: version "1.7.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" integrity 
sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== dependencies: depd "~1.1.2" inherits "2.0.4" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" "http-parser-js@>=0.4.0 <0.4.11": version "0.4.10" resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4" integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q= http-proxy-middleware@0.19.1: version "0.19.1" resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== dependencies: http-proxy "^1.17.0" is-glob "^4.0.0" lodash "^4.17.11" micromatch "^3.1.10" http-proxy@^1.13.0, http-proxy@^1.17.0: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" requires-port "^1.0.0" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= iconv-lite@0.4.24, iconv-lite@^0.4.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" 
icss-replace-symbols@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= icss-utils@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== dependencies: postcss "^7.0.14" ieee754@^1.1.4: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== iferr@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= ignore-walk@^3.0.1: version "3.0.3" resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== dependencies: minimatch "^3.0.4" ignore@^3.3.5: version "3.3.10" resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043" integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug== image-size@~0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= import-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= dependencies: import-from "^2.1.0" import-fresh@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= dependencies: caller-path "^2.0.0" resolve-from "^3.0.0" import-from@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" integrity sha1-M1238qev/VOqpHHUuAId7ja387E= dependencies: resolve-from "^3.0.0" import-glob-loader@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/import-glob-loader/-/import-glob-loader-1.1.0.tgz#98d84c0f661c8ba9f821d9ddb7c6b6dc8e97eca2" integrity sha1-mNhMD2Yci6n4Idndt8a23I6X7KI= dependencies: glob "^5.0.13" loader-utils "^0.2.10" import-local@2.0.0, import-local@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== dependencies: pkg-dir "^3.0.0" resolve-cwd "^2.0.0" imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= dependencies: repeating "^2.0.0" indexes-of@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= indexof@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= infer-owner@^1.0.3: version "1.0.4" resolved 
"https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: version "1.3.7" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84" integrity sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ== internal-ip@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" integrity sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== dependencies: default-gateway "^4.2.0" ipaddr.js "^1.9.0" interpret@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== 
invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" invert-kv@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= ip@^1.1.0, ip@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= ipaddr.js@1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== ipaddr.js@^1.9.0: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== is-absolute-url@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== is-accessor-descriptor@^0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: version "1.0.0" 
resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arguments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.0.4.tgz#3faf966c7cba0ff437fb31f6250082fcf0448cf3" integrity sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA== is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-callable@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== is-core-module@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" integrity 
sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== dependencies: has "^1.0.3" is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== dependencies: kind-of "^6.0.0" is-date-object@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" is-data-descriptor "^1.0.0" kind-of "^6.0.2" is-directory@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= is-expression@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-3.0.0.tgz#39acaa6be7fd1f3471dc42c7416e61c24317ac9f" integrity 
sha1-Oayqa+f9HzRx3ELHQW5hwkMXrJ8= dependencies: acorn "~4.0.2" object-assign "^4.0.1" is-expression@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-4.0.0.tgz#c33155962abf21d0afd2552514d67d2ec16fd2ab" integrity sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A== dependencies: acorn "^7.1.1" object-assign "^4.1.1" is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= is-extendable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^2.1.0, is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" integrity sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= is-glob@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-cwd@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== is-path-in-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== dependencies: is-path-inside "^2.1.0" is-path-inside@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== dependencies: path-is-inside "^1.0.2" is-plain-obj@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" 
integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= is-regex@^1.0.3, is-regex@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= dependencies: has "^1.0.1" is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-symbol@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== dependencies: has-symbols "^1.0.1" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== is-wsl@^1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isarray@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.1.tgz#a37d94ed9cda2d59865c9f76fe596ee1f338741e" integrity sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4= isbinaryfile@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" integrity sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== dependencies: buffer-alloc "^1.2.0" isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul-instrumenter-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/istanbul-instrumenter-loader/-/istanbul-instrumenter-loader-3.0.1.tgz#9957bd59252b373fae5c52b7b5188e6fde2a0949" integrity sha512-a5SPObZgS0jB/ixaKSMdn6n/gXSrK2S6q/UfRJBT3e6gQmVjwZROTODQsYW5ZNwOu78hG62Y3fWlebaVOL0C+w== dependencies: convert-source-map "^1.5.0" 
istanbul-lib-instrument "^1.7.3" loader-utils "^1.1.0" schema-utils "^0.3.0" istanbul-lib-coverage@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0" integrity sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ== istanbul-lib-instrument@^1.7.3: version "1.10.2" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca" integrity sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A== dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" babylon "^6.18.0" istanbul-lib-coverage "^1.2.1" semver "^5.3.0" istanbul@^0.4.0: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" integrity sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs= dependencies: abbrev "1.0.x" async "1.x" escodegen "1.8.x" esprima "2.7.x" glob "^5.0.15" handlebars "^4.0.1" js-yaml "3.x" mkdirp "0.5.x" nopt "3.x" once "1.x" resolve "1.1.x" supports-color "^3.1.0" which "^1.1.1" wordwrap "^1.0.0" jasmine-core@^3.3, jasmine-core@^3.4.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-3.5.0.tgz#132c23e645af96d85c8bca13c8758b18429fc1e4" integrity sha512-nCeAiw37MIMA9w9IXso7bRaLl+c/ef3wnxsoSAlYrzS+Ot0zTG6nU8G/cIfGkqpkjX2wNaIW9RFG0TwIFnG6bA== jquery@^3.4.0: version "3.5.1" resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.5.1.tgz#d7b4d08e1bfdb86ad2f1a3d039ea17304717abb5" integrity sha512-XwIBPqcMn57FxfT+Go5pzySnm4KWkT1Tv7gjrpT1srtf8Weynl6R273VJ5GjkRb51IzMp5nbaPjJXMWeju2MKg== js-levenshtein@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" integrity 
sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-stringify@^1.0.1, js-stringify@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" integrity sha1-Fzb939lyTyijaCrcYjCufk6Weds= "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@3.x, js-yaml@^3.13.1: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== json-schema-traverse@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= json3@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== json5@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== dependencies: minimist "^1.2.0" json5@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" 
integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== dependencies: minimist "^1.2.0" jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= optionalDependencies: graceful-fs "^4.1.6" jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" jstransformer@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" integrity sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM= dependencies: is-promise "^2.0.0" promise "^7.0.1" karma-chrome-launcher@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" integrity sha512-uf/ZVpAabDBPvdPdveyk1EPgbnloPvFFGgmRhYLTDH7gEB4nZdSBk8yTU47w1g/drLSx5uMOkjKk7IWKfWg/+w== dependencies: fs-access "^1.0.0" which "^1.2.1" karma-coverage@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/karma-coverage/-/karma-coverage-1.1.2.tgz#cc09dceb589a83101aca5fe70c287645ef387689" integrity sha512-eQawj4Cl3z/CjxslYy9ariU4uDh7cCNFZHNWXWRpl0pNeblY/4wHR7M7boTYXWrn9bY0z2pZmr11eKje/S/hIw== dependencies: dateformat "^1.0.6" istanbul "^0.4.0" lodash "^4.17.0" minimatch "^3.0.0" source-map "^0.5.1" karma-jasmine@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-2.0.1.tgz#26e3e31f2faf272dd80ebb0e1898914cc3a19763" integrity sha512-iuC0hmr9b+SNn1DaUD2QEYtUxkS1J+bSJSn7ejdEexs7P8EYvA1CWkEdrDQ+8jVH3AgWlCNwjYsT1chjcNW9lA== dependencies: jasmine-core "^3.3" karma-sourcemap-loader@^0.3.7: version "0.3.7" resolved 
"https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8" integrity sha1-kTIsd/jxPUb+0GKwQuEAnUxFBdg= dependencies: graceful-fs "^4.1.2" karma-spec-reporter@^0.0.32: version "0.0.32" resolved "https://registry.yarnpkg.com/karma-spec-reporter/-/karma-spec-reporter-0.0.32.tgz#2e9c7207ea726771260259f82becb543209e440a" integrity sha1-LpxyB+pyZ3EmAln4K+y1QyCeRAo= dependencies: colors "^1.1.2" karma-webpack@^3.0.5: version "3.0.5" resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-3.0.5.tgz#1ff1e3a690fb73ae95ee95f9ab58f341cfc7b40f" integrity sha512-nRudGJWstvVuA6Tbju9tyGUfXTtI1UXMXoRHVmM2/78D0q6s/Ye2IC157PKNDC15PWFGR0mVIRtWLAdcfsRJoA== dependencies: async "^2.0.0" babel-runtime "^6.0.0" loader-utils "^1.0.0" lodash "^4.0.0" source-map "^0.5.6" webpack-dev-middleware "^2.0.6" karma@^4.1.0: version "4.4.1" resolved "https://registry.yarnpkg.com/karma/-/karma-4.4.1.tgz#6d9aaab037a31136dc074002620ee11e8c2e32ab" integrity sha512-L5SIaXEYqzrh6b1wqYC42tNsFMx2PWuxky84pK9coK09MvmL7mxii3G3bZBh/0rvD27lqDd0le9jyhzvwif73A== dependencies: bluebird "^3.3.0" body-parser "^1.16.1" braces "^3.0.2" chokidar "^3.0.0" colors "^1.1.0" connect "^3.6.0" di "^0.0.1" dom-serialize "^2.2.0" flatted "^2.0.0" glob "^7.1.1" graceful-fs "^4.1.2" http-proxy "^1.13.0" isbinaryfile "^3.0.0" lodash "^4.17.14" log4js "^4.0.0" mime "^2.3.1" minimatch "^3.0.2" optimist "^0.6.1" qjobs "^1.1.4" range-parser "^1.2.0" rimraf "^2.6.0" safe-buffer "^5.0.1" socket.io "2.1.1" source-map "^0.6.1" tmp "0.0.33" useragent "2.3.0" killable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved 
"https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== lazy-cache@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" integrity sha1-odePw6UEdMuAhF07O24dpJpEbo4= lcid@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== dependencies: invert-kv "^2.0.0" less-loader@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-5.0.0.tgz#498dde3a6c6c4f887458ee9ed3f086a12ad1b466" integrity sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg== dependencies: clone "^2.1.1" loader-utils "^1.1.0" pify "^4.0.1" less@^3.9.0: version "3.10.3" resolved "https://registry.yarnpkg.com/less/-/less-3.10.3.tgz#417a0975d5eeecc52cff4bcfa3c09d35781e6792" integrity sha512-vz32vqfgmoxF1h3K4J+yKCtajH0PWmjkIFgbs5d78E/c/e+UQTnI+lWK+1eQRE95PXM2mC3rJlLSSP9VQHnaow== dependencies: clone "^2.1.2" optionalDependencies: errno "^0.1.1" 
graceful-fs "^4.1.2" image-size "~0.5.0" mime "^1.4.1" mkdirp "^0.5.0" promise "^7.1.1" request "^2.83.0" source-map "~0.6.0" levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" pify "^2.0.0" pinkie-promise "^2.0.0" strip-bom "^2.0.0" loader-runner@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== loader-utils@1.2.3, loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== dependencies: big.js "^5.2.2" emojis-list "^2.0.0" json5 "^1.0.1" loader-utils@^0.2.10, loader-utils@^0.2.16: version "0.2.17" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= dependencies: big.js "^3.1.3" emojis-list "^2.0.0" json5 "^0.5.0" object-assign "^4.0.1" locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" lodash@^4.0.0, 
lodash@^4.17.0, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.21, lodash@^4.17.3, lodash@^4.17.4: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-symbols@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== dependencies: chalk "^2.0.1" log4js@^4.0.0: version "4.5.1" resolved "https://registry.yarnpkg.com/log4js/-/log4js-4.5.1.tgz#e543625e97d9e6f3e6e7c9fc196dd6ab2cae30b5" integrity sha512-EEEgFcE9bLgaYUKuozyFfytQM2wDHtXn4tAN41pkaxpNjAykv11GVdeI4tHtmPWW4Xrgh9R/2d7XYghDVjbKKw== dependencies: date-format "^2.0.0" debug "^4.1.1" flatted "^2.0.0" rfdc "^1.1.4" streamroller "^1.0.6" loglevel@^1.6.4: version "1.6.6" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.6.tgz#0ee6300cc058db6b3551fa1c4bf73b83bb771312" integrity sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ== loglevelnext@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/loglevelnext/-/loglevelnext-1.0.5.tgz#36fc4f5996d6640f539ff203ba819641680d75a2" integrity sha512-V/73qkPuJmx4BcBF19xPBr+0ZRVBhc4POxvZTZdMeXpJ4NItXSJ/MSwuFT0kQJlCbXvdlZoQQ/418bS1y9Jh6A== dependencies: es6-symbol "^3.1.1" object.assign "^4.1.0" longest@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" integrity sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc= loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity 
sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" loud-rejection@^1.0.0, loud-rejection@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= dependencies: currently-unhandled "^0.4.1" signal-exit "^3.0.0" lower-case@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= lru-cache@4.1.x: version "4.1.5" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" make-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== dependencies: pify "^4.0.1" semver "^5.6.0" mamacro@^0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== map-age-cleaner@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" integrity 
sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== dependencies: p-defer "^1.0.0" map-cache@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" md5.js@^1.3.4: version "1.3.5" resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== dependencies: hash-base "^3.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= mem@^4.0.0: version "4.3.0" resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== dependencies: map-age-cleaner "^0.1.1" mimic-fn "^2.0.0" p-is-promise "^2.0.0" memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= dependencies: errno "^0.1.3" readable-stream "^2.0.1" memory-fs@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" 
integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== dependencies: errno "^0.1.3" readable-stream "^2.0.1" meow@^3.3.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= dependencies: camelcase-keys "^2.0.0" decamelize "^1.1.2" loud-rejection "^1.0.0" map-obj "^1.0.1" minimist "^1.1.3" normalize-package-data "^2.3.4" object-assign "^4.0.1" read-pkg-up "^1.0.1" redent "^1.0.0" trim-newlines "^1.0.0" merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" braces "^2.3.1" define-property "^2.0.2" extend-shallow "^3.0.2" extglob "^2.0.4" fragment-cache "^0.2.1" kind-of "^6.0.2" nanomatch "^1.2.9" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.2" miller-rabin@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== dependencies: bn.js "^4.0.0" brorand "^1.0.1" mime-db@1.42.0, "mime-db@>= 1.40.0 < 2": version "1.42.0" resolved 
"https://registry.yarnpkg.com/mime-db/-/mime-db-1.42.0.tgz#3e252907b4c7adb906597b4b65636272cf9e7bac" integrity sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ== mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.19, mime-types@~2.1.24: version "2.1.25" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.25.tgz#39772d46621f93e2a80a856c53b86a62156a6437" integrity sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg== dependencies: mime-db "1.42.0" mime@1.6.0, mime@^1.4.1: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.1.0, mime@^2.3.1, mime@^2.4.4: version "2.4.4" resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== mimic-fn@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== mini-css-extract-plugin@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz#a3f13372d6fcde912f3ee4cd039665704801e3b9" integrity sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw== dependencies: loader-utils "^1.1.0" normalize-url "^2.0.1" schema-utils "^1.0.0" webpack-sources "^1.1.0" minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity 
sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimalistic-crypto-utils@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= "minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= minipass@^2.6.0, minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" minizlib@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== dependencies: minipass "^2.9.0" mississippi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" integrity 
sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== dependencies: concat-stream "^1.5.0" duplexify "^3.4.2" end-of-stream "^1.1.0" flush-write-stream "^1.0.0" from2 "^2.1.0" parallel-transform "^1.1.0" pump "^3.0.0" pumpify "^1.3.3" stream-each "^1.1.0" through2 "^2.0.0" mixin-deep@^1.2.0: version "1.3.2" resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" moment@^2.24.0: version "2.24.0" resolved "https://registry.yarnpkg.com/moment/-/moment-2.24.0.tgz#0d055d53f5052aa653c9f6eb68bb5d12bf5c2b5b" integrity sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg== move-concurrently@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= dependencies: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.3" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== ms@^2.1.1: version "2.1.2" resolved 
"https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== multicast-dns-service-types@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= multicast-dns@^6.0.1: version "6.2.3" resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" integrity sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== dependencies: dns-packet "^1.3.1" thunky "^1.0.2" nan@^2.12.1: version "2.14.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" define-property "^2.0.2" extend-shallow "^3.0.2" fragment-cache "^0.2.1" is-windows "^1.0.2" kind-of "^6.0.2" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" needle@^2.2.1: version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" negotiator@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity 
sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== neo-async@^2.5.0, neo-async@^2.6.0, neo-async@^2.6.1: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== next-tick@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== no-case@^2.2.0: version "2.3.2" resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== dependencies: lower-case "^1.1.1" node-forge@0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579" integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ== node-libs-browser@^2.2.0, node-libs-browser@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== dependencies: assert "^1.1.1" browserify-zlib "^0.2.0" buffer "^4.3.0" console-browserify "^1.1.0" constants-browserify "^1.0.0" crypto-browserify "^3.11.0" domain-browser "^1.1.1" events "^3.0.0" https-browserify "^1.0.0" os-browserify "^0.3.0" path-browserify "0.0.1" process "^0.11.10" punycode "^1.2.4" querystring-es3 "^0.2.0" 
readable-stream "^2.3.3" stream-browserify "^2.0.1" stream-http "^2.7.2" string_decoder "^1.0.0" timers-browserify "^2.0.4" tty-browserify "0.0.0" url "^0.11.0" util "^0.11.0" vm-browserify "^1.0.1" node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" needle "^2.2.1" nopt "^4.0.1" npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.2.7" rimraf "^2.6.1" semver "^5.3.0" tar "^4" node-releases@^1.1.71: version "1.1.72" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe" integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== nopt@3.x: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" integrity sha1-xkZdvwirzU2zWTF/eaxopkayj/k= dependencies: abbrev "1" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: 
remove-trailing-separator "^1.0.1" normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= normalize-url@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== dependencies: prepend-http "^2.0.0" query-string "^5.0.1" sort-keys "^2.0.0" npm-bundled@^1.0.1: version "1.0.6" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== npm-packlist@^1.1.6: version "1.4.6" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.6.tgz#53ba3ed11f8523079f1457376dd379ee4ea42ff4" integrity sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings 
"~1.1.0" gauge "~2.7.3" set-blocking "~2.0.0" nth-check@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== dependencies: boolbase "~1.0.0" null-check@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" integrity sha1-l33/1xdgErnsMNKjnbXPcqBDnt0= null-loader@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-1.0.0.tgz#90e85798e50e9dd1d568495a44e74829dec26744" integrity sha512-mYLDjDVTkjTlFoidxRhzO75rdcwfVXfw5G5zpj8sXnBkHtKJxMk4hTcRR4i5SOhDB6EvcQuYriy6IV23eq6uog== dependencies: loader-utils "^1.2.3" schema-utils "^1.0.0" num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" integrity sha1-8MaapQ78lbhmwYb0AKM3acsvEpE= object-copy@^0.1.0: version "0.1.0" 
resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" object-inspect@^1.6.0: version "1.7.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== object-is@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.0.1.tgz#0aa60ec9989a0b3ed795cf4d06f62cf1ad6539b6" integrity sha1-CqYOyZiaCz7Xlc9NBvYs8a1lObY= object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== dependencies: define-properties "^1.1.2" function-bind "^1.1.1" has-symbols "^1.0.0" object-keys "^1.0.11" object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= dependencies: define-properties "^1.1.2" es-abstract "^1.5.1" object.pick@^1.3.0: version "1.3.0" resolved 
"https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= dependencies: isobject "^3.0.1" obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== on-finished@~2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@1.x, once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" opn@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== dependencies: is-wsl "^1.1.0" optimist@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.6" levn "~0.3.0" prelude-ls 
"~1.1.2" type-check "~0.3.2" word-wrap "~1.2.3" original@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== dependencies: url-parse "^1.4.3" os-browserify@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-locale@^3.0.0, os-locale@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== dependencies: execa "^1.0.0" lcid "^2.0.0" mem "^4.0.0" os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" outdated-browser-rework@^2.8.0: version "2.9.0" resolved "https://registry.yarnpkg.com/outdated-browser-rework/-/outdated-browser-rework-2.9.0.tgz#b10bea739ed799a6e51ff1fffe7e4dcd68b1e003" integrity sha512-JtHxFB1h9iN7MQmeK0TUPKLUBDKA1h3YB4UJ2lbv+ixvHjiMlzZ+IphpysoaB3YqBGU33/2i5L0Wufp8Issc1g== dependencies: ua-parser-js "^0.7.18" p-defer@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== p-limit@^2.0.0, p-limit@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.1.tgz#aa07a788cc3151c939b5131f63570f0dd2009537" integrity sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg== dependencies: p-try "^2.0.0" p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-map@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== p-retry@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== dependencies: retry "^0.12.0" p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@~1.0.5: version "1.0.10" resolved 
"https://registry.yarnpkg.com/pako/-/pako-1.0.10.tgz#4328badb5086a426aa90f541977d4955da5c9732" integrity sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw== parallel-transform@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== dependencies: cyclist "^1.0.1" inherits "^2.0.3" readable-stream "^2.1.5" param-case@2.1.x: version "2.1.1" resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= dependencies: no-case "^2.2.0" parse-asn1@^5.0.0: version "5.1.5" resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== dependencies: asn1.js "^4.0.0" browserify-aes "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.0" pbkdf2 "^3.0.3" safe-buffer "^5.1.1" parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse-json@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= dependencies: error-ex "^1.3.1" json-parse-better-errors "^1.0.1" parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= parseqs@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" 
integrity sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0= dependencies: better-assert "~1.0.0" parseuri@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" integrity sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo= dependencies: better-assert "~1.0.0" parseurl@~1.3.2, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascalcase@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= path-browserify@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-is-inside@^1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= path-key@^2.0.0, path-key@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= path-parse@^1.0.6: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify "^2.0.0" pinkie-promise "^2.0.0" path-type@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg== dependencies: pify "^3.0.0" pbkdf2@^3.0.3: version "3.0.17" resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" ripemd160 "^2.0.1" safe-buffer "^5.0.1" sha.js "^2.4.8" performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= picomatch@^2.0.4: version "2.1.1" resolved 
"https://registry.yarnpkg.com/picomatch/-/picomatch-2.1.1.tgz#ecdfbea7704adb5fe6fb47f9866c4c0e15e905c5" integrity sha512-OYMyqkKzK7blWO/+XZYP6w8hH0LDvkBvdvKukti+7kqYFCiEAk+gI3DWnryapc0Dau05ugGTy0foQ6mqn4AHYA== pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pify@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= pkg-dir@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== dependencies: find-up "^3.0.0" popper.js@^1.15.0: version "1.16.0" resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.16.0.tgz#2e1816bcbbaa518ea6c2e15a466f4cb9c6e2fbb3" integrity sha512-+G+EkOPoE5S/zChTpmBSSDYmhXJ5PsW8eMhH8cP/CQHMFPBG/kC9Y5IIw6qNYgdJ+/COf0ddY2li28iHaZRSjw== portfinder@^1.0.25: version "1.0.25" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.25.tgz#254fd337ffba869f4b9d37edc298059cb4d35eca" integrity 
sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg== dependencies: async "^2.6.2" debug "^3.1.1" mkdirp "^0.5.1" posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= postcss-load-config@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.0.tgz#c84d692b7bb7b41ddced94ee62e8ab31b417b003" integrity sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q== dependencies: cosmiconfig "^5.0.0" import-cwd "^2.0.0" postcss-loader@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== dependencies: loader-utils "^1.1.0" postcss "^7.0.0" postcss-load-config "^2.0.0" schema-utils "^1.0.0" postcss-modules-extract-imports@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== dependencies: postcss "^7.0.5" postcss-modules-local-by-default@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.6.tgz#dd9953f6dd476b5fd1ef2d8830c8929760b56e63" integrity sha512-oLUV5YNkeIBa0yQl7EYnxMgy4N6noxmiwZStaEJUSe2xPMcdNc8WmBQuQCx18H5psYbVxz8zoHk0RAAYZXP9gA== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-value-parser "^3.3.1" postcss-modules-scope@^2.1.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.1.0.tgz#ad3f5bf7856114f6fcab901b0502e2a2bc39d4eb" integrity sha512-91Rjps0JnmtUB0cujlc8KIKCsJXWjzuxGeT/+Q2i2HXKZ7nBUeF9YQTZZTNvHVoNYj1AthsjnGLtqDUE0Op79A== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-modules-values@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-2.0.0.tgz#479b46dc0c5ca3dc7fa5270851836b9ec7152f64" integrity sha512-Ki7JZa7ff1N3EIMlPnGTZfUMe69FFwiQPnVSXC9mnn3jozCRBYIxiZd44yJOV2AmabOo4qFf8s0dC/+lweG7+w== dependencies: icss-replace-symbols "^1.1.0" postcss "^7.0.6" postcss-selector-parser@^6.0.0: version "6.0.2" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz#934cf799d016c83411859e09dcecade01286ec5c" integrity sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg== dependencies: cssesc "^3.0.0" indexes-of "^1.0.1" uniq "^1.0.1" postcss-value-parser@^3.3.0, postcss-value-parser@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== postcss-value-parser@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz#482282c09a42706d1fc9a069b73f44ec08391dc9" integrity sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ== postcss@^7.0.0, postcss@^7.0.14, postcss@^7.0.23, postcss@^7.0.5, postcss@^7.0.6: version "7.0.36" resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.36.tgz#056f8cffa939662a8f5905950c07d5285644dfcb" integrity sha512-BebJSIUMwJHRH0HAQoxN4u1CN86glsrwsW0q7T+/m44eXOUAxSNdHRkNZPYz5vVUbg17hFgOQDE7fZk7li3pZw== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" 
prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= prepend-http@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= pretty-error@^2.0.2: version "2.1.1" resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= dependencies: renderkid "^2.0.1" utila "~0.4" private@^0.1.6: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= promise-inflight@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= promise@^7.0.1, promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== dependencies: asap "~2.0.3" proxy-addr@~2.0.5: version "2.0.5" resolved 
"https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== dependencies: forwarded "~0.1.2" ipaddr.js "1.9.0" prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= psl@^1.1.24: version "1.4.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.4.0.tgz#5dd26156cdb69fa1fdb8ab1991667d3f80ced7c2" integrity sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw== public-encrypt@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== dependencies: bn.js "^4.1.0" browserify-rsa "^4.0.0" create-hash "^1.1.0" parse-asn1 "^5.0.0" randombytes "^2.0.1" safe-buffer "^5.1.2" pug-attrs@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-2.0.4.tgz#b2f44c439e4eb4ad5d4ef25cac20d18ad28cc336" integrity sha512-TaZ4Z2TWUPDJcV3wjU3RtUXMrd3kM4Wzjbe3EWnSsZPsJ3LDI0F3yCnf2/W7PPFF+edUFQ0HgDL1IoxSz5K8EQ== dependencies: constantinople "^3.0.1" js-stringify "^1.0.1" pug-runtime "^2.0.5" pug-attrs@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-3.0.0.tgz#b10451e0348165e31fad1cc23ebddd9dc7347c41" integrity sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA== dependencies: constantinople "^4.0.1" js-stringify "^1.0.2" pug-runtime "^3.0.0" pug-cli@^1.0.0-alpha6: version "1.0.0-alpha6" resolved 
"https://registry.yarnpkg.com/pug-cli/-/pug-cli-1.0.0-alpha6.tgz#1ca539ea4ac0ebb69ce4aae84aeed5d64ffe6501" integrity sha1-HKU56krA67ac5KroSu7V1k/+ZQE= dependencies: chalk "^1.0.0" commander "^2.8.1" mkdirp "^0.5.1" pug "^2.0.0-alpha7" pug-code-gen@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-2.0.3.tgz#122eb9ada9b5bf601705fe15aaa0a7d26bc134ab" integrity sha512-r9sezXdDuZJfW9J91TN/2LFbiqDhmltTFmGpHTsGdrNGp3p4SxAjjXEfnuK2e4ywYsRIVP0NeLbSAMHUcaX1EA== dependencies: constantinople "^3.1.2" doctypes "^1.1.0" js-stringify "^1.0.1" pug-attrs "^2.0.4" pug-error "^1.3.3" pug-runtime "^2.0.5" void-elements "^2.0.1" with "^5.0.0" pug-code-gen@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-3.0.2.tgz#ad190f4943133bf186b60b80de483100e132e2ce" integrity sha512-nJMhW16MbiGRiyR4miDTQMRWDgKplnHyeLvioEJYbk1RsPI3FuA3saEP8uwnTb2nTJEKBU90NFVWJBk4OU5qyg== dependencies: constantinople "^4.0.1" doctypes "^1.1.0" js-stringify "^1.0.2" pug-attrs "^3.0.0" pug-error "^2.0.0" pug-runtime "^3.0.0" void-elements "^3.1.0" with "^7.0.0" pug-error@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-1.3.3.tgz#f342fb008752d58034c185de03602dd9ffe15fa6" integrity sha512-qE3YhESP2mRAWMFJgKdtT5D7ckThRScXRwkfo+Erqga7dyJdY3ZquspprMCj/9sJ2ijm5hXFWQE/A3l4poMWiQ== pug-error@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-2.0.0.tgz#5c62173cb09c34de2a2ce04f17b8adfec74d8ca5" integrity sha512-sjiUsi9M4RAGHktC1drQfCr5C5eriu24Lfbt4s+7SykztEOwVZtbFk1RRq0tzLxcMxMYTBR+zMQaG07J/btayQ== pug-filters@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-3.1.1.tgz#ab2cc82db9eeccf578bda89130e252a0db026aa7" integrity sha512-lFfjNyGEyVWC4BwX0WyvkoWLapI5xHSM3xZJFUhx4JM4XyyRdO8Aucc6pCygnqV2uSgJFaJWW3Ft1wCWSoQkQg== dependencies: clean-css "^4.1.11" constantinople "^3.0.1" jstransformer "1.0.0" pug-error "^1.3.3" pug-walk "^1.1.8" resolve 
"^1.1.6" uglify-js "^2.6.1" pug-filters@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-4.0.0.tgz#d3e49af5ba8472e9b7a66d980e707ce9d2cc9b5e" integrity sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A== dependencies: constantinople "^4.0.1" jstransformer "1.0.0" pug-error "^2.0.0" pug-walk "^2.0.0" resolve "^1.15.1" pug-lexer@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-4.1.0.tgz#531cde48c7c0b1fcbbc2b85485c8665e31489cfd" integrity sha512-i55yzEBtjm0mlplW4LoANq7k3S8gDdfC6+LThGEvsK4FuobcKfDAwt6V4jKPH9RtiE3a2Akfg5UpafZ1OksaPA== dependencies: character-parser "^2.1.1" is-expression "^3.0.0" pug-error "^1.3.3" pug-lexer@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-5.0.1.tgz#ae44628c5bef9b190b665683b288ca9024b8b0d5" integrity sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w== dependencies: character-parser "^2.2.0" is-expression "^4.0.0" pug-error "^2.0.0" pug-linker@^3.0.6: version "3.0.6" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-3.0.6.tgz#f5bf218b0efd65ce6670f7afc51658d0f82989fb" integrity sha512-bagfuHttfQOpANGy1Y6NJ+0mNb7dD2MswFG2ZKj22s8g0wVsojpRlqveEQHmgXXcfROB2RT6oqbPYr9EN2ZWzg== dependencies: pug-error "^1.3.3" pug-walk "^1.1.8" pug-linker@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-4.0.0.tgz#12cbc0594fc5a3e06b9fc59e6f93c146962a7708" integrity sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw== dependencies: pug-error "^2.0.0" pug-walk "^2.0.0" pug-load@^2.0.12: version "2.0.12" resolved "https://registry.yarnpkg.com/pug-load/-/pug-load-2.0.12.tgz#d38c85eb85f6e2f704dea14dcca94144d35d3e7b" integrity sha512-UqpgGpyyXRYgJs/X60sE6SIf8UBsmcHYKNaOccyVLEuT6OPBIMo6xMPhoJnqtB3Q3BbO4Z3Bjz5qDsUWh4rXsg== dependencies: object-assign "^4.1.0" pug-walk "^1.1.8" 
pug-load@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-load/-/pug-load-3.0.0.tgz#9fd9cda52202b08adb11d25681fb9f34bd41b662" integrity sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ== dependencies: object-assign "^4.1.1" pug-walk "^2.0.0" pug-parser@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-5.0.1.tgz#03e7ada48b6840bd3822f867d7d90f842d0ffdc9" integrity sha512-nGHqK+w07p5/PsPIyzkTQfzlYfuqoiGjaoqHv1LjOv2ZLXmGX1O+4Vcvps+P4LhxZ3drYSljjq4b+Naid126wA== dependencies: pug-error "^1.3.3" token-stream "0.0.1" pug-parser@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-6.0.0.tgz#a8fdc035863a95b2c1dc5ebf4ecf80b4e76a1260" integrity sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw== dependencies: pug-error "^2.0.0" token-stream "1.0.0" pug-runtime@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-2.0.5.tgz#6da7976c36bf22f68e733c359240d8ae7a32953a" integrity sha512-P+rXKn9un4fQY77wtpcuFyvFaBww7/91f3jHa154qU26qFAnOe6SW1CbIDcxiG5lLK9HazYrMCCuDvNgDQNptw== pug-runtime@^3.0.0, pug-runtime@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-3.0.1.tgz#f636976204723f35a8c5f6fad6acda2a191b83d7" integrity sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg== pug-strip-comments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-1.0.4.tgz#cc1b6de1f6e8f5931cf02ec66cdffd3f50eaf8a8" integrity sha512-i5j/9CS4yFhSxHp5iKPHwigaig/VV9g+FgReLJWWHEHbvKsbqL0oP/K5ubuLco6Wu3Kan5p7u7qk8A4oLLh6vw== dependencies: pug-error "^1.3.3" pug-strip-comments@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz#f94b07fd6b495523330f490a7f554b4ff876303e" integrity 
sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ== dependencies: pug-error "^2.0.0" pug-walk@^1.1.8: version "1.1.8" resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-1.1.8.tgz#b408f67f27912f8c21da2f45b7230c4bd2a5ea7a" integrity sha512-GMu3M5nUL3fju4/egXwZO0XLi6fW/K3T3VTgFQ14GxNi8btlxgT5qZL//JwZFm/2Fa64J/PNS8AZeys3wiMkVA== pug-walk@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-2.0.0.tgz#417aabc29232bb4499b5b5069a2b2d2a24d5f5fe" integrity sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ== pug@^2.0.0-alpha7: version "2.0.4" resolved "https://registry.yarnpkg.com/pug/-/pug-2.0.4.tgz#ee7682ec0a60494b38d48a88f05f3b0ac931377d" integrity sha512-XhoaDlvi6NIzL49nu094R2NA6P37ijtgMDuWE+ofekDChvfKnzFal60bhSdiy8y2PBO6fmz3oMEIcfpBVRUdvw== dependencies: pug-code-gen "^2.0.2" pug-filters "^3.1.1" pug-lexer "^4.1.0" pug-linker "^3.0.6" pug-load "^2.0.12" pug-parser "^5.0.1" pug-runtime "^2.0.5" pug-strip-comments "^1.0.4" pug@^3.0.1: version "3.0.2" resolved "https://registry.yarnpkg.com/pug/-/pug-3.0.2.tgz#f35c7107343454e43bc27ae0ff76c731b78ea535" integrity sha512-bp0I/hiK1D1vChHh6EfDxtndHji55XP/ZJKwsRqrz6lRia6ZC2OZbdAymlxdVFwd1L70ebrVJw4/eZ79skrIaw== dependencies: pug-code-gen "^3.0.2" pug-filters "^4.0.0" pug-lexer "^5.0.1" pug-linker "^4.0.0" pug-load "^3.0.0" pug-parser "^6.0.0" pug-runtime "^3.0.1" pug-strip-comments "^2.0.0" pump@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== 
dependencies: end-of-stream "^1.1.0" once "^1.3.1" pumpify@^1.3.3: version "1.5.1" resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" inherits "^2.0.3" pump "^2.0.0" punycode@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= punycode@^1.2.4, punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== qjobs@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" integrity sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg== qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== query-string@^5.0.1: version "5.1.1" resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== dependencies: 
decode-uri-component "^0.2.0" object-assign "^4.1.0" strict-uri-encode "^1.0.0" querystring-es3@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= querystring@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= querystringify@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" randomfill@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== dependencies: randombytes "^2.0.5" safe-buffer "^5.1.0" range-parser@^1.0.3, range-parser@^1.2.0, range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== dependencies: bytes 
"3.1.0" http-errors "1.7.2" iconv-lite "0.4.24" unpipe "1.0.0" raw-loader@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-2.0.0.tgz#e2813d9e1e3f80d1bbade5ad082e809679e20c26" integrity sha512-kZnO5MoIyrojfrPWqrhFNLZemIAX8edMOCp++yC5RKxzFB3m92DqKNhKlU6+FvpOhWtvyh3jOaD7J6/9tpdIKg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" path-type "^1.0.0" "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@^3.0.6, readable-stream@^3.1.1: version "3.4.0" resolved 
"https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc" integrity sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" util-deprecate "^1.0.1" readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== dependencies: graceful-fs "^4.1.11" micromatch "^3.1.10" readable-stream "^2.0.2" readdirp@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839" integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ== dependencies: picomatch "^2.0.4" redent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= dependencies: indent-string "^2.1.0" strip-indent "^1.0.1" regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" regenerate@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== regenerator-runtime@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity 
sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.2: version "0.13.3" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5" integrity sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw== regenerator-transform@^0.14.0: version "0.14.1" resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.1.tgz#3b2fce4e1ab7732c08f665dfdb314749c7ddd2fb" integrity sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ== dependencies: private "^0.1.6" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" safe-regex "^1.1.0" regexp.prototype.flags@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.2.0.tgz#6b30724e306a27833eeb171b66ac8890ba37e41c" integrity sha512-ztaw4M1VqgMwl9HlPpOuiYgItcHlunW0He2fE6eNfT6E/CF2FtYi9ofOYe4mKntstYk0Fyh/rDRBdS3AnxjlrA== dependencies: define-properties "^1.1.2" regexpu-core@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6" integrity sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg== dependencies: regenerate "^1.4.0" regenerate-unicode-properties "^8.1.0" regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" unicode-match-property-value-ecmascript "^1.1.0" regjsgen@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.1.tgz#48f0bf1a5ea205196929c0d9798b42d1ed98443c" integrity 
sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg== regjsparser@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" relateurl@0.2.x: version "0.2.7" resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= renderkid@^2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== dependencies: css-select "^1.1.0" dom-converter "^0.2" htmlparser2 "^3.3.0" strip-ansi "^3.0.0" utila "^0.4.0" repeat-element@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^1.5.2, repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" request@^2.83.0: version "2.88.0" resolved 
"https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.0" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign "~0.9.0" performance-now "^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.4.3" tunnel-agent "^0.6.0" uuid "^3.3.2" require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= require-main-filename@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= resolve-cwd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= dependencies: resolve-from "^3.0.0" resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity 
sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= dependencies: expand-tilde "^2.0.0" global-modules "^1.0.0" resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" integrity sha1-six699nWiBvItuZTM17rywoYh0g= resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= resolve@^1.1.6, resolve@^1.10.0, resolve@^1.3.2, resolve@^1.8.1: version "1.12.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.2.tgz#08b12496d9aa8659c75f534a8f05f0d892fff594" integrity sha512-cAVTI2VLHWYsGOirfeYVVQ7ZDejtQ9fp4YhYckWDEkFfqbVjaT11iM8k6xSAfGFMM+gDpZjMnFssPu8we+mqFw== dependencies: path-parse "^1.0.6" resolve@^1.15.1: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== dependencies: is-core-module "^2.2.0" path-parse "^1.0.6" ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= rfdc@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.1.4.tgz#ba72cc1367a0ccd9cf81a870b3b58bd3ad07f8c2" integrity sha512-5C9HXdzK8EAqN7JDif30jqsBzavB7wLpaubisuQIGHWf2gUXSpzy6ArX/+Da8RjFpagWsCn+pIgxTMAmKw9Zug== right-align@^0.1.1: 
version "0.1.3" resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" integrity sha1-YTObci/mo1FWiSENJOFMlhSGE+8= dependencies: align-text "^0.1.1" rimraf@^2.5.4, rimraf@^2.6.0, rimraf@^2.6.1, rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== dependencies: hash-base "^3.0.0" inherits "^2.0.1" run-queue@^1.0.0, run-queue@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= dependencies: aproba "^1.1.1" safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= dependencies: ret "~0.1.10" 
"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== schema-utils@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf" integrity sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8= dependencies: ajv "^5.0.0" schema-utils@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== dependencies: ajv "^6.1.0" ajv-errors "^1.0.0" ajv-keywords "^3.1.0" select-hose@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= selfsigned@^1.10.7: version "1.10.7" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.7.tgz#da5819fd049d5574f28e88a9bcc6dbc6e6f3906b" integrity sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA== dependencies: node-forge "0.9.0" "semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== semver@^6.3.0: version "6.3.0" resolved 
"https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== dependencies: debug "2.6.9" depd "~1.1.2" destroy "~1.0.4" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" http-errors "~1.7.2" mime "1.6.0" ms "2.1.1" on-finished "~2.3.0" range-parser "~1.2.1" statuses "~1.5.0" serialize-javascript@^1.7.0: version "1.9.1" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-1.9.1.tgz#cfc200aef77b600c47da9bb8149c943e798c2fdb" integrity sha512-0Vb/54WJ6k5v8sSWN09S0ora+Hnr+cX40r9F170nT+mSkaxltoE/7R3OrIdBSUv1OoiobH1QoWQbCnAO+e8J1A== serialize-javascript@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-2.1.0.tgz#9310276819efd0eb128258bb341957f6eb2fc570" integrity sha512-a/mxFfU00QT88umAJQsNWOnUKckhNCqOl028N48e7wFmo2/EHpTo9Wso+iJJCMrQnmFvcjto5RJdAHEvVhcyUQ== serve-index@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= dependencies: accepts "~1.3.4" batch "0.6.1" debug "2.6.9" escape-html "~1.0.3" http-errors "~1.6.2" mime-types "~2.1.17" parseurl "~1.3.2" serve-static@1.14.1: version "1.14.1" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" send "0.17.1" set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" 
resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.3" split-string "^3.0.1" setimmediate@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== setprototypeof@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= dependencies: shebang-regex "^1.0.0" shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" 
integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= slash@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" integrity sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU= snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" isobject "^3.0.0" snapdragon-util "^3.0.1" snapdragon-util@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: base "^0.11.1" debug "^2.2.0" define-property "^0.2.5" extend-shallow "^2.0.1" map-cache "^0.2.2" source-map "^0.5.6" source-map-resolve "^0.5.0" use "^3.1.0" socket.io-adapter@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz#2a805e8a14d6372124dd9159ad4502f8cb07f06b" integrity sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs= socket.io-client@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.1.1.tgz#dcb38103436ab4578ddb026638ae2f21b623671f" integrity sha512-jxnFyhAuFxYfjqIgduQlhzqTcOEQSn+OHKVfAxWaNWa7ecP7xSNk2Dx/3UEsDcY7NcFafxvNvKPmmO7HTwTxGQ== 
dependencies: backo2 "1.0.2" base64-arraybuffer "0.1.5" component-bind "1.0.0" component-emitter "1.2.1" debug "~3.1.0" engine.io-client "~3.2.0" has-binary2 "~1.0.2" has-cors "1.1.0" indexof "0.0.1" object-component "0.0.3" parseqs "0.0.5" parseuri "0.0.5" socket.io-parser "~3.2.0" to-array "0.1.4" socket.io-parser@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.2.0.tgz#e7c6228b6aa1f814e6148aea325b51aa9499e077" integrity sha512-FYiBx7rc/KORMJlgsXysflWx/RIvtqZbyGLlHZvjfmPTPeuD/I8MaW7cfFrj5tRltICJdgwflhfZ3NVVbVLFQA== dependencies: component-emitter "1.2.1" debug "~3.1.0" isarray "2.0.1" socket.io@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.1.1.tgz#a069c5feabee3e6b214a75b40ce0652e1cfb9980" integrity sha512-rORqq9c+7W0DAK3cleWNSyfv/qKXV99hV4tZe+gGLfBECw3XEhBy7x85F3wypA9688LKjtwO9pX9L33/xQI8yA== dependencies: debug "~3.1.0" engine.io "~3.2.0" has-binary2 "~1.0.2" socket.io-adapter "~1.1.0" socket.io-client "2.1.1" socket.io-parser "~3.2.0" sockjs-client@1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.4.0.tgz#c9f2568e19c8fd8173b4997ea3420e0bb306c7d5" integrity sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g== dependencies: debug "^3.2.5" eventsource "^1.0.7" faye-websocket "~0.11.1" inherits "^2.0.3" json3 "^3.3.2" url-parse "^1.4.3" sockjs@0.3.19: version "0.3.19" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d" integrity sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw== dependencies: faye-websocket "^0.10.0" uuid "^3.0.1" sort-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= dependencies: is-plain-obj "^1.0.0" source-list-map@^2.0.0: version 
"2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-resolve@^0.5.0: version "0.5.2" resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" urix "^0.1.0" source-map-support@~0.5.12: version "0.5.16" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-url@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= source-map@^0.5.0, source-map@^0.5.1, source-map@^0.5.6, source-map@^0.5.7, source-map@~0.5.1: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" integrity sha1-2rc/vPwrqBm03gO9b26qSBZLP50= dependencies: amdefine ">=0.0.4" 
spdx-correct@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: spdx-expression-parse "^3.0.0" spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== spdx-expression-parse@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: version "3.0.5" resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== dependencies: debug "^4.1.0" detect-node "^2.0.4" hpack.js "^2.1.6" obuf "^1.1.2" readable-stream "^3.0.6" wbuf "^1.7.3" spdy@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.1.tgz#6f12ed1c5db7ea4f24ebb8b89ba58c87c08257f2" integrity sha512-HeZS3PBdMA+sZSu0qwpCxl3DeALD5ASx8pAX0jZdKXSpPWbQ6SYGnlg3BBmYLx5LtiZrmkAZfErCm2oECBcioA== dependencies: debug "^4.1.0" handle-thing "^2.0.0" http-deceiver "^1.2.7" select-hose "^2.0.0" spdy-transport "^3.0.0" split-string@^3.0.1, 
split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow "^3.0.0" sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" ssri@^6.0.1: version "6.0.2" resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.2.tgz#157939134f20464e7301ddba3e90ffa8f7728ac5" integrity sha512-cepbSq/neFK7xB6A50KHN0xHDotYzq58wWCa5LeWqnPrHG8GzfEjO/4O8kpmcGW+oaxkvhEJCWgbgNk4/ZV93Q== dependencies: figgy-pudding "^3.5.1" static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== dependencies: inherits 
"~2.0.1" readable-stream "^2.0.2" stream-each@^1.1.0: version "1.2.3" resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== dependencies: end-of-stream "^1.1.0" stream-shift "^1.0.0" stream-http@^2.7.2: version "2.8.3" resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== dependencies: builtin-status-codes "^3.0.0" inherits "^2.0.1" readable-stream "^2.3.6" to-arraybuffer "^1.0.0" xtend "^4.0.0" stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= streamroller@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-1.0.6.tgz#8167d8496ed9f19f05ee4b158d9611321b8cacd9" integrity sha512-3QC47Mhv3/aZNFpDDVO44qQb9gwB9QggMEE0sQmkTAwBVYdBRWISdsywlkfm5II1Q5y/pmrHflti/IgmIzdDBg== dependencies: async "^2.6.2" date-format "^2.0.0" debug "^3.2.6" fs-extra "^7.0.1" lodash "^4.17.14" strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" "string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== dependencies: emoji-regex "^7.0.1" is-fullwidth-code-point "^2.0.0" strip-ansi "^5.1.0" string.prototype.trimleft@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string.prototype.trimright@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58" integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: 
safe-buffer "~5.1.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: ansi-regex "^4.1.0" strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= dependencies: get-stdin "^4.0.1" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= style-loader@^0.23.1: version "0.23.1" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.23.1.tgz#cb9154606f3e771ab6c4ab637026a1049174d925" integrity sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg== dependencies: loader-utils "^1.1.0" 
schema-utils "^1.0.0" supports-color@6.1.0, supports-color@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== dependencies: has-flag "^3.0.0" supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.0: version "3.2.3" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" tapable@^1.0.0, tapable@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tar@^4: version "4.4.19" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3" integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA== dependencies: chownr "^1.1.4" fs-minipass "^1.2.7" minipass "^2.9.0" minizlib "^1.3.3" mkdirp "^0.5.5" safe-buffer "^5.2.1" yallist "^3.1.1" terser-webpack-plugin@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.1.tgz#61b18e40eaee5be97e771cdbb10ed1280888c2b4" integrity 
sha512-ZXmmfiwtCLfz8WKZyYUuuHf3dMYEjg8NrjHMb0JqHVHVOSkzp3cW2/XG1fP3tRhqEqSzMwzzRQGtAPbs4Cncxg== dependencies: cacache "^12.0.2" find-cache-dir "^2.1.0" is-wsl "^1.1.0" schema-utils "^1.0.0" serialize-javascript "^1.7.0" source-map "^0.6.1" terser "^4.1.2" webpack-sources "^1.4.0" worker-farm "^1.7.0" terser@^4.1.2: version "4.4.0" resolved "https://registry.yarnpkg.com/terser/-/terser-4.4.0.tgz#22c46b4817cf4c9565434bfe6ad47336af259ac3" integrity sha512-oDG16n2WKm27JO8h4y/w3iqBGAOSCtq7k8dRmrn4Wf9NouL0b2WpMHGChFGZq4nFAQy1FsNJrVQHfurXOSTmOA== dependencies: commander "^2.20.0" source-map "~0.6.1" source-map-support "~0.5.12" through2@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" xtend "~4.0.1" thunky@^1.0.2: version "1.1.0" resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== timers-browserify@^2.0.4: version "2.0.11" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== dependencies: setimmediate "^1.0.4" tmp@0.0.33, tmp@0.0.x: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" to-array@0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" integrity sha1-F+bBH3PdTz10zaek/zI46a2b+JA= to-arraybuffer@^1.0.0: version "1.0.1" 
resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= dependencies: is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" extend-shallow "^3.0.2" regex-not "^1.0.2" safe-regex "^1.1.0" toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity 
sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== token-stream@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-0.0.1.tgz#ceeefc717a76c4316f126d0b9dbaa55d7e7df01a" integrity sha1-zu78cXp2xDFvEm0LnbqlXX598Bo= token-stream@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-1.0.0.tgz#cc200eab2613f4166d27ff9afc7ca56d49df6eb4" integrity sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ= toposort@^1.0.0: version "1.0.7" resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= tough-cookie@~2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: psl "^1.1.24" punycode "^1.4.1" trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= tslib@^1.9.0: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" 
integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" type-is@~1.6.17, type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" type@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3" integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow== typedarray@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= ua-parser-js@^0.7.18: version "0.7.20" resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.20.tgz#7527178b82f6a62a0f243d1f94fd30e3e3c21098" integrity sha512-8OaIKfzL5cpx8eCMAhhvTlft8GYF8b2eQr6JkCyVdrgjcytyOmPCXrqXFcUnhonRpLlh5yxEZVohm6mzaowUOw== uglify-js@3.4.x: version "3.4.10" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" integrity 
sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== dependencies: commander "~2.19.0" source-map "~0.6.1" uglify-js@^2.6.1: version "2.8.29" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" integrity sha1-KcVzMUgFe7Th913zW3qcty5qWd0= dependencies: source-map "~0.5.1" yargs "~3.10.0" optionalDependencies: uglify-to-browserify "~1.0.0" uglify-js@^3.1.4: version "3.13.5" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.13.5.tgz#5d71d6dbba64cf441f32929b1efce7365bb4f113" integrity sha512-xtB8yEqIkn7zmOyS2zUNBsYCBRhDkvlNxMMY2smuJ/qA8NCHeQvKCF3i9Z4k8FJH4+PJvZRtMrPynfZ75+CSZw== uglify-to-browserify@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" integrity sha1-bgkk1r2mta/jSeOabWMoUKD4grc= ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== unicode-canonical-property-names-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== unicode-match-property-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== dependencies: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" 
unicode-match-property-value-ecmascript@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" set-value "^2.0.1" uniq@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= unique-filename@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== dependencies: unique-slug "^2.0.0" unique-slug@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== dependencies: imurmurhash "^0.1.4" universalify@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity 
sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= dependencies: has-value "^0.3.1" isobject "^3.0.0" upath@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== upper-case@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= uri-js@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= url-join@^2.0.2: version "2.0.5" resolved "https://registry.yarnpkg.com/url-join/-/url-join-2.0.5.tgz#5af22f18c052a000a48d7b82c5e9c2e2feeda728" integrity sha1-WvIvGMBSoACkjXuCxenC4v7tpyg= url-parse@^1.4.3: version "1.5.3" resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.3.tgz#71c1303d38fb6639ade183c2992c8cc0686df862" integrity sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" url@^0.11.0: version "0.11.0" resolved 
"https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= dependencies: punycode "1.3.2" querystring "0.2.0" use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== useragent@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" integrity sha512-4AoH4pxuSvHCjqLO04sU6U/uE65BYza8l/KKBS0b0hnUPWi+cQ2BpeTEwejCSx9SPV5/U03nniDTrWx5NrmKdw== dependencies: lru-cache "4.1.x" tmp "0.0.x" util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util.promisify@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== dependencies: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" util@0.10.3: version "0.10.3" resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= dependencies: inherits "2.0.1" util@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== dependencies: inherits "2.0.3" utila@^0.4.0, utila@~0.4: version "0.4.0" resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= utils-merge@1.0.1: version 
"1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.3.tgz#4568f0216e78760ee1dbf3a4d2cf53e224112866" integrity sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ== v8-compile-cache@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" vm-browserify@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== void-elements@^2.0.0, void-elements@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity 
sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= void-elements@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-3.1.0.tgz#614f7fbf8d801f0bb5f0661f5b2f5785750e4f09" integrity sha1-YU9/v42AHwu18GYfWy9XhXUOTwk= watchpack@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== dependencies: chokidar "^2.0.2" graceful-fs "^4.1.2" neo-async "^2.5.0" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== dependencies: minimalistic-assert "^1.0.0" webpack-cli@^3.3.1: version "3.3.10" resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.10.tgz#17b279267e9b4fb549023fae170da8e6e766da13" integrity sha512-u1dgND9+MXaEt74sJR4PR7qkPxXUSQ0RXYq8x1L6Jg1MYVEmGPrH6Ah6C4arD4r0J1P5HKjRqpab36k0eIzPqg== dependencies: chalk "2.4.2" cross-spawn "6.0.5" enhanced-resolve "4.1.0" findup-sync "3.0.0" global-modules "2.0.0" import-local "2.0.0" interpret "1.2.0" loader-utils "1.2.3" supports-color "6.1.0" v8-compile-cache "2.0.3" yargs "13.2.4" webpack-dev-middleware@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-2.0.6.tgz#a51692801e8310844ef3e3790e1eacfe52326fd4" integrity sha512-tj5LLD9r4tDuRIDa5Mu9lnY2qBBehAITv6A9irqXhw/HQquZgTx3BCd57zYbU2gMDnncA49ufK2qVQSbaKJwOw== dependencies: loud-rejection "^1.6.0" memory-fs "~0.4.1" mime "^2.1.0" path-is-absolute "^1.0.0" range-parser "^1.0.3" url-join "^2.0.2" webpack-log "^1.0.1" webpack-dev-middleware@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3" 
integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw== dependencies: memory-fs "^0.4.1" mime "^2.4.4" mkdirp "^0.5.1" range-parser "^1.2.1" webpack-log "^2.0.0" webpack-dev-server@^3.3.1: version "3.9.0" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.9.0.tgz#27c3b5d0f6b6677c4304465ac817623c8b27b89c" integrity sha512-E6uQ4kRrTX9URN9s/lIbqTAztwEPdvzVrcmHE8EQ9YnuT9J8Es5Wrd8n9BKg1a0oZ5EgEke/EQFgUsp18dSTBw== dependencies: ansi-html "0.0.7" bonjour "^3.5.0" chokidar "^2.1.8" compression "^1.7.4" connect-history-api-fallback "^1.6.0" debug "^4.1.1" del "^4.1.1" express "^4.17.1" html-entities "^1.2.1" http-proxy-middleware "0.19.1" import-local "^2.0.0" internal-ip "^4.3.0" ip "^1.1.5" is-absolute-url "^3.0.3" killable "^1.0.1" loglevel "^1.6.4" opn "^5.5.0" p-retry "^3.0.1" portfinder "^1.0.25" schema-utils "^1.0.0" selfsigned "^1.10.7" semver "^6.3.0" serve-index "^1.9.1" sockjs "0.3.19" sockjs-client "1.4.0" spdy "^4.0.1" strip-ansi "^3.0.1" supports-color "^6.1.0" url "^0.11.0" webpack-dev-middleware "^3.7.2" webpack-log "^2.0.0" ws "^6.2.1" yargs "12.0.5" webpack-fix-style-only-entries@^0.2.1: version "0.2.2" resolved "https://registry.yarnpkg.com/webpack-fix-style-only-entries/-/webpack-fix-style-only-entries-0.2.2.tgz#60331c608b944ac821a3b6f2ae491a6d79ba40eb" integrity sha512-0wcrLCnISP8htV0NP1mT0e2mHhfjGQdNk82s8BTLVvF7rXuoJuUUzP3aCUXnRqlLgmTBx5WgqPhnczjatl+iSQ== webpack-log@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-1.2.0.tgz#a4b34cda6b22b518dbb0ab32e567962d5c72a43d" integrity sha512-U9AnICnu50HXtiqiDxuli5gLB5PGBo7VvcHx36jRZHwK4vzOYLbImqT4lwWwoMHdQWwEKw736fCHEekokTEKHA== dependencies: chalk "^2.1.0" log-symbols "^2.1.0" loglevelnext "^1.0.1" uuid "^3.1.0" webpack-log@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" integrity 
sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== dependencies: ansi-colors "^3.0.0" uuid "^3.3.2" webpack-shell-plugin@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/webpack-shell-plugin/-/webpack-shell-plugin-0.5.0.tgz#29b8a1d80ddeae0ddb10e729667f728653c2c742" integrity sha1-Kbih2A3erg3bEOcpZn9yhlPCx0I= webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== dependencies: source-list-map "^2.0.0" source-map "~0.6.1" webpack@^4.30.0: version "4.41.2" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.2.tgz#c34ec76daa3a8468c9b61a50336d8e3303dce74e" integrity sha512-Zhw69edTGfbz9/8JJoyRQ/pq8FYUoY0diOXqW0T6yhgdhCv6wr0hra5DwwWexNRns2Z2+gsnrNcbe9hbGBgk/A== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/wasm-edit" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" acorn "^6.2.1" ajv "^6.10.2" ajv-keywords "^3.4.1" chrome-trace-event "^1.0.2" enhanced-resolve "^4.1.0" eslint-scope "^4.0.3" json-parse-better-errors "^1.0.2" loader-runner "^2.4.0" loader-utils "^1.2.3" memory-fs "^0.4.1" micromatch "^3.1.10" mkdirp "^0.5.1" neo-async "^2.6.1" node-libs-browser "^2.2.1" schema-utils "^1.0.0" tapable "^1.1.3" terser-webpack-plugin "^1.4.1" watchpack "^1.6.0" webpack-sources "^1.4.1" websocket-driver@>=0.5.1: version "0.7.3" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.3.tgz#a2d4e0d4f4f116f1e6297eba58b05d430100e9f9" integrity sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg== dependencies: http-parser-js ">=0.4.0 <0.4.11" safe-buffer ">=5.1.0" websocket-extensions ">=0.1.1" websocket-extensions@>=0.1.1: version 
"0.1.4" resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@^1.1.1, which@^1.2.1, which@^1.2.14, which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" window-size@0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" integrity sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0= with@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/with/-/with-5.1.1.tgz#fa4daa92daf32c4ea94ed453c81f04686b575dfe" integrity sha1-+k2qktrzLE6pTtRTyB8EaGtXXf4= dependencies: acorn "^3.1.0" acorn-globals "^3.0.0" with@^7.0.0: version "7.0.2" resolved "https://registry.yarnpkg.com/with/-/with-7.0.2.tgz#ccee3ad542d25538a7a7a80aad212b9828495bac" integrity sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w== dependencies: "@babel/parser" "^7.9.6" "@babel/types" "^7.9.6" assert-never "^1.2.1" babel-walk "3.0.0-canary-5" word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" 
integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== wordwrap@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" integrity sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8= wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= worker-farm@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== dependencies: errno "~0.1.7" wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== dependencies: ansi-styles "^3.2.0" string-width "^3.0.0" strip-ansi "^5.0.0" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= ws@^6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== dependencies: async-limiter "~1.0.0" ws@~3.3.1: version 
"3.3.3" resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA== dependencies: async-limiter "~1.0.0" safe-buffer "~5.1.0" ultron "~1.1.0" xmlhttprequest-ssl@~1.5.4: version "1.5.5" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e" integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4= xtend@^4.0.0, xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== "y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.2, yallist@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^11.1.1: version "11.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" integrity sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs-parser@^13.1.0: version "13.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" integrity 
sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs@12.0.5: version "12.0.5" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== dependencies: cliui "^4.0.0" decamelize "^1.2.0" find-up "^3.0.0" get-caller-file "^1.0.1" os-locale "^3.0.0" require-directory "^2.1.1" require-main-filename "^1.0.1" set-blocking "^2.0.0" string-width "^2.0.0" which-module "^2.0.0" y18n "^3.2.1 || ^4.0.0" yargs-parser "^11.1.1" yargs@13.2.4: version "13.2.4" resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== dependencies: cliui "^5.0.0" find-up "^3.0.0" get-caller-file "^2.0.1" os-locale "^3.1.0" require-directory "^2.1.1" require-main-filename "^2.0.0" set-blocking "^2.0.0" string-width "^3.0.0" which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^13.1.0" yargs@~3.10.0: version "3.10.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" integrity sha1-9+572FfdfB0tOMDnTvvWgdFDH9E= dependencies: camelcase "^1.0.2" cliui "^2.1.0" decamelize "^1.0.0" window-size "0.1.0" yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk= buildbot-3.4.0/www/build_common/000077500000000000000000000000001413250514000166105ustar00rootroot00000000000000buildbot-3.4.0/www/build_common/package.json000066400000000000000000000025751413250514000211070ustar00rootroot00000000000000{ "name": "buildbot-build-common", "version": "1.0.0", "main": "src/main.module.js", "dependencies": { "@babel/core": "^7.4.3", 
"@babel/plugin-syntax-dynamic-import": "^7.2.0", "@babel/plugin-transform-runtime": "^7.4.3", "@babel/preset-env": "^7.4.3", "@babel/runtime": "^7.4.3", "autoprefixer": "^9.5.1", "babel-loader": "^8.0.5", "css-loader": "^2.1.1", "file-loader": "^3.0.1", "html-webpack-plugin": "^3.2.0", "import-glob-loader": "^1.1.0", "istanbul-instrumenter-loader": "^3.0.1", "jasmine-core": "^3.4.0", "karma": "^4.1.0", "karma-chrome-launcher": "^2.2.0", "karma-coverage": "^1.1.2", "karma-jasmine": "^2.0.1", "karma-sourcemap-loader": "^0.3.7", "karma-spec-reporter": "^0.0.32", "karma-webpack": "^3.0.5", "less": "^3.9.0", "less-loader": "^5.0.0", "mini-css-extract-plugin": "^0.6.0", "node-libs-browser": "^2.2.0", "null-loader": "^1.0.0", "postcss-loader": "^3.0.0", "pug": "^3.0.1", "raw-loader": "^2.0.0", "style-loader": "^0.23.1", "webpack": "^4.30.0", "webpack-cli": "^3.3.1", "webpack-dev-server": "^3.3.1", "webpack-fix-style-only-entries": "^0.2.1", "webpack-shell-plugin": "^0.5.0" } } buildbot-3.4.0/www/build_common/src/000077500000000000000000000000001413250514000173775ustar00rootroot00000000000000buildbot-3.4.0/www/build_common/src/karma.js000066400000000000000000000022271413250514000210330ustar00rootroot00000000000000 module.exports.createTemplateKarmaConfig = function(config, options) { config.set({ frameworks: [ 'jasmine' ], reporters: [ 'progress', 'coverage' ], files: [ options.testRoot ], preprocessors: { [options.testRoot]: ['webpack', 'sourcemap'] }, browsers: [ 'Chrome' ], singleRun: true, // Configure code coverage reporter coverageReporter: { dir: 'coverage/', reporters: [ {type: 'text-summary'}, {type: 'html'} ] }, webpack: options.webpack, // Hide webpack build information from output webpackMiddleware: { noInfo: 'errors-only' }, customLaunchers: { BBChromeHeadless: { base: 'ChromeHeadless', flags: [ '--headless', '--disable-gpu', '--no-sandbox', '--window-size=1024,768', ], } }, }); } 
buildbot-3.4.0/www/build_common/src/main.module.js000066400000000000000000000001431413250514000221430ustar00rootroot00000000000000'use strict'; module.exports = { ...require('./karma.js'), ...require('./webpack.js'), }; buildbot-3.4.0/www/build_common/src/ng-template-loader.js000066400000000000000000000016451413250514000234240ustar00rootroot00000000000000const pug = require('pug'); const path = require('path') var loaderUtils = require("loader-utils"); /* ultra simple loader that only support our simple usecase pug-loader is much more complicated, but do support lot of features, including require inside template, which we don't really need. */ module.exports = function() { var fileName = this.resourcePath; var code = pug.compileFile(fileName); var content = code(); var pluginName = loaderUtils.getOptions(this).pluginName; // compute template name (as defined by ancient gulp based build system) var tplName = "views/" + path.parse(fileName).name.replace(/.tpl$/,'') + ".html"; if (pluginName != "buildbot-www") { tplName = pluginName + "/" + tplName; } // search for custom_templates (we use T as a short name to avoid consume to much bytes) content = `module.exports = window.T['${tplName}'] || ${JSON.stringify(content)};`; return content; }buildbot-3.4.0/www/build_common/src/webpack.js000066400000000000000000000115511413250514000213540ustar00rootroot00000000000000'use strict'; const path = require('path'); const webpack = require('webpack'); const MiniCssExtractPlugin = require('mini-css-extract-plugin'); const TerserPlugin = require('terser-webpack-plugin'); const FixStyleOnlyEntriesPlugin = require("webpack-fix-style-only-entries"); const WebpackShellPlugin = require('webpack-shell-plugin'); module.exports.createTemplateWebpackConfig = function(options) { const isTest = options.isTest || false; const isProd = options.isProd || false; const requiredOptions = [ 'dirname', 'libraryName', 'pluginName', ] requiredOptions.forEach((option) => { if (!option in options) { 
throw new Error(`${option} option is required in options`); } }); const outputPath = options.outputPath || options.dirname + '/dist'; const provideJquery = options.provideJquery || false; const extraCommandsOnBuildEnd = options.extraCommandsOnBuildEnd || []; const extraRules = options.extraRules || []; const extraPlugins = options.extraPlugins || []; const extractStyles = options.extractStyles || false; const resolveAngular = options.resolveAngular || false; const supplyBaseExternals = options.supplyBaseExternals || false; var config = {}; if (isProd) { config.mode = 'production'; } else { config.mode = 'development'; } config.entry = options.entry; config.output = isTest ? {} : { path: outputPath, filename: '[name].js', library: options.libraryName, libraryTarget: 'umd', umdNamedDefine: true, globalObject: "(typeof self !== 'undefined' ? self : this)", }; config.optimization = { minimize: isProd, minimizer: [ new TerserPlugin({ cache: true, parallel: true, sourceMap: true, terserOptions: { keep_classnames: true } }), ], }; if (isTest) { config.devtool = 'inline-source-map'; } else { config.devtool = 'source-map'; } config.plugins = []; var cssExtractLoader = []; if (extractStyles) { config.plugins.push(new FixStyleOnlyEntriesPlugin()); config.plugins.push(new MiniCssExtractPlugin({ filename: 'styles.css', })); cssExtractLoader = [{ loader: MiniCssExtractPlugin.loader, options: { hmr: process.env.NODE_ENV === 'development', }, } ]; } config.plugins = config.plugins.concat(extraPlugins); if (provideJquery) { config.plugins.push(new webpack.ProvidePlugin({ "window.jQuery": "jquery", "$": "jquery", })); } if (extraCommandsOnBuildEnd.length > 0) { if (!isTest) { config.plugins.push(new WebpackShellPlugin({ onBuildEnd:extraCommandsOnBuildEnd })); } } if (!isTest && !supplyBaseExternals) { config.externals = [ '@uirouter/angularjs', 'angular', 'angular-animate', 'angular-ui-bootstrap', 'buildbot-data-js', 'd3', 'guanlecoja-ui', 'jquery', ]; } config.module = { rules: [{ 
test: /\.js$/, loader: 'babel-loader', options: { 'plugins': ['@babel/plugin-syntax-dynamic-import'], 'presets': [ [ '@babel/preset-env', { 'useBuiltIns': 'entry', 'targets': { 'chrome': '56', 'firefox': '52', 'edge': '13', 'safari': '10' }, 'modules': false } ] ] }, exclude: /node_modules/ }, { test: /\.jade$/, loader: path.resolve(__dirname, './ng-template-loader.js'), options, exclude: /node_modules/ }, { test: /\.css$/, use: [ ...cssExtractLoader, 'css-loader', ], }, { test: /\.less$/, use: [ ...cssExtractLoader, 'css-loader', 'less-loader', 'import-glob-loader', ], }, ...extraRules ]}; // avoid duplicate load of angular config.resolve = { alias: { 'angular': path.resolve(path.join(__dirname, '..', '..', 'guanlecoja-ui', 'node_modules', 'angular')) }, }; return config; }; buildbot-3.4.0/www/build_common/yarn.lock000066400000000000000000011004031413250514000204320ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 "@babel/code-frame@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0.tgz#06e2ab19bdb535385559aabb5ba59729482800f8" integrity sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.3": version "7.4.5" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.4.5.tgz#081f97e8ffca65a9b4b0fdc7e274e703f000c06a" integrity sha512-OvjIh6aqXtlsA8ujtGKfC7LYWksYSX8yQcM8Ay3LuvVeQ63lcOKgoZWVqcpFwkd29aYU9rVx7jxhfhiEDV9MZA== dependencies: "@babel/code-frame" "^7.0.0" "@babel/generator" "^7.4.4" "@babel/helpers" "^7.4.4" "@babel/parser" "^7.4.5" "@babel/template" "^7.4.4" "@babel/traverse" "^7.4.5" "@babel/types" "^7.4.4" convert-source-map "^1.1.0" debug "^4.1.0" json5 "^2.1.0" lodash "^4.17.11" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" "@babel/generator@^7.4.4": version "7.4.4" resolved 
"https://registry.yarnpkg.com/@babel/generator/-/generator-7.4.4.tgz#174a215eb843fc392c7edcaabeaa873de6e8f041" integrity sha512-53UOLK6TVNqKxf7RUh8NE851EHRxOOeVXKbK2bivdb+iziMyk03Sr4eaE9OELCbyZAAafAKPDwF2TPUES5QbxQ== dependencies: "@babel/types" "^7.4.4" jsesc "^2.5.1" lodash "^4.17.11" source-map "^0.5.0" trim-right "^1.0.1" "@babel/helper-annotate-as-pure@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.0.0.tgz#323d39dd0b50e10c7c06ca7d7638e6864d8c5c32" integrity sha512-3UYcJUj9kvSLbLbUIfQTqzcy5VX7GRZ/CCDrnOaZorFFM01aXp1+GJwuFGV4NDDoAS+mOUyHcO6UD/RfqOks3Q== dependencies: "@babel/types" "^7.0.0" "@babel/helper-builder-binary-assignment-operator-visitor@^7.1.0": version "7.1.0" resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.1.0.tgz#6b69628dfe4087798e0c4ed98e3d4a6b2fbd2f5f" integrity sha512-qNSR4jrmJ8M1VMM9tibvyRAHXQs2PmaksQF7c1CGJNipfe3D8p+wgNwgso/P2A2r2mdgBWAXljNWR0QRZAMW8w== dependencies: "@babel/helper-explode-assignable-expression" "^7.1.0" "@babel/types" "^7.0.0" "@babel/helper-call-delegate@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.4.4.tgz#87c1f8ca19ad552a736a7a27b1c1fcf8b1ff1f43" integrity sha512-l79boDFJ8S1c5hvQvG+rc+wHw6IuH7YldmRKsYtpbawsxURu/paVy57FZMomGK22/JckepaikOkY0MoAmdyOlQ== dependencies: "@babel/helper-hoist-variables" "^7.4.4" "@babel/traverse" "^7.4.4" "@babel/types" "^7.4.4" "@babel/helper-define-map@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.4.4.tgz#6969d1f570b46bdc900d1eba8e5d59c48ba2c12a" integrity sha512-IX3Ln8gLhZpSuqHJSnTNBWGDE9kdkTEWl21A/K7PQ00tseBwbqCHTvNLHSBd9M0R5rER4h5Rsvj9vw0R5SieBg== dependencies: "@babel/helper-function-name" "^7.1.0" "@babel/types" "^7.4.4" lodash "^4.17.11" 
"@babel/helper-explode-assignable-expression@^7.1.0": version "7.1.0" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.1.0.tgz#537fa13f6f1674df745b0c00ec8fe4e99681c8f6" integrity sha512-NRQpfHrJ1msCHtKjbzs9YcMmJZOg6mQMmGRB+hbamEdG5PNpaSm95275VD92DvJKuyl0s2sFiDmMZ+EnnvufqA== dependencies: "@babel/traverse" "^7.1.0" "@babel/types" "^7.0.0" "@babel/helper-function-name@^7.1.0": version "7.1.0" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.1.0.tgz#a0ceb01685f73355d4360c1247f582bfafc8ff53" integrity sha512-A95XEoCpb3TO+KZzJ4S/5uW5fNe26DjBGqf1o9ucyLyCmi1dXq/B3c8iaWTfBk3VvetUxl16e8tIrd5teOCfGw== dependencies: "@babel/helper-get-function-arity" "^7.0.0" "@babel/template" "^7.1.0" "@babel/types" "^7.0.0" "@babel/helper-get-function-arity@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0.tgz#83572d4320e2a4657263734113c42868b64e49c3" integrity sha512-r2DbJeg4svYvt3HOS74U4eWKsUAMRH01Z1ds1zx8KNTPtpTL5JAsdFv8BNyOpVqdFhHkkRDIg5B4AsxmkjAlmQ== dependencies: "@babel/types" "^7.0.0" "@babel/helper-hoist-variables@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.4.4.tgz#0298b5f25c8c09c53102d52ac4a98f773eb2850a" integrity sha512-VYk2/H/BnYbZDDg39hr3t2kKyifAm1W6zHRfhx8jGjIHpQEBv9dry7oQ2f3+J703TLu69nYdxsovl0XYfcnK4w== dependencies: "@babel/types" "^7.4.4" "@babel/helper-member-expression-to-functions@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.0.0.tgz#8cd14b0a0df7ff00f009e7d7a436945f47c7a16f" integrity sha512-avo+lm/QmZlv27Zsi0xEor2fKcqWG56D5ae9dzklpIaY7cQMK5N8VSpaNVPPagiqmy7LrEjK1IWdGMOqPu5csg== dependencies: "@babel/types" "^7.0.0" "@babel/helper-module-imports@^7.0.0": version "7.0.0" resolved 
"https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.0.0.tgz#96081b7111e486da4d2cd971ad1a4fe216cc2e3d" integrity sha512-aP/hlLq01DWNEiDg4Jn23i+CXxW/owM4WpDLFUbpjxe4NS3BhLVZQ5i7E0ZrxuQ/vwekIeciyamgB1UIYxxM6A== dependencies: "@babel/types" "^7.0.0" "@babel/helper-module-transforms@^7.1.0", "@babel/helper-module-transforms@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.4.4.tgz#96115ea42a2f139e619e98ed46df6019b94414b8" integrity sha512-3Z1yp8TVQf+B4ynN7WoHPKS8EkdTbgAEy0nU0rs/1Kw4pDgmvYH3rz3aI11KgxKCba2cn7N+tqzV1mY2HMN96w== dependencies: "@babel/helper-module-imports" "^7.0.0" "@babel/helper-simple-access" "^7.1.0" "@babel/helper-split-export-declaration" "^7.4.4" "@babel/template" "^7.4.4" "@babel/types" "^7.4.4" lodash "^4.17.11" "@babel/helper-optimise-call-expression@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.0.0.tgz#a2920c5702b073c15de51106200aa8cad20497d5" integrity sha512-u8nd9NQePYNQV8iPWu/pLLYBqZBa4ZaY1YWRFMuxrid94wKI1QNt67NEZ7GAe5Kc/0LLScbim05xZFWkAdrj9g== dependencies: "@babel/types" "^7.0.0" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.4.4.tgz#a47e02bc91fb259d2e6727c2a30013e3ac13c4a2" integrity sha512-Y5nuB/kESmR3tKjU8Nkn1wMGEx1tjJX076HBMeL3XLQCu6vA/YRzuTW0bbb+qRnXvQGn+d6Rx953yffl8vEy7Q== dependencies: lodash "^4.17.11" "@babel/helper-remap-async-to-generator@^7.1.0": version "7.1.0" resolved 
"https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.1.0.tgz#361d80821b6f38da75bd3f0785ece20a88c5fe7f" integrity sha512-3fOK0L+Fdlg8S5al8u/hWE6vhufGSn0bN09xm2LXMy//REAF8kDCrYoOBKYmA8m5Nom+sV9LyLCwrFynA8/slg== dependencies: "@babel/helper-annotate-as-pure" "^7.0.0" "@babel/helper-wrap-function" "^7.1.0" "@babel/template" "^7.1.0" "@babel/traverse" "^7.1.0" "@babel/types" "^7.0.0" "@babel/helper-replace-supers@^7.1.0", "@babel/helper-replace-supers@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.4.4.tgz#aee41783ebe4f2d3ab3ae775e1cc6f1a90cefa27" integrity sha512-04xGEnd+s01nY1l15EuMS1rfKktNF+1CkKmHoErDppjAAZL+IUBZpzT748x262HF7fibaQPhbvWUl5HeSt1EXg== dependencies: "@babel/helper-member-expression-to-functions" "^7.0.0" "@babel/helper-optimise-call-expression" "^7.0.0" "@babel/traverse" "^7.4.4" "@babel/types" "^7.4.4" "@babel/helper-simple-access@^7.1.0": version "7.1.0" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.1.0.tgz#65eeb954c8c245beaa4e859da6188f39d71e585c" integrity sha512-Vk+78hNjRbsiu49zAPALxTb+JUQCz1aolpd8osOF16BGnLtseD21nbHgLPGUwrXEurZgiCOUmvs3ExTu4F5x6w== dependencies: "@babel/template" "^7.1.0" "@babel/types" "^7.0.0" "@babel/helper-split-export-declaration@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.4.4.tgz#ff94894a340be78f53f06af038b205c49d993677" integrity sha512-Ro/XkzLf3JFITkW6b+hNxzZ1n5OQ80NvIUdmHspih1XAhtN3vPTuUFT4eQnela+2MaZ5ulH+iyP513KJrxbN7Q== dependencies: "@babel/types" "^7.4.4" "@babel/helper-validator-identifier@^7.12.11": version "7.12.11" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" integrity 
sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== "@babel/helper-wrap-function@^7.1.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.2.0.tgz#c4e0012445769e2815b55296ead43a958549f6fa" integrity sha512-o9fP1BZLLSrYlxYEYyl2aS+Flun5gtjTIG8iln+XuEzQTs0PLagAGSXUcqruJwD5fM48jzIEggCKpIfWTcR7pQ== dependencies: "@babel/helper-function-name" "^7.1.0" "@babel/template" "^7.1.0" "@babel/traverse" "^7.1.0" "@babel/types" "^7.2.0" "@babel/helpers@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.4.4.tgz#868b0ef59c1dd4e78744562d5ce1b59c89f2f2a5" integrity sha512-igczbR/0SeuPR8RFfC7tGrbdTbFL3QTvH6D+Z6zNxnTe//GyqmtHmDkzrqDmyZ3eSwPqB/LhyKoU5DXsp+Vp2A== dependencies: "@babel/template" "^7.4.4" "@babel/traverse" "^7.4.4" "@babel/types" "^7.4.4" "@babel/highlight@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.0.0.tgz#f710c38c8d458e6dd9a201afb637fcb781ce99e4" integrity sha512-UFMC4ZeFC48Tpvj7C8UgLvtkaUuovQX+5xNWrsIoMG8o2z+XFKjKaN9iVmS84dPwVN00W4wPmqvYoZF3EGAsfw== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" "@babel/parser@^7.4.4", "@babel/parser@^7.4.5": version "7.4.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.4.5.tgz#04af8d5d5a2b044a2a1bffacc1e5e6673544e872" integrity sha512-9mUqkL1FF5T7f0WDFfAoDdiMVPWsdD1gZYzSnaXsxUCUqzuch/8of9G3VUSNiZmMBoRxT3neyVsqeiL/ZPcjew== "@babel/parser@^7.6.0", "@babel/parser@^7.9.6": version "7.13.9" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.13.9.tgz#ca34cb95e1c2dd126863a84465ae8ef66114be99" integrity sha512-nEUfRiARCcaVo3ny3ZQjURjHQZUo/JkEw7rLlSZy/psWGnvwXFtPcr6jb7Yb41DVW5LTe6KRq9LGleRNsg1Frw== "@babel/plugin-proposal-async-generator-functions@^7.2.0": version "7.2.0" resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.2.0.tgz#b289b306669dce4ad20b0252889a15768c9d417e" integrity sha512-+Dfo/SCQqrwx48ptLVGLdE39YtWRuKc/Y9I5Fy0P1DDBB9lsAHpjcEJQt+4IifuSOSTLBKJObJqMvaO1pIE8LQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.1.0" "@babel/plugin-syntax-async-generators" "^7.2.0" "@babel/plugin-proposal-json-strings@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.2.0.tgz#568ecc446c6148ae6b267f02551130891e29f317" integrity sha512-MAFV1CA/YVmYwZG0fBQyXhmj0BHCB5egZHCKWIFVv/XCxAeVGIHfos3SwDck4LvCllENIAg7xMKOG5kH0dzyUg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings" "^7.2.0" "@babel/plugin-proposal-object-rest-spread@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.4.4.tgz#1ef173fcf24b3e2df92a678f027673b55e7e3005" integrity sha512-dMBG6cSPBbHeEBdFXeQ2QLc5gUpg4Vkaz8octD4aoW/ISO+jBOcsuxYL7bsb5WSu8RLP6boxrBIALEHgoHtO9g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.2.0" "@babel/plugin-proposal-optional-catch-binding@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.2.0.tgz#135d81edb68a081e55e56ec48541ece8065c38f5" integrity sha512-mgYj3jCcxug6KUcX4OBoOJz3CMrwRfQELPQ5560F70YQUBZB7uac9fqaWamKR1iWUzGiK2t0ygzjTScZnVz75g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding" "^7.2.0" "@babel/plugin-proposal-unicode-property-regex@^7.4.4": version "7.4.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.4.4.tgz#501ffd9826c0b91da22690720722ac7cb1ca9c78" integrity sha512-j1NwnOqMG9mFUOH58JTFsA/+ZYzQLUZ/drqWUqxCYLGeu2JFZL8YrNC9hBxKmWtAuOCHPcRpgv7fhap09Fb4kA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.4.4" regexpu-core "^4.5.4" "@babel/plugin-syntax-async-generators@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.2.0.tgz#69e1f0db34c6f5a0cf7e2b3323bf159a76c8cb7f" integrity sha512-1ZrIRBv2t0GSlcwVoQ6VgSLpLgiN/FVQUzt9znxo7v2Ov4jJrs8RY8tv0wvDmFN3qIdMKWrmMMW6yZ0G19MfGg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.2.0.tgz#69c159ffaf4998122161ad8ebc5e6d1f55df8612" integrity sha512-mVxuJ0YroI/h/tbFTPGZR8cv6ai+STMKNBq0f8hFxsxWjl94qqhsb+wXbpNMDPU3cfR1TIsVFzU3nXyZMqyK4w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.2.0.tgz#72bd13f6ffe1d25938129d2a186b11fd62951470" integrity sha512-5UGYnMSLRE1dqqZwug+1LISpA403HzlSfsg6P9VXU6TBjcSHeNlw4DxDx7LgpF+iKZoOG/+uzqoRHTdcUpiZNg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.2.0.tgz#3b7a3e733510c57e820b9142a6579ac8b0dfad2e" integrity sha512-t0JKGgqk2We+9may3t0xDdmneaXmyxq0xieYcKHxIsrJO64n1OiMWNUtc5gQK1PA0NpdCRrtZp4z+IUaKugrSA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding@^7.2.0": version "7.2.0" resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.2.0.tgz#a94013d6eda8908dfe6a477e7f9eda85656ecf5c" integrity sha512-bDe4xKNhb0LI7IvZHiA13kff0KEfaGX/Hv4lMA9+7TEc63hMNvfKo6ZFpXhKuEp+II/q35Gc4NoMeDZyaUbj9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-arrow-functions@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.2.0.tgz#9aeafbe4d6ffc6563bf8f8372091628f00779550" integrity sha512-ER77Cax1+8/8jCB9fo4Ud161OZzWN5qawi4GusDuRLcDbDG+bIGYY20zb2dfAFdTRGzrfq2xZPvF0R64EHnimg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.4.4.tgz#a3f1d01f2f21cadab20b33a82133116f14fb5894" integrity sha512-YiqW2Li8TXmzgbXw+STsSqPBPFnGviiaSp6CYOq55X8GQ2SGVLrXB6pNid8HkqkZAzOH6knbai3snhP7v0fNwA== dependencies: "@babel/helper-module-imports" "^7.0.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.1.0" "@babel/plugin-transform-block-scoped-functions@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.2.0.tgz#5d3cc11e8d5ddd752aa64c9148d0db6cb79fd190" integrity sha512-ntQPR6q1/NKuphly49+QiQiTN0O63uOwjdD6dhIjSWBI5xlrbUFh720TIpzBhpnrLfv2tNH/BXvLIab1+BAI0w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-block-scoping@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.4.4.tgz#c13279fabf6b916661531841a23c4b7dae29646d" integrity sha512-jkTUyWZcTrwxu5DD4rWz6rDB5Cjdmgz6z7M7RLXOJyCUkFBawssDGcGh8M/0FTSB87avyJI1HsTwUXp9nKA1PA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
lodash "^4.17.11" "@babel/plugin-transform-classes@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.4.4.tgz#0ce4094cdafd709721076d3b9c38ad31ca715eb6" integrity sha512-/e44eFLImEGIpL9qPxSRat13I5QNRgBLu2hOQJCF7VLy/otSM/sypV1+XaIw5+502RX/+6YaSAPmldk+nhHDPw== dependencies: "@babel/helper-annotate-as-pure" "^7.0.0" "@babel/helper-define-map" "^7.4.4" "@babel/helper-function-name" "^7.1.0" "@babel/helper-optimise-call-expression" "^7.0.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.4.4" "@babel/helper-split-export-declaration" "^7.4.4" globals "^11.1.0" "@babel/plugin-transform-computed-properties@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.2.0.tgz#83a7df6a658865b1c8f641d510c6f3af220216da" integrity sha512-kP/drqTxY6Xt3NNpKiMomfgkNn4o7+vKxK2DDKcBG9sHj51vHqMBGy8wbDS/J4lMxnqs153/T3+DmCEAkC5cpA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-destructuring@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.4.4.tgz#9d964717829cc9e4b601fc82a26a71a4d8faf20f" integrity sha512-/aOx+nW0w8eHiEHm+BTERB2oJn5D127iye/SUQl7NjHy0lf+j7h4MKMMSOwdazGq9OxgiNADncE+SRJkCxjZpQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-dotall-regex@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.4.4.tgz#361a148bc951444312c69446d76ed1ea8e4450c3" integrity sha512-P05YEhRc2h53lZDjRPk/OektxCVevFzZs2Gfjd545Wde3k+yFDbXORgl2e0xpbq8mLcKJ7Idss4fAg0zORN/zg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.4.4" regexpu-core "^4.5.4" "@babel/plugin-transform-duplicate-keys@^7.2.0": version "7.2.0" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.2.0.tgz#d952c4930f312a4dbfff18f0b2914e60c35530b3" integrity sha512-q+yuxW4DsTjNceUiTzK0L+AfQ0zD9rWaTLiUqHA8p0gxx7lu1EylenfzjeIWNkPy6e/0VG/Wjw9uf9LueQwLOw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-exponentiation-operator@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.2.0.tgz#a63868289e5b4007f7054d46491af51435766008" integrity sha512-umh4hR6N7mu4Elq9GG8TOu9M0bakvlsREEC+ialrQN6ABS4oDQ69qJv1VtR3uxlKMCQMCvzk7vr17RHKcjx68A== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.1.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-for-of@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.4.4.tgz#0267fc735e24c808ba173866c6c4d1440fc3c556" integrity sha512-9T/5Dlr14Z9TIEXLXkt8T1DU7F24cbhwhMNUziN3hB1AXoZcdzPcTiKGRn/6iOymDqtTKWnr/BtRKN9JwbKtdQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-function-name@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.4.4.tgz#e1436116abb0610c2259094848754ac5230922ad" integrity sha512-iU9pv7U+2jC9ANQkKeNF6DrPy4GBa4NWQtl6dHB4Pb3izX2JOEvDTFarlNsBj/63ZEzNNIAMs3Qw4fNCcSOXJA== dependencies: "@babel/helper-function-name" "^7.1.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-literals@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.2.0.tgz#690353e81f9267dad4fd8cfd77eafa86aba53ea1" integrity sha512-2ThDhm4lI4oV7fVQ6pNNK+sx+c/GM5/SaML0w/r4ZB7sAneD/piDJtwdKlNckXeyGK7wlwg2E2w33C/Hh+VFCg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-transform-member-expression-literals@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.2.0.tgz#fa10aa5c58a2cb6afcf2c9ffa8cb4d8b3d489a2d" integrity sha512-HiU3zKkSU6scTidmnFJ0bMX8hz5ixC93b4MHMiYebmk2lUVNGOboPsqQvx5LzooihijUoLR/v7Nc1rbBtnc7FA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-amd@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.2.0.tgz#82a9bce45b95441f617a24011dc89d12da7f4ee6" integrity sha512-mK2A8ucqz1qhrdqjS9VMIDfIvvT2thrEsIQzbaTdc5QFzhDjQv2CkJJ5f6BXIkgbmaoax3zBr2RyvV/8zeoUZw== dependencies: "@babel/helper-module-transforms" "^7.1.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-commonjs@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.4.4.tgz#0bef4713d30f1d78c2e59b3d6db40e60192cac1e" integrity sha512-4sfBOJt58sEo9a2BQXnZq+Q3ZTSAUXyK3E30o36BOGnJ+tvJ6YSxF0PG6kERvbeISgProodWuI9UVG3/FMY6iw== dependencies: "@babel/helper-module-transforms" "^7.4.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.1.0" "@babel/plugin-transform-modules-systemjs@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.4.4.tgz#dc83c5665b07d6c2a7b224c00ac63659ea36a405" integrity sha512-MSiModfILQc3/oqnG7NrP1jHaSPryO6tA2kOMmAQApz5dayPxWiHqmq4sWH2xF5LcQK56LlbKByCd8Aah/OIkQ== dependencies: "@babel/helper-hoist-variables" "^7.4.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-umd@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.2.0.tgz#7678ce75169f0877b8eb2235538c074268dd01ae" integrity 
sha512-BV3bw6MyUH1iIsGhXlOK6sXhmSarZjtJ/vMiD9dNmpY8QXFFQTj+6v92pcfy1iqa8DeAfJFwoxcrS/TUZda6sw== dependencies: "@babel/helper-module-transforms" "^7.1.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-named-capturing-groups-regex@^7.4.5": version "7.4.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.4.5.tgz#9d269fd28a370258199b4294736813a60bbdd106" integrity sha512-z7+2IsWafTBbjNsOxU/Iv5CvTJlr5w4+HGu1HovKYTtgJ362f7kBcQglkfmlspKKZ3bgrbSGvLfNx++ZJgCWsg== dependencies: regexp-tree "^0.1.6" "@babel/plugin-transform-new-target@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.4.4.tgz#18d120438b0cc9ee95a47f2c72bc9768fbed60a5" integrity sha512-r1z3T2DNGQwwe2vPGZMBNjioT2scgWzK9BCnDEh+46z8EEwXBq24uRzd65I7pjtugzPSj921aM15RpESgzsSuA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-object-super@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.2.0.tgz#b35d4c10f56bab5d650047dad0f1d8e8814b6598" integrity sha512-VMyhPYZISFZAqAPVkiYb7dUe2AsVi2/wCT5+wZdsNO31FojQJa9ns40hzZ6U9f50Jlq4w6qwzdBB2uwqZ00ebg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.1.0" "@babel/plugin-transform-parameters@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.4.4.tgz#7556cf03f318bd2719fe4c922d2d808be5571e16" integrity sha512-oMh5DUO1V63nZcu/ZVLQFqiihBGo4OpxJxR1otF50GMeCLiRx5nUdtokd+u9SuVJrvvuIh9OosRFPP4pIPnwmw== dependencies: "@babel/helper-call-delegate" "^7.4.4" "@babel/helper-get-function-arity" "^7.0.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-property-literals@^7.2.0": version "7.2.0" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.2.0.tgz#03e33f653f5b25c4eb572c98b9485055b389e905" integrity sha512-9q7Dbk4RhgcLp8ebduOpCbtjh7C0itoLYHXd9ueASKAG/is5PQtMR5VJGka9NKqGhYEGn5ITahd4h9QeBMylWQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-regenerator@^7.4.5": version "7.4.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.4.5.tgz#629dc82512c55cee01341fb27bdfcb210354680f" integrity sha512-gBKRh5qAaCWntnd09S8QC7r3auLCqq5DI6O0DlfoyDjslSBVqBibrMdsqO+Uhmx3+BlOmE/Kw1HFxmGbv0N9dA== dependencies: regenerator-transform "^0.14.0" "@babel/plugin-transform-reserved-words@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.2.0.tgz#4792af87c998a49367597d07fedf02636d2e1634" integrity sha512-fz43fqW8E1tAB3DKF19/vxbpib1fuyCwSPE418ge5ZxILnBhWyhtPgz8eh1RCGGJlwvksHkyxMxh0eenFi+kFw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-runtime@^7.4.3": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.4.4.tgz#a50f5d16e9c3a4ac18a1a9f9803c107c380bce08" integrity sha512-aMVojEjPszvau3NRg+TIH14ynZLvPewH4xhlCW1w6A3rkxTS1m4uwzRclYR9oS+rl/dr+kT+pzbfHuAWP/lc7Q== dependencies: "@babel/helper-module-imports" "^7.0.0" "@babel/helper-plugin-utils" "^7.0.0" resolve "^1.8.1" semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.2.0.tgz#6333aee2f8d6ee7e28615457298934a3b46198f0" integrity sha512-QP4eUM83ha9zmYtpbnyjTLAGKQritA5XW/iG9cjtuOI8s1RuL/3V6a3DeSHfKutJQ+ayUfeZJPcnCYEQzaPQqg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-spread@^7.2.0": version "7.2.2" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.2.2.tgz#3103a9abe22f742b6d406ecd3cd49b774919b406" integrity sha512-KWfky/58vubwtS0hLqEnrWJjsMGaOeSBn90Ezn5Jeg9Z8KKHmELbP1yGylMlm5N6TPKeY9A2+UaSYLdxahg01w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-sticky-regex@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.2.0.tgz#a1e454b5995560a9c1e0d537dfc15061fd2687e1" integrity sha512-KKYCoGaRAf+ckH8gEL3JHUaFVyNHKe3ASNsZ+AlktgHevvxGigoIttrEJb8iKN03Q7Eazlv1s6cx2B2cQ3Jabw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.0.0" "@babel/plugin-transform-template-literals@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.4.4.tgz#9d28fea7bbce637fb7612a0750989d8321d4bcb0" integrity sha512-mQrEC4TWkhLN0z8ygIvEL9ZEToPhG5K7KDW3pzGqOfIGZ28Jb0POUkeWcoz8HnHvhFy6dwAT1j8OzqN8s804+g== dependencies: "@babel/helper-annotate-as-pure" "^7.0.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-typeof-symbol@^7.2.0": version "7.2.0" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.2.0.tgz#117d2bcec2fbf64b4b59d1f9819894682d29f2b2" integrity sha512-2LNhETWYxiYysBtrBTqL8+La0jIoQQnIScUJc74OYvUGRmkskNY4EzLCnjHBzdmb38wqtTaixpo1NctEcvMDZw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-unicode-regex@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.4.4.tgz#ab4634bb4f14d36728bf5978322b35587787970f" integrity sha512-il+/XdNw01i93+M9J9u4T7/e/Ue/vWfNZE4IRUQjplu2Mqb/AFTDimkw2tdEdSH50wuQXZAbXSql0UphQke+vA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.4.4" regexpu-core "^4.5.4" "@babel/preset-env@^7.4.3": 
version "7.4.5" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.4.5.tgz#2fad7f62983d5af563b5f3139242755884998a58" integrity sha512-f2yNVXM+FsR5V8UwcFeIHzHWgnhXg3NpRmy0ADvALpnhB0SLbCvrCRr4BLOUYbQNLS+Z0Yer46x9dJXpXewI7w== dependencies: "@babel/helper-module-imports" "^7.0.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-proposal-async-generator-functions" "^7.2.0" "@babel/plugin-proposal-json-strings" "^7.2.0" "@babel/plugin-proposal-object-rest-spread" "^7.4.4" "@babel/plugin-proposal-optional-catch-binding" "^7.2.0" "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" "@babel/plugin-syntax-async-generators" "^7.2.0" "@babel/plugin-syntax-json-strings" "^7.2.0" "@babel/plugin-syntax-object-rest-spread" "^7.2.0" "@babel/plugin-syntax-optional-catch-binding" "^7.2.0" "@babel/plugin-transform-arrow-functions" "^7.2.0" "@babel/plugin-transform-async-to-generator" "^7.4.4" "@babel/plugin-transform-block-scoped-functions" "^7.2.0" "@babel/plugin-transform-block-scoping" "^7.4.4" "@babel/plugin-transform-classes" "^7.4.4" "@babel/plugin-transform-computed-properties" "^7.2.0" "@babel/plugin-transform-destructuring" "^7.4.4" "@babel/plugin-transform-dotall-regex" "^7.4.4" "@babel/plugin-transform-duplicate-keys" "^7.2.0" "@babel/plugin-transform-exponentiation-operator" "^7.2.0" "@babel/plugin-transform-for-of" "^7.4.4" "@babel/plugin-transform-function-name" "^7.4.4" "@babel/plugin-transform-literals" "^7.2.0" "@babel/plugin-transform-member-expression-literals" "^7.2.0" "@babel/plugin-transform-modules-amd" "^7.2.0" "@babel/plugin-transform-modules-commonjs" "^7.4.4" "@babel/plugin-transform-modules-systemjs" "^7.4.4" "@babel/plugin-transform-modules-umd" "^7.2.0" "@babel/plugin-transform-named-capturing-groups-regex" "^7.4.5" "@babel/plugin-transform-new-target" "^7.4.4" "@babel/plugin-transform-object-super" "^7.2.0" "@babel/plugin-transform-parameters" "^7.4.4" "@babel/plugin-transform-property-literals" "^7.2.0" 
"@babel/plugin-transform-regenerator" "^7.4.5" "@babel/plugin-transform-reserved-words" "^7.2.0" "@babel/plugin-transform-shorthand-properties" "^7.2.0" "@babel/plugin-transform-spread" "^7.2.0" "@babel/plugin-transform-sticky-regex" "^7.2.0" "@babel/plugin-transform-template-literals" "^7.4.4" "@babel/plugin-transform-typeof-symbol" "^7.2.0" "@babel/plugin-transform-unicode-regex" "^7.4.4" "@babel/types" "^7.4.4" browserslist "^4.6.0" core-js-compat "^3.1.1" invariant "^2.2.2" js-levenshtein "^1.1.3" semver "^5.5.0" "@babel/runtime@^7.4.3": version "7.4.5" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.4.5.tgz#582bb531f5f9dc67d2fcb682979894f75e253f12" integrity sha512-TuI4qpWZP6lGOGIuGWtp9sPluqYICmbk8T/1vpSysqJxRPkudh/ofFWyqdcMsDf2s7KvDL4/YHgKyvcS3g9CJQ== dependencies: regenerator-runtime "^0.13.2" "@babel/template@^7.1.0", "@babel/template@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.4.4.tgz#f4b88d1225689a08f5bc3a17483545be9e4ed237" integrity sha512-CiGzLN9KgAvgZsnivND7rkA+AeJ9JB0ciPOD4U59GKbQP2iQl+olF1l76kJOupqidozfZ32ghwBEJDhnk9MEcw== dependencies: "@babel/code-frame" "^7.0.0" "@babel/parser" "^7.4.4" "@babel/types" "^7.4.4" "@babel/traverse@^7.1.0", "@babel/traverse@^7.4.4", "@babel/traverse@^7.4.5": version "7.4.5" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.4.5.tgz#4e92d1728fd2f1897dafdd321efbff92156c3216" integrity sha512-Vc+qjynwkjRmIFGxy0KYoPj4FdVDxLej89kMHFsWScq999uX+pwcX4v9mWRjW0KcAYTPAuVQl2LKP1wEVLsp+A== dependencies: "@babel/code-frame" "^7.0.0" "@babel/generator" "^7.4.4" "@babel/helper-function-name" "^7.1.0" "@babel/helper-split-export-declaration" "^7.4.4" "@babel/parser" "^7.4.5" "@babel/types" "^7.4.4" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.11" "@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4": version "7.4.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.4.4.tgz#8db9e9a629bb7c29370009b4b779ed93fe57d5f0" integrity 
sha512-dOllgYdnEFOebhkKCjzSVFqw/PmmB8pH6RGOWkY4GsboQNd47b1fBThBSwlHAq9alF9vc1M3+6oqR47R50L0tQ== dependencies: esutils "^2.0.2" lodash "^4.17.11" to-fast-properties "^2.0.0" "@babel/types@^7.6.1", "@babel/types@^7.9.6": version "7.13.0" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.13.0.tgz#74424d2816f0171b4100f0ab34e9a374efdf7f80" integrity sha512-hE+HE8rnG1Z6Wzo+MhaKE5lM5eMx71T4EHJgku2E3xIfaULhDcxiiRxUYgwX8qwP1BBSlag+TdGOt6JAidIZTA== dependencies: "@babel/helper-validator-identifier" "^7.12.11" lodash "^4.17.19" to-fast-properties "^2.0.0" "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== "@types/glob@^7.1.1": version "7.1.1" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== dependencies: "@types/events" "*" "@types/minimatch" "*" "@types/node" "*" "@types/minimatch@*": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== "@types/node@*": version "12.0.8" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.0.8.tgz#551466be11b2adc3f3d47156758f610bd9f6b1d8" integrity sha512-b8bbUOTwzIY3V5vDTY1fIJ+ePKDUBqt2hC2woVGotdQQhG/2Sh62HOKHrT7ab+VerXAcPyAiTEipPu/FsreUtg== "@webassemblyjs/ast@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== dependencies: "@webassemblyjs/helper-module-context" "1.8.5" 
"@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@webassemblyjs/floating-point-hex-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== "@webassemblyjs/helper-api-error@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== "@webassemblyjs/helper-buffer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" integrity sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== "@webassemblyjs/helper-code-frame@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== dependencies: "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/helper-fsm@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" integrity sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== "@webassemblyjs/helper-module-context@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== dependencies: 
"@webassemblyjs/ast" "1.8.5" mamacro "^0.0.3" "@webassemblyjs/helper-wasm-bytecode@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== "@webassemblyjs/helper-wasm-section@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/ieee754@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" integrity sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== "@webassemblyjs/wasm-edit@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" integrity 
sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/helper-wasm-section" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-opt" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/wasm-gen@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wasm-opt@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" integrity sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wasm-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" integrity sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wast-parser@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/floating-point-hex-parser" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-code-frame" "1.8.5" "@webassemblyjs/helper-fsm" "1.8.5" "@xtuc/long" "4.2.2" "@webassemblyjs/wast-printer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== abbrev@1.0.x: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU= accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" integrity 
sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== dependencies: mime-types "~2.1.24" negotiator "0.6.2" acorn-dynamic-import@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/acorn-dynamic-import/-/acorn-dynamic-import-4.0.0.tgz#482210140582a36b83c3e342e1cfebcaa9240948" integrity sha512-d3OEjQV4ROpoflsnUA8HozoIR504TFxNivYEUi6uwz0IYhBkTDXGuWlNdMtybRt3nqVx/L6XqMt0FxkXuWKZhw== acorn@^6.0.5: version "6.1.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.1.1.tgz#7d25ae05bb8ad1f9b699108e1094ecd7884adc1f" integrity sha512-jPTiwtOxaHNaAPg/dmrJ/beuzLRnXtB0kQPQ8JpotKJgTB6rX6c8mlf315941pyjBSaPg8NHXS9fhP4u17DpGA== acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" integrity sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8= ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0: version "3.4.0" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.0.tgz#4b831e7b531415a7cc518cd404e73f6193c6349d" integrity sha512-aUjdRFISbuFOl0EIZc+9e4FfZp0bDZgAdOOf30bJmw8VM9v84SHyVyxDfbWxpGYbdZD/9XoKxfHVNmxPkhwyGw== ajv@^5.0.0: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= dependencies: co "^4.6.0" fast-deep-equal "^1.0.0" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.3.0" ajv@^6.1.0, ajv@^6.5.5: version "6.10.0" resolved 
"https://registry.yarnpkg.com/ajv/-/ajv-6.10.0.tgz#90d0d54439da587cd7e843bfb7045f50bd22bdf1" integrity sha512-nffhOpkymDECQyR0mnsUtoCE8RlX38G0rYP+wgLWFyZuUyuuojSSvi/+euOiQBIn63whYwYVIIH1TvE3tu4OEg== dependencies: fast-deep-equal "^2.0.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" integrity sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU= ansi-colors@^3.0.0: version "3.2.4" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== ansi-html@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" anymatch@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" integrity 
sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== dependencies: micromatch "^3.1.4" normalize-path "^2.1.1" aproba@^1.0.3, aproba@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" arr-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-union@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= array-flatten@1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= array-flatten@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= array-unique@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= arraybuffer.slice@~0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675" integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog== asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= asn1.js@^4.0.0: version "4.10.1" resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== dependencies: bn.js "^4.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity 
sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-never@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/assert-never/-/assert-never-1.2.1.tgz#11f0e363bf146205fb08193b5c7b90f4d1cf44fe" integrity sha512-TaTivMB6pYI1kXwrFlEhLeGfOqoDNdTxjCdwRfFFkEA30Eu+k48W34nlok2EYWJfFFzqaEmichdNM7th6M5HNw== assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assert@^1.1.1: version "1.5.0" resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== dependencies: object-assign "^4.1.1" util "0.10.3" assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= async-each@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async-limiter@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.0.tgz#78faed8c3d074ab81f22b4e985d79e8738f720f8" integrity sha512-jp/uFnooOiO+L211eZOoSyzpOITMXx1rBITauYykG3BRYPu8h0UcxsPNB04RR5vo4Tyz3+ay17tR6JVf9qzYWg== async@1.x, async@^1.5.2: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= async@^2.0.0, async@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/async/-/async-2.6.2.tgz#18330ea7e6e313887f5d2f2a904bac6fe4dd5381" integrity 
sha512-H1qVYh1MYhEEFLsP97cVKqCGo7KfCyTt6uEWqsTBr9SO84oK9Uwbyd/yCW+6rKJLHksBNUVWZDAjfS+Ccx0Bbg== dependencies: lodash "^4.17.11" asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= atob@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== autoprefixer@^9.5.1: version "9.6.0" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.6.0.tgz#0111c6bde2ad20c6f17995a33fad7cf6854b4c87" integrity sha512-kuip9YilBqhirhHEGHaBTZKXL//xxGnzvsD0FtBQa6z+A69qZD6s/BAX9VzDF1i9VKDquTJDQaPLSEhOnL6FvQ== dependencies: browserslist "^4.6.1" caniuse-lite "^1.0.30000971" chalk "^2.4.2" normalize-range "^0.1.2" num2fraction "^1.2.2" postcss "^7.0.16" postcss-value-parser "^3.3.1" aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" js-tokens "^3.0.2" babel-generator@^6.18.0: version "6.26.1" resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== dependencies: 
babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" detect-indent "^4.0.0" jsesc "^1.3.0" lodash "^4.17.4" source-map "^0.5.7" trim-right "^1.0.1" babel-loader@^8.0.5: version "8.0.6" resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb" integrity sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw== dependencies: find-cache-dir "^2.0.0" loader-utils "^1.0.2" mkdirp "^0.5.1" pify "^4.0.1" babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= dependencies: babel-runtime "^6.22.0" babel-runtime@^6.0.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= dependencies: core-js "^2.4.0" regenerator-runtime "^0.11.0" babel-template@^6.16.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= dependencies: babel-runtime "^6.26.0" babel-traverse "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" lodash "^4.17.4" babel-traverse@^6.18.0, babel-traverse@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= dependencies: babel-code-frame "^6.26.0" babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" debug "^2.6.8" globals "^9.18.0" invariant "^2.2.2" lodash "^4.17.4" babel-types@^6.18.0, babel-types@^6.26.0: version "6.26.0" resolved 
"https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: babel-runtime "^6.26.0" esutils "^2.0.2" lodash "^4.17.4" to-fast-properties "^1.0.3" babel-walk@3.0.0-canary-5: version "3.0.0-canary-5" resolved "https://registry.yarnpkg.com/babel-walk/-/babel-walk-3.0.0-canary-5.tgz#f66ecd7298357aee44955f235a6ef54219104b11" integrity sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw== dependencies: "@babel/types" "^7.9.6" babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" integrity sha1-c5JncZI7Whl0etZmqlzUv5xunOg= base64-js@^1.0.2: version "1.3.0" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.0.tgz#cab1e6118f051095e58b5281aea8c1cd22bfc0e3" integrity sha512-ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw== base64id@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY= base@^0.11.1: version "0.11.2" resolved 
"https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" class-utils "^0.3.5" component-emitter "^1.2.1" define-property "^1.0.0" isobject "^3.0.1" mixin-deep "^1.2.0" pascalcase "^0.1.1" batch@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" better-assert@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" integrity sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI= dependencies: callsite "1.0.0" big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== big.js@^5.2.2: version "5.2.2" resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^1.0.0: version "1.13.1" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== blob@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.5.tgz#d680eeef25f8cd91ad533f5b01eed48e64caf683" integrity 
sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig== bluebird@^3.3.0, bluebird@^3.5.3: version "3.5.5" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.5.tgz#a8d0afd73251effbbd5fe384a77d73003c17a71f" integrity sha512-5am6HnnfN+urzt4yfg7IgTbotDjIT/u8AJpEt0sIU9FtXfVeezXAPKswrG+xKUCOYAINpSdgZVDU6QFh+cuH3w== bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== body-parser@1.19.0, body-parser@^1.16.1: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== dependencies: bytes "3.1.0" content-type "~1.0.4" debug "2.6.9" depd "~1.1.2" http-errors "1.7.2" iconv-lite "0.4.24" on-finished "~2.3.0" qs "6.7.0" raw-body "2.4.0" type-is "~1.6.17" bonjour@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= dependencies: array-flatten "^2.1.0" deep-equal "^1.0.1" dns-equal "^1.0.0" dns-txt "^2.0.2" multicast-dns "^6.0.1" multicast-dns-service-types "^1.1.0" boolbase@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" braces@^2.3.1, 
braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" repeat-element "^1.1.2" snapdragon "^0.8.1" snapdragon-node "^2.0.1" split-string "^3.0.2" to-regex "^3.0.1" brorand@^1.0.1, brorand@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== dependencies: buffer-xor "^1.0.3" cipher-base "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.3" inherits "^2.0.1" safe-buffer "^5.0.1" browserify-cipher@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== dependencies: browserify-aes "^1.0.4" browserify-des "^1.0.0" evp_bytestokey "^1.0.0" browserify-des@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== dependencies: cipher-base "^1.0.1" des.js "^1.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" browserify-rsa@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" integrity 
sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= dependencies: bn.js "^4.1.0" randombytes "^2.0.1" browserify-sign@^4.0.0: version "4.0.4" resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= dependencies: bn.js "^4.1.1" browserify-rsa "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.2" elliptic "^6.0.0" inherits "^2.0.1" parse-asn1 "^5.0.0" browserify-zlib@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== dependencies: pako "~1.0.5" browserslist@^4.6.0, browserslist@^4.6.1: version "4.16.6" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== dependencies: caniuse-lite "^1.0.30001219" colorette "^1.2.2" electron-to-chromium "^1.3.723" escalade "^3.1.1" node-releases "^1.1.71" buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== buffer-alloc@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== dependencies: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" buffer-fill@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= buffer-from@^1.0.0: version 
"1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== buffer-indexof@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= buffer@^4.3.0: version "4.9.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.1.tgz#6d1bb601b07a4efced97094132093027c95bc298" integrity sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg= dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" isarray "^1.0.0" builtin-status-codes@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= bytes@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== cacache@^11.3.2: version "11.3.2" resolved "https://registry.yarnpkg.com/cacache/-/cacache-11.3.2.tgz#2d81e308e3d258ca38125b676b98b2ac9ce69bfa" integrity sha512-E0zP4EPGDOaT2chM08Als91eYnf8Z+eH1awwwVsngUmgppfM5jjJ8l3z5vO5p5w/I3LsiXawb1sW0VY65pQABg== dependencies: bluebird "^3.5.3" chownr "^1.1.1" figgy-pudding "^3.5.1" glob "^7.1.3" graceful-fs "^4.1.15" lru-cache "^5.1.1" mississippi "^3.0.0" mkdirp "^0.5.1" 
move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.2" ssri "^6.0.1" unique-filename "^1.1.1" y18n "^4.0.0" cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" component-emitter "^1.2.1" get-value "^2.0.6" has-value "^1.0.0" isobject "^3.0.1" set-value "^2.0.0" to-object-path "^0.3.0" union-value "^1.0.0" unset-value "^1.0.0" caller-callsite@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= dependencies: callsites "^2.0.0" caller-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= dependencies: caller-callsite "^2.0.0" callsite@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" integrity sha1-KAOY5dZkvXQDi28JBRU+borxvCA= callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= camel-case@3.0.x: version "3.0.0" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" integrity sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= dependencies: no-case "^2.2.0" upper-case "^1.1.1" camelcase-keys@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= dependencies: camelcase "^2.0.0" map-obj "^1.0.0" camelcase@^2.0.0: version "2.1.1" resolved 
"https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= camelcase@^5.0.0, camelcase@^5.2.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== caniuse-lite@^1.0.30000971, caniuse-lite@^1.0.30001219: version "1.0.30001228" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz#bfdc5942cd3326fa51ee0b42fbef4da9d492a7fa" integrity sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A== caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" character-parser@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" integrity sha1-x84o821LzZdE5f/CxfzeHHMmH8A= dependencies: is-regex "^1.0.3" chokidar@^2.0.2, chokidar@^2.0.3, chokidar@^2.1.6: version "2.1.6" resolved 
"https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.6.tgz#b6cad653a929e244ce8a834244164d241fa954c5" integrity sha512-V2jUo67OKkc6ySiRpJrjlpJKl9kDuG+Xb8VgsGzb+aEouhgS1D0weyPU4lEzdAcsCAvrih2J2BqyXqHWvVLw5g== dependencies: anymatch "^2.0.0" async-each "^1.0.1" braces "^2.3.2" glob-parent "^3.1.0" inherits "^2.0.3" is-binary-path "^1.0.0" is-glob "^4.0.0" normalize-path "^3.0.0" path-is-absolute "^1.0.0" readdirp "^2.2.1" upath "^1.1.1" optionalDependencies: fsevents "^1.2.7" chownr@^1.1.1, chownr@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== chrome-trace-event@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== dependencies: tslib "^1.9.0" cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" define-property "^0.2.5" isobject "^3.0.0" static-extend "^0.1.1" clean-css@4.2.x: version "4.2.1" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" integrity 
sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== dependencies: source-map "~0.6.0" cliui@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== dependencies: string-width "^2.1.1" strip-ansi "^4.0.0" wrap-ansi "^2.0.0" clone@^2.1.1, clone@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit "^1.0.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colorette@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity 
sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== colors@^1.1.0, colors@^1.1.2: version "1.3.3" resolved "https://registry.yarnpkg.com/colors/-/colors-1.3.3.tgz#39e005d546afe01e01f9c4ca8fa50f686a01205d" integrity sha512-mmGt/1pZqYRjMxB1axhTo16/snVZ5krrKkcmMeVKxzECMMXoCgnvTPp10QgHfcbQZw8Dq2jMNG6je4JlWU0gWg== combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@2.17.x: version "2.17.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== commander@^2.19.0: version "2.20.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.0.tgz#d58bb2b5c1ee8f87b0d340027e9e94e222c5a422" integrity sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ== commander@~2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= component-bind@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E= component-emitter@1.2.1: version "1.2.1" resolved 
"https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== component-inherit@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" integrity sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM= compressible@~2.0.16: version "2.0.17" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.17.tgz#6e8c108a16ad58384a977f3a482ca20bff2f38c1" integrity sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw== dependencies: mime-db ">= 1.40.0 < 2" compression@^1.7.4: version "1.7.4" resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" bytes "3.0.0" compressible "~2.0.16" debug "2.6.9" on-headers "~1.0.2" safe-buffer "5.1.2" vary "~1.1.2" concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= concat-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" inherits "^2.0.3" readable-stream "^2.2.2" typedarray "^0.0.6" connect-history-api-fallback@^1.6.0: version "1.6.0" resolved 
"https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== connect@^3.6.0: version "3.7.0" resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== dependencies: debug "2.6.9" finalhandler "1.1.2" parseurl "~1.3.3" utils-merge "1.0.1" console-browserify@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.1.0.tgz#f0241c45730a9fc6323b206dbf38edc741d0bb10" integrity sha1-8CQcRXMKn8YyOyBtvzjtx0HQuxA= dependencies: date-now "^0.1.4" console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= constantinople@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-4.0.1.tgz#0def113fa0e4dc8de83331a5cf79c8b325213151" integrity sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw== dependencies: "@babel/parser" "^7.6.0" "@babel/types" "^7.6.1" constants-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= content-disposition@0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== dependencies: safe-buffer "5.1.2" content-type@~1.0.4: version 
"1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.1.0, convert-source-map@^1.5.0: version "1.6.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.6.0.tgz#51b537a8c43e0f04dec1993bffcdd504e758ac20" integrity sha512-eFu7XigvxdZ1ETfbgPBohgyQ/Z++C0eEhTor0qRwBw9unw+L0/6V8wkSuGgzdThkiS5lSpdptOQPD8Ak40a+7A== dependencies: safe-buffer "~5.1.1" cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= cookie@0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= cookie@0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== copy-concurrently@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== dependencies: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.0" copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= core-js-compat@^3.1.1: version "3.1.3" resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.1.3.tgz#0cc3ba4c7f62928c2837e1cffbe8dc78b4f1ae14" integrity 
sha512-EP018pVhgwsKHz3YoN1hTq49aRe+h017Kjz0NQz3nXV0cCRMvH3fLQl+vEPGr4r4J5sk4sU3tUC7U1aqTCeJeA== dependencies: browserslist "^4.6.0" core-js-pure "3.1.3" semver "^6.1.0" core-js-pure@3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.1.3.tgz#4c90752d5b9471f641514f3728f51c1e0783d0b5" integrity sha512-k3JWTrcQBKqjkjI0bkfXS0lbpWPxYuHWfMMjC1VDmzU4Q58IwSbuXSo99YO/hUHlw/EB4AlfA2PVxOGkrIq6dA== core-js@^2.2.0, core-js@^2.4.0: version "2.6.9" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.9.tgz#6b4b214620c834152e179323727fc19741b084f2" integrity sha512-HOpZf6eXmnl7la+cUdMnLvUxKNqLUzJvgIziQ0DiF3JwSImNphIqdGqzj6hIKyX04MmV0poclQ7+wjWvxQyR2A== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cosmiconfig@^5.0.0: version "5.2.1" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== dependencies: import-fresh "^2.0.0" is-directory "^0.3.1" js-yaml "^3.13.1" parse-json "^4.0.0" create-ecdh@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== dependencies: bn.js "^4.1.0" elliptic "^6.0.0" create-hash@^1.1.0, create-hash@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== dependencies: cipher-base "^1.0.1" inherits "^2.0.1" md5.js "^1.3.4" ripemd160 "^2.0.1" sha.js "^2.4.0" create-hmac@^1.1.0, create-hmac@^1.1.2, 
create-hmac@^1.1.4: version "1.1.7" resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== dependencies: cipher-base "^1.0.3" create-hash "^1.1.0" inherits "^2.0.1" ripemd160 "^2.0.0" safe-buffer "^5.0.1" sha.js "^2.4.8" cross-spawn@^6.0.0, cross-spawn@^6.0.5: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== dependencies: nice-try "^1.0.4" path-key "^2.0.1" semver "^5.5.0" shebang-command "^1.2.0" which "^1.2.9" crypto-browserify@^3.11.0: version "3.12.0" resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== dependencies: browserify-cipher "^1.0.0" browserify-sign "^4.0.0" create-ecdh "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.0" diffie-hellman "^5.0.0" inherits "^2.0.1" pbkdf2 "^3.0.3" public-encrypt "^4.0.0" randombytes "^2.0.0" randomfill "^1.0.3" css-loader@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-2.1.1.tgz#d8254f72e412bb2238bb44dd674ffbef497333ea" integrity sha512-OcKJU/lt232vl1P9EEDamhoO9iKY3tIjY5GU+XDLblAykTdgs6Ux9P1hTHve8nFKy5KPpOXOsVI/hIwi3841+w== dependencies: camelcase "^5.2.0" icss-utils "^4.1.0" loader-utils "^1.2.3" normalize-path "^3.0.0" postcss "^7.0.14" postcss-modules-extract-imports "^2.0.0" postcss-modules-local-by-default "^2.0.6" postcss-modules-scope "^2.1.0" postcss-modules-values "^2.0.0" postcss-value-parser "^3.3.0" schema-utils "^1.0.0" css-select@^1.1.0: version "1.2.0" resolved 
"https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= dependencies: boolbase "~1.0.0" css-what "2.1" domutils "1.5.1" nth-check "~1.0.1" css-what@2.1: version "2.1.3" resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== cssesc@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== currently-unhandled@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= dependencies: array-find-index "^1.0.1" custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha1-XQKkaFCt8bSjF5RqOSj8y1v9BCU= cyclist@~0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-0.2.2.tgz#1b33792e11e914a2fd6d6ed6447464444e5fa640" integrity sha1-GzN5LhHpFKL9bW7WRHRkRE5fpkA= d@1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" type "^1.0.1" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" date-format@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/date-format/-/date-format-2.0.0.tgz#7cf7b172f1ec564f0003b39ea302c5498fb98c8f" integrity sha512-M6UqVvZVgFYqZL1SfHsRGIQSz3ZL+qgbsV5Lp1Vj61LZVYuEwcMXYay7DRDtYs2HQQBK5hQtQ0fD9aEJ89V0LA== date-now@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/date-now/-/date-now-0.1.4.tgz#eaf439fd4d4848ad74e5cc7dbef200672b9e345b" integrity sha1-6vQ5/U1ISK105cx9vvIAZyueNFs= dateformat@^1.0.6: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" integrity sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk= dependencies: get-stdin "^4.0.1" meow "^3.3.0" debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^3.2.5, debug@^3.2.6: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" debug@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== dependencies: ms "2.0.0" decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= 
decode-uri-component@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= deep-equal@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.0.1.tgz#f5d260292b660e084eff4cdbc9f08ad3247448b5" integrity sha1-9dJgKStmDghO/0zbyfCK0yR0SLU= deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= default-gateway@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== dependencies: execa "^1.0.0" ip-regex "^2.1.0" define-properties@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" 
define-property@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" isobject "^3.0.1" del@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== dependencies: "@types/glob" "^7.1.1" globby "^6.1.0" is-path-cwd "^2.0.0" is-path-in-cwd "^2.0.0" p-map "^2.0.0" pify "^4.0.1" rimraf "^2.6.3" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= des.js@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.0.tgz#c074d2e2aa6a8a9a07dbd61f9a15c2cd83ec8ecc" integrity sha1-wHTS4qpqipoH29YfmhXCzYPsjsw= dependencies: inherits "^2.0.1" minimalistic-assert "^1.0.0" destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= detect-indent@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= dependencies: repeating "^2.0.0" detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= detect-node@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw= diffie-hellman@^5.0.0: version "5.0.3" resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== dependencies: bn.js "^4.1.0" miller-rabin "^4.0.0" randombytes "^2.0.0" dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= dns-packet@^1.3.1: version "1.3.4" resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.4.tgz#e3455065824a2507ba886c55a89963bb107dec6f" integrity sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA== dependencies: ip "^1.1.0" safe-buffer "^5.0.1" dns-txt@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= dependencies: buffer-indexof "^1.0.0" doctypes@^1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" integrity sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk= dom-converter@^0.2: version "0.2.0" resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" dom-serialize@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" integrity sha1-ViromZ9Evl6jB29UGdzVnrQ6yVs= dependencies: custom-event "~1.0.0" ent "~2.2.0" extend "^3.0.0" void-elements "^2.0.0" dom-serializer@0: version "0.1.1" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.1.1.tgz#1ec4059e284babed36eec2941d4a970a189ce7c0" integrity sha512-l0IU0pPzLWSHBcieZbpOKgkIn3ts3vAh7ZuFyXNwJxJXk/c4Gwj9xaTJwIDVQCXawWD0qb3IzMGH5rglQaO0XA== dependencies: domelementtype "^1.3.0" entities "^1.1.1" domain-browser@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== domelementtype@1, domelementtype@^1.3.0, domelementtype@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== domhandler@^2.3.0: version "2.4.2" resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== dependencies: domelementtype "1" domutils@1.5.1: version "1.5.1" resolved 
"https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= dependencies: dom-serializer "0" domelementtype "1" domutils@^1.5.1: version "1.7.0" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== dependencies: dom-serializer "0" domelementtype "1" duplexify@^3.4.2, duplexify@^3.6.0: version "3.7.1" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" inherits "^2.0.1" readable-stream "^2.0.0" stream-shift "^1.0.0" ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.3.723: version "1.3.738" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.738.tgz#aec24b091c82acbfabbdcce08076a703941d17ca" integrity sha512-vCMf4gDOpEylPSLPLSwAEsz+R3ShP02Y3cAKMZvTqule3XcPp7tgc/0ESI7IS6ZeyBlGClE50N53fIOkcIVnpw== elliptic@^6.0.0: version "6.5.4" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== dependencies: bn.js "^4.11.9" brorand "^1.1.0" hash.js "^1.0.0" hmac-drbg "^1.0.1" inherits "^2.0.4" minimalistic-assert "^1.0.1" minimalistic-crypto-utils 
"^1.0.1" emojis-list@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.1" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43" integrity sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q== dependencies: once "^1.4.0" engine.io-client@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.2.1.tgz#6f54c0475de487158a1a7c77d10178708b6add36" integrity sha512-y5AbkytWeM4jQr7m/koQLc5AxpRKC1hEVUb/s1FUAWEJq5AzJJ4NLvzuKPuxtDi5Mq755WuDvZ6Iv2rXj4PTzw== dependencies: component-emitter "1.2.1" component-inherit "0.0.3" debug "~3.1.0" engine.io-parser "~2.1.1" has-cors "1.1.0" indexof "0.0.1" parseqs "0.0.5" parseuri "0.0.5" ws "~3.3.1" xmlhttprequest-ssl "~1.5.4" yeast "0.1.2" engine.io-parser@~2.1.0, engine.io-parser@~2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6" integrity sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA== dependencies: after "0.8.2" arraybuffer.slice "~0.0.7" base64-arraybuffer "0.1.5" blob "0.0.5" has-binary2 "~1.0.2" engine.io@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.2.1.tgz#b60281c35484a70ee0351ea0ebff83ec8c9522a2" integrity sha512-+VlKzHzMhaU+GsCIg4AoXF1UdDFjHHwMmMKqMJNDNLlUlejz58FCy4LBqB2YVJskHGYl06BatYWKP2TVdVXE5w== dependencies: accepts "~1.3.4" base64id "1.0.0" cookie "0.3.1" debug "~3.1.0" engine.io-parser "~2.1.0" ws "~3.3.1" 
enhanced-resolve@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" tapable "^1.0.0" ent@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= entities@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== errno@^0.1.1, errno@^0.1.3, errno@~0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== dependencies: prr "~1.0.1" error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es-abstract@^1.5.1: version "1.13.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.13.0.tgz#ac86145fdd5099d8dd49558ccba2eaf9b88e24e9" integrity sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg== dependencies: es-to-primitive "^1.2.0" function-bind "^1.1.1" has "^1.0.3" is-callable "^1.1.4" is-regex "^1.0.4" object-keys "^1.0.12" es-to-primitive@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.0.tgz#edf72478033456e8dda8ef09e00ad9650707f377" integrity 
sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50, es5-ext@~0.10.14: version "0.10.50" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.50.tgz#6d0e23a0abdb27018e5ac4fd09b412bc5517a778" integrity sha512-KMzZTPBkeQV/JcSQhI5/z6d9VWJ3EnQ194USTUwIYZ2ZbpN8+SGXQKt1h68EX44+qt+Fzr8DO17vnxrw7c3agw== dependencies: es6-iterator "~2.0.3" es6-symbol "~3.1.1" next-tick "^1.0.0" es6-iterator@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= dependencies: d "1" es5-ext "^0.10.35" es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@~3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77" integrity sha1-vwDvT9q2uhtG7Le2KbTH7VcVzHc= dependencies: d "1" es5-ext "~0.10.14" escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@1.8.x: version "1.8.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" integrity sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg= dependencies: esprima "^2.7.1" estraverse "^1.9.1" esutils 
"^2.0.2" optionator "^0.8.1" optionalDependencies: source-map "~0.2.0" eslint-scope@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== dependencies: esrecurse "^4.1.0" estraverse "^4.1.1" esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esrecurse@^4.1.0: version "4.2.1" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== dependencies: estraverse "^4.1.0" estraverse@^1.9.1: version "1.9.3" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= estraverse@^4.1.0, estraverse@^4.1.1: version "4.2.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" integrity sha1-De4/7TH81GlhjOc0IJn8GvoL2xM= esutils@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" integrity sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs= etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= eventemitter3@^4.0.0: version "4.0.7" resolved 
"https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" integrity sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== eventsource@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== dependencies: original "^1.0.0" evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== dependencies: md5.js "^1.3.4" safe-buffer "^5.1.1" execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== dependencies: cross-spawn "^6.0.0" get-stream "^4.0.0" is-stream "^1.1.0" npm-run-path "^2.0.0" p-finally "^1.0.0" signal-exit "^3.0.0" strip-eof "^1.0.0" expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= dependencies: debug "^2.3.3" define-property "^0.2.5" extend-shallow "^2.0.1" posix-character-classes "^0.1.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" resolved 
"https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= dependencies: homedir-polyfill "^1.0.1" express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== dependencies: accepts "~1.3.7" array-flatten "1.1.1" body-parser "1.19.0" content-disposition "0.5.3" content-type "~1.0.4" cookie "0.4.0" cookie-signature "1.0.6" debug "2.6.9" depd "~1.1.2" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" finalhandler "~1.1.2" fresh "0.5.2" merge-descriptors "1.0.1" methods "~1.1.2" on-finished "~2.3.0" parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.5" qs "6.7.0" range-parser "~1.2.1" safe-buffer "5.1.2" send "0.17.1" serve-static "1.14.1" setprototypeof "1.1.1" statuses "~1.5.0" type-is "~1.6.18" utils-merge "1.0.1" vary "~1.1.2" extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= dependencies: assign-symbols "^1.0.0" is-extendable "^1.0.1" extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity 
sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== dependencies: array-unique "^0.3.2" define-property "^1.0.0" expand-brackets "^2.1.4" extend-shallow "^2.0.1" fragment-cache "^0.2.1" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= fast-deep-equal@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= fast-levenshtein@~2.0.4: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= faye-websocket@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" integrity sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= dependencies: websocket-driver ">=0.5.1" faye-websocket@~0.11.1: version "0.11.3" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.3.tgz#5c0e9a8968e8912c286639fde977a8b209f2508e" integrity 
sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== dependencies: websocket-driver ">=0.5.1" figgy-pudding@^3.5.1: version "3.5.2" resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== file-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-3.0.1.tgz#f8e0ba0b599918b51adfe45d66d1e771ad560faa" integrity sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw== dependencies: loader-utils "^1.0.2" schema-utils "^1.0.0" fill-range@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= dependencies: extend-shallow "^2.0.1" is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range "^2.1.0" finalhandler@1.1.2, finalhandler@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" on-finished "~2.3.0" parseurl "~1.3.3" statuses "~1.5.0" unpipe "~1.0.0" find-cache-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== dependencies: commondir "^1.0.1" make-dir "^2.0.0" pkg-dir "^3.0.0" find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= dependencies: path-exists "^2.0.0" 
pinkie-promise "^2.0.0" find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" findup-sync@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-2.0.0.tgz#9326b1488c22d1a6088650a86901b2d9a90a2cbc" integrity sha1-kyaxSIwi0aYIhlCoaQGy2akKLLw= dependencies: detect-file "^1.0.0" is-glob "^3.1.0" micromatch "^3.0.4" resolve-dir "^1.0.1" flatted@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.0.tgz#55122b6536ea496b4b44893ee2608141d10d9916" integrity sha512-R+H8IZclI8AAkSBRQJLVOsxwAoHd6WC40b4QTNWIjzAa6BXOBfQcM587MXDTVPeYaopFNWHUFLx7eNmHDSxMWg== flush-write-stream@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== dependencies: inherits "^2.0.3" readable-stream "^2.3.6" follow-redirects@^1.0.0: version "1.13.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.0.tgz#b42e8d93a2a7eea5ed88633676d6597bc8e384db" integrity sha512-aq6gF1BEKje4a9i9+5jimNFIpq4Q1WiwBToeRK5NvZBd/TRsmW8BsJfOEGkr76TbOyPVD3OVDN910EcUNtRYEA== for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity 
sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" forwarded@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= dependencies: map-cache "^0.2.2" fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= from2@^2.1.0: version "2.3.0" resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= dependencies: inherits "^2.0.1" readable-stream "^2.0.0" fs-access@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" integrity sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o= dependencies: null-check "^1.0.0" fs-extra@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== dependencies: graceful-fs "^4.1.2" jsonfile "^4.0.0" universalify "^0.1.0" fs-minipass@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== dependencies: minipass "^2.6.0" fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved 
"https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= dependencies: graceful-fs "^4.1.2" iferr "^0.1.5" imurmurhash "^0.1.4" readable-stream "1 || 2" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.2.7: version "1.2.9" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: nan "^2.12.1" node-pre-gyp "^0.12.0" function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" has-unicode "^2.0.0" object-assign "^4.1.0" signal-exit "^3.0.0" string-width "^1.0.1" strip-ansi "^3.0.1" wide-align "^1.1.0" get-caller-file@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-stdin@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= get-stream@^4.0.0: version "4.1.0" resolved 
"https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== dependencies: pump "^3.0.0" get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-parent@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= dependencies: is-glob "^3.1.0" path-dirname "^1.0.0" glob@^5.0.13, glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" integrity sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E= dependencies: inflight "^1.0.4" inherits "2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^7.0.3, glob@^7.1.1, glob@^7.1.3: version "7.1.4" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255" integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" global-modules@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" is-windows "^1.0.1" resolve-dir "^1.0.0" global-prefix@^1.0.1: version "1.0.2" resolved 
"https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= dependencies: expand-tilde "^2.0.2" homedir-polyfill "^1.0.1" ini "^1.3.4" is-windows "^1.0.1" which "^1.2.14" globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== globby@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" glob "^7.0.3" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: version "4.1.15" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.15.tgz#ffb703e1066e8a0eeaa4c8b80ba9253eeefbfb00" integrity sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA== handle-thing@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754" integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ== handlebars@^4.0.1: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== dependencies: minimist "^1.2.5" neo-async "^2.6.0" source-map "^0.6.1" wordwrap "^1.0.0" 
optionalDependencies: uglify-js "^3.1.4" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.0: version "5.1.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: ajv "^6.5.5" har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-binary2@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/has-binary2/-/has-binary2-1.0.3.tgz#7776ac627f3ea77250cfc332dab7ddf5e4f5d11d" integrity sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw== dependencies: isarray "2.0.1" has-cors@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk= has-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.0.tgz#ba1a8f1af2a0fc39650f5c850367704122063b44" integrity sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q= has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity 
sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= has-value@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= dependencies: get-value "^2.0.3" has-values "^0.1.4" isobject "^2.0.0" has-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: get-value "^2.0.6" has-values "^1.0.0" isobject "^3.0.0" has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= has-values@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= dependencies: is-number "^3.0.0" kind-of "^4.0.0" has@^1.0.1, has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hash-base@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== dependencies: inherits "^2.0.3" minimalistic-assert "^1.0.1" he@1.2.x: version "1.2.0" resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity 
sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== hmac-drbg@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= dependencies: hash.js "^1.0.3" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.1" homedir-polyfill@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== hpack.js@^2.1.6: version "2.1.6" resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= dependencies: inherits "^2.0.1" obuf "^1.0.0" readable-stream "^2.0.1" wbuf "^1.1.0" html-entities@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8= html-minifier@^3.2.3: version "3.5.21" resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== dependencies: camel-case "3.0.x" clean-css "4.2.x" commander "2.17.x" he "1.2.x" param-case "2.1.x" relateurl "0.2.x" uglify-js "3.4.x" html-webpack-plugin@^3.2.0: version "3.2.0" resolved 
"https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= dependencies: html-minifier "^3.2.3" loader-utils "^0.2.16" lodash "^4.17.3" pretty-error "^2.0.2" tapable "^1.0.0" toposort "^1.0.0" util.promisify "1.0.0" htmlparser2@^3.3.0: version "3.10.1" resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== dependencies: domelementtype "^1.3.1" domhandler "^2.3.0" domutils "^1.5.1" entities "^1.1.1" inherits "^2.0.1" readable-stream "^3.1.1" http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= http-errors@1.7.2, http-errors@~1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" http-errors@~1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.0" statuses ">= 1.4.0 < 2" "http-parser-js@>=0.4.0 <0.4.11": version "0.4.10" resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4" integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q= http-proxy-middleware@^0.19.1: version "0.19.1" resolved 
"https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== dependencies: http-proxy "^1.17.0" is-glob "^4.0.0" lodash "^4.17.11" micromatch "^3.1.10" http-proxy@^1.13.0, http-proxy@^1.17.0: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" requires-port "^1.0.0" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= iconv-lite@0.4.24, iconv-lite@^0.4.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" icss-replace-symbols@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= icss-utils@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== dependencies: postcss 
"^7.0.14" ieee754@^1.1.4: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== iferr@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= ignore-walk@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.1.tgz#a83e62e7d272ac0e3b551aaa82831a19b69f82f8" integrity sha512-DTVlMx3IYPe0/JJcYP7Gxg7ttZZu3IInhuEhbchuqneY9wWe5Ojy2mXLBaQFUQmo0AW2r3qG7m1mg86js+gnlQ== dependencies: minimatch "^3.0.4" image-size@~0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= import-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= dependencies: import-from "^2.1.0" import-fresh@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= dependencies: caller-path "^2.0.0" resolve-from "^3.0.0" import-from@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" integrity sha1-M1238qev/VOqpHHUuAId7ja387E= dependencies: resolve-from "^3.0.0" import-glob-loader@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/import-glob-loader/-/import-glob-loader-1.1.0.tgz#98d84c0f661c8ba9f821d9ddb7c6b6dc8e97eca2" integrity sha1-mNhMD2Yci6n4Idndt8a23I6X7KI= dependencies: glob "^5.0.13" loader-utils "^0.2.10" import-local@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== dependencies: pkg-dir "^3.0.0" resolve-cwd "^2.0.0" imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= dependencies: repeating "^2.0.0" indexes-of@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= indexof@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity 
sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.4, ini@~1.3.0: version "1.3.7" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84" integrity sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ== internal-ip@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" integrity sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== dependencies: default-gateway "^4.2.0" ipaddr.js "^1.9.0" interpret@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" invert-kv@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= ip@^1.1.0, ip@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= ipaddr.js@1.9.0, ipaddr.js@^1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" integrity 
sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== is-accessor-descriptor@^0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-callable@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== is-core-module@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== dependencies: has "^1.0.3" is-data-descriptor@^0.1.4: version "0.1.4" resolved 
"https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== dependencies: kind-of "^6.0.0" is-date-object@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" is-data-descriptor "^1.0.0" kind-of "^6.0.2" is-directory@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= is-expression@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-4.0.0.tgz#c33155962abf21d0afd2552514d67d2ec16fd2ab" integrity sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A== dependencies: acorn "^7.1.1" object-assign "^4.1.1" is-extendable@^0.1.0, 
is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= is-extendable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^2.1.0, is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" integrity sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= is-glob@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-glob@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" is-number@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" is-path-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.1.0.tgz#2e0c7e463ff5b7a0eb60852d851a6809347a124c" integrity sha512-Sc5j3/YnM8tDeyCsVeKlm/0p95075DyLmDEIkSgQ7mXkrOX+uTCtmQFm0CYzVyJwcCCmO3k8qfJt17SxQwB5Zw== is-path-in-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== dependencies: is-path-inside "^2.1.0" is-path-inside@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== dependencies: path-is-inside "^1.0.2" is-plain-obj@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= is-plain-object@^2.0.1, is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= is-regex@^1.0.3, is-regex@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= 
dependencies: has "^1.0.1" is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-symbol@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.2.tgz#a055f6ae57192caee329e7a860118b497a950f38" integrity sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw== dependencies: has-symbols "^1.0.0" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== is-wsl@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isarray@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.1.tgz#a37d94ed9cda2d59865c9f76fe596ee1f338741e" integrity sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4= isbinaryfile@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" integrity 
sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== dependencies: buffer-alloc "^1.2.0" isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul-instrumenter-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/istanbul-instrumenter-loader/-/istanbul-instrumenter-loader-3.0.1.tgz#9957bd59252b373fae5c52b7b5188e6fde2a0949" integrity sha512-a5SPObZgS0jB/ixaKSMdn6n/gXSrK2S6q/UfRJBT3e6gQmVjwZROTODQsYW5ZNwOu78hG62Y3fWlebaVOL0C+w== dependencies: convert-source-map "^1.5.0" istanbul-lib-instrument "^1.7.3" loader-utils "^1.1.0" schema-utils "^0.3.0" istanbul-lib-coverage@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0" integrity sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ== istanbul-lib-instrument@^1.7.3: version "1.10.2" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca" integrity sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A== dependencies: babel-generator "^6.18.0" babel-template 
"^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" babylon "^6.18.0" istanbul-lib-coverage "^1.2.1" semver "^5.3.0" istanbul@^0.4.0: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" integrity sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs= dependencies: abbrev "1.0.x" async "1.x" escodegen "1.8.x" esprima "2.7.x" glob "^5.0.15" handlebars "^4.0.1" js-yaml "3.x" mkdirp "0.5.x" nopt "3.x" once "1.x" resolve "1.1.x" supports-color "^3.1.0" which "^1.1.1" wordwrap "^1.0.0" jasmine-core@^3.3, jasmine-core@^3.4.0: version "3.4.0" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-3.4.0.tgz#2a74618e966026530c3518f03e9f845d26473ce3" integrity sha512-HU/YxV4i6GcmiH4duATwAbJQMlE0MsDIR5XmSVxURxKHn3aGAdbY1/ZJFmVRbKtnLwIxxMJD7gYaPsypcbYimg== js-levenshtein@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-stringify@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" integrity sha1-Fzb939lyTyijaCrcYjCufk6Weds= "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@3.x, js-yaml@^3.13.1: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity 
sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== json-schema-traverse@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version 
"5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= json3@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== json5@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== dependencies: minimist "^1.2.0" json5@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.0.tgz#e7a0c62c48285c628d20a10b85c89bb807c32850" integrity sha512-8Mh9h6xViijj36g7Dxi+Y4S6hNGV96vcJZr/SrlHh1LR/pEn/8j/+qIBbs44YKl69Lrfctp4QD+AdWLTMqEZAQ== dependencies: minimist "^1.2.0" jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= optionalDependencies: graceful-fs "^4.1.6" jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" jstransformer@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" integrity sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM= dependencies: is-promise "^2.0.0" promise "^7.0.1" karma-chrome-launcher@^2.2.0: version "2.2.0" resolved 
"https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" integrity sha512-uf/ZVpAabDBPvdPdveyk1EPgbnloPvFFGgmRhYLTDH7gEB4nZdSBk8yTU47w1g/drLSx5uMOkjKk7IWKfWg/+w== dependencies: fs-access "^1.0.0" which "^1.2.1" karma-coverage@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/karma-coverage/-/karma-coverage-1.1.2.tgz#cc09dceb589a83101aca5fe70c287645ef387689" integrity sha512-eQawj4Cl3z/CjxslYy9ariU4uDh7cCNFZHNWXWRpl0pNeblY/4wHR7M7boTYXWrn9bY0z2pZmr11eKje/S/hIw== dependencies: dateformat "^1.0.6" istanbul "^0.4.0" lodash "^4.17.0" minimatch "^3.0.0" source-map "^0.5.1" karma-jasmine@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-2.0.1.tgz#26e3e31f2faf272dd80ebb0e1898914cc3a19763" integrity sha512-iuC0hmr9b+SNn1DaUD2QEYtUxkS1J+bSJSn7ejdEexs7P8EYvA1CWkEdrDQ+8jVH3AgWlCNwjYsT1chjcNW9lA== dependencies: jasmine-core "^3.3" karma-sourcemap-loader@^0.3.7: version "0.3.7" resolved "https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8" integrity sha1-kTIsd/jxPUb+0GKwQuEAnUxFBdg= dependencies: graceful-fs "^4.1.2" karma-spec-reporter@^0.0.32: version "0.0.32" resolved "https://registry.yarnpkg.com/karma-spec-reporter/-/karma-spec-reporter-0.0.32.tgz#2e9c7207ea726771260259f82becb543209e440a" integrity sha1-LpxyB+pyZ3EmAln4K+y1QyCeRAo= dependencies: colors "^1.1.2" karma-webpack@^3.0.5: version "3.0.5" resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-3.0.5.tgz#1ff1e3a690fb73ae95ee95f9ab58f341cfc7b40f" integrity sha512-nRudGJWstvVuA6Tbju9tyGUfXTtI1UXMXoRHVmM2/78D0q6s/Ye2IC157PKNDC15PWFGR0mVIRtWLAdcfsRJoA== dependencies: async "^2.0.0" babel-runtime "^6.0.0" loader-utils "^1.0.0" lodash "^4.0.0" source-map "^0.5.6" webpack-dev-middleware "^2.0.6" karma@^4.1.0: version "4.1.0" resolved 
"https://registry.yarnpkg.com/karma/-/karma-4.1.0.tgz#d07387c9743a575b40faf73e8a3eb5421c2193e1" integrity sha512-xckiDqyNi512U4dXGOOSyLKPwek6X/vUizSy2f3geYevbLj+UIdvNwbn7IwfUIL2g1GXEPWt/87qFD1fBbl/Uw== dependencies: bluebird "^3.3.0" body-parser "^1.16.1" braces "^2.3.2" chokidar "^2.0.3" colors "^1.1.0" connect "^3.6.0" core-js "^2.2.0" di "^0.0.1" dom-serialize "^2.2.0" flatted "^2.0.0" glob "^7.1.1" graceful-fs "^4.1.2" http-proxy "^1.13.0" isbinaryfile "^3.0.0" lodash "^4.17.11" log4js "^4.0.0" mime "^2.3.1" minimatch "^3.0.2" optimist "^0.6.1" qjobs "^1.1.4" range-parser "^1.2.0" rimraf "^2.6.0" safe-buffer "^5.0.1" socket.io "2.1.1" source-map "^0.6.1" tmp "0.0.33" useragent "2.3.0" killable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== lcid@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== dependencies: invert-kv "^2.0.0" less-loader@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-5.0.0.tgz#498dde3a6c6c4f887458ee9ed3f086a12ad1b466" integrity sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg== dependencies: clone "^2.1.1" loader-utils "^1.1.0" pify "^4.0.1" less@^3.9.0: version "3.9.0" resolved "https://registry.yarnpkg.com/less/-/less-3.9.0.tgz#b7511c43f37cf57dc87dffd9883ec121289b1474" integrity sha512-31CmtPEZraNUtuUREYjSqRkeETFdyEHSEPAGq4erDlUXtda7pzNmctdljdIagSb589d/qXGWiiP31R5JVf+v0w== dependencies: clone "^2.1.2" optionalDependencies: errno "^0.1.1" graceful-fs "^4.1.2" image-size "~0.5.0" mime "^1.4.1" mkdirp "^0.5.0" promise "^7.1.1" request "^2.83.0" source-map "~0.6.0" levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" pify "^2.0.0" pinkie-promise "^2.0.0" strip-bom "^2.0.0" loader-runner@^2.3.0: version "2.4.0" resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== loader-utils@^0.2.10, loader-utils@^0.2.16: version "0.2.17" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" integrity 
sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= dependencies: big.js "^3.1.3" emojis-list "^2.0.0" json5 "^0.5.0" object-assign "^4.0.1" loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== dependencies: big.js "^5.2.2" emojis-list "^2.0.0" json5 "^1.0.1" locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" lodash@^4.0.0, lodash@^4.17.0, lodash@^4.17.11, lodash@^4.17.19, lodash@^4.17.3, lodash@^4.17.4: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-symbols@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== dependencies: chalk "^2.0.1" log4js@^4.0.0: version "4.3.1" resolved "https://registry.yarnpkg.com/log4js/-/log4js-4.3.1.tgz#026cb6fb3cd5b9c4682a96478c356c97b497686e" integrity sha512-nPGS7w7kBnzNm1j8JycFxwLCbIMae8tHCo0cCdx/khB20Tcod8SZThYEB9E0c27ObcTGA1mlPowaf3hantQ/FA== dependencies: date-format "^2.0.0" debug "^4.1.1" flatted "^2.0.0" rfdc "^1.1.2" streamroller "^1.0.5" loglevel@^1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.3.tgz#77f2eb64be55a404c9fd04ad16d57c1d6d6b1280" integrity 
sha512-LoEDv5pgpvWgPF4kNYuIp0qqSJVWak/dML0RY74xlzMZiT9w77teNAwKYKWBTYjlokMirg+o3jBwp+vlLrcfAA== loglevelnext@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/loglevelnext/-/loglevelnext-1.0.5.tgz#36fc4f5996d6640f539ff203ba819641680d75a2" integrity sha512-V/73qkPuJmx4BcBF19xPBr+0ZRVBhc4POxvZTZdMeXpJ4NItXSJ/MSwuFT0kQJlCbXvdlZoQQ/418bS1y9Jh6A== dependencies: es6-symbol "^3.1.1" object.assign "^4.1.0" loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" loud-rejection@^1.0.0, loud-rejection@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= dependencies: currently-unhandled "^0.4.1" signal-exit "^3.0.0" lower-case@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= lru-cache@4.1.x: version "4.1.5" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" make-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" integrity 
sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== dependencies: pify "^4.0.1" semver "^5.6.0" mamacro@^0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== map-age-cleaner@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== dependencies: p-defer "^1.0.0" map-cache@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" md5.js@^1.3.4: version "1.3.5" resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== dependencies: hash-base "^3.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= mem@^4.0.0: version "4.3.0" resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" integrity 
sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== dependencies: map-age-cleaner "^0.1.1" mimic-fn "^2.0.0" p-is-promise "^2.0.0" memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= dependencies: errno "^0.1.3" readable-stream "^2.0.1" meow@^3.3.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= dependencies: camelcase-keys "^2.0.0" decamelize "^1.1.2" loud-rejection "^1.0.0" map-obj "^1.0.1" minimist "^1.1.3" normalize-package-data "^2.3.4" object-assign "^4.0.1" read-pkg-up "^1.0.1" redent "^1.0.0" trim-newlines "^1.0.0" merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4, micromatch@^3.1.8: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" braces "^2.3.1" define-property "^2.0.2" extend-shallow "^3.0.2" extglob "^2.0.4" fragment-cache "^0.2.1" kind-of "^6.0.2" nanomatch "^1.2.9" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.2" miller-rabin@^4.0.0: version "4.0.1" resolved 
"https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== dependencies: bn.js "^4.0.0" brorand "^1.0.1" mime-db@1.40.0, "mime-db@>= 1.40.0 < 2": version "1.40.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.40.0.tgz#a65057e998db090f732a68f6c276d387d4126c32" integrity sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA== mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.19, mime-types@~2.1.24: version "2.1.24" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.24.tgz#b6f8d0b3e951efb77dedeca194cff6d16f676f81" integrity sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ== dependencies: mime-db "1.40.0" mime@1.6.0, mime@^1.4.1: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.1.0, mime@^2.3.1, mime@^2.4.2: version "2.4.4" resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== mimic-fn@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== mini-css-extract-plugin@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz#a3f13372d6fcde912f3ee4cd039665704801e3b9" integrity sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw== dependencies: loader-utils "^1.1.0" normalize-url 
"^2.0.1" schema-utils "^1.0.0" webpack-sources "^1.1.0" minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimalistic-crypto-utils@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= "minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= minipass@^2.6.0, minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" minizlib@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== 
dependencies: minipass "^2.9.0" mississippi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== dependencies: concat-stream "^1.5.0" duplexify "^3.4.2" end-of-stream "^1.1.0" flush-write-stream "^1.0.0" from2 "^2.1.0" parallel-transform "^1.1.0" pump "^3.0.0" pumpify "^1.3.3" stream-each "^1.1.0" through2 "^2.0.0" mixin-deep@^1.2.0: version "1.3.1" resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.1.tgz#a49e7268dce1a0d9698e45326c5626df3543d0fe" integrity sha512-8ZItLHeEgaqEvd5lYBXfm4EZSFCX29Jb9K+lAHhDKzReKBQKj3R+7NOF6tjqYi9t4oI8VUfaWITJQm86wnXGNQ== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.5, mkdirp@~0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" move-concurrently@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= dependencies: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.3" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== ms@^2.1.1: version "2.1.2" resolved 
"https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== multicast-dns-service-types@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= multicast-dns@^6.0.1: version "6.2.3" resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" integrity sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== dependencies: dns-packet "^1.3.1" thunky "^1.0.2" nan@^2.12.1: version "2.14.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" define-property "^2.0.2" extend-shallow "^3.0.2" fragment-cache "^0.2.1" is-windows "^1.0.2" kind-of "^6.0.2" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" needle@^2.2.1: version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" negotiator@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity 
sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== neo-async@^2.5.0, neo-async@^2.6.0: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== next-tick@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== no-case@^2.2.0: version "2.3.2" resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== dependencies: lower-case "^1.1.1" node-forge@0.7.5: version "0.7.5" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.5.tgz#6c152c345ce11c52f465c2abd957e8639cd674df" integrity sha512-MmbQJ2MTESTjt3Gi/3yG1wGpIMhUfcIypUCGtTizFR9IiccFwxSpfp0vtIZlkFclEqERemxfnSdZEMR9VqqEFQ== node-libs-browser@^2.0.0, node-libs-browser@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== dependencies: assert "^1.1.1" browserify-zlib "^0.2.0" buffer "^4.3.0" console-browserify "^1.1.0" constants-browserify "^1.0.0" crypto-browserify "^3.11.0" domain-browser "^1.1.1" events "^3.0.0" https-browserify "^1.0.0" os-browserify "^0.3.0" path-browserify "0.0.1" process "^0.11.10" punycode "^1.2.4" querystring-es3 "^0.2.0" readable-stream 
"^2.3.3" stream-browserify "^2.0.1" stream-http "^2.7.2" string_decoder "^1.0.0" timers-browserify "^2.0.4" tty-browserify "0.0.0" url "^0.11.0" util "^0.11.0" vm-browserify "^1.0.1" node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" needle "^2.2.1" nopt "^4.0.1" npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.2.7" rimraf "^2.6.1" semver "^5.3.0" tar "^4" node-releases@^1.1.71: version "1.1.72" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe" integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== nopt@3.x: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" integrity sha1-xkZdvwirzU2zWTF/eaxopkayj/k= dependencies: abbrev "1" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: remove-trailing-separator 
"^1.0.1" normalize-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= normalize-url@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== dependencies: prepend-http "^2.0.0" query-string "^5.0.1" sort-keys "^2.0.0" npm-bundled@^1.0.1: version "1.0.6" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== npm-packlist@^1.1.6: version "1.4.1" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.1.tgz#19064cdf988da80ea3cee45533879d90192bbfbc" integrity sha512-+TcdO7HJJ8peiiYhvPxsEDhF3PJFGUGRcFsGve3vxvxdcpO2Z4Z7rkosRM0kWj6LfbK/P0gu3dzk5RU1ffvFcw== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" gauge "~2.7.3" set-blocking "~2.0.0" 
nth-check@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== dependencies: boolbase "~1.0.0" null-check@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" integrity sha1-l33/1xdgErnsMNKjnbXPcqBDnt0= null-loader@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-1.0.0.tgz#90e85798e50e9dd1d568495a44e74829dec26744" integrity sha512-mYLDjDVTkjTlFoidxRhzO75rdcwfVXfw5G5zpj8sXnBkHtKJxMk4hTcRR4i5SOhDB6EvcQuYriy6IV23eq6uog== dependencies: loader-utils "^1.2.3" schema-utils "^1.0.0" num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" integrity sha1-8MaapQ78lbhmwYb0AKM3acsvEpE= object-copy@^0.1.0: version "0.1.0" resolved 
"https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" object-keys@^1.0.11, object-keys@^1.0.12: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== dependencies: define-properties "^1.1.2" function-bind "^1.1.1" has-symbols "^1.0.0" object-keys "^1.0.11" object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= dependencies: define-properties "^1.1.2" es-abstract "^1.5.1" object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= dependencies: isobject "^3.0.1" obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== on-finished@~2.3.0: version "2.3.0" resolved 
"https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@1.x, once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" opn@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== dependencies: is-wsl "^1.1.0" optimist@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" optionator@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" integrity sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q= dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.4" levn "~0.3.0" prelude-ls "~1.1.2" type-check "~0.3.2" wordwrap "~1.0.0" original@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== dependencies: url-parse "^1.4.3" os-browserify@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= 
os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-locale@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== dependencies: execa "^1.0.0" lcid "^2.0.0" mem "^4.0.0" os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== p-limit@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.0.tgz#417c9941e6027a9abcba5092dd2904e255b5fbc2" integrity sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ== dependencies: p-try "^2.0.0" p-locate@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-map@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== p-retry@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== dependencies: retry "^0.12.0" p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@~1.0.5: version "1.0.10" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.10.tgz#4328badb5086a426aa90f541977d4955da5c9732" integrity sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw== parallel-transform@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.1.0.tgz#d410f065b05da23081fcd10f28854c29bda33b06" integrity sha1-1BDwZbBdojCB/NEPKIVMKb2jOwY= dependencies: cyclist "~0.2.2" inherits "^2.0.3" readable-stream "^2.1.5" param-case@2.1.x: version "2.1.1" resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= dependencies: no-case "^2.2.0" parse-asn1@^5.0.0: version "5.1.4" resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.4.tgz#37f6628f823fbdeb2273b4d540434a22f3ef1fcc" integrity 
sha512-Qs5duJcuvNExRfFZ99HDD3z4mAi3r9Wl/FOjEOijlxwCZs7E7mW2vjTpgQ4J8LpTF8x5v+1Vn5UQFejmWT11aw== dependencies: asn1.js "^4.0.0" browserify-aes "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.0" pbkdf2 "^3.0.3" safe-buffer "^5.1.1" parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse-json@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= dependencies: error-ex "^1.3.1" json-parse-better-errors "^1.0.1" parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= parseqs@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" integrity sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0= dependencies: better-assert "~1.0.0" parseuri@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" integrity sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo= dependencies: better-assert "~1.0.0" parseurl@~1.3.2, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascalcase@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= path-browserify@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" integrity 
sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-is-inside@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= path-key@^2.0.0, path-key@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= path-parse@^1.0.6: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity 
sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify "^2.0.0" pinkie-promise "^2.0.0" pbkdf2@^3.0.3: version "3.0.17" resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" ripemd160 "^2.0.1" safe-buffer "^5.0.1" sha.js "^2.4.8" performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= pkg-dir@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== dependencies: find-up "^3.0.0" portfinder@^1.0.20: version "1.0.20" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.20.tgz#bea68632e54b2e13ab7b0c4775e9b41bf270e44a" integrity 
sha512-Yxe4mTyDzTd59PZJY4ojZR8F+E5e97iq2ZOHPz3HDgSvYC5siNad2tLooQ5y5QHyQhc3xVqvyk/eNA3wuoa7Sw== dependencies: async "^1.5.2" debug "^2.2.0" mkdirp "0.5.x" posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= postcss-load-config@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.0.tgz#c84d692b7bb7b41ddced94ee62e8ab31b417b003" integrity sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q== dependencies: cosmiconfig "^5.0.0" import-cwd "^2.0.0" postcss-loader@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== dependencies: loader-utils "^1.1.0" postcss "^7.0.0" postcss-load-config "^2.0.0" schema-utils "^1.0.0" postcss-modules-extract-imports@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== dependencies: postcss "^7.0.5" postcss-modules-local-by-default@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.6.tgz#dd9953f6dd476b5fd1ef2d8830c8929760b56e63" integrity sha512-oLUV5YNkeIBa0yQl7EYnxMgy4N6noxmiwZStaEJUSe2xPMcdNc8WmBQuQCx18H5psYbVxz8zoHk0RAAYZXP9gA== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-value-parser "^3.3.1" postcss-modules-scope@^2.1.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.1.0.tgz#ad3f5bf7856114f6fcab901b0502e2a2bc39d4eb" integrity sha512-91Rjps0JnmtUB0cujlc8KIKCsJXWjzuxGeT/+Q2i2HXKZ7nBUeF9YQTZZTNvHVoNYj1AthsjnGLtqDUE0Op79A== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-modules-values@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-2.0.0.tgz#479b46dc0c5ca3dc7fa5270851836b9ec7152f64" integrity sha512-Ki7JZa7ff1N3EIMlPnGTZfUMe69FFwiQPnVSXC9mnn3jozCRBYIxiZd44yJOV2AmabOo4qFf8s0dC/+lweG7+w== dependencies: icss-replace-symbols "^1.1.0" postcss "^7.0.6" postcss-selector-parser@^6.0.0: version "6.0.2" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz#934cf799d016c83411859e09dcecade01286ec5c" integrity sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg== dependencies: cssesc "^3.0.0" indexes-of "^1.0.1" uniq "^1.0.1" postcss-value-parser@^3.3.0, postcss-value-parser@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== postcss@^7.0.0, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.5, postcss@^7.0.6: version "7.0.36" resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.36.tgz#056f8cffa939662a8f5905950c07d5285644dfcb" integrity sha512-BebJSIUMwJHRH0HAQoxN4u1CN86glsrwsW0q7T+/m44eXOUAxSNdHRkNZPYz5vVUbg17hFgOQDE7fZk7li3pZw== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= prepend-http@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= prettier@^1.17.0: version "1.18.2" resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.18.2.tgz#6823e7c5900017b4bd3acf46fe9ac4b4d7bda9ea" integrity sha512-OeHeMc0JhFE9idD4ZdtNibzY0+TPHSpSSb9h8FqtP+YnoZZ1sl8Vc9b1sasjfymH3SonAF4QcA2+mzHPhMvIiw== pretty-error@^2.0.2: version "2.1.1" resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= dependencies: renderkid "^2.0.1" utila "~0.4" private@^0.1.6: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== process-nextick-args@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa" integrity sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw== process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= promise-inflight@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= promise@^7.0.1, promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== dependencies: asap "~2.0.3" proxy-addr@~2.0.5: version "2.0.5" resolved 
"https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== dependencies: forwarded "~0.1.2" ipaddr.js "1.9.0" prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= psl@^1.1.24: version "1.1.32" resolved "https://registry.yarnpkg.com/psl/-/psl-1.1.32.tgz#3f132717cf2f9c169724b2b6caf373cf694198db" integrity sha512-MHACAkHpihU/REGGPLj4sEfc/XKW2bheigvHO1dUqjaKigMp1C8+WLQYRGgeKFMsw5PMfegZcaN8IDXK/cD0+g== public-encrypt@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== dependencies: bn.js "^4.1.0" browserify-rsa "^4.0.0" create-hash "^1.1.0" parse-asn1 "^5.0.0" randombytes "^2.0.1" safe-buffer "^5.1.2" pug-attrs@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-3.0.0.tgz#b10451e0348165e31fad1cc23ebddd9dc7347c41" integrity sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA== dependencies: constantinople "^4.0.1" js-stringify "^1.0.2" pug-runtime "^3.0.0" pug-code-gen@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-3.0.2.tgz#ad190f4943133bf186b60b80de483100e132e2ce" integrity sha512-nJMhW16MbiGRiyR4miDTQMRWDgKplnHyeLvioEJYbk1RsPI3FuA3saEP8uwnTb2nTJEKBU90NFVWJBk4OU5qyg== dependencies: constantinople "^4.0.1" doctypes "^1.1.0" js-stringify "^1.0.2" pug-attrs "^3.0.0" pug-error "^2.0.0" pug-runtime 
"^3.0.0" void-elements "^3.1.0" with "^7.0.0" pug-error@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-2.0.0.tgz#5c62173cb09c34de2a2ce04f17b8adfec74d8ca5" integrity sha512-sjiUsi9M4RAGHktC1drQfCr5C5eriu24Lfbt4s+7SykztEOwVZtbFk1RRq0tzLxcMxMYTBR+zMQaG07J/btayQ== pug-filters@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-4.0.0.tgz#d3e49af5ba8472e9b7a66d980e707ce9d2cc9b5e" integrity sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A== dependencies: constantinople "^4.0.1" jstransformer "1.0.0" pug-error "^2.0.0" pug-walk "^2.0.0" resolve "^1.15.1" pug-lexer@^5.0.0: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-5.0.1.tgz#ae44628c5bef9b190b665683b288ca9024b8b0d5" integrity sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w== dependencies: character-parser "^2.2.0" is-expression "^4.0.0" pug-error "^2.0.0" pug-linker@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-4.0.0.tgz#12cbc0594fc5a3e06b9fc59e6f93c146962a7708" integrity sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw== dependencies: pug-error "^2.0.0" pug-walk "^2.0.0" pug-load@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-load/-/pug-load-3.0.0.tgz#9fd9cda52202b08adb11d25681fb9f34bd41b662" integrity sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ== dependencies: object-assign "^4.1.1" pug-walk "^2.0.0" pug-parser@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-6.0.0.tgz#a8fdc035863a95b2c1dc5ebf4ecf80b4e76a1260" integrity sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw== dependencies: pug-error "^2.0.0" token-stream "1.0.0" pug-runtime@^3.0.0: version "3.0.1" resolved 
"https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-3.0.1.tgz#f636976204723f35a8c5f6fad6acda2a191b83d7" integrity sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg== pug-strip-comments@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz#f94b07fd6b495523330f490a7f554b4ff876303e" integrity sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ== dependencies: pug-error "^2.0.0" pug-walk@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-2.0.0.tgz#417aabc29232bb4499b5b5069a2b2d2a24d5f5fe" integrity sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ== pug@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/pug/-/pug-3.0.1.tgz#9b287554043e6d18852673a382b0350595bdc067" integrity sha512-9v1o2yXMfSKJy2PykKyWUhpgx9Pf9D/UlPgIs2pTTxR6DQZ0oivy4I9f8PlWXRY4sjIhDU4TMJ7hQmYnNJc2bw== dependencies: pug-code-gen "^3.0.2" pug-filters "^4.0.0" pug-lexer "^5.0.0" pug-linker "^4.0.0" pug-load "^3.0.0" pug-parser "^6.0.0" pug-runtime "^3.0.0" pug-strip-comments "^2.0.0" pump@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pumpify@^1.3.3: version "1.5.1" resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" integrity 
sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" inherits "^2.0.3" pump "^2.0.0" punycode@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= punycode@^1.2.4, punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== qjobs@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" integrity sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg== qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== query-string@^5.0.1: version "5.1.1" resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== dependencies: decode-uri-component "^0.2.0" object-assign "^4.1.0" strict-uri-encode "^1.0.0" querystring-es3@^0.2.0: version "0.2.1" resolved 
"https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= querystring@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= querystringify@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" randomfill@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== dependencies: randombytes "^2.0.5" safe-buffer "^5.1.0" range-parser@^1.0.3, range-parser@^1.2.0, range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== dependencies: bytes "3.1.0" http-errors "1.7.2" iconv-lite "0.4.24" unpipe "1.0.0" raw-loader@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/raw-loader/-/raw-loader-2.0.0.tgz#e2813d9e1e3f80d1bbade5ad082e809679e20c26" integrity sha512-kZnO5MoIyrojfrPWqrhFNLZemIAX8edMOCp++yC5RKxzFB3m92DqKNhKlU6+FvpOhWtvyh3jOaD7J6/9tpdIKg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" path-type "^1.0.0" "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@^3.0.6, readable-stream@^3.1.1: version "3.4.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc" integrity 
sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" util-deprecate "^1.0.1" readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== dependencies: graceful-fs "^4.1.11" micromatch "^3.1.10" readable-stream "^2.0.2" redent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= dependencies: indent-string "^2.1.0" strip-indent "^1.0.1" regenerate-unicode-properties@^8.0.2: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" regenerate@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== regenerator-runtime@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.2: version "0.13.2" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.2.tgz#32e59c9a6fb9b1a4aff09b4930ca2d4477343447" integrity sha512-S/TQAZJO+D3m9xeN1WTI8dLKBBiRgXBlTJvbWjCThHWZj9EvHK70Ff50/tYj2J/fvBY6JtFVwRuazHN2E7M9BA== regenerator-transform@^0.14.0: version "0.14.0" resolved 
"https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.0.tgz#2ca9aaf7a2c239dd32e4761218425b8c7a86ecaf" integrity sha512-rtOelq4Cawlbmq9xuMR5gdFmv7ku/sFoB7sRiywx7aq53bc52b4j6zvH7Te1Vt/X2YveDKnCGUbioieU7FEL3w== dependencies: private "^0.1.6" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" safe-regex "^1.1.0" regexp-tree@^0.1.6: version "0.1.10" resolved "https://registry.yarnpkg.com/regexp-tree/-/regexp-tree-0.1.10.tgz#d837816a039c7af8a8d64d7a7c3cf6a1d93450bc" integrity sha512-K1qVSbcedffwuIslMwpe6vGlj+ZXRnGkvjAtFHfDZZZuEdA/h0dxljAPu9vhUo6Rrx2U2AwJ+nSQ6hK+lrP5MQ== regexpu-core@^4.5.4: version "4.5.4" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.4.tgz#080d9d02289aa87fe1667a4f5136bc98a6aebaae" integrity sha512-BtizvGtFQKGPUcTy56o3nk1bGRp4SZOTYrDtGNlqCQufptV5IkkLN6Emw+yunAJjzf+C9FQFtvq7IoA3+oMYHQ== dependencies: regenerate "^1.4.0" regenerate-unicode-properties "^8.0.2" regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" unicode-match-property-value-ecmascript "^1.1.0" regjsgen@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.0.tgz#a7634dc08f89209c2049adda3525711fb97265dd" integrity sha512-RnIrLhrXCX5ow/E5/Mh2O4e/oa1/jW0eaBKTSy3LaCj+M3Bqvm97GWDp2yUtzIs4LEn65zR2yiYGFqb2ApnzDA== regjsparser@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" relateurl@0.2.x: version "0.2.7" resolved 
"https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= renderkid@^2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== dependencies: css-select "^1.1.0" dom-converter "^0.2" htmlparser2 "^3.3.0" strip-ansi "^3.0.0" utila "^0.4.0" repeat-element@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" request@^2.83.0: version "2.88.0" resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.0" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign 
"~0.9.0" performance-now "^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.4.3" tunnel-agent "^0.6.0" uuid "^3.3.2" require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= resolve-cwd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= dependencies: resolve-from "^3.0.0" resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= dependencies: expand-tilde "^2.0.0" global-modules "^1.0.0" resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" integrity sha1-six699nWiBvItuZTM17rywoYh0g= resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= resolve@^1.10.0, resolve@^1.3.2, resolve@^1.8.1: version "1.11.0" resolved 
"https://registry.yarnpkg.com/resolve/-/resolve-1.11.0.tgz#4014870ba296176b86343d50b60f3b50609ce232" integrity sha512-WL2pBDjqT6pGUNSUzMw00o4T7If+z4H2x3Gz893WoUQ5KW8Vr9txp00ykiP16VBaZF5+j/OcXJHZ9+PCvdiDKw== dependencies: path-parse "^1.0.6" resolve@^1.15.1: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== dependencies: is-core-module "^2.2.0" path-parse "^1.0.6" ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= rfdc@^1.1.2: version "1.1.4" resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.1.4.tgz#ba72cc1367a0ccd9cf81a870b3b58bd3ad07f8c2" integrity sha512-5C9HXdzK8EAqN7JDif30jqsBzavB7wLpaubisuQIGHWf2gUXSpzy6ArX/+Da8RjFpagWsCn+pIgxTMAmKw9Zug== rimraf@^2.5.4, rimraf@^2.6.0, rimraf@^2.6.1, rimraf@^2.6.2, rimraf@^2.6.3: version "2.6.3" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== dependencies: glob "^7.1.3" ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== dependencies: hash-base "^3.0.0" inherits "^2.0.1" run-queue@^1.0.0, run-queue@^1.0.3: version "1.0.3" resolved 
"https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= dependencies: aproba "^1.1.1" safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= dependencies: ret "~0.1.10" "safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== schema-utils@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf" integrity sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8= dependencies: ajv "^5.0.0" schema-utils@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== dependencies: ajv "^6.1.0" ajv-errors "^1.0.0" ajv-keywords "^3.1.0" select-hose@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= selfsigned@^1.10.4: version "1.10.4" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.4.tgz#cdd7eccfca4ed7635d47a08bf2d5d3074092e2cd" integrity sha512-9AukTiDmHXGXWtWjembZ5NDmVvP2695EtpgbCsxCa68w3c88B+alqbmZ4O3hZ4VWGXeGWzEVdvqgAJD8DQPCDw== dependencies: node-forge "0.7.5" "semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: version "5.7.0" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b" integrity sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA== semver@^6.1.0, semver@^6.1.1: version "6.1.1" resolved "https://registry.yarnpkg.com/semver/-/semver-6.1.1.tgz#53f53da9b30b2103cd4f15eab3a18ecbcb210c9b" integrity sha512-rWYq2e5iYW+fFe/oPPtYJxYgjBm8sC4rmoGdUOgBB7VnwKt6HrL793l2voH1UlsyYZpJ4g0wfjnTEO1s1NP2eQ== send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== dependencies: debug "2.6.9" depd "~1.1.2" destroy "~1.0.4" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" http-errors "~1.7.2" mime "1.6.0" ms "2.1.1" on-finished "~2.3.0" range-parser "~1.2.1" statuses "~1.5.0" serialize-javascript@^1.7.0: version "1.7.0" resolved 
"https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-1.7.0.tgz#d6e0dfb2a3832a8c94468e6eb1db97e55a192a65" integrity sha512-ke8UG8ulpFOxO8f8gRYabHQe/ZntKlcig2Mp+8+URDP1D8vJZ0KUt7LYo07q25Z/+JVSgpr/cui9PIp5H6/+nA== serve-index@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= dependencies: accepts "~1.3.4" batch "0.6.1" debug "2.6.9" escape-html "~1.0.3" http-errors "~1.6.2" mime-types "~2.1.17" parseurl "~1.3.2" serve-static@1.14.1: version "1.14.1" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" send "0.17.1" set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-value@^0.4.3: version "0.4.3" resolved "https://registry.yarnpkg.com/set-value/-/set-value-0.4.3.tgz#7db08f9d3d22dc7f78e53af3c3bf4666ecdfccf1" integrity sha1-fbCPnT0i3H945Trzw79GZuzfzPE= dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.1" to-object-path "^0.3.0" set-value@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.0.tgz#71ae4a88f0feefbbf52d1ea604f3fb315ebb6274" integrity sha512-hw0yxk9GT/Hr5yJEYnHNKYXkIA8mVJgd9ditYZCe16ZczcaELYYcfvaXesNACk2O8O0nTiPQcQhGUQj8JLzeeg== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.3" split-string "^3.0.1" setimmediate@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity 
sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== setprototypeof@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= dependencies: shebang-regex "^1.0.0" shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" isobject "^3.0.0" snapdragon-util "^3.0.1" snapdragon-util@^3.0.1: version "3.0.1" resolved 
"https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: base "^0.11.1" debug "^2.2.0" define-property "^0.2.5" extend-shallow "^2.0.1" map-cache "^0.2.2" source-map "^0.5.6" source-map-resolve "^0.5.0" use "^3.1.0" socket.io-adapter@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz#2a805e8a14d6372124dd9159ad4502f8cb07f06b" integrity sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs= socket.io-client@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.1.1.tgz#dcb38103436ab4578ddb026638ae2f21b623671f" integrity sha512-jxnFyhAuFxYfjqIgduQlhzqTcOEQSn+OHKVfAxWaNWa7ecP7xSNk2Dx/3UEsDcY7NcFafxvNvKPmmO7HTwTxGQ== dependencies: backo2 "1.0.2" base64-arraybuffer "0.1.5" component-bind "1.0.0" component-emitter "1.2.1" debug "~3.1.0" engine.io-client "~3.2.0" has-binary2 "~1.0.2" has-cors "1.1.0" indexof "0.0.1" object-component "0.0.3" parseqs "0.0.5" parseuri "0.0.5" socket.io-parser "~3.2.0" to-array "0.1.4" socket.io-parser@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.2.0.tgz#e7c6228b6aa1f814e6148aea325b51aa9499e077" integrity sha512-FYiBx7rc/KORMJlgsXysflWx/RIvtqZbyGLlHZvjfmPTPeuD/I8MaW7cfFrj5tRltICJdgwflhfZ3NVVbVLFQA== dependencies: component-emitter "1.2.1" debug "~3.1.0" isarray "2.0.1" socket.io@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.1.1.tgz#a069c5feabee3e6b214a75b40ce0652e1cfb9980" integrity 
sha512-rORqq9c+7W0DAK3cleWNSyfv/qKXV99hV4tZe+gGLfBECw3XEhBy7x85F3wypA9688LKjtwO9pX9L33/xQI8yA== dependencies: debug "~3.1.0" engine.io "~3.2.0" has-binary2 "~1.0.2" socket.io-adapter "~1.1.0" socket.io-client "2.1.1" socket.io-parser "~3.2.0" sockjs-client@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.3.0.tgz#12fc9d6cb663da5739d3dc5fb6e8687da95cb177" integrity sha512-R9jxEzhnnrdxLCNln0xg5uGHqMnkhPSTzUZH2eXcR03S/On9Yvoq2wyUZILRUhZCNVu2PmwWVoyuiPz8th8zbg== dependencies: debug "^3.2.5" eventsource "^1.0.7" faye-websocket "~0.11.1" inherits "^2.0.3" json3 "^3.3.2" url-parse "^1.4.3" sockjs@0.3.19: version "0.3.19" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d" integrity sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw== dependencies: faye-websocket "^0.10.0" uuid "^3.0.1" sort-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= dependencies: is-plain-obj "^1.0.0" source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-resolve@^0.5.0: version "0.5.2" resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" urix "^0.1.0" source-map-support@~0.5.10: version "0.5.12" resolved 
"https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.12.tgz#b4f3b10d51857a5af0138d3ce8003b201613d599" integrity sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-url@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= source-map@^0.5.0, source-map@^0.5.1, source-map@^0.5.6, source-map@^0.5.7: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" integrity sha1-2rc/vPwrqBm03gO9b26qSBZLP50= dependencies: amdefine ">=0.0.4" spdx-correct@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: spdx-expression-parse "^3.0.0" spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== spdx-expression-parse@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.4.tgz#75ecd1a88de8c184ef015eafb51b5b48bfd11bb1" integrity sha512-7j8LYJLeY/Yb6ACbQ7F76qy5jHkp0U6jgBfJsk97bwWlVUnUWsAgpyaCvo17h0/RQGnQ036tVDomiwoI4pDkQA== spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== dependencies: debug "^4.1.0" detect-node "^2.0.4" hpack.js "^2.1.6" obuf "^1.1.2" readable-stream "^3.0.6" wbuf "^1.7.3" spdy@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.0.tgz#81f222b5a743a329aa12cea6a390e60e9b613c52" integrity sha512-ot0oEGT/PGUpzf/6uk4AWLqkq+irlqHXkrdbk51oWONh3bxQmBuljxPNl66zlRRcIJStWq0QkLUCPOPjgjvU0Q== dependencies: debug "^4.1.0" handle-thing "^2.0.0" http-deceiver "^1.2.7" select-hose "^2.0.0" spdy-transport "^3.0.0" split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow "^3.0.0" sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity 
sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" ssri@^6.0.1: version "6.0.2" resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.2.tgz#157939134f20464e7301ddba3e90ffa8f7728ac5" integrity sha512-cepbSq/neFK7xB6A50KHN0xHDotYzq58wWCa5LeWqnPrHG8GzfEjO/4O8kpmcGW+oaxkvhEJCWgbgNk4/ZV93Q== dependencies: figgy-pudding "^3.5.1" static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== dependencies: inherits "~2.0.1" readable-stream "^2.0.2" stream-each@^1.1.0: version "1.2.3" resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== dependencies: end-of-stream "^1.1.0" stream-shift "^1.0.0" stream-http@^2.7.2: version "2.8.3" resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== dependencies: builtin-status-codes 
"^3.0.0" inherits "^2.0.1" readable-stream "^2.3.6" to-arraybuffer "^1.0.0" xtend "^4.0.0" stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= streamroller@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-1.0.5.tgz#71660c20b06b1a7b204d46085731ad13c10a562d" integrity sha512-iGVaMcyF5PcUY0cPbW3xFQUXnr9O4RZXNBBjhuLZgrjLO4XCLLGfx4T2sGqygSeylUjwgWRsnNbT9aV0Zb8AYw== dependencies: async "^2.6.2" date-format "^2.0.0" debug "^3.2.6" fs-extra "^7.0.1" lodash "^4.17.11" strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" "string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.2.0.tgz#fe86e738b19544afe70469243b2a1ee9240eae8d" integrity sha512-6YqyX6ZWEYguAxgZzHGL7SsCeGx3V2TtOTqZz1xSTSWnqsbWwbptafNyvf/ACquZUXV3DANr5BDIwNYe1mN42w== dependencies: safe-buffer "~5.1.0" string_decoder@~1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= dependencies: get-stdin "^4.0.1" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= style-loader@^0.23.1: version "0.23.1" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.23.1.tgz#cb9154606f3e771ab6c4ab637026a1049174d925" integrity sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" supports-color@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.0: version "3.2.3" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" supports-color@^5.3.0, supports-color@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" supports-color@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== dependencies: has-flag "^3.0.0" tapable@^1.0.0, tapable@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tar@^4: version "4.4.19" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3" integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA== dependencies: chownr "^1.1.4" fs-minipass "^1.2.7" minipass "^2.9.0" minizlib "^1.3.3" mkdirp "^0.5.5" safe-buffer "^5.2.1" yallist "^3.1.1" terser-webpack-plugin@^1.1.0: version "1.3.0" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.3.0.tgz#69aa22426299f4b5b3775cbed8cb2c5d419aa1d4" integrity sha512-W2YWmxPjjkUcOWa4pBEv4OP4er1aeQJlSo2UhtCFQCuRXEHjOFscO8VyWHj9JLlA0RzQb8Y2/Ta78XZvT54uGg== dependencies: cacache "^11.3.2" find-cache-dir 
"^2.0.0" is-wsl "^1.1.0" loader-utils "^1.2.3" schema-utils "^1.0.0" serialize-javascript "^1.7.0" source-map "^0.6.1" terser "^4.0.0" webpack-sources "^1.3.0" worker-farm "^1.7.0" terser@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/terser/-/terser-4.0.0.tgz#ef356f6f359a963e2cc675517f21c1c382877374" integrity sha512-dOapGTU0hETFl1tCo4t56FN+2jffoKyER9qBGoUFyZ6y7WLoKT0bF+lAYi6B6YsILcGF3q1C2FBh8QcKSCgkgA== dependencies: commander "^2.19.0" source-map "~0.6.1" source-map-support "~0.5.10" through2@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" xtend "~4.0.1" thunky@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.0.3.tgz#f5df732453407b09191dae73e2a8cc73f381a826" integrity sha512-YwT8pjmNcAXBZqrubu22P4FYsh2D4dxRmnWBOL8Jk8bUcRUtc5326kx32tuTmFDAZtLOGEVNl8POAR8j896Iow== timers-browserify@^2.0.4: version "2.0.10" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.10.tgz#1d28e3d2aadf1d5a5996c4e9f95601cd053480ae" integrity sha512-YvC1SV1XdOUaL6gx5CoGroT3Gu49pK9+TZ38ErPldOWW4j49GI1HKs9DV+KGq/w6y+LZ72W1c8cKz2vzY+qpzg== dependencies: setimmediate "^1.0.4" tmp@0.0.33, tmp@0.0.x: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" to-array@0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" integrity sha1-F+bBH3PdTz10zaek/zI46a2b+JA= to-arraybuffer@^1.0.0: version "1.0.1" resolved 
"https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= dependencies: is-number "^3.0.0" repeat-string "^1.6.1" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" extend-shallow "^3.0.2" regex-not "^1.0.2" safe-regex "^1.1.0" toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== token-stream@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-1.0.0.tgz#cc200eab2613f4166d27ff9afc7ca56d49df6eb4" integrity sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ= toposort@^1.0.0: version "1.0.7" resolved 
"https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= tough-cookie@~2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: psl "^1.1.24" punycode "^1.4.1" trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= tslib@^1.9.0: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" type-is@~1.6.17, 
type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" type@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/type/-/type-1.0.1.tgz#084c9a17fcc9151a2cdb1459905c2e45e4bb7d61" integrity sha512-MAM5dBMJCJNKs9E7JXo4CXRAansRfG0nlJxW7Wf6GZzSOvH31zClSaHdIMWLehe/EGMBkqeC55rrkaOr5Oo7Nw== typedarray@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= uglify-js@3.4.x: version "3.4.10" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== dependencies: commander "~2.19.0" source-map "~0.6.1" uglify-js@^3.1.4: version "3.13.5" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.13.5.tgz#5d71d6dbba64cf441f32929b1efce7365bb4f113" integrity sha512-xtB8yEqIkn7zmOyS2zUNBsYCBRhDkvlNxMMY2smuJ/qA8NCHeQvKCF3i9Z4k8FJH4+PJvZRtMrPynfZ75+CSZw== ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== unicode-canonical-property-names-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== unicode-match-property-ecmascript@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== dependencies: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" unicode-match-property-value-ecmascript@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.0.tgz#5c71c34cb5bad5dcebe3ea0cd08207ba5aa1aea4" integrity sha1-XHHDTLW61dzr4+oM0IIHulqhrqQ= dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" set-value "^0.4.3" uniq@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= unique-filename@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== dependencies: unique-slug "^2.0.0" unique-slug@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" integrity 
sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== dependencies: imurmurhash "^0.1.4" universalify@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= dependencies: has-value "^0.3.1" isobject "^3.0.0" upath@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/upath/-/upath-1.1.2.tgz#3db658600edaeeccbe6db5e684d67ee8c2acd068" integrity sha512-kXpym8nmDmlCBr7nKdIx8P2jNBa+pBpIUFRnKJ4dr8htyYGJFokkr2ZvERRtUN+9SY+JqXouNgUPtv6JQva/2Q== upper-case@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= uri-js@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= url-join@^2.0.2: version "2.0.5" resolved "https://registry.yarnpkg.com/url-join/-/url-join-2.0.5.tgz#5af22f18c052a000a48d7b82c5e9c2e2feeda728" integrity sha1-WvIvGMBSoACkjXuCxenC4v7tpyg= url-parse@^1.4.3: version "1.5.3" resolved 
"https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.3.tgz#71c1303d38fb6639ade183c2992c8cc0686df862" integrity sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" url@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= dependencies: punycode "1.3.2" querystring "0.2.0" use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== useragent@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" integrity sha512-4AoH4pxuSvHCjqLO04sU6U/uE65BYza8l/KKBS0b0hnUPWi+cQ2BpeTEwejCSx9SPV5/U03nniDTrWx5NrmKdw== dependencies: lru-cache "4.1.x" tmp "0.0.x" util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util.promisify@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== dependencies: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" util@0.10.3: version "0.10.3" resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= dependencies: inherits "2.0.1" util@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" integrity 
sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== dependencies: inherits "2.0.3" utila@^0.4.0, utila@~0.4: version "0.4.0" resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2: version "3.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131" integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA== v8-compile-cache@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" vm-browserify@^1.0.1: version "1.1.0" resolved 
"https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.0.tgz#bd76d6a23323e2ca8ffa12028dc04559c75f9019" integrity sha512-iq+S7vZJE60yejDYM0ek6zg308+UZsdtPExWP9VZoCFCz1zkJoXFnAX7aZfd/ZwrkidzdUZL0C/ryW+JwAiIGw== void-elements@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= void-elements@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-3.1.0.tgz#614f7fbf8d801f0bb5f0661f5b2f5785750e4f09" integrity sha1-YU9/v42AHwu18GYfWy9XhXUOTwk= watchpack@^1.5.0: version "1.6.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== dependencies: chokidar "^2.0.2" graceful-fs "^4.1.2" neo-async "^2.5.0" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== dependencies: minimalistic-assert "^1.0.0" webpack-cli@^3.3.1: version "3.3.4" resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.4.tgz#de27e281c48a897b8c219cb093e261d5f6afe44a" integrity sha512-ubJGQEKMtBSpT+LiL5hXvn2GIOWiRWItR1DGUqJRhwRBeGhpRXjvF5f0erqdRJLErkfqS5/Ldkkedh4AL5Q1ZQ== dependencies: chalk "^2.4.1" cross-spawn "^6.0.5" enhanced-resolve "^4.1.0" findup-sync "^2.0.0" global-modules "^1.0.0" import-local "^2.0.0" interpret "^1.1.0" loader-utils "^1.1.0" prettier "^1.17.0" supports-color "^5.5.0" v8-compile-cache "^2.0.2" yargs "^12.0.5" webpack-dev-middleware@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-2.0.6.tgz#a51692801e8310844ef3e3790e1eacfe52326fd4" integrity 
sha512-tj5LLD9r4tDuRIDa5Mu9lnY2qBBehAITv6A9irqXhw/HQquZgTx3BCd57zYbU2gMDnncA49ufK2qVQSbaKJwOw== dependencies: loud-rejection "^1.6.0" memory-fs "~0.4.1" mime "^2.1.0" path-is-absolute "^1.0.0" range-parser "^1.0.3" url-join "^2.0.2" webpack-log "^1.0.1" webpack-dev-middleware@^3.7.0: version "3.7.0" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.0.tgz#ef751d25f4e9a5c8a35da600c5fda3582b5c6cff" integrity sha512-qvDesR1QZRIAZHOE3iQ4CXLZZSQ1lAUsSpnQmlB1PBfoN/xdRjmge3Dok0W4IdaVLJOGJy3sGI4sZHwjRU0PCA== dependencies: memory-fs "^0.4.1" mime "^2.4.2" range-parser "^1.2.1" webpack-log "^2.0.0" webpack-dev-server@^3.3.1: version "3.7.1" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.7.1.tgz#ce10ca0ad6cf28b03e2ce9808684a8616039155d" integrity sha512-GSBjjDMQ+uJI/Rcw/NfXDq5QpfE4HviafCy2SdbJ8Q22MwsnyoHd5TbWRfxgkbklsMx+ZNgWIKK+cB28ynjiDQ== dependencies: ansi-html "0.0.7" bonjour "^3.5.0" chokidar "^2.1.6" compression "^1.7.4" connect-history-api-fallback "^1.6.0" debug "^4.1.1" del "^4.1.1" express "^4.17.1" html-entities "^1.2.1" http-proxy-middleware "^0.19.1" import-local "^2.0.0" internal-ip "^4.3.0" ip "^1.1.5" killable "^1.0.1" loglevel "^1.6.2" opn "^5.5.0" p-retry "^3.0.1" portfinder "^1.0.20" schema-utils "^1.0.0" selfsigned "^1.10.4" semver "^6.1.1" serve-index "^1.9.1" sockjs "0.3.19" sockjs-client "1.3.0" spdy "^4.0.0" strip-ansi "^3.0.1" supports-color "^6.1.0" url "^0.11.0" webpack-dev-middleware "^3.7.0" webpack-log "^2.0.0" yargs "12.0.5" webpack-fix-style-only-entries@^0.2.1: version "0.2.2" resolved "https://registry.yarnpkg.com/webpack-fix-style-only-entries/-/webpack-fix-style-only-entries-0.2.2.tgz#60331c608b944ac821a3b6f2ae491a6d79ba40eb" integrity sha512-0wcrLCnISP8htV0NP1mT0e2mHhfjGQdNk82s8BTLVvF7rXuoJuUUzP3aCUXnRqlLgmTBx5WgqPhnczjatl+iSQ== webpack-log@^1.0.1: version "1.2.0" resolved 
"https://registry.yarnpkg.com/webpack-log/-/webpack-log-1.2.0.tgz#a4b34cda6b22b518dbb0ab32e567962d5c72a43d" integrity sha512-U9AnICnu50HXtiqiDxuli5gLB5PGBo7VvcHx36jRZHwK4vzOYLbImqT4lwWwoMHdQWwEKw736fCHEekokTEKHA== dependencies: chalk "^2.1.0" log-symbols "^2.1.0" loglevelnext "^1.0.1" uuid "^3.1.0" webpack-log@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== dependencies: ansi-colors "^3.0.0" uuid "^3.3.2" webpack-shell-plugin@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/webpack-shell-plugin/-/webpack-shell-plugin-0.5.0.tgz#29b8a1d80ddeae0ddb10e729667f728653c2c742" integrity sha1-Kbih2A3erg3bEOcpZn9yhlPCx0I= webpack-sources@^1.1.0, webpack-sources@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.3.0.tgz#2a28dcb9f1f45fe960d8f1493252b5ee6530fa85" integrity sha512-OiVgSrbGu7NEnEvQJJgdSFPl2qWKkWq5lHMhgiToIiN9w34EBnjYzSYs+VbL5KoYiLNtFFa7BZIKxRED3I32pA== dependencies: source-list-map "^2.0.0" source-map "~0.6.1" webpack@^4.30.0: version "4.34.0" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.34.0.tgz#a4c30129482f7b4ece4c0842002dedf2b56fab58" integrity sha512-ry2IQy1wJjOefLe1uJLzn5tG/DdIKzQqNlIAd2L84kcaADqNvQDTBlo8UcCNyDaT5FiaB+16jhAkb63YeG3H8Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/wasm-edit" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" acorn "^6.0.5" acorn-dynamic-import "^4.0.0" ajv "^6.1.0" ajv-keywords "^3.1.0" chrome-trace-event "^1.0.0" enhanced-resolve "^4.1.0" eslint-scope "^4.0.0" json-parse-better-errors "^1.0.2" loader-runner "^2.3.0" loader-utils "^1.1.0" memory-fs "~0.4.1" micromatch "^3.1.8" mkdirp "~0.5.0" neo-async "^2.5.0" node-libs-browser "^2.0.0" schema-utils "^1.0.0" tapable "^1.1.0" terser-webpack-plugin 
"^1.1.0" watchpack "^1.5.0" webpack-sources "^1.3.0" websocket-driver@>=0.5.1: version "0.7.3" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.3.tgz#a2d4e0d4f4f116f1e6297eba58b05d430100e9f9" integrity sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg== dependencies: http-parser-js ">=0.4.0 <0.4.11" safe-buffer ">=5.1.0" websocket-extensions ">=0.1.1" websocket-extensions@>=0.1.1: version "0.1.4" resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@^1.1.1, which@^1.2.1, which@^1.2.14, which@^1.2.9: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" with@^7.0.0: version "7.0.2" resolved "https://registry.yarnpkg.com/with/-/with-7.0.2.tgz#ccee3ad542d25538a7a7a80aad212b9828495bac" integrity sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w== dependencies: "@babel/parser" "^7.9.6" "@babel/types" "^7.9.6" assert-never "^1.2.1" babel-walk "3.0.0-canary-5" wordwrap@^1.0.0, wordwrap@~1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= worker-farm@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== dependencies: errno "~0.1.7" wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= ws@~3.3.1: version "3.3.3" resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA== dependencies: async-limiter "~1.0.0" safe-buffer "~5.1.0" ultron "~1.1.0" xmlhttprequest-ssl@~1.5.4: version "1.5.5" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e" integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4= xtend@^4.0.0, xtend@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" integrity sha1-pcbVMr5lbiPbgg77lDofBJmNY68= "y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity 
sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.2, yallist@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^11.1.1: version "11.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" integrity sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs@12.0.5, yargs@^12.0.5: version "12.0.5" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== dependencies: cliui "^4.0.0" decamelize "^1.2.0" find-up "^3.0.0" get-caller-file "^1.0.1" os-locale "^3.0.0" require-directory "^2.1.1" require-main-filename "^1.0.1" set-blocking "^2.0.0" string-width "^2.0.0" which-module "^2.0.0" y18n "^3.2.1 || ^4.0.0" yargs-parser "^11.1.1" yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk= buildbot-3.4.0/www/codeparameter/000077500000000000000000000000001413250514000167545ustar00rootroot00000000000000buildbot-3.4.0/www/codeparameter/buildbot_codeparameter/000077500000000000000000000000001413250514000234535ustar00rootroot00000000000000buildbot-3.4.0/www/codeparameter/buildbot_codeparameter/__init__.py000066400000000000000000000021421413250514000255630ustar00rootroot00000000000000# This 
file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.schedulers.forcesched import TextParameter from buildbot.www.plugin import Application class CodeParameter(TextParameter): """A code editor using ace""" spec_attributes = ["mode", "height"] type = "code" mode = "text" height = 200 # create the interface for the setuptools entry point ep = Application(__name__, "Buildbot forcescheduler parameter using ace.js to submit code") buildbot-3.4.0/www/codeparameter/karma.conf.js000066400000000000000000000003641413250514000213340ustar00rootroot00000000000000const common = require('buildbot-build-common'); module.exports = function karmaConfig (config) { common.createTemplateKarmaConfig(config, { testRoot: 'src/tests.webpack.js', webpack: require('./webpack.config') }); }; buildbot-3.4.0/www/codeparameter/package.json000066400000000000000000000016411413250514000212440ustar00rootroot00000000000000{ "name": "codeparameter", "private": true, "main": "buildbot_codeparameter/static/scripts.js", "scripts": { "build": "rimraf buildbot_codeparameter/static && webpack --bail --progress --profile --env prod", "build-dev": "rimraf buildbot_codeparameter/static && webpack --bail --progress --profile --env dev", "dev": "webpack --bail --progress --profile --watch --env dev", "test": "karma start", "test-watch": "karma start --auto-watch 
--no-single-run" }, "devDependencies": { "angular-mocks": "^1.7.9", "buildbot-build-common": "link:../build_common", "lodash": "^4.17.19", "rimraf": "^2.6.3", "style-loader": "^0.23.1" }, "dependencies": { "ace-builds": "^1.4.4", "angular": "^1.8.0", "angular-ui-ace": "^0.2.3", "angular-ui-bootstrap": "^2.5.6", "jquery": "^3.5.0" } } buildbot-3.4.0/www/codeparameter/postcss.config.js000066400000000000000000000001711413250514000222530ustar00rootroot00000000000000module.exports = { plugins: { autoprefixer: { browsers: ['last 2 versions'] }, }, }; buildbot-3.4.0/www/codeparameter/setup.cfg000066400000000000000000000000001413250514000205630ustar00rootroot00000000000000buildbot-3.4.0/www/codeparameter/setup.py000066400000000000000000000030611413250514000204660ustar00rootroot00000000000000#!/usr/bin/env python # # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members try: from buildbot_pkg import setup_www_plugin except ImportError: import sys print('Please install buildbot_pkg module in order to install that ' 'package, or use the pre-build .whl modules available on pypi', file=sys.stderr) sys.exit(1) setup_www_plugin( name='buildbot-codeparameter', description='Buildbot Forcescheduler Parameter that use ace.js to display code', author=u'Pierre Tardy', author_email=u'tardyp@gmail.com', url='http://buildbot.net/', packages=['buildbot_codeparameter'], package_data={ '': [ 'VERSION', 'static/*' ] }, entry_points=""" [buildbot.www] codeparameter = buildbot_codeparameter:ep """, classifiers=[ 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)' ], ) buildbot-3.4.0/www/codeparameter/src/000077500000000000000000000000001413250514000175435ustar00rootroot00000000000000buildbot-3.4.0/www/codeparameter/src/module/000077500000000000000000000000001413250514000210305ustar00rootroot00000000000000buildbot-3.4.0/www/codeparameter/src/module/codefield.directive.js000066400000000000000000000005261413250514000252640ustar00rootroot00000000000000 // defines custom field directives which only have templates class Codefield { constructor() { return { replace: false, restrict: 'E', scope: false, template: require('./codefield.tpl.jade') }; } } angular.module('codeparameter') .directive('codefield', [Codefield]); buildbot-3.4.0/www/codeparameter/src/module/codefield.tpl.jade000066400000000000000000000004261413250514000243730ustar00rootroot00000000000000basefield div.form label.control-label.col-sm-12(for="{{field.name}}") | {{field.label}} .col-sm-12 div(style="height:{{field.height}}px", ui-ace="{mode:field.mode}", readonly="field.readonly", ng-model="field.value") buildbot-3.4.0/www/codeparameter/src/module/main.module.js000066400000000000000000000003061413250514000235750ustar00rootroot00000000000000 import 'ace-builds/src-noconflict/ace'; import "ace-builds/webpack-resolver"; import 'angular-ui-ace'; 
angular.module("codeparameter", ['ui.ace', 'common']); require('./codefield.directive.js'); buildbot-3.4.0/www/codeparameter/src/module/main.module.spec.js000066400000000000000000000020431413250514000245260ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ angular.module('common', []).constant('config', {'url': 'foourl'}); beforeEach(angular.mock.module('codeparameter')); describe('minimalistic test', function() { let scope; let elmBody = (scope = null); const injected = function($rootScope, $compile) { elmBody = angular.element( '' ); scope = $rootScope; scope.field = { height:400, mode:"python", readonly:true }; $compile(elmBody)(scope); return scope.$digest(); }; beforeEach((inject(injected))); it('should load ace ui ', function() { expect(elmBody).toBeDefined(); // if we can find a div with class ace_layer, then ace has loaded const elm = elmBody.find('.ace_layer'); expect(elm.length).toBeGreaterThan(0); }); }); buildbot-3.4.0/www/codeparameter/src/styles/000077500000000000000000000000001413250514000210665ustar00rootroot00000000000000buildbot-3.4.0/www/codeparameter/src/styles/styles.less000066400000000000000000000000001413250514000232670ustar00rootroot00000000000000buildbot-3.4.0/www/codeparameter/src/tests.webpack.js000066400000000000000000000004421413250514000226560ustar00rootroot00000000000000// This file is an entry point for angular tests // Avoids some weird issues when using webpack + angular. 
import 'angular'; import 'angular-mocks/angular-mocks'; import './module/main.module.js' const context = require.context('./', true, /\.spec.js$/); context.keys().forEach(context); buildbot-3.4.0/www/codeparameter/webpack.config.js000066400000000000000000000012561413250514000221760ustar00rootroot00000000000000'use strict'; const common = require('buildbot-build-common'); const env = require('yargs').argv.env; const pkg = require('./package.json'); var event = process.env.npm_lifecycle_event; var isTest = event === 'test' || event === 'test-watch'; var isProd = env === 'prod'; module.exports = function() { return common.createTemplateWebpackConfig({ entry: { scripts: './src/module/main.module.js', }, libraryName: pkg.name, pluginName: pkg.plugin_name, dirname: __dirname, isTest: isTest, isProd: isProd, outputPath: __dirname + '/buildbot_codeparameter/static', provideJquery: true, }); }(); buildbot-3.4.0/www/codeparameter/yarn.lock000066400000000000000000011431141413250514000206040ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
# yarn lockfile v1 "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.7.4.tgz#37e864532200cb6b50ee9a4045f5f817840166ab" integrity sha512-+bYbx56j4nYBmpsWtnPUsKW3NdnYxbqyfrP2w9wILBuHzdfIKz9prieZK0DFPyIzkjYVUe4QkusGL07r5pXznQ== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helpers" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" convert-source-map "^1.7.0" debug "^4.1.0" json5 "^2.1.0" lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" "@babel/generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.7.4.tgz#db651e2840ca9aa66f327dcec1dc5f5fa9611369" integrity sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg== dependencies: "@babel/types" "^7.7.4" jsesc "^2.5.1" lodash "^4.17.13" source-map "^0.5.0" "@babel/helper-annotate-as-pure@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.4.tgz#bb3faf1e74b74bd547e867e48f551fa6b098b6ce" integrity sha512-2BQmQgECKzYKFPpiycoF9tlb5HA4lrVyAmLLVK177EcQAqjVLciUb2/R+n1boQ9y5ENV3uz2ZqiNw7QMBBw1Og== dependencies: "@babel/types" "^7.7.4" "@babel/helper-builder-binary-assignment-operator-visitor@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.4.tgz#5f73f2b28580e224b5b9bd03146a4015d6217f5f" integrity 
sha512-Biq/d/WtvfftWZ9Uf39hbPBYDUo986m5Bb4zhkeYDGUllF43D+nUe5M6Vuo6/8JDK/0YX/uBdeoQpyaNhNugZQ== dependencies: "@babel/helper-explode-assignable-expression" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-call-delegate@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.7.4.tgz#621b83e596722b50c0066f9dc37d3232e461b801" integrity sha512-8JH9/B7J7tCYJ2PpWVpw9JhPuEVHztagNVuQAFBVFYluRMlpG7F1CgKEgGeL6KFqcsIa92ZYVj6DSc0XwmN1ZA== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-create-regexp-features-plugin@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.4.tgz#6d5762359fd34f4da1500e4cff9955b5299aaf59" integrity sha512-Mt+jBKaxL0zfOIWrfQpnfYCN7/rS6GKx6CCCfuoqVVd+17R8zNDlzVYmIi9qyb2wOk002NsmSTDymkIygDUH7A== dependencies: "@babel/helper-regex" "^7.4.4" regexpu-core "^4.6.0" "@babel/helper-define-map@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz#2841bf92eb8bd9c906851546fe6b9d45e162f176" integrity sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-explode-assignable-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.4.tgz#fa700878e008d85dc51ba43e9fb835cddfe05c84" integrity sha512-2/SicuFrNSXsZNBxe5UGdLr+HZg+raWBLE9vC98bdYOKX/U6PY0mdGlYUJdtTDPSU0Lw0PNbKKDpwYHJLn2jLg== dependencies: "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-function-name@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz#ab6e041e7135d436d8f0a3eca15de5b67a341a2e" integrity sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ== dependencies: "@babel/helper-get-function-arity" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-get-function-arity@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz#cb46348d2f8808e632f0ab048172130e636005f0" integrity sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA== dependencies: "@babel/types" "^7.7.4" "@babel/helper-hoist-variables@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.4.tgz#612384e3d823fdfaaf9fce31550fe5d4db0f3d12" integrity sha512-wQC4xyvc1Jo/FnLirL6CEgPgPCa8M74tOdjWpRhQYapz5JC7u3NYU1zCVoVAGCE3EaIP9T1A3iW0WLJ+reZlpQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-member-expression-to-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.4.tgz#356438e2569df7321a8326644d4b790d2122cb74" integrity sha512-9KcA1X2E3OjXl/ykfMMInBK+uVdfIVakVe7W7Lg3wfXUNyS3Q1HWLFRwZIjhqiCGbslummPDnmb7vIekS0C1vw== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-imports@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.7.4.tgz#e5a92529f8888bf319a6376abfbd1cebc491ad91" integrity sha512-dGcrX6K9l8258WFjyDLJwuVKxR4XZfU0/vTUgOQYWEnRD8mgr+p4d6fCUMq/ys0h4CCt/S5JhbvtyErjWouAUQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-transforms@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.7.4.tgz#8d7cdb1e1f8ea3d8c38b067345924ac4f8e0879a" 
integrity sha512-ehGBu4mXrhs0FxAqN8tWkzF8GSIGAiEumu4ONZ/hD9M88uHcD+Yu2ttKfOCgwzoesJOJrtQh7trI5YPbRtMmnA== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-simple-access" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-optimise-call-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.4.tgz#034af31370d2995242aa4df402c3b7794b2dcdf2" integrity sha512-VB7gWZ2fDkSuqW6b1AKXkJWO5NyNI3bFL/kK79/30moK57blr6NbH8xcl2XcKCwOmJosftWunZqfO84IGq3ZZg== dependencies: "@babel/types" "^7.7.4" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.4.tgz#c68c2407350d9af0e061ed6726afb4fff16d0234" integrity sha512-Sk4xmtVdM9sA/jCI80f+KS+Md+ZHIpjuqmYPk1M7F/upHou5e4ReYmExAiu6PVe65BhJPZA2CY9x9k4BqE5klw== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-wrap-function" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-replace-supers@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.7.4.tgz#3c881a6a6a7571275a72d82e6107126ec9e2cdd2" integrity sha512-pP0tfgg9hsZWo5ZboYGuBn/bbYT/hdLPVSS4NMmiRJdwWhP0IznPwN9AE1JwyGsjSPLC364I0Qh5p+EPkGPNpg== dependencies: "@babel/helper-member-expression-to-functions" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-simple-access@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.7.4.tgz#a169a0adb1b5f418cfc19f22586b2ebf58a9a294" integrity sha512-zK7THeEXfan7UlWsG2A6CI/L9jVnI5+xxKZOdej39Y0YtDYKx9raHk5F2EtK9K8DHRTihYwg20ADt9S36GR78A== dependencies: "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-split-export-declaration@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz#57292af60443c4a3622cf74040ddc28e68336fd8" integrity sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug== dependencies: "@babel/types" "^7.7.4" "@babel/helper-validator-identifier@^7.12.11": version "7.12.11" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== "@babel/helper-wrap-function@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz#37ab7fed5150e22d9d7266e830072c0cdd8baace" integrity sha512-VsfzZt6wmsocOaVU0OokwrIytHND55yvyT4BPB9AIIgwr8+x7617hetdJTsuGwygN5RC6mxA9EJztTjuwm2ofg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helpers@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.7.4.tgz#62c215b9e6c712dadc15a9a0dcab76c92a940302" integrity sha512-ak5NGZGJ6LV85Q1Zc9gn2n+ayXOizryhjSUBTdu5ih1tlVCJeuQENzc4ItyCVhINVXvIT/ZQ4mheGIsfBkpskg== dependencies: "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/highlight@^7.0.0": version "7.5.0" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" "@babel/parser@^7.6.0", "@babel/parser@^7.9.6": version "7.13.9" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.13.9.tgz#ca34cb95e1c2dd126863a84465ae8ef66114be99" integrity sha512-nEUfRiARCcaVo3ny3ZQjURjHQZUo/JkEw7rLlSZy/psWGnvwXFtPcr6jb7Yb41DVW5LTe6KRq9LGleRNsg1Frw== "@babel/parser@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.7.4.tgz#75ab2d7110c2cf2fa949959afb05fa346d2231bb" integrity sha512-jIwvLO0zCL+O/LmEJQjWA75MQTWwx3c3u2JOTDK5D3/9egrWRRA0/0hk9XXywYnXZVVpzrBYeIQTmhwUaePI9g== "@babel/plugin-proposal-async-generator-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz#0351c5ac0a9e927845fffd5b82af476947b7ce6d" integrity sha512-1ypyZvGRXriY/QP668+s8sFr2mqinhkRDMPSQLNghCQE+GAkFtp+wkHVvg2+Hdki8gwP+NFzJBJ/N1BfzCCDEw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-proposal-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.4.tgz#dde64a7f127691758cbfed6cf70de0fa5879d52d" integrity 
sha512-StH+nGAdO6qDB1l8sZ5UBV8AC3F2VW2I8Vfld73TMKyptMU9DY5YsJAS8U81+vEtxcH3Y/La0wG0btDrhpnhjQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.7.4.tgz#7700a6bfda771d8dc81973249eac416c6b4c697d" integrity sha512-wQvt3akcBTfLU/wYoqm/ws7YOAQKu8EVJEvHip/mzkNtjaclQoCCIqKXFP5/eyfnfbQCDV3OLRIK3mIVyXuZlw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.7.4.tgz#cc57849894a5c774214178c8ab64f6334ec8af71" integrity sha512-rnpnZR3/iWKmiQyJ3LKJpSwLDcX/nSXhdLk4Aq/tXOApIvyu7qoabrige0ylsAJffaUC51WiBu209Q0U+86OWQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.7.4.tgz#ec21e8aeb09ec6711bc0a39ca49520abee1de379" integrity sha512-DyM7U2bnsQerCQ+sejcTNZh8KQEUuC3ufzdnVnSiUv/qoGJp2Z3hanKL18KDhsBT5Wj6a7CMT5mdyCNJsEaA9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.4.tgz#7c239ccaf09470dbe1d453d50057460e84517ebb" integrity sha512-cHgqHgYvffluZk85dJ02vloErm3Y6xtH+2noOBOJ2kXOJH3aVCDnj5eR/lVNlTnYu4hndAPJD3rTFjW3qee0PA== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-async-generators@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.7.4.tgz#331aaf310a10c80c44a66b238b6e49132bd3c889" integrity sha512-Li4+EjSpBgxcsmeEF8IFcfV/+yJGxHXDirDkEoyFjumuwbmfCVHUt0HuowD/iGM7OhIRyXJH9YXxqiH6N815+g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import@^7.2.0", "@babel/plugin-syntax-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.7.4.tgz#29ca3b4415abfe4a5ec381e903862ad1a54c3aec" integrity sha512-jHQW0vbRGvwQNgyVxwDh4yuXu4bH1f5/EICJLAhl1SblLs2CDhrsmCk+v5XLdE9wxtAFRyxx+P//Iw+a5L/tTg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.7.4.tgz#86e63f7d2e22f9e27129ac4e83ea989a382e86cc" integrity sha512-QpGupahTQW1mHRXddMG5srgpHWqRLwJnJZKXTigB9RPFCCGbDGCgBeM/iC82ICXp414WeYx/tD54w7M2qRqTMg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.7.4.tgz#47cf220d19d6d0d7b154304701f468fc1cc6ff46" integrity sha512-mObR+r+KZq0XhRVS2BrBKBpr5jqrqzlPvS9C9vuOf5ilSwzloAl7RPWLrgKdWS6IreaVrjHxTjtyqFiOisaCwg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.7.4.tgz#a3e38f59f4b6233867b4a92dcb0ee05b2c334aa6" integrity sha512-4ZSuzWgFxqHRE31Glu+fEr/MirNZOMYmD/0BhBWyLyOOQz/gTAl7QmWm2hX1QxEIXsr2vkdlwxIzTyiYRC4xcQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-top-level-await@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.4.tgz#bd7d8fa7b9fee793a36e4027fd6dd1aa32f946da" integrity sha512-wdsOw0MvkL1UIgiQ/IFr3ETcfv1xb8RMM0H9wbiDyLaJFyiDg5oZvDLCXosIXmFeIlweML5iOBXAkqddkYNizg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-arrow-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.7.4.tgz#76309bd578addd8aee3b379d809c802305a98a12" integrity sha512-zUXy3e8jBNPiffmqkHRNDdZM2r8DWhCB7HhcoyZjiK1TxYEluLHAvQuYnTT+ARqRpabWqy/NHkO6e3MsYB5YfA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.4.tgz#694cbeae6d613a34ef0292713fa42fb45c4470ba" integrity sha512-zpUTZphp5nHokuy8yLlyafxCJ0rSlFoSHypTUWgpdwoDXWQcseaect7cJ8Ppk6nunOM6+5rPMkod4OYKPR5MUg== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.7.4.tgz#d0d9d5c269c78eaea76227ace214b8d01e4d837b" integrity sha512-kqtQzwtKcpPclHYjLK//3lH8OFsCDuDJBaFhVwf8kqdnF6MN4l618UDlcA7TfRs3FayrHj+svYnSX8MC9zmUyQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-block-scoping@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.7.4.tgz#200aad0dcd6bb80372f94d9e628ea062c58bf224" integrity 
sha512-2VBe9u0G+fDt9B5OV5DQH4KBf5DoiNkwFKOz0TCvBWvdAN2rOykCTkrL+jTLxfCAm76l9Qo5OqL7HBOx2dWggg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" lodash "^4.17.13" "@babel/plugin-transform-classes@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.4.tgz#c92c14be0a1399e15df72667067a8f510c9400ec" integrity sha512-sK1mjWat7K+buWRuImEzjNf68qrKcrddtpQo3swi9j7dUcG6y6R6+Di039QN2bD1dykeswlagupEmpOatFHHUg== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-define-map" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" globals "^11.1.0" "@babel/plugin-transform-computed-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.7.4.tgz#e856c1628d3238ffe12d668eb42559f79a81910d" integrity sha512-bSNsOsZnlpLLyQew35rl4Fma3yKWqK3ImWMSC/Nc+6nGjC9s5NFWAer1YQ899/6s9HxO2zQC1WoFNfkOqRkqRQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-destructuring@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.7.4.tgz#2b713729e5054a1135097b6a67da1b6fe8789267" integrity sha512-4jFMXI1Cu2aXbcXXl8Lr6YubCn6Oc7k9lLsu8v61TZh+1jny2BWmdtvY9zSUlLdGUvcy9DMAWyZEOqjsbeg/wA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-dotall-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.4.tgz#f7ccda61118c5b7a2599a72d5e3210884a021e96" integrity sha512-mk0cH1zyMa/XHeb6LOTXTbG7uIJ8Rrjlzu91pUx/KS3JpcgaTDwMS8kM+ar8SLOvlL2Lofi4CGBAjCo3a2x+lw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" 
"@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-duplicate-keys@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.7.4.tgz#3d21731a42e3f598a73835299dd0169c3b90ac91" integrity sha512-g1y4/G6xGWMD85Tlft5XedGaZBCIVN+/P0bs6eabmcPP9egFleMAo65OOjlhcz1njpwagyY3t0nsQC9oTFegJA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-exponentiation-operator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.7.4.tgz#dd30c0191e3a1ba19bcc7e389bdfddc0729d5db9" integrity sha512-MCqiLfCKm6KEA1dglf6Uqq1ElDIZwFuzz1WH5mTf8k2uQSxEJMbOIEh7IZv7uichr7PMfi5YVSrr1vz+ipp7AQ== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-for-of@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.7.4.tgz#248800e3a5e507b1f103d8b4ca998e77c63932bc" integrity sha512-zZ1fD1B8keYtEcKF+M1TROfeHTKnijcVQm0yO/Yu1f7qoDoxEIc/+GX6Go430Bg84eM/xwPFp0+h4EbZg7epAA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.4.tgz#75a6d3303d50db638ff8b5385d12451c865025b1" integrity sha512-E/x09TvjHNhsULs2IusN+aJNRV5zKwxu1cpirZyRPw+FyyIKEHPXTsadj48bVpc1R5Qq1B5ZkzumuFLytnbT6g== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.7.4.tgz#27fe87d2b5017a2a5a34d1c41a6b9f6a6262643e" integrity 
sha512-X2MSV7LfJFm4aZfxd0yLVFrEXAgPqYoDG53Br/tCKiKYfX0MjVjQeWPIhPHHsCqzwQANq+FLN786fF5rgLS+gw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-member-expression-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.7.4.tgz#aee127f2f3339fc34ce5e3055d7ffbf7aa26f19a" integrity sha512-9VMwMO7i69LHTesL0RdGy93JU6a+qOPuvB4F4d0kR0zyVjJRVJRaoaGjhtki6SzQUu8yen/vxPKN6CWnCUw6bA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-amd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.7.4.tgz#276b3845ca2b228f2995e453adc2e6f54d72fb71" integrity sha512-/542/5LNA18YDtg1F+QHvvUSlxdvjZoD/aldQwkq+E3WCkbEjNSN9zdrOXaSlfg3IfGi22ijzecklF/A7kVZFQ== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-commonjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.4.tgz#bee4386e550446343dd52a571eda47851ff857a3" integrity sha512-k8iVS7Jhc367IcNF53KCwIXtKAH7czev866ThsTgy8CwlXjnKZna2VHwChglzLleYrcHz1eQEIJlGRQxB53nqA== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.7.4" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-systemjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.4.tgz#cd98152339d3e763dfe838b7d4273edaf520bb30" integrity sha512-y2c96hmcsUi6LrMqvmNDPBBiGCiQu0aYqpHatVVu6kD4mFEXKjyNxd/drc18XXAf9dv7UXjrZwBVmTTGaGP8iw== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-umd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.4.tgz#1027c355a118de0aae9fee00ad7813c584d9061f" integrity sha512-u2B8TIi0qZI4j8q4C51ktfO7E3cQ0qnaXFI1/OXITordD40tt17g/sXqgNNCcMTcBFKrUPcGDx+TBJuZxLx7tw== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-named-capturing-groups-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.4.tgz#fb3bcc4ee4198e7385805007373d6b6f42c98220" integrity sha512-jBUkiqLKvUWpv9GLSuHUFYdmHg0ujC1JEYoZUfeOOfNydZXp1sXObgyPatpcwjWgsdBGsagWW0cdJpX/DO2jMw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/plugin-transform-new-target@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.7.4.tgz#4a0753d2d60639437be07b592a9e58ee00720167" integrity sha512-CnPRiNtOG1vRodnsyGX37bHQleHE14B9dnnlgSeEs3ek3fHN1A1SScglTCg1sfbe7sRQ2BUcpgpTpWSfMKz3gg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-object-super@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.7.4.tgz#48488937a2d586c0148451bf51af9d7dda567262" integrity sha512-ho+dAEhC2aRnff2JCA0SAK7V2R62zJd/7dmtoe7MHcso4C2mS+vZjn1Pb1pCVZvJs1mgsvv5+7sT+m3Bysb6eg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/plugin-transform-parameters@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.7.4.tgz#da4555c97f39b51ac089d31c7380f03bca4075ce" integrity 
sha512-VJwhVePWPa0DqE9vcfptaJSzNDKrWU/4FbYCjZERtmqEs05g3UMXnYMZoXja7JAJ7Y7sPZipwm/pGApZt7wHlw== dependencies: "@babel/helper-call-delegate" "^7.7.4" "@babel/helper-get-function-arity" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-property-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.7.4.tgz#2388d6505ef89b266103f450f9167e6bd73f98c2" integrity sha512-MatJhlC4iHsIskWYyawl53KuHrt+kALSADLQQ/HkhTjX954fkxIEh4q5slL4oRAnsm/eDoZ4q0CIZpcqBuxhJQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-regenerator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.4.tgz#d18eac0312a70152d7d914cbed2dc3999601cfc0" integrity sha512-e7MWl5UJvmPEwFJTwkBlPmqixCtr9yAASBqff4ggXTNicZiwbF8Eefzm6NVgfiBp7JdAGItecnctKTgH44q2Jw== dependencies: regenerator-transform "^0.14.0" "@babel/plugin-transform-reserved-words@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.7.4.tgz#6a7cf123ad175bb5c69aec8f6f0770387ed3f1eb" integrity sha512-OrPiUB5s5XvkCO1lS7D8ZtHcswIC57j62acAnJZKqGGnHP+TIc/ljQSrgdX/QyOTdEK5COAhuc820Hi1q2UgLQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.7.4.tgz#51fe458c1c1fa98a8b07934f4ed38b6cd62177a6" integrity sha512-O8kSkS5fP74Ad/8pfsCMGa8sBRdLxYoSReaARRNSz3FbFQj3z/QUvoUmJ28gn9BO93YfnXc3j+Xyaqe8cKDNBQ== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" resolve "^1.8.1" semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.7.4.tgz#74a0a9b2f6d67a684c6fbfd5f0458eb7ba99891e" integrity sha512-q+suddWRfIcnyG5YiDP58sT65AJDZSUhXQDZE3r04AuqD6d/XLaQPPXSBzP2zGerkgBivqtQm9XKGLuHqBID6Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.7.4.tgz#aa673b356fe6b7e70d69b6e33a17fef641008578" integrity sha512-8OSs0FLe5/80cndziPlg4R0K6HcWSM0zyNhHhLsmw/Nc5MaA49cAsnoJ/t/YZf8qkG7fD+UjTRaApVDB526d7Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-sticky-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.7.4.tgz#ffb68c05090c30732076b1285dc1401b404a123c" integrity sha512-Ls2NASyL6qtVe1H1hXts9yuEeONV2TJZmplLONkMPUG158CtmnrzW5Q5teibM5UVOFjG0D3IC5mzXR6pPpUY7A== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.0.0" "@babel/plugin-transform-template-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.7.4.tgz#1eb6411736dd3fe87dbd20cc6668e5121c17d604" integrity sha512-sA+KxLwF3QwGj5abMHkHgshp9+rRz+oY9uoRil4CyLtgEuE/88dpkeWgNk5qKVsJE9iSfly3nvHapdRiIS2wnQ== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-typeof-symbol@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.7.4.tgz#3174626214f2d6de322882e498a38e8371b2140e" integrity sha512-KQPUQ/7mqe2m0B8VecdyaW5XcQYaePyl9R7IsKd+irzj6jvbhoGnRE+M0aNkyAzI07VfUQ9266L5xMARitV3wg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-unicode-regex@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.4.tgz#a3c0f65b117c4c81c5b6484f2a5e7b95346b83ae" integrity sha512-N77UUIV+WCvE+5yHw+oks3m18/umd7y392Zv7mYTpFqHtkpcc+QUz+gLJNTWVlWROIWeLqY0f3OjZxV5TcXnRw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/preset-env@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.7.4.tgz#ccaf309ae8d1ee2409c85a4e2b5e280ceee830f8" integrity sha512-Dg+ciGJjwvC1NIe/DGblMbcGq1HOtKbw8RLl4nIjlfcILKEOkWT/vRqPpumswABEBVudii6dnVwrBtzD7ibm4g== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-proposal-async-generator-functions" "^7.7.4" "@babel/plugin-proposal-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-syntax-top-level-await" "^7.7.4" "@babel/plugin-transform-arrow-functions" "^7.7.4" "@babel/plugin-transform-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions" "^7.7.4" "@babel/plugin-transform-block-scoping" "^7.7.4" "@babel/plugin-transform-classes" "^7.7.4" "@babel/plugin-transform-computed-properties" "^7.7.4" "@babel/plugin-transform-destructuring" "^7.7.4" "@babel/plugin-transform-dotall-regex" "^7.7.4" "@babel/plugin-transform-duplicate-keys" "^7.7.4" "@babel/plugin-transform-exponentiation-operator" "^7.7.4" "@babel/plugin-transform-for-of" "^7.7.4" "@babel/plugin-transform-function-name" "^7.7.4" "@babel/plugin-transform-literals" "^7.7.4" 
"@babel/plugin-transform-member-expression-literals" "^7.7.4" "@babel/plugin-transform-modules-amd" "^7.7.4" "@babel/plugin-transform-modules-commonjs" "^7.7.4" "@babel/plugin-transform-modules-systemjs" "^7.7.4" "@babel/plugin-transform-modules-umd" "^7.7.4" "@babel/plugin-transform-named-capturing-groups-regex" "^7.7.4" "@babel/plugin-transform-new-target" "^7.7.4" "@babel/plugin-transform-object-super" "^7.7.4" "@babel/plugin-transform-parameters" "^7.7.4" "@babel/plugin-transform-property-literals" "^7.7.4" "@babel/plugin-transform-regenerator" "^7.7.4" "@babel/plugin-transform-reserved-words" "^7.7.4" "@babel/plugin-transform-shorthand-properties" "^7.7.4" "@babel/plugin-transform-spread" "^7.7.4" "@babel/plugin-transform-sticky-regex" "^7.7.4" "@babel/plugin-transform-template-literals" "^7.7.4" "@babel/plugin-transform-typeof-symbol" "^7.7.4" "@babel/plugin-transform-unicode-regex" "^7.7.4" "@babel/types" "^7.7.4" browserslist "^4.6.0" core-js-compat "^3.1.1" invariant "^2.2.2" js-levenshtein "^1.1.3" semver "^5.5.0" "@babel/runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.7.4.tgz#b23a856751e4bf099262f867767889c0e3fe175b" integrity sha512-r24eVUUr0QqNZa+qrImUk8fn5SPhHq+IfYvIoIMg0do3GdK9sMdiLKP3GYVVaxpPKORgm8KRKaNTEhAjgIpLMw== dependencies: regenerator-runtime "^0.13.2" "@babel/template@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.7.4.tgz#428a7d9eecffe27deac0a98e23bf8e3675d2a77b" integrity sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw== dependencies: "@babel/code-frame" "^7.0.0" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" "@babel/traverse@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.7.4.tgz#9c1e7c60fb679fe4fcfaa42500833333c2058558" integrity sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw== dependencies: 
"@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.13" "@babel/types@^7.6.1", "@babel/types@^7.9.6": version "7.13.0" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.13.0.tgz#74424d2816f0171b4100f0ab34e9a374efdf7f80" integrity sha512-hE+HE8rnG1Z6Wzo+MhaKE5lM5eMx71T4EHJgku2E3xIfaULhDcxiiRxUYgwX8qwP1BBSlag+TdGOt6JAidIZTA== dependencies: "@babel/helper-validator-identifier" "^7.12.11" lodash "^4.17.19" to-fast-properties "^2.0.0" "@babel/types@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.7.4.tgz#516570d539e44ddf308c07569c258ff94fde9193" integrity sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA== dependencies: esutils "^2.0.2" lodash "^4.17.13" to-fast-properties "^2.0.0" "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== "@types/glob@^7.1.1": version "7.1.1" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== dependencies: "@types/events" "*" "@types/minimatch" "*" "@types/node" "*" "@types/minimatch@*": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== "@types/node@*": version "12.12.12" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.12.tgz#529bc3e73dbb35dd9e90b0a1c83606a9d3264bdb" integrity 
sha512-MGuvYJrPU0HUwqF7LqvIj50RZUX23Z+m583KBygKYUZLlZ88n6w28XRNJRJgsHukLEnLz6w6SvxZoLgbr5wLqQ== "@webassemblyjs/ast@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== dependencies: "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@webassemblyjs/floating-point-hex-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== "@webassemblyjs/helper-api-error@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== "@webassemblyjs/helper-buffer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" integrity sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== "@webassemblyjs/helper-code-frame@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== dependencies: "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/helper-fsm@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" integrity 
sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== "@webassemblyjs/helper-module-context@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== dependencies: "@webassemblyjs/ast" "1.8.5" mamacro "^0.0.3" "@webassemblyjs/helper-wasm-bytecode@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== "@webassemblyjs/helper-wasm-section@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/ieee754@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" integrity sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== "@webassemblyjs/wasm-edit@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" integrity sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/helper-wasm-section" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-opt" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/wasm-gen@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wasm-opt@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" integrity sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wasm-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" integrity 
sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wast-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/floating-point-hex-parser" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-code-frame" "1.8.5" "@webassemblyjs/helper-fsm" "1.8.5" "@xtuc/long" "4.2.2" "@webassemblyjs/wast-printer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== 
abbrev@1.0.x: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU= accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== dependencies: mime-types "~2.1.24" negotiator "0.6.2" ace-builds@^1.4.4: version "1.4.7" resolved "https://registry.yarnpkg.com/ace-builds/-/ace-builds-1.4.7.tgz#56e5465270b6c48a48d30e70d6b8f6b92fbf2b08" integrity sha512-gwQGVFewBopRLho08BfahyvRa9FlB43JUig5ItAKTYc9kJJsbA9QNz75p28QtQomoPQ9rJx82ymL21x4ZSZmdg== acorn@^6.2.1: version "6.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.3.0.tgz#0087509119ffa4fc0a0041d1e93a417e68cb856e" integrity sha512-/czfa8BwS88b9gWQVhc8eknunSA2DoJpJyTQkhheIf5E48u1N0R4q/YxxsAeqRrmK9TQ/uYfgLDfZo91UlANIA== acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" integrity sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8= ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: version "3.4.1" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" integrity sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== ajv@^5.0.0: version "5.5.2" 
resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= dependencies: co "^4.6.0" fast-deep-equal "^1.0.0" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.3.0" ajv@^6.1.0, ajv@^6.10.2, ajv@^6.5.5: version "6.10.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== dependencies: fast-deep-equal "^2.0.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" integrity sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU= angular-mocks@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-mocks/-/angular-mocks-1.7.9.tgz#0a3b7e28b9a493b4e3010ed2b0f69a68e9b4f79b" integrity sha512-LQRqqiV3sZ7NTHBnNmLT0bXtE5e81t97+hkJ56oU0k3dqKv1s6F+nBWRlOVzqHWPGFOiPS8ZJVdrS8DFzHyNIA== angular-ui-ace@^0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/angular-ui-ace/-/angular-ui-ace-0.2.3.tgz#3cb903428100621a367fc7f641440e97a42a26d0" integrity sha1-PLkDQoEAYho2f8f2QUQOl6QqJtA= angular-ui-bootstrap@^2.5.6: version "2.5.6" resolved "https://registry.yarnpkg.com/angular-ui-bootstrap/-/angular-ui-bootstrap-2.5.6.tgz#23937322ec641a6fbee16498cc32452aa199e7c5" integrity sha512-yzcHpPMLQl0232nDzm5P4iAFTFQ9dMw0QgFLuKYbDj9M0xJ62z0oudYD/Lvh1pWfRsukiytP4Xj6BHOSrSXP8A== angular@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/angular/-/angular-1.8.0.tgz#b1ec179887869215cab6dfd0df2e42caa65b1b51" integrity sha512-VdaMx+Qk0Skla7B5gw77a8hzlcOakwF8mjlW13DpIWIDlfqwAbSSLfd8N/qZnzEmQF4jC4iofInd3gE7vL8ZZg== ansi-colors@^3.0.0: version "3.2.4" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" 
integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== ansi-html@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-regex@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.0, ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" anymatch@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== dependencies: micromatch "^3.1.4" normalize-path "^2.1.1" anymatch@~3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" integrity 
sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" aproba@^1.0.3, aproba@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" arr-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-union@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= array-flatten@1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= array-flatten@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= array-unique@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= arraybuffer.slice@~0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675" integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog== asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= asn1.js@^4.0.0: version "4.10.1" resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== dependencies: bn.js "^4.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity 
sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-never@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/assert-never/-/assert-never-1.2.1.tgz#11f0e363bf146205fb08193b5c7b90f4d1cf44fe" integrity sha512-TaTivMB6pYI1kXwrFlEhLeGfOqoDNdTxjCdwRfFFkEA30Eu+k48W34nlok2EYWJfFFzqaEmichdNM7th6M5HNw== assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assert@^1.1.1: version "1.5.0" resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== dependencies: object-assign "^4.1.1" util "0.10.3" assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= async-each@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async-limiter@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== async@1.x: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= async@^2.0.0, async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" integrity 
sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== dependencies: lodash "^4.17.14" asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= atob@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== autoprefixer@^9.5.1: version "9.7.2" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.7.2.tgz#26cf729fbb709323b40171a874304884dcceffed" integrity sha512-LCAfcdej1182uVvPOZnytbq61AhnOZ/4JelDaJGDeNwewyU1AMaNthcHsyz1NRjTmd2FkurMckLWfkHg3Z//KA== dependencies: browserslist "^4.7.3" caniuse-lite "^1.0.30001010" chalk "^2.4.2" normalize-range "^0.1.2" num2fraction "^1.2.2" postcss "^7.0.23" postcss-value-parser "^4.0.2" aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" js-tokens "^3.0.2" babel-generator@^6.18.0: version "6.26.1" resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== dependencies: 
babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" detect-indent "^4.0.0" jsesc "^1.3.0" lodash "^4.17.4" source-map "^0.5.7" trim-right "^1.0.1" babel-loader@^8.0.5: version "8.0.6" resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb" integrity sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw== dependencies: find-cache-dir "^2.0.0" loader-utils "^1.0.2" mkdirp "^0.5.1" pify "^4.0.1" babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= dependencies: babel-runtime "^6.22.0" babel-plugin-dynamic-import-node@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== dependencies: object.assign "^4.1.0" babel-runtime@^6.0.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= dependencies: core-js "^2.4.0" regenerator-runtime "^0.11.0" babel-template@^6.16.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= dependencies: babel-runtime "^6.26.0" babel-traverse "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" lodash "^4.17.4" babel-traverse@^6.18.0, babel-traverse@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= 
dependencies: babel-code-frame "^6.26.0" babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" debug "^2.6.8" globals "^9.18.0" invariant "^2.2.2" lodash "^4.17.4" babel-types@^6.18.0, babel-types@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: babel-runtime "^6.26.0" esutils "^2.0.2" lodash "^4.17.4" to-fast-properties "^1.0.3" babel-walk@3.0.0-canary-5: version "3.0.0-canary-5" resolved "https://registry.yarnpkg.com/babel-walk/-/babel-walk-3.0.0-canary-5.tgz#f66ecd7298357aee44955f235a6ef54219104b11" integrity sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw== dependencies: "@babel/types" "^7.9.6" babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" integrity sha1-c5JncZI7Whl0etZmqlzUv5xunOg= base64-js@^1.0.2: version "1.3.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== base64id@1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY= base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" class-utils "^0.3.5" component-emitter "^1.2.1" define-property "^1.0.0" isobject "^3.0.1" mixin-deep "^1.2.0" pascalcase "^0.1.1" batch@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" better-assert@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" integrity sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI= dependencies: callsite "1.0.0" big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== big.js@^5.2.2: version "5.2.2" resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^1.0.0: version "1.13.1" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binary-extensions@^2.0.0: version 
"2.0.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c" integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow== blob@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.5.tgz#d680eeef25f8cd91ad533f5b01eed48e64caf683" integrity sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig== bluebird@^3.3.0, bluebird@^3.5.5: version "3.7.1" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.1.tgz#df70e302b471d7473489acf26a93d63b53f874de" integrity sha512-DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg== bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== body-parser@1.19.0, body-parser@^1.16.1: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== dependencies: bytes "3.1.0" content-type "~1.0.4" debug "2.6.9" depd "~1.1.2" http-errors "1.7.2" iconv-lite "0.4.24" on-finished "~2.3.0" qs "6.7.0" raw-body "2.4.0" type-is "~1.6.17" bonjour@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= dependencies: array-flatten "^2.1.0" deep-equal "^1.0.1" dns-equal "^1.0.0" dns-txt "^2.0.2" multicast-dns "^6.0.1" multicast-dns-service-types "^1.1.0" boolbase@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity 
sha1-aN/1++YMUes3cl6p4+0xDcwed24= brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" braces@^2.3.1, braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" repeat-element "^1.1.2" snapdragon "^0.8.1" snapdragon-node "^2.0.1" split-string "^3.0.2" to-regex "^3.0.1" braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" brorand@^1.0.1, brorand@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== dependencies: buffer-xor "^1.0.3" cipher-base "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.3" inherits "^2.0.1" safe-buffer "^5.0.1" browserify-cipher@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" integrity 
sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== dependencies: browserify-aes "^1.0.4" browserify-des "^1.0.0" evp_bytestokey "^1.0.0" browserify-des@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== dependencies: cipher-base "^1.0.1" des.js "^1.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" browserify-rsa@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= dependencies: bn.js "^4.1.0" randombytes "^2.0.1" browserify-sign@^4.0.0: version "4.0.4" resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= dependencies: bn.js "^4.1.1" browserify-rsa "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.2" elliptic "^6.0.0" inherits "^2.0.1" parse-asn1 "^5.0.0" browserify-zlib@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== dependencies: pako "~1.0.5" browserslist@^4.6.0, browserslist@^4.7.3: version "4.16.6" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== dependencies: caniuse-lite "^1.0.30001219" colorette "^1.2.2" electron-to-chromium "^1.3.723" escalade "^3.1.1" node-releases "^1.1.71" buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== buffer-alloc@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== dependencies: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" buffer-fill@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== buffer-indexof@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= buffer@^4.3.0: version "4.9.2" resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" isarray "^1.0.0" "buildbot-build-common@link:../build_common": version "1.0.0" dependencies: "@babel/core" "^7.4.3" "@babel/plugin-syntax-dynamic-import" "^7.2.0" "@babel/plugin-transform-runtime" "^7.4.3" "@babel/preset-env" 
"^7.4.3" "@babel/runtime" "^7.4.3" autoprefixer "^9.5.1" babel-loader "^8.0.5" css-loader "^2.1.1" file-loader "^3.0.1" html-webpack-plugin "^3.2.0" import-glob-loader "^1.1.0" istanbul-instrumenter-loader "^3.0.1" jasmine-core "^3.4.0" karma "^4.1.0" karma-chrome-launcher "^2.2.0" karma-coverage "^1.1.2" karma-jasmine "^2.0.1" karma-sourcemap-loader "^0.3.7" karma-spec-reporter "^0.0.32" karma-webpack "^3.0.5" less "^3.9.0" less-loader "^5.0.0" mini-css-extract-plugin "^0.6.0" node-libs-browser "^2.2.0" null-loader "^1.0.0" postcss-loader "^3.0.0" pug "^3.0.1" raw-loader "^2.0.0" style-loader "^0.23.1" webpack "^4.30.0" webpack-cli "^3.3.1" webpack-dev-server "^3.3.1" webpack-fix-style-only-entries "^0.2.1" webpack-shell-plugin "^0.5.0" builtin-status-codes@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= bytes@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== cacache@^12.0.2: version "12.0.3" resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== dependencies: bluebird "^3.5.5" chownr "^1.1.1" figgy-pudding "^3.5.1" glob "^7.1.4" graceful-fs "^4.1.15" infer-owner "^1.0.3" lru-cache "^5.1.1" mississippi "^3.0.0" mkdirp "^0.5.1" move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.3" ssri "^6.0.1" unique-filename "^1.1.1" y18n "^4.0.0" cache-base@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" component-emitter "^1.2.1" get-value "^2.0.6" has-value "^1.0.0" isobject "^3.0.1" set-value "^2.0.0" to-object-path "^0.3.0" union-value "^1.0.0" unset-value "^1.0.0" caller-callsite@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= dependencies: callsites "^2.0.0" caller-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= dependencies: caller-callsite "^2.0.0" callsite@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" integrity sha1-KAOY5dZkvXQDi28JBRU+borxvCA= callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= camel-case@3.0.x: version "3.0.0" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" integrity sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= dependencies: no-case "^2.2.0" upper-case "^1.1.1" camelcase-keys@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= dependencies: camelcase "^2.0.0" map-obj "^1.0.0" camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= camelcase@^5.0.0, 
camelcase@^5.2.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== caniuse-lite@^1.0.30001010, caniuse-lite@^1.0.30001219: version "1.0.30001228" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz#bfdc5942cd3326fa51ee0b42fbef4da9d492a7fa" integrity sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A== caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" character-parser@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" integrity sha1-x84o821LzZdE5f/CxfzeHHMmH8A= dependencies: is-regex "^1.0.3" chokidar@^2.0.2, chokidar@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== dependencies: anymatch "^2.0.0" async-each 
"^1.0.1" braces "^2.3.2" glob-parent "^3.1.0" inherits "^2.0.3" is-binary-path "^1.0.0" is-glob "^4.0.0" normalize-path "^3.0.0" path-is-absolute "^1.0.0" readdirp "^2.2.1" upath "^1.1.1" optionalDependencies: fsevents "^1.2.7" chokidar@^3.0.0: version "3.3.0" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6" integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A== dependencies: anymatch "~3.1.1" braces "~3.0.2" glob-parent "~5.1.0" is-binary-path "~2.1.0" is-glob "~4.0.1" normalize-path "~3.0.0" readdirp "~3.2.0" optionalDependencies: fsevents "~2.1.1" chownr@^1.1.1, chownr@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== chrome-trace-event@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== dependencies: tslib "^1.9.0" cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" define-property "^0.2.5" isobject "^3.0.0" static-extend "^0.1.1" clean-css@4.2.x: version "4.2.1" resolved 
"https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" integrity sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== dependencies: source-map "~0.6.0" cliui@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== dependencies: string-width "^2.1.1" strip-ansi "^4.0.0" wrap-ansi "^2.0.0" cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== dependencies: string-width "^3.1.0" strip-ansi "^5.2.0" wrap-ansi "^5.1.0" clone@^2.1.1, clone@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit "^1.0.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name 
"1.1.3" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colorette@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== colors@^1.1.0, colors@^1.1.2: version "1.4.0" resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@2.17.x: version "2.17.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== commander@^2.20.0: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@~2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity 
sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= component-bind@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E= component-emitter@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== component-inherit@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" integrity sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM= compressible@~2.0.16: version "2.0.17" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.17.tgz#6e8c108a16ad58384a977f3a482ca20bff2f38c1" integrity sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw== dependencies: mime-db ">= 1.40.0 < 2" compression@^1.7.4: version "1.7.4" resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" bytes "3.0.0" compressible "~2.0.16" debug "2.6.9" on-headers "~1.0.2" safe-buffer "5.1.2" vary "~1.1.2" concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= concat-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" 
integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" inherits "^2.0.3" readable-stream "^2.2.2" typedarray "^0.0.6" connect-history-api-fallback@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== connect@^3.6.0: version "3.7.0" resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== dependencies: debug "2.6.9" finalhandler "1.1.2" parseurl "~1.3.3" utils-merge "1.0.1" console-browserify@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= constantinople@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-4.0.1.tgz#0def113fa0e4dc8de83331a5cf79c8b325213151" integrity sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw== dependencies: "@babel/parser" "^7.6.0" "@babel/types" "^7.6.1" constants-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= content-disposition@0.5.3: version "0.5.3" resolved 
"https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== dependencies: safe-buffer "5.1.2" content-type@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.5.0, convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== dependencies: safe-buffer "~5.1.1" cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= cookie@0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= cookie@0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== copy-concurrently@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== dependencies: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.0" copy-descriptor@^0.1.0: version "0.1.1" resolved 
"https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= core-js-compat@^3.1.1: version "3.4.2" resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.4.2.tgz#652fa7c54652b7f6586a893e37001df55ea2ac37" integrity sha512-W0Aj+LM3EAxxjD0Kp2o4be8UlnxIZHNupBv2znqrheR4aY2nOn91794k/xoSp+SxqqriiZpTsSwBtZr60cbkwQ== dependencies: browserslist "^4.7.3" semver "^6.3.0" core-js@^2.4.0: version "2.6.10" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.10.tgz#8a5b8391f8cc7013da703411ce5b585706300d7f" integrity sha512-I39t74+4t+zau64EN1fE5v2W31Adtc/REhzWN+gWRRXg6WH5qAsZm62DHpQ1+Yhe4047T55jvzz7MUqF/dBBlA== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cosmiconfig@^5.0.0: version "5.2.1" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== dependencies: import-fresh "^2.0.0" is-directory "^0.3.1" js-yaml "^3.13.1" parse-json "^4.0.0" create-ecdh@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== dependencies: bn.js "^4.1.0" elliptic "^6.0.0" create-hash@^1.1.0, create-hash@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== dependencies: cipher-base "^1.0.1" inherits "^2.0.1" md5.js "^1.3.4" ripemd160 "^2.0.1" sha.js "^2.4.0" 
create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: version "1.1.7" resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== dependencies: cipher-base "^1.0.3" create-hash "^1.1.0" inherits "^2.0.1" ripemd160 "^2.0.0" safe-buffer "^5.0.1" sha.js "^2.4.8" cross-spawn@6.0.5, cross-spawn@^6.0.0: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== dependencies: nice-try "^1.0.4" path-key "^2.0.1" semver "^5.5.0" shebang-command "^1.2.0" which "^1.2.9" crypto-browserify@^3.11.0: version "3.12.0" resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== dependencies: browserify-cipher "^1.0.0" browserify-sign "^4.0.0" create-ecdh "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.0" diffie-hellman "^5.0.0" inherits "^2.0.1" pbkdf2 "^3.0.3" public-encrypt "^4.0.0" randombytes "^2.0.0" randomfill "^1.0.3" css-loader@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-2.1.1.tgz#d8254f72e412bb2238bb44dd674ffbef497333ea" integrity sha512-OcKJU/lt232vl1P9EEDamhoO9iKY3tIjY5GU+XDLblAykTdgs6Ux9P1hTHve8nFKy5KPpOXOsVI/hIwi3841+w== dependencies: camelcase "^5.2.0" icss-utils "^4.1.0" loader-utils "^1.2.3" normalize-path "^3.0.0" postcss "^7.0.14" postcss-modules-extract-imports "^2.0.0" postcss-modules-local-by-default "^2.0.6" postcss-modules-scope "^2.1.0" postcss-modules-values "^2.0.0" postcss-value-parser "^3.3.0" schema-utils "^1.0.0" css-select@^1.1.0: version "1.2.0" resolved 
"https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= dependencies: boolbase "~1.0.0" css-what "2.1" domutils "1.5.1" nth-check "~1.0.1" css-what@2.1: version "2.1.3" resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== cssesc@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== currently-unhandled@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= dependencies: array-find-index "^1.0.1" custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha1-XQKkaFCt8bSjF5RqOSj8y1v9BCU= cyclist@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= d@1, d@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" type "^1.0.1" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" date-format@^2.0.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/date-format/-/date-format-2.1.0.tgz#31d5b5ea211cf5fd764cd38baf9d033df7e125cf" integrity sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA== dateformat@^1.0.6: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" integrity sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk= dependencies: get-stdin "^4.0.1" meow "^3.3.0" debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^3.1.1, debug@^3.2.5, debug@^3.2.6: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" debug@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== dependencies: ms "2.0.0" decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= decode-uri-component@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" 
integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= deep-equal@^1.0.1: version "1.1.1" resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== dependencies: is-arguments "^1.0.4" is-date-object "^1.0.1" is-regex "^1.0.4" object-is "^1.0.1" object-keys "^1.1.1" regexp.prototype.flags "^1.2.0" deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= default-gateway@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== dependencies: execa "^1.0.0" ip-regex "^2.1.0" define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity 
sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" define-property@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" isobject "^3.0.1" del@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== dependencies: "@types/glob" "^7.1.1" globby "^6.1.0" is-path-cwd "^2.0.0" is-path-in-cwd "^2.0.0" p-map "^2.0.0" pify "^4.0.1" rimraf "^2.6.3" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= des.js@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== dependencies: inherits "^2.0.1" minimalistic-assert "^1.0.0" destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" 
integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= detect-indent@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= dependencies: repeating "^2.0.0" detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= detect-node@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw= diffie-hellman@^5.0.0: version "5.0.3" resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== dependencies: bn.js "^4.1.0" miller-rabin "^4.0.0" randombytes "^2.0.0" dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= dns-packet@^1.3.1: version "1.3.4" resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.4.tgz#e3455065824a2507ba886c55a89963bb107dec6f" integrity sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA== dependencies: ip "^1.1.0" safe-buffer "^5.0.1" dns-txt@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= dependencies: buffer-indexof "^1.0.0" doctypes@^1.1.0: version "1.1.0" 
resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" integrity sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk= dom-converter@^0.2: version "0.2.0" resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" dom-serialize@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" integrity sha1-ViromZ9Evl6jB29UGdzVnrQ6yVs= dependencies: custom-event "~1.0.0" ent "~2.2.0" extend "^3.0.0" void-elements "^2.0.0" dom-serializer@0: version "0.2.2" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== dependencies: domelementtype "^2.0.1" entities "^2.0.0" domain-browser@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== domelementtype@1, domelementtype@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== domelementtype@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d" integrity sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ== domhandler@^2.3.0: version "2.4.2" resolved 
"https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== dependencies: domelementtype "1" domutils@1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= dependencies: dom-serializer "0" domelementtype "1" domutils@^1.5.1: version "1.7.0" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== dependencies: dom-serializer "0" domelementtype "1" duplexify@^3.4.2, duplexify@^3.6.0: version "3.7.1" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" inherits "^2.0.1" readable-stream "^2.0.0" stream-shift "^1.0.0" ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.3.723: version "1.3.738" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.738.tgz#aec24b091c82acbfabbdcce08076a703941d17ca" integrity sha512-vCMf4gDOpEylPSLPLSwAEsz+R3ShP02Y3cAKMZvTqule3XcPp7tgc/0ESI7IS6ZeyBlGClE50N53fIOkcIVnpw== elliptic@^6.0.0: version "6.5.4" resolved 
"https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== dependencies: bn.js "^4.11.9" brorand "^1.1.0" hash.js "^1.0.0" hmac-drbg "^1.0.1" inherits "^2.0.4" minimalistic-assert "^1.0.1" minimalistic-crypto-utils "^1.0.1" emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== emojis-list@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" engine.io-client@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.2.1.tgz#6f54c0475de487158a1a7c77d10178708b6add36" integrity sha512-y5AbkytWeM4jQr7m/koQLc5AxpRKC1hEVUb/s1FUAWEJq5AzJJ4NLvzuKPuxtDi5Mq755WuDvZ6Iv2rXj4PTzw== dependencies: component-emitter "1.2.1" component-inherit "0.0.3" debug "~3.1.0" engine.io-parser "~2.1.1" has-cors "1.1.0" indexof "0.0.1" parseqs "0.0.5" parseuri "0.0.5" ws "~3.3.1" xmlhttprequest-ssl "~1.5.4" yeast "0.1.2" engine.io-parser@~2.1.0, engine.io-parser@~2.1.1: version "2.1.3" resolved 
"https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6" integrity sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA== dependencies: after "0.8.2" arraybuffer.slice "~0.0.7" base64-arraybuffer "0.1.5" blob "0.0.5" has-binary2 "~1.0.2" engine.io@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.2.1.tgz#b60281c35484a70ee0351ea0ebff83ec8c9522a2" integrity sha512-+VlKzHzMhaU+GsCIg4AoXF1UdDFjHHwMmMKqMJNDNLlUlejz58FCy4LBqB2YVJskHGYl06BatYWKP2TVdVXE5w== dependencies: accepts "~1.3.4" base64id "1.0.0" cookie "0.3.1" debug "~3.1.0" engine.io-parser "~2.1.0" ws "~3.3.1" enhanced-resolve@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" tapable "^1.0.0" enhanced-resolve@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" integrity sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== dependencies: graceful-fs "^4.1.2" memory-fs "^0.5.0" tapable "^1.0.0" ent@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= entities@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4" integrity 
sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw== errno@^0.1.1, errno@^0.1.3, errno@~0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== dependencies: prr "~1.0.1" error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es-abstract@^1.5.1: version "1.16.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.16.0.tgz#d3a26dc9c3283ac9750dca569586e976d9dcc06d" integrity sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg== dependencies: es-to-primitive "^1.2.0" function-bind "^1.1.1" has "^1.0.3" has-symbols "^1.0.0" is-callable "^1.1.4" is-regex "^1.0.4" object-inspect "^1.6.0" object-keys "^1.1.1" string.prototype.trimleft "^2.1.0" string.prototype.trimright "^2.1.0" es-to-primitive@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50: version "0.10.53" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== dependencies: es6-iterator "~2.0.3" es6-symbol "~3.1.3" next-tick "~1.0.0" es6-iterator@~2.0.3: version "2.0.3" resolved 
"https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= dependencies: d "1" es5-ext "^0.10.35" es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== dependencies: d "^1.0.1" ext "^1.1.2" escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@1.8.x: version "1.8.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" integrity sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg= dependencies: esprima "^2.7.1" estraverse "^1.9.1" esutils "^2.0.2" optionator "^0.8.1" optionalDependencies: source-map "~0.2.0" eslint-scope@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== dependencies: esrecurse "^4.1.0" estraverse "^4.1.1" esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved 
"https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esrecurse@^4.1.0: version "4.2.1" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== dependencies: estraverse "^4.1.0" estraverse@^1.9.1: version "1.9.3" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= estraverse@^4.1.0, estraverse@^4.1.1: version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" integrity 
sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== eventsource@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== dependencies: original "^1.0.0" evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== dependencies: md5.js "^1.3.4" safe-buffer "^5.1.1" execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== dependencies: cross-spawn "^6.0.0" get-stream "^4.0.0" is-stream "^1.1.0" npm-run-path "^2.0.0" p-finally "^1.0.0" signal-exit "^3.0.0" strip-eof "^1.0.0" expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= dependencies: debug "^2.3.3" define-property "^0.2.5" extend-shallow "^2.0.1" posix-character-classes "^0.1.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= dependencies: homedir-polyfill "^1.0.1" express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" integrity 
sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== dependencies: accepts "~1.3.7" array-flatten "1.1.1" body-parser "1.19.0" content-disposition "0.5.3" content-type "~1.0.4" cookie "0.4.0" cookie-signature "1.0.6" debug "2.6.9" depd "~1.1.2" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" finalhandler "~1.1.2" fresh "0.5.2" merge-descriptors "1.0.1" methods "~1.1.2" on-finished "~2.3.0" parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.5" qs "6.7.0" range-parser "~1.2.1" safe-buffer "5.1.2" send "0.17.1" serve-static "1.14.1" setprototypeof "1.1.1" statuses "~1.5.0" type-is "~1.6.18" utils-merge "1.0.1" vary "~1.1.2" ext@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.2.0.tgz#8dd8d2dd21bcced3045be09621fa0cbf73908ba4" integrity sha512-0ccUQK/9e3NreLFg6K6np8aPyRgwycx+oFGtfx1dSp7Wj00Ozw9r05FgBRlzjf2XBM7LAzwgLyDscRrtSU91hA== dependencies: type "^2.0.0" extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= dependencies: assign-symbols "^1.0.0" is-extendable "^1.0.1" extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== 
dependencies: array-unique "^0.3.2" define-property "^1.0.0" expand-brackets "^2.1.4" extend-shallow "^2.0.1" fragment-cache "^0.2.1" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= fast-deep-equal@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= fast-levenshtein@~2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= faye-websocket@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" integrity sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= dependencies: websocket-driver ">=0.5.1" faye-websocket@~0.11.1: version "0.11.3" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.3.tgz#5c0e9a8968e8912c286639fde977a8b209f2508e" integrity sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== dependencies: 
websocket-driver ">=0.5.1" figgy-pudding@^3.5.1: version "3.5.2" resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== file-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-3.0.1.tgz#f8e0ba0b599918b51adfe45d66d1e771ad560faa" integrity sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw== dependencies: loader-utils "^1.0.2" schema-utils "^1.0.0" fill-range@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= dependencies: extend-shallow "^2.0.1" is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range "^2.1.0" fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" finalhandler@1.1.2, finalhandler@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" on-finished "~2.3.0" parseurl "~1.3.3" statuses "~1.5.0" unpipe "~1.0.0" find-cache-dir@^2.0.0, find-cache-dir@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== dependencies: commondir "^1.0.1" make-dir "^2.0.0" pkg-dir "^3.0.0" find-up@^1.0.0: 
version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= dependencies: path-exists "^2.0.0" pinkie-promise "^2.0.0" find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" findup-sync@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== dependencies: detect-file "^1.0.0" is-glob "^4.0.0" micromatch "^3.0.4" resolve-dir "^1.0.1" flatted@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.1.tgz#69e57caa8f0eacbc281d2e2cb458d46fdb449e08" integrity sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg== flush-write-stream@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== dependencies: inherits "^2.0.3" readable-stream "^2.3.6" follow-redirects@^1.0.0: version "1.13.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.0.tgz#b42e8d93a2a7eea5ed88633676d6597bc8e384db" integrity sha512-aq6gF1BEKje4a9i9+5jimNFIpq4Q1WiwBToeRK5NvZBd/TRsmW8BsJfOEGkr76TbOyPVD3OVDN910EcUNtRYEA== for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= forever-agent@~0.6.1: version "0.6.1" resolved 
"https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" forwarded@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= dependencies: map-cache "^0.2.2" fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= from2@^2.1.0: version "2.3.0" resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= dependencies: inherits "^2.0.1" readable-stream "^2.0.0" fs-access@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" integrity sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o= dependencies: null-check "^1.0.0" fs-extra@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== dependencies: graceful-fs "^4.1.2" jsonfile "^4.0.0" universalify "^0.1.0" fs-minipass@^1.2.7: version "1.2.7" resolved 
"https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== dependencies: minipass "^2.6.0" fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= dependencies: graceful-fs "^4.1.2" iferr "^0.1.5" imurmurhash "^0.1.4" readable-stream "1 || 2" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.2.7: version "1.2.9" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: nan "^2.12.1" node-pre-gyp "^0.12.0" fsevents@~2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805" integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA== function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" has-unicode "^2.0.0" object-assign "^4.1.0" signal-exit "^3.0.0" string-width "^1.0.1" strip-ansi "^3.0.1" wide-align "^1.1.0" get-caller-file@^1.0.1: version "1.0.3" 
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-caller-file@^2.0.1: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-stdin@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= get-stream@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== dependencies: pump "^3.0.0" get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-parent@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= dependencies: is-glob "^3.1.0" path-dirname "^1.0.0" glob-parent@~5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.0.tgz#5f4c1d1e748d30cd73ad2944b3577a81b081e8c2" integrity sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw== dependencies: is-glob "^4.0.1" glob@^5.0.13, glob@^5.0.15: version "5.0.15" resolved 
"https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" integrity sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E= dependencies: inflight "^1.0.4" inherits "2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^7.0.3, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" global-modules@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== dependencies: global-prefix "^3.0.0" global-modules@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" is-windows "^1.0.1" resolve-dir "^1.0.0" global-prefix@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= dependencies: expand-tilde "^2.0.2" homedir-polyfill "^1.0.1" ini "^1.3.4" is-windows "^1.0.1" which "^1.2.14" global-prefix@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== dependencies: ini "^1.3.5" kind-of "^6.0.2" which "^1.3.1" globals@^11.1.0: version "11.12.0" resolved 
"https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== globby@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" glob "^7.0.3" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: version "4.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== handle-thing@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754" integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ== handlebars@^4.0.1: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== dependencies: minimist "^1.2.5" neo-async "^2.6.0" source-map "^0.6.1" wordwrap "^1.0.0" optionalDependencies: uglify-js "^3.1.4" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.0: version "5.1.3" resolved 
"https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: ajv "^6.5.5" har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-binary2@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/has-binary2/-/has-binary2-1.0.3.tgz#7776ac627f3ea77250cfc332dab7ddf5e4f5d11d" integrity sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw== dependencies: isarray "2.0.1" has-cors@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk= has-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-symbols@^1.0.0, has-symbols@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= has-value@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity 
sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= dependencies: get-value "^2.0.3" has-values "^0.1.4" isobject "^2.0.0" has-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: get-value "^2.0.6" has-values "^1.0.0" isobject "^3.0.0" has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= has-values@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= dependencies: is-number "^3.0.0" kind-of "^4.0.0" has@^1.0.1, has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hash-base@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== dependencies: inherits "^2.0.3" minimalistic-assert "^1.0.1" he@1.2.x: version "1.2.0" resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== hmac-drbg@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= dependencies: hash.js "^1.0.3" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.1" homedir-polyfill@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== hpack.js@^2.1.6: version "2.1.6" resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= dependencies: inherits "^2.0.1" obuf "^1.0.0" readable-stream "^2.0.1" wbuf "^1.1.0" html-entities@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8= html-minifier@^3.2.3: version "3.5.21" resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== dependencies: camel-case "3.0.x" clean-css "4.2.x" commander "2.17.x" he "1.2.x" param-case "2.1.x" relateurl "0.2.x" uglify-js "3.4.x" html-webpack-plugin@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= dependencies: html-minifier "^3.2.3" loader-utils "^0.2.16" lodash 
"^4.17.3" pretty-error "^2.0.2" tapable "^1.0.0" toposort "^1.0.0" util.promisify "1.0.0" htmlparser2@^3.3.0: version "3.10.1" resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== dependencies: domelementtype "^1.3.1" domhandler "^2.3.0" domutils "^1.5.1" entities "^1.1.1" inherits "^2.0.1" readable-stream "^3.1.1" http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= http-errors@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" http-errors@~1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.0" statuses ">= 1.4.0 < 2" http-errors@~1.7.2: version "1.7.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== dependencies: depd "~1.1.2" inherits "2.0.4" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" "http-parser-js@>=0.4.0 <0.4.11": version "0.4.10" resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4" integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q= http-proxy-middleware@0.19.1: version "0.19.1" resolved 
"https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== dependencies: http-proxy "^1.17.0" is-glob "^4.0.0" lodash "^4.17.11" micromatch "^3.1.10" http-proxy@^1.13.0, http-proxy@^1.17.0: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" requires-port "^1.0.0" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= iconv-lite@0.4.24, iconv-lite@^0.4.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" icss-replace-symbols@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= icss-utils@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== dependencies: postcss 
"^7.0.14" ieee754@^1.1.4: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== iferr@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= ignore-walk@^3.0.1: version "3.0.3" resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== dependencies: minimatch "^3.0.4" image-size@~0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= import-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= dependencies: import-from "^2.1.0" import-fresh@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= dependencies: caller-path "^2.0.0" resolve-from "^3.0.0" import-from@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" integrity sha1-M1238qev/VOqpHHUuAId7ja387E= dependencies: resolve-from "^3.0.0" import-glob-loader@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/import-glob-loader/-/import-glob-loader-1.1.0.tgz#98d84c0f661c8ba9f821d9ddb7c6b6dc8e97eca2" integrity sha1-mNhMD2Yci6n4Idndt8a23I6X7KI= dependencies: glob "^5.0.13" loader-utils "^0.2.10" import-local@2.0.0, import-local@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== dependencies: pkg-dir "^3.0.0" resolve-cwd "^2.0.0" imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= dependencies: repeating "^2.0.0" indexes-of@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= indexof@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= infer-owner@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: version "1.3.7" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84" integrity sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ== internal-ip@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" integrity sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== dependencies: default-gateway "^4.2.0" ipaddr.js "^1.9.0" interpret@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" invert-kv@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= ip@^1.1.0, ip@^1.1.5: version "1.1.5" resolved 
"https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= ipaddr.js@1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== ipaddr.js@^1.9.0: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== is-absolute-url@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== is-accessor-descriptor@^0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arguments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.0.4.tgz#3faf966c7cba0ff437fb31f6250082fcf0448cf3" integrity sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA== is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: version "1.0.1" 
resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-callable@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== is-core-module@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== dependencies: has "^1.0.3" is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== dependencies: kind-of "^6.0.0" is-date-object@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" is-data-descriptor "^1.0.0" kind-of "^6.0.2" is-directory@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= is-expression@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-4.0.0.tgz#c33155962abf21d0afd2552514d67d2ec16fd2ab" integrity sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A== dependencies: acorn "^7.1.1" object-assign "^4.1.1" is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= is-extendable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^2.1.0, is-extglob@^2.1.1: version "2.1.1" 
resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" integrity sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= is-glob@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-cwd@^2.0.0: version "2.2.0" resolved 
"https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== is-path-in-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== dependencies: is-path-inside "^2.1.0" is-path-inside@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== dependencies: path-is-inside "^1.0.2" is-plain-obj@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= is-regex@^1.0.3, is-regex@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= dependencies: has "^1.0.1" is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-symbol@^1.0.2: version 
"1.0.3" resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== dependencies: has-symbols "^1.0.1" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== is-wsl@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isarray@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.1.tgz#a37d94ed9cda2d59865c9f76fe596ee1f338741e" integrity sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4= isbinaryfile@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" integrity sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== dependencies: buffer-alloc "^1.2.0" isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= 
isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul-instrumenter-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/istanbul-instrumenter-loader/-/istanbul-instrumenter-loader-3.0.1.tgz#9957bd59252b373fae5c52b7b5188e6fde2a0949" integrity sha512-a5SPObZgS0jB/ixaKSMdn6n/gXSrK2S6q/UfRJBT3e6gQmVjwZROTODQsYW5ZNwOu78hG62Y3fWlebaVOL0C+w== dependencies: convert-source-map "^1.5.0" istanbul-lib-instrument "^1.7.3" loader-utils "^1.1.0" schema-utils "^0.3.0" istanbul-lib-coverage@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0" integrity sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ== istanbul-lib-instrument@^1.7.3: version "1.10.2" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca" integrity sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A== dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" babylon "^6.18.0" istanbul-lib-coverage "^1.2.1" semver "^5.3.0" istanbul@^0.4.0: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" integrity sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs= 
dependencies: abbrev "1.0.x" async "1.x" escodegen "1.8.x" esprima "2.7.x" glob "^5.0.15" handlebars "^4.0.1" js-yaml "3.x" mkdirp "0.5.x" nopt "3.x" once "1.x" resolve "1.1.x" supports-color "^3.1.0" which "^1.1.1" wordwrap "^1.0.0" jasmine-core@^3.3, jasmine-core@^3.4.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-3.5.0.tgz#132c23e645af96d85c8bca13c8758b18429fc1e4" integrity sha512-nCeAiw37MIMA9w9IXso7bRaLl+c/ef3wnxsoSAlYrzS+Ot0zTG6nU8G/cIfGkqpkjX2wNaIW9RFG0TwIFnG6bA== jquery@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.5.0.tgz#9980b97d9e4194611c36530e7dc46a58d7340fc9" integrity sha512-Xb7SVYMvygPxbFMpTFQiHh1J7HClEaThguL15N/Gg37Lri/qKyhRGZYzHRyLH8Stq3Aow0LsHO2O2ci86fCrNQ== js-levenshtein@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-stringify@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" integrity sha1-Fzb939lyTyijaCrcYjCufk6Weds= "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@3.x, js-yaml@^3.13.1: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" 
esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== json-schema-traverse@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version "5.0.1" resolved 
"https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= json3@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== json5@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== dependencies: minimist "^1.2.0" json5@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== dependencies: minimist "^1.2.0" jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= optionalDependencies: graceful-fs "^4.1.6" jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" jstransformer@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" integrity sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM= dependencies: is-promise "^2.0.0" promise "^7.0.1" karma-chrome-launcher@^2.2.0: version "2.2.0" resolved 
"https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" integrity sha512-uf/ZVpAabDBPvdPdveyk1EPgbnloPvFFGgmRhYLTDH7gEB4nZdSBk8yTU47w1g/drLSx5uMOkjKk7IWKfWg/+w== dependencies: fs-access "^1.0.0" which "^1.2.1" karma-coverage@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/karma-coverage/-/karma-coverage-1.1.2.tgz#cc09dceb589a83101aca5fe70c287645ef387689" integrity sha512-eQawj4Cl3z/CjxslYy9ariU4uDh7cCNFZHNWXWRpl0pNeblY/4wHR7M7boTYXWrn9bY0z2pZmr11eKje/S/hIw== dependencies: dateformat "^1.0.6" istanbul "^0.4.0" lodash "^4.17.0" minimatch "^3.0.0" source-map "^0.5.1" karma-jasmine@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-2.0.1.tgz#26e3e31f2faf272dd80ebb0e1898914cc3a19763" integrity sha512-iuC0hmr9b+SNn1DaUD2QEYtUxkS1J+bSJSn7ejdEexs7P8EYvA1CWkEdrDQ+8jVH3AgWlCNwjYsT1chjcNW9lA== dependencies: jasmine-core "^3.3" karma-sourcemap-loader@^0.3.7: version "0.3.7" resolved "https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8" integrity sha1-kTIsd/jxPUb+0GKwQuEAnUxFBdg= dependencies: graceful-fs "^4.1.2" karma-spec-reporter@^0.0.32: version "0.0.32" resolved "https://registry.yarnpkg.com/karma-spec-reporter/-/karma-spec-reporter-0.0.32.tgz#2e9c7207ea726771260259f82becb543209e440a" integrity sha1-LpxyB+pyZ3EmAln4K+y1QyCeRAo= dependencies: colors "^1.1.2" karma-webpack@^3.0.5: version "3.0.5" resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-3.0.5.tgz#1ff1e3a690fb73ae95ee95f9ab58f341cfc7b40f" integrity sha512-nRudGJWstvVuA6Tbju9tyGUfXTtI1UXMXoRHVmM2/78D0q6s/Ye2IC157PKNDC15PWFGR0mVIRtWLAdcfsRJoA== dependencies: async "^2.0.0" babel-runtime "^6.0.0" loader-utils "^1.0.0" lodash "^4.0.0" source-map "^0.5.6" webpack-dev-middleware "^2.0.6" karma@^4.1.0: version "4.4.1" resolved 
"https://registry.yarnpkg.com/karma/-/karma-4.4.1.tgz#6d9aaab037a31136dc074002620ee11e8c2e32ab" integrity sha512-L5SIaXEYqzrh6b1wqYC42tNsFMx2PWuxky84pK9coK09MvmL7mxii3G3bZBh/0rvD27lqDd0le9jyhzvwif73A== dependencies: bluebird "^3.3.0" body-parser "^1.16.1" braces "^3.0.2" chokidar "^3.0.0" colors "^1.1.0" connect "^3.6.0" di "^0.0.1" dom-serialize "^2.2.0" flatted "^2.0.0" glob "^7.1.1" graceful-fs "^4.1.2" http-proxy "^1.13.0" isbinaryfile "^3.0.0" lodash "^4.17.14" log4js "^4.0.0" mime "^2.3.1" minimatch "^3.0.2" optimist "^0.6.1" qjobs "^1.1.4" range-parser "^1.2.0" rimraf "^2.6.0" safe-buffer "^5.0.1" socket.io "2.1.1" source-map "^0.6.1" tmp "0.0.33" useragent "2.3.0" killable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== lcid@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== dependencies: invert-kv "^2.0.0" less-loader@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-5.0.0.tgz#498dde3a6c6c4f887458ee9ed3f086a12ad1b466" integrity sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg== dependencies: clone "^2.1.1" loader-utils "^1.1.0" pify "^4.0.1" less@^3.9.0: version "3.10.3" resolved "https://registry.yarnpkg.com/less/-/less-3.10.3.tgz#417a0975d5eeecc52cff4bcfa3c09d35781e6792" integrity sha512-vz32vqfgmoxF1h3K4J+yKCtajH0PWmjkIFgbs5d78E/c/e+UQTnI+lWK+1eQRE95PXM2mC3rJlLSSP9VQHnaow== dependencies: clone "^2.1.2" optionalDependencies: errno "^0.1.1" graceful-fs "^4.1.2" image-size "~0.5.0" mime "^1.4.1" mkdirp "^0.5.0" promise "^7.1.1" request "^2.83.0" source-map "~0.6.0" levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" pify "^2.0.0" pinkie-promise "^2.0.0" strip-bom "^2.0.0" loader-runner@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== loader-utils@1.2.3, loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3: version "1.2.3" resolved 
"https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== dependencies: big.js "^5.2.2" emojis-list "^2.0.0" json5 "^1.0.1" loader-utils@^0.2.10, loader-utils@^0.2.16: version "0.2.17" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= dependencies: big.js "^3.1.3" emojis-list "^2.0.0" json5 "^0.5.0" object-assign "^4.0.1" locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" lodash@^4.0.0, lodash@^4.17.0, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.3, lodash@^4.17.4: version "4.17.19" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b" integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ== lodash@^4.17.19: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-symbols@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== dependencies: chalk "^2.0.1" log4js@^4.0.0: version "4.5.1" resolved "https://registry.yarnpkg.com/log4js/-/log4js-4.5.1.tgz#e543625e97d9e6f3e6e7c9fc196dd6ab2cae30b5" integrity 
sha512-EEEgFcE9bLgaYUKuozyFfytQM2wDHtXn4tAN41pkaxpNjAykv11GVdeI4tHtmPWW4Xrgh9R/2d7XYghDVjbKKw== dependencies: date-format "^2.0.0" debug "^4.1.1" flatted "^2.0.0" rfdc "^1.1.4" streamroller "^1.0.6" loglevel@^1.6.4: version "1.6.6" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.6.tgz#0ee6300cc058db6b3551fa1c4bf73b83bb771312" integrity sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ== loglevelnext@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/loglevelnext/-/loglevelnext-1.0.5.tgz#36fc4f5996d6640f539ff203ba819641680d75a2" integrity sha512-V/73qkPuJmx4BcBF19xPBr+0ZRVBhc4POxvZTZdMeXpJ4NItXSJ/MSwuFT0kQJlCbXvdlZoQQ/418bS1y9Jh6A== dependencies: es6-symbol "^3.1.1" object.assign "^4.1.0" loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" loud-rejection@^1.0.0, loud-rejection@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= dependencies: currently-unhandled "^0.4.1" signal-exit "^3.0.0" lower-case@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= lru-cache@4.1.x: version "4.1.5" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" lru-cache@^5.1.1: version "5.1.1" resolved 
"https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" make-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== dependencies: pify "^4.0.1" semver "^5.6.0" mamacro@^0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== map-age-cleaner@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== dependencies: p-defer "^1.0.0" map-cache@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" md5.js@^1.3.4: version "1.3.5" resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== dependencies: hash-base "^3.0.0" inherits "^2.0.1" 
safe-buffer "^5.1.2" media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= mem@^4.0.0: version "4.3.0" resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== dependencies: map-age-cleaner "^0.1.1" mimic-fn "^2.0.0" p-is-promise "^2.0.0" memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= dependencies: errno "^0.1.3" readable-stream "^2.0.1" memory-fs@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== dependencies: errno "^0.1.3" readable-stream "^2.0.1" meow@^3.3.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= dependencies: camelcase-keys "^2.0.0" decamelize "^1.1.2" loud-rejection "^1.0.0" map-obj "^1.0.1" minimist "^1.1.3" normalize-package-data "^2.3.4" object-assign "^4.0.1" read-pkg-up "^1.0.1" redent "^1.0.0" trim-newlines "^1.0.0" merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: version 
"3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" braces "^2.3.1" define-property "^2.0.2" extend-shallow "^3.0.2" extglob "^2.0.4" fragment-cache "^0.2.1" kind-of "^6.0.2" nanomatch "^1.2.9" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.2" miller-rabin@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== dependencies: bn.js "^4.0.0" brorand "^1.0.1" mime-db@1.42.0, "mime-db@>= 1.40.0 < 2": version "1.42.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.42.0.tgz#3e252907b4c7adb906597b4b65636272cf9e7bac" integrity sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ== mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.19, mime-types@~2.1.24: version "2.1.25" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.25.tgz#39772d46621f93e2a80a856c53b86a62156a6437" integrity sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg== dependencies: mime-db "1.42.0" mime@1.6.0, mime@^1.4.1: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.1.0, mime@^2.3.1, mime@^2.4.4: version "2.4.4" resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== mimic-fn@^2.0.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== mini-css-extract-plugin@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz#a3f13372d6fcde912f3ee4cd039665704801e3b9" integrity sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw== dependencies: loader-utils "^1.1.0" normalize-url "^2.0.1" schema-utils "^1.0.0" webpack-sources "^1.1.0" minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimalistic-crypto-utils@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= "minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= minipass@^2.6.0, 
minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" minizlib@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== dependencies: minipass "^2.9.0" mississippi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== dependencies: concat-stream "^1.5.0" duplexify "^3.4.2" end-of-stream "^1.1.0" flush-write-stream "^1.0.0" from2 "^2.1.0" parallel-transform "^1.1.0" pump "^3.0.0" pumpify "^1.3.3" stream-each "^1.1.0" through2 "^2.0.0" mixin-deep@^1.2.0: version "1.3.2" resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" move-concurrently@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= dependencies: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" 
mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.3" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== ms@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== multicast-dns-service-types@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= multicast-dns@^6.0.1: version "6.2.3" resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" integrity sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== dependencies: dns-packet "^1.3.1" thunky "^1.0.2" nan@^2.12.1: version "2.14.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" define-property "^2.0.2" extend-shallow "^3.0.2" fragment-cache "^0.2.1" is-windows "^1.0.2" kind-of "^6.0.2" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" needle@^2.2.1: 
version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" negotiator@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== neo-async@^2.5.0, neo-async@^2.6.0, neo-async@^2.6.1: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== next-tick@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== no-case@^2.2.0: version "2.3.2" resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== dependencies: lower-case "^1.1.1" node-forge@0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579" integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ== node-libs-browser@^2.2.0, node-libs-browser@^2.2.1: version "2.2.1" resolved 
"https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== dependencies: assert "^1.1.1" browserify-zlib "^0.2.0" buffer "^4.3.0" console-browserify "^1.1.0" constants-browserify "^1.0.0" crypto-browserify "^3.11.0" domain-browser "^1.1.1" events "^3.0.0" https-browserify "^1.0.0" os-browserify "^0.3.0" path-browserify "0.0.1" process "^0.11.10" punycode "^1.2.4" querystring-es3 "^0.2.0" readable-stream "^2.3.3" stream-browserify "^2.0.1" stream-http "^2.7.2" string_decoder "^1.0.0" timers-browserify "^2.0.4" tty-browserify "0.0.0" url "^0.11.0" util "^0.11.0" vm-browserify "^1.0.1" node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" needle "^2.2.1" nopt "^4.0.1" npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.2.7" rimraf "^2.6.1" semver "^5.3.0" tar "^4" node-releases@^1.1.71: version "1.1.72" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe" integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== nopt@3.x: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" integrity sha1-xkZdvwirzU2zWTF/eaxopkayj/k= dependencies: abbrev "1" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.5.0" resolved 
"https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: remove-trailing-separator "^1.0.1" normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= normalize-url@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== dependencies: prepend-http "^2.0.0" query-string "^5.0.1" sort-keys "^2.0.0" npm-bundled@^1.0.1: version "1.0.6" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== npm-packlist@^1.1.6: version "1.4.6" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.6.tgz#53ba3ed11f8523079f1457376dd379ee4ea42ff4" integrity sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg== dependencies: 
ignore-walk "^3.0.1" npm-bundled "^1.0.1" npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" gauge "~2.7.3" set-blocking "~2.0.0" nth-check@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== dependencies: boolbase "~1.0.0" null-check@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" integrity sha1-l33/1xdgErnsMNKjnbXPcqBDnt0= null-loader@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-1.0.0.tgz#90e85798e50e9dd1d568495a44e74829dec26744" integrity sha512-mYLDjDVTkjTlFoidxRhzO75rdcwfVXfw5G5zpj8sXnBkHtKJxMk4hTcRR4i5SOhDB6EvcQuYriy6IV23eq6uog== dependencies: loader-utils "^1.2.3" schema-utils "^1.0.0" num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity 
sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" integrity sha1-8MaapQ78lbhmwYb0AKM3acsvEpE= object-copy@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" object-inspect@^1.6.0: version "1.7.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== object-is@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.0.1.tgz#0aa60ec9989a0b3ed795cf4d06f62cf1ad6539b6" integrity sha1-CqYOyZiaCz7Xlc9NBvYs8a1lObY= object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity 
sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== dependencies: define-properties "^1.1.2" function-bind "^1.1.1" has-symbols "^1.0.0" object-keys "^1.0.11" object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= dependencies: define-properties "^1.1.2" es-abstract "^1.5.1" object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= dependencies: isobject "^3.0.1" obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== on-finished@~2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@1.x, once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" opn@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== dependencies: is-wsl "^1.1.0" 
optimist@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.6" levn "~0.3.0" prelude-ls "~1.1.2" type-check "~0.3.2" word-wrap "~1.2.3" original@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== dependencies: url-parse "^1.4.3" os-browserify@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-locale@^3.0.0, os-locale@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== dependencies: execa "^1.0.0" lcid "^2.0.0" mem "^4.0.0" os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" integrity 
sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== p-limit@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.1.tgz#aa07a788cc3151c939b5131f63570f0dd2009537" integrity sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg== dependencies: p-try "^2.0.0" p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-map@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== p-retry@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== dependencies: retry "^0.12.0" p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" 
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@~1.0.5: version "1.0.10" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.10.tgz#4328badb5086a426aa90f541977d4955da5c9732" integrity sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw== parallel-transform@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== dependencies: cyclist "^1.0.1" inherits "^2.0.3" readable-stream "^2.1.5" param-case@2.1.x: version "2.1.1" resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= dependencies: no-case "^2.2.0" parse-asn1@^5.0.0: version "5.1.5" resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== dependencies: asn1.js "^4.0.0" browserify-aes "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.0" pbkdf2 "^3.0.3" safe-buffer "^5.1.1" parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse-json@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= dependencies: error-ex "^1.3.1" json-parse-better-errors "^1.0.1" parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity 
sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= parseqs@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" integrity sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0= dependencies: better-assert "~1.0.0" parseuri@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" integrity sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo= dependencies: better-assert "~1.0.0" parseurl@~1.3.2, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascalcase@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= path-browserify@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" resolved 
"https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-is-inside@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= path-key@^2.0.0, path-key@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= path-parse@^1.0.6: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify "^2.0.0" pinkie-promise "^2.0.0" pbkdf2@^3.0.3: version "3.0.17" resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" ripemd160 "^2.0.1" safe-buffer "^5.0.1" sha.js "^2.4.8" performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= picomatch@^2.0.4: version "2.1.1" resolved 
"https://registry.yarnpkg.com/picomatch/-/picomatch-2.1.1.tgz#ecdfbea7704adb5fe6fb47f9866c4c0e15e905c5" integrity sha512-OYMyqkKzK7blWO/+XZYP6w8hH0LDvkBvdvKukti+7kqYFCiEAk+gI3DWnryapc0Dau05ugGTy0foQ6mqn4AHYA== pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= pkg-dir@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== dependencies: find-up "^3.0.0" portfinder@^1.0.25: version "1.0.25" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.25.tgz#254fd337ffba869f4b9d37edc298059cb4d35eca" integrity sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg== dependencies: async "^2.6.2" debug "^3.1.1" mkdirp "^0.5.1" posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= postcss-load-config@^2.0.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.0.tgz#c84d692b7bb7b41ddced94ee62e8ab31b417b003" integrity sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q== dependencies: cosmiconfig "^5.0.0" import-cwd "^2.0.0" postcss-loader@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== dependencies: loader-utils "^1.1.0" postcss "^7.0.0" postcss-load-config "^2.0.0" schema-utils "^1.0.0" postcss-modules-extract-imports@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== dependencies: postcss "^7.0.5" postcss-modules-local-by-default@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.6.tgz#dd9953f6dd476b5fd1ef2d8830c8929760b56e63" integrity sha512-oLUV5YNkeIBa0yQl7EYnxMgy4N6noxmiwZStaEJUSe2xPMcdNc8WmBQuQCx18H5psYbVxz8zoHk0RAAYZXP9gA== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-value-parser "^3.3.1" postcss-modules-scope@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.1.0.tgz#ad3f5bf7856114f6fcab901b0502e2a2bc39d4eb" integrity sha512-91Rjps0JnmtUB0cujlc8KIKCsJXWjzuxGeT/+Q2i2HXKZ7nBUeF9YQTZZTNvHVoNYj1AthsjnGLtqDUE0Op79A== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-modules-values@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-2.0.0.tgz#479b46dc0c5ca3dc7fa5270851836b9ec7152f64" integrity 
sha512-Ki7JZa7ff1N3EIMlPnGTZfUMe69FFwiQPnVSXC9mnn3jozCRBYIxiZd44yJOV2AmabOo4qFf8s0dC/+lweG7+w== dependencies: icss-replace-symbols "^1.1.0" postcss "^7.0.6" postcss-selector-parser@^6.0.0: version "6.0.2" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz#934cf799d016c83411859e09dcecade01286ec5c" integrity sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg== dependencies: cssesc "^3.0.0" indexes-of "^1.0.1" uniq "^1.0.1" postcss-value-parser@^3.3.0, postcss-value-parser@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== postcss-value-parser@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz#482282c09a42706d1fc9a069b73f44ec08391dc9" integrity sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ== postcss@^7.0.0, postcss@^7.0.14, postcss@^7.0.23, postcss@^7.0.5, postcss@^7.0.6: version "7.0.36" resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.36.tgz#056f8cffa939662a8f5905950c07d5285644dfcb" integrity sha512-BebJSIUMwJHRH0HAQoxN4u1CN86glsrwsW0q7T+/m44eXOUAxSNdHRkNZPYz5vVUbg17hFgOQDE7fZk7li3pZw== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= prepend-http@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= pretty-error@^2.0.2: version "2.1.1" resolved 
"https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= dependencies: renderkid "^2.0.1" utila "~0.4" private@^0.1.6: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= promise-inflight@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= promise@^7.0.1, promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== dependencies: asap "~2.0.3" proxy-addr@~2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== dependencies: forwarded "~0.1.2" ipaddr.js "1.9.0" prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= pseudomap@^1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= psl@^1.1.24: version "1.4.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.4.0.tgz#5dd26156cdb69fa1fdb8ab1991667d3f80ced7c2" integrity sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw== public-encrypt@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== dependencies: bn.js "^4.1.0" browserify-rsa "^4.0.0" create-hash "^1.1.0" parse-asn1 "^5.0.0" randombytes "^2.0.1" safe-buffer "^5.1.2" pug-attrs@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-3.0.0.tgz#b10451e0348165e31fad1cc23ebddd9dc7347c41" integrity sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA== dependencies: constantinople "^4.0.1" js-stringify "^1.0.2" pug-runtime "^3.0.0" pug-code-gen@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-3.0.2.tgz#ad190f4943133bf186b60b80de483100e132e2ce" integrity sha512-nJMhW16MbiGRiyR4miDTQMRWDgKplnHyeLvioEJYbk1RsPI3FuA3saEP8uwnTb2nTJEKBU90NFVWJBk4OU5qyg== dependencies: constantinople "^4.0.1" doctypes "^1.1.0" js-stringify "^1.0.2" pug-attrs "^3.0.0" pug-error "^2.0.0" pug-runtime "^3.0.0" void-elements "^3.1.0" with "^7.0.0" pug-error@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-2.0.0.tgz#5c62173cb09c34de2a2ce04f17b8adfec74d8ca5" integrity sha512-sjiUsi9M4RAGHktC1drQfCr5C5eriu24Lfbt4s+7SykztEOwVZtbFk1RRq0tzLxcMxMYTBR+zMQaG07J/btayQ== pug-filters@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-4.0.0.tgz#d3e49af5ba8472e9b7a66d980e707ce9d2cc9b5e" integrity 
sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A== dependencies: constantinople "^4.0.1" jstransformer "1.0.0" pug-error "^2.0.0" pug-walk "^2.0.0" resolve "^1.15.1" pug-lexer@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-5.0.1.tgz#ae44628c5bef9b190b665683b288ca9024b8b0d5" integrity sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w== dependencies: character-parser "^2.2.0" is-expression "^4.0.0" pug-error "^2.0.0" pug-linker@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-4.0.0.tgz#12cbc0594fc5a3e06b9fc59e6f93c146962a7708" integrity sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw== dependencies: pug-error "^2.0.0" pug-walk "^2.0.0" pug-load@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-load/-/pug-load-3.0.0.tgz#9fd9cda52202b08adb11d25681fb9f34bd41b662" integrity sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ== dependencies: object-assign "^4.1.1" pug-walk "^2.0.0" pug-parser@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-6.0.0.tgz#a8fdc035863a95b2c1dc5ebf4ecf80b4e76a1260" integrity sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw== dependencies: pug-error "^2.0.0" token-stream "1.0.0" pug-runtime@^3.0.0, pug-runtime@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-3.0.1.tgz#f636976204723f35a8c5f6fad6acda2a191b83d7" integrity sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg== pug-strip-comments@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz#f94b07fd6b495523330f490a7f554b4ff876303e" integrity 
sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ== dependencies: pug-error "^2.0.0" pug-walk@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-2.0.0.tgz#417aabc29232bb4499b5b5069a2b2d2a24d5f5fe" integrity sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ== pug@^3.0.1: version "3.0.2" resolved "https://registry.yarnpkg.com/pug/-/pug-3.0.2.tgz#f35c7107343454e43bc27ae0ff76c731b78ea535" integrity sha512-bp0I/hiK1D1vChHh6EfDxtndHji55XP/ZJKwsRqrz6lRia6ZC2OZbdAymlxdVFwd1L70ebrVJw4/eZ79skrIaw== dependencies: pug-code-gen "^3.0.2" pug-filters "^4.0.0" pug-lexer "^5.0.1" pug-linker "^4.0.0" pug-load "^3.0.0" pug-parser "^6.0.0" pug-runtime "^3.0.1" pug-strip-comments "^2.0.0" pump@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pumpify@^1.3.3: version "1.5.1" resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" inherits "^2.0.3" pump "^2.0.0" punycode@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= punycode@^1.2.4, punycode@^1.4.1: version "1.4.1" resolved 
"https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== qjobs@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" integrity sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg== qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== query-string@^5.0.1: version "5.1.1" resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== dependencies: decode-uri-component "^0.2.0" object-assign "^4.1.0" strict-uri-encode "^1.0.0" querystring-es3@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= querystring@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= querystringify@^2.1.1: version "2.2.0" resolved 
"https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" randomfill@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== dependencies: randombytes "^2.0.5" safe-buffer "^5.1.0" range-parser@^1.0.3, range-parser@^1.2.0, range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== dependencies: bytes "3.1.0" http-errors "1.7.2" iconv-lite "0.4.24" unpipe "1.0.0" raw-loader@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-2.0.0.tgz#e2813d9e1e3f80d1bbade5ad082e809679e20c26" integrity sha512-kZnO5MoIyrojfrPWqrhFNLZemIAX8edMOCp++yC5RKxzFB3m92DqKNhKlU6+FvpOhWtvyh3jOaD7J6/9tpdIKg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity 
sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" path-type "^1.0.0" "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@^3.0.6, readable-stream@^3.1.1: version "3.4.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc" integrity sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" util-deprecate "^1.0.1" readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== 
dependencies: graceful-fs "^4.1.11" micromatch "^3.1.10" readable-stream "^2.0.2" readdirp@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839" integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ== dependencies: picomatch "^2.0.4" redent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= dependencies: indent-string "^2.1.0" strip-indent "^1.0.1" regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" regenerate@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== regenerator-runtime@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.2: version "0.13.3" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5" integrity sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw== regenerator-transform@^0.14.0: version "0.14.1" resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.1.tgz#3b2fce4e1ab7732c08f665dfdb314749c7ddd2fb" integrity 
sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ== dependencies: private "^0.1.6" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" safe-regex "^1.1.0" regexp.prototype.flags@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.2.0.tgz#6b30724e306a27833eeb171b66ac8890ba37e41c" integrity sha512-ztaw4M1VqgMwl9HlPpOuiYgItcHlunW0He2fE6eNfT6E/CF2FtYi9ofOYe4mKntstYk0Fyh/rDRBdS3AnxjlrA== dependencies: define-properties "^1.1.2" regexpu-core@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6" integrity sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg== dependencies: regenerate "^1.4.0" regenerate-unicode-properties "^8.1.0" regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" unicode-match-property-value-ecmascript "^1.1.0" regjsgen@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.1.tgz#48f0bf1a5ea205196929c0d9798b42d1ed98443c" integrity sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg== regjsparser@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" relateurl@0.2.x: version "0.2.7" resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= 
remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= renderkid@^2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== dependencies: css-select "^1.1.0" dom-converter "^0.2" htmlparser2 "^3.3.0" strip-ansi "^3.0.0" utila "^0.4.0" repeat-element@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" request@^2.83.0: version "2.88.0" resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.0" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign "~0.9.0" performance-now "^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.4.3" tunnel-agent "^0.6.0" uuid "^3.3.2" require-directory@^2.1.1: 
version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= require-main-filename@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= resolve-cwd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= dependencies: resolve-from "^3.0.0" resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= dependencies: expand-tilde "^2.0.0" global-modules "^1.0.0" resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" integrity sha1-six699nWiBvItuZTM17rywoYh0g= resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= 
resolve@^1.10.0, resolve@^1.3.2, resolve@^1.8.1: version "1.12.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.2.tgz#08b12496d9aa8659c75f534a8f05f0d892fff594" integrity sha512-cAVTI2VLHWYsGOirfeYVVQ7ZDejtQ9fp4YhYckWDEkFfqbVjaT11iM8k6xSAfGFMM+gDpZjMnFssPu8we+mqFw== dependencies: path-parse "^1.0.6" resolve@^1.15.1: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== dependencies: is-core-module "^2.2.0" path-parse "^1.0.6" ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= rfdc@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.1.4.tgz#ba72cc1367a0ccd9cf81a870b3b58bd3ad07f8c2" integrity sha512-5C9HXdzK8EAqN7JDif30jqsBzavB7wLpaubisuQIGHWf2gUXSpzy6ArX/+Da8RjFpagWsCn+pIgxTMAmKw9Zug== rimraf@^2.5.4, rimraf@^2.6.0, rimraf@^2.6.1, rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== dependencies: hash-base "^3.0.0" inherits "^2.0.1" run-queue@^1.0.0, run-queue@^1.0.3: version "1.0.3" resolved 
"https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= dependencies: aproba "^1.1.1" safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= dependencies: ret "~0.1.10" "safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== schema-utils@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf" integrity sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8= dependencies: ajv "^5.0.0" schema-utils@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== dependencies: ajv "^6.1.0" ajv-errors "^1.0.0" ajv-keywords "^3.1.0" select-hose@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= selfsigned@^1.10.7: version "1.10.7" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.7.tgz#da5819fd049d5574f28e88a9bcc6dbc6e6f3906b" integrity sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA== dependencies: node-forge "0.9.0" "semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== dependencies: debug "2.6.9" depd "~1.1.2" destroy "~1.0.4" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" http-errors "~1.7.2" mime "1.6.0" ms "2.1.1" on-finished "~2.3.0" range-parser "~1.2.1" statuses "~1.5.0" serialize-javascript@^1.7.0: version "1.9.1" resolved 
"https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-1.9.1.tgz#cfc200aef77b600c47da9bb8149c943e798c2fdb" integrity sha512-0Vb/54WJ6k5v8sSWN09S0ora+Hnr+cX40r9F170nT+mSkaxltoE/7R3OrIdBSUv1OoiobH1QoWQbCnAO+e8J1A== serve-index@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= dependencies: accepts "~1.3.4" batch "0.6.1" debug "2.6.9" escape-html "~1.0.3" http-errors "~1.6.2" mime-types "~2.1.17" parseurl "~1.3.2" serve-static@1.14.1: version "1.14.1" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" send "0.17.1" set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.3" split-string "^3.0.1" setimmediate@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== 
setprototypeof@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= dependencies: shebang-regex "^1.0.0" shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" isobject "^3.0.0" snapdragon-util "^3.0.1" snapdragon-util@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" resolved 
"https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: base "^0.11.1" debug "^2.2.0" define-property "^0.2.5" extend-shallow "^2.0.1" map-cache "^0.2.2" source-map "^0.5.6" source-map-resolve "^0.5.0" use "^3.1.0" socket.io-adapter@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz#2a805e8a14d6372124dd9159ad4502f8cb07f06b" integrity sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs= socket.io-client@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.1.1.tgz#dcb38103436ab4578ddb026638ae2f21b623671f" integrity sha512-jxnFyhAuFxYfjqIgduQlhzqTcOEQSn+OHKVfAxWaNWa7ecP7xSNk2Dx/3UEsDcY7NcFafxvNvKPmmO7HTwTxGQ== dependencies: backo2 "1.0.2" base64-arraybuffer "0.1.5" component-bind "1.0.0" component-emitter "1.2.1" debug "~3.1.0" engine.io-client "~3.2.0" has-binary2 "~1.0.2" has-cors "1.1.0" indexof "0.0.1" object-component "0.0.3" parseqs "0.0.5" parseuri "0.0.5" socket.io-parser "~3.2.0" to-array "0.1.4" socket.io-parser@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.2.0.tgz#e7c6228b6aa1f814e6148aea325b51aa9499e077" integrity sha512-FYiBx7rc/KORMJlgsXysflWx/RIvtqZbyGLlHZvjfmPTPeuD/I8MaW7cfFrj5tRltICJdgwflhfZ3NVVbVLFQA== dependencies: component-emitter "1.2.1" debug "~3.1.0" isarray "2.0.1" socket.io@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.1.1.tgz#a069c5feabee3e6b214a75b40ce0652e1cfb9980" integrity sha512-rORqq9c+7W0DAK3cleWNSyfv/qKXV99hV4tZe+gGLfBECw3XEhBy7x85F3wypA9688LKjtwO9pX9L33/xQI8yA== dependencies: debug "~3.1.0" engine.io "~3.2.0" has-binary2 "~1.0.2" socket.io-adapter "~1.1.0" socket.io-client "2.1.1" socket.io-parser "~3.2.0" sockjs-client@1.4.0: version "1.4.0" resolved 
"https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.4.0.tgz#c9f2568e19c8fd8173b4997ea3420e0bb306c7d5" integrity sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g== dependencies: debug "^3.2.5" eventsource "^1.0.7" faye-websocket "~0.11.1" inherits "^2.0.3" json3 "^3.3.2" url-parse "^1.4.3" sockjs@0.3.19: version "0.3.19" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d" integrity sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw== dependencies: faye-websocket "^0.10.0" uuid "^3.0.1" sort-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= dependencies: is-plain-obj "^1.0.0" source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-resolve@^0.5.0: version "0.5.2" resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" urix "^0.1.0" source-map-support@~0.5.12: version "0.5.16" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-url@^0.4.0: version "0.4.0" resolved 
"https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= source-map@^0.5.0, source-map@^0.5.1, source-map@^0.5.6, source-map@^0.5.7: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" integrity sha1-2rc/vPwrqBm03gO9b26qSBZLP50= dependencies: amdefine ">=0.0.4" spdx-correct@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: spdx-expression-parse "^3.0.0" spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== spdx-expression-parse@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: version "3.0.5" resolved 
"https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== dependencies: debug "^4.1.0" detect-node "^2.0.4" hpack.js "^2.1.6" obuf "^1.1.2" readable-stream "^3.0.6" wbuf "^1.7.3" spdy@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.1.tgz#6f12ed1c5db7ea4f24ebb8b89ba58c87c08257f2" integrity sha512-HeZS3PBdMA+sZSu0qwpCxl3DeALD5ASx8pAX0jZdKXSpPWbQ6SYGnlg3BBmYLx5LtiZrmkAZfErCm2oECBcioA== dependencies: debug "^4.1.0" handle-thing "^2.0.0" http-deceiver "^1.2.7" select-hose "^2.0.0" spdy-transport "^3.0.0" split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow "^3.0.0" sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" ssri@^6.0.1: version "6.0.2" resolved 
"https://registry.yarnpkg.com/ssri/-/ssri-6.0.2.tgz#157939134f20464e7301ddba3e90ffa8f7728ac5" integrity sha512-cepbSq/neFK7xB6A50KHN0xHDotYzq58wWCa5LeWqnPrHG8GzfEjO/4O8kpmcGW+oaxkvhEJCWgbgNk4/ZV93Q== dependencies: figgy-pudding "^3.5.1" static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== dependencies: inherits "~2.0.1" readable-stream "^2.0.2" stream-each@^1.1.0: version "1.2.3" resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== dependencies: end-of-stream "^1.1.0" stream-shift "^1.0.0" stream-http@^2.7.2: version "2.8.3" resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== dependencies: builtin-status-codes "^3.0.0" inherits "^2.0.1" readable-stream "^2.3.6" to-arraybuffer "^1.0.0" xtend "^4.0.0" stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= streamroller@^1.0.6: 
version "1.0.6" resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-1.0.6.tgz#8167d8496ed9f19f05ee4b158d9611321b8cacd9" integrity sha512-3QC47Mhv3/aZNFpDDVO44qQb9gwB9QggMEE0sQmkTAwBVYdBRWISdsywlkfm5II1Q5y/pmrHflti/IgmIzdDBg== dependencies: async "^2.6.2" date-format "^2.0.0" debug "^3.2.6" fs-extra "^7.0.1" lodash "^4.17.14" strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" "string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== dependencies: emoji-regex "^7.0.1" is-fullwidth-code-point "^2.0.0" strip-ansi "^5.1.0" string.prototype.trimleft@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string.prototype.trimright@^2.1.0: 
version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58" integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: ansi-regex "^4.1.0" strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" 
strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= dependencies: get-stdin "^4.0.1" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= style-loader@^0.23.1: version "0.23.1" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.23.1.tgz#cb9154606f3e771ab6c4ab637026a1049174d925" integrity sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" supports-color@6.1.0, supports-color@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== dependencies: has-flag "^3.0.0" supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.0: version "3.2.3" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity 
sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" tapable@^1.0.0, tapable@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tar@^4: version "4.4.19" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3" integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA== dependencies: chownr "^1.1.4" fs-minipass "^1.2.7" minipass "^2.9.0" minizlib "^1.3.3" mkdirp "^0.5.5" safe-buffer "^5.2.1" yallist "^3.1.1" terser-webpack-plugin@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.1.tgz#61b18e40eaee5be97e771cdbb10ed1280888c2b4" integrity sha512-ZXmmfiwtCLfz8WKZyYUuuHf3dMYEjg8NrjHMb0JqHVHVOSkzp3cW2/XG1fP3tRhqEqSzMwzzRQGtAPbs4Cncxg== dependencies: cacache "^12.0.2" find-cache-dir "^2.1.0" is-wsl "^1.1.0" schema-utils "^1.0.0" serialize-javascript "^1.7.0" source-map "^0.6.1" terser "^4.1.2" webpack-sources "^1.4.0" worker-farm "^1.7.0" terser@^4.1.2: version "4.4.0" resolved "https://registry.yarnpkg.com/terser/-/terser-4.4.0.tgz#22c46b4817cf4c9565434bfe6ad47336af259ac3" integrity sha512-oDG16n2WKm27JO8h4y/w3iqBGAOSCtq7k8dRmrn4Wf9NouL0b2WpMHGChFGZq4nFAQy1FsNJrVQHfurXOSTmOA== dependencies: commander "^2.20.0" source-map "~0.6.1" source-map-support "~0.5.12" through2@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" xtend "~4.0.1" thunky@^1.0.2: version "1.1.0" resolved 
"https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== timers-browserify@^2.0.4: version "2.0.11" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== dependencies: setimmediate "^1.0.4" tmp@0.0.33, tmp@0.0.x: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" to-array@0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" integrity sha1-F+bBH3PdTz10zaek/zI46a2b+JA= to-arraybuffer@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" resolved 
"https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= dependencies: is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" extend-shallow "^3.0.2" regex-not "^1.0.2" safe-regex "^1.1.0" toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== token-stream@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-1.0.0.tgz#cc200eab2613f4166d27ff9afc7ca56d49df6eb4" integrity sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ= toposort@^1.0.0: version "1.0.7" resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= tough-cookie@~2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: psl "^1.1.24" punycode "^1.4.1" trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" 
integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= tslib@^1.9.0: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" type-is@~1.6.17, type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" type@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3" integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow== typedarray@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= uglify-js@3.4.x: version "3.4.10" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== dependencies: commander "~2.19.0" source-map "~0.6.1" uglify-js@^3.1.4: version "3.13.5" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.13.5.tgz#5d71d6dbba64cf441f32929b1efce7365bb4f113" integrity sha512-xtB8yEqIkn7zmOyS2zUNBsYCBRhDkvlNxMMY2smuJ/qA8NCHeQvKCF3i9Z4k8FJH4+PJvZRtMrPynfZ75+CSZw== ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== unicode-canonical-property-names-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== unicode-match-property-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== dependencies: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" 
unicode-match-property-value-ecmascript@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" set-value "^2.0.1" uniq@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= unique-filename@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== dependencies: unique-slug "^2.0.0" unique-slug@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== dependencies: imurmurhash "^0.1.4" universalify@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity 
sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= dependencies: has-value "^0.3.1" isobject "^3.0.0" upath@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== upper-case@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= uri-js@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= url-join@^2.0.2: version "2.0.5" resolved "https://registry.yarnpkg.com/url-join/-/url-join-2.0.5.tgz#5af22f18c052a000a48d7b82c5e9c2e2feeda728" integrity sha1-WvIvGMBSoACkjXuCxenC4v7tpyg= url-parse@^1.4.3: version "1.5.3" resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.3.tgz#71c1303d38fb6639ade183c2992c8cc0686df862" integrity sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" url@^0.11.0: version "0.11.0" resolved 
"https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= dependencies: punycode "1.3.2" querystring "0.2.0" use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== useragent@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" integrity sha512-4AoH4pxuSvHCjqLO04sU6U/uE65BYza8l/KKBS0b0hnUPWi+cQ2BpeTEwejCSx9SPV5/U03nniDTrWx5NrmKdw== dependencies: lru-cache "4.1.x" tmp "0.0.x" util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util.promisify@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== dependencies: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" util@0.10.3: version "0.10.3" resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= dependencies: inherits "2.0.1" util@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== dependencies: inherits "2.0.3" utila@^0.4.0, utila@~0.4: version "0.4.0" resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= utils-merge@1.0.1: version 
"1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.3.tgz#4568f0216e78760ee1dbf3a4d2cf53e224112866" integrity sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ== v8-compile-cache@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" vm-browserify@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== void-elements@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= 
void-elements@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-3.1.0.tgz#614f7fbf8d801f0bb5f0661f5b2f5785750e4f09" integrity sha1-YU9/v42AHwu18GYfWy9XhXUOTwk= watchpack@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== dependencies: chokidar "^2.0.2" graceful-fs "^4.1.2" neo-async "^2.5.0" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== dependencies: minimalistic-assert "^1.0.0" webpack-cli@^3.3.1: version "3.3.10" resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.10.tgz#17b279267e9b4fb549023fae170da8e6e766da13" integrity sha512-u1dgND9+MXaEt74sJR4PR7qkPxXUSQ0RXYq8x1L6Jg1MYVEmGPrH6Ah6C4arD4r0J1P5HKjRqpab36k0eIzPqg== dependencies: chalk "2.4.2" cross-spawn "6.0.5" enhanced-resolve "4.1.0" findup-sync "3.0.0" global-modules "2.0.0" import-local "2.0.0" interpret "1.2.0" loader-utils "1.2.3" supports-color "6.1.0" v8-compile-cache "2.0.3" yargs "13.2.4" webpack-dev-middleware@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-2.0.6.tgz#a51692801e8310844ef3e3790e1eacfe52326fd4" integrity sha512-tj5LLD9r4tDuRIDa5Mu9lnY2qBBehAITv6A9irqXhw/HQquZgTx3BCd57zYbU2gMDnncA49ufK2qVQSbaKJwOw== dependencies: loud-rejection "^1.6.0" memory-fs "~0.4.1" mime "^2.1.0" path-is-absolute "^1.0.0" range-parser "^1.0.3" url-join "^2.0.2" webpack-log "^1.0.1" webpack-dev-middleware@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3" integrity 
sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw== dependencies: memory-fs "^0.4.1" mime "^2.4.4" mkdirp "^0.5.1" range-parser "^1.2.1" webpack-log "^2.0.0" webpack-dev-server@^3.3.1: version "3.9.0" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.9.0.tgz#27c3b5d0f6b6677c4304465ac817623c8b27b89c" integrity sha512-E6uQ4kRrTX9URN9s/lIbqTAztwEPdvzVrcmHE8EQ9YnuT9J8Es5Wrd8n9BKg1a0oZ5EgEke/EQFgUsp18dSTBw== dependencies: ansi-html "0.0.7" bonjour "^3.5.0" chokidar "^2.1.8" compression "^1.7.4" connect-history-api-fallback "^1.6.0" debug "^4.1.1" del "^4.1.1" express "^4.17.1" html-entities "^1.2.1" http-proxy-middleware "0.19.1" import-local "^2.0.0" internal-ip "^4.3.0" ip "^1.1.5" is-absolute-url "^3.0.3" killable "^1.0.1" loglevel "^1.6.4" opn "^5.5.0" p-retry "^3.0.1" portfinder "^1.0.25" schema-utils "^1.0.0" selfsigned "^1.10.7" semver "^6.3.0" serve-index "^1.9.1" sockjs "0.3.19" sockjs-client "1.4.0" spdy "^4.0.1" strip-ansi "^3.0.1" supports-color "^6.1.0" url "^0.11.0" webpack-dev-middleware "^3.7.2" webpack-log "^2.0.0" ws "^6.2.1" yargs "12.0.5" webpack-fix-style-only-entries@^0.2.1: version "0.2.2" resolved "https://registry.yarnpkg.com/webpack-fix-style-only-entries/-/webpack-fix-style-only-entries-0.2.2.tgz#60331c608b944ac821a3b6f2ae491a6d79ba40eb" integrity sha512-0wcrLCnISP8htV0NP1mT0e2mHhfjGQdNk82s8BTLVvF7rXuoJuUUzP3aCUXnRqlLgmTBx5WgqPhnczjatl+iSQ== webpack-log@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-1.2.0.tgz#a4b34cda6b22b518dbb0ab32e567962d5c72a43d" integrity sha512-U9AnICnu50HXtiqiDxuli5gLB5PGBo7VvcHx36jRZHwK4vzOYLbImqT4lwWwoMHdQWwEKw736fCHEekokTEKHA== dependencies: chalk "^2.1.0" log-symbols "^2.1.0" loglevelnext "^1.0.1" uuid "^3.1.0" webpack-log@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" integrity 
sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== dependencies: ansi-colors "^3.0.0" uuid "^3.3.2" webpack-shell-plugin@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/webpack-shell-plugin/-/webpack-shell-plugin-0.5.0.tgz#29b8a1d80ddeae0ddb10e729667f728653c2c742" integrity sha1-Kbih2A3erg3bEOcpZn9yhlPCx0I= webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== dependencies: source-list-map "^2.0.0" source-map "~0.6.1" webpack@^4.30.0: version "4.41.2" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.2.tgz#c34ec76daa3a8468c9b61a50336d8e3303dce74e" integrity sha512-Zhw69edTGfbz9/8JJoyRQ/pq8FYUoY0diOXqW0T6yhgdhCv6wr0hra5DwwWexNRns2Z2+gsnrNcbe9hbGBgk/A== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/wasm-edit" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" acorn "^6.2.1" ajv "^6.10.2" ajv-keywords "^3.4.1" chrome-trace-event "^1.0.2" enhanced-resolve "^4.1.0" eslint-scope "^4.0.3" json-parse-better-errors "^1.0.2" loader-runner "^2.4.0" loader-utils "^1.2.3" memory-fs "^0.4.1" micromatch "^3.1.10" mkdirp "^0.5.1" neo-async "^2.6.1" node-libs-browser "^2.2.1" schema-utils "^1.0.0" tapable "^1.1.3" terser-webpack-plugin "^1.4.1" watchpack "^1.6.0" webpack-sources "^1.4.1" websocket-driver@>=0.5.1: version "0.7.3" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.3.tgz#a2d4e0d4f4f116f1e6297eba58b05d430100e9f9" integrity sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg== dependencies: http-parser-js ">=0.4.0 <0.4.11" safe-buffer ">=5.1.0" websocket-extensions ">=0.1.1" websocket-extensions@>=0.1.1: version 
"0.1.4" resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@^1.1.1, which@^1.2.1, which@^1.2.14, which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" with@^7.0.0: version "7.0.2" resolved "https://registry.yarnpkg.com/with/-/with-7.0.2.tgz#ccee3ad542d25538a7a7a80aad212b9828495bac" integrity sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w== dependencies: "@babel/parser" "^7.9.6" "@babel/types" "^7.9.6" assert-never "^1.2.1" babel-walk "3.0.0-canary-5" word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= wordwrap@~0.0.2: version "0.0.3" resolved 
"https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= worker-farm@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== dependencies: errno "~0.1.7" wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== dependencies: ansi-styles "^3.2.0" string-width "^3.0.0" strip-ansi "^5.0.0" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= ws@^6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== dependencies: async-limiter "~1.0.0" ws@~3.3.1: version "3.3.3" resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA== dependencies: async-limiter "~1.0.0" safe-buffer "~5.1.0" ultron "~1.1.0" xmlhttprequest-ssl@~1.5.4: version "1.5.5" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e" integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4= xtend@^4.0.0, 
xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== "y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.2, yallist@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^11.1.1: version "11.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" integrity sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs-parser@^13.1.0: version "13.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs@12.0.5: version "12.0.5" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== dependencies: cliui "^4.0.0" decamelize "^1.2.0" find-up "^3.0.0" get-caller-file "^1.0.1" os-locale "^3.0.0" require-directory "^2.1.1" 
require-main-filename "^1.0.1" set-blocking "^2.0.0" string-width "^2.0.0" which-module "^2.0.0" y18n "^3.2.1 || ^4.0.0" yargs-parser "^11.1.1" yargs@13.2.4: version "13.2.4" resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== dependencies: cliui "^5.0.0" find-up "^3.0.0" get-caller-file "^2.0.1" os-locale "^3.1.0" require-directory "^2.1.1" require-main-filename "^2.0.0" set-blocking "^2.0.0" string-width "^3.0.0" which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^13.1.0" yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk= buildbot-3.4.0/www/console_view/000077500000000000000000000000001413250514000166355ustar00rootroot00000000000000buildbot-3.4.0/www/console_view/buildbot_console_view/000077500000000000000000000000001413250514000232155ustar00rootroot00000000000000buildbot-3.4.0/www/console_view/buildbot_console_view/__init__.py000066400000000000000000000015341413250514000253310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.www.plugin import Application # create the interface for the setuptools entry point ep = Application(__name__, "Buildbot Console View UI") buildbot-3.4.0/www/console_view/karma.conf.js000066400000000000000000000003641413250514000212150ustar00rootroot00000000000000const common = require('buildbot-build-common'); module.exports = function karmaConfig (config) { common.createTemplateKarmaConfig(config, { testRoot: 'src/tests.webpack.js', webpack: require('./webpack.config') }); }; buildbot-3.4.0/www/console_view/package.json000066400000000000000000000020071413250514000211220ustar00rootroot00000000000000{ "name": "buildbot-console-view", "plugin_name": "console_view", "private": true, "main": "buildbot_console_view/static/scripts.js", "style": "buildbot_console_view/static/styles.js", "scripts": { "build": "rimraf buildbot_console_view/static && webpack --bail --progress --profile --env prod", "build-dev": "rimraf buildbot_console_view/static && webpack --bail --progress --profile --env dev", "dev": "webpack --bail --progress --profile --watch --env dev", "test": "karma start", "test-watch": "karma start --auto-watch --no-single-run" }, "devDependencies": { "angular-mocks": "^1.7.9", "buildbot-build-common": "link:../build_common", "lodash": "^4.17.11", "rimraf": "^2.6.3" }, "dependencies": { "@uirouter/angularjs": "^1.0.15", "angular": "^1.7.9", "angular-animate": "^1.7.9", "buildbot-data-js": "link:../data_module", "guanlecoja-ui": "link:../guanlecoja-ui" } } buildbot-3.4.0/www/console_view/postcss.config.js000066400000000000000000000001711413250514000221340ustar00rootroot00000000000000module.exports = { plugins: { autoprefixer: { browsers: ['last 2 versions'] }, }, }; 
buildbot-3.4.0/www/console_view/setup.cfg000066400000000000000000000000001413250514000204440ustar00rootroot00000000000000buildbot-3.4.0/www/console_view/setup.py000077500000000000000000000030101413250514000203440ustar00rootroot00000000000000#!/usr/bin/env python # # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members try: from buildbot_pkg import setup_www_plugin except ImportError: import sys print('Please install buildbot_pkg module in order to install that ' 'package, or use the pre-build .whl modules available on pypi', file=sys.stderr) sys.exit(1) setup_www_plugin( name='buildbot-console-view', description='Buildbot Console View plugin', author=u'Pierre Tardy', author_email=u'tardyp@gmail.com', url='http://buildbot.net/', packages=['buildbot_console_view'], package_data={ '': [ 'VERSION', 'static/*' ] }, entry_points=""" [buildbot.www] console_view = buildbot_console_view:ep """, classifiers=[ 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)' ], ) 
buildbot-3.4.0/www/console_view/src/000077500000000000000000000000001413250514000174245ustar00rootroot00000000000000buildbot-3.4.0/www/console_view/src/module/000077500000000000000000000000001413250514000207115ustar00rootroot00000000000000buildbot-3.4.0/www/console_view/src/module/builders.fixture.json000066400000000000000000000124201413250514000251010ustar00rootroot00000000000000{"builders":[{"builderid":1,"description":null,"masterids":[1],"name":"buildbot-job","tags":["job","buildbot"]},{"builderid":2,"description":null,"masterids":[1],"name":"buildbot","tags":["buildbot","trunk"]},{"builderid":3,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:pylint TWISTED:latest python:2.7","tags":["buildbot","SQLALCHEMY:latest","TESTS:pylint","TWISTED:latest","python:2.7"]},{"builderid":4,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:flake8 TWISTED:latest python:2.7","tags":["buildbot","SQLALCHEMY:latest","TWISTED:latest","python:2.7","TESTS:flake8"]},{"builderid":5,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:isort TWISTED:latest python:2.7","tags":["buildbot","SQLALCHEMY:latest","TWISTED:latest","python:2.7","TESTS:isort"]},{"builderid":6,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:docs TWISTED:latest python:2.7","tags":["buildbot","SQLALCHEMY:latest","TWISTED:latest","python:2.7","TESTS:docs"]},{"builderid":7,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:coverage TWISTED:latest python:2.7","tags":["buildbot","SQLALCHEMY:latest","TWISTED:latest","python:2.7","TESTS:coverage"]},{"builderid":8,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:js TWISTED:latest python:2.7","tags":["buildbot","SQLALCHEMY:latest","TWISTED:latest","python:2.7","TESTS:js"]},{"builderid":9,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:smokes TWISTED:latest 
python:2.7","tags":["buildbot","SQLALCHEMY:latest","TWISTED:latest","python:2.7","TESTS:smokes"]},{"builderid":10,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial TWISTED:14.0.2 python:2.7","tags":["buildbot","SQLALCHEMY:latest","python:2.7","TESTS:trial","TWISTED:14.0.2"]},{"builderid":11,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial TWISTED:15.4.0 python:2.7","tags":["buildbot","SQLALCHEMY:latest","python:2.7","TESTS:trial","TWISTED:15.4.0"]},{"builderid":12,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial TWISTED:latest python:2.7","tags":["buildbot","SQLALCHEMY:latest","TWISTED:latest","python:2.7","TESTS:trial"]},{"builderid":13,"description":null,"masterids":[],"name":"buildbot DB_TYPE:sqlite SQLALCHEMY:latest TESTS:trial TWISTED:latest python:2.7","tags":["buildbot","SQLALCHEMY:latest","TWISTED:latest","python:2.7","TESTS:trial","DB_TYPE:sqlite"]},{"builderid":14,"description":null,"masterids":[],"name":"buildbot DB_TYPE:mysql SQLALCHEMY:latest TESTS:trial TWISTED:latest python:2.7","tags":["buildbot","SQLALCHEMY:latest","TWISTED:latest","python:2.7","TESTS:trial","DB_TYPE:mysql"]},{"builderid":15,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:0.8.0 TESTS:trial TWISTED:15.5.0 python:2.7","tags":["buildbot","python:2.7","TESTS:trial","SQLALCHEMY:0.8.0","TWISTED:15.5.0"]},{"builderid":16,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial TWISTED:15.5.0 python:2.7","tags":["buildbot","SQLALCHEMY:latest","python:2.7","TESTS:trial","TWISTED:15.5.0"]},{"builderid":17,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial_worker TWISTED:10.2.0 python:2.7","tags":["buildbot","SQLALCHEMY:latest","python:2.7","TESTS:trial_worker","TWISTED:10.2.0"]},{"builderid":18,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial_worker TWISTED:11.1.0 
python:2.7","tags":["buildbot","SQLALCHEMY:latest","python:2.7","TESTS:trial_worker","TWISTED:11.1.0"]},{"builderid":19,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial_worker TWISTED:12.2.0 python:2.7","tags":["buildbot","SQLALCHEMY:latest","python:2.7","TESTS:trial_worker","TWISTED:12.2.0"]},{"builderid":20,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial_worker TWISTED:13.2.0 python:2.7","tags":["buildbot","SQLALCHEMY:latest","python:2.7","TESTS:trial_worker","TWISTED:13.2.0"]},{"builderid":21,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial_worker TWISTED:14.0.2 python:2.6","tags":["buildbot","SQLALCHEMY:latest","TWISTED:14.0.2","TESTS:trial_worker","python:2.6"]},{"builderid":22,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial_worker TWISTED:15.4.0 python:2.6","tags":["buildbot","SQLALCHEMY:latest","TWISTED:15.4.0","TESTS:trial_worker","python:2.6"]},{"builderid":23,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:coverage TWISTED:trunk python:3.5","tags":["buildbot","SQLALCHEMY:latest","TESTS:coverage","TWISTED:trunk","python:3.5"]},{"builderid":24,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:flake8 TWISTED:trunk python:3.5","tags":["buildbot","SQLALCHEMY:latest","TESTS:flake8","TWISTED:trunk","python:3.5"]},{"builderid":25,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:smokes TWISTED:latest python:3.5","tags":["buildbot","SQLALCHEMY:latest","TWISTED:latest","TESTS:smokes","python:3.5"]},{"builderid":26,"description":null,"masterids":[],"name":"buildbot SQLALCHEMY:latest TESTS:trial TWISTED:trunk python:3.6","tags":["buildbot","SQLALCHEMY:latest","TESTS:trial","TWISTED:trunk","python:3.6"]}],"meta":{"total":26}} 
buildbot-3.4.0/www/console_view/src/module/console.tpl.jade000066400000000000000000000050541413250514000240020ustar00rootroot00000000000000.console .load-indicator(ng-hide='c.builds.$resolved && c.changes.$resolved && c.buildrequests.$resolved && c.buildsets.$resolved') .spinner i.fa.fa-circle-o-notch.fa-spin.fa-2x p loading div(ng-show="c.changes.$resolved && c.filtered_changes.length==0") p No changes. Console view needs changesource to be setup, and a(href="#changes") changes | to be in the system. table.table.table-striped.table-bordered(ng-hide="c.filtered_changes.length==0" ng-class="{'table-fixedwidth': c.isBigTable()}") tr.first-row th.row-header(ng-style="{'width': c.getRowHeaderWidth()}") i.fa.fa-plus-circle.pull-left(ng-click='c.openAll()' uib-tooltip='Open information for all changes' uib-tooltip-placement='right') i.fa.fa-minus-circle.pull-left(ng-click='c.closeAll()' uib-tooltip='Close information for all changes' uib-tooltip-placement='right') th.column(ng-repeat="builder in c.builders") span.builder(ng-style="{'margin-top': c.getColHeaderHeight()}") a(ng-href='#/builders/{{ builder.builderid }}' ng-bind='builder.name') tr.tag_row(ng-repeat="tag_line in c.tag_lines") td.row-header td(ng-repeat="tag in tag_line" colspan="{{tag.colspan}}") span(uib-tooltip='{{ tag.tag }}' ng-style='{width: tag.colspan*50}') {{tag.tag}} tr(ng-repeat="change in c.filtered_changes | orderBy: ['-when_timestamp'] track by change.changeid") td changedetails(change="change") td.column(ng-repeat="builder in change.builders" title="{{builder.name}}") span(ng-repeat="build in builder.builds | orderBy: ['number']") script(type="text/ng-template" id="buildsummarytooltip") buildsummary(buildid="build.buildid" type="tooltip") span.badge-status(ng-if='build.buildid' uib-tooltip-template="'buildsummarytooltip'" tooltip-class="buildsummarytooltipstyle" tooltip-placement="auto left-bottom" tooltip-popup-delay="400" tooltip-popup-close-delay="400" ng-class="c.results2class(build, 
'pulse')" ng-click='c.selectBuild(build)') | {{ build.number }} buildbot-3.4.0/www/console_view/src/module/main.module.js000066400000000000000000000433751413250514000234730ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ import 'angular-animate'; import '@uirouter/angularjs'; import 'guanlecoja-ui'; import 'buildbot-data-js'; class ConsoleState { constructor($stateProvider, glMenuServiceProvider, bbSettingsServiceProvider) { // Name of the state const name = 'console'; // Menu configuration glMenuServiceProvider.addGroup({ name, caption: 'Console View', icon: 'exclamation-circle', order: 5 }); // Configuration const cfg = { group: name, caption: 'Console View' }; // Register new state const state = { controller: `${name}Controller`, controllerAs: "c", template: require('./console.tpl.jade'), name, url: `/${name}`, data: cfg }; $stateProvider.state(state); bbSettingsServiceProvider.addSettingsGroup({ name: 'Console', caption: 'Console related settings', items: [{ type: 'integer', name: 'buildLimit', caption: 'Number of builds to fetch', default_value: 200 } , { type: 'integer', name: 'changeLimit', caption: 'Number of changes to fetch', default_value: 30 } ]}); } } class Console { constructor($scope, $q, $window, dataService, bbSettingsService, resultsService, $uibModal, $timeout) { this.onChange = this.onChange.bind(this); this._onChange = this._onChange.bind(this); this.matchBuildWithChange = this.matchBuildWithChange.bind(this); this.makeFakeChange = this.makeFakeChange.bind(this); this.$scope = $scope; this.$window = $window; this.$uibModal = $uibModal; this.$timeout = $timeout; angular.extend(this, resultsService); const settings = 
bbSettingsService.getSettingsGroup('Console'); this.buildLimit = settings.buildLimit.value; this.changeLimit = settings.changeLimit.value; this.dataAccessor = dataService.open().closeOnDestroy(this.$scope); this._infoIsExpanded = {}; this.$scope.all_builders = (this.all_builders = this.dataAccessor.getBuilders()); this.$scope.builders = (this.builders = []); if (typeof Intl !== 'undefined' && Intl !== null) { const collator = new Intl.Collator(undefined, {numeric: true, sensitivity: 'base'}); this.strcompare = collator.compare; } else { this.strcompare = function(a, b) { if (a < b) { return -1; } if (a === b) { return 0; } return 1; }; } this.$scope.builds = (this.builds = this.dataAccessor.getBuilds({ property: ["got_revision"], limit: this.buildLimit, order: '-started_at' })); this.changes = this.dataAccessor.getChanges({limit: this.changeLimit, order: '-changeid'}); this.buildrequests = this.dataAccessor.getBuildrequests({limit: this.buildLimit, order: '-submitted_at'}); this.buildsets = this.dataAccessor.getBuildsets({limit: this.buildLimit, order: '-submitted_at'}); this.builds.onChange = this.onChange; this.changes.onChange = this.onChange; this.buildrequests.onChange = this.onChange; this.buildsets.onChange = this.onChange; } onChange(s) { // if there is no data, no need to try and build something. 
if ((this.builds.length === 0) || (this.all_builders.length === 0) || !this.changes.$resolved || (this.buildsets.length === 0) || (this.buildrequests === 0)) { return; } if ((this.onchange_debounce == null)) { this.onchange_debounce = this.$timeout(this._onChange, 100); } } _onChange() { let build, change; this.onchange_debounce = undefined; // we only display builders who actually have builds for (build of Array.from(this.builds)) { this.all_builders.get(build.builderid).hasBuild = true; } this.sortBuildersByTags(this.all_builders); if (this.changesBySSID == null) { this.changesBySSID = {}; } if (this.changesByRevision == null) { this.changesByRevision = {}; } for (change of Array.from(this.changes)) { this.changesBySSID[change.sourcestamp.ssid] = change; this.changesByRevision[change.revision] = change; this.populateChange(change); } for (build of Array.from(this.builds)) { this.matchBuildWithChange(build); } this.filtered_changes = []; for (let ssid in this.changesBySSID) { change = this.changesBySSID[ssid]; if (change.comments) { change.subject = change.comments.split("\n")[0]; } for (let builder of Array.from(change.builders)) { if (builder.builds.length > 0) { this.filtered_changes.push(change); break; } } } } /* * Sort builders by tags * Buildbot eight has the category option, but it was only limited to one category per builder, * which make it easy to sort by category * Here, we have multiple tags per builder, we need to try to group builders with same tags together * The algorithm is rather twisted. It is a first try at the concept of grouping builders by tags.. 
*/ sortBuildersByTags(all_builders) { // first we only want builders with builds let tag; const builders_with_builds = []; let builderids_with_builds = ""; for (let builder of Array.from(all_builders)) { if (builder.hasBuild) { builders_with_builds.push(builder); builderids_with_builds += `.${builder.builderid}`; } } if (builderids_with_builds === this.last_builderids_with_builds) { // don't recalculate if it hasn't changed! return; } // we call recursive function, which finds non-overlapping groups let tag_line = this._sortBuildersByTags(builders_with_builds); // we get a tree of builders grouped by tags // we now need to flatten the tree, in order to build several lines of tags // (each line is representing a depth in the tag tree) // we walk the tree left to right and build the list of builders in the tree order, and the tag_lines // in the tree, there are groups of remaining builders, which could not be grouped together, // those have the empty tag '' const tag_lines = []; let sorted_builders = []; const set_tag_line = function(depth, tag, colspan) { // we build the tag lines by using a sparse array let _tag_line = tag_lines[depth]; if ((_tag_line == null)) { // initialize the sparse array _tag_line = (tag_lines[depth] = []); } else { // if we were already initialized, look at the last tag if this is the same // we merge the two entries const last_tag = _tag_line[_tag_line.length - 1]; if (last_tag.tag === tag) { last_tag.colspan += colspan; return; } } return _tag_line.push({tag, colspan}); }; const self = this; // recursive tree walking var walk_tree = function(tag, depth) { set_tag_line(depth, tag.tag, tag.builders.length); if ((tag.tag_line == null) || (tag.tag_line.length === 0)) { // this is the leaf of the tree, sort by buildername, and add them to the // list of sorted builders tag.builders.sort((a, b) => self.strcompare(a.name, b.name)); sorted_builders = sorted_builders.concat(tag.builders); for (let i = 1; i <= 100; i++) { // set the remaining depth 
of the tree to the same colspan // (we hardcode the maximum depth for now :/ ) set_tag_line(depth + i, '', tag.builders.length); } return; } return Array.from(tag.tag_line).map((_tag) => walk_tree(_tag, depth + 1)); }; for (tag of Array.from(tag_line)) { walk_tree(tag, 0); } this.builders = sorted_builders; this.tag_lines = []; // make a new array to avoid it to be sparse, and to remove lines filled with null tags for (tag_line of Array.from(tag_lines)) { if (!((tag_line.length === 1) && (tag_line[0].tag === ""))) { this.tag_lines.push(tag_line); } } return this.last_builderids_with_builds = builderids_with_builds; } /* * recursive function which sorts the builders by tags * call recursively with groups of builders smaller and smaller */ _sortBuildersByTags(all_builders) { // first find out how many builders there is by tags in that group let builder, builders, tag; const builders_by_tags = {}; for (builder of Array.from(all_builders)) { if (builder.tags != null) { for (tag of Array.from(builder.tags)) { if ((builders_by_tags[tag] == null)) { builders_by_tags[tag] = []; } builders_by_tags[tag].push(builder); } } } const tags = []; for (tag in builders_by_tags) { // we don't want the tags that are on all the builders builders = builders_by_tags[tag]; if (builders.length < all_builders.length) { tags.push({tag, builders}); } } // sort the tags to first look at tags with the larger number of builders // @FIXME maybe this is not the best method to find the best groups tags.sort((a, b) => b.builders.length - a.builders.length); const tag_line = []; const chosen_builderids = {}; // pick the tags one by one, by making sure we make non-overalaping groups for (tag of Array.from(tags)) { let excluded = false; for (builder of Array.from(tag.builders)) { if (chosen_builderids.hasOwnProperty(builder.builderid)) { excluded = true; break; } } if (!excluded) { for (builder of Array.from(tag.builders)) { chosen_builderids[builder.builderid] = tag.tag; } tag_line.push(tag); } } // 
some builders do not have tags, we put them in another group const remaining_builders = []; for (builder of Array.from(all_builders)) { if (!chosen_builderids.hasOwnProperty(builder.builderid)) { remaining_builders.push(builder); } } if (remaining_builders.length) { tag_line.push({tag: "", builders: remaining_builders}); } // if there is more than one tag in this line, we need to recurse if (tag_line.length > 1) { for (tag of Array.from(tag_line)) { tag.tag_line = this._sortBuildersByTags(tag.builders); } } return tag_line; } /* * fill a change with a list of builders */ populateChange(change) { change.builders = []; change.buildersById = {}; for (let builder of Array.from(this.builders)) { builder = {builderid: builder.builderid, name: builder.name, builds: []}; change.builders.push(builder); change.buildersById[builder.builderid] = builder; } } /* * Match builds with a change */ matchBuildWithChange(build) { let change, revision; const buildrequest = this.buildrequests.get(build.buildrequestid); if ((buildrequest == null)) { return; } const buildset = this.buildsets.get(buildrequest.buildsetid); if ((buildset == null)) { return; } if ((buildset != null) && (buildset.sourcestamps != null)) { for (let sourcestamp of Array.from(buildset.sourcestamps)) { change = this.changesBySSID[sourcestamp.ssid]; if (change != null) { break; } } } if ((change == null) && ((build.properties != null ? build.properties.got_revision : undefined) != null)) { const rev = build.properties.got_revision[0]; // got_revision can be per codebase or just the revision string if (typeof(rev) === "string") { change = this.changesByRevision[rev]; if ((change == null)) { change = this.makeFakeChange("", rev, build.started_at); } } else { let codebase; for (codebase in rev) { revision = rev[codebase]; change = this.changesByRevision[revision]; if (change != null) { break; } } if ((change == null)) { revision = rev === {} ? 
"" : rev[rev.keys()[0]]; change = this.makeFakeChange(codebase, revision, build.started_at); } } } if ((change == null)) { revision = `unknown revision ${build.builderid}-${build.buildid}`; change = this.makeFakeChange("unknown codebase", revision, build.started_at); } return change.buildersById[build.builderid].builds.push(build); } makeFakeChange(codebase, revision, when_timestamp) { let change = this.changesBySSID[revision]; if ((change == null)) { change = { codebase, revision, changeid: revision, when_timestamp, author: `unknown author for ${revision}`, comments: revision + "\n\nFake comment for revision: No change for this revision, please setup a changesource in Buildbot" }; this.changesBySSID[revision] = change; this.populateChange(change); } return change; } /* * Open all change row information */ openAll() { return Array.from(this.filtered_changes).map((change) => (change.show_details = true)); } /* * Close all change row information */ closeAll() { return Array.from(this.filtered_changes).map((change) => (change.show_details = false)); } /* * Calculate row header (aka first column) width * depending if we display commit comment, we reserve more space */ getRowHeaderWidth() { if (this.hasExpanded()) { return 400; // magic value enough to hold 78 characters lines } else { return 200; } } /* * Calculate col header (aka first row) height * It depends on the length of the longest builder */ getColHeaderHeight() { let max_buildername = 0; for (let builder of Array.from(this.builders)) { max_buildername = Math.max(builder.name.length, max_buildername); } return Math.max(100, max_buildername * 3); } /* * * Determine if we use a 100% width table or if we allow horizontal scrollbar * depending on number of builders, and size of window, we need a fixed column size or a 100% width table * */ isBigTable() { const padding = this.getRowHeaderWidth(); if (((this.$window.innerWidth - padding) / this.builders.length) < 40) { return true; } return false; } /* * * do we 
have at least one change expanded? * */ hasExpanded() { for (let change of Array.from(this.changes)) { if (this.infoIsExpanded(change)) { return true; } } return false; } /* * * display build details * */ selectBuild(build) { let modal; return modal = this.$uibModal.open({ template: require('./view/modal/modal.tpl.jade'), controller: 'consoleModalController as modal', windowClass: 'modal-big', resolve: { selectedBuild() { return build; } } }); } /* * * toggle display of additional info for that change * */ toggleInfo(change) { return change.show_details = !change.show_details; } infoIsExpanded(change) { return change.show_details; } } angular.module('console_view', [ 'ui.router', 'ui.bootstrap', 'ngAnimate', 'guanlecoja.ui', 'bbData']) .config(['$stateProvider', 'glMenuServiceProvider', 'bbSettingsServiceProvider', ConsoleState]) .controller('consoleController', ['$scope', '$q', '$window', 'dataService', 'bbSettingsService', 'resultsService', '$uibModal', '$timeout', Console]); require('./view/modal/modal.controller.js'); buildbot-3.4.0/www/console_view/src/module/main.module.spec.js000066400000000000000000000145221413250514000244140ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ beforeEach(function() { angular.mock.module(function($provide) { $provide.service('$uibModal', function() { return {open() {}}; }); }); angular.mock.module(function($provide) { $provide.service('resultsService', function() { return {results2class() {}}; }); }); // Mock bbSettingsProvider angular.mock.module(function($provide) { $provide.provider('bbSettingsService', (function() { let group = undefined; const Cls = class { static initClass() { group = {}; } addSettingsGroup(g) { return g.items.map(function(i) { if 
(i.name === 'lazy_limit_waterfall') { i.default_value = 2; } return group[i.name] = {value: i.default_value}; }); } $get() { return { getSettingsGroup() { return group; }, save() {} }; } }; Cls.initClass(); return Cls; })() ); }); angular.mock.module('console_view'); }); describe('Console view', function() { let $state = null; beforeEach(inject($injector => $state = $injector.get('$state')) ); it('should register a new state with the correct configuration', function() { const name = 'console'; const state = $state.get().pop(); const { data } = state; expect(state.controller).toBe(`${name}Controller`); expect(state.controllerAs).toBe('c'); expect(state.url).toBe(`/${name}`); }); }); describe('Console view controller', function() { // Test data let $rootScope, $timeout, $window, dataService, scope; let builders = [{ builderid: 1, masterids: [1] } , { builderid: 2, masterids: [1] } , { builderid: 3, masterids: [1] } , { builderid: 4, masterids: [1] } ]; const builds1 = [{ buildid: 1, builderid: 1, buildrequestid: 1 } , { buildid: 2, builderid: 2, buildrequestid: 1 } , { buildid: 3, builderid: 4, buildrequestid: 2 } , { buildid: 4, builderid: 3, buildrequestid: 2 } ]; const builds2 = [{ buildid: 5, builderid: 2, buildrequestid: 3 } ]; const builds = builds1.concat(builds2); const buildrequests = [{ builderid: 1, buildrequestid: 1, buildsetid: 1 } , { builderid: 1, buildrequestid: 2, buildsetid: 1 } , { builderid: 1, buildrequestid: 3, buildsetid: 2 } ]; const buildsets = [{ bsid: 1, sourcestamps: [ {ssid: 1} ] } , { bsid: 2, sourcestamps: [ {ssid: 2} ] } ]; const changes = [{ changeid: 1, sourcestamp: { ssid: 1 } } ]; let createController = (scope = ($rootScope = (dataService = ($window = ($timeout = null))))); const injected = function($injector) { const $q = $injector.get('$q'); $rootScope = $injector.get('$rootScope'); $window = $injector.get('$window'); $timeout = $injector.get('$timeout'); dataService = $injector.get('dataService'); scope = $rootScope.$new(); 
dataService.when('builds', builds); dataService.when('builders', builders); dataService.when('changes', changes); dataService.when('buildrequests', buildrequests); dataService.when('buildsets', buildsets); // Create new controller using controller as syntax const $controller = $injector.get('$controller'); createController = () => $controller('consoleController as c', { // Inject controller dependencies $q, $window, $scope: scope } ) ; }; beforeEach(inject(injected)); it('should be defined', function() { createController(); expect(scope.c).toBeDefined(); }); it('should bind the builds, builders, changes, buildrequests and buildsets to scope', function() { createController(); $rootScope.$digest(); $timeout.flush(); expect(scope.c.builds).toBeDefined(); expect(scope.c.builds.length).toBe(builds.length); expect(scope.c.all_builders).toBeDefined(); expect(scope.c.all_builders.length).toBe(builders.length); expect(scope.c.changes).toBeDefined(); expect(scope.c.changes.length).toBe(changes.length); expect(scope.c.buildrequests).toBeDefined(); expect(scope.c.buildrequests.length).toBe(buildrequests.length); expect(scope.c.buildsets).toBeDefined(); expect(scope.c.buildsets.length).toBe(buildsets.length); }); it('should match the builds with the change', function() { createController(); $timeout.flush(); $rootScope.$digest(); $timeout.flush(); expect(scope.c.changes[0]).toBeDefined(); expect(scope.c.changes[0].builders).toBeDefined(); ({ builders } = scope.c.changes[0]); expect(builders[0].builds[0].buildid).toBe(1); expect(builders[1].builds[0].buildid).toBe(2); expect(builders[2].builds[0].buildid).toBe(4); expect(builders[3].builds[0].buildid).toBe(3); }); xit('should match sort the builders by tag groups', function() { createController(); const _builders = FIXTURES['builders.fixture.json'].builders; for (let builder of Array.from(_builders)) { builder.hasBuild = true; } scope.c.sortBuildersByTags(_builders); expect(_builders.length).toBe(scope.c.builders.length); 
expect(scope.c.tag_lines.length).toEqual(5); }); }); buildbot-3.4.0/www/console_view/src/module/view/000077500000000000000000000000001413250514000216635ustar00rootroot00000000000000buildbot-3.4.0/www/console_view/src/module/view/modal/000077500000000000000000000000001413250514000227575ustar00rootroot00000000000000buildbot-3.4.0/www/console_view/src/module/view/modal/modal.controller.js000066400000000000000000000012471413250514000265770ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class ConsoleModal { constructor($scope, $uibModalInstance, selectedBuild) { this.$uibModalInstance = $uibModalInstance; this.selectedBuild = selectedBuild; $scope.$on('$stateChangeStart', () => { return this.close(); }); } close() { return this.$uibModalInstance.close(); } } angular.module('console_view') .controller('consoleModalController', ['$scope', '$uibModalInstance', 'selectedBuild', ConsoleModal]); buildbot-3.4.0/www/console_view/src/module/view/modal/modal.style.less000066400000000000000000000001271413250514000261020ustar00rootroot00000000000000.modal-big { .modal-dialog { width: 80%; } .fa { cursor: pointer; } } buildbot-3.4.0/www/console_view/src/module/view/modal/modal.tpl.jade000066400000000000000000000004041413250514000254740ustar00rootroot00000000000000// Show build summary for the selected build in a modal window .modal-header i.fa.fa-times.pull-right(ng-click='modal.close()') h4.modal-title Build summary .modal-body buildsummary(ng-if='modal.selectedBuild' buildid='modal.selectedBuild.buildid')buildbot-3.4.0/www/console_view/src/styles/000077500000000000000000000000001413250514000207475ustar00rootroot00000000000000buildbot-3.4.0/www/console_view/src/styles/styles.less000066400000000000000000000026061413250514000231660ustar00rootroot00000000000000 @import "../module/**/*.less"; @column-width: 40px; 
.console { .table-fixedwidth { width: initial; } .load-indicator { width: 100%; height: 100%; z-index: 900; background-color: #ffffff; display: table; .spinner { display: table-cell; vertical-align: middle; text-align: center; p { font-weight: 300; margin-top: 10px; } } } .column { min-width: @column-width; max-width: @column-width; width: @column-width; } table { border: none; } .tag_row{ td { margin:0px; padding:0px; } span { position: relative; float: left; font-size: 10px; overflow: hidden; text-decoration: none; white-space: nowrap; } } tr.first-row { background-color: #fff!important; th { border: none; background-color: #fff !important; } .builder { position: relative; float: left; font-size: 12px; text-align: center; transform: rotate(-25deg) ; transform-origin: 0% 100%; text-decoration: none; white-space: nowrap; } } } buildbot-3.4.0/www/console_view/src/tests.webpack.js000066400000000000000000000004421413250514000225370ustar00rootroot00000000000000// This file is an entry point for angular tests // Avoids some weird issues when using webpack + angular. 
import 'angular'; import 'angular-mocks/angular-mocks'; import './module/main.module.js' const context = require.context('./', true, /\.spec.js$/); context.keys().forEach(context); buildbot-3.4.0/www/console_view/test/000077500000000000000000000000001413250514000176145ustar00rootroot00000000000000buildbot-3.4.0/www/console_view/test/main.js000066400000000000000000000002121413250514000210710ustar00rootroot00000000000000// app module is necessary for plugins, but only in the test environment angular.module("app", []).constant("config", {"url": "foourl"}); buildbot-3.4.0/www/console_view/webpack.config.js000066400000000000000000000013351413250514000220550ustar00rootroot00000000000000'use strict'; const common = require('buildbot-build-common'); const env = require('yargs').argv.env; const pkg = require('./package.json'); var event = process.env.npm_lifecycle_event; var isTest = event === 'test' || event === 'test-watch'; var isProd = env === 'prod'; module.exports = function() { return common.createTemplateWebpackConfig({ entry: { scripts: './src/module/main.module.js', styles: './src/styles/styles.less', }, libraryName: pkg.name, pluginName: pkg.plugin_name, dirname: __dirname, isTest: isTest, isProd: isProd, outputPath: __dirname + '/buildbot_console_view/static', extractStyles: true, }); }(); buildbot-3.4.0/www/console_view/yarn.lock000066400000000000000000011435731413250514000204760ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
# yarn lockfile v1 "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.7.4.tgz#37e864532200cb6b50ee9a4045f5f817840166ab" integrity sha512-+bYbx56j4nYBmpsWtnPUsKW3NdnYxbqyfrP2w9wILBuHzdfIKz9prieZK0DFPyIzkjYVUe4QkusGL07r5pXznQ== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helpers" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" convert-source-map "^1.7.0" debug "^4.1.0" json5 "^2.1.0" lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" "@babel/generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.7.4.tgz#db651e2840ca9aa66f327dcec1dc5f5fa9611369" integrity sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg== dependencies: "@babel/types" "^7.7.4" jsesc "^2.5.1" lodash "^4.17.13" source-map "^0.5.0" "@babel/helper-annotate-as-pure@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.4.tgz#bb3faf1e74b74bd547e867e48f551fa6b098b6ce" integrity sha512-2BQmQgECKzYKFPpiycoF9tlb5HA4lrVyAmLLVK177EcQAqjVLciUb2/R+n1boQ9y5ENV3uz2ZqiNw7QMBBw1Og== dependencies: "@babel/types" "^7.7.4" "@babel/helper-builder-binary-assignment-operator-visitor@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.4.tgz#5f73f2b28580e224b5b9bd03146a4015d6217f5f" integrity 
sha512-Biq/d/WtvfftWZ9Uf39hbPBYDUo986m5Bb4zhkeYDGUllF43D+nUe5M6Vuo6/8JDK/0YX/uBdeoQpyaNhNugZQ== dependencies: "@babel/helper-explode-assignable-expression" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-call-delegate@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.7.4.tgz#621b83e596722b50c0066f9dc37d3232e461b801" integrity sha512-8JH9/B7J7tCYJ2PpWVpw9JhPuEVHztagNVuQAFBVFYluRMlpG7F1CgKEgGeL6KFqcsIa92ZYVj6DSc0XwmN1ZA== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-create-regexp-features-plugin@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.4.tgz#6d5762359fd34f4da1500e4cff9955b5299aaf59" integrity sha512-Mt+jBKaxL0zfOIWrfQpnfYCN7/rS6GKx6CCCfuoqVVd+17R8zNDlzVYmIi9qyb2wOk002NsmSTDymkIygDUH7A== dependencies: "@babel/helper-regex" "^7.4.4" regexpu-core "^4.6.0" "@babel/helper-define-map@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz#2841bf92eb8bd9c906851546fe6b9d45e162f176" integrity sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-explode-assignable-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.4.tgz#fa700878e008d85dc51ba43e9fb835cddfe05c84" integrity sha512-2/SicuFrNSXsZNBxe5UGdLr+HZg+raWBLE9vC98bdYOKX/U6PY0mdGlYUJdtTDPSU0Lw0PNbKKDpwYHJLn2jLg== dependencies: "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-function-name@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz#ab6e041e7135d436d8f0a3eca15de5b67a341a2e" integrity sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ== dependencies: "@babel/helper-get-function-arity" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-get-function-arity@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz#cb46348d2f8808e632f0ab048172130e636005f0" integrity sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA== dependencies: "@babel/types" "^7.7.4" "@babel/helper-hoist-variables@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.4.tgz#612384e3d823fdfaaf9fce31550fe5d4db0f3d12" integrity sha512-wQC4xyvc1Jo/FnLirL6CEgPgPCa8M74tOdjWpRhQYapz5JC7u3NYU1zCVoVAGCE3EaIP9T1A3iW0WLJ+reZlpQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-member-expression-to-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.4.tgz#356438e2569df7321a8326644d4b790d2122cb74" integrity sha512-9KcA1X2E3OjXl/ykfMMInBK+uVdfIVakVe7W7Lg3wfXUNyS3Q1HWLFRwZIjhqiCGbslummPDnmb7vIekS0C1vw== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-imports@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.7.4.tgz#e5a92529f8888bf319a6376abfbd1cebc491ad91" integrity sha512-dGcrX6K9l8258WFjyDLJwuVKxR4XZfU0/vTUgOQYWEnRD8mgr+p4d6fCUMq/ys0h4CCt/S5JhbvtyErjWouAUQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-transforms@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.7.4.tgz#8d7cdb1e1f8ea3d8c38b067345924ac4f8e0879a" 
integrity sha512-ehGBu4mXrhs0FxAqN8tWkzF8GSIGAiEumu4ONZ/hD9M88uHcD+Yu2ttKfOCgwzoesJOJrtQh7trI5YPbRtMmnA== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-simple-access" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-optimise-call-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.4.tgz#034af31370d2995242aa4df402c3b7794b2dcdf2" integrity sha512-VB7gWZ2fDkSuqW6b1AKXkJWO5NyNI3bFL/kK79/30moK57blr6NbH8xcl2XcKCwOmJosftWunZqfO84IGq3ZZg== dependencies: "@babel/types" "^7.7.4" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.4.tgz#c68c2407350d9af0e061ed6726afb4fff16d0234" integrity sha512-Sk4xmtVdM9sA/jCI80f+KS+Md+ZHIpjuqmYPk1M7F/upHou5e4ReYmExAiu6PVe65BhJPZA2CY9x9k4BqE5klw== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-wrap-function" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-replace-supers@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.7.4.tgz#3c881a6a6a7571275a72d82e6107126ec9e2cdd2" integrity sha512-pP0tfgg9hsZWo5ZboYGuBn/bbYT/hdLPVSS4NMmiRJdwWhP0IznPwN9AE1JwyGsjSPLC364I0Qh5p+EPkGPNpg== dependencies: "@babel/helper-member-expression-to-functions" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-simple-access@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.7.4.tgz#a169a0adb1b5f418cfc19f22586b2ebf58a9a294" integrity sha512-zK7THeEXfan7UlWsG2A6CI/L9jVnI5+xxKZOdej39Y0YtDYKx9raHk5F2EtK9K8DHRTihYwg20ADt9S36GR78A== dependencies: "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-split-export-declaration@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz#57292af60443c4a3622cf74040ddc28e68336fd8" integrity sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug== dependencies: "@babel/types" "^7.7.4" "@babel/helper-wrap-function@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz#37ab7fed5150e22d9d7266e830072c0cdd8baace" integrity sha512-VsfzZt6wmsocOaVU0OokwrIytHND55yvyT4BPB9AIIgwr8+x7617hetdJTsuGwygN5RC6mxA9EJztTjuwm2ofg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helpers@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.7.4.tgz#62c215b9e6c712dadc15a9a0dcab76c92a940302" integrity sha512-ak5NGZGJ6LV85Q1Zc9gn2n+ayXOizryhjSUBTdu5ih1tlVCJeuQENzc4ItyCVhINVXvIT/ZQ4mheGIsfBkpskg== dependencies: "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/highlight@^7.0.0": version "7.5.0" resolved 
"https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" "@babel/parser@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.7.4.tgz#75ab2d7110c2cf2fa949959afb05fa346d2231bb" integrity sha512-jIwvLO0zCL+O/LmEJQjWA75MQTWwx3c3u2JOTDK5D3/9egrWRRA0/0hk9XXywYnXZVVpzrBYeIQTmhwUaePI9g== "@babel/plugin-proposal-async-generator-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz#0351c5ac0a9e927845fffd5b82af476947b7ce6d" integrity sha512-1ypyZvGRXriY/QP668+s8sFr2mqinhkRDMPSQLNghCQE+GAkFtp+wkHVvg2+Hdki8gwP+NFzJBJ/N1BfzCCDEw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-proposal-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.4.tgz#dde64a7f127691758cbfed6cf70de0fa5879d52d" integrity sha512-StH+nGAdO6qDB1l8sZ5UBV8AC3F2VW2I8Vfld73TMKyptMU9DY5YsJAS8U81+vEtxcH3Y/La0wG0btDrhpnhjQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.7.4.tgz#7700a6bfda771d8dc81973249eac416c6b4c697d" integrity sha512-wQvt3akcBTfLU/wYoqm/ws7YOAQKu8EVJEvHip/mzkNtjaclQoCCIqKXFP5/eyfnfbQCDV3OLRIK3mIVyXuZlw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.7.4.tgz#cc57849894a5c774214178c8ab64f6334ec8af71" integrity sha512-rnpnZR3/iWKmiQyJ3LKJpSwLDcX/nSXhdLk4Aq/tXOApIvyu7qoabrige0ylsAJffaUC51WiBu209Q0U+86OWQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.7.4.tgz#ec21e8aeb09ec6711bc0a39ca49520abee1de379" integrity sha512-DyM7U2bnsQerCQ+sejcTNZh8KQEUuC3ufzdnVnSiUv/qoGJp2Z3hanKL18KDhsBT5Wj6a7CMT5mdyCNJsEaA9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.4.tgz#7c239ccaf09470dbe1d453d50057460e84517ebb" integrity sha512-cHgqHgYvffluZk85dJ02vloErm3Y6xtH+2noOBOJ2kXOJH3aVCDnj5eR/lVNlTnYu4hndAPJD3rTFjW3qee0PA== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-async-generators@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.7.4.tgz#331aaf310a10c80c44a66b238b6e49132bd3c889" integrity sha512-Li4+EjSpBgxcsmeEF8IFcfV/+yJGxHXDirDkEoyFjumuwbmfCVHUt0HuowD/iGM7OhIRyXJH9YXxqiH6N815+g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import@^7.2.0", "@babel/plugin-syntax-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.7.4.tgz#29ca3b4415abfe4a5ec381e903862ad1a54c3aec" integrity 
sha512-jHQW0vbRGvwQNgyVxwDh4yuXu4bH1f5/EICJLAhl1SblLs2CDhrsmCk+v5XLdE9wxtAFRyxx+P//Iw+a5L/tTg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.7.4.tgz#86e63f7d2e22f9e27129ac4e83ea989a382e86cc" integrity sha512-QpGupahTQW1mHRXddMG5srgpHWqRLwJnJZKXTigB9RPFCCGbDGCgBeM/iC82ICXp414WeYx/tD54w7M2qRqTMg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.7.4.tgz#47cf220d19d6d0d7b154304701f468fc1cc6ff46" integrity sha512-mObR+r+KZq0XhRVS2BrBKBpr5jqrqzlPvS9C9vuOf5ilSwzloAl7RPWLrgKdWS6IreaVrjHxTjtyqFiOisaCwg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.7.4.tgz#a3e38f59f4b6233867b4a92dcb0ee05b2c334aa6" integrity sha512-4ZSuzWgFxqHRE31Glu+fEr/MirNZOMYmD/0BhBWyLyOOQz/gTAl7QmWm2hX1QxEIXsr2vkdlwxIzTyiYRC4xcQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-top-level-await@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.4.tgz#bd7d8fa7b9fee793a36e4027fd6dd1aa32f946da" integrity sha512-wdsOw0MvkL1UIgiQ/IFr3ETcfv1xb8RMM0H9wbiDyLaJFyiDg5oZvDLCXosIXmFeIlweML5iOBXAkqddkYNizg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-arrow-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.7.4.tgz#76309bd578addd8aee3b379d809c802305a98a12" integrity 
sha512-zUXy3e8jBNPiffmqkHRNDdZM2r8DWhCB7HhcoyZjiK1TxYEluLHAvQuYnTT+ARqRpabWqy/NHkO6e3MsYB5YfA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.4.tgz#694cbeae6d613a34ef0292713fa42fb45c4470ba" integrity sha512-zpUTZphp5nHokuy8yLlyafxCJ0rSlFoSHypTUWgpdwoDXWQcseaect7cJ8Ppk6nunOM6+5rPMkod4OYKPR5MUg== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.7.4.tgz#d0d9d5c269c78eaea76227ace214b8d01e4d837b" integrity sha512-kqtQzwtKcpPclHYjLK//3lH8OFsCDuDJBaFhVwf8kqdnF6MN4l618UDlcA7TfRs3FayrHj+svYnSX8MC9zmUyQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-block-scoping@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.7.4.tgz#200aad0dcd6bb80372f94d9e628ea062c58bf224" integrity sha512-2VBe9u0G+fDt9B5OV5DQH4KBf5DoiNkwFKOz0TCvBWvdAN2rOykCTkrL+jTLxfCAm76l9Qo5OqL7HBOx2dWggg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" lodash "^4.17.13" "@babel/plugin-transform-classes@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.4.tgz#c92c14be0a1399e15df72667067a8f510c9400ec" integrity sha512-sK1mjWat7K+buWRuImEzjNf68qrKcrddtpQo3swi9j7dUcG6y6R6+Di039QN2bD1dykeswlagupEmpOatFHHUg== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-define-map" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/helper-replace-supers" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" globals "^11.1.0" "@babel/plugin-transform-computed-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.7.4.tgz#e856c1628d3238ffe12d668eb42559f79a81910d" integrity sha512-bSNsOsZnlpLLyQew35rl4Fma3yKWqK3ImWMSC/Nc+6nGjC9s5NFWAer1YQ899/6s9HxO2zQC1WoFNfkOqRkqRQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-destructuring@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.7.4.tgz#2b713729e5054a1135097b6a67da1b6fe8789267" integrity sha512-4jFMXI1Cu2aXbcXXl8Lr6YubCn6Oc7k9lLsu8v61TZh+1jny2BWmdtvY9zSUlLdGUvcy9DMAWyZEOqjsbeg/wA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-dotall-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.4.tgz#f7ccda61118c5b7a2599a72d5e3210884a021e96" integrity sha512-mk0cH1zyMa/XHeb6LOTXTbG7uIJ8Rrjlzu91pUx/KS3JpcgaTDwMS8kM+ar8SLOvlL2Lofi4CGBAjCo3a2x+lw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-duplicate-keys@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.7.4.tgz#3d21731a42e3f598a73835299dd0169c3b90ac91" integrity sha512-g1y4/G6xGWMD85Tlft5XedGaZBCIVN+/P0bs6eabmcPP9egFleMAo65OOjlhcz1njpwagyY3t0nsQC9oTFegJA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-exponentiation-operator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.7.4.tgz#dd30c0191e3a1ba19bcc7e389bdfddc0729d5db9" integrity 
sha512-MCqiLfCKm6KEA1dglf6Uqq1ElDIZwFuzz1WH5mTf8k2uQSxEJMbOIEh7IZv7uichr7PMfi5YVSrr1vz+ipp7AQ== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-for-of@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.7.4.tgz#248800e3a5e507b1f103d8b4ca998e77c63932bc" integrity sha512-zZ1fD1B8keYtEcKF+M1TROfeHTKnijcVQm0yO/Yu1f7qoDoxEIc/+GX6Go430Bg84eM/xwPFp0+h4EbZg7epAA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.4.tgz#75a6d3303d50db638ff8b5385d12451c865025b1" integrity sha512-E/x09TvjHNhsULs2IusN+aJNRV5zKwxu1cpirZyRPw+FyyIKEHPXTsadj48bVpc1R5Qq1B5ZkzumuFLytnbT6g== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.7.4.tgz#27fe87d2b5017a2a5a34d1c41a6b9f6a6262643e" integrity sha512-X2MSV7LfJFm4aZfxd0yLVFrEXAgPqYoDG53Br/tCKiKYfX0MjVjQeWPIhPHHsCqzwQANq+FLN786fF5rgLS+gw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-member-expression-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.7.4.tgz#aee127f2f3339fc34ce5e3055d7ffbf7aa26f19a" integrity sha512-9VMwMO7i69LHTesL0RdGy93JU6a+qOPuvB4F4d0kR0zyVjJRVJRaoaGjhtki6SzQUu8yen/vxPKN6CWnCUw6bA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-amd@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.7.4.tgz#276b3845ca2b228f2995e453adc2e6f54d72fb71" integrity sha512-/542/5LNA18YDtg1F+QHvvUSlxdvjZoD/aldQwkq+E3WCkbEjNSN9zdrOXaSlfg3IfGi22ijzecklF/A7kVZFQ== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-commonjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.4.tgz#bee4386e550446343dd52a571eda47851ff857a3" integrity sha512-k8iVS7Jhc367IcNF53KCwIXtKAH7czev866ThsTgy8CwlXjnKZna2VHwChglzLleYrcHz1eQEIJlGRQxB53nqA== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.7.4" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-systemjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.4.tgz#cd98152339d3e763dfe838b7d4273edaf520bb30" integrity sha512-y2c96hmcsUi6LrMqvmNDPBBiGCiQu0aYqpHatVVu6kD4mFEXKjyNxd/drc18XXAf9dv7UXjrZwBVmTTGaGP8iw== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-umd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.4.tgz#1027c355a118de0aae9fee00ad7813c584d9061f" integrity sha512-u2B8TIi0qZI4j8q4C51ktfO7E3cQ0qnaXFI1/OXITordD40tt17g/sXqgNNCcMTcBFKrUPcGDx+TBJuZxLx7tw== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-named-capturing-groups-regex@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.4.tgz#fb3bcc4ee4198e7385805007373d6b6f42c98220" integrity sha512-jBUkiqLKvUWpv9GLSuHUFYdmHg0ujC1JEYoZUfeOOfNydZXp1sXObgyPatpcwjWgsdBGsagWW0cdJpX/DO2jMw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/plugin-transform-new-target@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.7.4.tgz#4a0753d2d60639437be07b592a9e58ee00720167" integrity sha512-CnPRiNtOG1vRodnsyGX37bHQleHE14B9dnnlgSeEs3ek3fHN1A1SScglTCg1sfbe7sRQ2BUcpgpTpWSfMKz3gg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-object-super@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.7.4.tgz#48488937a2d586c0148451bf51af9d7dda567262" integrity sha512-ho+dAEhC2aRnff2JCA0SAK7V2R62zJd/7dmtoe7MHcso4C2mS+vZjn1Pb1pCVZvJs1mgsvv5+7sT+m3Bysb6eg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/plugin-transform-parameters@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.7.4.tgz#da4555c97f39b51ac089d31c7380f03bca4075ce" integrity sha512-VJwhVePWPa0DqE9vcfptaJSzNDKrWU/4FbYCjZERtmqEs05g3UMXnYMZoXja7JAJ7Y7sPZipwm/pGApZt7wHlw== dependencies: "@babel/helper-call-delegate" "^7.7.4" "@babel/helper-get-function-arity" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-property-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.7.4.tgz#2388d6505ef89b266103f450f9167e6bd73f98c2" integrity sha512-MatJhlC4iHsIskWYyawl53KuHrt+kALSADLQQ/HkhTjX954fkxIEh4q5slL4oRAnsm/eDoZ4q0CIZpcqBuxhJQ== dependencies: "@babel/helper-plugin-utils" 
"^7.0.0" "@babel/plugin-transform-regenerator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.4.tgz#d18eac0312a70152d7d914cbed2dc3999601cfc0" integrity sha512-e7MWl5UJvmPEwFJTwkBlPmqixCtr9yAASBqff4ggXTNicZiwbF8Eefzm6NVgfiBp7JdAGItecnctKTgH44q2Jw== dependencies: regenerator-transform "^0.14.0" "@babel/plugin-transform-reserved-words@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.7.4.tgz#6a7cf123ad175bb5c69aec8f6f0770387ed3f1eb" integrity sha512-OrPiUB5s5XvkCO1lS7D8ZtHcswIC57j62acAnJZKqGGnHP+TIc/ljQSrgdX/QyOTdEK5COAhuc820Hi1q2UgLQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.7.4.tgz#51fe458c1c1fa98a8b07934f4ed38b6cd62177a6" integrity sha512-O8kSkS5fP74Ad/8pfsCMGa8sBRdLxYoSReaARRNSz3FbFQj3z/QUvoUmJ28gn9BO93YfnXc3j+Xyaqe8cKDNBQ== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" resolve "^1.8.1" semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.7.4.tgz#74a0a9b2f6d67a684c6fbfd5f0458eb7ba99891e" integrity sha512-q+suddWRfIcnyG5YiDP58sT65AJDZSUhXQDZE3r04AuqD6d/XLaQPPXSBzP2zGerkgBivqtQm9XKGLuHqBID6Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.7.4.tgz#aa673b356fe6b7e70d69b6e33a17fef641008578" integrity sha512-8OSs0FLe5/80cndziPlg4R0K6HcWSM0zyNhHhLsmw/Nc5MaA49cAsnoJ/t/YZf8qkG7fD+UjTRaApVDB526d7Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-transform-sticky-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.7.4.tgz#ffb68c05090c30732076b1285dc1401b404a123c" integrity sha512-Ls2NASyL6qtVe1H1hXts9yuEeONV2TJZmplLONkMPUG158CtmnrzW5Q5teibM5UVOFjG0D3IC5mzXR6pPpUY7A== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.0.0" "@babel/plugin-transform-template-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.7.4.tgz#1eb6411736dd3fe87dbd20cc6668e5121c17d604" integrity sha512-sA+KxLwF3QwGj5abMHkHgshp9+rRz+oY9uoRil4CyLtgEuE/88dpkeWgNk5qKVsJE9iSfly3nvHapdRiIS2wnQ== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-typeof-symbol@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.7.4.tgz#3174626214f2d6de322882e498a38e8371b2140e" integrity sha512-KQPUQ/7mqe2m0B8VecdyaW5XcQYaePyl9R7IsKd+irzj6jvbhoGnRE+M0aNkyAzI07VfUQ9266L5xMARitV3wg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-unicode-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.4.tgz#a3c0f65b117c4c81c5b6484f2a5e7b95346b83ae" integrity sha512-N77UUIV+WCvE+5yHw+oks3m18/umd7y392Zv7mYTpFqHtkpcc+QUz+gLJNTWVlWROIWeLqY0f3OjZxV5TcXnRw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/preset-env@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.7.4.tgz#ccaf309ae8d1ee2409c85a4e2b5e280ceee830f8" integrity sha512-Dg+ciGJjwvC1NIe/DGblMbcGq1HOtKbw8RLl4nIjlfcILKEOkWT/vRqPpumswABEBVudii6dnVwrBtzD7ibm4g== dependencies: "@babel/helper-module-imports" "^7.7.4" 
"@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-proposal-async-generator-functions" "^7.7.4" "@babel/plugin-proposal-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-syntax-top-level-await" "^7.7.4" "@babel/plugin-transform-arrow-functions" "^7.7.4" "@babel/plugin-transform-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions" "^7.7.4" "@babel/plugin-transform-block-scoping" "^7.7.4" "@babel/plugin-transform-classes" "^7.7.4" "@babel/plugin-transform-computed-properties" "^7.7.4" "@babel/plugin-transform-destructuring" "^7.7.4" "@babel/plugin-transform-dotall-regex" "^7.7.4" "@babel/plugin-transform-duplicate-keys" "^7.7.4" "@babel/plugin-transform-exponentiation-operator" "^7.7.4" "@babel/plugin-transform-for-of" "^7.7.4" "@babel/plugin-transform-function-name" "^7.7.4" "@babel/plugin-transform-literals" "^7.7.4" "@babel/plugin-transform-member-expression-literals" "^7.7.4" "@babel/plugin-transform-modules-amd" "^7.7.4" "@babel/plugin-transform-modules-commonjs" "^7.7.4" "@babel/plugin-transform-modules-systemjs" "^7.7.4" "@babel/plugin-transform-modules-umd" "^7.7.4" "@babel/plugin-transform-named-capturing-groups-regex" "^7.7.4" "@babel/plugin-transform-new-target" "^7.7.4" "@babel/plugin-transform-object-super" "^7.7.4" "@babel/plugin-transform-parameters" "^7.7.4" "@babel/plugin-transform-property-literals" "^7.7.4" "@babel/plugin-transform-regenerator" "^7.7.4" "@babel/plugin-transform-reserved-words" "^7.7.4" "@babel/plugin-transform-shorthand-properties" "^7.7.4" 
"@babel/plugin-transform-spread" "^7.7.4" "@babel/plugin-transform-sticky-regex" "^7.7.4" "@babel/plugin-transform-template-literals" "^7.7.4" "@babel/plugin-transform-typeof-symbol" "^7.7.4" "@babel/plugin-transform-unicode-regex" "^7.7.4" "@babel/types" "^7.7.4" browserslist "^4.6.0" core-js-compat "^3.1.1" invariant "^2.2.2" js-levenshtein "^1.1.3" semver "^5.5.0" "@babel/runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.7.4.tgz#b23a856751e4bf099262f867767889c0e3fe175b" integrity sha512-r24eVUUr0QqNZa+qrImUk8fn5SPhHq+IfYvIoIMg0do3GdK9sMdiLKP3GYVVaxpPKORgm8KRKaNTEhAjgIpLMw== dependencies: regenerator-runtime "^0.13.2" "@babel/template@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.7.4.tgz#428a7d9eecffe27deac0a98e23bf8e3675d2a77b" integrity sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw== dependencies: "@babel/code-frame" "^7.0.0" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" "@babel/traverse@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.7.4.tgz#9c1e7c60fb679fe4fcfaa42500833333c2058558" integrity sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.13" "@babel/types@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.7.4.tgz#516570d539e44ddf308c07569c258ff94fde9193" integrity sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA== dependencies: esutils "^2.0.2" lodash "^4.17.13" to-fast-properties "^2.0.0" "@types/babel-types@*", "@types/babel-types@^7.0.0": version "7.0.7" resolved 
"https://registry.yarnpkg.com/@types/babel-types/-/babel-types-7.0.7.tgz#667eb1640e8039436028055737d2b9986ee336e3" integrity sha512-dBtBbrc+qTHy1WdfHYjBwRln4+LWqASWakLHsWHR2NWHIFkv4W3O070IGoGLEBrJBvct3r0L1BUPuvURi7kYUQ== "@types/babylon@^6.16.2": version "6.16.5" resolved "https://registry.yarnpkg.com/@types/babylon/-/babylon-6.16.5.tgz#1c5641db69eb8cdf378edd25b4be7754beeb48b4" integrity sha512-xH2e58elpj1X4ynnKp9qSnWlsRTIs6n3tgLGNfwAGHwePw0mulHQllV34n0T25uYSu1k0hRKkWXF890B1yS47w== dependencies: "@types/babel-types" "*" "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== "@types/glob@^7.1.1": version "7.1.1" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== dependencies: "@types/events" "*" "@types/minimatch" "*" "@types/node" "*" "@types/minimatch@*": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== "@types/node@*": version "12.12.12" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.12.tgz#529bc3e73dbb35dd9e90b0a1c83606a9d3264bdb" integrity sha512-MGuvYJrPU0HUwqF7LqvIj50RZUX23Z+m583KBygKYUZLlZ88n6w28XRNJRJgsHukLEnLz6w6SvxZoLgbr5wLqQ== "@uirouter/angularjs@^1.0.15": version "1.0.23" resolved "https://registry.yarnpkg.com/@uirouter/angularjs/-/angularjs-1.0.23.tgz#aeec0f96b0c42187c5044ef244ba6ccb75a5d835" integrity sha512-r4hLSw7R3mwXGC5Sq7yxNlBK1sSzQUm/1MzigwwYRHoMO5uKcBPUhxFYx5U7kufP2Xl1165KeZvRsLCh0/Z1ng== dependencies: "@uirouter/core" "6.0.1" "@uirouter/core@6.0.1": version "6.0.1" resolved 
"https://registry.yarnpkg.com/@uirouter/core/-/core-6.0.1.tgz#93b02a5d178a7ab7313f34b7b3f019a000d23396" integrity sha512-mHCutiHtDDRKYmrJ92XPKDoSb2bgqaDyUpHdF4hUE+riwgkCvGdBjL8u+VDTcV3slBAk6B0LBIOIajjWkkObbQ== "@webassemblyjs/ast@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== dependencies: "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@webassemblyjs/floating-point-hex-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== "@webassemblyjs/helper-api-error@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== "@webassemblyjs/helper-buffer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" integrity sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== "@webassemblyjs/helper-code-frame@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== dependencies: "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/helper-fsm@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" integrity sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== "@webassemblyjs/helper-module-context@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== dependencies: "@webassemblyjs/ast" "1.8.5" mamacro "^0.0.3" "@webassemblyjs/helper-wasm-bytecode@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== "@webassemblyjs/helper-wasm-section@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/ieee754@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== 
dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" integrity sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== "@webassemblyjs/wasm-edit@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" integrity sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/helper-wasm-section" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-opt" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/wasm-gen@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wasm-opt@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" integrity sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wasm-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" integrity 
sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wast-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/floating-point-hex-parser" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-code-frame" "1.8.5" "@webassemblyjs/helper-fsm" "1.8.5" "@xtuc/long" "4.2.2" "@webassemblyjs/wast-printer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== 
abbrev@1.0.x: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU= accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== dependencies: mime-types "~2.1.24" negotiator "0.6.2" acorn-globals@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-3.1.0.tgz#fd8270f71fbb4996b004fa880ee5d46573a731bf" integrity sha1-/YJw9x+7SZawBPqIDuXUZXOnMb8= dependencies: acorn "^4.0.4" acorn@^3.1.0: version "3.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" integrity sha1-ReN/s56No/JbruP/U2niu18iAXo= acorn@^4.0.4, acorn@~4.0.2: version "4.0.13" resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787" integrity sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c= acorn@^6.2.1: version "6.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.3.0.tgz#0087509119ffa4fc0a0041d1e93a417e68cb856e" integrity sha512-/czfa8BwS88b9gWQVhc8eknunSA2DoJpJyTQkhheIf5E48u1N0R4q/YxxsAeqRrmK9TQ/uYfgLDfZo91UlANIA== after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" integrity sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8= ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: version "3.4.1" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" integrity 
sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== ajv@^5.0.0: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= dependencies: co "^4.6.0" fast-deep-equal "^1.0.0" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.3.0" ajv@^6.1.0, ajv@^6.10.2, ajv@^6.5.5: version "6.10.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== dependencies: fast-deep-equal "^2.0.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" align-text@^0.1.1, align-text@^0.1.3: version "0.1.4" resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" integrity sha1-DNkKVhCT810KmSVsIrcGlDP60Rc= dependencies: kind-of "^3.0.2" longest "^1.0.1" repeat-string "^1.5.2" amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" integrity sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU= angular-animate@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-animate/-/angular-animate-1.7.9.tgz#a397f82434c1e7ed5b7a298fa70fc3de989a6785" integrity sha512-fV+AISy/HTzurQH2ngsJg+lLIvfu0ahc1h4AYKauaXVw97rZc2k4iUA1bMstiEyClsdayQX568kjQc1NK+oYhw== angular-mocks@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-mocks/-/angular-mocks-1.7.9.tgz#0a3b7e28b9a493b4e3010ed2b0f69a68e9b4f79b" integrity sha512-LQRqqiV3sZ7NTHBnNmLT0bXtE5e81t97+hkJ56oU0k3dqKv1s6F+nBWRlOVzqHWPGFOiPS8ZJVdrS8DFzHyNIA== angular-ui-bootstrap@^2.5.6: version "2.5.6" resolved "https://registry.yarnpkg.com/angular-ui-bootstrap/-/angular-ui-bootstrap-2.5.6.tgz#23937322ec641a6fbee16498cc32452aa199e7c5" integrity 
sha512-yzcHpPMLQl0232nDzm5P4iAFTFQ9dMw0QgFLuKYbDj9M0xJ62z0oudYD/Lvh1pWfRsukiytP4Xj6BHOSrSXP8A== angular@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular/-/angular-1.7.9.tgz#e52616e8701c17724c3c238cfe4f9446fd570bc4" integrity sha512-5se7ZpcOtu0MBFlzGv5dsM1quQDoDeUTwZrWjGtTNA7O88cD8TEk5IEKCTDa3uECV9XnvKREVUr7du1ACiWGFQ== ansi-colors@^3.0.0: version "3.2.4" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== ansi-html@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-regex@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.0, ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" anymatch@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== dependencies: micromatch "^3.1.4" normalize-path "^2.1.1" anymatch@~3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" aproba@^1.0.3, aproba@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" arr-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-union@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= array-flatten@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= array-flatten@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= array-unique@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= arraybuffer.slice@~0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675" integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog== asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= asn1.js@^4.0.0: version "4.10.1" resolved 
"https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== dependencies: bn.js "^4.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assert@^1.1.1: version "1.5.0" resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== dependencies: object-assign "^4.1.1" util "0.10.3" assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= async-each@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async-limiter@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== async@1.x: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= async@^2.0.0, async@^2.6.2: 
version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== dependencies: lodash "^4.17.14" asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= atob@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== autoprefixer@^9.5.1: version "9.7.2" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.7.2.tgz#26cf729fbb709323b40171a874304884dcceffed" integrity sha512-LCAfcdej1182uVvPOZnytbq61AhnOZ/4JelDaJGDeNwewyU1AMaNthcHsyz1NRjTmd2FkurMckLWfkHg3Z//KA== dependencies: browserslist "^4.7.3" caniuse-lite "^1.0.30001010" chalk "^2.4.2" normalize-range "^0.1.2" num2fraction "^1.2.2" postcss "^7.0.23" postcss-value-parser "^4.0.2" aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" js-tokens "^3.0.2" babel-generator@^6.18.0: version "6.26.1" resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" 
integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== dependencies: babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" detect-indent "^4.0.0" jsesc "^1.3.0" lodash "^4.17.4" source-map "^0.5.7" trim-right "^1.0.1" babel-loader@^8.0.5: version "8.0.6" resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb" integrity sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw== dependencies: find-cache-dir "^2.0.0" loader-utils "^1.0.2" mkdirp "^0.5.1" pify "^4.0.1" babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= dependencies: babel-runtime "^6.22.0" babel-plugin-dynamic-import-node@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== dependencies: object.assign "^4.1.0" babel-runtime@^6.0.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= dependencies: core-js "^2.4.0" regenerator-runtime "^0.11.0" babel-template@^6.16.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= dependencies: babel-runtime "^6.26.0" babel-traverse "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" lodash "^4.17.4" babel-traverse@^6.18.0, babel-traverse@^6.26.0: version "6.26.0" resolved 
"https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= dependencies: babel-code-frame "^6.26.0" babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" debug "^2.6.8" globals "^9.18.0" invariant "^2.2.2" lodash "^4.17.4" babel-types@^6.18.0, babel-types@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: babel-runtime "^6.26.0" esutils "^2.0.2" lodash "^4.17.4" to-fast-properties "^1.0.3" babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" integrity sha1-c5JncZI7Whl0etZmqlzUv5xunOg= base64-js@^1.0.2: version "1.3.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== base64id@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY= base@^0.11.1: version 
"0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" class-utils "^0.3.5" component-emitter "^1.2.1" define-property "^1.0.0" isobject "^3.0.1" mixin-deep "^1.2.0" pascalcase "^0.1.1" batch@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" better-assert@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" integrity sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI= dependencies: callsite "1.0.0" big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== big.js@^5.2.2: version "5.2.2" resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^1.0.0: version "1.13.1" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binary-extensions@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c" integrity 
sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow== blob@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.5.tgz#d680eeef25f8cd91ad533f5b01eed48e64caf683" integrity sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig== bluebird@^3.3.0, bluebird@^3.5.5: version "3.7.1" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.1.tgz#df70e302b471d7473489acf26a93d63b53f874de" integrity sha512-DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg== bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0: version "4.11.8" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f" integrity sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA== body-parser@1.19.0, body-parser@^1.16.1: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== dependencies: bytes "3.1.0" content-type "~1.0.4" debug "2.6.9" depd "~1.1.2" http-errors "1.7.2" iconv-lite "0.4.24" on-finished "~2.3.0" qs "6.7.0" raw-body "2.4.0" type-is "~1.6.17" bonjour@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= dependencies: array-flatten "^2.1.0" deep-equal "^1.0.1" dns-equal "^1.0.0" dns-txt "^2.0.2" multicast-dns "^6.0.1" multicast-dns-service-types "^1.1.0" boolbase@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= brace-expansion@^1.1.7: version "1.1.11" resolved 
"https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" braces@^2.3.1, braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" repeat-element "^1.1.2" snapdragon "^0.8.1" snapdragon-node "^2.0.1" split-string "^3.0.2" to-regex "^3.0.1" braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" brorand@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== dependencies: buffer-xor "^1.0.3" cipher-base "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.3" inherits "^2.0.1" safe-buffer "^5.0.1" browserify-cipher@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== dependencies: browserify-aes "^1.0.4" 
browserify-des "^1.0.0" evp_bytestokey "^1.0.0" browserify-des@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== dependencies: cipher-base "^1.0.1" des.js "^1.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" browserify-rsa@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= dependencies: bn.js "^4.1.0" randombytes "^2.0.1" browserify-sign@^4.0.0: version "4.0.4" resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= dependencies: bn.js "^4.1.1" browserify-rsa "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.2" elliptic "^6.0.0" inherits "^2.0.1" parse-asn1 "^5.0.0" browserify-zlib@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== dependencies: pako "~1.0.5" browserslist@^4.6.0, browserslist@^4.7.3: version "4.7.3" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.7.3.tgz#02341f162b6bcc1e1028e30624815d4924442dc3" integrity sha512-jWvmhqYpx+9EZm/FxcZSbUZyDEvDTLDi3nSAKbzEkyWvtI0mNSmUosey+5awDW1RUlrgXbQb5A6qY1xQH9U6MQ== dependencies: caniuse-lite "^1.0.30001010" electron-to-chromium "^1.3.306" node-releases "^1.1.40" buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== 
buffer-alloc@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== dependencies: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" buffer-fill@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== buffer-indexof@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= buffer@^4.3.0: version "4.9.2" resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" isarray "^1.0.0" "buildbot-build-common@link:../build_common": version "0.0.0" uid "" "buildbot-data-js@link:../data_module": version "0.0.0" uid "" builtin-status-codes@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= bytes@3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= bytes@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== cacache@^12.0.2: version "12.0.3" resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== dependencies: bluebird "^3.5.5" chownr "^1.1.1" figgy-pudding "^3.5.1" glob "^7.1.4" graceful-fs "^4.1.15" infer-owner "^1.0.3" lru-cache "^5.1.1" mississippi "^3.0.0" mkdirp "^0.5.1" move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.3" ssri "^6.0.1" unique-filename "^1.1.1" y18n "^4.0.0" cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" component-emitter "^1.2.1" get-value "^2.0.6" has-value "^1.0.0" isobject "^3.0.1" set-value "^2.0.0" to-object-path "^0.3.0" union-value "^1.0.0" unset-value "^1.0.0" caller-callsite@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= dependencies: callsites "^2.0.0" caller-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= dependencies: caller-callsite "^2.0.0" callsite@1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" integrity sha1-KAOY5dZkvXQDi28JBRU+borxvCA= callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= camel-case@3.0.x: version "3.0.0" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" integrity sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= dependencies: no-case "^2.2.0" upper-case "^1.1.1" camelcase-keys@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= dependencies: camelcase "^2.0.0" map-obj "^1.0.0" camelcase@^1.0.2: version "1.2.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" integrity sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk= camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= camelcase@^5.0.0, camelcase@^5.2.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== caniuse-lite@^1.0.30001010: version "1.0.30001011" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001011.tgz#0d6c4549c78c4a800bb043a83ca0cbe0aee6c6e1" integrity sha512-h+Eqyn/YA6o6ZTqpS86PyRmNWOs1r54EBDcd2NTwwfsXQ8re1B38SnB+p2RKF8OUsyEIjeDU8XGec1RGO/wYCg== caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= center-align@^0.1.1: version 
"0.1.3" resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" integrity sha1-qg0yYptu6XIgBBHL1EYckHvCt60= dependencies: align-text "^0.1.3" lazy-cache "^1.0.3" chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" character-parser@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" integrity sha1-x84o821LzZdE5f/CxfzeHHMmH8A= dependencies: is-regex "^1.0.3" chokidar@^2.0.2, chokidar@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== dependencies: anymatch "^2.0.0" async-each "^1.0.1" braces "^2.3.2" glob-parent "^3.1.0" inherits "^2.0.3" is-binary-path "^1.0.0" is-glob "^4.0.0" normalize-path "^3.0.0" path-is-absolute "^1.0.0" readdirp "^2.2.1" upath "^1.1.1" optionalDependencies: fsevents "^1.2.7" chokidar@^3.0.0: version "3.3.0" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6" integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A== dependencies: anymatch "~3.1.1" braces "~3.0.2" 
glob-parent "~5.1.0" is-binary-path "~2.1.0" is-glob "~4.0.1" normalize-path "~3.0.0" readdirp "~3.2.0" optionalDependencies: fsevents "~2.1.1" chownr@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.3.tgz#42d837d5239688d55f303003a508230fa6727142" integrity sha512-i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw== chrome-trace-event@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== dependencies: tslib "^1.9.0" cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" define-property "^0.2.5" isobject "^3.0.0" static-extend "^0.1.1" clean-css@4.2.x, clean-css@^4.1.11: version "4.2.1" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" integrity sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== dependencies: source-map "~0.6.0" cliui@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" integrity sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE= dependencies: center-align "^0.1.1" right-align "^0.1.1" wordwrap "0.0.2" cliui@^4.0.0: version "4.1.0" 
resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== dependencies: string-width "^2.1.1" strip-ansi "^4.0.0" wrap-ansi "^2.0.0" cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== dependencies: string-width "^3.1.0" strip-ansi "^5.2.0" wrap-ansi "^5.1.0" clone@^2.1.1, clone@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit "^1.0.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colors@^1.1.0, colors@^1.1.2: version "1.4.0" resolved 
"https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@2.17.x: version "2.17.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== commander@^2.20.0, commander@~2.20.3: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@~2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= component-bind@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E= component-emitter@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= component-emitter@^1.2.1: version "1.3.0" resolved 
"https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== component-inherit@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" integrity sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM= compressible@~2.0.16: version "2.0.17" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.17.tgz#6e8c108a16ad58384a977f3a482ca20bff2f38c1" integrity sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw== dependencies: mime-db ">= 1.40.0 < 2" compression@^1.7.4: version "1.7.4" resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" bytes "3.0.0" compressible "~2.0.16" debug "2.6.9" on-headers "~1.0.2" safe-buffer "5.1.2" vary "~1.1.2" concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= concat-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" inherits "^2.0.3" readable-stream "^2.2.2" typedarray "^0.0.6" connect-history-api-fallback@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" integrity 
sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== connect@^3.6.0: version "3.7.0" resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== dependencies: debug "2.6.9" finalhandler "1.1.2" parseurl "~1.3.3" utils-merge "1.0.1" console-browserify@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= constantinople@^3.0.1, constantinople@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-3.1.2.tgz#d45ed724f57d3d10500017a7d3a889c1381ae647" integrity sha512-yePcBqEFhLOqSBtwYOGGS1exHo/s1xjekXiinh4itpNQGCu4KA1euPh1fg07N2wMITZXQkBz75Ntdt1ctGZouw== dependencies: "@types/babel-types" "^7.0.0" "@types/babylon" "^6.16.2" babel-types "^6.26.0" babylon "^6.18.0" constants-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= content-disposition@0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== dependencies: safe-buffer "5.1.2" content-type@~1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.5.0, convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== dependencies: safe-buffer "~5.1.1" cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= cookie@0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= cookie@0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== copy-concurrently@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== dependencies: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.0" copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= core-js-compat@^3.1.1: version "3.4.2" resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.4.2.tgz#652fa7c54652b7f6586a893e37001df55ea2ac37" integrity 
sha512-W0Aj+LM3EAxxjD0Kp2o4be8UlnxIZHNupBv2znqrheR4aY2nOn91794k/xoSp+SxqqriiZpTsSwBtZr60cbkwQ== dependencies: browserslist "^4.7.3" semver "^6.3.0" core-js@^2.4.0: version "2.6.10" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.10.tgz#8a5b8391f8cc7013da703411ce5b585706300d7f" integrity sha512-I39t74+4t+zau64EN1fE5v2W31Adtc/REhzWN+gWRRXg6WH5qAsZm62DHpQ1+Yhe4047T55jvzz7MUqF/dBBlA== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cosmiconfig@^5.0.0: version "5.2.1" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== dependencies: import-fresh "^2.0.0" is-directory "^0.3.1" js-yaml "^3.13.1" parse-json "^4.0.0" create-ecdh@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== dependencies: bn.js "^4.1.0" elliptic "^6.0.0" create-hash@^1.1.0, create-hash@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== dependencies: cipher-base "^1.0.1" inherits "^2.0.1" md5.js "^1.3.4" ripemd160 "^2.0.1" sha.js "^2.4.0" create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: version "1.1.7" resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== dependencies: cipher-base "^1.0.3" 
create-hash "^1.1.0" inherits "^2.0.1" ripemd160 "^2.0.0" safe-buffer "^5.0.1" sha.js "^2.4.8" cross-spawn@6.0.5, cross-spawn@^6.0.0: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== dependencies: nice-try "^1.0.4" path-key "^2.0.1" semver "^5.5.0" shebang-command "^1.2.0" which "^1.2.9" crypto-browserify@^3.11.0: version "3.12.0" resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== dependencies: browserify-cipher "^1.0.0" browserify-sign "^4.0.0" create-ecdh "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.0" diffie-hellman "^5.0.0" inherits "^2.0.1" pbkdf2 "^3.0.3" public-encrypt "^4.0.0" randombytes "^2.0.0" randomfill "^1.0.3" css-loader@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-2.1.1.tgz#d8254f72e412bb2238bb44dd674ffbef497333ea" integrity sha512-OcKJU/lt232vl1P9EEDamhoO9iKY3tIjY5GU+XDLblAykTdgs6Ux9P1hTHve8nFKy5KPpOXOsVI/hIwi3841+w== dependencies: camelcase "^5.2.0" icss-utils "^4.1.0" loader-utils "^1.2.3" normalize-path "^3.0.0" postcss "^7.0.14" postcss-modules-extract-imports "^2.0.0" postcss-modules-local-by-default "^2.0.6" postcss-modules-scope "^2.1.0" postcss-modules-values "^2.0.0" postcss-value-parser "^3.3.0" schema-utils "^1.0.0" css-select@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= dependencies: boolbase "~1.0.0" css-what "2.1" domutils "1.5.1" nth-check "~1.0.1" css-what@2.1: version "2.1.3" resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" 
integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== cssesc@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== currently-unhandled@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= dependencies: array-find-index "^1.0.1" custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha1-XQKkaFCt8bSjF5RqOSj8y1v9BCU= cyclist@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= d@1, d@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" type "^1.0.1" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" date-format@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/date-format/-/date-format-2.1.0.tgz#31d5b5ea211cf5fd764cd38baf9d033df7e125cf" integrity sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA== dateformat@^1.0.6: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" integrity sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk= dependencies: get-stdin "^4.0.1" meow 
"^3.3.0" debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^3.0.0, debug@^3.1.1, debug@^3.2.5, debug@^3.2.6: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" debug@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== dependencies: ms "2.0.0" decamelize@^1.0.0, decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= decode-uri-component@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= deep-equal@^1.0.1: version "1.1.1" resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== dependencies: is-arguments "^1.0.4" is-date-object "^1.0.1" is-regex "^1.0.4" object-is "^1.0.1" object-keys 
"^1.1.1" regexp.prototype.flags "^1.2.0" deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= default-gateway@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== dependencies: execa "^1.0.0" ip-regex "^2.1.0" define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" define-property@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" isobject "^3.0.1" del@^4.1.1: version "4.1.1" resolved 
"https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== dependencies: "@types/glob" "^7.1.1" globby "^6.1.0" is-path-cwd "^2.0.0" is-path-in-cwd "^2.0.0" p-map "^2.0.0" pify "^4.0.1" rimraf "^2.6.3" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= des.js@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== dependencies: inherits "^2.0.1" minimalistic-assert "^1.0.0" destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= detect-indent@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= dependencies: repeating "^2.0.0" detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity 
sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= detect-node@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw= diffie-hellman@^5.0.0: version "5.0.3" resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== dependencies: bn.js "^4.1.0" miller-rabin "^4.0.0" randombytes "^2.0.0" dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= dns-packet@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.1.tgz#12aa426981075be500b910eedcd0b47dd7deda5a" integrity sha512-0UxfQkMhYAUaZI+xrNZOz/as5KgDU0M/fQ9b6SpkyLbk3GEswDi6PADJVaYJradtRVsRIlF1zLyOodbcTCDzUg== dependencies: ip "^1.1.0" safe-buffer "^5.0.1" dns-txt@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= dependencies: buffer-indexof "^1.0.0" doctypes@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" integrity sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk= dom-converter@^0.2: version "0.2.0" resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" 
dom-serialize@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" integrity sha1-ViromZ9Evl6jB29UGdzVnrQ6yVs= dependencies: custom-event "~1.0.0" ent "~2.2.0" extend "^3.0.0" void-elements "^2.0.0" dom-serializer@0: version "0.2.2" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== dependencies: domelementtype "^2.0.1" entities "^2.0.0" domain-browser@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== domelementtype@1, domelementtype@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== domelementtype@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d" integrity sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ== domhandler@^2.3.0: version "2.4.2" resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== dependencies: domelementtype "1" domutils@1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= dependencies: dom-serializer "0" domelementtype "1" domutils@^1.5.1: version "1.7.0" 
resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== dependencies: dom-serializer "0" domelementtype "1" duplexify@^3.4.2, duplexify@^3.6.0: version "3.7.1" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" inherits "^2.0.1" readable-stream "^2.0.0" stream-shift "^1.0.0" ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.3.306: version "1.3.311" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.311.tgz#73baa361e2b1f44b7b4f1a443aaa1372f8074ebb" integrity sha512-7GH6RKCzziLzJ9ejmbiBEdzHZsc6C3eRpav14dmRfTWMpNgMqpP1ukw/FU/Le2fR+ep642naq7a23xNdmh2s+A== elliptic@^6.0.0: version "6.5.2" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.2.tgz#05c5678d7173c049d8ca433552224a495d0e3762" integrity sha512-f4x70okzZbIQl/NSRLkI/+tteV/9WqL98zx+SQ69KbXxmVrmjwsNUPn/gYJJ0sHvEak24cZgHIPegRePAtA/xw== dependencies: bn.js "^4.4.0" brorand "^1.0.1" hash.js "^1.0.0" hmac-drbg "^1.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.0" emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" integrity 
sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== emojis-list@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" engine.io-client@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.2.1.tgz#6f54c0475de487158a1a7c77d10178708b6add36" integrity sha512-y5AbkytWeM4jQr7m/koQLc5AxpRKC1hEVUb/s1FUAWEJq5AzJJ4NLvzuKPuxtDi5Mq755WuDvZ6Iv2rXj4PTzw== dependencies: component-emitter "1.2.1" component-inherit "0.0.3" debug "~3.1.0" engine.io-parser "~2.1.1" has-cors "1.1.0" indexof "0.0.1" parseqs "0.0.5" parseuri "0.0.5" ws "~3.3.1" xmlhttprequest-ssl "~1.5.4" yeast "0.1.2" engine.io-parser@~2.1.0, engine.io-parser@~2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6" integrity sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA== dependencies: after "0.8.2" arraybuffer.slice "~0.0.7" base64-arraybuffer "0.1.5" blob "0.0.5" has-binary2 "~1.0.2" engine.io@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.2.1.tgz#b60281c35484a70ee0351ea0ebff83ec8c9522a2" integrity sha512-+VlKzHzMhaU+GsCIg4AoXF1UdDFjHHwMmMKqMJNDNLlUlejz58FCy4LBqB2YVJskHGYl06BatYWKP2TVdVXE5w== dependencies: accepts "~1.3.4" 
base64id "1.0.0" cookie "0.3.1" debug "~3.1.0" engine.io-parser "~2.1.0" ws "~3.3.1" enhanced-resolve@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" tapable "^1.0.0" enhanced-resolve@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" integrity sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== dependencies: graceful-fs "^4.1.2" memory-fs "^0.5.0" tapable "^1.0.0" ent@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= entities@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4" integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw== errno@^0.1.1, errno@^0.1.3, errno@~0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== dependencies: prr "~1.0.1" error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity 
sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es-abstract@^1.5.1: version "1.16.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.16.0.tgz#d3a26dc9c3283ac9750dca569586e976d9dcc06d" integrity sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg== dependencies: es-to-primitive "^1.2.0" function-bind "^1.1.1" has "^1.0.3" has-symbols "^1.0.0" is-callable "^1.1.4" is-regex "^1.0.4" object-inspect "^1.6.0" object-keys "^1.1.1" string.prototype.trimleft "^2.1.0" string.prototype.trimright "^2.1.0" es-to-primitive@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50: version "0.10.53" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== dependencies: es6-iterator "~2.0.3" es6-symbol "~3.1.3" next-tick "~1.0.0" es6-iterator@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= dependencies: d "1" es5-ext "^0.10.35" es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== dependencies: d "^1.0.1" ext "^1.1.2" escape-html@~1.0.3: version "1.0.3" resolved 
"https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@1.8.x: version "1.8.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" integrity sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg= dependencies: esprima "^2.7.1" estraverse "^1.9.1" esutils "^2.0.2" optionator "^0.8.1" optionalDependencies: source-map "~0.2.0" eslint-scope@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== dependencies: esrecurse "^4.1.0" estraverse "^4.1.1" esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esrecurse@^4.1.0: version "4.2.1" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== dependencies: estraverse "^4.1.0" estraverse@^1.9.1: version "1.9.3" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= estraverse@^4.1.0, 
estraverse@^4.1.1: version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= eventemitter3@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.0.tgz#d65176163887ee59f386d64c82610b696a4a74eb" integrity sha512-qerSRB0p+UDEssxTtm6EDKcE7W4OaoisfIMl4CngyEhjpYglocpNg6UEqCvemdGhosAsg4sO2dXJOdyBifPGCg== events@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" integrity sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== eventsource@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== dependencies: original "^1.0.0" evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== dependencies: md5.js "^1.3.4" safe-buffer "^5.1.1" execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" integrity 
sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== dependencies: cross-spawn "^6.0.0" get-stream "^4.0.0" is-stream "^1.1.0" npm-run-path "^2.0.0" p-finally "^1.0.0" signal-exit "^3.0.0" strip-eof "^1.0.0" expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= dependencies: debug "^2.3.3" define-property "^0.2.5" extend-shallow "^2.0.1" posix-character-classes "^0.1.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= dependencies: homedir-polyfill "^1.0.1" express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== dependencies: accepts "~1.3.7" array-flatten "1.1.1" body-parser "1.19.0" content-disposition "0.5.3" content-type "~1.0.4" cookie "0.4.0" cookie-signature "1.0.6" debug "2.6.9" depd "~1.1.2" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" finalhandler "~1.1.2" fresh "0.5.2" merge-descriptors "1.0.1" methods "~1.1.2" on-finished "~2.3.0" parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.5" qs "6.7.0" range-parser "~1.2.1" safe-buffer "5.1.2" send "0.17.1" serve-static "1.14.1" setprototypeof "1.1.1" statuses "~1.5.0" type-is "~1.6.18" utils-merge "1.0.1" vary "~1.1.2" ext@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.2.0.tgz#8dd8d2dd21bcced3045be09621fa0cbf73908ba4" integrity sha512-0ccUQK/9e3NreLFg6K6np8aPyRgwycx+oFGtfx1dSp7Wj00Ozw9r05FgBRlzjf2XBM7LAzwgLyDscRrtSU91hA== dependencies: type "^2.0.0" 
extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= dependencies: assign-symbols "^1.0.0" is-extendable "^1.0.1" extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== dependencies: array-unique "^0.3.2" define-property "^1.0.0" expand-brackets "^2.1.4" extend-shallow "^2.0.1" fragment-cache "^0.2.1" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= fast-deep-equal@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" 
integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= fast-levenshtein@~2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= faye-websocket@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" integrity sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= dependencies: websocket-driver ">=0.5.1" faye-websocket@~0.11.1: version "0.11.3" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.3.tgz#5c0e9a8968e8912c286639fde977a8b209f2508e" integrity sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== dependencies: websocket-driver ">=0.5.1" figgy-pudding@^3.5.1: version "3.5.1" resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.1.tgz#862470112901c727a0e495a80744bd5baa1d6790" integrity sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w== file-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-3.0.1.tgz#f8e0ba0b599918b51adfe45d66d1e771ad560faa" integrity sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw== dependencies: loader-utils "^1.0.2" schema-utils "^1.0.0" fill-range@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= dependencies: extend-shallow "^2.0.1" is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range "^2.1.0" fill-range@^7.0.1: version "7.0.1" resolved 
"https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" finalhandler@1.1.2, finalhandler@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" on-finished "~2.3.0" parseurl "~1.3.3" statuses "~1.5.0" unpipe "~1.0.0" find-cache-dir@^2.0.0, find-cache-dir@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== dependencies: commondir "^1.0.1" make-dir "^2.0.0" pkg-dir "^3.0.0" find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= dependencies: path-exists "^2.0.0" pinkie-promise "^2.0.0" find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" findup-sync@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== dependencies: detect-file "^1.0.0" is-glob "^4.0.0" micromatch "^3.0.4" resolve-dir "^1.0.1" flatted@^2.0.0: version "2.0.1" resolved 
"https://registry.yarnpkg.com/flatted/-/flatted-2.0.1.tgz#69e57caa8f0eacbc281d2e2cb458d46fdb449e08" integrity sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg== flush-write-stream@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== dependencies: inherits "^2.0.3" readable-stream "^2.3.6" follow-redirects@^1.0.0: version "1.9.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.9.0.tgz#8d5bcdc65b7108fe1508649c79c12d732dcedb4f" integrity sha512-CRcPzsSIbXyVDl0QI01muNDu69S8trU4jArW9LpOt2WtC6LyUJetcIrmfHsRBx7/Jb6GHJUiuqyYxPooFfNt6A== dependencies: debug "^3.0.0" for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" forwarded@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= dependencies: map-cache "^0.2.2" 
fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= from2@^2.1.0: version "2.3.0" resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= dependencies: inherits "^2.0.1" readable-stream "^2.0.0" fs-access@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" integrity sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o= dependencies: null-check "^1.0.0" fs-extra@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== dependencies: graceful-fs "^4.1.2" jsonfile "^4.0.0" universalify "^0.1.0" fs-minipass@^1.2.5: version "1.2.7" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== dependencies: minipass "^2.6.0" fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= dependencies: graceful-fs "^4.1.2" iferr "^0.1.5" imurmurhash "^0.1.4" readable-stream "1 || 2" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.2.7: version "1.2.9" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" integrity 
sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: nan "^2.12.1" node-pre-gyp "^0.12.0" fsevents@~2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805" integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA== function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" has-unicode "^2.0.0" object-assign "^4.1.0" signal-exit "^3.0.0" string-width "^1.0.1" strip-ansi "^3.0.1" wide-align "^1.1.0" get-caller-file@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-caller-file@^2.0.1: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-stdin@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= get-stream@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity 
sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== dependencies: pump "^3.0.0" get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-parent@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= dependencies: is-glob "^3.1.0" path-dirname "^1.0.0" glob-parent@~5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.0.tgz#5f4c1d1e748d30cd73ad2944b3577a81b081e8c2" integrity sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw== dependencies: is-glob "^4.0.1" glob@^5.0.13, glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" integrity sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E= dependencies: inflight "^1.0.4" inherits "2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^7.0.3, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" global-modules@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" integrity 
sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== dependencies: global-prefix "^3.0.0" global-modules@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" is-windows "^1.0.1" resolve-dir "^1.0.0" global-prefix@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= dependencies: expand-tilde "^2.0.2" homedir-polyfill "^1.0.1" ini "^1.3.4" is-windows "^1.0.1" which "^1.2.14" global-prefix@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== dependencies: ini "^1.3.5" kind-of "^6.0.2" which "^1.3.1" globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== globby@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" glob "^7.0.3" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: 
version "4.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== "guanlecoja-ui@link:../guanlecoja-ui": version "0.0.0" uid "" handle-thing@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754" integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ== handlebars@^4.0.1: version "4.5.3" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.5.3.tgz#5cf75bd8714f7605713511a56be7c349becb0482" integrity sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA== dependencies: neo-async "^2.6.0" optimist "^0.6.1" source-map "^0.6.1" optionalDependencies: uglify-js "^3.1.4" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.0: version "5.1.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: ajv "^6.5.5" har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-binary2@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/has-binary2/-/has-binary2-1.0.3.tgz#7776ac627f3ea77250cfc332dab7ddf5e4f5d11d" integrity sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw== dependencies: isarray "2.0.1" has-cors@1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk= has-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-symbols@^1.0.0, has-symbols@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= has-value@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= dependencies: get-value "^2.0.3" has-values "^0.1.4" isobject "^2.0.0" has-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: get-value "^2.0.6" has-values "^1.0.0" isobject "^3.0.0" has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= has-values@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= dependencies: is-number "^3.0.0" kind-of "^4.0.0" has@^1.0.1, has@^1.0.3: version "1.0.3" 
resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hash-base@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== dependencies: inherits "^2.0.3" minimalistic-assert "^1.0.1" he@1.2.x: version "1.2.0" resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== hmac-drbg@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= dependencies: hash.js "^1.0.3" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.1" homedir-polyfill@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.5" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.5.tgz#759cfcf2c4d156ade59b0b2dfabddc42a6b9c70c" integrity sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg== hpack.js@^2.1.6: version "2.1.6" resolved 
"https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= dependencies: inherits "^2.0.1" obuf "^1.0.0" readable-stream "^2.0.1" wbuf "^1.1.0" html-entities@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8= html-minifier@^3.2.3: version "3.5.21" resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== dependencies: camel-case "3.0.x" clean-css "4.2.x" commander "2.17.x" he "1.2.x" param-case "2.1.x" relateurl "0.2.x" uglify-js "3.4.x" html-webpack-plugin@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= dependencies: html-minifier "^3.2.3" loader-utils "^0.2.16" lodash "^4.17.3" pretty-error "^2.0.2" tapable "^1.0.0" toposort "^1.0.0" util.promisify "1.0.0" htmlparser2@^3.3.0: version "3.10.1" resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== dependencies: domelementtype "^1.3.1" domhandler "^2.3.0" domutils "^1.5.1" entities "^1.1.1" inherits "^2.0.1" readable-stream "^3.1.1" http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= http-errors@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" integrity 
sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" http-errors@~1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.0" statuses ">= 1.4.0 < 2" http-errors@~1.7.2: version "1.7.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== dependencies: depd "~1.1.2" inherits "2.0.4" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" "http-parser-js@>=0.4.0 <0.4.11": version "0.4.10" resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4" integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q= http-proxy-middleware@0.19.1: version "0.19.1" resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== dependencies: http-proxy "^1.17.0" is-glob "^4.0.0" lodash "^4.17.11" micromatch "^3.1.10" http-proxy@^1.13.0, http-proxy@^1.17.0: version "1.18.0" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.0.tgz#dbe55f63e75a347db7f3d99974f2692a314a6a3a" integrity sha512-84I2iJM/n1d4Hdgc6y2+qY5mDaz2PUVjlg9znE9byl+q0uC3DeByqBGReQu5tpLK0TAqTIXScRUV+dg7+bUPpQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" requires-port "^1.0.0" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" 
integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= iconv-lite@0.4.24, iconv-lite@^0.4.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" icss-replace-symbols@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= icss-utils@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== dependencies: postcss "^7.0.14" ieee754@^1.1.4: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== iferr@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= ignore-walk@^3.0.1: version "3.0.3" resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== dependencies: minimatch "^3.0.4" image-size@~0.5.0: version "0.5.5" resolved 
"https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= import-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= dependencies: import-from "^2.1.0" import-fresh@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= dependencies: caller-path "^2.0.0" resolve-from "^3.0.0" import-from@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" integrity sha1-M1238qev/VOqpHHUuAId7ja387E= dependencies: resolve-from "^3.0.0" import-glob-loader@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/import-glob-loader/-/import-glob-loader-1.1.0.tgz#98d84c0f661c8ba9f821d9ddb7c6b6dc8e97eca2" integrity sha1-mNhMD2Yci6n4Idndt8a23I6X7KI= dependencies: glob "^5.0.13" loader-utils "^0.2.10" import-local@2.0.0, import-local@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== dependencies: pkg-dir "^3.0.0" resolve-cwd "^2.0.0" imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= dependencies: repeating "^2.0.0" indexes-of@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= indexof@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= infer-owner@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: version "1.3.5" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== internal-ip@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" integrity 
sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== dependencies: default-gateway "^4.2.0" ipaddr.js "^1.9.0" interpret@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" invert-kv@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= ip@^1.1.0, ip@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= ipaddr.js@1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== ipaddr.js@^1.9.0: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== is-absolute-url@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" integrity 
sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== is-accessor-descriptor@^0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arguments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.0.4.tgz#3faf966c7cba0ff437fb31f6250082fcf0448cf3" integrity sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA== is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-callable@^1.1.4: version "1.1.4" resolved 
"https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== dependencies: kind-of "^6.0.0" is-date-object@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" is-data-descriptor "^1.0.0" kind-of "^6.0.2" is-directory@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= is-expression@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/is-expression/-/is-expression-3.0.0.tgz#39acaa6be7fd1f3471dc42c7416e61c24317ac9f" integrity sha1-Oayqa+f9HzRx3ELHQW5hwkMXrJ8= dependencies: acorn "~4.0.2" object-assign "^4.0.1" is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= is-extendable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^2.1.0, is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" integrity sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= is-glob@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved 
"https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-cwd@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== is-path-in-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== dependencies: is-path-inside "^2.1.0" is-path-inside@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== dependencies: path-is-inside "^1.0.2" is-plain-obj@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity 
sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= is-regex@^1.0.3, is-regex@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= dependencies: has "^1.0.1" is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-symbol@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== dependencies: has-symbols "^1.0.1" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== is-wsl@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isarray@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.1.tgz#a37d94ed9cda2d59865c9f76fe596ee1f338741e" integrity sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4= isbinaryfile@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" integrity sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== dependencies: buffer-alloc "^1.2.0" isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul-instrumenter-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/istanbul-instrumenter-loader/-/istanbul-instrumenter-loader-3.0.1.tgz#9957bd59252b373fae5c52b7b5188e6fde2a0949" integrity sha512-a5SPObZgS0jB/ixaKSMdn6n/gXSrK2S6q/UfRJBT3e6gQmVjwZROTODQsYW5ZNwOu78hG62Y3fWlebaVOL0C+w== dependencies: convert-source-map "^1.5.0" istanbul-lib-instrument "^1.7.3" loader-utils "^1.1.0" schema-utils "^0.3.0" istanbul-lib-coverage@^1.2.1: version "1.2.1" resolved 
"https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0" integrity sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ== istanbul-lib-instrument@^1.7.3: version "1.10.2" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca" integrity sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A== dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" babylon "^6.18.0" istanbul-lib-coverage "^1.2.1" semver "^5.3.0" istanbul@^0.4.0: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" integrity sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs= dependencies: abbrev "1.0.x" async "1.x" escodegen "1.8.x" esprima "2.7.x" glob "^5.0.15" handlebars "^4.0.1" js-yaml "3.x" mkdirp "0.5.x" nopt "3.x" once "1.x" resolve "1.1.x" supports-color "^3.1.0" which "^1.1.1" wordwrap "^1.0.0" jasmine-core@^3.3, jasmine-core@^3.4.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-3.5.0.tgz#132c23e645af96d85c8bca13c8758b18429fc1e4" integrity sha512-nCeAiw37MIMA9w9IXso7bRaLl+c/ef3wnxsoSAlYrzS+Ot0zTG6nU8G/cIfGkqpkjX2wNaIW9RFG0TwIFnG6bA== jquery@^3.4.0: version "3.4.1" resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.4.1.tgz#714f1f8d9dde4bdfa55764ba37ef214630d80ef2" integrity sha512-36+AdBzCL+y6qjw5Tx7HgzeGCzC81MDDgaUP8ld2zhx58HdqXGoBd+tHdrBMiyjGQs0Hxs/MLZTu/eHNJJuWPw== js-levenshtein@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-stringify@^1.0.1: version "1.0.2" resolved 
"https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" integrity sha1-Fzb939lyTyijaCrcYjCufk6Weds= "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@3.x, js-yaml@^3.13.1: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity 
sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== json-schema-traverse@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= json3@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== json5@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== dependencies: minimist "^1.2.0" json5@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== dependencies: minimist "^1.2.0" 
jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= optionalDependencies: graceful-fs "^4.1.6" jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" jstransformer@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" integrity sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM= dependencies: is-promise "^2.0.0" promise "^7.0.1" karma-chrome-launcher@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" integrity sha512-uf/ZVpAabDBPvdPdveyk1EPgbnloPvFFGgmRhYLTDH7gEB4nZdSBk8yTU47w1g/drLSx5uMOkjKk7IWKfWg/+w== dependencies: fs-access "^1.0.0" which "^1.2.1" karma-coverage@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/karma-coverage/-/karma-coverage-1.1.2.tgz#cc09dceb589a83101aca5fe70c287645ef387689" integrity sha512-eQawj4Cl3z/CjxslYy9ariU4uDh7cCNFZHNWXWRpl0pNeblY/4wHR7M7boTYXWrn9bY0z2pZmr11eKje/S/hIw== dependencies: dateformat "^1.0.6" istanbul "^0.4.0" lodash "^4.17.0" minimatch "^3.0.0" source-map "^0.5.1" karma-jasmine@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-2.0.1.tgz#26e3e31f2faf272dd80ebb0e1898914cc3a19763" integrity sha512-iuC0hmr9b+SNn1DaUD2QEYtUxkS1J+bSJSn7ejdEexs7P8EYvA1CWkEdrDQ+8jVH3AgWlCNwjYsT1chjcNW9lA== dependencies: jasmine-core "^3.3" karma-sourcemap-loader@^0.3.7: version "0.3.7" resolved "https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8" integrity sha1-kTIsd/jxPUb+0GKwQuEAnUxFBdg= 
dependencies: graceful-fs "^4.1.2" karma-spec-reporter@^0.0.32: version "0.0.32" resolved "https://registry.yarnpkg.com/karma-spec-reporter/-/karma-spec-reporter-0.0.32.tgz#2e9c7207ea726771260259f82becb543209e440a" integrity sha1-LpxyB+pyZ3EmAln4K+y1QyCeRAo= dependencies: colors "^1.1.2" karma-webpack@^3.0.5: version "3.0.5" resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-3.0.5.tgz#1ff1e3a690fb73ae95ee95f9ab58f341cfc7b40f" integrity sha512-nRudGJWstvVuA6Tbju9tyGUfXTtI1UXMXoRHVmM2/78D0q6s/Ye2IC157PKNDC15PWFGR0mVIRtWLAdcfsRJoA== dependencies: async "^2.0.0" babel-runtime "^6.0.0" loader-utils "^1.0.0" lodash "^4.0.0" source-map "^0.5.6" webpack-dev-middleware "^2.0.6" karma@^4.1.0: version "4.4.1" resolved "https://registry.yarnpkg.com/karma/-/karma-4.4.1.tgz#6d9aaab037a31136dc074002620ee11e8c2e32ab" integrity sha512-L5SIaXEYqzrh6b1wqYC42tNsFMx2PWuxky84pK9coK09MvmL7mxii3G3bZBh/0rvD27lqDd0le9jyhzvwif73A== dependencies: bluebird "^3.3.0" body-parser "^1.16.1" braces "^3.0.2" chokidar "^3.0.0" colors "^1.1.0" connect "^3.6.0" di "^0.0.1" dom-serialize "^2.2.0" flatted "^2.0.0" glob "^7.1.1" graceful-fs "^4.1.2" http-proxy "^1.13.0" isbinaryfile "^3.0.0" lodash "^4.17.14" log4js "^4.0.0" mime "^2.3.1" minimatch "^3.0.2" optimist "^0.6.1" qjobs "^1.1.4" range-parser "^1.2.0" rimraf "^2.6.0" safe-buffer "^5.0.1" socket.io "2.1.1" source-map "^0.6.1" tmp "0.0.33" useragent "2.3.0" killable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== lazy-cache@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" integrity sha1-odePw6UEdMuAhF07O24dpJpEbo4= lcid@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== dependencies: invert-kv "^2.0.0" less-loader@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-5.0.0.tgz#498dde3a6c6c4f887458ee9ed3f086a12ad1b466" integrity sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg== dependencies: clone "^2.1.1" loader-utils "^1.1.0" pify "^4.0.1" less@^3.9.0: version "3.10.3" resolved "https://registry.yarnpkg.com/less/-/less-3.10.3.tgz#417a0975d5eeecc52cff4bcfa3c09d35781e6792" integrity sha512-vz32vqfgmoxF1h3K4J+yKCtajH0PWmjkIFgbs5d78E/c/e+UQTnI+lWK+1eQRE95PXM2mC3rJlLSSP9VQHnaow== dependencies: clone "^2.1.2" optionalDependencies: errno "^0.1.1" graceful-fs "^4.1.2" image-size "~0.5.0" mime "^1.4.1" mkdirp "^0.5.0" promise "^7.1.1" request "^2.83.0" source-map "~0.6.0" levn@~0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" pify "^2.0.0" pinkie-promise "^2.0.0" strip-bom "^2.0.0" loader-runner@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== loader-utils@1.2.3, loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== dependencies: big.js "^5.2.2" emojis-list "^2.0.0" json5 "^1.0.1" loader-utils@^0.2.10, loader-utils@^0.2.16: version "0.2.17" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= dependencies: big.js "^3.1.3" emojis-list "^2.0.0" json5 "^0.5.0" object-assign "^4.0.1" locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" lodash@^4.0.0, lodash@^4.17.0, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.3, lodash@^4.17.4: version "4.17.15" resolved 
"https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== log-symbols@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== dependencies: chalk "^2.0.1" log4js@^4.0.0: version "4.5.1" resolved "https://registry.yarnpkg.com/log4js/-/log4js-4.5.1.tgz#e543625e97d9e6f3e6e7c9fc196dd6ab2cae30b5" integrity sha512-EEEgFcE9bLgaYUKuozyFfytQM2wDHtXn4tAN41pkaxpNjAykv11GVdeI4tHtmPWW4Xrgh9R/2d7XYghDVjbKKw== dependencies: date-format "^2.0.0" debug "^4.1.1" flatted "^2.0.0" rfdc "^1.1.4" streamroller "^1.0.6" loglevel@^1.6.4: version "1.6.6" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.6.tgz#0ee6300cc058db6b3551fa1c4bf73b83bb771312" integrity sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ== loglevelnext@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/loglevelnext/-/loglevelnext-1.0.5.tgz#36fc4f5996d6640f539ff203ba819641680d75a2" integrity sha512-V/73qkPuJmx4BcBF19xPBr+0ZRVBhc4POxvZTZdMeXpJ4NItXSJ/MSwuFT0kQJlCbXvdlZoQQ/418bS1y9Jh6A== dependencies: es6-symbol "^3.1.1" object.assign "^4.1.0" longest@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" integrity sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc= loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" loud-rejection@^1.0.0, loud-rejection@^1.6.0: version "1.6.0" resolved 
"https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= dependencies: currently-unhandled "^0.4.1" signal-exit "^3.0.0" lower-case@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= lru-cache@4.1.x: version "4.1.5" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" make-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== dependencies: pify "^4.0.1" semver "^5.6.0" mamacro@^0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== map-age-cleaner@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== dependencies: p-defer "^1.0.0" map-cache@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" 
integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" md5.js@^1.3.4: version "1.3.5" resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== dependencies: hash-base "^3.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= mem@^4.0.0: version "4.3.0" resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== dependencies: map-age-cleaner "^0.1.1" mimic-fn "^2.0.0" p-is-promise "^2.0.0" memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= dependencies: errno "^0.1.3" readable-stream "^2.0.1" memory-fs@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== dependencies: errno "^0.1.3" readable-stream "^2.0.1" meow@^3.3.0: version "3.7.0" resolved 
"https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= dependencies: camelcase-keys "^2.0.0" decamelize "^1.1.2" loud-rejection "^1.0.0" map-obj "^1.0.1" minimist "^1.1.3" normalize-package-data "^2.3.4" object-assign "^4.0.1" read-pkg-up "^1.0.1" redent "^1.0.0" trim-newlines "^1.0.0" merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" braces "^2.3.1" define-property "^2.0.2" extend-shallow "^3.0.2" extglob "^2.0.4" fragment-cache "^0.2.1" kind-of "^6.0.2" nanomatch "^1.2.9" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.2" miller-rabin@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== dependencies: bn.js "^4.0.0" brorand "^1.0.1" mime-db@1.42.0, "mime-db@>= 1.40.0 < 2": version "1.42.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.42.0.tgz#3e252907b4c7adb906597b4b65636272cf9e7bac" integrity sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ== mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.19, 
mime-types@~2.1.24: version "2.1.25" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.25.tgz#39772d46621f93e2a80a856c53b86a62156a6437" integrity sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg== dependencies: mime-db "1.42.0" mime@1.6.0, mime@^1.4.1: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.1.0, mime@^2.3.1, mime@^2.4.4: version "2.4.4" resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== mimic-fn@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== mini-css-extract-plugin@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz#a3f13372d6fcde912f3ee4cd039665704801e3b9" integrity sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw== dependencies: loader-utils "^1.1.0" normalize-url "^2.0.1" schema-utils "^1.0.0" webpack-sources "^1.1.0" minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" 
integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= "minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@0.0.8: version "0.0.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= minimist@^1.1.3, minimist@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= minipass@^2.6.0, minipass@^2.8.6, minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" minizlib@^1.2.1: version "1.3.3" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== dependencies: minipass "^2.9.0" mississippi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== dependencies: concat-stream "^1.5.0" duplexify "^3.4.2" end-of-stream "^1.1.0" flush-write-stream "^1.0.0" from2 "^2.1.0" parallel-transform 
"^1.1.0" pump "^3.0.0" pumpify "^1.3.3" stream-each "^1.1.0" through2 "^2.0.0" mixin-deep@^1.2.0: version "1.3.2" resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= dependencies: minimist "0.0.8" move-concurrently@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= dependencies: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.3" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== ms@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== multicast-dns-service-types@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= multicast-dns@^6.0.1: version "6.2.3" resolved 
"https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" integrity sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== dependencies: dns-packet "^1.3.1" thunky "^1.0.2" nan@^2.12.1: version "2.14.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" define-property "^2.0.2" extend-shallow "^3.0.2" fragment-cache "^0.2.1" is-windows "^1.0.2" kind-of "^6.0.2" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" needle@^2.2.1: version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" negotiator@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== neo-async@^2.5.0, neo-async@^2.6.0, neo-async@^2.6.1: version "2.6.1" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.1.tgz#ac27ada66167fa8849a6addd837f6b189ad2081c" integrity sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw== next-tick@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" 
integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== no-case@^2.2.0: version "2.3.2" resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== dependencies: lower-case "^1.1.1" node-forge@0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579" integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ== node-libs-browser@^2.2.0, node-libs-browser@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== dependencies: assert "^1.1.1" browserify-zlib "^0.2.0" buffer "^4.3.0" console-browserify "^1.1.0" constants-browserify "^1.0.0" crypto-browserify "^3.11.0" domain-browser "^1.1.1" events "^3.0.0" https-browserify "^1.0.0" os-browserify "^0.3.0" path-browserify "0.0.1" process "^0.11.10" punycode "^1.2.4" querystring-es3 "^0.2.0" readable-stream "^2.3.3" stream-browserify "^2.0.1" stream-http "^2.7.2" string_decoder "^1.0.0" timers-browserify "^2.0.4" tty-browserify "0.0.0" url "^0.11.0" util "^0.11.0" vm-browserify "^1.0.1" node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" needle "^2.2.1" 
nopt "^4.0.1" npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.2.7" rimraf "^2.6.1" semver "^5.3.0" tar "^4" node-releases@^1.1.40: version "1.1.41" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.41.tgz#57674a82a37f812d18e3b26118aefaf53a00afed" integrity sha512-+IctMa7wIs8Cfsa8iYzeaLTFwv5Y4r5jZud+4AnfymzeEXKBCavFX0KBgzVaPVqf0ywa6PrO8/b+bPqdwjGBSg== dependencies: semver "^6.3.0" nopt@3.x: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" integrity sha1-xkZdvwirzU2zWTF/eaxopkayj/k= dependencies: abbrev "1" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: remove-trailing-separator "^1.0.1" normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity 
sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= normalize-url@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== dependencies: prepend-http "^2.0.0" query-string "^5.0.1" sort-keys "^2.0.0" npm-bundled@^1.0.1: version "1.0.6" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== npm-packlist@^1.1.6: version "1.4.6" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.6.tgz#53ba3ed11f8523079f1457376dd379ee4ea42ff4" integrity sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" gauge "~2.7.3" set-blocking "~2.0.0" nth-check@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== dependencies: boolbase "~1.0.0" null-check@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" integrity 
sha1-l33/1xdgErnsMNKjnbXPcqBDnt0= null-loader@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-1.0.0.tgz#90e85798e50e9dd1d568495a44e74829dec26744" integrity sha512-mYLDjDVTkjTlFoidxRhzO75rdcwfVXfw5G5zpj8sXnBkHtKJxMk4hTcRR4i5SOhDB6EvcQuYriy6IV23eq6uog== dependencies: loader-utils "^1.2.3" schema-utils "^1.0.0" num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" integrity sha1-8MaapQ78lbhmwYb0AKM3acsvEpE= object-copy@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" object-inspect@^1.6.0: version "1.7.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" integrity 
sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== object-is@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.0.1.tgz#0aa60ec9989a0b3ed795cf4d06f62cf1ad6539b6" integrity sha1-CqYOyZiaCz7Xlc9NBvYs8a1lObY= object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== dependencies: define-properties "^1.1.2" function-bind "^1.1.1" has-symbols "^1.0.0" object-keys "^1.0.11" object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= dependencies: define-properties "^1.1.2" es-abstract "^1.5.1" object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= dependencies: isobject "^3.0.1" obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== 
on-finished@~2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@1.x, once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" opn@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== dependencies: is-wsl "^1.1.0" optimist@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.6" levn "~0.3.0" prelude-ls "~1.1.2" type-check "~0.3.2" word-wrap "~1.2.3" original@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== dependencies: url-parse "^1.4.3" os-browserify@^0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-locale@^3.0.0, os-locale@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== dependencies: execa "^1.0.0" lcid "^2.0.0" mem "^4.0.0" os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== p-limit@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.1.tgz#aa07a788cc3151c939b5131f63570f0dd2009537" integrity 
sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg== dependencies: p-try "^2.0.0" p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-map@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== p-retry@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== dependencies: retry "^0.12.0" p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@~1.0.5: version "1.0.10" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.10.tgz#4328badb5086a426aa90f541977d4955da5c9732" integrity sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw== parallel-transform@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== dependencies: cyclist "^1.0.1" inherits "^2.0.3" readable-stream "^2.1.5" param-case@2.1.x: version "2.1.1" resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= dependencies: no-case "^2.2.0" 
parse-asn1@^5.0.0: version "5.1.5" resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== dependencies: asn1.js "^4.0.0" browserify-aes "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.0" pbkdf2 "^3.0.3" safe-buffer "^5.1.1" parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse-json@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= dependencies: error-ex "^1.3.1" json-parse-better-errors "^1.0.1" parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= parseqs@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" integrity sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0= dependencies: better-assert "~1.0.0" parseuri@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" integrity sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo= dependencies: better-assert "~1.0.0" parseurl@~1.3.2, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascalcase@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= 
path-browserify@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-is-inside@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= path-key@^2.0.0, path-key@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= path-parse@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= 
path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify "^2.0.0" pinkie-promise "^2.0.0" pbkdf2@^3.0.3: version "3.0.17" resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" ripemd160 "^2.0.1" safe-buffer "^5.0.1" sha.js "^2.4.8" performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= picomatch@^2.0.4: version "2.1.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.1.1.tgz#ecdfbea7704adb5fe6fb47f9866c4c0e15e905c5" integrity sha512-OYMyqkKzK7blWO/+XZYP6w8hH0LDvkBvdvKukti+7kqYFCiEAk+gI3DWnryapc0Dau05ugGTy0foQ6mqn4AHYA== pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= pkg-dir@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== dependencies: find-up "^3.0.0" portfinder@^1.0.25: version "1.0.25" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.25.tgz#254fd337ffba869f4b9d37edc298059cb4d35eca" integrity sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg== dependencies: async "^2.6.2" debug "^3.1.1" mkdirp "^0.5.1" posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= postcss-load-config@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.0.tgz#c84d692b7bb7b41ddced94ee62e8ab31b417b003" integrity sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q== dependencies: cosmiconfig "^5.0.0" import-cwd "^2.0.0" postcss-loader@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== dependencies: loader-utils "^1.1.0" postcss "^7.0.0" postcss-load-config "^2.0.0" schema-utils "^1.0.0" postcss-modules-extract-imports@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== dependencies: postcss "^7.0.5" postcss-modules-local-by-default@^2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.6.tgz#dd9953f6dd476b5fd1ef2d8830c8929760b56e63" integrity sha512-oLUV5YNkeIBa0yQl7EYnxMgy4N6noxmiwZStaEJUSe2xPMcdNc8WmBQuQCx18H5psYbVxz8zoHk0RAAYZXP9gA== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-value-parser "^3.3.1" postcss-modules-scope@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.1.0.tgz#ad3f5bf7856114f6fcab901b0502e2a2bc39d4eb" integrity sha512-91Rjps0JnmtUB0cujlc8KIKCsJXWjzuxGeT/+Q2i2HXKZ7nBUeF9YQTZZTNvHVoNYj1AthsjnGLtqDUE0Op79A== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-modules-values@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-2.0.0.tgz#479b46dc0c5ca3dc7fa5270851836b9ec7152f64" integrity sha512-Ki7JZa7ff1N3EIMlPnGTZfUMe69FFwiQPnVSXC9mnn3jozCRBYIxiZd44yJOV2AmabOo4qFf8s0dC/+lweG7+w== dependencies: icss-replace-symbols "^1.1.0" postcss "^7.0.6" postcss-selector-parser@^6.0.0: version "6.0.2" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz#934cf799d016c83411859e09dcecade01286ec5c" integrity sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg== dependencies: cssesc "^3.0.0" indexes-of "^1.0.1" uniq "^1.0.1" postcss-value-parser@^3.3.0, postcss-value-parser@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== postcss-value-parser@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz#482282c09a42706d1fc9a069b73f44ec08391dc9" integrity 
sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ== postcss@^7.0.0, postcss@^7.0.14, postcss@^7.0.23, postcss@^7.0.5, postcss@^7.0.6: version "7.0.23" resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.23.tgz#9f9759fad661b15964f3cfc3140f66f1e05eadc1" integrity sha512-hOlMf3ouRIFXD+j2VJecwssTwbvsPGJVMzupptg+85WA+i7MwyrydmQAgY3R+m0Bc0exunhbJmijy8u8+vufuQ== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= prepend-http@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= pretty-error@^2.0.2: version "2.1.1" resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= dependencies: renderkid "^2.0.1" utila "~0.4" private@^0.1.6: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= promise-inflight@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= promise@^7.0.1, promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== dependencies: asap "~2.0.3" proxy-addr@~2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== dependencies: forwarded "~0.1.2" ipaddr.js "1.9.0" prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= psl@^1.1.24: version "1.4.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.4.0.tgz#5dd26156cdb69fa1fdb8ab1991667d3f80ced7c2" integrity sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw== public-encrypt@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== dependencies: bn.js "^4.1.0" browserify-rsa "^4.0.0" create-hash "^1.1.0" parse-asn1 "^5.0.0" randombytes "^2.0.1" safe-buffer "^5.1.2" pug-attrs@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-2.0.4.tgz#b2f44c439e4eb4ad5d4ef25cac20d18ad28cc336" integrity 
sha512-TaZ4Z2TWUPDJcV3wjU3RtUXMrd3kM4Wzjbe3EWnSsZPsJ3LDI0F3yCnf2/W7PPFF+edUFQ0HgDL1IoxSz5K8EQ== dependencies: constantinople "^3.0.1" js-stringify "^1.0.1" pug-runtime "^2.0.5" pug-code-gen@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-2.0.2.tgz#ad0967162aea077dcf787838d94ed14acb0217c2" integrity sha512-kROFWv/AHx/9CRgoGJeRSm+4mLWchbgpRzTEn8XCiwwOy6Vh0gAClS8Vh5TEJ9DBjaP8wCjS3J6HKsEsYdvaCw== dependencies: constantinople "^3.1.2" doctypes "^1.1.0" js-stringify "^1.0.1" pug-attrs "^2.0.4" pug-error "^1.3.3" pug-runtime "^2.0.5" void-elements "^2.0.1" with "^5.0.0" pug-error@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-1.3.3.tgz#f342fb008752d58034c185de03602dd9ffe15fa6" integrity sha512-qE3YhESP2mRAWMFJgKdtT5D7ckThRScXRwkfo+Erqga7dyJdY3ZquspprMCj/9sJ2ijm5hXFWQE/A3l4poMWiQ== pug-filters@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-3.1.1.tgz#ab2cc82db9eeccf578bda89130e252a0db026aa7" integrity sha512-lFfjNyGEyVWC4BwX0WyvkoWLapI5xHSM3xZJFUhx4JM4XyyRdO8Aucc6pCygnqV2uSgJFaJWW3Ft1wCWSoQkQg== dependencies: clean-css "^4.1.11" constantinople "^3.0.1" jstransformer "1.0.0" pug-error "^1.3.3" pug-walk "^1.1.8" resolve "^1.1.6" uglify-js "^2.6.1" pug-lexer@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-4.1.0.tgz#531cde48c7c0b1fcbbc2b85485c8665e31489cfd" integrity sha512-i55yzEBtjm0mlplW4LoANq7k3S8gDdfC6+LThGEvsK4FuobcKfDAwt6V4jKPH9RtiE3a2Akfg5UpafZ1OksaPA== dependencies: character-parser "^2.1.1" is-expression "^3.0.0" pug-error "^1.3.3" pug-linker@^3.0.6: version "3.0.6" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-3.0.6.tgz#f5bf218b0efd65ce6670f7afc51658d0f82989fb" integrity sha512-bagfuHttfQOpANGy1Y6NJ+0mNb7dD2MswFG2ZKj22s8g0wVsojpRlqveEQHmgXXcfROB2RT6oqbPYr9EN2ZWzg== dependencies: pug-error "^1.3.3" pug-walk "^1.1.8" pug-load@^2.0.12: version "2.0.12" resolved 
"https://registry.yarnpkg.com/pug-load/-/pug-load-2.0.12.tgz#d38c85eb85f6e2f704dea14dcca94144d35d3e7b" integrity sha512-UqpgGpyyXRYgJs/X60sE6SIf8UBsmcHYKNaOccyVLEuT6OPBIMo6xMPhoJnqtB3Q3BbO4Z3Bjz5qDsUWh4rXsg== dependencies: object-assign "^4.1.0" pug-walk "^1.1.8" pug-parser@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-5.0.1.tgz#03e7ada48b6840bd3822f867d7d90f842d0ffdc9" integrity sha512-nGHqK+w07p5/PsPIyzkTQfzlYfuqoiGjaoqHv1LjOv2ZLXmGX1O+4Vcvps+P4LhxZ3drYSljjq4b+Naid126wA== dependencies: pug-error "^1.3.3" token-stream "0.0.1" pug-runtime@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-2.0.5.tgz#6da7976c36bf22f68e733c359240d8ae7a32953a" integrity sha512-P+rXKn9un4fQY77wtpcuFyvFaBww7/91f3jHa154qU26qFAnOe6SW1CbIDcxiG5lLK9HazYrMCCuDvNgDQNptw== pug-strip-comments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-1.0.4.tgz#cc1b6de1f6e8f5931cf02ec66cdffd3f50eaf8a8" integrity sha512-i5j/9CS4yFhSxHp5iKPHwigaig/VV9g+FgReLJWWHEHbvKsbqL0oP/K5ubuLco6Wu3Kan5p7u7qk8A4oLLh6vw== dependencies: pug-error "^1.3.3" pug-walk@^1.1.8: version "1.1.8" resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-1.1.8.tgz#b408f67f27912f8c21da2f45b7230c4bd2a5ea7a" integrity sha512-GMu3M5nUL3fju4/egXwZO0XLi6fW/K3T3VTgFQ14GxNi8btlxgT5qZL//JwZFm/2Fa64J/PNS8AZeys3wiMkVA== pug@^2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/pug/-/pug-2.0.4.tgz#ee7682ec0a60494b38d48a88f05f3b0ac931377d" integrity sha512-XhoaDlvi6NIzL49nu094R2NA6P37ijtgMDuWE+ofekDChvfKnzFal60bhSdiy8y2PBO6fmz3oMEIcfpBVRUdvw== dependencies: pug-code-gen "^2.0.2" pug-filters "^3.1.1" pug-lexer "^4.1.0" pug-linker "^3.0.6" pug-load "^2.0.12" pug-parser "^5.0.1" pug-runtime "^2.0.5" pug-strip-comments "^1.0.4" pump@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" integrity 
sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pumpify@^1.3.3: version "1.5.1" resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" inherits "^2.0.3" pump "^2.0.0" punycode@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= punycode@^1.2.4, punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== qjobs@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" integrity sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg== qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity 
sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== query-string@^5.0.1: version "5.1.1" resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== dependencies: decode-uri-component "^0.2.0" object-assign "^4.1.0" strict-uri-encode "^1.0.0" querystring-es3@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= querystring@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= querystringify@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.1.1.tgz#60e5a5fd64a7f8bfa4d2ab2ed6fdf4c85bad154e" integrity sha512-w7fLxIRCRT7U8Qu53jQnJyPkYZIaR4n5151KMfcJlO/A9397Wxb1amJvROTK6TOnp7PfoAmg/qXiNHI+08jRfA== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" randomfill@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== dependencies: randombytes "^2.0.5" safe-buffer "^5.1.0" range-parser@^1.0.3, range-parser@^1.2.0, range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity 
sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== dependencies: bytes "3.1.0" http-errors "1.7.2" iconv-lite "0.4.24" unpipe "1.0.0" raw-loader@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-2.0.0.tgz#e2813d9e1e3f80d1bbade5ad082e809679e20c26" integrity sha512-kZnO5MoIyrojfrPWqrhFNLZemIAX8edMOCp++yC5RKxzFB3m92DqKNhKlU6+FvpOhWtvyh3jOaD7J6/9tpdIKg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" path-type "^1.0.0" "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity 
sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@^3.0.6, readable-stream@^3.1.1: version "3.4.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc" integrity sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" util-deprecate "^1.0.1" readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== dependencies: graceful-fs "^4.1.11" micromatch "^3.1.10" readable-stream "^2.0.2" readdirp@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839" integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ== dependencies: picomatch "^2.0.4" redent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= dependencies: indent-string "^2.1.0" strip-indent "^1.0.1" regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" regenerate@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" 
integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== regenerator-runtime@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.2: version "0.13.3" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5" integrity sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw== regenerator-transform@^0.14.0: version "0.14.1" resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.1.tgz#3b2fce4e1ab7732c08f665dfdb314749c7ddd2fb" integrity sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ== dependencies: private "^0.1.6" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" safe-regex "^1.1.0" regexp.prototype.flags@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.2.0.tgz#6b30724e306a27833eeb171b66ac8890ba37e41c" integrity sha512-ztaw4M1VqgMwl9HlPpOuiYgItcHlunW0He2fE6eNfT6E/CF2FtYi9ofOYe4mKntstYk0Fyh/rDRBdS3AnxjlrA== dependencies: define-properties "^1.1.2" regexpu-core@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6" integrity sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg== dependencies: regenerate "^1.4.0" regenerate-unicode-properties "^8.1.0" 
regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" unicode-match-property-value-ecmascript "^1.1.0" regjsgen@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.1.tgz#48f0bf1a5ea205196929c0d9798b42d1ed98443c" integrity sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg== regjsparser@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" relateurl@0.2.x: version "0.2.7" resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= renderkid@^2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== dependencies: css-select "^1.1.0" dom-converter "^0.2" htmlparser2 "^3.3.0" strip-ansi "^3.0.0" utila "^0.4.0" repeat-element@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^1.5.2, repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved 
"https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" request@^2.83.0: version "2.88.0" resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.0" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign "~0.9.0" performance-now "^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.4.3" tunnel-agent "^0.6.0" uuid "^3.3.2" require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= require-main-filename@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= resolve-cwd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= dependencies: 
resolve-from "^3.0.0" resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= dependencies: expand-tilde "^2.0.0" global-modules "^1.0.0" resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" integrity sha1-six699nWiBvItuZTM17rywoYh0g= resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= resolve@^1.1.6, resolve@^1.10.0, resolve@^1.3.2, resolve@^1.8.1: version "1.12.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.2.tgz#08b12496d9aa8659c75f534a8f05f0d892fff594" integrity sha512-cAVTI2VLHWYsGOirfeYVVQ7ZDejtQ9fp4YhYckWDEkFfqbVjaT11iM8k6xSAfGFMM+gDpZjMnFssPu8we+mqFw== dependencies: path-parse "^1.0.6" ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= rfdc@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.1.4.tgz#ba72cc1367a0ccd9cf81a870b3b58bd3ad07f8c2" integrity sha512-5C9HXdzK8EAqN7JDif30jqsBzavB7wLpaubisuQIGHWf2gUXSpzy6ArX/+Da8RjFpagWsCn+pIgxTMAmKw9Zug== right-align@^0.1.1: version "0.1.3" resolved 
"https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" integrity sha1-YTObci/mo1FWiSENJOFMlhSGE+8= dependencies: align-text "^0.1.1" rimraf@^2.5.4, rimraf@^2.6.0, rimraf@^2.6.1, rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== dependencies: hash-base "^3.0.0" inherits "^2.0.1" run-queue@^1.0.0, run-queue@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= dependencies: aproba "^1.1.1" safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg== safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= dependencies: ret "~0.1.10" "safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, 
safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== schema-utils@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf" integrity sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8= dependencies: ajv "^5.0.0" schema-utils@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== dependencies: ajv "^6.1.0" ajv-errors "^1.0.0" ajv-keywords "^3.1.0" select-hose@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= selfsigned@^1.10.7: version "1.10.7" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.7.tgz#da5819fd049d5574f28e88a9bcc6dbc6e6f3906b" integrity sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA== dependencies: node-forge "0.9.0" "semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== semver@^6.3.0: version "6.3.0" resolved 
"https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== dependencies: debug "2.6.9" depd "~1.1.2" destroy "~1.0.4" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" http-errors "~1.7.2" mime "1.6.0" ms "2.1.1" on-finished "~2.3.0" range-parser "~1.2.1" statuses "~1.5.0" serialize-javascript@^1.7.0: version "1.9.1" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-1.9.1.tgz#cfc200aef77b600c47da9bb8149c943e798c2fdb" integrity sha512-0Vb/54WJ6k5v8sSWN09S0ora+Hnr+cX40r9F170nT+mSkaxltoE/7R3OrIdBSUv1OoiobH1QoWQbCnAO+e8J1A== serve-index@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= dependencies: accepts "~1.3.4" batch "0.6.1" debug "2.6.9" escape-html "~1.0.3" http-errors "~1.6.2" mime-types "~2.1.17" parseurl "~1.3.2" serve-static@1.14.1: version "1.14.1" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" send "0.17.1" set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.3" split-string "^3.0.1" setimmediate@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== setprototypeof@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= dependencies: shebang-regex "^1.0.0" shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity 
sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" isobject "^3.0.0" snapdragon-util "^3.0.1" snapdragon-util@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: base "^0.11.1" debug "^2.2.0" define-property "^0.2.5" extend-shallow "^2.0.1" map-cache "^0.2.2" source-map "^0.5.6" source-map-resolve "^0.5.0" use "^3.1.0" socket.io-adapter@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz#2a805e8a14d6372124dd9159ad4502f8cb07f06b" integrity sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs= socket.io-client@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.1.1.tgz#dcb38103436ab4578ddb026638ae2f21b623671f" integrity sha512-jxnFyhAuFxYfjqIgduQlhzqTcOEQSn+OHKVfAxWaNWa7ecP7xSNk2Dx/3UEsDcY7NcFafxvNvKPmmO7HTwTxGQ== dependencies: backo2 "1.0.2" base64-arraybuffer "0.1.5" component-bind "1.0.0" component-emitter "1.2.1" debug "~3.1.0" engine.io-client "~3.2.0" has-binary2 "~1.0.2" has-cors "1.1.0" indexof "0.0.1" object-component "0.0.3" parseqs "0.0.5" parseuri "0.0.5" socket.io-parser "~3.2.0" to-array "0.1.4" socket.io-parser@~3.2.0: version "3.2.0" resolved 
"https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.2.0.tgz#e7c6228b6aa1f814e6148aea325b51aa9499e077" integrity sha512-FYiBx7rc/KORMJlgsXysflWx/RIvtqZbyGLlHZvjfmPTPeuD/I8MaW7cfFrj5tRltICJdgwflhfZ3NVVbVLFQA== dependencies: component-emitter "1.2.1" debug "~3.1.0" isarray "2.0.1" socket.io@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.1.1.tgz#a069c5feabee3e6b214a75b40ce0652e1cfb9980" integrity sha512-rORqq9c+7W0DAK3cleWNSyfv/qKXV99hV4tZe+gGLfBECw3XEhBy7x85F3wypA9688LKjtwO9pX9L33/xQI8yA== dependencies: debug "~3.1.0" engine.io "~3.2.0" has-binary2 "~1.0.2" socket.io-adapter "~1.1.0" socket.io-client "2.1.1" socket.io-parser "~3.2.0" sockjs-client@1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.4.0.tgz#c9f2568e19c8fd8173b4997ea3420e0bb306c7d5" integrity sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g== dependencies: debug "^3.2.5" eventsource "^1.0.7" faye-websocket "~0.11.1" inherits "^2.0.3" json3 "^3.3.2" url-parse "^1.4.3" sockjs@0.3.19: version "0.3.19" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d" integrity sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw== dependencies: faye-websocket "^0.10.0" uuid "^3.0.1" sort-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= dependencies: is-plain-obj "^1.0.0" source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-resolve@^0.5.0: version "0.5.2" resolved 
"https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" urix "^0.1.0" source-map-support@~0.5.12: version "0.5.16" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-url@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= source-map@^0.5.0, source-map@^0.5.1, source-map@^0.5.6, source-map@^0.5.7, source-map@~0.5.1: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" integrity sha1-2rc/vPwrqBm03gO9b26qSBZLP50= dependencies: amdefine ">=0.0.4" spdx-correct@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: spdx-expression-parse 
"^3.0.0" spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== spdx-expression-parse@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: version "3.0.5" resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== dependencies: debug "^4.1.0" detect-node "^2.0.4" hpack.js "^2.1.6" obuf "^1.1.2" readable-stream "^3.0.6" wbuf "^1.7.3" spdy@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.1.tgz#6f12ed1c5db7ea4f24ebb8b89ba58c87c08257f2" integrity sha512-HeZS3PBdMA+sZSu0qwpCxl3DeALD5ASx8pAX0jZdKXSpPWbQ6SYGnlg3BBmYLx5LtiZrmkAZfErCm2oECBcioA== dependencies: debug "^4.1.0" handle-thing "^2.0.0" http-deceiver "^1.2.7" select-hose "^2.0.0" spdy-transport "^3.0.0" split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow 
"^3.0.0" sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" ssri@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.1.tgz#2a3c41b28dd45b62b63676ecb74001265ae9edd8" integrity sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA== dependencies: figgy-pudding "^3.5.1" static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== dependencies: inherits "~2.0.1" readable-stream "^2.0.2" stream-each@^1.1.0: version "1.2.3" resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== 
dependencies: end-of-stream "^1.1.0" stream-shift "^1.0.0" stream-http@^2.7.2: version "2.8.3" resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== dependencies: builtin-status-codes "^3.0.0" inherits "^2.0.1" readable-stream "^2.3.6" to-arraybuffer "^1.0.0" xtend "^4.0.0" stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= streamroller@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-1.0.6.tgz#8167d8496ed9f19f05ee4b158d9611321b8cacd9" integrity sha512-3QC47Mhv3/aZNFpDDVO44qQb9gwB9QggMEE0sQmkTAwBVYdBRWISdsywlkfm5II1Q5y/pmrHflti/IgmIzdDBg== dependencies: async "^2.6.2" date-format "^2.0.0" debug "^3.2.6" fs-extra "^7.0.1" lodash "^4.17.14" strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" "string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== dependencies: emoji-regex "^7.0.1" is-fullwidth-code-point "^2.0.0" strip-ansi "^5.1.0" string.prototype.trimleft@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string.prototype.trimright@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58" integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: ansi-regex "^4.1.0" strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= dependencies: get-stdin "^4.0.1" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= style-loader@^0.23.1: version "0.23.1" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.23.1.tgz#cb9154606f3e771ab6c4ab637026a1049174d925" integrity sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" supports-color@6.1.0, supports-color@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== 
dependencies: has-flag "^3.0.0" supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.0: version "3.2.3" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" tapable@^1.0.0, tapable@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tar@^4: version "4.4.13" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.13.tgz#43b364bc52888d555298637b10d60790254ab525" integrity sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA== dependencies: chownr "^1.1.1" fs-minipass "^1.2.5" minipass "^2.8.6" minizlib "^1.2.1" mkdirp "^0.5.0" safe-buffer "^5.1.2" yallist "^3.0.3" terser-webpack-plugin@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.1.tgz#61b18e40eaee5be97e771cdbb10ed1280888c2b4" integrity sha512-ZXmmfiwtCLfz8WKZyYUuuHf3dMYEjg8NrjHMb0JqHVHVOSkzp3cW2/XG1fP3tRhqEqSzMwzzRQGtAPbs4Cncxg== dependencies: cacache "^12.0.2" find-cache-dir "^2.1.0" is-wsl "^1.1.0" schema-utils "^1.0.0" serialize-javascript "^1.7.0" source-map "^0.6.1" terser "^4.1.2" webpack-sources "^1.4.0" worker-farm "^1.7.0" terser@^4.1.2: version "4.4.0" resolved 
"https://registry.yarnpkg.com/terser/-/terser-4.4.0.tgz#22c46b4817cf4c9565434bfe6ad47336af259ac3" integrity sha512-oDG16n2WKm27JO8h4y/w3iqBGAOSCtq7k8dRmrn4Wf9NouL0b2WpMHGChFGZq4nFAQy1FsNJrVQHfurXOSTmOA== dependencies: commander "^2.20.0" source-map "~0.6.1" source-map-support "~0.5.12" through2@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" xtend "~4.0.1" thunky@^1.0.2: version "1.1.0" resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== timers-browserify@^2.0.4: version "2.0.11" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== dependencies: setimmediate "^1.0.4" tmp@0.0.33, tmp@0.0.x: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" to-array@0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" integrity sha1-F+bBH3PdTz10zaek/zI46a2b+JA= to-arraybuffer@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" 
integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= dependencies: is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" extend-shallow "^3.0.2" regex-not "^1.0.2" safe-regex "^1.1.0" toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== token-stream@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-0.0.1.tgz#ceeefc717a76c4316f126d0b9dbaa55d7e7df01a" integrity sha1-zu78cXp2xDFvEm0LnbqlXX598Bo= toposort@^1.0.0: version "1.0.7" resolved 
"https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= tough-cookie@~2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: psl "^1.1.24" punycode "^1.4.1" trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= tslib@^1.9.0: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" type-is@~1.6.17, 
type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" type@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3" integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow== typedarray@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= uglify-js@3.4.x: version "3.4.10" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== dependencies: commander "~2.19.0" source-map "~0.6.1" uglify-js@^2.6.1: version "2.8.29" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" integrity sha1-KcVzMUgFe7Th913zW3qcty5qWd0= dependencies: source-map "~0.5.1" yargs "~3.10.0" optionalDependencies: uglify-to-browserify "~1.0.0" uglify-js@^3.1.4: version "3.6.9" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.6.9.tgz#85d353edb6ddfb62a9d798f36e91792249320611" integrity sha512-pcnnhaoG6RtrvHJ1dFncAe8Od6Nuy30oaJ82ts6//sGSXOP5UjBMEthiProjXmMNHOfd93sqlkztifFMcb+4yw== dependencies: commander "~2.20.3" source-map "~0.6.1" uglify-to-browserify@~1.0.0: version "1.0.2" resolved 
"https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" integrity sha1-bgkk1r2mta/jSeOabWMoUKD4grc= ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== unicode-canonical-property-names-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== unicode-match-property-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== dependencies: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" unicode-match-property-value-ecmascript@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: version "1.0.1" resolved 
"https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" set-value "^2.0.1" uniq@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= unique-filename@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== dependencies: unique-slug "^2.0.0" unique-slug@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== dependencies: imurmurhash "^0.1.4" universalify@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= dependencies: has-value "^0.3.1" isobject "^3.0.0" upath@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity 
sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== upper-case@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= uri-js@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= url-join@^2.0.2: version "2.0.5" resolved "https://registry.yarnpkg.com/url-join/-/url-join-2.0.5.tgz#5af22f18c052a000a48d7b82c5e9c2e2feeda728" integrity sha1-WvIvGMBSoACkjXuCxenC4v7tpyg= url-parse@^1.4.3: version "1.4.7" resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.7.tgz#a8a83535e8c00a316e403a5db4ac1b9b853ae278" integrity sha512-d3uaVyzDB9tQoSXFvuSUNFibTd9zxd2bkVrDRvF5TmvWWQwqE4lgYJ5m+x1DbecWkw+LK4RNl2CU1hHuOKPVlg== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" url@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= dependencies: punycode "1.3.2" querystring "0.2.0" use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== useragent@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" integrity sha512-4AoH4pxuSvHCjqLO04sU6U/uE65BYza8l/KKBS0b0hnUPWi+cQ2BpeTEwejCSx9SPV5/U03nniDTrWx5NrmKdw== dependencies: lru-cache "4.1.x" tmp 
"0.0.x" util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util.promisify@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== dependencies: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" util@0.10.3: version "0.10.3" resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= dependencies: inherits "2.0.1" util@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== dependencies: inherits "2.0.3" utila@^0.4.0, utila@~0.4: version "0.4.0" resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.3.tgz#4568f0216e78760ee1dbf3a4d2cf53e224112866" integrity sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ== v8-compile-cache@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== 
validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" vm-browserify@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== void-elements@^2.0.0, void-elements@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= watchpack@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== dependencies: chokidar "^2.0.2" graceful-fs "^4.1.2" neo-async "^2.5.0" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== dependencies: minimalistic-assert "^1.0.0" webpack-cli@^3.3.1: version "3.3.10" resolved 
"https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.10.tgz#17b279267e9b4fb549023fae170da8e6e766da13" integrity sha512-u1dgND9+MXaEt74sJR4PR7qkPxXUSQ0RXYq8x1L6Jg1MYVEmGPrH6Ah6C4arD4r0J1P5HKjRqpab36k0eIzPqg== dependencies: chalk "2.4.2" cross-spawn "6.0.5" enhanced-resolve "4.1.0" findup-sync "3.0.0" global-modules "2.0.0" import-local "2.0.0" interpret "1.2.0" loader-utils "1.2.3" supports-color "6.1.0" v8-compile-cache "2.0.3" yargs "13.2.4" webpack-dev-middleware@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-2.0.6.tgz#a51692801e8310844ef3e3790e1eacfe52326fd4" integrity sha512-tj5LLD9r4tDuRIDa5Mu9lnY2qBBehAITv6A9irqXhw/HQquZgTx3BCd57zYbU2gMDnncA49ufK2qVQSbaKJwOw== dependencies: loud-rejection "^1.6.0" memory-fs "~0.4.1" mime "^2.1.0" path-is-absolute "^1.0.0" range-parser "^1.0.3" url-join "^2.0.2" webpack-log "^1.0.1" webpack-dev-middleware@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3" integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw== dependencies: memory-fs "^0.4.1" mime "^2.4.4" mkdirp "^0.5.1" range-parser "^1.2.1" webpack-log "^2.0.0" webpack-dev-server@^3.3.1: version "3.9.0" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.9.0.tgz#27c3b5d0f6b6677c4304465ac817623c8b27b89c" integrity sha512-E6uQ4kRrTX9URN9s/lIbqTAztwEPdvzVrcmHE8EQ9YnuT9J8Es5Wrd8n9BKg1a0oZ5EgEke/EQFgUsp18dSTBw== dependencies: ansi-html "0.0.7" bonjour "^3.5.0" chokidar "^2.1.8" compression "^1.7.4" connect-history-api-fallback "^1.6.0" debug "^4.1.1" del "^4.1.1" express "^4.17.1" html-entities "^1.2.1" http-proxy-middleware "0.19.1" import-local "^2.0.0" internal-ip "^4.3.0" ip "^1.1.5" is-absolute-url "^3.0.3" killable "^1.0.1" loglevel "^1.6.4" opn "^5.5.0" p-retry "^3.0.1" portfinder "^1.0.25" schema-utils "^1.0.0" 
selfsigned "^1.10.7" semver "^6.3.0" serve-index "^1.9.1" sockjs "0.3.19" sockjs-client "1.4.0" spdy "^4.0.1" strip-ansi "^3.0.1" supports-color "^6.1.0" url "^0.11.0" webpack-dev-middleware "^3.7.2" webpack-log "^2.0.0" ws "^6.2.1" yargs "12.0.5" webpack-fix-style-only-entries@^0.2.1: version "0.2.2" resolved "https://registry.yarnpkg.com/webpack-fix-style-only-entries/-/webpack-fix-style-only-entries-0.2.2.tgz#60331c608b944ac821a3b6f2ae491a6d79ba40eb" integrity sha512-0wcrLCnISP8htV0NP1mT0e2mHhfjGQdNk82s8BTLVvF7rXuoJuUUzP3aCUXnRqlLgmTBx5WgqPhnczjatl+iSQ== webpack-log@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-1.2.0.tgz#a4b34cda6b22b518dbb0ab32e567962d5c72a43d" integrity sha512-U9AnICnu50HXtiqiDxuli5gLB5PGBo7VvcHx36jRZHwK4vzOYLbImqT4lwWwoMHdQWwEKw736fCHEekokTEKHA== dependencies: chalk "^2.1.0" log-symbols "^2.1.0" loglevelnext "^1.0.1" uuid "^3.1.0" webpack-log@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== dependencies: ansi-colors "^3.0.0" uuid "^3.3.2" webpack-shell-plugin@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/webpack-shell-plugin/-/webpack-shell-plugin-0.5.0.tgz#29b8a1d80ddeae0ddb10e729667f728653c2c742" integrity sha1-Kbih2A3erg3bEOcpZn9yhlPCx0I= webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== dependencies: source-list-map "^2.0.0" source-map "~0.6.1" webpack@^4.30.0: version "4.41.2" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.2.tgz#c34ec76daa3a8468c9b61a50336d8e3303dce74e" integrity 
sha512-Zhw69edTGfbz9/8JJoyRQ/pq8FYUoY0diOXqW0T6yhgdhCv6wr0hra5DwwWexNRns2Z2+gsnrNcbe9hbGBgk/A== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/wasm-edit" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" acorn "^6.2.1" ajv "^6.10.2" ajv-keywords "^3.4.1" chrome-trace-event "^1.0.2" enhanced-resolve "^4.1.0" eslint-scope "^4.0.3" json-parse-better-errors "^1.0.2" loader-runner "^2.4.0" loader-utils "^1.2.3" memory-fs "^0.4.1" micromatch "^3.1.10" mkdirp "^0.5.1" neo-async "^2.6.1" node-libs-browser "^2.2.1" schema-utils "^1.0.0" tapable "^1.1.3" terser-webpack-plugin "^1.4.1" watchpack "^1.6.0" webpack-sources "^1.4.1" websocket-driver@>=0.5.1: version "0.7.3" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.3.tgz#a2d4e0d4f4f116f1e6297eba58b05d430100e9f9" integrity sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg== dependencies: http-parser-js ">=0.4.0 <0.4.11" safe-buffer ">=5.1.0" websocket-extensions ">=0.1.1" websocket-extensions@>=0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.3.tgz#5d2ff22977003ec687a4b87073dfbbac146ccf29" integrity sha512-nqHUnMXmBzT0w570r2JpJxfiSD1IzoI+HGVdd3aZ0yNi3ngvQ4jv1dtHt5VGxfI2yj5yqImPhOK4vmIh2xMbGg== which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@^1.1.1, which@^1.2.1, which@^1.2.14, which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: version "1.1.3" resolved 
"https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" window-size@0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" integrity sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0= with@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/with/-/with-5.1.1.tgz#fa4daa92daf32c4ea94ed453c81f04686b575dfe" integrity sha1-+k2qktrzLE6pTtRTyB8EaGtXXf4= dependencies: acorn "^3.1.0" acorn-globals "^3.0.0" word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== wordwrap@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" integrity sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8= wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= worker-farm@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== dependencies: errno "~0.1.7" wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width 
"^1.0.1" strip-ansi "^3.0.1" wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== dependencies: ansi-styles "^3.2.0" string-width "^3.0.0" strip-ansi "^5.0.0" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= ws@^6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== dependencies: async-limiter "~1.0.0" ws@~3.3.1: version "3.3.3" resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA== dependencies: async-limiter "~1.0.0" safe-buffer "~5.1.0" ultron "~1.1.0" xmlhttprequest-ssl@~1.5.4: version "1.5.5" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e" integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4= xtend@^4.0.0, xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== "y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" integrity 
sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.2, yallist@^3.0.3: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^11.1.1: version "11.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" integrity sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs-parser@^13.1.0: version "13.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs@12.0.5: version "12.0.5" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== dependencies: cliui "^4.0.0" decamelize "^1.2.0" find-up "^3.0.0" get-caller-file "^1.0.1" os-locale "^3.0.0" require-directory "^2.1.1" require-main-filename "^1.0.1" set-blocking "^2.0.0" string-width "^2.0.0" which-module "^2.0.0" y18n "^3.2.1 || ^4.0.0" yargs-parser "^11.1.1" yargs@13.2.4: version "13.2.4" resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== dependencies: cliui "^5.0.0" find-up "^3.0.0" get-caller-file "^2.0.1" os-locale "^3.1.0" require-directory "^2.1.1" require-main-filename "^2.0.0" set-blocking "^2.0.0" string-width "^3.0.0" which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^13.1.0" yargs@~3.10.0: version "3.10.0" 
resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" integrity sha1-9+572FfdfB0tOMDnTvvWgdFDH9E= dependencies: camelcase "^1.0.2" cliui "^2.1.0" decamelize "^1.0.0" window-size "0.1.0" yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk= buildbot-3.4.0/www/data_module/000077500000000000000000000000001413250514000164175ustar00rootroot00000000000000buildbot-3.4.0/www/data_module/README.md000066400000000000000000000015401413250514000176760ustar00rootroot00000000000000# Buildbot data module Buildbot data module is an AngularJS module for Buildbot nine clients. ## Installation ``` $ bower install buildbot-data ``` ## Adding dependency to your project ``` angular.module('myModule', ['bbData']); ``` ## Building ``` $ yarn install $ gulp ``` ## Running tests ``` $ yarn install $ karma start ``` ## How to test within buildbot/www/base ? * run `gulp prod` in base (dependencies are installed) * run `gulp prod` in data_module * create symlink from `www/data_module/dist` to `www/base/libs/buildbot-data/dist` * run `gulp dev proxy` in base * run `gulp dev` in data_module ## How to publish the results (for buildbot maintainers) ? 
``` $ vi guanlecoja/config.coffee # bump the version manually $ gulp publish ``` This will commit and publish a new tag in the bower repository, with the content of your working directory buildbot-3.4.0/www/data_module/karma.conf.js000066400000000000000000000003641413250514000207770ustar00rootroot00000000000000const common = require('buildbot-build-common'); module.exports = function karmaConfig (config) { common.createTemplateKarmaConfig(config, { testRoot: 'src/tests.webpack.js', webpack: require('./webpack.config') }); }; buildbot-3.4.0/www/data_module/package.json000066400000000000000000000013461413250514000207110ustar00rootroot00000000000000{ "name": "buildbot-data-js", "version": "3.0.1", "description": "Buildbot AngularJS data module", "readme": "README.md", "author": "Andras Toth ", "main": "dist/buildbot-data-js.js", "scripts": { "build": "rimraf dist && webpack --bail --progress --profile --env dev && webpack --bail --progress --profile --env prod", "build-dev": "rimraf dist && webpack --bail --progress --profile --env dev", "test": "karma start", "test-watch": "karma start --auto-watch --no-single-run" }, "devDependencies": { "angular-mocks": "^1.7.9", "buildbot-build-common": "link:../build_common", "rimraf": "^2.6.3" }, "dependencies": { "angular": "^1.7.9" }, "license": "MIT" } buildbot-3.4.0/www/data_module/postcss.config.js000066400000000000000000000001711413250514000217160ustar00rootroot00000000000000module.exports = { plugins: { autoprefixer: { browsers: ['last 2 versions'] }, }, }; buildbot-3.4.0/www/data_module/src/000077500000000000000000000000001413250514000172065ustar00rootroot00000000000000buildbot-3.4.0/www/data_module/src/classes/000077500000000000000000000000001413250514000206435ustar00rootroot00000000000000buildbot-3.4.0/www/data_module/src/classes/base.service.js000066400000000000000000000060161413250514000235550ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove 
unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Base { constructor(dataService, socketService, dataUtilsService) { let BaseInstance; return (BaseInstance = class BaseInstance { constructor(object, _endpoint, childEndpoints) { this._endpoint = _endpoint; if (childEndpoints == null) { childEndpoints = []; } if (!angular.isString(this._endpoint)) { throw new TypeError(`Parameter 'endpoint' must be a string, not ${typeof this.endpoint}`); } this.$accessor = null; // add object fields to the instance this.update(object); // generate loadXXX functions this.constructor.generateFunctions(childEndpoints); // get the id of the class type const classId = dataUtilsService.classId(this._endpoint); this._id = this[classId]; // reset endpoint to base if (this._id != null) { this._endpoint = dataUtilsService.type(this._endpoint); } } setAccessor(a) { return this.$accessor = a; } update(o) { return angular.extend(this, o); } get(...args) { return dataService.get(this._endpoint, this._id, ...Array.from(args)); } control(method, params) { return dataService.control(this._endpoint, this._id, method, params); } // generate endpoint functions for the class static generateFunctions(endpoints) { return endpoints.forEach(e => { // capitalize endpoint names const E = dataUtilsService.capitalize(e); // adds loadXXX functions to the prototype this.prototype[`load${E}`] = function(...args) { return this[e] = this.get(e, ...Array.from(args)); }; // adds getXXX functions to the prototype return this.prototype[`get${E}`] = function(...args) { let query; [args, query] = Array.from(dataUtilsService.splitOptions(args)); if (this.$accessor) { if (query.subscribe == null) { query.subscribe = true; } query.accessor = this.$accessor; } return this.get(e, ...Array.from(args), query); }; }); } }); } } angular.module('bbData') .factory('Base', 
['dataService', 'socketService', 'dataUtilsService', Base]); buildbot-3.4.0/www/data_module/src/classes/base.service.spec.js000066400000000000000000000021561413250514000245070ustar00rootroot00000000000000describe('Base class', function() { let $q, dataService, socketService; beforeEach(angular.mock.module('bbData')); let Base = (dataService = (socketService = ($q = null))); const injected = function($injector) { Base = $injector.get('Base'); dataService = $injector.get('dataService'); socketService = $injector.get('socketService'); $q = $injector.get('$q'); }; beforeEach(inject(injected)); it('should be defined', () => expect(Base).toBeDefined()); it('should merge the passed in object with the instance', function() { const object = {a: 1, b: 2}; const base = new Base(object, 'ab'); expect(base.a).toEqual(object.a); expect(base.b).toEqual(object.b); }); it('should have loadXxx function for child endpoints', function() { const children = ['a', 'bcd', 'ccc']; const base = new Base({}, 'ab', children); expect(angular.isFunction(base.loadA)).toBeTruthy(); expect(angular.isFunction(base.loadBcd)).toBeTruthy(); expect(angular.isFunction(base.loadCcc)).toBeTruthy(); }); }); buildbot-3.4.0/www/data_module/src/classes/build.service.js000066400000000000000000000011701413250514000237360ustar00rootroot00000000000000class Build { constructor(Base, dataService) { let BuildInstance; return (BuildInstance = class BuildInstance extends Base { constructor(object, endpoint) { const endpoints = [ 'changes', // /changes 'properties', // /properties 'steps' // /steps/:name // /steps/:stepid ]; super(object, endpoint, endpoints); } }); } } angular.module('bbData') .factory('Build', ['Base', 'dataService', Build]); buildbot-3.4.0/www/data_module/src/classes/builder.service.js000066400000000000000000000014411413250514000242660ustar00rootroot00000000000000class Builder { constructor(Base, dataService) { let BuilderInstance; return (BuilderInstance = class BuilderInstance extends Base { 
constructor(object, endpoint) { const endpoints = [ 'builds', // /builds/:buildid 'buildrequests', // /buildrequests/:buildrequestid 'forceschedulers', // /forceschedulers 'workers', // /workers/:workerid // /workers/:name 'masters' // /masters/:masterid ]; super(object, endpoint, endpoints); } }); } } angular.module('bbData') .factory('Builder', ['Base', 'dataService', Builder]); buildbot-3.4.0/www/data_module/src/classes/buildrequest.service.js000066400000000000000000000007661413250514000253610ustar00rootroot00000000000000class Buildrequest { constructor(Base, dataService) { let BuildrequestInstance; return (BuildrequestInstance = class BuildrequestInstance extends Base { constructor(object, endpoint) { const endpoints = [ 'builds' // /builds ]; super(object, endpoint, endpoints); } }); } } angular.module('bbData') .factory('Buildrequest', ['Base', 'dataService', Buildrequest]); buildbot-3.4.0/www/data_module/src/classes/buildset.service.js000066400000000000000000000007421413250514000244560ustar00rootroot00000000000000class Buildset { constructor(Base, dataService) { let BuildsetInstance; return (BuildsetInstance = class BuildsetInstance extends Base { constructor(object, endpoint) { const endpoints = [ 'properties' // /properties ]; super(object, endpoint, endpoints); } }); } } angular.module('bbData') .factory('Buildset', ['Base', 'dataService', Buildset]); buildbot-3.4.0/www/data_module/src/classes/change.service.js000066400000000000000000000024431413250514000240700ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Change { constructor(Base, dataService, dataUtilsService) { let ChangeInstance; return (ChangeInstance = class ChangeInstance extends Base { constructor(object, endpoint) { super(object, endpoint); let { author } = this; if ((this.author == null)) { author = "unknown"; } const email = 
dataUtilsService.emailInString(author); // Remove email from author string if (email) { if (author.split(' ').length > 1) { this.author_name = author.replace(new RegExp(`\\s<${email}>`), ''); this.author_email = email; } else { this.author_name = email.split("@")[0]; this.author_email = email; } } else { this.author_name = author; } } }); } } angular.module('bbData') .factory('Change', ['Base', 'dataService', 'dataUtilsService', Change]); buildbot-3.4.0/www/data_module/src/classes/changes.spec.js000066400000000000000000000013431413250514000235430ustar00rootroot00000000000000describe('change class', function() { beforeEach(angular.mock.module('bbData')); it('should calculate authors emails', inject(function(Change) { const changes = [ new Change({author: "foo "}, "changes") , new Change({author: "foo@foo.com"}, "changes") , new Change({author: "foo"}, "changes") ]; expect(changes[0].author_email).toBe("bar@foo.com"); expect(changes[1].author_email).toBe("foo@foo.com"); expect(changes[2].author_email).toBeUndefined(); expect(changes[0].author_name).toBe("foo"); expect(changes[1].author_name).toBe("foo"); expect(changes[2].author_name).toBe("foo"); }) ); }); buildbot-3.4.0/www/data_module/src/classes/changesource.service.js000066400000000000000000000006001413250514000253020ustar00rootroot00000000000000class Changesource { constructor(dataService, Base) { let ChangesourceInstance; return (ChangesourceInstance = class ChangesourceInstance extends Base { constructor(object, endpoint) { super(object, endpoint); } }); } } angular.module('bbData') .factory('Changesource', ['dataService', 'Base', Changesource]); buildbot-3.4.0/www/data_module/src/classes/forcescheduler.service.js000066400000000000000000000006141413250514000256360ustar00rootroot00000000000000class Forcescheduler { constructor(Base, dataService) { let ForceschedulerInstance; return (ForceschedulerInstance = class ForceschedulerInstance extends Base { constructor(object, endpoint) { super(object, endpoint); } 
}); } } angular.module('bbData') .factory('Forcescheduler', ['Base', 'dataService', Forcescheduler]); buildbot-3.4.0/www/data_module/src/classes/logs.service.js000066400000000000000000000007441413250514000236110ustar00rootroot00000000000000class Log { constructor(Base, dataService) { let BuildInstance; return (BuildInstance = class BuildInstance extends Base { constructor(object, endpoint) { const endpoints = [ 'chunks', // /chunks 'contents' ]; super(object, endpoint, endpoints); } }); } } angular.module('bbData') .factory('Log', ['Base', 'dataService', Log]); buildbot-3.4.0/www/data_module/src/classes/master.service.js000066400000000000000000000013501413250514000241320ustar00rootroot00000000000000class Master { constructor(Base, dataService) { let MasterInstance; return (MasterInstance = class MasterInstance extends Base { constructor(object, endpoint) { const endpoints = [ 'builders', // /builders/:builderid 'workers', // /workers/:workerid // /workers/:name 'changesources', // /changesources/:changesourceid 'schedulers' // /schedulers/:schedulerid ]; super(object, endpoint, endpoints); } }); } } angular.module('bbData') .factory('Master', ['Base', 'dataService', Master]); buildbot-3.4.0/www/data_module/src/classes/properties.service.js000066400000000000000000000006451413250514000250410ustar00rootroot00000000000000// damn grammar. 
I claim that properties singular is propertie class Propertie { constructor(Base, dataService) { let BuildInstance; return (BuildInstance = class BuildInstance extends Base { constructor(object, endpoint) { super(object, endpoint, []); } }); } } angular.module('bbData') .factory('Propertie', ['Base', 'dataService', Propertie]); buildbot-3.4.0/www/data_module/src/classes/scheduler.service.js000066400000000000000000000005561413250514000246240ustar00rootroot00000000000000class Scheduler { constructor(Base, dataService) { let SchedulerInstance; return (SchedulerInstance = class SchedulerInstance extends Base { constructor(object, endpoint) { super(object, endpoint); } }); } } angular.module('bbData') .factory('Scheduler', ['Base', 'dataService', Scheduler]); buildbot-3.4.0/www/data_module/src/classes/sourcestamp.service.js000066400000000000000000000007611413250514000252110ustar00rootroot00000000000000class Sourcestamp { constructor(Base, dataService) { let SourcestampInstance; return (SourcestampInstance = class SourcestampInstance extends Base { constructor(object, endpoint) { const endpoints = [ 'changes' // /changes ]; super(object, endpoint, endpoints); } }); } } angular.module('bbData') .factory('Sourcestamp', ['Base', 'dataService', Sourcestamp]); buildbot-3.4.0/www/data_module/src/classes/step.service.js000066400000000000000000000007041413250514000236140ustar00rootroot00000000000000class Step { constructor(Base, dataService) { let BuildInstance; return (BuildInstance = class BuildInstance extends Base { constructor(object, endpoint) { const endpoints = [ 'logs' // /logs ]; super(object, endpoint, endpoints); } }); } } angular.module('bbData') .factory('Step', ['Base', 'dataService', Step]); buildbot-3.4.0/www/data_module/src/classes/worker.service.js000066400000000000000000000005341413250514000241530ustar00rootroot00000000000000class Worker { constructor(Base, dataService) { let WorkerInstance; return (WorkerInstance = class WorkerInstance extends Base { 
constructor(object, endpoint) { super(object, endpoint); } }); } } angular.module('bbData') .factory('Worker', ['Base', 'dataService', Worker]); buildbot-3.4.0/www/data_module/src/data.constant.js000066400000000000000000000010341413250514000223030ustar00rootroot00000000000000class Api { constructor() { return new String('api/v2/'); } } class Endpoints { constructor() { // Rootlinks return [ 'builders', 'builds', 'buildrequests', 'workers', 'buildsets', 'changes', 'changesources', 'masters', 'sourcestamps', 'schedulers', 'forceschedulers' ]; } } angular.module('bbData') .constant('API', new Api()) .constant('ENDPOINTS', new Endpoints()); buildbot-3.4.0/www/data_module/src/data.module.js000066400000000000000000000022071413250514000217420ustar00rootroot00000000000000angular.module('bbData', []); require('./classes/base.service.js'); require('./classes/builder.service.js'); require('./classes/buildrequest.service.js'); require('./classes/build.service.js'); require('./classes/buildset.service.js'); require('./classes/change.service.js'); require('./classes/changesource.service.js'); require('./classes/forcescheduler.service.js'); require('./classes/logs.service.js'); require('./classes/master.service.js'); require('./classes/properties.service.js'); require('./classes/scheduler.service.js'); require('./classes/sourcestamp.service.js'); require('./classes/step.service.js'); require('./classes/worker.service.js'); require('./data.constant.js'); require('./services/data/collection/collection.service.js'); require('./services/data/collection/dataquery.service.js'); require('./services/data/data.service.js'); require('./services/dataUtils/dataUtils.service.js'); require('./services/rest/rest.service.js'); require('./services/socket/socket.service.js'); require('./services/socket/webSocketBackend.service.js'); require('./services/socket/websocket.service.js'); require('./services/stream/stream.service.js'); 
buildbot-3.4.0/www/data_module/src/services/000077500000000000000000000000001413250514000210315ustar00rootroot00000000000000buildbot-3.4.0/www/data_module/src/services/data/000077500000000000000000000000001413250514000217425ustar00rootroot00000000000000buildbot-3.4.0/www/data_module/src/services/data/collection/000077500000000000000000000000001413250514000240755ustar00rootroot00000000000000buildbot-3.4.0/www/data_module/src/services/data/collection/collection.service.js000066400000000000000000000200501413250514000302220ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Collection { constructor($q, $injector, $log, dataUtilsService, socketService, DataQuery, $timeout) { let CollectionInstance; angular.isArray = (Array.isArray = arg => arg instanceof Array); CollectionInstance = class CollectionInstance extends Array { constructor(restPath, query, accessor) { // this contructor is used to construct completely new instances only. // We override constructor property for existing instances so that // Array.prototype.filter() passes the restPath, query and accessor properties // to the new instance. 
super(); this.constructorImpl(restPath, query, accessor); } constructorImpl(restPath, query, accessor) { let className; this.listener = this.listener.bind(this); this.restPath = restPath; if (query == null) { query = {}; } this.query = query; this.accessor = accessor; this.socketPath = dataUtilsService.socketPath(this.restPath); this.type = dataUtilsService.type(this.restPath); this.id = dataUtilsService.classId(this.restPath); this.endpoint = dataUtilsService.endpointPath(this.restPath); this.socketPathRE = dataUtilsService.socketPathRE(this.socketPath); this.queryExecutor = new DataQuery(this.query); // default event handlers this.onUpdate = angular.noop; this.onNew = angular.noop; this.onChange = angular.noop; this._new = []; this._updated = []; this._byId = {}; this.$resolved = false; try { // try to get the wrapper class className = dataUtilsService.className(this.restPath); // the classes have the dataService as a dependency // $injector.get doesn't throw circular dependency exception this.WrapperClass = $injector.get(className); } catch (e) { // use the Base class otherwise console.log("unknown wrapper for", className); this.WrapperClass = $injector.get('Base'); } socketService.eventStream.subscribe(this.listener); if (this.accessor != null) { this.accessor.registerCollection(this); } } then(callback) { console.log("Should not use collection as a promise. Callback will be called several times!"); this.onChange = callback; } getArray() { console.log("getArray() is deprecated. 
dataService.get() directly returns the collection!"); return this; } get(id) { return this._byId[id]; } hasOwnProperty(id) { return this._byId.hasOwnProperty(id); } listener(data) { const key = data.k; const message = data.m; // Test if the message is for me if (this.socketPathRE.test(key)) { this.put(message); this.recomputeQuery(); return this.sendEvents(); } } subscribe() { return socketService.subscribe(this.socketPath, this); } close() { return socketService.unsubscribe(this.socketPath, this); } initial(data) { this.$resolved = true; // put items one by one if not already in the array // if they are that means they come from an update event // the event is always considered the latest data // so we don't overwrite it with REST data for (let i of Array.from(data)) { if (!this.hasOwnProperty(i[this.id])) { this.put(i); } } this.recomputeQuery(); return this.sendEvents({initial:true}); } from(data) { // put items one by one for (let i of Array.from(data)) { this.put(i); } this.recomputeQuery(); return this.sendEvents(); } item(i) { return this[i]; } add(element) { // don't create wrapper if element is filtered if (this.queryExecutor.filter([element]).length === 0) { return; } const instance = new this.WrapperClass(element, this.endpoint); instance.setAccessor(this.accessor); instance.$collection = this; this._new.push(instance); this._byId[instance[this.id]] = instance; return this.push(instance); } put(element) { for (let old of Array.from(this)) { if (old[this.id] === element[this.id]) { old.update(element); this._updated.push(old); return; } } // if not found, add it. 
return this.add(element); } clear() { while (this.length > 0) { this.pop(); } } delete(element) { const index = this.indexOf(element); if (index > -1) { return this.splice(index, 1); } } recomputeQuery() { return this.queryExecutor.computeQuery(this); } sendEvents(opts){ // send the events asynchronously const { _new } = this; const { _updated } = this; this._updated = []; this._new = []; return $timeout(() => { let i; let changed = false; for (i of Array.from(_new)) { // is it still in the array? if (Array.from(this).includes(i)) { this.onNew(i); changed = true; } } for (i of Array.from(_updated)) { // is it still in the array? if (Array.from(this).includes(i)) { this.onUpdate(i); changed = true; } } if (changed || (opts != null ? opts.initial : undefined)) { this.onChange(this); } } , 0); } }; // see explanation in CollectionInstance.constructor() above Object.defineProperty(CollectionInstance.prototype, 'constructor', { get: function() { let copyFrom = this; return function(length) { return copyFrom.constructorImpl(copyFrom.restPath, copyFrom.query, copyfrom.accessor); }; } }); return CollectionInstance; } } angular.module('bbData') .factory('Collection', ['$q', '$injector', '$log', 'dataUtilsService', 'socketService', 'DataQuery', '$timeout', Collection]); buildbot-3.4.0/www/data_module/src/services/data/collection/collection.service.spec.js000066400000000000000000000114001413250514000311520ustar00rootroot00000000000000describe('Collection', function() { let $filter, $q, $rootScope, $timeout, c, indexedDBService, tabexService; beforeEach(angular.mock.module('bbData')); let Collection = ($q = ($rootScope = (tabexService = (indexedDBService = (c = ($timeout = ($filter = undefined))))))); const injected = function($injector) { $q = $injector.get('$q'); $rootScope = $injector.get('$rootScope'); Collection = $injector.get('Collection'); $timeout = $injector.get('$timeout'); $filter = $injector.get('$filter'); }; beforeEach(inject(injected)); describe("simple 
collection", function() { beforeEach(function() { c = new Collection('builds') }); it('should be defined', function() { expect(Collection).toBeDefined(); expect(c).toBeDefined(); }); it('should be like an array', () => expect(angular.isArray(c)).toBeTruthy()); it('should be filterable with angular.filter', function() { c.from([ {buildid: 1} , {buildid: 2} ]); const filtered = $filter('filter')(c, {buildid:1}); expect(filtered.length).toBe(1); }); it('empty collection should be filterable with angular.filter', function() { const filtered = $filter('filter')(c, {buildid:1}); expect(filtered.length).toBe(0); }); it('should have a put function, which does not add twice for the same id', function() { c.put({buildid: 1}); expect(c.length).toEqual(1); c.put({buildid: 1}); expect(c.length).toEqual(1); c.put({buildid: 2}); expect(c.length).toEqual(2); }); it('should have a from function, which iteratively inserts data', function() { c.from([ {buildid: 1} , {buildid: 2} , {buildid: 2} ]); expect(c.length).toEqual(2); }); it("should order the updates correctly", function() { c.listener({k: "builds/1/update", m: {buildid: 1, value:1}}); c.initial([{ buildid: 1, value: 0 } ]); expect(c[0].value).toEqual(1); c.listener({k: "builds/1/update", m: {buildid: 1, value:2}}); expect(c[0].value).toEqual(2); }); }); describe("queried collection", function() { beforeEach(function() { c = new Collection('builds', {order:'-buildid', limit:2}) } ); it('should have a from function, which iteratively inserts data', function() { c.from([ {buildid: 1} , {buildid: 2} , {buildid: 2} ]); expect(c.length).toEqual(2); c.from([ {buildid: 3} , {buildid: 4} , {buildid: 5} ]); expect(c.length).toEqual(2); expect(c[0].buildid).toEqual(5); expect(c[1].buildid).toEqual(4); }); it('should call the event handlers', function() { spyOn(c, 'onNew'); spyOn(c, 'onChange'); spyOn(c, 'onUpdate'); c.from([ {buildid: 1} , {buildid: 2} , {buildid: 2} ]); $timeout.flush(); expect(c.onNew.calls.count()).toEqual(2); 
expect(c.onUpdate.calls.count()).toEqual(1); expect(c.onChange.calls.count()).toEqual(1); c.onNew.calls.reset(); c.onUpdate.calls.reset(); c.onChange.calls.reset(); c.from([ {buildid: 3} , {buildid: 4} , {buildid: 5} ]); $timeout.flush(); expect(c.onNew.calls.count()).toEqual(2); expect(c.onUpdate.calls.count()).toEqual(0); expect(c.onChange.calls.count()).toEqual(1); }); }); describe("singleid collection", function() { beforeEach(function() { c = new Collection('builds/1'); } ); it("should manage the updates correctly", function() { c.listener({k: "builds/1/update", m: {buildid: 1, value:1}}); c.listener({k: "builds/2/update", m: {buildid: 2, value:2}}); c.initial([{ buildid: 1, value: 0 } ]); expect(c.length).toEqual(1); expect(c[0].value).toEqual(1); c.listener({k: "builds/1/update", m: {buildid: 1, value:2}}); expect(c[0].value).toEqual(2); }); }); }); buildbot-3.4.0/www/data_module/src/services/data/collection/dataquery.service.js000066400000000000000000000122101413250514000300650ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS203: Remove `|| {}` from converted for-own loops * DS205: Consider reworking code to avoid use of IIFEs * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class DataQuery { constructor($http, $q, API) { let DataQueryClass; return (DataQueryClass = class DataQueryClass { constructor(query) { if (query == null) { query = {}; } this.query = query; this.filters = {}; for (let fieldAndOperator in query) { let value = query[fieldAndOperator]; if (['field', 'limit', 'offset', 'order', 'property'].indexOf(fieldAndOperator) < 0) { if (['on', 'true', 'yes'].indexOf(value) > -1) { value = true; } else if (['off', 'false', 'no'].indexOf(value) > -1) { value = false; } this.filters[fieldAndOperator] = value; } } } 
computeQuery(array) { // 1. filtering this.filter(array); // 2. sorting const order = this.query != null ? this.query.order : undefined; this.sort(array, order); // 3. limit const limit = this.query != null ? this.query.limit : undefined; return this.limit(array, limit); } isFiltered(v) { const cmpByOp = {}; for (let fieldAndOperator in this.filters) { const value = this.filters[fieldAndOperator]; const [field, operator] = Array.from(fieldAndOperator.split('__')); let cmp = false; switch (operator) { case 'ne': cmp = v[field] !== value; break; case 'lt': cmp = v[field] < value; break; case 'le': cmp = v[field] <= value; break; case 'gt': cmp = v[field] > value; break; case 'ge': cmp = v[field] >= value; break; default: cmp = (v[field] === value) || (angular.isArray(v[field]) && Array.from(v[field]).includes(value)) || (angular.isArray(value) && (value.length === 0)) || (angular.isArray(value) && Array.from(value).includes(v[field])) || // private fields added by the data service (v[`_${field}`] === value) || (angular.isArray(v[`_${field}`]) && Array.from(v[`_${field}`]).includes(value)) || (angular.isArray(value) && Array.from(value).includes(v[`_${field}`])); } cmpByOp[fieldAndOperator] = cmpByOp[fieldAndOperator] || cmp; } for (let op of Object.keys(cmpByOp || {})) { v = cmpByOp[op]; if (!v) { return false; } } return true; } filter(array) { let i = 0; return (() => { const result = []; while (i < array.length) { const v = array[i]; if (this.isFiltered(v)) { result.push(i += 1); } else { result.push(array.splice(i, 1)); } } return result; })(); } sort(array, order) { const compare = function(property) { let reverse = false; if (property[0] === '-') { property = property.slice(1); reverse = true; } return function(a, b) { if (reverse) { [a, b] = Array.from([b, a]); } if (a[property] < b[property]) { return -1; } else if (a[property] > b[property]) { return 1; } else { return 0; } }; }; if (angular.isString(order)) { return array.sort(compare(order)); } else if 
(angular.isArray(order)) { return array.sort(function(a, b) { for (let o of Array.from(order)) { const f = compare(o)(a, b); if (f) { return f; } } return 0; }); } } limit(array, limit) { while (array.length > limit) { array.pop(); } } }); } } angular.module('bbData') .factory('DataQuery', ['$http', '$q', 'API', DataQuery]); buildbot-3.4.0/www/data_module/src/services/data/collection/dataquery.service.spec.js000066400000000000000000000166271413250514000310360ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('dataquery service', function() { let $rootScope, testArray, wrappedDataQuery; beforeEach(angular.mock.module('bbData')); let DataQuery = (testArray = ($rootScope = (wrappedDataQuery = undefined))); const injected = function($injector) { DataQuery = $injector.get('DataQuery'); $rootScope = $injector.get('$rootScope'); testArray = [{ builderid: 1, buildid: 3, buildrequestid: 1, complete: false, complete_at: null, started_at: 1417802797 } , { builderid: 2, buildid: 1, buildrequestid: 1, complete: true, complete_at: 1417803429, started_at: 1417803026 } , { builderid: 1, buildid: 2, buildrequestid: 1, complete: true, complete_at: 1417803038, started_at: 1417803025 } ]; class WrappedDataQuery { filter(array, query) { const q = new DataQuery(query); array = angular.copy(array); q.filter(array); return array; } sort(array, order) { const q = new DataQuery({order}); array = angular.copy(array); q.sort(array, order); return array; } limit(array, limit) { const q = new DataQuery({limit}); array = angular.copy(array); q.limit(array, limit); return array; } } wrappedDataQuery = new WrappedDataQuery(); }; beforeEach(inject(injected)); it('should be defined', () => expect(DataQuery).toBeDefined()); describe('filter(array, filters)', function() { it('should filter the array (one filter)', function() 
{ const result = wrappedDataQuery.filter(testArray, {complete: false}); expect(result.length).toBe(1); expect(result).toContain(testArray[0]); }); it('should filter the array (more than one filters)', function() { const result = wrappedDataQuery.filter(testArray, {complete: true, buildrequestid: 1}); expect(result.length).toBe(2); expect(result).toContain(testArray[1]); expect(result).toContain(testArray[2]); }); it('should filter the array (eq - equal)', function() { const result = wrappedDataQuery.filter(testArray, {'complete__eq': true}); expect(result.length).toBe(2); expect(result).toContain(testArray[1]); expect(result).toContain(testArray[2]); }); it('should filter the array (two eq)', function() { const result = wrappedDataQuery.filter(testArray, {'buildid__eq': [1, 2]}); expect(result.length).toBe(2); expect(result).toContain(testArray[1]); expect(result).toContain(testArray[2]); }); it('should treat empty eq criteria as no restriction', function() { const result = wrappedDataQuery.filter(testArray, {'buildid__eq': []}); expect(result.length).toBe(3); }); it('should filter the array (ne - not equal)', function() { const result = wrappedDataQuery.filter(testArray, {'complete__ne': true}); expect(result.length).toBe(1); expect(result).toContain(testArray[0]); }); it('should filter the array (lt - less than)', function() { const result = wrappedDataQuery.filter(testArray, {'buildid__lt': 3}); expect(result.length).toBe(2); expect(result).toContain(testArray[1]); expect(result).toContain(testArray[2]); }); it('should filter the array (le - less than or equal to)', function() { const result = wrappedDataQuery.filter(testArray, {'buildid__le': 3}); expect(result.length).toBe(3); }); it('should filter the array (gt - greater than)', function() { const result = wrappedDataQuery.filter(testArray, {'started_at__gt': 1417803025}); expect(result.length).toBe(1); expect(result).toContain(testArray[1]); }); it('should filter the array (ge - greater than or equal to)', 
function() { const result = wrappedDataQuery.filter(testArray, {'started_at__ge': 1417803025}); expect(result.length).toBe(2); expect(result).toContain(testArray[1]); expect(result).toContain(testArray[2]); }); it('should convert on/off, true/false, yes/no to boolean', function() { const resultTrue = wrappedDataQuery.filter(testArray, {complete: true}); const resultFalse = wrappedDataQuery.filter(testArray, {complete: false}); let result = wrappedDataQuery.filter(testArray, {complete: 'on'}); expect(result).toEqual(resultTrue); result = wrappedDataQuery.filter(testArray, {complete: 'true'}); expect(result).toEqual(resultTrue); result = wrappedDataQuery.filter(testArray, {complete: 'yes'}); expect(result).toEqual(resultTrue); result = wrappedDataQuery.filter(testArray, {complete: 'off'}); expect(result).toEqual(resultFalse); result = wrappedDataQuery.filter(testArray, {complete: 'false'}); expect(result).toEqual(resultFalse); result = wrappedDataQuery.filter(testArray, {complete: 'no'}); expect(result).toEqual(resultFalse); }); }); describe('sort(array, order)', function() { it('should sort the array (one parameter)', function() { const result = wrappedDataQuery.sort(testArray, 'buildid'); expect(result[0]).toEqual(testArray[1]); expect(result[1]).toEqual(testArray[2]); expect(result[2]).toEqual(testArray[0]); }); it('should sort the array (one parameter, - reverse)', function() { const result = wrappedDataQuery.sort(testArray, '-buildid'); expect(result[0]).toEqual(testArray[0]); expect(result[1]).toEqual(testArray[2]); expect(result[2]).toEqual(testArray[1]); }); it('should sort the array (more parameter)', function() { const result = wrappedDataQuery.sort(testArray, ['builderid', '-buildid']); expect(result[0]).toEqual(testArray[0]); expect(result[1]).toEqual(testArray[2]); expect(result[2]).toEqual(testArray[1]); }); }); describe('limit(array, limit)', function() { it('should slice the array', function() { const result = wrappedDataQuery.limit(testArray, 1); 
expect(result.length).toBe(1); expect(result[0]).toEqual(testArray[0]); }); it('should return the array when the limit >= array.length', function() { const result = wrappedDataQuery.limit(testArray, 3); expect(result.length).toBe(3); expect(result[2]).toEqual(testArray[2]); }); }); }); buildbot-3.4.0/www/data_module/src/services/data/data.service.js000066400000000000000000000260751413250514000246620ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS206: Consider reworking classes to avoid initClass * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Data { static initClass() { // TODO caching this.prototype.cache = false; } constructor($log, $q, restService, socketService, dataUtilsService, Collection, ENDPOINTS) { let DataService; return new ((DataService = (function() { let self = undefined; DataService = class DataService { static initClass() { self = null; //############# utils for testing // register return values for the mocked get function this.prototype.mocks = {}; this.prototype.spied = false; } constructor() { self = this; // setup socket listeners //socketService.eventStream.onUnsubscribe = @unsubscribeListener socketService.onclose = this.socketCloseListener; // generate loadXXX functions for root endpoints this.constructor.generateEndpoints(); } // the arguments are in this order: endpoint, id, child, id of child, query get(...args) { // get the query parameters let accessor, query, subscribePromise; [args, query] = Array.from(dataUtilsService.splitOptions(args)); let subscribe = (accessor = undefined); // subscribe for changes if 'subscribe' is true subscribe = query.subscribe === true; ({ accessor } = query); if (subscribe && !accessor) { $log.warn("subscribe call should be done after DataService.open()"); $log.warn("for 
maintaining trace of observers"); subscribe = false; } // 'subscribe' is not part of the query delete query.subscribe; delete query.accessor; const restPath = dataUtilsService.restPath(args); // up to date array, this will be returned const collection = new Collection(restPath, query, accessor); if (subscribe) { subscribePromise = collection.subscribe(); } else { subscribePromise = $q.resolve(); } subscribePromise.then(() => // get the data from the rest api restService.get(restPath, query).then(function(response) { const type = dataUtilsService.type(restPath); const datalist = response[type]; // the response should always be an array if (!angular.isArray(datalist)) { const e = `${datalist} is not an array`; $log.error(e); return; } // fill up the collection with initial data collection.initial(datalist); }) ); return collection; } control(ep, id, method, params) { if (params == null) { params = {}; } const restPath = dataUtilsService.restPath([ep, id]); return restService.post(restPath, { id: this.getNextId(), jsonrpc: '2.0', method, params } ); } // returns next id for jsonrpc2 control messages getNextId() { if (this.jsonrpc == null) { this.jsonrpc = 1; } return this.jsonrpc++; } // generate functions for root endpoints static generateEndpoints() { return ENDPOINTS.forEach(e => { // capitalize endpoint names const E = dataUtilsService.capitalize(e); return this.prototype[`get${E}`] = (...args) => self.get(e, ...Array.from(args)); }); } // opens a new accessor open() { let DataAccessor; return new ((DataAccessor = (function() { let collectionRefs = undefined; DataAccessor = class DataAccessor { static initClass() { collectionRefs = []; } constructor() { this.constructor.generateEndpoints(); } registerCollection(c) { return collectionRefs.push(c); } close() { return collectionRefs.forEach(c => c.close()); } // Closes the group when the scope is destroyed closeOnDestroy(scope) { if (!angular.isFunction(scope.$on)) { throw new TypeError("Parameter 'scope' doesn't 
have an $on function"); } scope.$on('$destroy', () => this.close()); return this; } // Generate functions for root endpoints static generateEndpoints() { return ENDPOINTS.forEach(e => { // capitalize endpoint names const E = dataUtilsService.capitalize(e); this.prototype[`get${E}`] = function(...args) { let query; [args, query] = Array.from(dataUtilsService.splitOptions(args)); if (query.subscribe == null) { query.subscribe = true; } query.accessor = this; return self.get(e, ...Array.from(args), query); }; }); } }; DataAccessor.initClass(); return DataAccessor; })())); } when(url, query, returnValue) { if ((returnValue == null)) { [query, returnValue] = Array.from([{}, query]); } if ((typeof jasmine !== 'undefined' && jasmine !== null) && !this.spied) { spyOn(this, 'get').and.callFake(this._mockGet); this.spied = true; } if (this.mocks[url] == null) { this.mocks[url] = {}; } return this.mocks[url][query] = returnValue; } expect(url, query, returnValue) { if ((returnValue == null)) { [query, returnValue] = Array.from([{}, query]); } if (this._expects == null) { this._expects = []; } this._expects.push([url, query]); return this.when(url, query, returnValue); } verifyNoOutstandingExpectation() { if ((this._expects != null) && this._expects.length) { return fail(`expecting ${this._expects.length} more data requests ` + `(${angular.toJson(this._expects)})`); } } // register return values with the .when function // when testing get will return the given values _mockGet(...args) { const [url, query] = Array.from(this.processArguments(args)); const queryWithoutSubscribe = {}; for (let k in query) { const v = query[k]; if ((k !== "subscribe") && (k !== "accessor")) { queryWithoutSubscribe[k] = v; } } if (this._expects) { const [exp_url, exp_query] = Array.from(this._expects.shift()); expect(exp_url).toEqual(url); expect(exp_query).toEqual(queryWithoutSubscribe); } const returnValue = (this.mocks[url] != null ? 
this.mocks[url][query] : undefined) || (this.mocks[url] != null ? this.mocks[url][queryWithoutSubscribe] : undefined); if ((returnValue == null)) { throw new Error(`No return value for: ${url} ` + `(${angular.toJson(queryWithoutSubscribe)})`); } const collection = this.createCollection(url, queryWithoutSubscribe, returnValue); return collection; } processArguments(args) { let query; [args, query] = Array.from(dataUtilsService.splitOptions(args)); const restPath = dataUtilsService.restPath(args); return [restPath, query || {}]; } // for easier testing createCollection(url, query, response) { const restPath = url; const type = dataUtilsService.type(restPath); const collection = new Collection(restPath, query); // populate the response with default ids // for convenience const { id } = collection; let idCounter = 1; response.forEach(function(d) { if (!d.hasOwnProperty(id)) { d[id] = idCounter++; } }); collection.initial(response); return collection; } }; DataService.initClass(); return DataService; })())); } } Data.initClass(); angular.module('bbData') .service('dataService', ['$log', '$q', 'restService', 'socketService', 'dataUtilsService', 'Collection', 'ENDPOINTS', Data]); buildbot-3.4.0/www/data_module/src/services/data/data.service.spec.js000066400000000000000000000165231413250514000256100ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('Data service', function() { let $httpBackend, $q, $rootScope, $timeout, ENDPOINTS, restService, socketService; beforeEach(angular.mock.module('bbData')); let dataService = (restService = (socketService = (ENDPOINTS = ($rootScope = ($q = ($httpBackend = ($timeout = null))))))); const injected = function($injector) { dataService = 
$injector.get('dataService'); restService = $injector.get('restService'); $timeout = $injector.get('$timeout'); socketService = $injector.get('socketService'); ENDPOINTS = $injector.get('ENDPOINTS'); $rootScope = $injector.get('$rootScope'); $q = $injector.get('$q'); $httpBackend = $injector.get('$httpBackend'); }; beforeEach(inject(injected)); it('should be defined', () => expect(dataService).toBeDefined()); it('should have getXxx functions for endpoints', () => (() => { const result = []; for (let e of Array.from(ENDPOINTS)) { const E = e[0].toUpperCase() + e.slice(1).toLowerCase(); expect(dataService[`get${E}`]).toBeDefined(); result.push(expect(angular.isFunction(dataService[`get${E}`])).toBeTruthy()); } result; })() ); describe('get()', function() { it('should return a collection', function() { const ret = dataService.getBuilds(); expect(ret.length).toBeDefined(); }); it('should call get for the rest api endpoint', function() { const d = $q.defer(); spyOn(restService, 'get').and.returnValue(d.promise); expect(restService.get).not.toHaveBeenCalled(); $rootScope.$apply(() => dataService.get('asd', {subscribe: false})); // the query should not contain the subscribe field expect(restService.get).toHaveBeenCalledWith('asd', {}); }); it('should send startConsuming with the socket path', function() { const data = dataService.open(); const p = $q.resolve([]); spyOn(socketService, 'send').and.returnValue(p); spyOn(restService, 'get').and.returnValue(p); expect(socketService.send).not.toHaveBeenCalled(); $rootScope.$apply(() => data.getBuilds()); expect(socketService.send).toHaveBeenCalledWith({ cmd: 'startConsuming', path: 'builds/*/*' }); socketService.send.calls.reset(); $rootScope.$apply(() => data.getBuilds(1)); expect(socketService.send).toHaveBeenCalledWith({ cmd: 'startConsuming', path: 'builds/1/*' }); // get same build again, it should not register again socketService.send.calls.reset(); $rootScope.$apply(() => data.getBuilds(1)); 
expect(socketService.send).not.toHaveBeenCalled(); // now we close the accessor, and we should send stopConsuming $rootScope.$apply(() => data.close()); expect(socketService.send).toHaveBeenCalledWith({ cmd: 'stopConsuming', path: 'builds/*/*' }); expect(socketService.send).toHaveBeenCalledWith({ cmd: 'stopConsuming', path: 'builds/1/*' }); }); it('should not call startConsuming when {subscribe: false} is passed in', function() { const d = $q.defer(); spyOn(restService, 'get').and.returnValue(d.promise); spyOn(socketService, 'send').and.returnValue(d.promise); expect(socketService.send).not.toHaveBeenCalled(); $rootScope.$apply(() => dataService.getBuilds({subscribe: false})); expect(socketService.send).not.toHaveBeenCalled(); }); it('should add the new instance on /new WebSocket message', function() { spyOn(restService, 'get').and.returnValue($q.resolve({builds: []})); let builds = null; $rootScope.$apply(() => builds = dataService.getBuilds({subscribe: false})); socketService.eventStream.push({ k: 'builds/111/new', m: { asd: 111 } }); expect(builds.pop().asd).toBe(111); }); }); describe('control(method, params)', () => it('should send a jsonrpc message using POST', function() { spyOn(restService, 'post'); expect(restService.post).not.toHaveBeenCalled(); const method = 'force'; const params = {a: 1}; dataService.control("a", 1, method, params); expect(restService.post).toHaveBeenCalledWith("a/1", { id: 1, jsonrpc: '2.0', method, params } ); }) ); describe('open()', function() { let opened = null; beforeEach(() => opened = dataService.open()); it('should return a new accessor', () => expect(opened).toEqual(jasmine.any(Object))); it('should have getXxx functions for endpoints', () => (() => { const result = []; for (let e of Array.from(ENDPOINTS)) { const E = e[0].toUpperCase() + e.slice(1).toLowerCase(); expect(opened[`get${E}`]).toBeDefined(); result.push(expect(angular.isFunction(opened[`get${E}`])).toBeTruthy()); } result; })() ); it('should call unsubscribe on 
each subscribed collection on close', function() { const p = $q.resolve({builds: [{buildid:1}, {buildid:2}, {buildid:3}]}); spyOn(restService, 'get').and.returnValue(p); let builds = null; $rootScope.$apply(() => builds = opened.getBuilds({subscribe: false})); expect(builds.length).toBe(3); spyOn(builds, 'close'); opened.close(); expect(builds.close).toHaveBeenCalled(); }); it('should call close when the $scope is destroyed', function() { spyOn(opened, 'close'); const scope = $rootScope.$new(); opened.closeOnDestroy(scope); expect(opened.close).not.toHaveBeenCalled(); scope.$destroy(); expect(opened.close).toHaveBeenCalled(); }); it('should work with mock calls as well', function() { let builds; dataService.when('builds/1', [{buildid: 1, builderid: 1}]); builds = opened.getBuilds(1, {subscribe: false}); }); }); describe('when()', () => it('should autopopulate ids', function(done) { dataService.when('builds', [{}, {}, {}]); dataService.getBuilds().onChange = function(builds) { expect(builds.length).toBe(3); expect(builds[1].buildid).toBe(2); expect(builds[2].buildid).toBe(3); done(); }; $timeout.flush(); }) ); }); buildbot-3.4.0/www/data_module/src/services/dataUtils/000077500000000000000000000000001413250514000227635ustar00rootroot00000000000000buildbot-3.4.0/www/data_module/src/services/dataUtils/dataUtils.service.js000066400000000000000000000122011413250514000267060ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS201: Simplify complex destructure assignments * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class DataUtils { constructor() { let dataUtilsService; return new (dataUtilsService = class dataUtilsService { // capitalize first word capitalize(string) { return string[0].toUpperCase() + string.slice(1).toLowerCase(); } // returns the type of the endpoint type(arg) { let a = 
this.copyOrSplit(arg); a = a.filter(e => e !== '*'); // if the argument count is even, the last argument is an id if ((a.length % 2) === 0) { a.pop(); } let type = a.pop(); if (type === "contents") { type = "logchunks"; } return type; } // singularize the type name singularType(arg) { return this.type(arg).replace(/s$/, ''); } className(arg) { return this.capitalize(this.singularType(arg)); } classId(arg) { if (this.singularType(arg) === "forcescheduler") { return "name"; } if (this.singularType(arg) === "buildset") { return "bsid"; } return this.singularType(arg) + "id"; } socketPath(arg) { const a = this.copyOrSplit(arg); // if the argument count is even, the last argument is an id // Format of properties endpoint is an exception // and needs to be properties/*, not properties/*/* const stars = ['*']; // is it odd? if (((a.length % 2) === 1) && !arg.endsWith("/properties")) { stars.push('*'); } return a.concat(stars).join('/'); } socketPathRE(socketPath) { return new RegExp(`^${socketPath.replace(/\*/g, "[^/]+")}$`); } restPath(arg) { let a = this.copyOrSplit(arg); a = a.filter(e => e !== '*'); return a.join('/'); } endpointPath(arg) { // if the argument count is even, the last argument is an id let a = this.copyOrSplit(arg); a = a.filter(e => e !== '*'); // is it even? 
if ((a.length % 2) === 0) { a.pop(); } return a.join('/'); } copyOrSplit(arrayOrString) { if (angular.isArray(arrayOrString)) { // return a copy return arrayOrString.slice(); } else if (angular.isString(arrayOrString)) { // split the string to get an array return arrayOrString.split('/'); } else { throw new TypeError(`Parameter 'arrayOrString' must be a array or a string, not ${typeof arrayOrString}`); } } unWrap(object, path) { return object[this.type(path)]; } splitOptions(args) { // keep defined arguments only let accessor; args = args.filter(e => e != null); let query = {}; // default // get the query parameters const last = args[args.length - 1]; const subscribe = (accessor = null); if (angular.isObject(last)) { query = args.pop(); } return [args, query]; } parse(object) { for (let k in object) { const v = object[k]; try { object[k] = angular.fromJson(v); } catch (error) {} } // ignore return object; } numberOrString(str = null) { // if already a number if (angular.isNumber(str)) { return str; } // else parse string to integer const number = parseInt(str, 10); if (!isNaN(number)) { return number; } else { return str; } } emailInString(string) { if (!angular.isString(string)) { throw new TypeError(`Parameter 'string' must be a string, not ${typeof string}`); } const emailRegex = /[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*/; try { return emailRegex.exec(string).pop() || ''; } catch (error) { return ''; } } }); } } angular.module('bbData') .service('dataUtilsService', [DataUtils]); buildbot-3.4.0/www/data_module/src/services/dataUtils/dataUtils.service.spec.js000066400000000000000000000131721413250514000276470ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('Data utils service', function() { 
beforeEach(angular.mock.module('bbData')); let dataUtilsService = undefined; const injected = $injector => dataUtilsService = $injector.get('dataUtilsService'); beforeEach(inject(injected)); it('should be defined', () => expect(dataUtilsService).toBeDefined()); describe('capitalize(string)', () => it('should capitalize the parameter string', function() { let result = dataUtilsService.capitalize('test'); expect(result).toBe('Test'); result = dataUtilsService.capitalize('t'); expect(result).toBe('T'); }) ); describe('type(arg)', () => it('should return the type of the parameter endpoint', function() { let result = dataUtilsService.type('asd/1'); expect(result).toBe('asd'); result = dataUtilsService.type('asd/1/bnm'); expect(result).toBe('bnm'); }) ); describe('singularType(arg)', () => it('should return the singular the type name of the parameter endpoint', function() { let result = dataUtilsService.singularType('tests/1'); expect(result).toBe('test'); result = dataUtilsService.singularType('tests'); expect(result).toBe('test'); }) ); describe('socketPath(arg)', () => it('should return the WebSocket subscribe path of the parameter path', function() { let result = dataUtilsService.socketPath('asd/1/bnm'); expect(result).toBe('asd/1/bnm/*/*'); result = dataUtilsService.socketPath('asd/1'); expect(result).toBe('asd/1/*'); }) ); describe('socketPathRE(arg)', () => it('should return the WebSocket subscribe path of the parameter path', function() { let result = dataUtilsService.socketPathRE('asd/1/*'); expect(result.test("asd/1/new")).toBeTruthy(); result = dataUtilsService.socketPathRE('asd/1/bnm/*/*').source; expect([ '^asd\\/1\\/bnm\\/[^\\/]+\\/[^\\/]+$', '^asd\\/1\\/bnm\\/[^/]+\\/[^/]+$' ]).toContain(result); result = dataUtilsService.socketPathRE('asd/1/*').source; expect([ '^asd\\/1\\/[^\\/]+$', '^asd\\/1\\/[^/]+$' ]).toContain(result); }) ); describe('restPath(arg)', () => it('should return the rest path of the parameter WebSocket subscribe path', function() { let 
result = dataUtilsService.restPath('asd/1/bnm/*/*'); expect(result).toBe('asd/1/bnm'); result = dataUtilsService.restPath('asd/1/*'); expect(result).toBe('asd/1'); }) ); describe('endpointPath(arg)', () => it('should return the endpoint path of the parameter rest or WebSocket path', function() { let result = dataUtilsService.endpointPath('asd/1/bnm/*/*'); expect(result).toBe('asd/1/bnm'); result = dataUtilsService.endpointPath('asd/1/*'); expect(result).toBe('asd'); }) ); describe('copyOrSplit(arrayOrString)', function() { it('should copy an array', function() { const array = [1, 2, 3]; const result = dataUtilsService.copyOrSplit(array); expect(result).not.toBe(array); expect(result).toEqual(array); }); it('should split a string', function() { const string = 'asd/123/bnm'; const result = dataUtilsService.copyOrSplit(string); expect(result).toEqual(['asd', '123', 'bnm']); }); }); describe('unWrap(data, path)', () => it('should return the array of the type based on the path', function() { const data = { asd: [{'data': 'data'}], meta: {} }; let result = dataUtilsService.unWrap(data, 'bnm/1/asd'); expect(result).toBe(data.asd); result = dataUtilsService.unWrap(data, 'bnm/1/asd/2'); expect(result).toBe(data.asd); }) ); describe('parse(object)', () => it('should parse fields from JSON', function() { const test = { a: 1, b: 'asd3', c: angular.toJson(['a', 1, 2]), d: angular.toJson({asd: [], bsd: {}}) }; const copy = angular.copy(test); copy.c = angular.toJson(copy.c); copy.d = angular.toJson(copy.d); const parsed = dataUtilsService.parse(test); expect(parsed).toEqual(test); }) ); describe('numberOrString(string)', function() { it('should convert a string to a number if possible', function() { const result = dataUtilsService.numberOrString('12'); expect(result).toBe(12); }); it('should return the string if it is not a number', function() { const result = dataUtilsService.numberOrString('w3as'); expect(result).toBe('w3as'); }); }); describe('emailInString(string)', () => 
it('should return an email from a string', function() { let email = dataUtilsService.emailInString('foo '); expect(email).toBe('bar@foo.com'); email = dataUtilsService.emailInString('bar@foo.com'); expect(email).toBe('bar@foo.com'); }) ); }); buildbot-3.4.0/www/data_module/src/services/rest/000077500000000000000000000000001413250514000220065ustar00rootroot00000000000000buildbot-3.4.0/www/data_module/src/services/rest/rest.service.js000066400000000000000000000043401413250514000247610ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Rest { constructor($http, $q, API) { let RestService; return new (RestService = class RestService { execute(config) { return $q((resolve, reject) => $http(config).then(function(response) { try { const data = angular.fromJson(response.data); resolve(data); } catch (e) { reject(e); } }, function(response) { reject(response.data); }) ); } get(url, params) { if (params == null) { params = {}; } const canceller = $q.defer(); const config = { method: 'GET', url: this.parse(API, url), params, headers: { 'Accept': 'application/json' }, timeout: canceller.promise }; const promise = this.execute(config); promise.cancel = canceller.resolve; return promise; } post(url, data) { if (data == null) { data = {}; } const canceller = $q.defer(); const config = { method: 'POST', url: this.parse(API, url), data, headers: { 'Content-Type': 'application/json' }, timeout: canceller.promise }; const promise = this.execute(config); promise.cancel = canceller.resolve; return promise; } parse(...args) { return args.join('/').replace(/\/\//, '/'); } }); } } angular.module('bbData') .service('restService', ['$http', '$q', 'API', Rest]); 
buildbot-3.4.0/www/data_module/src/services/rest/rest.service.spec.js000066400000000000000000000061301413250514000257110ustar00rootroot00000000000000describe('Rest service', function() { let $httpBackend; beforeEach(angular.mock.module('bbData')); beforeEach(() => angular.mock.module($provide => $provide.constant('API', '/api/')) ); let restService = ($httpBackend = undefined); const injected = function($injector) { restService = $injector.get('restService'); $httpBackend = $injector.get('$httpBackend'); }; beforeEach(inject(injected)); afterEach(function() { $httpBackend.verifyNoOutstandingExpectation(); $httpBackend.verifyNoOutstandingRequest(); }); it('should be defined', () => expect(restService).toBeDefined()); it('should make an ajax GET call to /api/endpoint', function() { const response = {a: 'A'}; $httpBackend.whenGET('/api/endpoint').respond(response); let gotResponse = null; restService.get('endpoint').then(r => gotResponse = r); expect(gotResponse).toBeNull(); $httpBackend.flush(); expect(gotResponse).toEqual(response); }); it('should make an ajax GET call to /api/endpoint with parameters', function() { const params = {key: 'value'}; $httpBackend.whenGET('/api/endpoint?key=value').respond(200); restService.get('endpoint', params); $httpBackend.flush(); }); it('should reject the promise on error', function() { const error = 'Internal server error'; $httpBackend.expectGET('/api/endpoint').respond(500, error); let gotResponse = null; restService.get('endpoint').then(response => gotResponse = response , reason => gotResponse = reason); $httpBackend.flush(); expect(gotResponse).toBe(error); }); it('should make an ajax POST call to /api/endpoint', function() { const response = {}; const data = {b: 'B'}; $httpBackend.expectPOST('/api/endpoint', data).respond(response); let gotResponse = null; restService.post('endpoint', data).then(r => gotResponse = r); $httpBackend.flush(); expect(gotResponse).toEqual(response); }); it('should reject the promise when the 
response is not valid JSON', function() { const response = 'aaa'; const data = {b: 'B'}; $httpBackend.expectPOST('/api/endpoint', data).respond(response); let gotResponse = null; restService.post('endpoint', data).then(response => gotResponse = response , reason => gotResponse = reason); $httpBackend.flush(); expect(gotResponse).not.toBeNull(); expect(gotResponse).not.toEqual(response); }); it('should reject the promise when cancelled', inject(function($rootScope) { $httpBackend.expectGET('/api/endpoint').respond({}); let gotResponse = null; let rejected = false; const request = restService.get('endpoint'); request.then(response => gotResponse = response , reason => rejected = true); request.cancel(); $rootScope.$apply(); expect(gotResponse).toBeNull(); expect(rejected).toBe(true); }) ); }); buildbot-3.4.0/www/data_module/src/services/socket/000077500000000000000000000000001413250514000223215ustar00rootroot00000000000000buildbot-3.4.0/www/data_module/src/services/socket/socket.service.js000066400000000000000000000147031413250514000256130ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * DS206: Consider reworking classes to avoid initClass * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Socket { constructor($log, $q, $rootScope, $location, Stream, webSocketService, $timeout) { let SocketService; return new ((SocketService = (function() { SocketService = class SocketService { static initClass() { // subscribe to event stream to get WebSocket messages this.prototype.eventStream = null; } constructor() { // waiting queue this.queue = []; // deferred object for resolving response promises // map of id: promise this.deferred = {}; this.subscribers = {}; // open socket this.open(); } open() { if (this.socket == null) { this.socket = 
webSocketService.getWebSocket(this.getUrl()); } // flush queue on open this.socket.onopen = () => this.flush(); return this.setupEventStream(); } setupEventStream() { if (this.eventStream == null) { this.eventStream = new Stream(); } return this.socket.onmessage = message => { let id; try { const data = angular.fromJson(message.data); // response message if (data.code != null) { id = data._id; if (data.code === 200) { return (this.deferred[id] != null ? this.deferred[id].resolve(true) : undefined); } else { return (this.deferred[id] != null ? this.deferred[id].reject(data) : undefined); } // status update message } else { return $rootScope.$applyAsync(() => { return this.eventStream.push(data); }); } } catch (e) { return (this.deferred[id] != null ? this.deferred[id].reject(e) : undefined); } }; } close() { return this.socket.close(); } send(data) { // add _id to each message const id = this.nextId(); data._id = id; if (this.deferred[id] == null) { this.deferred[id] = $q.defer(); } data = angular.toJson(data); // ReconnectingWebSocket does not put status constants on instance if (this.socket.readyState === (this.socket.OPEN || 1)) { this.socket.send(data); } else { // if the WebSocket is not open yet, add the data to the queue this.queue.push(data); } // socket is not watched by protractor, so we need to // create a timeout while we are using the socket so that protractor waits for it const to = $timeout( ()=> {}, 20000) // return promise, which will be resolved once a response message has the same id return this.deferred[id].promise.then((r) => { $timeout.cancel(to); return r; }) } flush() { // send all the data waiting in the queue let data; while ((data = this.queue.pop())) { this.socket.send(data); } } nextId() { if (this.id == null) { this.id = 0; } this.id = this.id < 1000 ? this.id + 1 : 0; return this.id; } getRootPath() { return location.pathname; } getUrl() { const host = $location.host(); const protocol = $location.protocol() === 'https' ? 
'wss' : 'ws'; const defaultport = $location.protocol() === 'https' ? 443 : 80; const path = this.getRootPath(); const port = $location.port() === defaultport ? '' : `:${$location.port()}`; return `${protocol}://${host}${port}${path}ws`; } // High level api. Maintain a list of subscribers for one event path subscribe(eventPath, collection) { const l = this.subscribers[eventPath] != null ? this.subscribers[eventPath] : (this.subscribers[eventPath] = []); l.push(collection); if (l.length === 1) { return this.send({ cmd: "startConsuming", path: eventPath }); } return $q.resolve(); } unsubscribe(eventPath, collection) { const l = this.subscribers[eventPath] != null ? this.subscribers[eventPath] : (this.subscribers[eventPath] = []); const pos = l.indexOf(collection); if (pos >= 0) { l.splice(pos, 1); if (l.length === 0) { return this.send({ cmd: "stopConsuming", path: eventPath }); } } return $q.resolve(); } }; SocketService.initClass(); return SocketService; })())); } } angular.module('bbData') .service('socketService', ['$log', '$q', '$rootScope', '$location', 'Stream', 'webSocketService', '$timeout', Socket]); buildbot-3.4.0/www/data_module/src/services/socket/socket.service.spec.js000066400000000000000000000174021413250514000265430ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('Socket service', function() { let $location, socket, socketService; var WebSocketBackend = (function() { let self = undefined; let MockWebSocket = undefined; WebSocketBackend = class WebSocketBackend { static initClass() { this.prototype.sendQueue = []; this.prototype.receiveQueue = []; self = null; // mocked WebSocket MockWebSocket = (function() { MockWebSocket = class MockWebSocket { static initClass() 
{ this.prototype.OPEN = 1; } send(message) { return self.receiveQueue.push(message); } }; MockWebSocket.initClass(); return MockWebSocket; })(); } constructor() { self = this; this.webSocket = new MockWebSocket(); } send(message) { const data = {data: message}; return this.sendQueue.push(data); } flush() { let message; while ((message = this.sendQueue.shift())) { this.webSocket.onmessage(message); } } getWebSocket() { return this.webSocket; } }; WebSocketBackend.initClass(); return WebSocketBackend; })(); const webSocketBackend = new WebSocketBackend(); beforeEach(function() { angular.mock.module('bbData'); angular.mock.module($provide => $provide.constant('webSocketService', webSocketBackend)); }); let $rootScope = (socketService = (socket = ($location = null))); const injected = function($injector) { $rootScope = $injector.get('$rootScope'); $location = $injector.get('$location'); socketService = $injector.get('socketService'); ({ socket } = socketService); spyOn(socket, 'send').and.callThrough(); spyOn(socket, 'onmessage').and.callThrough(); }; beforeEach(inject(injected)); it('should be defined', () => expect(socketService).toBeDefined()); it('should send the data, when the WebSocket is open', function() { // socket is opening socket.readyState = 0; // 2 message to be sent const msg1 = {a: 1}; const msg2 = {b: 2}; const msg3 = {c: 3}; socketService.send(msg1); socketService.send(msg2); expect(socket.send).not.toHaveBeenCalled(); // open the socket socket.onopen(); expect(socket.send).toHaveBeenCalled(); expect(webSocketBackend.receiveQueue).toContain(angular.toJson(msg1)); expect(webSocketBackend.receiveQueue).toContain(angular.toJson(msg2)); expect(webSocketBackend.receiveQueue).not.toContain(angular.toJson(msg3)); }); it('should add an _id to each message', function() { socket.readyState = 1; expect(socket.send).not.toHaveBeenCalled(); socketService.send({}); expect(socket.send).toHaveBeenCalledWith(jasmine.any(String)); const argument = 
socket.send.calls.argsFor(0)[0]; expect(angular.fromJson(argument)._id).toBeDefined(); }); it('should resolve the promise when a response message is received with code 200', function() { socket.readyState = 1; const msg = {cmd: 'command'}; const promise = socketService.send(msg); const handler = jasmine.createSpy('handler'); promise.then(handler); // the promise should not be resolved expect(handler).not.toHaveBeenCalled(); // get the id from the message const argument = socket.send.calls.argsFor(0)[0]; const id = angular.fromJson(argument)._id; // create a response message with status code 200 const response = angular.toJson({_id: id, code: 200}); // send the message webSocketBackend.send(response); $rootScope.$apply(() => webSocketBackend.flush()); // the promise should be resolved expect(handler).toHaveBeenCalled(); }); it('should reject the promise when a response message is received, but the code is not 200', function() { socket.readyState = 1; const msg = {cmd: 'command'}; const promise = socketService.send(msg); const handler = jasmine.createSpy('handler'); const errorHandler = jasmine.createSpy('errorHandler'); promise.then(handler, errorHandler); // the promise should not be rejected expect(handler).not.toHaveBeenCalled(); expect(errorHandler).not.toHaveBeenCalled(); // get the id from the message const argument = socket.send.calls.argsFor(0)[0]; const id = angular.fromJson(argument)._id; // create a response message with status code 500 const response = angular.toJson({_id: id, code: 500}); // send the message webSocketBackend.send(response); $rootScope.$apply(() => webSocketBackend.flush()); // the promise should be rejected expect(handler).not.toHaveBeenCalled(); expect(errorHandler).toHaveBeenCalled(); }); describe('getUrl()', function() { it('should return the WebSocket url based on the host and port (localhost)', function() { const host = 'localhost'; const port = 8080; spyOn($location, 'host').and.returnValue(host); spyOn($location, 
'port').and.returnValue(port); spyOn(socketService, 'getRootPath').and.returnValue('/'); const url = socketService.getUrl(); expect(url).toBe('ws://localhost:8080/ws'); }); it('should return the WebSocket url based on the host and port', function() { const host = 'buildbot.test'; const port = 80; spyOn($location, 'host').and.returnValue(host); spyOn($location, 'port').and.returnValue(port); spyOn(socketService, 'getRootPath').and.returnValue('/'); const url = socketService.getUrl(); expect(url).toBe('ws://buildbot.test/ws'); }); it('should return the WebSocket url based on the host and port and protocol', function() { const host = 'buildbot.test'; const port = 443; const protocol = 'https'; spyOn($location, 'host').and.returnValue(host); spyOn($location, 'port').and.returnValue(port); spyOn($location, 'protocol').and.returnValue(protocol); spyOn(socketService, 'getRootPath').and.returnValue('/'); const url = socketService.getUrl(); expect(url).toBe('wss://buildbot.test/ws'); }); it('should return the WebSocket url based on the host and port and protocol and basedir', function() { const host = 'buildbot.test'; const port = 443; const protocol = 'https'; const path = '/travis/'; spyOn($location, 'host').and.returnValue(host); spyOn($location, 'port').and.returnValue(port); spyOn($location, 'protocol').and.returnValue(protocol); spyOn(socketService, 'getRootPath').and.returnValue(path); const url = socketService.getUrl(); expect(url).toBe('wss://buildbot.test/travis/ws'); }); }); }); buildbot-3.4.0/www/data_module/src/services/socket/webSocketBackend.service.js000066400000000000000000000034521413250514000275200ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ var WebSocketBackend = 
(function() { let self = undefined; let MockWebSocket = undefined; WebSocketBackend = class WebSocketBackend { static initClass() { self = null; this.prototype.sendQueue = []; this.prototype.receiveQueue = []; // mocked WebSocket MockWebSocket = (function() { MockWebSocket = class MockWebSocket { static initClass() { this.prototype.OPEN = 1; } send(message) { return self.receiveQueue.push(message); } close() { return (typeof this.onclose === 'function' ? this.onclose() : undefined); } }; MockWebSocket.initClass(); return MockWebSocket; })(); } constructor() { self = this; this.webSocket = new MockWebSocket(); } send(message) { const data = {data: message}; return this.sendQueue.push(data); } flush() { let message; while ((message = this.sendQueue.shift())) { this.webSocket.onmessage(message); } } getWebSocket() { return this.webSocket; } }; WebSocketBackend.initClass(); return WebSocketBackend; })(); angular.module('bbData') .service('webSocketBackendService', [WebSocketBackend]); buildbot-3.4.0/www/data_module/src/services/socket/websocket.service.js000066400000000000000000000020361413250514000263050ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class WebSocket { constructor($window) { let WebSocketProvider; return new (WebSocketProvider = class WebSocketProvider { constructor() {} // this function will be mocked in the tests getWebSocket(url) { const match = /wss?:\/\//.exec(url); if (!match) { throw new Error('Invalid url provided'); } // use ReconnectingWebSocket if available // TODO write own implementation? 
if ($window.ReconnectingWebSocket != null) { return new $window.ReconnectingWebSocket(url); } else { return new $window.WebSocket(url); } } }); } } angular.module('bbData') .service('webSocketService', ['$window', WebSocket]); buildbot-3.4.0/www/data_module/src/services/stream/000077500000000000000000000000001413250514000223245ustar00rootroot00000000000000buildbot-3.4.0/www/data_module/src/services/stream/stream.service.js000066400000000000000000000045161413250514000256220ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * DS206: Consider reworking classes to avoid initClass * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Stream { constructor() { let StreamInstance; return StreamInstance = (function() { StreamInstance = class StreamInstance { static initClass() { // the unsubscribe listener will be called on each unsubscribe call this.prototype.onUnsubscribe = null; this.prototype.listeners = []; } subscribe(listener) { if (!angular.isFunction(listener)) { throw new TypeError(`Parameter 'listener' must be a function, not ${typeof listener}`); } listener.id = this.generateId(); this.listeners.push(listener); // unsubscribe return () => { const i = this.listeners.indexOf(listener); const removed = this.listeners.splice(i, 1); // call the unsubscribe listener if it's a function if (angular.isFunction(this.onUnsubscribe)) { return this.onUnsubscribe(listener); } }; } push(data) { // call each listener return Array.from(this.listeners).map((listener) => listener(data)); } destroy() { // @listeners = [], but keep the reference while (this.listeners.length > 0) { this.listeners.pop(); } } generateId() { if (this.lastId == null) { this.lastId = 0; } return this.lastId++; } }; 
StreamInstance.initClass(); return StreamInstance; })(); } } angular.module('bbData') .factory('Stream', [Stream]); buildbot-3.4.0/www/data_module/src/services/stream/stream.service.spec.js000066400000000000000000000057631413250514000265600ustar00rootroot00000000000000describe('Stream service', function() { let stream; beforeEach(angular.mock.module('bbData')); let Stream = (stream = null); const injected = function($injector) { Stream = $injector.get('Stream'); stream = new Stream(); }; beforeEach(inject(injected)); it('should be defined', function() { expect(Stream).toBeDefined(); expect(stream).toBeDefined(); }); it('should add the listener to listeners on subscribe call', function() { const { listeners } = stream; expect(listeners.length).toBe(0); stream.subscribe(function() {}); expect(listeners.length).toBe(1); }); it('should add a unique id to each listener passed in to subscribe', function() { const { listeners } = stream; const listener1 = function() {}; const listener2 = function() {}; stream.subscribe(listener1); stream.subscribe(listener2); expect(listener1.id).toBeDefined(); expect(listener2.id).toBeDefined(); expect(listener1.id).not.toBe(listener2.id); }); it('should return the unsubscribe function on subscribe call', function() { const { listeners } = stream; const listener = function() {}; const otherListener = function() {}; const unsubscribe = stream.subscribe(listener); stream.subscribe(otherListener); expect(listeners).toContain(listener); unsubscribe(); expect(listeners).not.toContain(listener); expect(listeners).toContain(otherListener); }); it('should call all listeners on push call', function() { const data = {a: 'A', b: 'B'}; const listeners = { first(data) { expect(data).toEqual({a: 'A', b: 'B'}); }, second(data) { expect(data).toEqual({a: 'A', b: 'B'}); } }; spyOn(listeners, 'first').and.callThrough(); spyOn(listeners, 'second').and.callThrough(); stream.subscribe(listeners.first); stream.subscribe(listeners.second); 
expect(listeners.first).not.toHaveBeenCalled(); expect(listeners.second).not.toHaveBeenCalled(); stream.push(data); expect(listeners.first).toHaveBeenCalled(); expect(listeners.second).toHaveBeenCalled(); }); it('should remove all listeners on destroy call', function() { const { listeners } = stream; expect(listeners.length).toBe(0); stream.subscribe(function() {}); stream.subscribe(function() {}); expect(listeners.length).not.toBe(0); stream.destroy(); expect(listeners.length).toBe(0); }); it('should call the unsubscribe listener on unsubscribe call', function() { spyOn(stream, 'onUnsubscribe'); const listener = function() {}; const unsubscribe = stream.subscribe(listener); expect(stream.onUnsubscribe).not.toHaveBeenCalled(); unsubscribe(); expect(stream.onUnsubscribe).toHaveBeenCalledWith(listener); }); }); buildbot-3.4.0/www/data_module/src/tests.webpack.js000066400000000000000000000004341413250514000223220ustar00rootroot00000000000000// This file is an entry point for angular tests // Avoids some weird issues when using webpack + angular. import 'angular'; import 'angular-mocks/angular-mocks'; import './data.module.js' const context = require.context('./', true, /\.spec.js$/); context.keys().forEach(context); buildbot-3.4.0/www/data_module/webpack.config.js000066400000000000000000000012571413250514000216420ustar00rootroot00000000000000'use strict'; const common = require('buildbot-build-common'); const env = require('yargs').argv.env; const pkg = require('./package.json'); var event = process.env.npm_lifecycle_event; var isTest = event === 'test' || event === 'test-watch'; var isProd = env === 'prod'; module.exports = function() { var basename = isProd ? 
pkg.name + '.min' : pkg.name; return common.createTemplateWebpackConfig({ entry: { [basename]: './src/data.module.js', }, libraryName: pkg.name, pluginName: pkg.plugin_name, dirname: __dirname, isTest: isTest, isProd: isProd, outputPath: __dirname + '/dist', }); }(); buildbot-3.4.0/www/data_module/yarn.lock000066400000000000000000011405521413250514000202520ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.7.4.tgz#37e864532200cb6b50ee9a4045f5f817840166ab" integrity sha512-+bYbx56j4nYBmpsWtnPUsKW3NdnYxbqyfrP2w9wILBuHzdfIKz9prieZK0DFPyIzkjYVUe4QkusGL07r5pXznQ== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helpers" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" convert-source-map "^1.7.0" debug "^4.1.0" json5 "^2.1.0" lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" "@babel/generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.7.4.tgz#db651e2840ca9aa66f327dcec1dc5f5fa9611369" integrity sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg== dependencies: "@babel/types" "^7.7.4" jsesc "^2.5.1" lodash "^4.17.13" source-map "^0.5.0" "@babel/helper-annotate-as-pure@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.4.tgz#bb3faf1e74b74bd547e867e48f551fa6b098b6ce" integrity 
sha512-2BQmQgECKzYKFPpiycoF9tlb5HA4lrVyAmLLVK177EcQAqjVLciUb2/R+n1boQ9y5ENV3uz2ZqiNw7QMBBw1Og== dependencies: "@babel/types" "^7.7.4" "@babel/helper-builder-binary-assignment-operator-visitor@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.4.tgz#5f73f2b28580e224b5b9bd03146a4015d6217f5f" integrity sha512-Biq/d/WtvfftWZ9Uf39hbPBYDUo986m5Bb4zhkeYDGUllF43D+nUe5M6Vuo6/8JDK/0YX/uBdeoQpyaNhNugZQ== dependencies: "@babel/helper-explode-assignable-expression" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-call-delegate@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.7.4.tgz#621b83e596722b50c0066f9dc37d3232e461b801" integrity sha512-8JH9/B7J7tCYJ2PpWVpw9JhPuEVHztagNVuQAFBVFYluRMlpG7F1CgKEgGeL6KFqcsIa92ZYVj6DSc0XwmN1ZA== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-create-regexp-features-plugin@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.4.tgz#6d5762359fd34f4da1500e4cff9955b5299aaf59" integrity sha512-Mt+jBKaxL0zfOIWrfQpnfYCN7/rS6GKx6CCCfuoqVVd+17R8zNDlzVYmIi9qyb2wOk002NsmSTDymkIygDUH7A== dependencies: "@babel/helper-regex" "^7.4.4" regexpu-core "^4.6.0" "@babel/helper-define-map@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz#2841bf92eb8bd9c906851546fe6b9d45e162f176" integrity sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-explode-assignable-expression@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.4.tgz#fa700878e008d85dc51ba43e9fb835cddfe05c84" integrity sha512-2/SicuFrNSXsZNBxe5UGdLr+HZg+raWBLE9vC98bdYOKX/U6PY0mdGlYUJdtTDPSU0Lw0PNbKKDpwYHJLn2jLg== dependencies: "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz#ab6e041e7135d436d8f0a3eca15de5b67a341a2e" integrity sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ== dependencies: "@babel/helper-get-function-arity" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-get-function-arity@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz#cb46348d2f8808e632f0ab048172130e636005f0" integrity sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA== dependencies: "@babel/types" "^7.7.4" "@babel/helper-hoist-variables@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.4.tgz#612384e3d823fdfaaf9fce31550fe5d4db0f3d12" integrity sha512-wQC4xyvc1Jo/FnLirL6CEgPgPCa8M74tOdjWpRhQYapz5JC7u3NYU1zCVoVAGCE3EaIP9T1A3iW0WLJ+reZlpQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-member-expression-to-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.4.tgz#356438e2569df7321a8326644d4b790d2122cb74" integrity sha512-9KcA1X2E3OjXl/ykfMMInBK+uVdfIVakVe7W7Lg3wfXUNyS3Q1HWLFRwZIjhqiCGbslummPDnmb7vIekS0C1vw== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-imports@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.7.4.tgz#e5a92529f8888bf319a6376abfbd1cebc491ad91" integrity sha512-dGcrX6K9l8258WFjyDLJwuVKxR4XZfU0/vTUgOQYWEnRD8mgr+p4d6fCUMq/ys0h4CCt/S5JhbvtyErjWouAUQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-transforms@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.7.4.tgz#8d7cdb1e1f8ea3d8c38b067345924ac4f8e0879a" integrity sha512-ehGBu4mXrhs0FxAqN8tWkzF8GSIGAiEumu4ONZ/hD9M88uHcD+Yu2ttKfOCgwzoesJOJrtQh7trI5YPbRtMmnA== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-simple-access" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-optimise-call-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.4.tgz#034af31370d2995242aa4df402c3b7794b2dcdf2" integrity sha512-VB7gWZ2fDkSuqW6b1AKXkJWO5NyNI3bFL/kK79/30moK57blr6NbH8xcl2XcKCwOmJosftWunZqfO84IGq3ZZg== dependencies: "@babel/types" "^7.7.4" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.4.tgz#c68c2407350d9af0e061ed6726afb4fff16d0234" integrity sha512-Sk4xmtVdM9sA/jCI80f+KS+Md+ZHIpjuqmYPk1M7F/upHou5e4ReYmExAiu6PVe65BhJPZA2CY9x9k4BqE5klw== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-wrap-function" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-replace-supers@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.7.4.tgz#3c881a6a6a7571275a72d82e6107126ec9e2cdd2" integrity sha512-pP0tfgg9hsZWo5ZboYGuBn/bbYT/hdLPVSS4NMmiRJdwWhP0IznPwN9AE1JwyGsjSPLC364I0Qh5p+EPkGPNpg== dependencies: "@babel/helper-member-expression-to-functions" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-simple-access@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.7.4.tgz#a169a0adb1b5f418cfc19f22586b2ebf58a9a294" integrity sha512-zK7THeEXfan7UlWsG2A6CI/L9jVnI5+xxKZOdej39Y0YtDYKx9raHk5F2EtK9K8DHRTihYwg20ADt9S36GR78A== dependencies: "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-split-export-declaration@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz#57292af60443c4a3622cf74040ddc28e68336fd8" integrity sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug== dependencies: "@babel/types" "^7.7.4" "@babel/helper-wrap-function@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz#37ab7fed5150e22d9d7266e830072c0cdd8baace" integrity sha512-VsfzZt6wmsocOaVU0OokwrIytHND55yvyT4BPB9AIIgwr8+x7617hetdJTsuGwygN5RC6mxA9EJztTjuwm2ofg== dependencies: "@babel/helper-function-name" 
"^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helpers@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.7.4.tgz#62c215b9e6c712dadc15a9a0dcab76c92a940302" integrity sha512-ak5NGZGJ6LV85Q1Zc9gn2n+ayXOizryhjSUBTdu5ih1tlVCJeuQENzc4ItyCVhINVXvIT/ZQ4mheGIsfBkpskg== dependencies: "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/highlight@^7.0.0": version "7.5.0" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" "@babel/parser@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.7.4.tgz#75ab2d7110c2cf2fa949959afb05fa346d2231bb" integrity sha512-jIwvLO0zCL+O/LmEJQjWA75MQTWwx3c3u2JOTDK5D3/9egrWRRA0/0hk9XXywYnXZVVpzrBYeIQTmhwUaePI9g== "@babel/plugin-proposal-async-generator-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz#0351c5ac0a9e927845fffd5b82af476947b7ce6d" integrity sha512-1ypyZvGRXriY/QP668+s8sFr2mqinhkRDMPSQLNghCQE+GAkFtp+wkHVvg2+Hdki8gwP+NFzJBJ/N1BfzCCDEw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-proposal-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.4.tgz#dde64a7f127691758cbfed6cf70de0fa5879d52d" integrity sha512-StH+nGAdO6qDB1l8sZ5UBV8AC3F2VW2I8Vfld73TMKyptMU9DY5YsJAS8U81+vEtxcH3Y/La0wG0btDrhpnhjQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import" "^7.7.4" 
"@babel/plugin-proposal-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.7.4.tgz#7700a6bfda771d8dc81973249eac416c6b4c697d" integrity sha512-wQvt3akcBTfLU/wYoqm/ws7YOAQKu8EVJEvHip/mzkNtjaclQoCCIqKXFP5/eyfnfbQCDV3OLRIK3mIVyXuZlw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.7.4.tgz#cc57849894a5c774214178c8ab64f6334ec8af71" integrity sha512-rnpnZR3/iWKmiQyJ3LKJpSwLDcX/nSXhdLk4Aq/tXOApIvyu7qoabrige0ylsAJffaUC51WiBu209Q0U+86OWQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.7.4.tgz#ec21e8aeb09ec6711bc0a39ca49520abee1de379" integrity sha512-DyM7U2bnsQerCQ+sejcTNZh8KQEUuC3ufzdnVnSiUv/qoGJp2Z3hanKL18KDhsBT5Wj6a7CMT5mdyCNJsEaA9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.4.tgz#7c239ccaf09470dbe1d453d50057460e84517ebb" integrity sha512-cHgqHgYvffluZk85dJ02vloErm3Y6xtH+2noOBOJ2kXOJH3aVCDnj5eR/lVNlTnYu4hndAPJD3rTFjW3qee0PA== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-async-generators@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.7.4.tgz#331aaf310a10c80c44a66b238b6e49132bd3c889" integrity sha512-Li4+EjSpBgxcsmeEF8IFcfV/+yJGxHXDirDkEoyFjumuwbmfCVHUt0HuowD/iGM7OhIRyXJH9YXxqiH6N815+g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import@^7.2.0", "@babel/plugin-syntax-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.7.4.tgz#29ca3b4415abfe4a5ec381e903862ad1a54c3aec" integrity sha512-jHQW0vbRGvwQNgyVxwDh4yuXu4bH1f5/EICJLAhl1SblLs2CDhrsmCk+v5XLdE9wxtAFRyxx+P//Iw+a5L/tTg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.7.4.tgz#86e63f7d2e22f9e27129ac4e83ea989a382e86cc" integrity sha512-QpGupahTQW1mHRXddMG5srgpHWqRLwJnJZKXTigB9RPFCCGbDGCgBeM/iC82ICXp414WeYx/tD54w7M2qRqTMg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.7.4.tgz#47cf220d19d6d0d7b154304701f468fc1cc6ff46" integrity sha512-mObR+r+KZq0XhRVS2BrBKBpr5jqrqzlPvS9C9vuOf5ilSwzloAl7RPWLrgKdWS6IreaVrjHxTjtyqFiOisaCwg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.7.4.tgz#a3e38f59f4b6233867b4a92dcb0ee05b2c334aa6" integrity sha512-4ZSuzWgFxqHRE31Glu+fEr/MirNZOMYmD/0BhBWyLyOOQz/gTAl7QmWm2hX1QxEIXsr2vkdlwxIzTyiYRC4xcQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-top-level-await@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.4.tgz#bd7d8fa7b9fee793a36e4027fd6dd1aa32f946da" integrity sha512-wdsOw0MvkL1UIgiQ/IFr3ETcfv1xb8RMM0H9wbiDyLaJFyiDg5oZvDLCXosIXmFeIlweML5iOBXAkqddkYNizg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-arrow-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.7.4.tgz#76309bd578addd8aee3b379d809c802305a98a12" integrity sha512-zUXy3e8jBNPiffmqkHRNDdZM2r8DWhCB7HhcoyZjiK1TxYEluLHAvQuYnTT+ARqRpabWqy/NHkO6e3MsYB5YfA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.4.tgz#694cbeae6d613a34ef0292713fa42fb45c4470ba" integrity sha512-zpUTZphp5nHokuy8yLlyafxCJ0rSlFoSHypTUWgpdwoDXWQcseaect7cJ8Ppk6nunOM6+5rPMkod4OYKPR5MUg== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.7.4.tgz#d0d9d5c269c78eaea76227ace214b8d01e4d837b" integrity sha512-kqtQzwtKcpPclHYjLK//3lH8OFsCDuDJBaFhVwf8kqdnF6MN4l618UDlcA7TfRs3FayrHj+svYnSX8MC9zmUyQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-block-scoping@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.7.4.tgz#200aad0dcd6bb80372f94d9e628ea062c58bf224" integrity sha512-2VBe9u0G+fDt9B5OV5DQH4KBf5DoiNkwFKOz0TCvBWvdAN2rOykCTkrL+jTLxfCAm76l9Qo5OqL7HBOx2dWggg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" lodash 
"^4.17.13" "@babel/plugin-transform-classes@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.4.tgz#c92c14be0a1399e15df72667067a8f510c9400ec" integrity sha512-sK1mjWat7K+buWRuImEzjNf68qrKcrddtpQo3swi9j7dUcG6y6R6+Di039QN2bD1dykeswlagupEmpOatFHHUg== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-define-map" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" globals "^11.1.0" "@babel/plugin-transform-computed-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.7.4.tgz#e856c1628d3238ffe12d668eb42559f79a81910d" integrity sha512-bSNsOsZnlpLLyQew35rl4Fma3yKWqK3ImWMSC/Nc+6nGjC9s5NFWAer1YQ899/6s9HxO2zQC1WoFNfkOqRkqRQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-destructuring@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.7.4.tgz#2b713729e5054a1135097b6a67da1b6fe8789267" integrity sha512-4jFMXI1Cu2aXbcXXl8Lr6YubCn6Oc7k9lLsu8v61TZh+1jny2BWmdtvY9zSUlLdGUvcy9DMAWyZEOqjsbeg/wA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-dotall-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.4.tgz#f7ccda61118c5b7a2599a72d5e3210884a021e96" integrity sha512-mk0cH1zyMa/XHeb6LOTXTbG7uIJ8Rrjlzu91pUx/KS3JpcgaTDwMS8kM+ar8SLOvlL2Lofi4CGBAjCo3a2x+lw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-duplicate-keys@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.7.4.tgz#3d21731a42e3f598a73835299dd0169c3b90ac91" integrity sha512-g1y4/G6xGWMD85Tlft5XedGaZBCIVN+/P0bs6eabmcPP9egFleMAo65OOjlhcz1njpwagyY3t0nsQC9oTFegJA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-exponentiation-operator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.7.4.tgz#dd30c0191e3a1ba19bcc7e389bdfddc0729d5db9" integrity sha512-MCqiLfCKm6KEA1dglf6Uqq1ElDIZwFuzz1WH5mTf8k2uQSxEJMbOIEh7IZv7uichr7PMfi5YVSrr1vz+ipp7AQ== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-for-of@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.7.4.tgz#248800e3a5e507b1f103d8b4ca998e77c63932bc" integrity sha512-zZ1fD1B8keYtEcKF+M1TROfeHTKnijcVQm0yO/Yu1f7qoDoxEIc/+GX6Go430Bg84eM/xwPFp0+h4EbZg7epAA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.4.tgz#75a6d3303d50db638ff8b5385d12451c865025b1" integrity sha512-E/x09TvjHNhsULs2IusN+aJNRV5zKwxu1cpirZyRPw+FyyIKEHPXTsadj48bVpc1R5Qq1B5ZkzumuFLytnbT6g== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.7.4.tgz#27fe87d2b5017a2a5a34d1c41a6b9f6a6262643e" integrity sha512-X2MSV7LfJFm4aZfxd0yLVFrEXAgPqYoDG53Br/tCKiKYfX0MjVjQeWPIhPHHsCqzwQANq+FLN786fF5rgLS+gw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-transform-member-expression-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.7.4.tgz#aee127f2f3339fc34ce5e3055d7ffbf7aa26f19a" integrity sha512-9VMwMO7i69LHTesL0RdGy93JU6a+qOPuvB4F4d0kR0zyVjJRVJRaoaGjhtki6SzQUu8yen/vxPKN6CWnCUw6bA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-amd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.7.4.tgz#276b3845ca2b228f2995e453adc2e6f54d72fb71" integrity sha512-/542/5LNA18YDtg1F+QHvvUSlxdvjZoD/aldQwkq+E3WCkbEjNSN9zdrOXaSlfg3IfGi22ijzecklF/A7kVZFQ== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-commonjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.4.tgz#bee4386e550446343dd52a571eda47851ff857a3" integrity sha512-k8iVS7Jhc367IcNF53KCwIXtKAH7czev866ThsTgy8CwlXjnKZna2VHwChglzLleYrcHz1eQEIJlGRQxB53nqA== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.7.4" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-systemjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.4.tgz#cd98152339d3e763dfe838b7d4273edaf520bb30" integrity sha512-y2c96hmcsUi6LrMqvmNDPBBiGCiQu0aYqpHatVVu6kD4mFEXKjyNxd/drc18XXAf9dv7UXjrZwBVmTTGaGP8iw== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-umd@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.4.tgz#1027c355a118de0aae9fee00ad7813c584d9061f" integrity sha512-u2B8TIi0qZI4j8q4C51ktfO7E3cQ0qnaXFI1/OXITordD40tt17g/sXqgNNCcMTcBFKrUPcGDx+TBJuZxLx7tw== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-named-capturing-groups-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.4.tgz#fb3bcc4ee4198e7385805007373d6b6f42c98220" integrity sha512-jBUkiqLKvUWpv9GLSuHUFYdmHg0ujC1JEYoZUfeOOfNydZXp1sXObgyPatpcwjWgsdBGsagWW0cdJpX/DO2jMw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/plugin-transform-new-target@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.7.4.tgz#4a0753d2d60639437be07b592a9e58ee00720167" integrity sha512-CnPRiNtOG1vRodnsyGX37bHQleHE14B9dnnlgSeEs3ek3fHN1A1SScglTCg1sfbe7sRQ2BUcpgpTpWSfMKz3gg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-object-super@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.7.4.tgz#48488937a2d586c0148451bf51af9d7dda567262" integrity sha512-ho+dAEhC2aRnff2JCA0SAK7V2R62zJd/7dmtoe7MHcso4C2mS+vZjn1Pb1pCVZvJs1mgsvv5+7sT+m3Bysb6eg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/plugin-transform-parameters@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.7.4.tgz#da4555c97f39b51ac089d31c7380f03bca4075ce" integrity sha512-VJwhVePWPa0DqE9vcfptaJSzNDKrWU/4FbYCjZERtmqEs05g3UMXnYMZoXja7JAJ7Y7sPZipwm/pGApZt7wHlw== dependencies: "@babel/helper-call-delegate" "^7.7.4" 
"@babel/helper-get-function-arity" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-property-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.7.4.tgz#2388d6505ef89b266103f450f9167e6bd73f98c2" integrity sha512-MatJhlC4iHsIskWYyawl53KuHrt+kALSADLQQ/HkhTjX954fkxIEh4q5slL4oRAnsm/eDoZ4q0CIZpcqBuxhJQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-regenerator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.4.tgz#d18eac0312a70152d7d914cbed2dc3999601cfc0" integrity sha512-e7MWl5UJvmPEwFJTwkBlPmqixCtr9yAASBqff4ggXTNicZiwbF8Eefzm6NVgfiBp7JdAGItecnctKTgH44q2Jw== dependencies: regenerator-transform "^0.14.0" "@babel/plugin-transform-reserved-words@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.7.4.tgz#6a7cf123ad175bb5c69aec8f6f0770387ed3f1eb" integrity sha512-OrPiUB5s5XvkCO1lS7D8ZtHcswIC57j62acAnJZKqGGnHP+TIc/ljQSrgdX/QyOTdEK5COAhuc820Hi1q2UgLQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.7.4.tgz#51fe458c1c1fa98a8b07934f4ed38b6cd62177a6" integrity sha512-O8kSkS5fP74Ad/8pfsCMGa8sBRdLxYoSReaARRNSz3FbFQj3z/QUvoUmJ28gn9BO93YfnXc3j+Xyaqe8cKDNBQ== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" resolve "^1.8.1" semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.7.4.tgz#74a0a9b2f6d67a684c6fbfd5f0458eb7ba99891e" integrity 
sha512-q+suddWRfIcnyG5YiDP58sT65AJDZSUhXQDZE3r04AuqD6d/XLaQPPXSBzP2zGerkgBivqtQm9XKGLuHqBID6Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.7.4.tgz#aa673b356fe6b7e70d69b6e33a17fef641008578" integrity sha512-8OSs0FLe5/80cndziPlg4R0K6HcWSM0zyNhHhLsmw/Nc5MaA49cAsnoJ/t/YZf8qkG7fD+UjTRaApVDB526d7Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-sticky-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.7.4.tgz#ffb68c05090c30732076b1285dc1401b404a123c" integrity sha512-Ls2NASyL6qtVe1H1hXts9yuEeONV2TJZmplLONkMPUG158CtmnrzW5Q5teibM5UVOFjG0D3IC5mzXR6pPpUY7A== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.0.0" "@babel/plugin-transform-template-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.7.4.tgz#1eb6411736dd3fe87dbd20cc6668e5121c17d604" integrity sha512-sA+KxLwF3QwGj5abMHkHgshp9+rRz+oY9uoRil4CyLtgEuE/88dpkeWgNk5qKVsJE9iSfly3nvHapdRiIS2wnQ== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-typeof-symbol@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.7.4.tgz#3174626214f2d6de322882e498a38e8371b2140e" integrity sha512-KQPUQ/7mqe2m0B8VecdyaW5XcQYaePyl9R7IsKd+irzj6jvbhoGnRE+M0aNkyAzI07VfUQ9266L5xMARitV3wg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-unicode-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.4.tgz#a3c0f65b117c4c81c5b6484f2a5e7b95346b83ae" integrity 
sha512-N77UUIV+WCvE+5yHw+oks3m18/umd7y392Zv7mYTpFqHtkpcc+QUz+gLJNTWVlWROIWeLqY0f3OjZxV5TcXnRw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/preset-env@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.7.4.tgz#ccaf309ae8d1ee2409c85a4e2b5e280ceee830f8" integrity sha512-Dg+ciGJjwvC1NIe/DGblMbcGq1HOtKbw8RLl4nIjlfcILKEOkWT/vRqPpumswABEBVudii6dnVwrBtzD7ibm4g== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-proposal-async-generator-functions" "^7.7.4" "@babel/plugin-proposal-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-syntax-top-level-await" "^7.7.4" "@babel/plugin-transform-arrow-functions" "^7.7.4" "@babel/plugin-transform-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions" "^7.7.4" "@babel/plugin-transform-block-scoping" "^7.7.4" "@babel/plugin-transform-classes" "^7.7.4" "@babel/plugin-transform-computed-properties" "^7.7.4" "@babel/plugin-transform-destructuring" "^7.7.4" "@babel/plugin-transform-dotall-regex" "^7.7.4" "@babel/plugin-transform-duplicate-keys" "^7.7.4" "@babel/plugin-transform-exponentiation-operator" "^7.7.4" "@babel/plugin-transform-for-of" "^7.7.4" "@babel/plugin-transform-function-name" "^7.7.4" "@babel/plugin-transform-literals" "^7.7.4" "@babel/plugin-transform-member-expression-literals" "^7.7.4" "@babel/plugin-transform-modules-amd" "^7.7.4" "@babel/plugin-transform-modules-commonjs" "^7.7.4" 
"@babel/plugin-transform-modules-systemjs" "^7.7.4" "@babel/plugin-transform-modules-umd" "^7.7.4" "@babel/plugin-transform-named-capturing-groups-regex" "^7.7.4" "@babel/plugin-transform-new-target" "^7.7.4" "@babel/plugin-transform-object-super" "^7.7.4" "@babel/plugin-transform-parameters" "^7.7.4" "@babel/plugin-transform-property-literals" "^7.7.4" "@babel/plugin-transform-regenerator" "^7.7.4" "@babel/plugin-transform-reserved-words" "^7.7.4" "@babel/plugin-transform-shorthand-properties" "^7.7.4" "@babel/plugin-transform-spread" "^7.7.4" "@babel/plugin-transform-sticky-regex" "^7.7.4" "@babel/plugin-transform-template-literals" "^7.7.4" "@babel/plugin-transform-typeof-symbol" "^7.7.4" "@babel/plugin-transform-unicode-regex" "^7.7.4" "@babel/types" "^7.7.4" browserslist "^4.6.0" core-js-compat "^3.1.1" invariant "^2.2.2" js-levenshtein "^1.1.3" semver "^5.5.0" "@babel/runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.7.4.tgz#b23a856751e4bf099262f867767889c0e3fe175b" integrity sha512-r24eVUUr0QqNZa+qrImUk8fn5SPhHq+IfYvIoIMg0do3GdK9sMdiLKP3GYVVaxpPKORgm8KRKaNTEhAjgIpLMw== dependencies: regenerator-runtime "^0.13.2" "@babel/template@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.7.4.tgz#428a7d9eecffe27deac0a98e23bf8e3675d2a77b" integrity sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw== dependencies: "@babel/code-frame" "^7.0.0" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" "@babel/traverse@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.7.4.tgz#9c1e7c60fb679fe4fcfaa42500833333c2058558" integrity sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/parser" "^7.7.4" 
"@babel/types" "^7.7.4" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.13" "@babel/types@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.7.4.tgz#516570d539e44ddf308c07569c258ff94fde9193" integrity sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA== dependencies: esutils "^2.0.2" lodash "^4.17.13" to-fast-properties "^2.0.0" "@types/babel-types@*", "@types/babel-types@^7.0.0": version "7.0.7" resolved "https://registry.yarnpkg.com/@types/babel-types/-/babel-types-7.0.7.tgz#667eb1640e8039436028055737d2b9986ee336e3" integrity sha512-dBtBbrc+qTHy1WdfHYjBwRln4+LWqASWakLHsWHR2NWHIFkv4W3O070IGoGLEBrJBvct3r0L1BUPuvURi7kYUQ== "@types/babylon@^6.16.2": version "6.16.5" resolved "https://registry.yarnpkg.com/@types/babylon/-/babylon-6.16.5.tgz#1c5641db69eb8cdf378edd25b4be7754beeb48b4" integrity sha512-xH2e58elpj1X4ynnKp9qSnWlsRTIs6n3tgLGNfwAGHwePw0mulHQllV34n0T25uYSu1k0hRKkWXF890B1yS47w== dependencies: "@types/babel-types" "*" "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== "@types/glob@^7.1.1": version "7.1.1" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== dependencies: "@types/events" "*" "@types/minimatch" "*" "@types/node" "*" "@types/minimatch@*": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== "@types/node@*": version "12.12.12" resolved 
"https://registry.yarnpkg.com/@types/node/-/node-12.12.12.tgz#529bc3e73dbb35dd9e90b0a1c83606a9d3264bdb" integrity sha512-MGuvYJrPU0HUwqF7LqvIj50RZUX23Z+m583KBygKYUZLlZ88n6w28XRNJRJgsHukLEnLz6w6SvxZoLgbr5wLqQ== "@webassemblyjs/ast@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== dependencies: "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@webassemblyjs/floating-point-hex-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== "@webassemblyjs/helper-api-error@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== "@webassemblyjs/helper-buffer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" integrity sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== "@webassemblyjs/helper-code-frame@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== dependencies: "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/helper-fsm@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" integrity sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== "@webassemblyjs/helper-module-context@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== dependencies: "@webassemblyjs/ast" "1.8.5" mamacro "^0.0.3" "@webassemblyjs/helper-wasm-bytecode@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== "@webassemblyjs/helper-wasm-section@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/ieee754@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== 
dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" integrity sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== "@webassemblyjs/wasm-edit@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" integrity sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/helper-wasm-section" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-opt" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/wasm-gen@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wasm-opt@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" integrity sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wasm-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" integrity 
sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wast-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/floating-point-hex-parser" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-code-frame" "1.8.5" "@webassemblyjs/helper-fsm" "1.8.5" "@xtuc/long" "4.2.2" "@webassemblyjs/wast-printer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== 
abbrev@1.0.x: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU= accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== dependencies: mime-types "~2.1.24" negotiator "0.6.2" acorn-globals@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-3.1.0.tgz#fd8270f71fbb4996b004fa880ee5d46573a731bf" integrity sha1-/YJw9x+7SZawBPqIDuXUZXOnMb8= dependencies: acorn "^4.0.4" acorn@^3.1.0: version "3.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" integrity sha1-ReN/s56No/JbruP/U2niu18iAXo= acorn@^4.0.4, acorn@~4.0.2: version "4.0.13" resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787" integrity sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c= acorn@^6.2.1: version "6.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.3.0.tgz#0087509119ffa4fc0a0041d1e93a417e68cb856e" integrity sha512-/czfa8BwS88b9gWQVhc8eknunSA2DoJpJyTQkhheIf5E48u1N0R4q/YxxsAeqRrmK9TQ/uYfgLDfZo91UlANIA== after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" integrity sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8= ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: version "3.4.1" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" integrity 
sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== ajv@^5.0.0: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= dependencies: co "^4.6.0" fast-deep-equal "^1.0.0" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.3.0" ajv@^6.1.0, ajv@^6.10.2, ajv@^6.5.5: version "6.10.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== dependencies: fast-deep-equal "^2.0.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" align-text@^0.1.1, align-text@^0.1.3: version "0.1.4" resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" integrity sha1-DNkKVhCT810KmSVsIrcGlDP60Rc= dependencies: kind-of "^3.0.2" longest "^1.0.1" repeat-string "^1.5.2" amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" integrity sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU= angular-mocks@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-mocks/-/angular-mocks-1.7.9.tgz#0a3b7e28b9a493b4e3010ed2b0f69a68e9b4f79b" integrity sha512-LQRqqiV3sZ7NTHBnNmLT0bXtE5e81t97+hkJ56oU0k3dqKv1s6F+nBWRlOVzqHWPGFOiPS8ZJVdrS8DFzHyNIA== angular@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular/-/angular-1.7.9.tgz#e52616e8701c17724c3c238cfe4f9446fd570bc4" integrity sha512-5se7ZpcOtu0MBFlzGv5dsM1quQDoDeUTwZrWjGtTNA7O88cD8TEk5IEKCTDa3uECV9XnvKREVUr7du1ACiWGFQ== ansi-colors@^3.0.0: version "3.2.4" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" integrity 
sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== ansi-html@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-regex@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.0, ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" anymatch@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== dependencies: micromatch "^3.1.4" normalize-path "^2.1.1" anymatch@~3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" integrity 
sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" aproba@^1.0.3, aproba@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" arr-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-union@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= array-flatten@1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= array-flatten@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= array-unique@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= arraybuffer.slice@~0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675" integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog== asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= asn1.js@^4.0.0: version "4.10.1" resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== dependencies: bn.js "^4.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity 
sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assert@^1.1.1: version "1.5.0" resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== dependencies: object-assign "^4.1.1" util "0.10.3" assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= async-each@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async-limiter@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== async@1.x: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= async@^2.0.0, async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== dependencies: lodash "^4.17.14" asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity 
sha1-x57Zf380y48robyXkLzDZkdLS3k= atob@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== autoprefixer@^9.5.1: version "9.7.2" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.7.2.tgz#26cf729fbb709323b40171a874304884dcceffed" integrity sha512-LCAfcdej1182uVvPOZnytbq61AhnOZ/4JelDaJGDeNwewyU1AMaNthcHsyz1NRjTmd2FkurMckLWfkHg3Z//KA== dependencies: browserslist "^4.7.3" caniuse-lite "^1.0.30001010" chalk "^2.4.2" normalize-range "^0.1.2" num2fraction "^1.2.2" postcss "^7.0.23" postcss-value-parser "^4.0.2" aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" js-tokens "^3.0.2" babel-generator@^6.18.0: version "6.26.1" resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== dependencies: babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" detect-indent "^4.0.0" jsesc "^1.3.0" lodash "^4.17.4" source-map "^0.5.7" trim-right "^1.0.1" babel-loader@^8.0.5: version "8.0.6" resolved 
"https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb" integrity sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw== dependencies: find-cache-dir "^2.0.0" loader-utils "^1.0.2" mkdirp "^0.5.1" pify "^4.0.1" babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= dependencies: babel-runtime "^6.22.0" babel-plugin-dynamic-import-node@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== dependencies: object.assign "^4.1.0" babel-runtime@^6.0.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= dependencies: core-js "^2.4.0" regenerator-runtime "^0.11.0" babel-template@^6.16.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= dependencies: babel-runtime "^6.26.0" babel-traverse "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" lodash "^4.17.4" babel-traverse@^6.18.0, babel-traverse@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= dependencies: babel-code-frame "^6.26.0" babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" debug "^2.6.8" globals "^9.18.0" invariant "^2.2.2" lodash "^4.17.4" 
babel-types@^6.18.0, babel-types@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: babel-runtime "^6.26.0" esutils "^2.0.2" lodash "^4.17.4" to-fast-properties "^1.0.3" babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" integrity sha1-c5JncZI7Whl0etZmqlzUv5xunOg= base64-js@^1.0.2: version "1.3.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== base64id@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY= base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" class-utils "^0.3.5" component-emitter "^1.2.1" define-property "^1.0.0" isobject "^3.0.1" mixin-deep 
"^1.2.0" pascalcase "^0.1.1" batch@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" better-assert@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" integrity sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI= dependencies: callsite "1.0.0" big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== big.js@^5.2.2: version "5.2.2" resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^1.0.0: version "1.13.1" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binary-extensions@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c" integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow== blob@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.5.tgz#d680eeef25f8cd91ad533f5b01eed48e64caf683" integrity sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig== bluebird@^3.3.0, bluebird@^3.5.5: version 
"3.7.1" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.1.tgz#df70e302b471d7473489acf26a93d63b53f874de" integrity sha512-DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg== bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0: version "4.11.8" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f" integrity sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA== body-parser@1.19.0, body-parser@^1.16.1: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== dependencies: bytes "3.1.0" content-type "~1.0.4" debug "2.6.9" depd "~1.1.2" http-errors "1.7.2" iconv-lite "0.4.24" on-finished "~2.3.0" qs "6.7.0" raw-body "2.4.0" type-is "~1.6.17" bonjour@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= dependencies: array-flatten "^2.1.0" deep-equal "^1.0.1" dns-equal "^1.0.0" dns-txt "^2.0.2" multicast-dns "^6.0.1" multicast-dns-service-types "^1.1.0" boolbase@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" braces@^2.3.1, braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity 
sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" repeat-element "^1.1.2" snapdragon "^0.8.1" snapdragon-node "^2.0.1" split-string "^3.0.2" to-regex "^3.0.1" braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" brorand@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== dependencies: buffer-xor "^1.0.3" cipher-base "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.3" inherits "^2.0.1" safe-buffer "^5.0.1" browserify-cipher@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== dependencies: browserify-aes "^1.0.4" browserify-des "^1.0.0" evp_bytestokey "^1.0.0" browserify-des@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== dependencies: cipher-base "^1.0.1" des.js "^1.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" browserify-rsa@^4.0.0: version "4.0.1" 
resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= dependencies: bn.js "^4.1.0" randombytes "^2.0.1" browserify-sign@^4.0.0: version "4.0.4" resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= dependencies: bn.js "^4.1.1" browserify-rsa "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.2" elliptic "^6.0.0" inherits "^2.0.1" parse-asn1 "^5.0.0" browserify-zlib@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== dependencies: pako "~1.0.5" browserslist@^4.6.0, browserslist@^4.7.3: version "4.7.3" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.7.3.tgz#02341f162b6bcc1e1028e30624815d4924442dc3" integrity sha512-jWvmhqYpx+9EZm/FxcZSbUZyDEvDTLDi3nSAKbzEkyWvtI0mNSmUosey+5awDW1RUlrgXbQb5A6qY1xQH9U6MQ== dependencies: caniuse-lite "^1.0.30001010" electron-to-chromium "^1.3.306" node-releases "^1.1.40" buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== buffer-alloc@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== dependencies: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" buffer-fill@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== buffer-indexof@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= buffer@^4.3.0: version "4.9.2" resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" isarray "^1.0.0" "buildbot-build-common@link:../build_common": version "0.0.0" uid "" builtin-status-codes@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= bytes@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== cacache@^12.0.2: version "12.0.3" resolved 
"https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== dependencies: bluebird "^3.5.5" chownr "^1.1.1" figgy-pudding "^3.5.1" glob "^7.1.4" graceful-fs "^4.1.15" infer-owner "^1.0.3" lru-cache "^5.1.1" mississippi "^3.0.0" mkdirp "^0.5.1" move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.3" ssri "^6.0.1" unique-filename "^1.1.1" y18n "^4.0.0" cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" component-emitter "^1.2.1" get-value "^2.0.6" has-value "^1.0.0" isobject "^3.0.1" set-value "^2.0.0" to-object-path "^0.3.0" union-value "^1.0.0" unset-value "^1.0.0" caller-callsite@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= dependencies: callsites "^2.0.0" caller-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= dependencies: caller-callsite "^2.0.0" callsite@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" integrity sha1-KAOY5dZkvXQDi28JBRU+borxvCA= callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= camel-case@3.0.x: version "3.0.0" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" integrity 
sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= dependencies: no-case "^2.2.0" upper-case "^1.1.1" camelcase-keys@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= dependencies: camelcase "^2.0.0" map-obj "^1.0.0" camelcase@^1.0.2: version "1.2.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" integrity sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk= camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= camelcase@^5.0.0, camelcase@^5.2.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== caniuse-lite@^1.0.30001010: version "1.0.30001011" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001011.tgz#0d6c4549c78c4a800bb043a83ca0cbe0aee6c6e1" integrity sha512-h+Eqyn/YA6o6ZTqpS86PyRmNWOs1r54EBDcd2NTwwfsXQ8re1B38SnB+p2RKF8OUsyEIjeDU8XGec1RGO/wYCg== caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= center-align@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" integrity sha1-qg0yYptu6XIgBBHL1EYckHvCt60= dependencies: align-text "^0.1.3" lazy-cache "^1.0.3" chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity 
sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" character-parser@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" integrity sha1-x84o821LzZdE5f/CxfzeHHMmH8A= dependencies: is-regex "^1.0.3" chokidar@^2.0.2, chokidar@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== dependencies: anymatch "^2.0.0" async-each "^1.0.1" braces "^2.3.2" glob-parent "^3.1.0" inherits "^2.0.3" is-binary-path "^1.0.0" is-glob "^4.0.0" normalize-path "^3.0.0" path-is-absolute "^1.0.0" readdirp "^2.2.1" upath "^1.1.1" optionalDependencies: fsevents "^1.2.7" chokidar@^3.0.0: version "3.3.0" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6" integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A== dependencies: anymatch "~3.1.1" braces "~3.0.2" glob-parent "~5.1.0" is-binary-path "~2.1.0" is-glob "~4.0.1" normalize-path "~3.0.0" readdirp "~3.2.0" optionalDependencies: fsevents "~2.1.1" chownr@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.3.tgz#42d837d5239688d55f303003a508230fa6727142" integrity sha512-i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw== chrome-trace-event@^1.0.2: version 
"1.0.2" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== dependencies: tslib "^1.9.0" cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" define-property "^0.2.5" isobject "^3.0.0" static-extend "^0.1.1" clean-css@4.2.x, clean-css@^4.1.11: version "4.2.1" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" integrity sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== dependencies: source-map "~0.6.0" cliui@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" integrity sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE= dependencies: center-align "^0.1.1" right-align "^0.1.1" wordwrap "0.0.2" cliui@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== dependencies: string-width "^2.1.1" strip-ansi "^4.0.0" wrap-ansi "^2.0.0" cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" 
integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== dependencies: string-width "^3.1.0" strip-ansi "^5.2.0" wrap-ansi "^5.1.0" clone@^2.1.1, clone@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit "^1.0.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colors@^1.1.0, colors@^1.1.2: version "1.4.0" resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity 
sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@2.17.x: version "2.17.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== commander@^2.20.0, commander@~2.20.3: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@~2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= component-bind@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E= component-emitter@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== component-inherit@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" integrity 
sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM= compressible@~2.0.16: version "2.0.17" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.17.tgz#6e8c108a16ad58384a977f3a482ca20bff2f38c1" integrity sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw== dependencies: mime-db ">= 1.40.0 < 2" compression@^1.7.4: version "1.7.4" resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" bytes "3.0.0" compressible "~2.0.16" debug "2.6.9" on-headers "~1.0.2" safe-buffer "5.1.2" vary "~1.1.2" concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= concat-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" inherits "^2.0.3" readable-stream "^2.2.2" typedarray "^0.0.6" connect-history-api-fallback@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== connect@^3.6.0: version "3.7.0" resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== dependencies: debug "2.6.9" finalhandler "1.1.2" parseurl "~1.3.3" utils-merge "1.0.1" console-browserify@^1.1.0: version "1.2.0" resolved 
"https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= constantinople@^3.0.1, constantinople@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-3.1.2.tgz#d45ed724f57d3d10500017a7d3a889c1381ae647" integrity sha512-yePcBqEFhLOqSBtwYOGGS1exHo/s1xjekXiinh4itpNQGCu4KA1euPh1fg07N2wMITZXQkBz75Ntdt1ctGZouw== dependencies: "@types/babel-types" "^7.0.0" "@types/babylon" "^6.16.2" babel-types "^6.26.0" babylon "^6.18.0" constants-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= content-disposition@0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== dependencies: safe-buffer "5.1.2" content-type@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.5.0, convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity 
sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== dependencies: safe-buffer "~5.1.1" cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= cookie@0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= cookie@0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== copy-concurrently@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== dependencies: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.0" copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= core-js-compat@^3.1.1: version "3.4.2" resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.4.2.tgz#652fa7c54652b7f6586a893e37001df55ea2ac37" integrity sha512-W0Aj+LM3EAxxjD0Kp2o4be8UlnxIZHNupBv2znqrheR4aY2nOn91794k/xoSp+SxqqriiZpTsSwBtZr60cbkwQ== dependencies: browserslist "^4.7.3" semver "^6.3.0" core-js@^2.4.0: version "2.6.10" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.10.tgz#8a5b8391f8cc7013da703411ce5b585706300d7f" integrity sha512-I39t74+4t+zau64EN1fE5v2W31Adtc/REhzWN+gWRRXg6WH5qAsZm62DHpQ1+Yhe4047T55jvzz7MUqF/dBBlA== core-util-is@1.0.2, core-util-is@~1.0.0: version 
"1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cosmiconfig@^5.0.0: version "5.2.1" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== dependencies: import-fresh "^2.0.0" is-directory "^0.3.1" js-yaml "^3.13.1" parse-json "^4.0.0" create-ecdh@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== dependencies: bn.js "^4.1.0" elliptic "^6.0.0" create-hash@^1.1.0, create-hash@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== dependencies: cipher-base "^1.0.1" inherits "^2.0.1" md5.js "^1.3.4" ripemd160 "^2.0.1" sha.js "^2.4.0" create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: version "1.1.7" resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== dependencies: cipher-base "^1.0.3" create-hash "^1.1.0" inherits "^2.0.1" ripemd160 "^2.0.0" safe-buffer "^5.0.1" sha.js "^2.4.8" cross-spawn@6.0.5, cross-spawn@^6.0.0: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== dependencies: nice-try "^1.0.4" path-key "^2.0.1" semver "^5.5.0" 
shebang-command "^1.2.0" which "^1.2.9" crypto-browserify@^3.11.0: version "3.12.0" resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== dependencies: browserify-cipher "^1.0.0" browserify-sign "^4.0.0" create-ecdh "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.0" diffie-hellman "^5.0.0" inherits "^2.0.1" pbkdf2 "^3.0.3" public-encrypt "^4.0.0" randombytes "^2.0.0" randomfill "^1.0.3" css-loader@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-2.1.1.tgz#d8254f72e412bb2238bb44dd674ffbef497333ea" integrity sha512-OcKJU/lt232vl1P9EEDamhoO9iKY3tIjY5GU+XDLblAykTdgs6Ux9P1hTHve8nFKy5KPpOXOsVI/hIwi3841+w== dependencies: camelcase "^5.2.0" icss-utils "^4.1.0" loader-utils "^1.2.3" normalize-path "^3.0.0" postcss "^7.0.14" postcss-modules-extract-imports "^2.0.0" postcss-modules-local-by-default "^2.0.6" postcss-modules-scope "^2.1.0" postcss-modules-values "^2.0.0" postcss-value-parser "^3.3.0" schema-utils "^1.0.0" css-select@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= dependencies: boolbase "~1.0.0" css-what "2.1" domutils "1.5.1" nth-check "~1.0.1" css-what@2.1: version "2.1.3" resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== cssesc@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== currently-unhandled@^0.4.1: version "0.4.1" resolved 
"https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= dependencies: array-find-index "^1.0.1" custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha1-XQKkaFCt8bSjF5RqOSj8y1v9BCU= cyclist@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= d@1, d@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" type "^1.0.1" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" date-format@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/date-format/-/date-format-2.1.0.tgz#31d5b5ea211cf5fd764cd38baf9d033df7e125cf" integrity sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA== dateformat@^1.0.6: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" integrity sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk= dependencies: get-stdin "^4.0.1" meow "^3.3.0" debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^3.0.0, debug@^3.1.1, debug@^3.2.5, debug@^3.2.6: version "3.2.6" resolved 
"https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" debug@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== dependencies: ms "2.0.0" decamelize@^1.0.0, decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= decode-uri-component@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= deep-equal@^1.0.1: version "1.1.1" resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== dependencies: is-arguments "^1.0.4" is-date-object "^1.0.1" is-regex "^1.0.4" object-is "^1.0.1" object-keys "^1.1.1" regexp.prototype.flags "^1.2.0" deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@~0.1.3: version "0.1.3" resolved 
"https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= default-gateway@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== dependencies: execa "^1.0.0" ip-regex "^2.1.0" define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" define-property@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" isobject "^3.0.1" del@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== dependencies: "@types/glob" "^7.1.1" globby "^6.1.0" is-path-cwd "^2.0.0" is-path-in-cwd "^2.0.0" p-map "^2.0.0" pify "^4.0.1" rimraf "^2.6.3" 
delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= des.js@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== dependencies: inherits "^2.0.1" minimalistic-assert "^1.0.0" destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= detect-indent@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= dependencies: repeating "^2.0.0" detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= detect-node@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== di@^0.0.1: version "0.0.1" resolved 
"https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw= diffie-hellman@^5.0.0: version "5.0.3" resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== dependencies: bn.js "^4.1.0" miller-rabin "^4.0.0" randombytes "^2.0.0" dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= dns-packet@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.1.tgz#12aa426981075be500b910eedcd0b47dd7deda5a" integrity sha512-0UxfQkMhYAUaZI+xrNZOz/as5KgDU0M/fQ9b6SpkyLbk3GEswDi6PADJVaYJradtRVsRIlF1zLyOodbcTCDzUg== dependencies: ip "^1.1.0" safe-buffer "^5.0.1" dns-txt@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= dependencies: buffer-indexof "^1.0.0" doctypes@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" integrity sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk= dom-converter@^0.2: version "0.2.0" resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" dom-serialize@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" integrity sha1-ViromZ9Evl6jB29UGdzVnrQ6yVs= dependencies: custom-event "~1.0.0" ent "~2.2.0" extend "^3.0.0" void-elements "^2.0.0" dom-serializer@0: version "0.2.2" 
resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== dependencies: domelementtype "^2.0.1" entities "^2.0.0" domain-browser@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== domelementtype@1, domelementtype@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== domelementtype@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d" integrity sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ== domhandler@^2.3.0: version "2.4.2" resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== dependencies: domelementtype "1" domutils@1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= dependencies: dom-serializer "0" domelementtype "1" domutils@^1.5.1: version "1.7.0" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== dependencies: dom-serializer "0" domelementtype "1" duplexify@^3.4.2, duplexify@^3.6.0: version "3.7.1" resolved 
"https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" inherits "^2.0.1" readable-stream "^2.0.0" stream-shift "^1.0.0" ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.3.306: version "1.3.311" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.311.tgz#73baa361e2b1f44b7b4f1a443aaa1372f8074ebb" integrity sha512-7GH6RKCzziLzJ9ejmbiBEdzHZsc6C3eRpav14dmRfTWMpNgMqpP1ukw/FU/Le2fR+ep642naq7a23xNdmh2s+A== elliptic@^6.0.0: version "6.5.2" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.2.tgz#05c5678d7173c049d8ca433552224a495d0e3762" integrity sha512-f4x70okzZbIQl/NSRLkI/+tteV/9WqL98zx+SQ69KbXxmVrmjwsNUPn/gYJJ0sHvEak24cZgHIPegRePAtA/xw== dependencies: bn.js "^4.4.0" brorand "^1.0.1" hash.js "^1.0.0" hmac-drbg "^1.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.0" emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== emojis-list@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= encodeurl@~1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" engine.io-client@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.2.1.tgz#6f54c0475de487158a1a7c77d10178708b6add36" integrity sha512-y5AbkytWeM4jQr7m/koQLc5AxpRKC1hEVUb/s1FUAWEJq5AzJJ4NLvzuKPuxtDi5Mq755WuDvZ6Iv2rXj4PTzw== dependencies: component-emitter "1.2.1" component-inherit "0.0.3" debug "~3.1.0" engine.io-parser "~2.1.1" has-cors "1.1.0" indexof "0.0.1" parseqs "0.0.5" parseuri "0.0.5" ws "~3.3.1" xmlhttprequest-ssl "~1.5.4" yeast "0.1.2" engine.io-parser@~2.1.0, engine.io-parser@~2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6" integrity sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA== dependencies: after "0.8.2" arraybuffer.slice "~0.0.7" base64-arraybuffer "0.1.5" blob "0.0.5" has-binary2 "~1.0.2" engine.io@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.2.1.tgz#b60281c35484a70ee0351ea0ebff83ec8c9522a2" integrity sha512-+VlKzHzMhaU+GsCIg4AoXF1UdDFjHHwMmMKqMJNDNLlUlejz58FCy4LBqB2YVJskHGYl06BatYWKP2TVdVXE5w== dependencies: accepts "~1.3.4" base64id "1.0.0" cookie "0.3.1" debug "~3.1.0" engine.io-parser "~2.1.0" ws "~3.3.1" enhanced-resolve@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" integrity 
sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" tapable "^1.0.0" enhanced-resolve@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" integrity sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== dependencies: graceful-fs "^4.1.2" memory-fs "^0.5.0" tapable "^1.0.0" ent@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= entities@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4" integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw== errno@^0.1.1, errno@^0.1.3, errno@~0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== dependencies: prr "~1.0.1" error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es-abstract@^1.5.1: version "1.16.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.16.0.tgz#d3a26dc9c3283ac9750dca569586e976d9dcc06d" integrity 
sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg== dependencies: es-to-primitive "^1.2.0" function-bind "^1.1.1" has "^1.0.3" has-symbols "^1.0.0" is-callable "^1.1.4" is-regex "^1.0.4" object-inspect "^1.6.0" object-keys "^1.1.1" string.prototype.trimleft "^2.1.0" string.prototype.trimright "^2.1.0" es-to-primitive@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50: version "0.10.53" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== dependencies: es6-iterator "~2.0.3" es6-symbol "~3.1.3" next-tick "~1.0.0" es6-iterator@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= dependencies: d "1" es5-ext "^0.10.35" es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== dependencies: d "^1.0.1" ext "^1.1.2" escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved 
"https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@1.8.x: version "1.8.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" integrity sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg= dependencies: esprima "^2.7.1" estraverse "^1.9.1" esutils "^2.0.2" optionator "^0.8.1" optionalDependencies: source-map "~0.2.0" eslint-scope@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== dependencies: esrecurse "^4.1.0" estraverse "^4.1.1" esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esrecurse@^4.1.0: version "4.2.1" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== dependencies: estraverse "^4.1.0" estraverse@^1.9.1: version "1.9.3" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= estraverse@^4.1.0, estraverse@^4.1.1: version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity 
sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= eventemitter3@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.0.tgz#d65176163887ee59f386d64c82610b696a4a74eb" integrity sha512-qerSRB0p+UDEssxTtm6EDKcE7W4OaoisfIMl4CngyEhjpYglocpNg6UEqCvemdGhosAsg4sO2dXJOdyBifPGCg== events@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" integrity sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== eventsource@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== dependencies: original "^1.0.0" evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== dependencies: md5.js "^1.3.4" safe-buffer "^5.1.1" execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== dependencies: cross-spawn "^6.0.0" get-stream "^4.0.0" is-stream "^1.1.0" npm-run-path "^2.0.0" 
p-finally "^1.0.0" signal-exit "^3.0.0" strip-eof "^1.0.0" expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= dependencies: debug "^2.3.3" define-property "^0.2.5" extend-shallow "^2.0.1" posix-character-classes "^0.1.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= dependencies: homedir-polyfill "^1.0.1" express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== dependencies: accepts "~1.3.7" array-flatten "1.1.1" body-parser "1.19.0" content-disposition "0.5.3" content-type "~1.0.4" cookie "0.4.0" cookie-signature "1.0.6" debug "2.6.9" depd "~1.1.2" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" finalhandler "~1.1.2" fresh "0.5.2" merge-descriptors "1.0.1" methods "~1.1.2" on-finished "~2.3.0" parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.5" qs "6.7.0" range-parser "~1.2.1" safe-buffer "5.1.2" send "0.17.1" serve-static "1.14.1" setprototypeof "1.1.1" statuses "~1.5.0" type-is "~1.6.18" utils-merge "1.0.1" vary "~1.1.2" ext@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.2.0.tgz#8dd8d2dd21bcced3045be09621fa0cbf73908ba4" integrity sha512-0ccUQK/9e3NreLFg6K6np8aPyRgwycx+oFGtfx1dSp7Wj00Ozw9r05FgBRlzjf2XBM7LAzwgLyDscRrtSU91hA== dependencies: type "^2.0.0" extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity 
sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= dependencies: assign-symbols "^1.0.0" is-extendable "^1.0.1" extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== dependencies: array-unique "^0.3.2" define-property "^1.0.0" expand-brackets "^2.1.4" extend-shallow "^2.0.1" fragment-cache "^0.2.1" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= fast-deep-equal@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= fast-levenshtein@~2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= faye-websocket@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" integrity sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= dependencies: websocket-driver ">=0.5.1" faye-websocket@~0.11.1: version "0.11.3" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.3.tgz#5c0e9a8968e8912c286639fde977a8b209f2508e" integrity sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== dependencies: websocket-driver ">=0.5.1" figgy-pudding@^3.5.1: version "3.5.1" resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.1.tgz#862470112901c727a0e495a80744bd5baa1d6790" integrity sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w== file-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-3.0.1.tgz#f8e0ba0b599918b51adfe45d66d1e771ad560faa" integrity sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw== dependencies: loader-utils "^1.0.2" schema-utils "^1.0.0" fill-range@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= dependencies: extend-shallow "^2.0.1" is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range "^2.1.0" fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" integrity 
sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" finalhandler@1.1.2, finalhandler@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" on-finished "~2.3.0" parseurl "~1.3.3" statuses "~1.5.0" unpipe "~1.0.0" find-cache-dir@^2.0.0, find-cache-dir@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== dependencies: commondir "^1.0.1" make-dir "^2.0.0" pkg-dir "^3.0.0" find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= dependencies: path-exists "^2.0.0" pinkie-promise "^2.0.0" find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" findup-sync@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== dependencies: detect-file "^1.0.0" is-glob "^4.0.0" micromatch "^3.0.4" resolve-dir "^1.0.1" flatted@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.1.tgz#69e57caa8f0eacbc281d2e2cb458d46fdb449e08" integrity 
sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg== flush-write-stream@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== dependencies: inherits "^2.0.3" readable-stream "^2.3.6" follow-redirects@^1.0.0: version "1.9.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.9.0.tgz#8d5bcdc65b7108fe1508649c79c12d732dcedb4f" integrity sha512-CRcPzsSIbXyVDl0QI01muNDu69S8trU4jArW9LpOt2WtC6LyUJetcIrmfHsRBx7/Jb6GHJUiuqyYxPooFfNt6A== dependencies: debug "^3.0.0" for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" forwarded@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= dependencies: map-cache "^0.2.2" fresh@0.5.2: version "0.5.2" resolved 
"https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= from2@^2.1.0: version "2.3.0" resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= dependencies: inherits "^2.0.1" readable-stream "^2.0.0" fs-access@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" integrity sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o= dependencies: null-check "^1.0.0" fs-extra@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== dependencies: graceful-fs "^4.1.2" jsonfile "^4.0.0" universalify "^0.1.0" fs-minipass@^1.2.5: version "1.2.7" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== dependencies: minipass "^2.6.0" fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= dependencies: graceful-fs "^4.1.2" iferr "^0.1.5" imurmurhash "^0.1.4" readable-stream "1 || 2" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.2.7: version "1.2.9" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== 
dependencies: nan "^2.12.1" node-pre-gyp "^0.12.0" fsevents@~2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805" integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA== function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" has-unicode "^2.0.0" object-assign "^4.1.0" signal-exit "^3.0.0" string-width "^1.0.1" strip-ansi "^3.0.1" wide-align "^1.1.0" get-caller-file@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-caller-file@^2.0.1: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-stdin@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= get-stream@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== dependencies: pump "^3.0.0" get-value@^2.0.3, get-value@^2.0.6: version 
"2.0.6" resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-parent@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= dependencies: is-glob "^3.1.0" path-dirname "^1.0.0" glob-parent@~5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.0.tgz#5f4c1d1e748d30cd73ad2944b3577a81b081e8c2" integrity sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw== dependencies: is-glob "^4.0.1" glob@^5.0.13, glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" integrity sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E= dependencies: inflight "^1.0.4" inherits "2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^7.0.3, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" global-modules@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== dependencies: global-prefix "^3.0.0" global-modules@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" is-windows "^1.0.1" resolve-dir "^1.0.0" global-prefix@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= dependencies: expand-tilde "^2.0.2" homedir-polyfill "^1.0.1" ini "^1.3.4" is-windows "^1.0.1" which "^1.2.14" global-prefix@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== dependencies: ini "^1.3.5" kind-of "^6.0.2" which "^1.3.1" globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== globby@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" glob "^7.0.3" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: version "4.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity 
sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== handle-thing@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754" integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ== handlebars@^4.0.1: version "4.5.3" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.5.3.tgz#5cf75bd8714f7605713511a56be7c349becb0482" integrity sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA== dependencies: neo-async "^2.6.0" optimist "^0.6.1" source-map "^0.6.1" optionalDependencies: uglify-js "^3.1.4" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.0: version "5.1.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: ajv "^6.5.5" har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-binary2@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/has-binary2/-/has-binary2-1.0.3.tgz#7776ac627f3ea77250cfc332dab7ddf5e4f5d11d" integrity sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw== dependencies: isarray "2.0.1" has-cors@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk= has-flag@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-symbols@^1.0.0, has-symbols@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= has-value@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= dependencies: get-value "^2.0.3" has-values "^0.1.4" isobject "^2.0.0" has-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: get-value "^2.0.6" has-values "^1.0.0" isobject "^3.0.0" has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= has-values@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= dependencies: is-number "^3.0.0" kind-of "^4.0.0" has@^1.0.1, has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity 
sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hash-base@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== dependencies: inherits "^2.0.3" minimalistic-assert "^1.0.1" he@1.2.x: version "1.2.0" resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== hmac-drbg@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= dependencies: hash.js "^1.0.3" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.1" homedir-polyfill@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.5" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.5.tgz#759cfcf2c4d156ade59b0b2dfabddc42a6b9c70c" integrity sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg== hpack.js@^2.1.6: version "2.1.6" resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= 
dependencies: inherits "^2.0.1" obuf "^1.0.0" readable-stream "^2.0.1" wbuf "^1.1.0" html-entities@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8= html-minifier@^3.2.3: version "3.5.21" resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== dependencies: camel-case "3.0.x" clean-css "4.2.x" commander "2.17.x" he "1.2.x" param-case "2.1.x" relateurl "0.2.x" uglify-js "3.4.x" html-webpack-plugin@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= dependencies: html-minifier "^3.2.3" loader-utils "^0.2.16" lodash "^4.17.3" pretty-error "^2.0.2" tapable "^1.0.0" toposort "^1.0.0" util.promisify "1.0.0" htmlparser2@^3.3.0: version "3.10.1" resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== dependencies: domelementtype "^1.3.1" domhandler "^2.3.0" domutils "^1.5.1" entities "^1.1.1" inherits "^2.0.1" readable-stream "^3.1.1" http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= http-errors@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.1" 
statuses ">= 1.5.0 < 2" toidentifier "1.0.0" http-errors@~1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.0" statuses ">= 1.4.0 < 2" http-errors@~1.7.2: version "1.7.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== dependencies: depd "~1.1.2" inherits "2.0.4" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" "http-parser-js@>=0.4.0 <0.4.11": version "0.4.10" resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4" integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q= http-proxy-middleware@0.19.1: version "0.19.1" resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== dependencies: http-proxy "^1.17.0" is-glob "^4.0.0" lodash "^4.17.11" micromatch "^3.1.10" http-proxy@^1.13.0, http-proxy@^1.17.0: version "1.18.0" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.0.tgz#dbe55f63e75a347db7f3d99974f2692a314a6a3a" integrity sha512-84I2iJM/n1d4Hdgc6y2+qY5mDaz2PUVjlg9znE9byl+q0uC3DeByqBGReQu5tpLK0TAqTIXScRUV+dg7+bUPpQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" requires-port "^1.0.0" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-browserify@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= iconv-lite@0.4.24, iconv-lite@^0.4.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" icss-replace-symbols@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= icss-utils@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== dependencies: postcss "^7.0.14" ieee754@^1.1.4: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== iferr@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= ignore-walk@^3.0.1: version "3.0.3" resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== dependencies: minimatch "^3.0.4" image-size@~0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= import-cwd@^2.0.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= dependencies: import-from "^2.1.0" import-fresh@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= dependencies: caller-path "^2.0.0" resolve-from "^3.0.0" import-from@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" integrity sha1-M1238qev/VOqpHHUuAId7ja387E= dependencies: resolve-from "^3.0.0" import-glob-loader@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/import-glob-loader/-/import-glob-loader-1.1.0.tgz#98d84c0f661c8ba9f821d9ddb7c6b6dc8e97eca2" integrity sha1-mNhMD2Yci6n4Idndt8a23I6X7KI= dependencies: glob "^5.0.13" loader-utils "^0.2.10" import-local@2.0.0, import-local@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== dependencies: pkg-dir "^3.0.0" resolve-cwd "^2.0.0" imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= dependencies: repeating "^2.0.0" indexes-of@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= indexof@0.0.1: version "0.0.1" resolved 
"https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= infer-owner@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: version "1.3.5" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== internal-ip@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" integrity sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== dependencies: default-gateway "^4.2.0" ipaddr.js "^1.9.0" interpret@1.2.0: version "1.2.0" resolved 
"https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" invert-kv@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= ip@^1.1.0, ip@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= ipaddr.js@1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== ipaddr.js@^1.9.0: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== is-absolute-url@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== is-accessor-descriptor@^0.1.6: version "0.1.6" resolved 
"https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arguments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.0.4.tgz#3faf966c7cba0ff437fb31f6250082fcf0448cf3" integrity sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA== is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-callable@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity 
sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== dependencies: kind-of "^6.0.0" is-date-object@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" is-data-descriptor "^1.0.0" kind-of "^6.0.2" is-directory@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= is-expression@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-3.0.0.tgz#39acaa6be7fd1f3471dc42c7416e61c24317ac9f" integrity sha1-Oayqa+f9HzRx3ELHQW5hwkMXrJ8= 
dependencies: acorn "~4.0.2" object-assign "^4.0.1" is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= is-extendable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^2.1.0, is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" integrity sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= is-glob@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity 
sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-cwd@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== is-path-in-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== dependencies: is-path-inside "^2.1.0" is-path-inside@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== dependencies: path-is-inside "^1.0.2" is-plain-obj@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject 
"^3.0.1" is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= is-regex@^1.0.3, is-regex@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= dependencies: has "^1.0.1" is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-symbol@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== dependencies: has-symbols "^1.0.1" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== is-wsl@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isarray@2.0.1: version 
"2.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.1.tgz#a37d94ed9cda2d59865c9f76fe596ee1f338741e" integrity sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4= isbinaryfile@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" integrity sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== dependencies: buffer-alloc "^1.2.0" isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul-instrumenter-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/istanbul-instrumenter-loader/-/istanbul-instrumenter-loader-3.0.1.tgz#9957bd59252b373fae5c52b7b5188e6fde2a0949" integrity sha512-a5SPObZgS0jB/ixaKSMdn6n/gXSrK2S6q/UfRJBT3e6gQmVjwZROTODQsYW5ZNwOu78hG62Y3fWlebaVOL0C+w== dependencies: convert-source-map "^1.5.0" istanbul-lib-instrument "^1.7.3" loader-utils "^1.1.0" schema-utils "^0.3.0" istanbul-lib-coverage@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0" integrity sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ== istanbul-lib-instrument@^1.7.3: 
version "1.10.2" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca" integrity sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A== dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" babylon "^6.18.0" istanbul-lib-coverage "^1.2.1" semver "^5.3.0" istanbul@^0.4.0: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" integrity sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs= dependencies: abbrev "1.0.x" async "1.x" escodegen "1.8.x" esprima "2.7.x" glob "^5.0.15" handlebars "^4.0.1" js-yaml "3.x" mkdirp "0.5.x" nopt "3.x" once "1.x" resolve "1.1.x" supports-color "^3.1.0" which "^1.1.1" wordwrap "^1.0.0" jasmine-core@^3.3, jasmine-core@^3.4.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-3.5.0.tgz#132c23e645af96d85c8bca13c8758b18429fc1e4" integrity sha512-nCeAiw37MIMA9w9IXso7bRaLl+c/ef3wnxsoSAlYrzS+Ot0zTG6nU8G/cIfGkqpkjX2wNaIW9RFG0TwIFnG6bA== js-levenshtein@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-stringify@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" integrity sha1-Fzb939lyTyijaCrcYjCufk6Weds= "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-tokens@^3.0.2: version "3.0.2" resolved 
"https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@3.x, js-yaml@^3.13.1: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== json-schema-traverse@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity 
sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= json3@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== json5@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== dependencies: minimist "^1.2.0" json5@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== dependencies: minimist "^1.2.0" jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= optionalDependencies: graceful-fs "^4.1.6" jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" 
jstransformer@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" integrity sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM= dependencies: is-promise "^2.0.0" promise "^7.0.1" karma-chrome-launcher@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" integrity sha512-uf/ZVpAabDBPvdPdveyk1EPgbnloPvFFGgmRhYLTDH7gEB4nZdSBk8yTU47w1g/drLSx5uMOkjKk7IWKfWg/+w== dependencies: fs-access "^1.0.0" which "^1.2.1" karma-coverage@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/karma-coverage/-/karma-coverage-1.1.2.tgz#cc09dceb589a83101aca5fe70c287645ef387689" integrity sha512-eQawj4Cl3z/CjxslYy9ariU4uDh7cCNFZHNWXWRpl0pNeblY/4wHR7M7boTYXWrn9bY0z2pZmr11eKje/S/hIw== dependencies: dateformat "^1.0.6" istanbul "^0.4.0" lodash "^4.17.0" minimatch "^3.0.0" source-map "^0.5.1" karma-jasmine@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-2.0.1.tgz#26e3e31f2faf272dd80ebb0e1898914cc3a19763" integrity sha512-iuC0hmr9b+SNn1DaUD2QEYtUxkS1J+bSJSn7ejdEexs7P8EYvA1CWkEdrDQ+8jVH3AgWlCNwjYsT1chjcNW9lA== dependencies: jasmine-core "^3.3" karma-sourcemap-loader@^0.3.7: version "0.3.7" resolved "https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8" integrity sha1-kTIsd/jxPUb+0GKwQuEAnUxFBdg= dependencies: graceful-fs "^4.1.2" karma-spec-reporter@^0.0.32: version "0.0.32" resolved "https://registry.yarnpkg.com/karma-spec-reporter/-/karma-spec-reporter-0.0.32.tgz#2e9c7207ea726771260259f82becb543209e440a" integrity sha1-LpxyB+pyZ3EmAln4K+y1QyCeRAo= dependencies: colors "^1.1.2" karma-webpack@^3.0.5: version "3.0.5" resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-3.0.5.tgz#1ff1e3a690fb73ae95ee95f9ab58f341cfc7b40f" integrity 
sha512-nRudGJWstvVuA6Tbju9tyGUfXTtI1UXMXoRHVmM2/78D0q6s/Ye2IC157PKNDC15PWFGR0mVIRtWLAdcfsRJoA== dependencies: async "^2.0.0" babel-runtime "^6.0.0" loader-utils "^1.0.0" lodash "^4.0.0" source-map "^0.5.6" webpack-dev-middleware "^2.0.6" karma@^4.1.0: version "4.4.1" resolved "https://registry.yarnpkg.com/karma/-/karma-4.4.1.tgz#6d9aaab037a31136dc074002620ee11e8c2e32ab" integrity sha512-L5SIaXEYqzrh6b1wqYC42tNsFMx2PWuxky84pK9coK09MvmL7mxii3G3bZBh/0rvD27lqDd0le9jyhzvwif73A== dependencies: bluebird "^3.3.0" body-parser "^1.16.1" braces "^3.0.2" chokidar "^3.0.0" colors "^1.1.0" connect "^3.6.0" di "^0.0.1" dom-serialize "^2.2.0" flatted "^2.0.0" glob "^7.1.1" graceful-fs "^4.1.2" http-proxy "^1.13.0" isbinaryfile "^3.0.0" lodash "^4.17.14" log4js "^4.0.0" mime "^2.3.1" minimatch "^3.0.2" optimist "^0.6.1" qjobs "^1.1.4" range-parser "^1.2.0" rimraf "^2.6.0" safe-buffer "^5.0.1" socket.io "2.1.1" source-map "^0.6.1" tmp "0.0.33" useragent "2.3.0" killable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.2" resolved 
"https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== lazy-cache@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" integrity sha1-odePw6UEdMuAhF07O24dpJpEbo4= lcid@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== dependencies: invert-kv "^2.0.0" less-loader@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-5.0.0.tgz#498dde3a6c6c4f887458ee9ed3f086a12ad1b466" integrity sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg== dependencies: clone "^2.1.1" loader-utils "^1.1.0" pify "^4.0.1" less@^3.9.0: version "3.10.3" resolved "https://registry.yarnpkg.com/less/-/less-3.10.3.tgz#417a0975d5eeecc52cff4bcfa3c09d35781e6792" integrity sha512-vz32vqfgmoxF1h3K4J+yKCtajH0PWmjkIFgbs5d78E/c/e+UQTnI+lWK+1eQRE95PXM2mC3rJlLSSP9VQHnaow== dependencies: clone "^2.1.2" optionalDependencies: errno "^0.1.1" graceful-fs "^4.1.2" image-size "~0.5.0" mime "^1.4.1" mkdirp "^0.5.0" promise "^7.1.1" request "^2.83.0" source-map "~0.6.0" levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" pify "^2.0.0" pinkie-promise "^2.0.0" strip-bom "^2.0.0" loader-runner@^2.4.0: 
version "2.4.0" resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== loader-utils@1.2.3, loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== dependencies: big.js "^5.2.2" emojis-list "^2.0.0" json5 "^1.0.1" loader-utils@^0.2.10, loader-utils@^0.2.16: version "0.2.17" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= dependencies: big.js "^3.1.3" emojis-list "^2.0.0" json5 "^0.5.0" object-assign "^4.0.1" locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" lodash@^4.0.0, lodash@^4.17.0, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.3, lodash@^4.17.4: version "4.17.15" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== log-symbols@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== dependencies: chalk "^2.0.1" log4js@^4.0.0: version "4.5.1" resolved 
"https://registry.yarnpkg.com/log4js/-/log4js-4.5.1.tgz#e543625e97d9e6f3e6e7c9fc196dd6ab2cae30b5" integrity sha512-EEEgFcE9bLgaYUKuozyFfytQM2wDHtXn4tAN41pkaxpNjAykv11GVdeI4tHtmPWW4Xrgh9R/2d7XYghDVjbKKw== dependencies: date-format "^2.0.0" debug "^4.1.1" flatted "^2.0.0" rfdc "^1.1.4" streamroller "^1.0.6" loglevel@^1.6.4: version "1.6.6" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.6.tgz#0ee6300cc058db6b3551fa1c4bf73b83bb771312" integrity sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ== loglevelnext@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/loglevelnext/-/loglevelnext-1.0.5.tgz#36fc4f5996d6640f539ff203ba819641680d75a2" integrity sha512-V/73qkPuJmx4BcBF19xPBr+0ZRVBhc4POxvZTZdMeXpJ4NItXSJ/MSwuFT0kQJlCbXvdlZoQQ/418bS1y9Jh6A== dependencies: es6-symbol "^3.1.1" object.assign "^4.1.0" longest@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" integrity sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc= loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" loud-rejection@^1.0.0, loud-rejection@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= dependencies: currently-unhandled "^0.4.1" signal-exit "^3.0.0" lower-case@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= lru-cache@4.1.x: version "4.1.5" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" 
integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" make-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== dependencies: pify "^4.0.1" semver "^5.6.0" mamacro@^0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== map-age-cleaner@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== dependencies: p-defer "^1.0.0" map-cache@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" md5.js@^1.3.4: version "1.3.5" resolved 
"https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== dependencies: hash-base "^3.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= mem@^4.0.0: version "4.3.0" resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== dependencies: map-age-cleaner "^0.1.1" mimic-fn "^2.0.0" p-is-promise "^2.0.0" memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= dependencies: errno "^0.1.3" readable-stream "^2.0.1" memory-fs@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== dependencies: errno "^0.1.3" readable-stream "^2.0.1" meow@^3.3.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= dependencies: camelcase-keys "^2.0.0" decamelize "^1.1.2" loud-rejection "^1.0.0" map-obj "^1.0.1" minimist "^1.1.3" normalize-package-data "^2.3.4" object-assign "^4.0.1" read-pkg-up "^1.0.1" redent "^1.0.0" trim-newlines "^1.0.0" merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= 
methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" braces "^2.3.1" define-property "^2.0.2" extend-shallow "^3.0.2" extglob "^2.0.4" fragment-cache "^0.2.1" kind-of "^6.0.2" nanomatch "^1.2.9" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.2" miller-rabin@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== dependencies: bn.js "^4.0.0" brorand "^1.0.1" mime-db@1.42.0, "mime-db@>= 1.40.0 < 2": version "1.42.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.42.0.tgz#3e252907b4c7adb906597b4b65636272cf9e7bac" integrity sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ== mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.19, mime-types@~2.1.24: version "2.1.25" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.25.tgz#39772d46621f93e2a80a856c53b86a62156a6437" integrity sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg== dependencies: mime-db "1.42.0" mime@1.6.0, mime@^1.4.1: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.1.0, mime@^2.3.1, mime@^2.4.4: version "2.4.4" 
resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== mimic-fn@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== mini-css-extract-plugin@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz#a3f13372d6fcde912f3ee4cd039665704801e3b9" integrity sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw== dependencies: loader-utils "^1.1.0" normalize-url "^2.0.1" schema-utils "^1.0.0" webpack-sources "^1.1.0" minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= "minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@0.0.8: version "0.0.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= minimist@^1.1.3, 
minimist@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= minipass@^2.6.0, minipass@^2.8.6, minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" minizlib@^1.2.1: version "1.3.3" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== dependencies: minipass "^2.9.0" mississippi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== dependencies: concat-stream "^1.5.0" duplexify "^3.4.2" end-of-stream "^1.1.0" flush-write-stream "^1.0.0" from2 "^2.1.0" parallel-transform "^1.1.0" pump "^3.0.0" pumpify "^1.3.3" stream-each "^1.1.0" through2 "^2.0.0" mixin-deep@^1.2.0: version "1.3.2" resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= 
dependencies: minimist "0.0.8" move-concurrently@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= dependencies: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.3" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== ms@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== multicast-dns-service-types@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= multicast-dns@^6.0.1: version "6.2.3" resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" integrity sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== dependencies: dns-packet "^1.3.1" thunky "^1.0.2" nan@^2.12.1: version "2.14.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity 
sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" define-property "^2.0.2" extend-shallow "^3.0.2" fragment-cache "^0.2.1" is-windows "^1.0.2" kind-of "^6.0.2" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" needle@^2.2.1: version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" negotiator@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== neo-async@^2.5.0, neo-async@^2.6.0, neo-async@^2.6.1: version "2.6.1" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.1.tgz#ac27ada66167fa8849a6addd837f6b189ad2081c" integrity sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw== next-tick@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== no-case@^2.2.0: version "2.3.2" resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== dependencies: lower-case "^1.1.1" node-forge@0.9.0: version "0.9.0" resolved 
"https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579" integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ== node-libs-browser@^2.2.0, node-libs-browser@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== dependencies: assert "^1.1.1" browserify-zlib "^0.2.0" buffer "^4.3.0" console-browserify "^1.1.0" constants-browserify "^1.0.0" crypto-browserify "^3.11.0" domain-browser "^1.1.1" events "^3.0.0" https-browserify "^1.0.0" os-browserify "^0.3.0" path-browserify "0.0.1" process "^0.11.10" punycode "^1.2.4" querystring-es3 "^0.2.0" readable-stream "^2.3.3" stream-browserify "^2.0.1" stream-http "^2.7.2" string_decoder "^1.0.0" timers-browserify "^2.0.4" tty-browserify "0.0.0" url "^0.11.0" util "^0.11.0" vm-browserify "^1.0.1" node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" needle "^2.2.1" nopt "^4.0.1" npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.2.7" rimraf "^2.6.1" semver "^5.3.0" tar "^4" node-releases@^1.1.40: version "1.1.41" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.41.tgz#57674a82a37f812d18e3b26118aefaf53a00afed" integrity sha512-+IctMa7wIs8Cfsa8iYzeaLTFwv5Y4r5jZud+4AnfymzeEXKBCavFX0KBgzVaPVqf0ywa6PrO8/b+bPqdwjGBSg== dependencies: semver "^6.3.0" nopt@3.x: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" integrity sha1-xkZdvwirzU2zWTF/eaxopkayj/k= dependencies: abbrev "1" nopt@^4.0.1: version 
"4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: remove-trailing-separator "^1.0.1" normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= normalize-url@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== dependencies: prepend-http "^2.0.0" query-string "^5.0.1" sort-keys "^2.0.0" npm-bundled@^1.0.1: version "1.0.6" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== 
npm-packlist@^1.1.6: version "1.4.6" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.6.tgz#53ba3ed11f8523079f1457376dd379ee4ea42ff4" integrity sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" gauge "~2.7.3" set-blocking "~2.0.0" nth-check@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== dependencies: boolbase "~1.0.0" null-check@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" integrity sha1-l33/1xdgErnsMNKjnbXPcqBDnt0= null-loader@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-1.0.0.tgz#90e85798e50e9dd1d568495a44e74829dec26744" integrity sha512-mYLDjDVTkjTlFoidxRhzO75rdcwfVXfw5G5zpj8sXnBkHtKJxMk4hTcRR4i5SOhDB6EvcQuYriy6IV23eq6uog== dependencies: loader-utils "^1.2.3" schema-utils "^1.0.0" num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= number-is-nan@^1.0.0: version "1.0.1" resolved 
"https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" integrity sha1-8MaapQ78lbhmwYb0AKM3acsvEpE= object-copy@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" object-inspect@^1.6.0: version "1.7.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== object-is@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.0.1.tgz#0aa60ec9989a0b3ed795cf4d06f62cf1ad6539b6" integrity sha1-CqYOyZiaCz7Xlc9NBvYs8a1lObY= object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-visit@^1.0.0: version "1.0.1" resolved 
"https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== dependencies: define-properties "^1.1.2" function-bind "^1.1.1" has-symbols "^1.0.0" object-keys "^1.0.11" object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= dependencies: define-properties "^1.1.2" es-abstract "^1.5.1" object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= dependencies: isobject "^3.0.1" obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== on-finished@~2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@1.x, once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved 
"https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" opn@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== dependencies: is-wsl "^1.1.0" optimist@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.6" levn "~0.3.0" prelude-ls "~1.1.2" type-check "~0.3.2" word-wrap "~1.2.3" original@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== dependencies: url-parse "^1.4.3" os-browserify@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-locale@^3.0.0, os-locale@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" integrity 
sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== dependencies: execa "^1.0.0" lcid "^2.0.0" mem "^4.0.0" os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== p-limit@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.1.tgz#aa07a788cc3151c939b5131f63570f0dd2009537" integrity sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg== dependencies: p-try "^2.0.0" p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-map@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity 
sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== p-retry@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== dependencies: retry "^0.12.0" p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@~1.0.5: version "1.0.10" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.10.tgz#4328badb5086a426aa90f541977d4955da5c9732" integrity sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw== parallel-transform@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== dependencies: cyclist "^1.0.1" inherits "^2.0.3" readable-stream "^2.1.5" param-case@2.1.x: version "2.1.1" resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= dependencies: no-case "^2.2.0" parse-asn1@^5.0.0: version "5.1.5" resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== dependencies: asn1.js "^4.0.0" browserify-aes "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.0" pbkdf2 "^3.0.3" safe-buffer "^5.1.1" parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity 
sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse-json@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= dependencies: error-ex "^1.3.1" json-parse-better-errors "^1.0.1" parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= parseqs@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" integrity sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0= dependencies: better-assert "~1.0.0" parseuri@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" integrity sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo= dependencies: better-assert "~1.0.0" parseurl@~1.3.2, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascalcase@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= path-browserify@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= path-exists@^2.0.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-is-inside@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= path-key@^2.0.0, path-key@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= path-parse@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify "^2.0.0" pinkie-promise "^2.0.0" pbkdf2@^3.0.3: version "3.0.17" resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== dependencies: 
create-hash "^1.1.2" create-hmac "^1.1.4" ripemd160 "^2.0.1" safe-buffer "^5.0.1" sha.js "^2.4.8" performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= picomatch@^2.0.4: version "2.1.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.1.1.tgz#ecdfbea7704adb5fe6fb47f9866c4c0e15e905c5" integrity sha512-OYMyqkKzK7blWO/+XZYP6w8hH0LDvkBvdvKukti+7kqYFCiEAk+gI3DWnryapc0Dau05ugGTy0foQ6mqn4AHYA== pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= pkg-dir@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== dependencies: find-up "^3.0.0" portfinder@^1.0.25: version "1.0.25" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.25.tgz#254fd337ffba869f4b9d37edc298059cb4d35eca" integrity sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg== dependencies: async "^2.6.2" debug "^3.1.1" mkdirp "^0.5.1" 
posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= postcss-load-config@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.0.tgz#c84d692b7bb7b41ddced94ee62e8ab31b417b003" integrity sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q== dependencies: cosmiconfig "^5.0.0" import-cwd "^2.0.0" postcss-loader@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== dependencies: loader-utils "^1.1.0" postcss "^7.0.0" postcss-load-config "^2.0.0" schema-utils "^1.0.0" postcss-modules-extract-imports@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== dependencies: postcss "^7.0.5" postcss-modules-local-by-default@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.6.tgz#dd9953f6dd476b5fd1ef2d8830c8929760b56e63" integrity sha512-oLUV5YNkeIBa0yQl7EYnxMgy4N6noxmiwZStaEJUSe2xPMcdNc8WmBQuQCx18H5psYbVxz8zoHk0RAAYZXP9gA== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-value-parser "^3.3.1" postcss-modules-scope@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.1.0.tgz#ad3f5bf7856114f6fcab901b0502e2a2bc39d4eb" integrity sha512-91Rjps0JnmtUB0cujlc8KIKCsJXWjzuxGeT/+Q2i2HXKZ7nBUeF9YQTZZTNvHVoNYj1AthsjnGLtqDUE0Op79A== 
dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-modules-values@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-2.0.0.tgz#479b46dc0c5ca3dc7fa5270851836b9ec7152f64" integrity sha512-Ki7JZa7ff1N3EIMlPnGTZfUMe69FFwiQPnVSXC9mnn3jozCRBYIxiZd44yJOV2AmabOo4qFf8s0dC/+lweG7+w== dependencies: icss-replace-symbols "^1.1.0" postcss "^7.0.6" postcss-selector-parser@^6.0.0: version "6.0.2" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz#934cf799d016c83411859e09dcecade01286ec5c" integrity sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg== dependencies: cssesc "^3.0.0" indexes-of "^1.0.1" uniq "^1.0.1" postcss-value-parser@^3.3.0, postcss-value-parser@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== postcss-value-parser@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz#482282c09a42706d1fc9a069b73f44ec08391dc9" integrity sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ== postcss@^7.0.0, postcss@^7.0.14, postcss@^7.0.23, postcss@^7.0.5, postcss@^7.0.6: version "7.0.23" resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.23.tgz#9f9759fad661b15964f3cfc3140f66f1e05eadc1" integrity sha512-hOlMf3ouRIFXD+j2VJecwssTwbvsPGJVMzupptg+85WA+i7MwyrydmQAgY3R+m0Bc0exunhbJmijy8u8+vufuQ== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= prepend-http@^2.0.0: version "2.0.0" 
resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= pretty-error@^2.0.2: version "2.1.1" resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= dependencies: renderkid "^2.0.1" utila "~0.4" private@^0.1.6: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= promise-inflight@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= promise@^7.0.1, promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== dependencies: asap "~2.0.3" proxy-addr@~2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== dependencies: forwarded "~0.1.2" ipaddr.js "1.9.0" prr@~1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= psl@^1.1.24: version "1.4.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.4.0.tgz#5dd26156cdb69fa1fdb8ab1991667d3f80ced7c2" integrity sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw== public-encrypt@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== dependencies: bn.js "^4.1.0" browserify-rsa "^4.0.0" create-hash "^1.1.0" parse-asn1 "^5.0.0" randombytes "^2.0.1" safe-buffer "^5.1.2" pug-attrs@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-2.0.4.tgz#b2f44c439e4eb4ad5d4ef25cac20d18ad28cc336" integrity sha512-TaZ4Z2TWUPDJcV3wjU3RtUXMrd3kM4Wzjbe3EWnSsZPsJ3LDI0F3yCnf2/W7PPFF+edUFQ0HgDL1IoxSz5K8EQ== dependencies: constantinople "^3.0.1" js-stringify "^1.0.1" pug-runtime "^2.0.5" pug-code-gen@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-2.0.2.tgz#ad0967162aea077dcf787838d94ed14acb0217c2" integrity sha512-kROFWv/AHx/9CRgoGJeRSm+4mLWchbgpRzTEn8XCiwwOy6Vh0gAClS8Vh5TEJ9DBjaP8wCjS3J6HKsEsYdvaCw== dependencies: constantinople "^3.1.2" doctypes "^1.1.0" js-stringify "^1.0.1" pug-attrs "^2.0.4" pug-error "^1.3.3" pug-runtime "^2.0.5" void-elements "^2.0.1" with "^5.0.0" pug-error@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-1.3.3.tgz#f342fb008752d58034c185de03602dd9ffe15fa6" integrity sha512-qE3YhESP2mRAWMFJgKdtT5D7ckThRScXRwkfo+Erqga7dyJdY3ZquspprMCj/9sJ2ijm5hXFWQE/A3l4poMWiQ== 
pug-filters@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-3.1.1.tgz#ab2cc82db9eeccf578bda89130e252a0db026aa7" integrity sha512-lFfjNyGEyVWC4BwX0WyvkoWLapI5xHSM3xZJFUhx4JM4XyyRdO8Aucc6pCygnqV2uSgJFaJWW3Ft1wCWSoQkQg== dependencies: clean-css "^4.1.11" constantinople "^3.0.1" jstransformer "1.0.0" pug-error "^1.3.3" pug-walk "^1.1.8" resolve "^1.1.6" uglify-js "^2.6.1" pug-lexer@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-4.1.0.tgz#531cde48c7c0b1fcbbc2b85485c8665e31489cfd" integrity sha512-i55yzEBtjm0mlplW4LoANq7k3S8gDdfC6+LThGEvsK4FuobcKfDAwt6V4jKPH9RtiE3a2Akfg5UpafZ1OksaPA== dependencies: character-parser "^2.1.1" is-expression "^3.0.0" pug-error "^1.3.3" pug-linker@^3.0.6: version "3.0.6" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-3.0.6.tgz#f5bf218b0efd65ce6670f7afc51658d0f82989fb" integrity sha512-bagfuHttfQOpANGy1Y6NJ+0mNb7dD2MswFG2ZKj22s8g0wVsojpRlqveEQHmgXXcfROB2RT6oqbPYr9EN2ZWzg== dependencies: pug-error "^1.3.3" pug-walk "^1.1.8" pug-load@^2.0.12: version "2.0.12" resolved "https://registry.yarnpkg.com/pug-load/-/pug-load-2.0.12.tgz#d38c85eb85f6e2f704dea14dcca94144d35d3e7b" integrity sha512-UqpgGpyyXRYgJs/X60sE6SIf8UBsmcHYKNaOccyVLEuT6OPBIMo6xMPhoJnqtB3Q3BbO4Z3Bjz5qDsUWh4rXsg== dependencies: object-assign "^4.1.0" pug-walk "^1.1.8" pug-parser@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-5.0.1.tgz#03e7ada48b6840bd3822f867d7d90f842d0ffdc9" integrity sha512-nGHqK+w07p5/PsPIyzkTQfzlYfuqoiGjaoqHv1LjOv2ZLXmGX1O+4Vcvps+P4LhxZ3drYSljjq4b+Naid126wA== dependencies: pug-error "^1.3.3" token-stream "0.0.1" pug-runtime@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-2.0.5.tgz#6da7976c36bf22f68e733c359240d8ae7a32953a" integrity sha512-P+rXKn9un4fQY77wtpcuFyvFaBww7/91f3jHa154qU26qFAnOe6SW1CbIDcxiG5lLK9HazYrMCCuDvNgDQNptw== pug-strip-comments@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-1.0.4.tgz#cc1b6de1f6e8f5931cf02ec66cdffd3f50eaf8a8" integrity sha512-i5j/9CS4yFhSxHp5iKPHwigaig/VV9g+FgReLJWWHEHbvKsbqL0oP/K5ubuLco6Wu3Kan5p7u7qk8A4oLLh6vw== dependencies: pug-error "^1.3.3" pug-walk@^1.1.8: version "1.1.8" resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-1.1.8.tgz#b408f67f27912f8c21da2f45b7230c4bd2a5ea7a" integrity sha512-GMu3M5nUL3fju4/egXwZO0XLi6fW/K3T3VTgFQ14GxNi8btlxgT5qZL//JwZFm/2Fa64J/PNS8AZeys3wiMkVA== pug@^2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/pug/-/pug-2.0.4.tgz#ee7682ec0a60494b38d48a88f05f3b0ac931377d" integrity sha512-XhoaDlvi6NIzL49nu094R2NA6P37ijtgMDuWE+ofekDChvfKnzFal60bhSdiy8y2PBO6fmz3oMEIcfpBVRUdvw== dependencies: pug-code-gen "^2.0.2" pug-filters "^3.1.1" pug-lexer "^4.1.0" pug-linker "^3.0.6" pug-load "^2.0.12" pug-parser "^5.0.1" pug-runtime "^2.0.5" pug-strip-comments "^1.0.4" pump@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pumpify@^1.3.3: version "1.5.1" resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" inherits "^2.0.3" pump "^2.0.0" punycode@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" integrity 
sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= punycode@^1.2.4, punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== qjobs@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" integrity sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg== qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== query-string@^5.0.1: version "5.1.1" resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== dependencies: decode-uri-component "^0.2.0" object-assign "^4.1.0" strict-uri-encode "^1.0.0" querystring-es3@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= querystring@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= querystringify@^2.1.1: version "2.1.1" 
resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.1.1.tgz#60e5a5fd64a7f8bfa4d2ab2ed6fdf4c85bad154e" integrity sha512-w7fLxIRCRT7U8Qu53jQnJyPkYZIaR4n5151KMfcJlO/A9397Wxb1amJvROTK6TOnp7PfoAmg/qXiNHI+08jRfA== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" randomfill@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== dependencies: randombytes "^2.0.5" safe-buffer "^5.1.0" range-parser@^1.0.3, range-parser@^1.2.0, range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== dependencies: bytes "3.1.0" http-errors "1.7.2" iconv-lite "0.4.24" unpipe "1.0.0" raw-loader@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-2.0.0.tgz#e2813d9e1e3f80d1bbade5ad082e809679e20c26" integrity sha512-kZnO5MoIyrojfrPWqrhFNLZemIAX8edMOCp++yC5RKxzFB3m92DqKNhKlU6+FvpOhWtvyh3jOaD7J6/9tpdIKg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity 
sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" path-type "^1.0.0" "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@^3.0.6, readable-stream@^3.1.1: version "3.4.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc" integrity sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" util-deprecate "^1.0.1" readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== 
dependencies: graceful-fs "^4.1.11" micromatch "^3.1.10" readable-stream "^2.0.2" readdirp@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839" integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ== dependencies: picomatch "^2.0.4" redent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= dependencies: indent-string "^2.1.0" strip-indent "^1.0.1" regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" regenerate@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== regenerator-runtime@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.2: version "0.13.3" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5" integrity sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw== regenerator-transform@^0.14.0: version "0.14.1" resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.1.tgz#3b2fce4e1ab7732c08f665dfdb314749c7ddd2fb" integrity 
sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ== dependencies: private "^0.1.6" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" safe-regex "^1.1.0" regexp.prototype.flags@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.2.0.tgz#6b30724e306a27833eeb171b66ac8890ba37e41c" integrity sha512-ztaw4M1VqgMwl9HlPpOuiYgItcHlunW0He2fE6eNfT6E/CF2FtYi9ofOYe4mKntstYk0Fyh/rDRBdS3AnxjlrA== dependencies: define-properties "^1.1.2" regexpu-core@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6" integrity sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg== dependencies: regenerate "^1.4.0" regenerate-unicode-properties "^8.1.0" regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" unicode-match-property-value-ecmascript "^1.1.0" regjsgen@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.1.tgz#48f0bf1a5ea205196929c0d9798b42d1ed98443c" integrity sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg== regjsparser@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" relateurl@0.2.x: version "0.2.7" resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= 
remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= renderkid@^2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== dependencies: css-select "^1.1.0" dom-converter "^0.2" htmlparser2 "^3.3.0" strip-ansi "^3.0.0" utila "^0.4.0" repeat-element@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^1.5.2, repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" request@^2.83.0: version "2.88.0" resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.0" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign "~0.9.0" performance-now "^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.4.3" tunnel-agent "^0.6.0" uuid "^3.3.2" 
require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= require-main-filename@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= resolve-cwd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= dependencies: resolve-from "^3.0.0" resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= dependencies: expand-tilde "^2.0.0" global-modules "^1.0.0" resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" integrity sha1-six699nWiBvItuZTM17rywoYh0g= resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity 
sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= resolve@^1.1.6, resolve@^1.10.0, resolve@^1.3.2, resolve@^1.8.1: version "1.12.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.2.tgz#08b12496d9aa8659c75f534a8f05f0d892fff594" integrity sha512-cAVTI2VLHWYsGOirfeYVVQ7ZDejtQ9fp4YhYckWDEkFfqbVjaT11iM8k6xSAfGFMM+gDpZjMnFssPu8we+mqFw== dependencies: path-parse "^1.0.6" ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= rfdc@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.1.4.tgz#ba72cc1367a0ccd9cf81a870b3b58bd3ad07f8c2" integrity sha512-5C9HXdzK8EAqN7JDif30jqsBzavB7wLpaubisuQIGHWf2gUXSpzy6ArX/+Da8RjFpagWsCn+pIgxTMAmKw9Zug== right-align@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" integrity sha1-YTObci/mo1FWiSENJOFMlhSGE+8= dependencies: align-text "^0.1.1" rimraf@^2.5.4, rimraf@^2.6.0, rimraf@^2.6.1, rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== dependencies: hash-base "^3.0.0" inherits "^2.0.1" run-queue@^1.0.0, run-queue@^1.0.3: version "1.0.3" resolved 
"https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= dependencies: aproba "^1.1.1" safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg== safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= dependencies: ret "~0.1.10" "safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== schema-utils@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf" integrity sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8= dependencies: ajv "^5.0.0" schema-utils@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== dependencies: ajv "^6.1.0" ajv-errors "^1.0.0" ajv-keywords "^3.1.0" select-hose@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= selfsigned@^1.10.7: version "1.10.7" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.7.tgz#da5819fd049d5574f28e88a9bcc6dbc6e6f3906b" integrity sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA== dependencies: node-forge "0.9.0" "semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== dependencies: debug "2.6.9" depd "~1.1.2" destroy "~1.0.4" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" http-errors "~1.7.2" mime "1.6.0" ms "2.1.1" on-finished "~2.3.0" range-parser "~1.2.1" statuses "~1.5.0" serialize-javascript@^1.7.0: version "1.9.1" resolved 
"https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-1.9.1.tgz#cfc200aef77b600c47da9bb8149c943e798c2fdb" integrity sha512-0Vb/54WJ6k5v8sSWN09S0ora+Hnr+cX40r9F170nT+mSkaxltoE/7R3OrIdBSUv1OoiobH1QoWQbCnAO+e8J1A== serve-index@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= dependencies: accepts "~1.3.4" batch "0.6.1" debug "2.6.9" escape-html "~1.0.3" http-errors "~1.6.2" mime-types "~2.1.17" parseurl "~1.3.2" serve-static@1.14.1: version "1.14.1" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" send "0.17.1" set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.3" split-string "^3.0.1" setimmediate@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== 
setprototypeof@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= dependencies: shebang-regex "^1.0.0" shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" isobject "^3.0.0" snapdragon-util "^3.0.1" snapdragon-util@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" resolved 
"https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: base "^0.11.1" debug "^2.2.0" define-property "^0.2.5" extend-shallow "^2.0.1" map-cache "^0.2.2" source-map "^0.5.6" source-map-resolve "^0.5.0" use "^3.1.0" socket.io-adapter@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz#2a805e8a14d6372124dd9159ad4502f8cb07f06b" integrity sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs= socket.io-client@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.1.1.tgz#dcb38103436ab4578ddb026638ae2f21b623671f" integrity sha512-jxnFyhAuFxYfjqIgduQlhzqTcOEQSn+OHKVfAxWaNWa7ecP7xSNk2Dx/3UEsDcY7NcFafxvNvKPmmO7HTwTxGQ== dependencies: backo2 "1.0.2" base64-arraybuffer "0.1.5" component-bind "1.0.0" component-emitter "1.2.1" debug "~3.1.0" engine.io-client "~3.2.0" has-binary2 "~1.0.2" has-cors "1.1.0" indexof "0.0.1" object-component "0.0.3" parseqs "0.0.5" parseuri "0.0.5" socket.io-parser "~3.2.0" to-array "0.1.4" socket.io-parser@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.2.0.tgz#e7c6228b6aa1f814e6148aea325b51aa9499e077" integrity sha512-FYiBx7rc/KORMJlgsXysflWx/RIvtqZbyGLlHZvjfmPTPeuD/I8MaW7cfFrj5tRltICJdgwflhfZ3NVVbVLFQA== dependencies: component-emitter "1.2.1" debug "~3.1.0" isarray "2.0.1" socket.io@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.1.1.tgz#a069c5feabee3e6b214a75b40ce0652e1cfb9980" integrity sha512-rORqq9c+7W0DAK3cleWNSyfv/qKXV99hV4tZe+gGLfBECw3XEhBy7x85F3wypA9688LKjtwO9pX9L33/xQI8yA== dependencies: debug "~3.1.0" engine.io "~3.2.0" has-binary2 "~1.0.2" socket.io-adapter "~1.1.0" socket.io-client "2.1.1" socket.io-parser "~3.2.0" sockjs-client@1.4.0: version "1.4.0" resolved 
"https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.4.0.tgz#c9f2568e19c8fd8173b4997ea3420e0bb306c7d5" integrity sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g== dependencies: debug "^3.2.5" eventsource "^1.0.7" faye-websocket "~0.11.1" inherits "^2.0.3" json3 "^3.3.2" url-parse "^1.4.3" sockjs@0.3.19: version "0.3.19" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d" integrity sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw== dependencies: faye-websocket "^0.10.0" uuid "^3.0.1" sort-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= dependencies: is-plain-obj "^1.0.0" source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-resolve@^0.5.0: version "0.5.2" resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" urix "^0.1.0" source-map-support@~0.5.12: version "0.5.16" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-url@^0.4.0: version "0.4.0" resolved 
"https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= source-map@^0.5.0, source-map@^0.5.1, source-map@^0.5.6, source-map@^0.5.7, source-map@~0.5.1: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" integrity sha1-2rc/vPwrqBm03gO9b26qSBZLP50= dependencies: amdefine ">=0.0.4" spdx-correct@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: spdx-expression-parse "^3.0.0" spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== spdx-expression-parse@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: version "3.0.5" resolved 
"https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== dependencies: debug "^4.1.0" detect-node "^2.0.4" hpack.js "^2.1.6" obuf "^1.1.2" readable-stream "^3.0.6" wbuf "^1.7.3" spdy@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.1.tgz#6f12ed1c5db7ea4f24ebb8b89ba58c87c08257f2" integrity sha512-HeZS3PBdMA+sZSu0qwpCxl3DeALD5ASx8pAX0jZdKXSpPWbQ6SYGnlg3BBmYLx5LtiZrmkAZfErCm2oECBcioA== dependencies: debug "^4.1.0" handle-thing "^2.0.0" http-deceiver "^1.2.7" select-hose "^2.0.0" spdy-transport "^3.0.0" split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow "^3.0.0" sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" ssri@^6.0.1: version "6.0.1" resolved 
"https://registry.yarnpkg.com/ssri/-/ssri-6.0.1.tgz#2a3c41b28dd45b62b63676ecb74001265ae9edd8" integrity sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA== dependencies: figgy-pudding "^3.5.1" static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== dependencies: inherits "~2.0.1" readable-stream "^2.0.2" stream-each@^1.1.0: version "1.2.3" resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== dependencies: end-of-stream "^1.1.0" stream-shift "^1.0.0" stream-http@^2.7.2: version "2.8.3" resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== dependencies: builtin-status-codes "^3.0.0" inherits "^2.0.1" readable-stream "^2.3.6" to-arraybuffer "^1.0.0" xtend "^4.0.0" stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= streamroller@^1.0.6: 
version "1.0.6" resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-1.0.6.tgz#8167d8496ed9f19f05ee4b158d9611321b8cacd9" integrity sha512-3QC47Mhv3/aZNFpDDVO44qQb9gwB9QggMEE0sQmkTAwBVYdBRWISdsywlkfm5II1Q5y/pmrHflti/IgmIzdDBg== dependencies: async "^2.6.2" date-format "^2.0.0" debug "^3.2.6" fs-extra "^7.0.1" lodash "^4.17.14" strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" "string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== dependencies: emoji-regex "^7.0.1" is-fullwidth-code-point "^2.0.0" strip-ansi "^5.1.0" string.prototype.trimleft@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string.prototype.trimright@^2.1.0: 
version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58" integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: ansi-regex "^4.1.0" strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" 
strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= dependencies: get-stdin "^4.0.1" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= style-loader@^0.23.1: version "0.23.1" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.23.1.tgz#cb9154606f3e771ab6c4ab637026a1049174d925" integrity sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" supports-color@6.1.0, supports-color@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== dependencies: has-flag "^3.0.0" supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.0: version "3.2.3" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity 
sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" tapable@^1.0.0, tapable@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tar@^4: version "4.4.13" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.13.tgz#43b364bc52888d555298637b10d60790254ab525" integrity sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA== dependencies: chownr "^1.1.1" fs-minipass "^1.2.5" minipass "^2.8.6" minizlib "^1.2.1" mkdirp "^0.5.0" safe-buffer "^5.1.2" yallist "^3.0.3" terser-webpack-plugin@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.1.tgz#61b18e40eaee5be97e771cdbb10ed1280888c2b4" integrity sha512-ZXmmfiwtCLfz8WKZyYUuuHf3dMYEjg8NrjHMb0JqHVHVOSkzp3cW2/XG1fP3tRhqEqSzMwzzRQGtAPbs4Cncxg== dependencies: cacache "^12.0.2" find-cache-dir "^2.1.0" is-wsl "^1.1.0" schema-utils "^1.0.0" serialize-javascript "^1.7.0" source-map "^0.6.1" terser "^4.1.2" webpack-sources "^1.4.0" worker-farm "^1.7.0" terser@^4.1.2: version "4.4.0" resolved "https://registry.yarnpkg.com/terser/-/terser-4.4.0.tgz#22c46b4817cf4c9565434bfe6ad47336af259ac3" integrity sha512-oDG16n2WKm27JO8h4y/w3iqBGAOSCtq7k8dRmrn4Wf9NouL0b2WpMHGChFGZq4nFAQy1FsNJrVQHfurXOSTmOA== dependencies: commander "^2.20.0" source-map "~0.6.1" source-map-support "~0.5.12" through2@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" xtend "~4.0.1" thunky@^1.0.2: version "1.1.0" resolved 
"https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== timers-browserify@^2.0.4: version "2.0.11" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== dependencies: setimmediate "^1.0.4" tmp@0.0.33, tmp@0.0.x: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" to-array@0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" integrity sha1-F+bBH3PdTz10zaek/zI46a2b+JA= to-arraybuffer@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" resolved 
"https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= dependencies: is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" extend-shallow "^3.0.2" regex-not "^1.0.2" safe-regex "^1.1.0" toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== token-stream@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-0.0.1.tgz#ceeefc717a76c4316f126d0b9dbaa55d7e7df01a" integrity sha1-zu78cXp2xDFvEm0LnbqlXX598Bo= toposort@^1.0.0: version "1.0.7" resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= tough-cookie@~2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: psl "^1.1.24" punycode "^1.4.1" trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" 
integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= tslib@^1.9.0: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" type-is@~1.6.17, type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" type@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3" integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow== typedarray@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= uglify-js@3.4.x: version "3.4.10" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== dependencies: commander "~2.19.0" source-map "~0.6.1" uglify-js@^2.6.1: version "2.8.29" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" integrity sha1-KcVzMUgFe7Th913zW3qcty5qWd0= dependencies: source-map "~0.5.1" yargs "~3.10.0" optionalDependencies: uglify-to-browserify "~1.0.0" uglify-js@^3.1.4: version "3.6.9" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.6.9.tgz#85d353edb6ddfb62a9d798f36e91792249320611" integrity sha512-pcnnhaoG6RtrvHJ1dFncAe8Od6Nuy30oaJ82ts6//sGSXOP5UjBMEthiProjXmMNHOfd93sqlkztifFMcb+4yw== dependencies: commander "~2.20.3" source-map "~0.6.1" uglify-to-browserify@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" integrity sha1-bgkk1r2mta/jSeOabWMoUKD4grc= ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== unicode-canonical-property-names-ecmascript@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== unicode-match-property-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== dependencies: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" unicode-match-property-value-ecmascript@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" set-value "^2.0.1" uniq@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= unique-filename@^1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== dependencies: unique-slug "^2.0.0" unique-slug@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== dependencies: imurmurhash "^0.1.4" universalify@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= dependencies: has-value "^0.3.1" isobject "^3.0.0" upath@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== upper-case@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= uri-js@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" 
resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= url-join@^2.0.2: version "2.0.5" resolved "https://registry.yarnpkg.com/url-join/-/url-join-2.0.5.tgz#5af22f18c052a000a48d7b82c5e9c2e2feeda728" integrity sha1-WvIvGMBSoACkjXuCxenC4v7tpyg= url-parse@^1.4.3: version "1.4.7" resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.7.tgz#a8a83535e8c00a316e403a5db4ac1b9b853ae278" integrity sha512-d3uaVyzDB9tQoSXFvuSUNFibTd9zxd2bkVrDRvF5TmvWWQwqE4lgYJ5m+x1DbecWkw+LK4RNl2CU1hHuOKPVlg== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" url@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= dependencies: punycode "1.3.2" querystring "0.2.0" use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== useragent@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" integrity sha512-4AoH4pxuSvHCjqLO04sU6U/uE65BYza8l/KKBS0b0hnUPWi+cQ2BpeTEwejCSx9SPV5/U03nniDTrWx5NrmKdw== dependencies: lru-cache "4.1.x" tmp "0.0.x" util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util.promisify@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== dependencies: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" util@0.10.3: 
version "0.10.3" resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= dependencies: inherits "2.0.1" util@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== dependencies: inherits "2.0.3" utila@^0.4.0, utila@~0.4: version "0.4.0" resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.3.tgz#4568f0216e78760ee1dbf3a4d2cf53e224112866" integrity sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ== v8-compile-cache@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= verror@1.10.0: version "1.10.0" resolved 
"https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" vm-browserify@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== void-elements@^2.0.0, void-elements@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= watchpack@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== dependencies: chokidar "^2.0.2" graceful-fs "^4.1.2" neo-async "^2.5.0" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== dependencies: minimalistic-assert "^1.0.0" webpack-cli@^3.3.1: version "3.3.10" resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.10.tgz#17b279267e9b4fb549023fae170da8e6e766da13" integrity sha512-u1dgND9+MXaEt74sJR4PR7qkPxXUSQ0RXYq8x1L6Jg1MYVEmGPrH6Ah6C4arD4r0J1P5HKjRqpab36k0eIzPqg== dependencies: chalk "2.4.2" cross-spawn "6.0.5" enhanced-resolve "4.1.0" findup-sync "3.0.0" global-modules "2.0.0" import-local "2.0.0" interpret "1.2.0" loader-utils "1.2.3" supports-color "6.1.0" v8-compile-cache "2.0.3" yargs "13.2.4" webpack-dev-middleware@^2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-2.0.6.tgz#a51692801e8310844ef3e3790e1eacfe52326fd4" integrity sha512-tj5LLD9r4tDuRIDa5Mu9lnY2qBBehAITv6A9irqXhw/HQquZgTx3BCd57zYbU2gMDnncA49ufK2qVQSbaKJwOw== dependencies: loud-rejection "^1.6.0" memory-fs "~0.4.1" mime "^2.1.0" path-is-absolute "^1.0.0" range-parser "^1.0.3" url-join "^2.0.2" webpack-log "^1.0.1" webpack-dev-middleware@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3" integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw== dependencies: memory-fs "^0.4.1" mime "^2.4.4" mkdirp "^0.5.1" range-parser "^1.2.1" webpack-log "^2.0.0" webpack-dev-server@^3.3.1: version "3.9.0" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.9.0.tgz#27c3b5d0f6b6677c4304465ac817623c8b27b89c" integrity sha512-E6uQ4kRrTX9URN9s/lIbqTAztwEPdvzVrcmHE8EQ9YnuT9J8Es5Wrd8n9BKg1a0oZ5EgEke/EQFgUsp18dSTBw== dependencies: ansi-html "0.0.7" bonjour "^3.5.0" chokidar "^2.1.8" compression "^1.7.4" connect-history-api-fallback "^1.6.0" debug "^4.1.1" del "^4.1.1" express "^4.17.1" html-entities "^1.2.1" http-proxy-middleware "0.19.1" import-local "^2.0.0" internal-ip "^4.3.0" ip "^1.1.5" is-absolute-url "^3.0.3" killable "^1.0.1" loglevel "^1.6.4" opn "^5.5.0" p-retry "^3.0.1" portfinder "^1.0.25" schema-utils "^1.0.0" selfsigned "^1.10.7" semver "^6.3.0" serve-index "^1.9.1" sockjs "0.3.19" sockjs-client "1.4.0" spdy "^4.0.1" strip-ansi "^3.0.1" supports-color "^6.1.0" url "^0.11.0" webpack-dev-middleware "^3.7.2" webpack-log "^2.0.0" ws "^6.2.1" yargs "12.0.5" webpack-fix-style-only-entries@^0.2.1: version "0.2.2" resolved "https://registry.yarnpkg.com/webpack-fix-style-only-entries/-/webpack-fix-style-only-entries-0.2.2.tgz#60331c608b944ac821a3b6f2ae491a6d79ba40eb" integrity 
sha512-0wcrLCnISP8htV0NP1mT0e2mHhfjGQdNk82s8BTLVvF7rXuoJuUUzP3aCUXnRqlLgmTBx5WgqPhnczjatl+iSQ== webpack-log@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-1.2.0.tgz#a4b34cda6b22b518dbb0ab32e567962d5c72a43d" integrity sha512-U9AnICnu50HXtiqiDxuli5gLB5PGBo7VvcHx36jRZHwK4vzOYLbImqT4lwWwoMHdQWwEKw736fCHEekokTEKHA== dependencies: chalk "^2.1.0" log-symbols "^2.1.0" loglevelnext "^1.0.1" uuid "^3.1.0" webpack-log@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== dependencies: ansi-colors "^3.0.0" uuid "^3.3.2" webpack-shell-plugin@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/webpack-shell-plugin/-/webpack-shell-plugin-0.5.0.tgz#29b8a1d80ddeae0ddb10e729667f728653c2c742" integrity sha1-Kbih2A3erg3bEOcpZn9yhlPCx0I= webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== dependencies: source-list-map "^2.0.0" source-map "~0.6.1" webpack@^4.30.0: version "4.41.2" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.2.tgz#c34ec76daa3a8468c9b61a50336d8e3303dce74e" integrity sha512-Zhw69edTGfbz9/8JJoyRQ/pq8FYUoY0diOXqW0T6yhgdhCv6wr0hra5DwwWexNRns2Z2+gsnrNcbe9hbGBgk/A== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/wasm-edit" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" acorn "^6.2.1" ajv "^6.10.2" ajv-keywords "^3.4.1" chrome-trace-event "^1.0.2" enhanced-resolve "^4.1.0" eslint-scope "^4.0.3" json-parse-better-errors "^1.0.2" loader-runner "^2.4.0" loader-utils "^1.2.3" memory-fs "^0.4.1" micromatch 
"^3.1.10" mkdirp "^0.5.1" neo-async "^2.6.1" node-libs-browser "^2.2.1" schema-utils "^1.0.0" tapable "^1.1.3" terser-webpack-plugin "^1.4.1" watchpack "^1.6.0" webpack-sources "^1.4.1" websocket-driver@>=0.5.1: version "0.7.3" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.3.tgz#a2d4e0d4f4f116f1e6297eba58b05d430100e9f9" integrity sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg== dependencies: http-parser-js ">=0.4.0 <0.4.11" safe-buffer ">=5.1.0" websocket-extensions ">=0.1.1" websocket-extensions@>=0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.3.tgz#5d2ff22977003ec687a4b87073dfbbac146ccf29" integrity sha512-nqHUnMXmBzT0w570r2JpJxfiSD1IzoI+HGVdd3aZ0yNi3ngvQ4jv1dtHt5VGxfI2yj5yqImPhOK4vmIh2xMbGg== which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@^1.1.1, which@^1.2.1, which@^1.2.14, which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" window-size@0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" integrity sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0= with@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/with/-/with-5.1.1.tgz#fa4daa92daf32c4ea94ed453c81f04686b575dfe" 
integrity sha1-+k2qktrzLE6pTtRTyB8EaGtXXf4= dependencies: acorn "^3.1.0" acorn-globals "^3.0.0" word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== wordwrap@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" integrity sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8= wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= worker-farm@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== dependencies: errno "~0.1.7" wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== dependencies: ansi-styles "^3.2.0" string-width "^3.0.0" strip-ansi "^5.0.0" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= ws@^6.2.1: version "6.2.1" resolved 
"https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== dependencies: async-limiter "~1.0.0" ws@~3.3.1: version "3.3.3" resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA== dependencies: async-limiter "~1.0.0" safe-buffer "~5.1.0" ultron "~1.1.0" xmlhttprequest-ssl@~1.5.4: version "1.5.5" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e" integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4= xtend@^4.0.0, xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== "y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.2, yallist@^3.0.3: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^11.1.1: version "11.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" integrity 
sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs-parser@^13.1.0: version "13.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs@12.0.5: version "12.0.5" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== dependencies: cliui "^4.0.0" decamelize "^1.2.0" find-up "^3.0.0" get-caller-file "^1.0.1" os-locale "^3.0.0" require-directory "^2.1.1" require-main-filename "^1.0.1" set-blocking "^2.0.0" string-width "^2.0.0" which-module "^2.0.0" y18n "^3.2.1 || ^4.0.0" yargs-parser "^11.1.1" yargs@13.2.4: version "13.2.4" resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== dependencies: cliui "^5.0.0" find-up "^3.0.0" get-caller-file "^2.0.1" os-locale "^3.1.0" require-directory "^2.1.1" require-main-filename "^2.0.0" set-blocking "^2.0.0" string-width "^3.0.0" which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^13.1.0" yargs@~3.10.0: version "3.10.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" integrity sha1-9+572FfdfB0tOMDnTvvWgdFDH9E= dependencies: camelcase "^1.0.2" cliui "^2.1.0" decamelize "^1.0.0" window-size "0.1.0" yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk= 
buildbot-3.4.0/www/grid_view/000077500000000000000000000000001413250514000161205ustar00rootroot00000000000000buildbot-3.4.0/www/grid_view/buildbot_grid_view/000077500000000000000000000000001413250514000217635ustar00rootroot00000000000000buildbot-3.4.0/www/grid_view/buildbot_grid_view/__init__.py000066400000000000000000000015351413250514000241000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.www.plugin import Application # create the interface for the setuptools entry point ep = Application(__name__, "Buildbot Grid View plugin") buildbot-3.4.0/www/grid_view/karma.conf.js000066400000000000000000000003641413250514000205000ustar00rootroot00000000000000const common = require('buildbot-build-common'); module.exports = function karmaConfig (config) { common.createTemplateKarmaConfig(config, { testRoot: 'src/tests.webpack.js', webpack: require('./webpack.config') }); }; buildbot-3.4.0/www/grid_view/package.json000066400000000000000000000020221413250514000204020ustar00rootroot00000000000000{ "plugin_name": "grid_view", "name": "buildbot-grid-view", "private": true, "main": "buildbot_grid_view/static/scripts.js", "style": "buildbot_grid_view/static/styles.css", "scripts": { "build": "rimraf buildbot_grid_view/static && webpack --bail --progress --profile --env prod", "build-dev": "rimraf buildbot_grid_view/static && webpack --bail --progress --profile --env dev", "dev": "webpack --bail --progress --profile --watch --env dev", "test": "karma start", "test-watch": "karma start --auto-watch --no-single-run" }, "devDependencies": { "angular-mocks": "^1.7.9", "buildbot-build-common": "link:../build_common", "lodash": "^4.17.19", "rimraf": "^2.6.3" }, "dependencies": { "angular": "^1.8.0", "angular-animate": "^1.7.9", "angular-ui-bootstrap": "^2.5.6", "buildbot-data-js": "link:../data_module", "guanlecoja-ui": "link:../guanlecoja-ui", "jquery": "^3.5.0" } } buildbot-3.4.0/www/grid_view/postcss.config.js000066400000000000000000000001711413250514000214170ustar00rootroot00000000000000module.exports = { plugins: { autoprefixer: { browsers: ['last 2 versions'] }, }, }; buildbot-3.4.0/www/grid_view/setup.cfg000066400000000000000000000000001413250514000177270ustar00rootroot00000000000000buildbot-3.4.0/www/grid_view/setup.py000066400000000000000000000027751413250514000176450ustar00rootroot00000000000000#!/usr/bin/env python # 
# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members try: from buildbot_pkg import setup_www_plugin except ImportError: import sys print('Please install buildbot_pkg module in order to install that ' 'package, or use the pre-build .whl modules available on pypi', file=sys.stderr) sys.exit(1) setup_www_plugin( name='buildbot-grid-view', description='Buildbot Grid View plugin', author=u'Robin Jarry', author_email=u'robin.jarry@6wind.com', url='http://buildbot.net/', packages=['buildbot_grid_view'], package_data={ '': [ 'VERSION', 'static/*' ] }, entry_points=""" [buildbot.www] grid_view = buildbot_grid_view:ep """, classifiers=[ 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)' ], ) buildbot-3.4.0/www/grid_view/src/000077500000000000000000000000001413250514000167075ustar00rootroot00000000000000buildbot-3.4.0/www/grid_view/src/module/000077500000000000000000000000001413250514000201745ustar00rootroot00000000000000buildbot-3.4.0/www/grid_view/src/module/grid.controller.js000066400000000000000000000237141413250514000236500ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS203: Remove `|| {}` from converted for-own loops * DS205: Consider reworking code to avoid use of IIFEs * DS207: Consider 
shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // This file is part of Buildbot. Buildbot is free software: you can // redistribute it and/or modify it under the terms of the GNU General Public // License as published by the Free Software Foundation, version 2. // // This program is distributed in the hope that it will be useful, but WITHOUT // ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS // FOR A PARTICULAR PURPOSE. See the GNU General Public License for more // details. // // You should have received a copy of the GNU General Public License along with // this program; if not, write to the Free Software Foundation, Inc., 51 // Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. // // Copyright Buildbot Team Members class Grid { constructor($scope, $stateParams, $state, resultsService, dataService, bbSettingsService) { this.onChange = this.onChange.bind(this); this.changeBranch = this.changeBranch.bind(this); this.changeResult = this.changeResult.bind(this); this.toggleTag = this.toggleTag.bind(this); this.resetTags = this.resetTags.bind(this); this.refresh = this.refresh.bind(this); this.isBuilderDisplayed = this.isBuilderDisplayed.bind(this); this.isTagToggled = this.isTagToggled.bind(this); this.$scope = $scope; this.$stateParams = $stateParams; this.$state = $state; _.mixin(this.$scope, resultsService); this.data = dataService.open().closeOnDestroy(this.$scope); this.branch = this.$stateParams.branch; this.tags = this.$stateParams.tag != null ? this.$stateParams.tag : []; if (!angular.isArray(this.tags)) { this.tags = [this.tags]; } this.result = this.$stateParams.result; // XXX: Angular ngOptions tag only works with string values. Force // convert the result code to string. 
this.results = ((() => { const result = []; for (let c in resultsService.resultsTexts) { const t = resultsService.resultsTexts[c]; result.push({code: c + '', text: t}); } return result; })()); const settings = bbSettingsService.getSettingsGroup('Grid'); this.revisionLimit = settings.revisionLimit.value; this.changeFetchLimit = settings.changeFetchLimit.value; this.buildFetchLimit = settings.buildFetchLimit.value; this.fullChanges = settings.fullChanges.value; this.leftToRight = settings.leftToRight.value; this.buildsets = this.data.getBuildsets({ limit: this.buildFetchLimit, order: '-bsid' }); this.changes = this.data.getChanges({ limit: this.changeFetchLimit, order: '-changeid' }); this.builders = this.data.getBuilders(); this.buildrequests = this.data.getBuildrequests({ limit: this.buildFetchLimit, order: '-buildrequestid' }); this.builds = this.data.getBuilds({ limit: this.buildFetchLimit, order: '-buildrequestid' }); this.buildsets.onChange = this.onChange; this.changes.onChange = this.onChange; this.builders.onChange = this.onChange; this.buildrequests.change = this.onChange; this.builds.onChange = this.onChange; } dataReady() { for (let collection of [this.buildsets, this.changes, this.builders, this.buildrequests, this.builds]) { if (!(collection.$resolved && (collection.length > 0))) { return false; } } return true; } dataFetched() { for (let collection of [this.buildsets, this.changes, this.builders, this.buildrequests, this.builds]) { if (!collection.$resolved) { return false; } } return true; } onChange() { let bset, c, req; let change, i, builder; if (!this.dataReady()) { return; } let changes = {}; const branches = {}; // map changes by source stamp id const changesBySSID = {}; for (c of Array.from(this.changes)) { changesBySSID[c.sourcestamp.ssid] = c; c.buildsets = {}; } // there can be multiple buildsets by change // associate buildsets to each change and remember existing branches for (bset of Array.from(this.buildsets)) { change = 
changesBySSID[_.last(bset.sourcestamps).ssid]; if (change == null) { continue; } change.buildsets[bset.bsid] = bset; if (change.branch == null) { change.branch = 'master'; } branches[change.branch] = true; if (this.branch && (change.branch !== this.branch)) { continue; } changes[change.changeid] = change; } // only keep the @revisionLimit most recent changes for display changes = ((() => { const result = []; for (let cid of Object.keys(changes || {})) { change = changes[cid]; result.push(change); } return result; })()); if (this.leftToRight) { changes.sort((a, b) => a.changeid - b.changeid); if (changes.length > this.revisionLimit) { changes = changes.slice(changes.length - this.revisionLimit); } } else { changes.sort((a, b) => b.changeid - a.changeid); if (changes.length > this.revisionLimit) { changes = changes.slice(0, this.revisionLimit); } } this.$scope.changes = changes; this.$scope.branches = ((() => { const result1 = []; for (let br in branches) { result1.push(br); } return result1; })()); const requestsByBSID = {}; for (req of Array.from(this.buildrequests)) { (requestsByBSID[req.buildsetid] != null ? requestsByBSID[req.buildsetid] : (requestsByBSID[req.buildsetid] = [])).push(req); } const buildsByReqID = {}; for (let build of Array.from(this.builds)) { // There may be multiple builds for a given request // (for example when a worker connection is lost). (buildsByReqID[build.buildrequestid] != null ? buildsByReqID[build.buildrequestid] : (buildsByReqID[build.buildrequestid] = [])).push(build); } for (builder of Array.from(this.builders)) { builder.builds = {}; } const buildersById = {}; // find builds for the selected changes and associate them to builders for (c of Array.from(this.$scope.changes)) { for (let bsid of Object.keys(c.buildsets || {})) { bset = c.buildsets[bsid]; const requests = requestsByBSID[bsid]; if (requests == null) { continue; } for (req of Array.from(requests)) { const builds = buildsByReqID[req.buildrequestid] != null ? 
buildsByReqID[req.buildrequestid] : []; if ((this.result != null) && (this.result !== '') && !isNaN(this.result)) { i = 0; while (i < builds.length) { if (parseInt(builds[i].results) !== parseInt(this.result)) { builds.splice(i, 1); } else { i += 1; } } } if (!(builds.length > 0)) { continue; } builder = this.builders.get(builds[0].builderid); if (!this.isBuilderDisplayed(builder)) { continue; } buildersById[builder.builderid] = builder; builder.builds[c.changeid] = builds; } } } return this.$scope.builders = ((() => { const result2 = []; for (i of Object.keys(buildersById || {})) { builder = buildersById[i]; result2.push(builder); } return result2; })()); } changeBranch(branch) { this.branch = branch; return this.refresh(); } changeResult(result) { this.result = result; return this.refresh(); } toggleTag(tag) { const i = this.tags.indexOf(tag); if (i < 0) { this.tags.push(tag); } else { this.tags.splice(i, 1); } return this.refresh(); } resetTags() { this.tags = []; return this.refresh(); } refresh() { const params = { branch: this.branch, tag: this.tags.length === 0 ? undefined : this.tags, result: this.result }; // change URL without reloading page this.$state.transitionTo(this.$state.current, params, {notify: false}); this.onChange(); } isBuilderDisplayed(builder) { for (let tag of Array.from(this.tags)) { if (builder.tags.indexOf(tag) < 0) { return false; } } return true; } isTagToggled(tag) { return this.tags.indexOf(tag) >= 0; } } angular.module('grid_view') .controller('gridController', ['$scope', '$stateParams', '$state', 'resultsService', 'dataService', 'bbSettingsService', Grid]); buildbot-3.4.0/www/grid_view/src/module/grid.tpl.jade000066400000000000000000000061521413250514000225500ustar00rootroot00000000000000.container.grid .load-indicator(ng-hide='C.dataFetched()') .spinner i.fa.fa-circle-o-notch.fa-spin.fa-2x p loading p(ng-show="C.dataFetched() && C.changes.length == 0") | No changes. 
Grid View needs a changesource to be setup, and a(href="#/changes") | changes | to be in the system. .form-inline(ng-show="C.dataReady()") .form-group label Branch select.form-control(ng-model="C.branch", ng-change="C.changeBranch(C.branch)", ng-options="br for br in branches | orderBy") option(value="") (all) .form-group label Results select.form-control(ng-model="C.result", ng-change="C.changeResult(C.result)", ng-options="r.code as r.text for r in C.results") option(value="") (all) table.table.table-condensed.table-striped.table-hover(ng-show="C.dataReady()") thead tr th | Builder th span(ng-show="C.tags.length == 0") | Tags span(ng-show="C.tags.length < 5", ng-repeat="tag in C.tags") span.builder-tag.label.clickable.label-success(ng-click="C.toggleTag(tag)") | {{ tag }} span(ng-show="C.tags.length >= 5") span.label.label-success | {{ C.tags.length }} tags span(ng-show="C.tags.length > 0") span.label.label-danger.clickable(ng-click="C.resetTags()", uib-tooltip="Reset tags filter") x th.change(ng-repeat="ch in changes track by ch.changeid") changedetails(change="ch", compact="!C.fullChanges") tbody tr(ng-repeat="b in builders | orderBy: 'name'") th a(ui-sref="builder({builder: b.builderid})") | {{ b.name }} td span(ng-repeat="tag in b.tags") span.builder-tag.label.clickable(ng-click="C.toggleTag(tag)", ng-class="C.isTagToggled(tag) ? 
'label-success': 'label-default'") | {{ tag }} td(ng-repeat="ch in changes track by ch.changeid") a(ng-repeat="build in b.builds[ch.changeid] | orderBy: 'buildid'", ui-sref="build({builder: b.builderid, build: build.number})") script(type="text/ng-template" id="buildsummarytooltip") buildsummary(buildid="build.buildid" type="tooltip") span.badge-status(uib-tooltip-template="'buildsummarytooltip'" tooltip-class="buildsummarytooltipstyle" tooltip-placement="auto left-bottom" tooltip-popup-delay="400" tooltip-popup-close-delay="400" ng-class="results2class(build, 'pulse')") | {{ build.number }} buildbot-3.4.0/www/grid_view/src/module/main.module.js000066400000000000000000000060101413250514000227370ustar00rootroot00000000000000// This file is part of Buildbot. Buildbot is free software: you can // redistribute it and/or modify it under the terms of the GNU General Public // License as published by the Free Software Foundation, version 2. // // This program is distributed in the hope that it will be useful, but WITHOUT // ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS // FOR A PARTICULAR PURPOSE. See the GNU General Public License for more // details. // // You should have received a copy of the GNU General Public License along with // this program; if not, write to the Free Software Foundation, Inc., 51 // Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
// // Copyright Buildbot Team Members import 'angular-animate'; import 'guanlecoja-ui'; import 'buildbot-data-js'; class GridState { constructor($stateProvider, glMenuServiceProvider, bbSettingsServiceProvider) { // Menu configuration glMenuServiceProvider.addGroup({ name: 'grid', caption: 'Grid View', icon: 'cubes', order: 4 }); // Register URL routing $stateProvider .state({ name: 'grid', controller: 'gridController', controllerAs: 'C', template: require('./grid.tpl.jade'), url: '/grid?branch&tag&result', reloadOnSearch: false, data: { group: 'grid', caption: 'Grid View' } }); bbSettingsServiceProvider.addSettingsGroup({ name: 'Grid', caption: 'Grid related settings', items: [{ type: 'bool', name: 'fullChanges', caption: 'Show avatar and time ago in change details', defaultValue: false } , { type: 'bool', name: 'leftToRight', caption: 'Show most recent changes on the right', defaultValue: false } , { type: 'integer', name: 'revisionLimit', caption: 'Maximum number of revisions to display', default_value: 5 } , { type: 'integer', name: 'changeFetchLimit', caption: 'Maximum number of changes to fetch', default_value: 100 } , { type: 'integer', name: 'buildFetchLimit', caption: 'Maximum number of builds to fetch', default_value: 1000 } ] }); } } angular.module('grid_view', [ 'ui.router', 'ui.bootstrap', 'ngAnimate', 'guanlecoja.ui', 'bbData']) .config(['$stateProvider', 'glMenuServiceProvider', 'bbSettingsServiceProvider', GridState]); require('./grid.controller.js'); buildbot-3.4.0/www/grid_view/src/module/main.module.spec.js000066400000000000000000000147621413250514000237050ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ beforeEach(function() { angular.mock.module(function($provide) { $provide.service('resultsService', function() { return 
{results2class() {}}; }); }); // Mock bbSettingsProvider angular.mock.module(function($provide) { $provide.provider('bbSettingsService', (function() { let group = undefined; const Cls = class { static initClass() { group = {}; } addSettingsGroup(g) { return g.items.map(function(i) { if (i.name === 'lazy_limit_waterfall') { i.default_value = 2; } return group[i.name] = {value: i.default_value}; }); } $get() { return { getSettingsGroup() { return group; }, save() {} }; } }; Cls.initClass(); return Cls; })() ); }); angular.mock.module('grid_view'); }); describe('Grid view controller', function() { // Test data let $rootScope, dataService, scope; const builders = [{ builderid: 1, tags: [] } , { builderid: 2, tags: ['a'] } , { builderid: 3, tags: ['a', 'b'] } , { builderid: 4, tags: ['b'] } ]; const builds = [{ buildid: 1, buildrequestid: 1, builderid: 1 } , { buildid: 2, buildrequestid: 2, builderid: 2 } , { buildid: 3, buildrequestid: 3, builderid: 4 } , { buildid: 4, buildrequestid: 4, builderid: 3 } , { buildid: 5, buildrequestid: 5, builderid: 1 } , { buildid: 6, buildrequestid: 6, builderid: 4 } , { buildid: 7, buildrequestid: 7, builderid: 3 } , { buildid: 8, buildrequestid: 8, builderid: 2 } ]; const buildrequests = [{ buildrequestid: 1, builderid: 1, buildsetid: 1 } , { buildrequestid: 2, builderid: 2, buildsetid: 1 } , { buildrequestid: 3, builderid: 1, buildsetid: 2 } , { buildrequestid: 4, builderid: 3, buildsetid: 2 } , { buildrequestid: 5, builderid: 4, buildsetid: 2 } , { buildrequestid: 6, builderid: 4, buildsetid: 3 } , { buildrequestid: 7, builderid: 3, buildsetid: 3 } , { buildrequestid: 8, builderid: 2, buildsetid: 3 } ]; const buildsets = [{ bsid: 1, sourcestamps: [ {ssid: 1} ] } , { bsid: 2, sourcestamps: [ {ssid: 2} ] } , { bsid: 3, sourcestamps: [ {ssid: 3} ] } ]; const changes = [{ changeid: 3, branch: 'refs/pull/3333/merge', sourcestamp: { ssid: 3 } } , { changeid: 1, branch: 'master', sourcestamp: { ssid: 1 } } , { changeid: 2, branch: null, 
sourcestamp: { ssid: 2 } } ]; let createController = (scope = ($rootScope = (dataService = null))); const injected = function($injector) { $rootScope = $injector.get('$rootScope'); dataService = $injector.get('dataService'); scope = $rootScope.$new(); dataService.when('builds', builds); dataService.when('builders', builders); dataService.when('changes', changes); dataService.when('buildrequests', buildrequests); dataService.when('buildsets', buildsets); // Create new controller using controller as syntax const $controller = $injector.get('$controller'); createController = () => $controller('gridController as C', // Inject controller dependencies {$scope: scope}) ; }; beforeEach(inject(injected)); it('should be defined', function() { createController(); expect(scope.C).toBeDefined(); }); it('should bind the builds, builders, changes, buildrequests and buildsets to scope', function() { createController(); $rootScope.$digest(); expect(scope.C.builds).toBeDefined(); expect(scope.C.builds.length).toBe(builds.length); expect(scope.C.builders).toBeDefined(); expect(scope.C.builders.length).toBe(builders.length); expect(scope.C.changes).toBeDefined(); expect(scope.C.changes.length).toBe(changes.length); expect(scope.C.buildrequests).toBeDefined(); expect(scope.C.buildrequests.length).toBe(buildrequests.length); expect(scope.C.buildsets).toBeDefined(); expect(scope.C.buildsets.length).toBe(buildsets.length); }); it('should list branches', function() { createController(); $rootScope.$digest(); scope.C.onChange(); expect(scope.branches).toBeDefined(); expect(scope.branches).toEqual(['refs/pull/3333/merge', 'master']); }); it('should only list changes of the selected branch', function() { createController(); $rootScope.$digest(); scope.C.branch = 'master'; scope.C.onChange(); expect(scope.changes).toBeDefined(); expect(scope.changes.length).toBe(2); }); it('should only list builders with builds of the selected branch', function() { createController(); $rootScope.$digest(); 
scope.C.branch = 'refs/pull/3333/merge'; scope.C.onChange(); expect(scope.builders).toBeDefined(); expect(scope.builders.length).toBe(3); }); it('should only list builders with the selected tags', function() { createController(); $rootScope.$digest(); scope.C.tags = ['b']; scope.C.onChange(); expect(scope.builders).toBeDefined(); expect(scope.builders.length).toBe(2); }); }); buildbot-3.4.0/www/grid_view/src/styles/000077500000000000000000000000001413250514000202325ustar00rootroot00000000000000buildbot-3.4.0/www/grid_view/src/styles/styles.less000066400000000000000000000012311413250514000224420ustar00rootroot00000000000000.grid { .load-indicator { width: 100%; height: 100%; z-index: 900; background-color: #ffffff; display: table; .spinner { display: table-cell; vertical-align: middle; text-align: center; p { font-weight: 300; margin-top: 10px; } } } .clickable { cursor: pointer; } .builder-tag { margin-right: 0.3em; margin-left: 0.3em; cursor: pointer; } .form-inline { label { padding-left: 0.5em; padding-right: 0.5em; } } } buildbot-3.4.0/www/grid_view/src/tests.webpack.js000066400000000000000000000004421413250514000220220ustar00rootroot00000000000000// This file is an entry point for angular tests // Avoids some weird issues when using webpack + angular. 
import 'angular'; import 'angular-mocks/angular-mocks'; import './module/main.module.js' const context = require.context('./', true, /\.spec.js$/); context.keys().forEach(context); buildbot-3.4.0/www/grid_view/test/000077500000000000000000000000001413250514000170775ustar00rootroot00000000000000buildbot-3.4.0/www/grid_view/test/main.js000066400000000000000000000002121413250514000203540ustar00rootroot00000000000000// app module is necessary for plugins, but only in the test environment angular.module("app", []).constant("config", {"url": "foourl"}); buildbot-3.4.0/www/grid_view/webpack.config.js000066400000000000000000000013321413250514000213350ustar00rootroot00000000000000'use strict'; const common = require('buildbot-build-common'); const env = require('yargs').argv.env; const pkg = require('./package.json'); var event = process.env.npm_lifecycle_event; var isTest = event === 'test' || event === 'test-watch'; var isProd = env === 'prod'; module.exports = function() { return common.createTemplateWebpackConfig({ entry: { scripts: './src/module/main.module.js', styles: './src/styles/styles.less', }, libraryName: pkg.name, pluginName: pkg.plugin_name, dirname: __dirname, isTest: isTest, isProd: isProd, outputPath: __dirname + '/buildbot_grid_view/static', extractStyles: true, }); }(); buildbot-3.4.0/www/grid_view/yarn.lock000066400000000000000000011445051413250514000177550ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
# yarn lockfile v1 "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.7.4.tgz#37e864532200cb6b50ee9a4045f5f817840166ab" integrity sha512-+bYbx56j4nYBmpsWtnPUsKW3NdnYxbqyfrP2w9wILBuHzdfIKz9prieZK0DFPyIzkjYVUe4QkusGL07r5pXznQ== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helpers" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" convert-source-map "^1.7.0" debug "^4.1.0" json5 "^2.1.0" lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" "@babel/generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.7.4.tgz#db651e2840ca9aa66f327dcec1dc5f5fa9611369" integrity sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg== dependencies: "@babel/types" "^7.7.4" jsesc "^2.5.1" lodash "^4.17.13" source-map "^0.5.0" "@babel/helper-annotate-as-pure@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.4.tgz#bb3faf1e74b74bd547e867e48f551fa6b098b6ce" integrity sha512-2BQmQgECKzYKFPpiycoF9tlb5HA4lrVyAmLLVK177EcQAqjVLciUb2/R+n1boQ9y5ENV3uz2ZqiNw7QMBBw1Og== dependencies: "@babel/types" "^7.7.4" "@babel/helper-builder-binary-assignment-operator-visitor@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.4.tgz#5f73f2b28580e224b5b9bd03146a4015d6217f5f" integrity 
sha512-Biq/d/WtvfftWZ9Uf39hbPBYDUo986m5Bb4zhkeYDGUllF43D+nUe5M6Vuo6/8JDK/0YX/uBdeoQpyaNhNugZQ== dependencies: "@babel/helper-explode-assignable-expression" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-call-delegate@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.7.4.tgz#621b83e596722b50c0066f9dc37d3232e461b801" integrity sha512-8JH9/B7J7tCYJ2PpWVpw9JhPuEVHztagNVuQAFBVFYluRMlpG7F1CgKEgGeL6KFqcsIa92ZYVj6DSc0XwmN1ZA== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-create-regexp-features-plugin@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.4.tgz#6d5762359fd34f4da1500e4cff9955b5299aaf59" integrity sha512-Mt+jBKaxL0zfOIWrfQpnfYCN7/rS6GKx6CCCfuoqVVd+17R8zNDlzVYmIi9qyb2wOk002NsmSTDymkIygDUH7A== dependencies: "@babel/helper-regex" "^7.4.4" regexpu-core "^4.6.0" "@babel/helper-define-map@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz#2841bf92eb8bd9c906851546fe6b9d45e162f176" integrity sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-explode-assignable-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.4.tgz#fa700878e008d85dc51ba43e9fb835cddfe05c84" integrity sha512-2/SicuFrNSXsZNBxe5UGdLr+HZg+raWBLE9vC98bdYOKX/U6PY0mdGlYUJdtTDPSU0Lw0PNbKKDpwYHJLn2jLg== dependencies: "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-function-name@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz#ab6e041e7135d436d8f0a3eca15de5b67a341a2e" integrity sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ== dependencies: "@babel/helper-get-function-arity" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-get-function-arity@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz#cb46348d2f8808e632f0ab048172130e636005f0" integrity sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA== dependencies: "@babel/types" "^7.7.4" "@babel/helper-hoist-variables@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.4.tgz#612384e3d823fdfaaf9fce31550fe5d4db0f3d12" integrity sha512-wQC4xyvc1Jo/FnLirL6CEgPgPCa8M74tOdjWpRhQYapz5JC7u3NYU1zCVoVAGCE3EaIP9T1A3iW0WLJ+reZlpQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-member-expression-to-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.4.tgz#356438e2569df7321a8326644d4b790d2122cb74" integrity sha512-9KcA1X2E3OjXl/ykfMMInBK+uVdfIVakVe7W7Lg3wfXUNyS3Q1HWLFRwZIjhqiCGbslummPDnmb7vIekS0C1vw== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-imports@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.7.4.tgz#e5a92529f8888bf319a6376abfbd1cebc491ad91" integrity sha512-dGcrX6K9l8258WFjyDLJwuVKxR4XZfU0/vTUgOQYWEnRD8mgr+p4d6fCUMq/ys0h4CCt/S5JhbvtyErjWouAUQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-transforms@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.7.4.tgz#8d7cdb1e1f8ea3d8c38b067345924ac4f8e0879a" 
integrity sha512-ehGBu4mXrhs0FxAqN8tWkzF8GSIGAiEumu4ONZ/hD9M88uHcD+Yu2ttKfOCgwzoesJOJrtQh7trI5YPbRtMmnA== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-simple-access" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-optimise-call-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.4.tgz#034af31370d2995242aa4df402c3b7794b2dcdf2" integrity sha512-VB7gWZ2fDkSuqW6b1AKXkJWO5NyNI3bFL/kK79/30moK57blr6NbH8xcl2XcKCwOmJosftWunZqfO84IGq3ZZg== dependencies: "@babel/types" "^7.7.4" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.4.tgz#c68c2407350d9af0e061ed6726afb4fff16d0234" integrity sha512-Sk4xmtVdM9sA/jCI80f+KS+Md+ZHIpjuqmYPk1M7F/upHou5e4ReYmExAiu6PVe65BhJPZA2CY9x9k4BqE5klw== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-wrap-function" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-replace-supers@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.7.4.tgz#3c881a6a6a7571275a72d82e6107126ec9e2cdd2" integrity sha512-pP0tfgg9hsZWo5ZboYGuBn/bbYT/hdLPVSS4NMmiRJdwWhP0IznPwN9AE1JwyGsjSPLC364I0Qh5p+EPkGPNpg== dependencies: "@babel/helper-member-expression-to-functions" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-simple-access@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.7.4.tgz#a169a0adb1b5f418cfc19f22586b2ebf58a9a294" integrity sha512-zK7THeEXfan7UlWsG2A6CI/L9jVnI5+xxKZOdej39Y0YtDYKx9raHk5F2EtK9K8DHRTihYwg20ADt9S36GR78A== dependencies: "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-split-export-declaration@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz#57292af60443c4a3622cf74040ddc28e68336fd8" integrity sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug== dependencies: "@babel/types" "^7.7.4" "@babel/helper-validator-identifier@^7.12.11": version "7.12.11" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== "@babel/helper-wrap-function@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz#37ab7fed5150e22d9d7266e830072c0cdd8baace" integrity sha512-VsfzZt6wmsocOaVU0OokwrIytHND55yvyT4BPB9AIIgwr8+x7617hetdJTsuGwygN5RC6mxA9EJztTjuwm2ofg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helpers@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.7.4.tgz#62c215b9e6c712dadc15a9a0dcab76c92a940302" integrity sha512-ak5NGZGJ6LV85Q1Zc9gn2n+ayXOizryhjSUBTdu5ih1tlVCJeuQENzc4ItyCVhINVXvIT/ZQ4mheGIsfBkpskg== dependencies: "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/highlight@^7.0.0": version "7.5.0" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" "@babel/parser@^7.6.0", "@babel/parser@^7.9.6": version "7.13.9" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.13.9.tgz#ca34cb95e1c2dd126863a84465ae8ef66114be99" integrity sha512-nEUfRiARCcaVo3ny3ZQjURjHQZUo/JkEw7rLlSZy/psWGnvwXFtPcr6jb7Yb41DVW5LTe6KRq9LGleRNsg1Frw== "@babel/parser@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.7.4.tgz#75ab2d7110c2cf2fa949959afb05fa346d2231bb" integrity sha512-jIwvLO0zCL+O/LmEJQjWA75MQTWwx3c3u2JOTDK5D3/9egrWRRA0/0hk9XXywYnXZVVpzrBYeIQTmhwUaePI9g== "@babel/plugin-proposal-async-generator-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz#0351c5ac0a9e927845fffd5b82af476947b7ce6d" integrity sha512-1ypyZvGRXriY/QP668+s8sFr2mqinhkRDMPSQLNghCQE+GAkFtp+wkHVvg2+Hdki8gwP+NFzJBJ/N1BfzCCDEw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-proposal-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.4.tgz#dde64a7f127691758cbfed6cf70de0fa5879d52d" integrity 
sha512-StH+nGAdO6qDB1l8sZ5UBV8AC3F2VW2I8Vfld73TMKyptMU9DY5YsJAS8U81+vEtxcH3Y/La0wG0btDrhpnhjQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.7.4.tgz#7700a6bfda771d8dc81973249eac416c6b4c697d" integrity sha512-wQvt3akcBTfLU/wYoqm/ws7YOAQKu8EVJEvHip/mzkNtjaclQoCCIqKXFP5/eyfnfbQCDV3OLRIK3mIVyXuZlw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.7.4.tgz#cc57849894a5c774214178c8ab64f6334ec8af71" integrity sha512-rnpnZR3/iWKmiQyJ3LKJpSwLDcX/nSXhdLk4Aq/tXOApIvyu7qoabrige0ylsAJffaUC51WiBu209Q0U+86OWQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.7.4.tgz#ec21e8aeb09ec6711bc0a39ca49520abee1de379" integrity sha512-DyM7U2bnsQerCQ+sejcTNZh8KQEUuC3ufzdnVnSiUv/qoGJp2Z3hanKL18KDhsBT5Wj6a7CMT5mdyCNJsEaA9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.4.tgz#7c239ccaf09470dbe1d453d50057460e84517ebb" integrity sha512-cHgqHgYvffluZk85dJ02vloErm3Y6xtH+2noOBOJ2kXOJH3aVCDnj5eR/lVNlTnYu4hndAPJD3rTFjW3qee0PA== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-async-generators@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.7.4.tgz#331aaf310a10c80c44a66b238b6e49132bd3c889" integrity sha512-Li4+EjSpBgxcsmeEF8IFcfV/+yJGxHXDirDkEoyFjumuwbmfCVHUt0HuowD/iGM7OhIRyXJH9YXxqiH6N815+g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import@^7.2.0", "@babel/plugin-syntax-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.7.4.tgz#29ca3b4415abfe4a5ec381e903862ad1a54c3aec" integrity sha512-jHQW0vbRGvwQNgyVxwDh4yuXu4bH1f5/EICJLAhl1SblLs2CDhrsmCk+v5XLdE9wxtAFRyxx+P//Iw+a5L/tTg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.7.4.tgz#86e63f7d2e22f9e27129ac4e83ea989a382e86cc" integrity sha512-QpGupahTQW1mHRXddMG5srgpHWqRLwJnJZKXTigB9RPFCCGbDGCgBeM/iC82ICXp414WeYx/tD54w7M2qRqTMg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.7.4.tgz#47cf220d19d6d0d7b154304701f468fc1cc6ff46" integrity sha512-mObR+r+KZq0XhRVS2BrBKBpr5jqrqzlPvS9C9vuOf5ilSwzloAl7RPWLrgKdWS6IreaVrjHxTjtyqFiOisaCwg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.7.4.tgz#a3e38f59f4b6233867b4a92dcb0ee05b2c334aa6" integrity sha512-4ZSuzWgFxqHRE31Glu+fEr/MirNZOMYmD/0BhBWyLyOOQz/gTAl7QmWm2hX1QxEIXsr2vkdlwxIzTyiYRC4xcQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-top-level-await@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.4.tgz#bd7d8fa7b9fee793a36e4027fd6dd1aa32f946da" integrity sha512-wdsOw0MvkL1UIgiQ/IFr3ETcfv1xb8RMM0H9wbiDyLaJFyiDg5oZvDLCXosIXmFeIlweML5iOBXAkqddkYNizg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-arrow-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.7.4.tgz#76309bd578addd8aee3b379d809c802305a98a12" integrity sha512-zUXy3e8jBNPiffmqkHRNDdZM2r8DWhCB7HhcoyZjiK1TxYEluLHAvQuYnTT+ARqRpabWqy/NHkO6e3MsYB5YfA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.4.tgz#694cbeae6d613a34ef0292713fa42fb45c4470ba" integrity sha512-zpUTZphp5nHokuy8yLlyafxCJ0rSlFoSHypTUWgpdwoDXWQcseaect7cJ8Ppk6nunOM6+5rPMkod4OYKPR5MUg== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.7.4.tgz#d0d9d5c269c78eaea76227ace214b8d01e4d837b" integrity sha512-kqtQzwtKcpPclHYjLK//3lH8OFsCDuDJBaFhVwf8kqdnF6MN4l618UDlcA7TfRs3FayrHj+svYnSX8MC9zmUyQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-block-scoping@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.7.4.tgz#200aad0dcd6bb80372f94d9e628ea062c58bf224" integrity 
sha512-2VBe9u0G+fDt9B5OV5DQH4KBf5DoiNkwFKOz0TCvBWvdAN2rOykCTkrL+jTLxfCAm76l9Qo5OqL7HBOx2dWggg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" lodash "^4.17.13" "@babel/plugin-transform-classes@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.4.tgz#c92c14be0a1399e15df72667067a8f510c9400ec" integrity sha512-sK1mjWat7K+buWRuImEzjNf68qrKcrddtpQo3swi9j7dUcG6y6R6+Di039QN2bD1dykeswlagupEmpOatFHHUg== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-define-map" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" globals "^11.1.0" "@babel/plugin-transform-computed-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.7.4.tgz#e856c1628d3238ffe12d668eb42559f79a81910d" integrity sha512-bSNsOsZnlpLLyQew35rl4Fma3yKWqK3ImWMSC/Nc+6nGjC9s5NFWAer1YQ899/6s9HxO2zQC1WoFNfkOqRkqRQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-destructuring@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.7.4.tgz#2b713729e5054a1135097b6a67da1b6fe8789267" integrity sha512-4jFMXI1Cu2aXbcXXl8Lr6YubCn6Oc7k9lLsu8v61TZh+1jny2BWmdtvY9zSUlLdGUvcy9DMAWyZEOqjsbeg/wA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-dotall-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.4.tgz#f7ccda61118c5b7a2599a72d5e3210884a021e96" integrity sha512-mk0cH1zyMa/XHeb6LOTXTbG7uIJ8Rrjlzu91pUx/KS3JpcgaTDwMS8kM+ar8SLOvlL2Lofi4CGBAjCo3a2x+lw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" 
"@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-duplicate-keys@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.7.4.tgz#3d21731a42e3f598a73835299dd0169c3b90ac91" integrity sha512-g1y4/G6xGWMD85Tlft5XedGaZBCIVN+/P0bs6eabmcPP9egFleMAo65OOjlhcz1njpwagyY3t0nsQC9oTFegJA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-exponentiation-operator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.7.4.tgz#dd30c0191e3a1ba19bcc7e389bdfddc0729d5db9" integrity sha512-MCqiLfCKm6KEA1dglf6Uqq1ElDIZwFuzz1WH5mTf8k2uQSxEJMbOIEh7IZv7uichr7PMfi5YVSrr1vz+ipp7AQ== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-for-of@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.7.4.tgz#248800e3a5e507b1f103d8b4ca998e77c63932bc" integrity sha512-zZ1fD1B8keYtEcKF+M1TROfeHTKnijcVQm0yO/Yu1f7qoDoxEIc/+GX6Go430Bg84eM/xwPFp0+h4EbZg7epAA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.4.tgz#75a6d3303d50db638ff8b5385d12451c865025b1" integrity sha512-E/x09TvjHNhsULs2IusN+aJNRV5zKwxu1cpirZyRPw+FyyIKEHPXTsadj48bVpc1R5Qq1B5ZkzumuFLytnbT6g== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.7.4.tgz#27fe87d2b5017a2a5a34d1c41a6b9f6a6262643e" integrity 
sha512-X2MSV7LfJFm4aZfxd0yLVFrEXAgPqYoDG53Br/tCKiKYfX0MjVjQeWPIhPHHsCqzwQANq+FLN786fF5rgLS+gw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-member-expression-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.7.4.tgz#aee127f2f3339fc34ce5e3055d7ffbf7aa26f19a" integrity sha512-9VMwMO7i69LHTesL0RdGy93JU6a+qOPuvB4F4d0kR0zyVjJRVJRaoaGjhtki6SzQUu8yen/vxPKN6CWnCUw6bA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-amd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.7.4.tgz#276b3845ca2b228f2995e453adc2e6f54d72fb71" integrity sha512-/542/5LNA18YDtg1F+QHvvUSlxdvjZoD/aldQwkq+E3WCkbEjNSN9zdrOXaSlfg3IfGi22ijzecklF/A7kVZFQ== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-commonjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.4.tgz#bee4386e550446343dd52a571eda47851ff857a3" integrity sha512-k8iVS7Jhc367IcNF53KCwIXtKAH7czev866ThsTgy8CwlXjnKZna2VHwChglzLleYrcHz1eQEIJlGRQxB53nqA== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.7.4" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-systemjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.4.tgz#cd98152339d3e763dfe838b7d4273edaf520bb30" integrity sha512-y2c96hmcsUi6LrMqvmNDPBBiGCiQu0aYqpHatVVu6kD4mFEXKjyNxd/drc18XXAf9dv7UXjrZwBVmTTGaGP8iw== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-umd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.4.tgz#1027c355a118de0aae9fee00ad7813c584d9061f" integrity sha512-u2B8TIi0qZI4j8q4C51ktfO7E3cQ0qnaXFI1/OXITordD40tt17g/sXqgNNCcMTcBFKrUPcGDx+TBJuZxLx7tw== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-named-capturing-groups-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.4.tgz#fb3bcc4ee4198e7385805007373d6b6f42c98220" integrity sha512-jBUkiqLKvUWpv9GLSuHUFYdmHg0ujC1JEYoZUfeOOfNydZXp1sXObgyPatpcwjWgsdBGsagWW0cdJpX/DO2jMw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/plugin-transform-new-target@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.7.4.tgz#4a0753d2d60639437be07b592a9e58ee00720167" integrity sha512-CnPRiNtOG1vRodnsyGX37bHQleHE14B9dnnlgSeEs3ek3fHN1A1SScglTCg1sfbe7sRQ2BUcpgpTpWSfMKz3gg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-object-super@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.7.4.tgz#48488937a2d586c0148451bf51af9d7dda567262" integrity sha512-ho+dAEhC2aRnff2JCA0SAK7V2R62zJd/7dmtoe7MHcso4C2mS+vZjn1Pb1pCVZvJs1mgsvv5+7sT+m3Bysb6eg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/plugin-transform-parameters@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.7.4.tgz#da4555c97f39b51ac089d31c7380f03bca4075ce" integrity 
sha512-VJwhVePWPa0DqE9vcfptaJSzNDKrWU/4FbYCjZERtmqEs05g3UMXnYMZoXja7JAJ7Y7sPZipwm/pGApZt7wHlw== dependencies: "@babel/helper-call-delegate" "^7.7.4" "@babel/helper-get-function-arity" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-property-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.7.4.tgz#2388d6505ef89b266103f450f9167e6bd73f98c2" integrity sha512-MatJhlC4iHsIskWYyawl53KuHrt+kALSADLQQ/HkhTjX954fkxIEh4q5slL4oRAnsm/eDoZ4q0CIZpcqBuxhJQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-regenerator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.4.tgz#d18eac0312a70152d7d914cbed2dc3999601cfc0" integrity sha512-e7MWl5UJvmPEwFJTwkBlPmqixCtr9yAASBqff4ggXTNicZiwbF8Eefzm6NVgfiBp7JdAGItecnctKTgH44q2Jw== dependencies: regenerator-transform "^0.14.0" "@babel/plugin-transform-reserved-words@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.7.4.tgz#6a7cf123ad175bb5c69aec8f6f0770387ed3f1eb" integrity sha512-OrPiUB5s5XvkCO1lS7D8ZtHcswIC57j62acAnJZKqGGnHP+TIc/ljQSrgdX/QyOTdEK5COAhuc820Hi1q2UgLQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.7.4.tgz#51fe458c1c1fa98a8b07934f4ed38b6cd62177a6" integrity sha512-O8kSkS5fP74Ad/8pfsCMGa8sBRdLxYoSReaARRNSz3FbFQj3z/QUvoUmJ28gn9BO93YfnXc3j+Xyaqe8cKDNBQ== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" resolve "^1.8.1" semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.7.4.tgz#74a0a9b2f6d67a684c6fbfd5f0458eb7ba99891e" integrity sha512-q+suddWRfIcnyG5YiDP58sT65AJDZSUhXQDZE3r04AuqD6d/XLaQPPXSBzP2zGerkgBivqtQm9XKGLuHqBID6Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.7.4.tgz#aa673b356fe6b7e70d69b6e33a17fef641008578" integrity sha512-8OSs0FLe5/80cndziPlg4R0K6HcWSM0zyNhHhLsmw/Nc5MaA49cAsnoJ/t/YZf8qkG7fD+UjTRaApVDB526d7Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-sticky-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.7.4.tgz#ffb68c05090c30732076b1285dc1401b404a123c" integrity sha512-Ls2NASyL6qtVe1H1hXts9yuEeONV2TJZmplLONkMPUG158CtmnrzW5Q5teibM5UVOFjG0D3IC5mzXR6pPpUY7A== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.0.0" "@babel/plugin-transform-template-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.7.4.tgz#1eb6411736dd3fe87dbd20cc6668e5121c17d604" integrity sha512-sA+KxLwF3QwGj5abMHkHgshp9+rRz+oY9uoRil4CyLtgEuE/88dpkeWgNk5qKVsJE9iSfly3nvHapdRiIS2wnQ== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-typeof-symbol@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.7.4.tgz#3174626214f2d6de322882e498a38e8371b2140e" integrity sha512-KQPUQ/7mqe2m0B8VecdyaW5XcQYaePyl9R7IsKd+irzj6jvbhoGnRE+M0aNkyAzI07VfUQ9266L5xMARitV3wg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-unicode-regex@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.4.tgz#a3c0f65b117c4c81c5b6484f2a5e7b95346b83ae" integrity sha512-N77UUIV+WCvE+5yHw+oks3m18/umd7y392Zv7mYTpFqHtkpcc+QUz+gLJNTWVlWROIWeLqY0f3OjZxV5TcXnRw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/preset-env@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.7.4.tgz#ccaf309ae8d1ee2409c85a4e2b5e280ceee830f8" integrity sha512-Dg+ciGJjwvC1NIe/DGblMbcGq1HOtKbw8RLl4nIjlfcILKEOkWT/vRqPpumswABEBVudii6dnVwrBtzD7ibm4g== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-proposal-async-generator-functions" "^7.7.4" "@babel/plugin-proposal-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-syntax-top-level-await" "^7.7.4" "@babel/plugin-transform-arrow-functions" "^7.7.4" "@babel/plugin-transform-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions" "^7.7.4" "@babel/plugin-transform-block-scoping" "^7.7.4" "@babel/plugin-transform-classes" "^7.7.4" "@babel/plugin-transform-computed-properties" "^7.7.4" "@babel/plugin-transform-destructuring" "^7.7.4" "@babel/plugin-transform-dotall-regex" "^7.7.4" "@babel/plugin-transform-duplicate-keys" "^7.7.4" "@babel/plugin-transform-exponentiation-operator" "^7.7.4" "@babel/plugin-transform-for-of" "^7.7.4" "@babel/plugin-transform-function-name" "^7.7.4" "@babel/plugin-transform-literals" "^7.7.4" 
"@babel/plugin-transform-member-expression-literals" "^7.7.4" "@babel/plugin-transform-modules-amd" "^7.7.4" "@babel/plugin-transform-modules-commonjs" "^7.7.4" "@babel/plugin-transform-modules-systemjs" "^7.7.4" "@babel/plugin-transform-modules-umd" "^7.7.4" "@babel/plugin-transform-named-capturing-groups-regex" "^7.7.4" "@babel/plugin-transform-new-target" "^7.7.4" "@babel/plugin-transform-object-super" "^7.7.4" "@babel/plugin-transform-parameters" "^7.7.4" "@babel/plugin-transform-property-literals" "^7.7.4" "@babel/plugin-transform-regenerator" "^7.7.4" "@babel/plugin-transform-reserved-words" "^7.7.4" "@babel/plugin-transform-shorthand-properties" "^7.7.4" "@babel/plugin-transform-spread" "^7.7.4" "@babel/plugin-transform-sticky-regex" "^7.7.4" "@babel/plugin-transform-template-literals" "^7.7.4" "@babel/plugin-transform-typeof-symbol" "^7.7.4" "@babel/plugin-transform-unicode-regex" "^7.7.4" "@babel/types" "^7.7.4" browserslist "^4.6.0" core-js-compat "^3.1.1" invariant "^2.2.2" js-levenshtein "^1.1.3" semver "^5.5.0" "@babel/runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.7.4.tgz#b23a856751e4bf099262f867767889c0e3fe175b" integrity sha512-r24eVUUr0QqNZa+qrImUk8fn5SPhHq+IfYvIoIMg0do3GdK9sMdiLKP3GYVVaxpPKORgm8KRKaNTEhAjgIpLMw== dependencies: regenerator-runtime "^0.13.2" "@babel/template@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.7.4.tgz#428a7d9eecffe27deac0a98e23bf8e3675d2a77b" integrity sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw== dependencies: "@babel/code-frame" "^7.0.0" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" "@babel/traverse@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.7.4.tgz#9c1e7c60fb679fe4fcfaa42500833333c2058558" integrity sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw== dependencies: 
"@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.13" "@babel/types@^7.6.1", "@babel/types@^7.9.6": version "7.13.0" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.13.0.tgz#74424d2816f0171b4100f0ab34e9a374efdf7f80" integrity sha512-hE+HE8rnG1Z6Wzo+MhaKE5lM5eMx71T4EHJgku2E3xIfaULhDcxiiRxUYgwX8qwP1BBSlag+TdGOt6JAidIZTA== dependencies: "@babel/helper-validator-identifier" "^7.12.11" lodash "^4.17.19" to-fast-properties "^2.0.0" "@babel/types@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.7.4.tgz#516570d539e44ddf308c07569c258ff94fde9193" integrity sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA== dependencies: esutils "^2.0.2" lodash "^4.17.13" to-fast-properties "^2.0.0" "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== "@types/glob@^7.1.1": version "7.1.1" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== dependencies: "@types/events" "*" "@types/minimatch" "*" "@types/node" "*" "@types/minimatch@*": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== "@types/node@*": version "12.12.12" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.12.tgz#529bc3e73dbb35dd9e90b0a1c83606a9d3264bdb" integrity 
sha512-MGuvYJrPU0HUwqF7LqvIj50RZUX23Z+m583KBygKYUZLlZ88n6w28XRNJRJgsHukLEnLz6w6SvxZoLgbr5wLqQ== "@uirouter/angularjs@^1.0.15": version "1.0.23" resolved "https://registry.yarnpkg.com/@uirouter/angularjs/-/angularjs-1.0.23.tgz#aeec0f96b0c42187c5044ef244ba6ccb75a5d835" integrity sha512-r4hLSw7R3mwXGC5Sq7yxNlBK1sSzQUm/1MzigwwYRHoMO5uKcBPUhxFYx5U7kufP2Xl1165KeZvRsLCh0/Z1ng== dependencies: "@uirouter/core" "6.0.1" "@uirouter/core@6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/@uirouter/core/-/core-6.0.1.tgz#93b02a5d178a7ab7313f34b7b3f019a000d23396" integrity sha512-mHCutiHtDDRKYmrJ92XPKDoSb2bgqaDyUpHdF4hUE+riwgkCvGdBjL8u+VDTcV3slBAk6B0LBIOIajjWkkObbQ== "@webassemblyjs/ast@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== dependencies: "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@webassemblyjs/floating-point-hex-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== "@webassemblyjs/helper-api-error@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== "@webassemblyjs/helper-buffer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" integrity 
sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== "@webassemblyjs/helper-code-frame@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== dependencies: "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/helper-fsm@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" integrity sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== "@webassemblyjs/helper-module-context@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== dependencies: "@webassemblyjs/ast" "1.8.5" mamacro "^0.0.3" "@webassemblyjs/helper-wasm-bytecode@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== "@webassemblyjs/helper-wasm-section@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/ieee754@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" integrity sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== "@webassemblyjs/wasm-edit@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" integrity sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/helper-wasm-section" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-opt" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/wasm-gen@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wasm-opt@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" integrity sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wasm-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" integrity sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wast-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/floating-point-hex-parser" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-code-frame" "1.8.5" "@webassemblyjs/helper-fsm" "1.8.5" "@xtuc/long" "4.2.2" "@webassemblyjs/wast-printer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" integrity 
sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== abbrev@1.0.x: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU= accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== dependencies: mime-types "~2.1.24" negotiator "0.6.2" acorn@^6.2.1: version "6.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.3.0.tgz#0087509119ffa4fc0a0041d1e93a417e68cb856e" integrity sha512-/czfa8BwS88b9gWQVhc8eknunSA2DoJpJyTQkhheIf5E48u1N0R4q/YxxsAeqRrmK9TQ/uYfgLDfZo91UlANIA== acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" integrity sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8= ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" integrity 
sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: version "3.4.1" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" integrity sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== ajv@^5.0.0: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= dependencies: co "^4.6.0" fast-deep-equal "^1.0.0" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.3.0" ajv@^6.1.0, ajv@^6.10.2, ajv@^6.5.5: version "6.10.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== dependencies: fast-deep-equal "^2.0.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" integrity sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU= angular-animate@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-animate/-/angular-animate-1.7.9.tgz#a397f82434c1e7ed5b7a298fa70fc3de989a6785" integrity sha512-fV+AISy/HTzurQH2ngsJg+lLIvfu0ahc1h4AYKauaXVw97rZc2k4iUA1bMstiEyClsdayQX568kjQc1NK+oYhw== angular-mocks@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-mocks/-/angular-mocks-1.7.9.tgz#0a3b7e28b9a493b4e3010ed2b0f69a68e9b4f79b" integrity sha512-LQRqqiV3sZ7NTHBnNmLT0bXtE5e81t97+hkJ56oU0k3dqKv1s6F+nBWRlOVzqHWPGFOiPS8ZJVdrS8DFzHyNIA== angular-ui-bootstrap@^2.5.6: version "2.5.6" resolved "https://registry.yarnpkg.com/angular-ui-bootstrap/-/angular-ui-bootstrap-2.5.6.tgz#23937322ec641a6fbee16498cc32452aa199e7c5" integrity 
sha512-yzcHpPMLQl0232nDzm5P4iAFTFQ9dMw0QgFLuKYbDj9M0xJ62z0oudYD/Lvh1pWfRsukiytP4Xj6BHOSrSXP8A== angular@^1.7.9, angular@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/angular/-/angular-1.8.0.tgz#b1ec179887869215cab6dfd0df2e42caa65b1b51" integrity sha512-VdaMx+Qk0Skla7B5gw77a8hzlcOakwF8mjlW13DpIWIDlfqwAbSSLfd8N/qZnzEmQF4jC4iofInd3gE7vL8ZZg== ansi-colors@^3.0.0: version "3.2.4" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== ansi-html@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-regex@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.0, ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" anymatch@^2.0.0: version "2.0.0" 
resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== dependencies: micromatch "^3.1.4" normalize-path "^2.1.1" anymatch@~3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" aproba@^1.0.3, aproba@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" arr-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-union@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= array-flatten@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= array-flatten@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= array-unique@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= arraybuffer.slice@~0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675" integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog== asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= asn1.js@^4.0.0: version "4.10.1" resolved 
"https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== dependencies: bn.js "^4.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-never@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/assert-never/-/assert-never-1.2.1.tgz#11f0e363bf146205fb08193b5c7b90f4d1cf44fe" integrity sha512-TaTivMB6pYI1kXwrFlEhLeGfOqoDNdTxjCdwRfFFkEA30Eu+k48W34nlok2EYWJfFFzqaEmichdNM7th6M5HNw== assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assert@^1.1.1: version "1.5.0" resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== dependencies: object-assign "^4.1.1" util "0.10.3" assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= async-each@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async-limiter@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" integrity 
sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== async@1.x: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= async@^2.0.0, async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== dependencies: lodash "^4.17.14" asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= atob@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== autoprefixer@^9.5.1: version "9.7.2" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.7.2.tgz#26cf729fbb709323b40171a874304884dcceffed" integrity sha512-LCAfcdej1182uVvPOZnytbq61AhnOZ/4JelDaJGDeNwewyU1AMaNthcHsyz1NRjTmd2FkurMckLWfkHg3Z//KA== dependencies: browserslist "^4.7.3" caniuse-lite "^1.0.30001010" chalk "^2.4.2" normalize-range "^0.1.2" num2fraction "^1.2.2" postcss "^7.0.23" postcss-value-parser "^4.0.2" aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.26.0: version "6.26.0" resolved 
"https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" js-tokens "^3.0.2" babel-generator@^6.18.0: version "6.26.1" resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== dependencies: babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" detect-indent "^4.0.0" jsesc "^1.3.0" lodash "^4.17.4" source-map "^0.5.7" trim-right "^1.0.1" babel-loader@^8.0.5: version "8.0.6" resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb" integrity sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw== dependencies: find-cache-dir "^2.0.0" loader-utils "^1.0.2" mkdirp "^0.5.1" pify "^4.0.1" babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= dependencies: babel-runtime "^6.22.0" babel-plugin-dynamic-import-node@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== dependencies: object.assign "^4.1.0" babel-runtime@^6.0.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= dependencies: core-js "^2.4.0" regenerator-runtime "^0.11.0" babel-template@^6.16.0: version "6.26.0" 
resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= dependencies: babel-runtime "^6.26.0" babel-traverse "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" lodash "^4.17.4" babel-traverse@^6.18.0, babel-traverse@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= dependencies: babel-code-frame "^6.26.0" babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" debug "^2.6.8" globals "^9.18.0" invariant "^2.2.2" lodash "^4.17.4" babel-types@^6.18.0, babel-types@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: babel-runtime "^6.26.0" esutils "^2.0.2" lodash "^4.17.4" to-fast-properties "^1.0.3" babel-walk@3.0.0-canary-5: version "3.0.0-canary-5" resolved "https://registry.yarnpkg.com/babel-walk/-/babel-walk-3.0.0-canary-5.tgz#f66ecd7298357aee44955f235a6ef54219104b11" integrity sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw== dependencies: "@babel/types" "^7.9.6" babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= 
base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" integrity sha1-c5JncZI7Whl0etZmqlzUv5xunOg= base64-js@^1.0.2: version "1.3.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== base64id@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY= base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" class-utils "^0.3.5" component-emitter "^1.2.1" define-property "^1.0.0" isobject "^3.0.1" mixin-deep "^1.2.0" pascalcase "^0.1.1" batch@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" better-assert@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" integrity sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI= dependencies: callsite "1.0.0" big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== big.js@^5.2.2: version "5.2.2" resolved 
"https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^1.0.0: version "1.13.1" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binary-extensions@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c" integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow== blob@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.5.tgz#d680eeef25f8cd91ad533f5b01eed48e64caf683" integrity sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig== bluebird@^3.3.0, bluebird@^3.5.5: version "3.7.1" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.1.tgz#df70e302b471d7473489acf26a93d63b53f874de" integrity sha512-DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg== bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== body-parser@1.19.0, body-parser@^1.16.1: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== dependencies: bytes "3.1.0" content-type "~1.0.4" debug "2.6.9" depd "~1.1.2" http-errors "1.7.2" iconv-lite "0.4.24" on-finished "~2.3.0" qs "6.7.0" raw-body "2.4.0" 
type-is "~1.6.17" bonjour@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= dependencies: array-flatten "^2.1.0" deep-equal "^1.0.1" dns-equal "^1.0.0" dns-txt "^2.0.2" multicast-dns "^6.0.1" multicast-dns-service-types "^1.1.0" boolbase@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" braces@^2.3.1, braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" repeat-element "^1.1.2" snapdragon "^0.8.1" snapdragon-node "^2.0.1" split-string "^3.0.2" to-regex "^3.0.1" braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" brorand@^1.0.1, brorand@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" resolved 
"https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== dependencies: buffer-xor "^1.0.3" cipher-base "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.3" inherits "^2.0.1" safe-buffer "^5.0.1" browserify-cipher@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== dependencies: browserify-aes "^1.0.4" browserify-des "^1.0.0" evp_bytestokey "^1.0.0" browserify-des@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== dependencies: cipher-base "^1.0.1" des.js "^1.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" browserify-rsa@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= dependencies: bn.js "^4.1.0" randombytes "^2.0.1" browserify-sign@^4.0.0: version "4.0.4" resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= dependencies: bn.js "^4.1.1" browserify-rsa "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.2" elliptic "^6.0.0" inherits "^2.0.1" parse-asn1 "^5.0.0" browserify-zlib@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== dependencies: pako "~1.0.5" 
browserslist@^4.6.0, browserslist@^4.7.3: version "4.16.6" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== dependencies: caniuse-lite "^1.0.30001219" colorette "^1.2.2" electron-to-chromium "^1.3.723" escalade "^3.1.1" node-releases "^1.1.71" buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== buffer-alloc@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== dependencies: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" buffer-fill@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== buffer-indexof@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= buffer@^4.3.0: version "4.9.2" resolved 
"https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" isarray "^1.0.0" "buildbot-build-common@link:../build_common": version "1.0.0" dependencies: "@babel/core" "^7.4.3" "@babel/plugin-syntax-dynamic-import" "^7.2.0" "@babel/plugin-transform-runtime" "^7.4.3" "@babel/preset-env" "^7.4.3" "@babel/runtime" "^7.4.3" autoprefixer "^9.5.1" babel-loader "^8.0.5" css-loader "^2.1.1" file-loader "^3.0.1" html-webpack-plugin "^3.2.0" import-glob-loader "^1.1.0" istanbul-instrumenter-loader "^3.0.1" jasmine-core "^3.4.0" karma "^4.1.0" karma-chrome-launcher "^2.2.0" karma-coverage "^1.1.2" karma-jasmine "^2.0.1" karma-sourcemap-loader "^0.3.7" karma-spec-reporter "^0.0.32" karma-webpack "^3.0.5" less "^3.9.0" less-loader "^5.0.0" mini-css-extract-plugin "^0.6.0" node-libs-browser "^2.2.0" null-loader "^1.0.0" postcss-loader "^3.0.0" pug "^3.0.1" raw-loader "^2.0.0" style-loader "^0.23.1" webpack "^4.30.0" webpack-cli "^3.3.1" webpack-dev-server "^3.3.1" webpack-fix-style-only-entries "^0.2.1" webpack-shell-plugin "^0.5.0" "buildbot-data-js@link:../data_module": version "3.0.1" dependencies: angular "^1.7.9" builtin-status-codes@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= bytes@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== cacache@^12.0.2: version "12.0.3" resolved 
"https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== dependencies: bluebird "^3.5.5" chownr "^1.1.1" figgy-pudding "^3.5.1" glob "^7.1.4" graceful-fs "^4.1.15" infer-owner "^1.0.3" lru-cache "^5.1.1" mississippi "^3.0.0" mkdirp "^0.5.1" move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.3" ssri "^6.0.1" unique-filename "^1.1.1" y18n "^4.0.0" cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" component-emitter "^1.2.1" get-value "^2.0.6" has-value "^1.0.0" isobject "^3.0.1" set-value "^2.0.0" to-object-path "^0.3.0" union-value "^1.0.0" unset-value "^1.0.0" caller-callsite@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= dependencies: callsites "^2.0.0" caller-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= dependencies: caller-callsite "^2.0.0" callsite@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" integrity sha1-KAOY5dZkvXQDi28JBRU+borxvCA= callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= camel-case@3.0.x: version "3.0.0" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" integrity 
sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= dependencies: no-case "^2.2.0" upper-case "^1.1.1" camelcase-keys@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= dependencies: camelcase "^2.0.0" map-obj "^1.0.0" camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= camelcase@^5.0.0, camelcase@^5.2.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== caniuse-lite@^1.0.30001010, caniuse-lite@^1.0.30001219: version "1.0.30001228" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz#bfdc5942cd3326fa51ee0b42fbef4da9d492a7fa" integrity sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A== caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" 
character-parser@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" integrity sha1-x84o821LzZdE5f/CxfzeHHMmH8A= dependencies: is-regex "^1.0.3" chokidar@^2.0.2, chokidar@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== dependencies: anymatch "^2.0.0" async-each "^1.0.1" braces "^2.3.2" glob-parent "^3.1.0" inherits "^2.0.3" is-binary-path "^1.0.0" is-glob "^4.0.0" normalize-path "^3.0.0" path-is-absolute "^1.0.0" readdirp "^2.2.1" upath "^1.1.1" optionalDependencies: fsevents "^1.2.7" chokidar@^3.0.0: version "3.3.0" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6" integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A== dependencies: anymatch "~3.1.1" braces "~3.0.2" glob-parent "~5.1.0" is-binary-path "~2.1.0" is-glob "~4.0.1" normalize-path "~3.0.0" readdirp "~3.2.0" optionalDependencies: fsevents "~2.1.1" chownr@^1.1.1, chownr@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== chrome-trace-event@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== dependencies: tslib "^1.9.0" cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" integrity 
sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" define-property "^0.2.5" isobject "^3.0.0" static-extend "^0.1.1" clean-css@4.2.x: version "4.2.1" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" integrity sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== dependencies: source-map "~0.6.0" cliui@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== dependencies: string-width "^2.1.1" strip-ansi "^4.0.0" wrap-ansi "^2.0.0" cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== dependencies: string-width "^3.1.0" strip-ansi "^5.2.0" wrap-ansi "^5.1.0" clone@^2.1.1, clone@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= 
collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit "^1.0.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colorette@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== colors@^1.1.0, colors@^1.1.2: version "1.4.0" resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@2.17.x: version "2.17.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== commander@^2.20.0: version "2.20.3" resolved 
"https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@~2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= component-bind@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E= component-emitter@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== component-inherit@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" integrity sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM= compressible@~2.0.16: version "2.0.17" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.17.tgz#6e8c108a16ad58384a977f3a482ca20bff2f38c1" integrity sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw== dependencies: mime-db ">= 1.40.0 < 2" compression@^1.7.4: version "1.7.4" resolved 
"https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" bytes "3.0.0" compressible "~2.0.16" debug "2.6.9" on-headers "~1.0.2" safe-buffer "5.1.2" vary "~1.1.2" concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= concat-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" inherits "^2.0.3" readable-stream "^2.2.2" typedarray "^0.0.6" connect-history-api-fallback@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== connect@^3.6.0: version "3.7.0" resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== dependencies: debug "2.6.9" finalhandler "1.1.2" parseurl "~1.3.3" utils-merge "1.0.1" console-browserify@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= constantinople@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-4.0.1.tgz#0def113fa0e4dc8de83331a5cf79c8b325213151" integrity sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw== dependencies: "@babel/parser" "^7.6.0" "@babel/types" "^7.6.1" constants-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= content-disposition@0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== dependencies: safe-buffer "5.1.2" content-type@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.5.0, convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== dependencies: safe-buffer "~5.1.1" cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= cookie@0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" 
integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= cookie@0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== copy-concurrently@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== dependencies: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.0" copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= core-js-compat@^3.1.1: version "3.4.2" resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.4.2.tgz#652fa7c54652b7f6586a893e37001df55ea2ac37" integrity sha512-W0Aj+LM3EAxxjD0Kp2o4be8UlnxIZHNupBv2znqrheR4aY2nOn91794k/xoSp+SxqqriiZpTsSwBtZr60cbkwQ== dependencies: browserslist "^4.7.3" semver "^6.3.0" core-js@^2.4.0: version "2.6.10" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.10.tgz#8a5b8391f8cc7013da703411ce5b585706300d7f" integrity sha512-I39t74+4t+zau64EN1fE5v2W31Adtc/REhzWN+gWRRXg6WH5qAsZm62DHpQ1+Yhe4047T55jvzz7MUqF/dBBlA== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cosmiconfig@^5.0.0: version "5.2.1" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== dependencies: import-fresh "^2.0.0" is-directory 
"^0.3.1" js-yaml "^3.13.1" parse-json "^4.0.0" create-ecdh@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== dependencies: bn.js "^4.1.0" elliptic "^6.0.0" create-hash@^1.1.0, create-hash@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== dependencies: cipher-base "^1.0.1" inherits "^2.0.1" md5.js "^1.3.4" ripemd160 "^2.0.1" sha.js "^2.4.0" create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: version "1.1.7" resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== dependencies: cipher-base "^1.0.3" create-hash "^1.1.0" inherits "^2.0.1" ripemd160 "^2.0.0" safe-buffer "^5.0.1" sha.js "^2.4.8" cross-spawn@6.0.5, cross-spawn@^6.0.0: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== dependencies: nice-try "^1.0.4" path-key "^2.0.1" semver "^5.5.0" shebang-command "^1.2.0" which "^1.2.9" crypto-browserify@^3.11.0: version "3.12.0" resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== dependencies: browserify-cipher "^1.0.0" browserify-sign "^4.0.0" create-ecdh "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.0" diffie-hellman "^5.0.0" inherits "^2.0.1" 
pbkdf2 "^3.0.3" public-encrypt "^4.0.0" randombytes "^2.0.0" randomfill "^1.0.3" css-loader@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-2.1.1.tgz#d8254f72e412bb2238bb44dd674ffbef497333ea" integrity sha512-OcKJU/lt232vl1P9EEDamhoO9iKY3tIjY5GU+XDLblAykTdgs6Ux9P1hTHve8nFKy5KPpOXOsVI/hIwi3841+w== dependencies: camelcase "^5.2.0" icss-utils "^4.1.0" loader-utils "^1.2.3" normalize-path "^3.0.0" postcss "^7.0.14" postcss-modules-extract-imports "^2.0.0" postcss-modules-local-by-default "^2.0.6" postcss-modules-scope "^2.1.0" postcss-modules-values "^2.0.0" postcss-value-parser "^3.3.0" schema-utils "^1.0.0" css-select@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= dependencies: boolbase "~1.0.0" css-what "2.1" domutils "1.5.1" nth-check "~1.0.1" css-what@2.1: version "2.1.3" resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== cssesc@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== currently-unhandled@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= dependencies: array-find-index "^1.0.1" custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha1-XQKkaFCt8bSjF5RqOSj8y1v9BCU= cyclist@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= d@1, d@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" type "^1.0.1" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" date-format@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/date-format/-/date-format-2.1.0.tgz#31d5b5ea211cf5fd764cd38baf9d033df7e125cf" integrity sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA== dateformat@^1.0.6: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" integrity sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk= dependencies: get-stdin "^4.0.1" meow "^3.3.0" debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^3.1.1, debug@^3.2.5, debug@^3.2.6: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity 
sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" debug@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== dependencies: ms "2.0.0" decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= decode-uri-component@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= deep-equal@^1.0.1: version "1.1.1" resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== dependencies: is-arguments "^1.0.4" is-date-object "^1.0.1" is-regex "^1.0.4" object-is "^1.0.1" object-keys "^1.1.1" regexp.prototype.flags "^1.2.0" deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= default-gateway@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== dependencies: execa "^1.0.0" ip-regex "^2.1.0" 
define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" define-property@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" isobject "^3.0.1" del@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== dependencies: "@types/glob" "^7.1.1" globby "^6.1.0" is-path-cwd "^2.0.0" is-path-in-cwd "^2.0.0" p-map "^2.0.0" pify "^4.0.1" rimraf "^2.6.3" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@~1.1.2: version "1.1.2" resolved 
"https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= des.js@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== dependencies: inherits "^2.0.1" minimalistic-assert "^1.0.0" destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= detect-indent@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= dependencies: repeating "^2.0.0" detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= detect-node@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw= diffie-hellman@^5.0.0: version "5.0.3" resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== dependencies: bn.js "^4.1.0" miller-rabin 
"^4.0.0" randombytes "^2.0.0" dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= dns-packet@^1.3.1: version "1.3.4" resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.4.tgz#e3455065824a2507ba886c55a89963bb107dec6f" integrity sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA== dependencies: ip "^1.1.0" safe-buffer "^5.0.1" dns-txt@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= dependencies: buffer-indexof "^1.0.0" doctypes@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" integrity sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk= dom-converter@^0.2: version "0.2.0" resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" dom-serialize@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" integrity sha1-ViromZ9Evl6jB29UGdzVnrQ6yVs= dependencies: custom-event "~1.0.0" ent "~2.2.0" extend "^3.0.0" void-elements "^2.0.0" dom-serializer@0: version "0.2.2" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== dependencies: domelementtype "^2.0.1" entities "^2.0.0" domain-browser@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" 
integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== domelementtype@1, domelementtype@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== domelementtype@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d" integrity sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ== domhandler@^2.3.0: version "2.4.2" resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== dependencies: domelementtype "1" domutils@1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= dependencies: dom-serializer "0" domelementtype "1" domutils@^1.5.1: version "1.7.0" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== dependencies: dom-serializer "0" domelementtype "1" duplexify@^3.4.2, duplexify@^3.6.0: version "3.7.1" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" inherits "^2.0.1" readable-stream "^2.0.0" stream-shift "^1.0.0" ecc-jsbn@~0.1.1: version "0.1.2" resolved 
"https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.3.723: version "1.3.738" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.738.tgz#aec24b091c82acbfabbdcce08076a703941d17ca" integrity sha512-vCMf4gDOpEylPSLPLSwAEsz+R3ShP02Y3cAKMZvTqule3XcPp7tgc/0ESI7IS6ZeyBlGClE50N53fIOkcIVnpw== elliptic@^6.0.0: version "6.5.4" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== dependencies: bn.js "^4.11.9" brorand "^1.1.0" hash.js "^1.0.0" hmac-drbg "^1.0.1" inherits "^2.0.4" minimalistic-assert "^1.0.1" minimalistic-crypto-utils "^1.0.1" emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== emojis-list@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity 
sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" engine.io-client@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.2.1.tgz#6f54c0475de487158a1a7c77d10178708b6add36" integrity sha512-y5AbkytWeM4jQr7m/koQLc5AxpRKC1hEVUb/s1FUAWEJq5AzJJ4NLvzuKPuxtDi5Mq755WuDvZ6Iv2rXj4PTzw== dependencies: component-emitter "1.2.1" component-inherit "0.0.3" debug "~3.1.0" engine.io-parser "~2.1.1" has-cors "1.1.0" indexof "0.0.1" parseqs "0.0.5" parseuri "0.0.5" ws "~3.3.1" xmlhttprequest-ssl "~1.5.4" yeast "0.1.2" engine.io-parser@~2.1.0, engine.io-parser@~2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6" integrity sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA== dependencies: after "0.8.2" arraybuffer.slice "~0.0.7" base64-arraybuffer "0.1.5" blob "0.0.5" has-binary2 "~1.0.2" engine.io@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.2.1.tgz#b60281c35484a70ee0351ea0ebff83ec8c9522a2" integrity sha512-+VlKzHzMhaU+GsCIg4AoXF1UdDFjHHwMmMKqMJNDNLlUlejz58FCy4LBqB2YVJskHGYl06BatYWKP2TVdVXE5w== dependencies: accepts "~1.3.4" base64id "1.0.0" cookie "0.3.1" debug "~3.1.0" engine.io-parser "~2.1.0" ws "~3.3.1" enhanced-resolve@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" tapable "^1.0.0" enhanced-resolve@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" integrity 
sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== dependencies: graceful-fs "^4.1.2" memory-fs "^0.5.0" tapable "^1.0.0" ent@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= entities@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4" integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw== errno@^0.1.1, errno@^0.1.3, errno@~0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== dependencies: prr "~1.0.1" error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es-abstract@^1.5.1: version "1.16.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.16.0.tgz#d3a26dc9c3283ac9750dca569586e976d9dcc06d" integrity sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg== dependencies: es-to-primitive "^1.2.0" function-bind "^1.1.1" has "^1.0.3" has-symbols "^1.0.0" is-callable "^1.1.4" is-regex "^1.0.4" object-inspect "^1.6.0" object-keys "^1.1.1" string.prototype.trimleft "^2.1.0" string.prototype.trimright "^2.1.0" es-to-primitive@^1.2.0: version "1.2.1" resolved 
"https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50: version "0.10.53" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== dependencies: es6-iterator "~2.0.3" es6-symbol "~3.1.3" next-tick "~1.0.0" es6-iterator@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= dependencies: d "1" es5-ext "^0.10.35" es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== dependencies: d "^1.0.1" ext "^1.1.2" escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@1.8.x: version "1.8.1" resolved 
"https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" integrity sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg= dependencies: esprima "^2.7.1" estraverse "^1.9.1" esutils "^2.0.2" optionator "^0.8.1" optionalDependencies: source-map "~0.2.0" eslint-scope@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== dependencies: esrecurse "^4.1.0" estraverse "^4.1.1" esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esrecurse@^4.1.0: version "4.2.1" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== dependencies: estraverse "^4.1.0" estraverse@^1.9.1: version "1.9.3" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= estraverse@^4.1.0, estraverse@^4.1.1: version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity 
sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" integrity sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== eventsource@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== dependencies: original "^1.0.0" evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== dependencies: md5.js "^1.3.4" safe-buffer "^5.1.1" execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== dependencies: cross-spawn "^6.0.0" get-stream "^4.0.0" is-stream "^1.1.0" npm-run-path "^2.0.0" p-finally "^1.0.0" signal-exit "^3.0.0" strip-eof "^1.0.0" expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity 
sha1-t3c14xXOMPa27/D4OwQVGiJEliI= dependencies: debug "^2.3.3" define-property "^0.2.5" extend-shallow "^2.0.1" posix-character-classes "^0.1.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= dependencies: homedir-polyfill "^1.0.1" express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== dependencies: accepts "~1.3.7" array-flatten "1.1.1" body-parser "1.19.0" content-disposition "0.5.3" content-type "~1.0.4" cookie "0.4.0" cookie-signature "1.0.6" debug "2.6.9" depd "~1.1.2" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" finalhandler "~1.1.2" fresh "0.5.2" merge-descriptors "1.0.1" methods "~1.1.2" on-finished "~2.3.0" parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.5" qs "6.7.0" range-parser "~1.2.1" safe-buffer "5.1.2" send "0.17.1" serve-static "1.14.1" setprototypeof "1.1.1" statuses "~1.5.0" type-is "~1.6.18" utils-merge "1.0.1" vary "~1.1.2" ext@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.2.0.tgz#8dd8d2dd21bcced3045be09621fa0cbf73908ba4" integrity sha512-0ccUQK/9e3NreLFg6K6np8aPyRgwycx+oFGtfx1dSp7Wj00Ozw9r05FgBRlzjf2XBM7LAzwgLyDscRrtSU91hA== dependencies: type "^2.0.0" extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" 
integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= dependencies: assign-symbols "^1.0.0" is-extendable "^1.0.1" extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== dependencies: array-unique "^0.3.2" define-property "^1.0.0" expand-brackets "^2.1.4" extend-shallow "^2.0.1" fragment-cache "^0.2.1" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= fast-deep-equal@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= fast-levenshtein@~2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= faye-websocket@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" integrity sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= dependencies: websocket-driver ">=0.5.1" faye-websocket@~0.11.1: version "0.11.3" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.3.tgz#5c0e9a8968e8912c286639fde977a8b209f2508e" integrity sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== dependencies: websocket-driver ">=0.5.1" figgy-pudding@^3.5.1: version "3.5.2" resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== file-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-3.0.1.tgz#f8e0ba0b599918b51adfe45d66d1e771ad560faa" integrity sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw== dependencies: loader-utils "^1.0.2" schema-utils "^1.0.0" fill-range@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= dependencies: extend-shallow "^2.0.1" is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range "^2.1.0" fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" finalhandler@1.1.2, finalhandler@~1.1.2: version "1.1.2" resolved 
"https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" on-finished "~2.3.0" parseurl "~1.3.3" statuses "~1.5.0" unpipe "~1.0.0" find-cache-dir@^2.0.0, find-cache-dir@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== dependencies: commondir "^1.0.1" make-dir "^2.0.0" pkg-dir "^3.0.0" find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= dependencies: path-exists "^2.0.0" pinkie-promise "^2.0.0" find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" findup-sync@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== dependencies: detect-file "^1.0.0" is-glob "^4.0.0" micromatch "^3.0.4" resolve-dir "^1.0.1" flatted@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.1.tgz#69e57caa8f0eacbc281d2e2cb458d46fdb449e08" integrity sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg== flush-write-stream@^1.0.0: version "1.1.1" resolved 
"https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== dependencies: inherits "^2.0.3" readable-stream "^2.3.6" follow-redirects@^1.0.0: version "1.13.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.0.tgz#b42e8d93a2a7eea5ed88633676d6597bc8e384db" integrity sha512-aq6gF1BEKje4a9i9+5jimNFIpq4Q1WiwBToeRK5NvZBd/TRsmW8BsJfOEGkr76TbOyPVD3OVDN910EcUNtRYEA== for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" forwarded@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= dependencies: map-cache "^0.2.2" fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= from2@^2.1.0: version "2.3.0" resolved 
"https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= dependencies: inherits "^2.0.1" readable-stream "^2.0.0" fs-access@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" integrity sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o= dependencies: null-check "^1.0.0" fs-extra@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== dependencies: graceful-fs "^4.1.2" jsonfile "^4.0.0" universalify "^0.1.0" fs-minipass@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== dependencies: minipass "^2.6.0" fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= dependencies: graceful-fs "^4.1.2" iferr "^0.1.5" imurmurhash "^0.1.4" readable-stream "1 || 2" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.2.7: version "1.2.9" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: nan "^2.12.1" node-pre-gyp "^0.12.0" fsevents@~2.1.1: version "2.1.2" resolved 
"https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805" integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA== function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" has-unicode "^2.0.0" object-assign "^4.1.0" signal-exit "^3.0.0" string-width "^1.0.1" strip-ansi "^3.0.1" wide-align "^1.1.0" get-caller-file@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-caller-file@^2.0.1: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-stdin@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= get-stream@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== dependencies: pump "^3.0.0" get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-parent@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= dependencies: is-glob "^3.1.0" path-dirname "^1.0.0" glob-parent@~5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.0.tgz#5f4c1d1e748d30cd73ad2944b3577a81b081e8c2" integrity sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw== dependencies: is-glob "^4.0.1" glob@^5.0.13, glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" integrity sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E= dependencies: inflight "^1.0.4" inherits "2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^7.0.3, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" global-modules@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== dependencies: global-prefix "^3.0.0" global-modules@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" is-windows "^1.0.1" resolve-dir "^1.0.0" global-prefix@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= dependencies: expand-tilde "^2.0.2" homedir-polyfill "^1.0.1" ini "^1.3.4" is-windows "^1.0.1" which "^1.2.14" global-prefix@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== dependencies: ini "^1.3.5" kind-of "^6.0.2" which "^1.3.1" globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== globby@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" glob "^7.0.3" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: version "4.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity 
sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== "guanlecoja-ui@link:../guanlecoja-ui": version "2.0.0" dependencies: "@uirouter/angularjs" "^1.0.15" angular "^1.7.9" angular-animate "^1.7.9" angular-ui-bootstrap "^2.5.6" jquery "^3.4.0" lodash "^4.17.11" handle-thing@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754" integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ== handlebars@^4.0.1: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== dependencies: minimist "^1.2.5" neo-async "^2.6.0" source-map "^0.6.1" wordwrap "^1.0.0" optionalDependencies: uglify-js "^3.1.4" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.0: version "5.1.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: ajv "^6.5.5" har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-binary2@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/has-binary2/-/has-binary2-1.0.3.tgz#7776ac627f3ea77250cfc332dab7ddf5e4f5d11d" integrity sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw== dependencies: isarray "2.0.1" has-cors@1.1.0: version "1.1.0" 
resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk= has-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-symbols@^1.0.0, has-symbols@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= has-value@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= dependencies: get-value "^2.0.3" has-values "^0.1.4" isobject "^2.0.0" has-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: get-value "^2.0.6" has-values "^1.0.0" isobject "^3.0.0" has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= has-values@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= dependencies: is-number "^3.0.0" kind-of "^4.0.0" has@^1.0.1, has@^1.0.3: 
version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hash-base@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== dependencies: inherits "^2.0.3" minimalistic-assert "^1.0.1" he@1.2.x: version "1.2.0" resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== hmac-drbg@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= dependencies: hash.js "^1.0.3" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.1" homedir-polyfill@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== hpack.js@^2.1.6: version "2.1.6" resolved 
"https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= dependencies: inherits "^2.0.1" obuf "^1.0.0" readable-stream "^2.0.1" wbuf "^1.1.0" html-entities@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8= html-minifier@^3.2.3: version "3.5.21" resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== dependencies: camel-case "3.0.x" clean-css "4.2.x" commander "2.17.x" he "1.2.x" param-case "2.1.x" relateurl "0.2.x" uglify-js "3.4.x" html-webpack-plugin@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= dependencies: html-minifier "^3.2.3" loader-utils "^0.2.16" lodash "^4.17.3" pretty-error "^2.0.2" tapable "^1.0.0" toposort "^1.0.0" util.promisify "1.0.0" htmlparser2@^3.3.0: version "3.10.1" resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== dependencies: domelementtype "^1.3.1" domhandler "^2.3.0" domutils "^1.5.1" entities "^1.1.1" inherits "^2.0.1" readable-stream "^3.1.1" http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= http-errors@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" integrity 
sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" http-errors@~1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.0" statuses ">= 1.4.0 < 2" http-errors@~1.7.2: version "1.7.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== dependencies: depd "~1.1.2" inherits "2.0.4" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" "http-parser-js@>=0.4.0 <0.4.11": version "0.4.10" resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4" integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q= http-proxy-middleware@0.19.1: version "0.19.1" resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== dependencies: http-proxy "^1.17.0" is-glob "^4.0.0" lodash "^4.17.11" micromatch "^3.1.10" http-proxy@^1.13.0, http-proxy@^1.17.0: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" requires-port "^1.0.0" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" 
integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= iconv-lite@0.4.24, iconv-lite@^0.4.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" icss-replace-symbols@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= icss-utils@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== dependencies: postcss "^7.0.14" ieee754@^1.1.4: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== iferr@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= ignore-walk@^3.0.1: version "3.0.3" resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== dependencies: minimatch "^3.0.4" image-size@~0.5.0: version "0.5.5" resolved 
"https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= import-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= dependencies: import-from "^2.1.0" import-fresh@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= dependencies: caller-path "^2.0.0" resolve-from "^3.0.0" import-from@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" integrity sha1-M1238qev/VOqpHHUuAId7ja387E= dependencies: resolve-from "^3.0.0" import-glob-loader@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/import-glob-loader/-/import-glob-loader-1.1.0.tgz#98d84c0f661c8ba9f821d9ddb7c6b6dc8e97eca2" integrity sha1-mNhMD2Yci6n4Idndt8a23I6X7KI= dependencies: glob "^5.0.13" loader-utils "^0.2.10" import-local@2.0.0, import-local@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== dependencies: pkg-dir "^3.0.0" resolve-cwd "^2.0.0" imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= dependencies: repeating "^2.0.0" indexes-of@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= indexof@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= infer-owner@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: version "1.3.7" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84" integrity sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ== internal-ip@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" integrity 
sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== dependencies: default-gateway "^4.2.0" ipaddr.js "^1.9.0" interpret@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" invert-kv@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= ip@^1.1.0, ip@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= ipaddr.js@1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== ipaddr.js@^1.9.0: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== is-absolute-url@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" integrity 
sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== is-accessor-descriptor@^0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arguments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.0.4.tgz#3faf966c7cba0ff437fb31f6250082fcf0448cf3" integrity sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA== is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-callable@^1.1.4: version "1.1.4" resolved 
"https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== is-core-module@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== dependencies: has "^1.0.3" is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== dependencies: kind-of "^6.0.0" is-date-object@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" is-data-descriptor "^1.0.0" kind-of 
"^6.0.2" is-directory@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= is-expression@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-4.0.0.tgz#c33155962abf21d0afd2552514d67d2ec16fd2ab" integrity sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A== dependencies: acorn "^7.1.1" object-assign "^4.1.1" is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= is-extendable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^2.1.0, is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" integrity sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= 
is-glob@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-cwd@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== is-path-in-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== dependencies: is-path-inside "^2.1.0" is-path-inside@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== dependencies: path-is-inside "^1.0.2" is-plain-obj@^1.0.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= is-regex@^1.0.3, is-regex@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= dependencies: has "^1.0.1" is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-symbol@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== dependencies: has-symbols "^1.0.1" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity 
sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== is-wsl@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isarray@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.1.tgz#a37d94ed9cda2d59865c9f76fe596ee1f338741e" integrity sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4= isbinaryfile@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" integrity sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== dependencies: buffer-alloc "^1.2.0" isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul-instrumenter-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/istanbul-instrumenter-loader/-/istanbul-instrumenter-loader-3.0.1.tgz#9957bd59252b373fae5c52b7b5188e6fde2a0949" integrity 
sha512-a5SPObZgS0jB/ixaKSMdn6n/gXSrK2S6q/UfRJBT3e6gQmVjwZROTODQsYW5ZNwOu78hG62Y3fWlebaVOL0C+w== dependencies: convert-source-map "^1.5.0" istanbul-lib-instrument "^1.7.3" loader-utils "^1.1.0" schema-utils "^0.3.0" istanbul-lib-coverage@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0" integrity sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ== istanbul-lib-instrument@^1.7.3: version "1.10.2" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca" integrity sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A== dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" babylon "^6.18.0" istanbul-lib-coverage "^1.2.1" semver "^5.3.0" istanbul@^0.4.0: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" integrity sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs= dependencies: abbrev "1.0.x" async "1.x" escodegen "1.8.x" esprima "2.7.x" glob "^5.0.15" handlebars "^4.0.1" js-yaml "3.x" mkdirp "0.5.x" nopt "3.x" once "1.x" resolve "1.1.x" supports-color "^3.1.0" which "^1.1.1" wordwrap "^1.0.0" jasmine-core@^3.3, jasmine-core@^3.4.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-3.5.0.tgz#132c23e645af96d85c8bca13c8758b18429fc1e4" integrity sha512-nCeAiw37MIMA9w9IXso7bRaLl+c/ef3wnxsoSAlYrzS+Ot0zTG6nU8G/cIfGkqpkjX2wNaIW9RFG0TwIFnG6bA== jquery@^3.4.0, jquery@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.5.0.tgz#9980b97d9e4194611c36530e7dc46a58d7340fc9" integrity sha512-Xb7SVYMvygPxbFMpTFQiHh1J7HClEaThguL15N/Gg37Lri/qKyhRGZYzHRyLH8Stq3Aow0LsHO2O2ci86fCrNQ== js-levenshtein@^1.1.3: version "1.1.6" resolved 
"https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-stringify@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" integrity sha1-Fzb939lyTyijaCrcYjCufk6Weds= "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@3.x, js-yaml@^3.13.1: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= 
json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== json-schema-traverse@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= json3@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== json5@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== dependencies: minimist "^1.2.0" json5@^2.1.0: version "2.1.1" resolved 
"https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== dependencies: minimist "^1.2.0" jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= optionalDependencies: graceful-fs "^4.1.6" jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" jstransformer@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" integrity sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM= dependencies: is-promise "^2.0.0" promise "^7.0.1" karma-chrome-launcher@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" integrity sha512-uf/ZVpAabDBPvdPdveyk1EPgbnloPvFFGgmRhYLTDH7gEB4nZdSBk8yTU47w1g/drLSx5uMOkjKk7IWKfWg/+w== dependencies: fs-access "^1.0.0" which "^1.2.1" karma-coverage@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/karma-coverage/-/karma-coverage-1.1.2.tgz#cc09dceb589a83101aca5fe70c287645ef387689" integrity sha512-eQawj4Cl3z/CjxslYy9ariU4uDh7cCNFZHNWXWRpl0pNeblY/4wHR7M7boTYXWrn9bY0z2pZmr11eKje/S/hIw== dependencies: dateformat "^1.0.6" istanbul "^0.4.0" lodash "^4.17.0" minimatch "^3.0.0" source-map "^0.5.1" karma-jasmine@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-2.0.1.tgz#26e3e31f2faf272dd80ebb0e1898914cc3a19763" integrity sha512-iuC0hmr9b+SNn1DaUD2QEYtUxkS1J+bSJSn7ejdEexs7P8EYvA1CWkEdrDQ+8jVH3AgWlCNwjYsT1chjcNW9lA== dependencies: jasmine-core 
"^3.3" karma-sourcemap-loader@^0.3.7: version "0.3.7" resolved "https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8" integrity sha1-kTIsd/jxPUb+0GKwQuEAnUxFBdg= dependencies: graceful-fs "^4.1.2" karma-spec-reporter@^0.0.32: version "0.0.32" resolved "https://registry.yarnpkg.com/karma-spec-reporter/-/karma-spec-reporter-0.0.32.tgz#2e9c7207ea726771260259f82becb543209e440a" integrity sha1-LpxyB+pyZ3EmAln4K+y1QyCeRAo= dependencies: colors "^1.1.2" karma-webpack@^3.0.5: version "3.0.5" resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-3.0.5.tgz#1ff1e3a690fb73ae95ee95f9ab58f341cfc7b40f" integrity sha512-nRudGJWstvVuA6Tbju9tyGUfXTtI1UXMXoRHVmM2/78D0q6s/Ye2IC157PKNDC15PWFGR0mVIRtWLAdcfsRJoA== dependencies: async "^2.0.0" babel-runtime "^6.0.0" loader-utils "^1.0.0" lodash "^4.0.0" source-map "^0.5.6" webpack-dev-middleware "^2.0.6" karma@^4.1.0: version "4.4.1" resolved "https://registry.yarnpkg.com/karma/-/karma-4.4.1.tgz#6d9aaab037a31136dc074002620ee11e8c2e32ab" integrity sha512-L5SIaXEYqzrh6b1wqYC42tNsFMx2PWuxky84pK9coK09MvmL7mxii3G3bZBh/0rvD27lqDd0le9jyhzvwif73A== dependencies: bluebird "^3.3.0" body-parser "^1.16.1" braces "^3.0.2" chokidar "^3.0.0" colors "^1.1.0" connect "^3.6.0" di "^0.0.1" dom-serialize "^2.2.0" flatted "^2.0.0" glob "^7.1.1" graceful-fs "^4.1.2" http-proxy "^1.13.0" isbinaryfile "^3.0.0" lodash "^4.17.14" log4js "^4.0.0" mime "^2.3.1" minimatch "^3.0.2" optimist "^0.6.1" qjobs "^1.1.4" range-parser "^1.2.0" rimraf "^2.6.0" safe-buffer "^5.0.1" socket.io "2.1.1" source-map "^0.6.1" tmp "0.0.33" useragent "2.3.0" killable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved 
"https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== lcid@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== dependencies: invert-kv "^2.0.0" less-loader@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-5.0.0.tgz#498dde3a6c6c4f887458ee9ed3f086a12ad1b466" integrity sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg== dependencies: clone "^2.1.1" loader-utils "^1.1.0" pify "^4.0.1" less@^3.9.0: version "3.10.3" resolved "https://registry.yarnpkg.com/less/-/less-3.10.3.tgz#417a0975d5eeecc52cff4bcfa3c09d35781e6792" integrity sha512-vz32vqfgmoxF1h3K4J+yKCtajH0PWmjkIFgbs5d78E/c/e+UQTnI+lWK+1eQRE95PXM2mC3rJlLSSP9VQHnaow== dependencies: clone "^2.1.2" optionalDependencies: errno "^0.1.1" graceful-fs "^4.1.2" image-size "~0.5.0" mime "^1.4.1" mkdirp "^0.5.0" promise "^7.1.1" request "^2.83.0" source-map "~0.6.0" levn@~0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" pify "^2.0.0" pinkie-promise "^2.0.0" strip-bom "^2.0.0" loader-runner@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== loader-utils@1.2.3, loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== dependencies: big.js "^5.2.2" emojis-list "^2.0.0" json5 "^1.0.1" loader-utils@^0.2.10, loader-utils@^0.2.16: version "0.2.17" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= dependencies: big.js "^3.1.3" emojis-list "^2.0.0" json5 "^0.5.0" object-assign "^4.0.1" locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" lodash@^4.0.0, lodash@^4.17.0, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.3, lodash@^4.17.4: version "4.17.19" resolved 
"https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b" integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ== lodash@^4.17.19: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-symbols@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== dependencies: chalk "^2.0.1" log4js@^4.0.0: version "4.5.1" resolved "https://registry.yarnpkg.com/log4js/-/log4js-4.5.1.tgz#e543625e97d9e6f3e6e7c9fc196dd6ab2cae30b5" integrity sha512-EEEgFcE9bLgaYUKuozyFfytQM2wDHtXn4tAN41pkaxpNjAykv11GVdeI4tHtmPWW4Xrgh9R/2d7XYghDVjbKKw== dependencies: date-format "^2.0.0" debug "^4.1.1" flatted "^2.0.0" rfdc "^1.1.4" streamroller "^1.0.6" loglevel@^1.6.4: version "1.6.6" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.6.tgz#0ee6300cc058db6b3551fa1c4bf73b83bb771312" integrity sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ== loglevelnext@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/loglevelnext/-/loglevelnext-1.0.5.tgz#36fc4f5996d6640f539ff203ba819641680d75a2" integrity sha512-V/73qkPuJmx4BcBF19xPBr+0ZRVBhc4POxvZTZdMeXpJ4NItXSJ/MSwuFT0kQJlCbXvdlZoQQ/418bS1y9Jh6A== dependencies: es6-symbol "^3.1.1" object.assign "^4.1.0" loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" loud-rejection@^1.0.0, 
loud-rejection@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= dependencies: currently-unhandled "^0.4.1" signal-exit "^3.0.0" lower-case@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= lru-cache@4.1.x: version "4.1.5" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" make-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== dependencies: pify "^4.0.1" semver "^5.6.0" mamacro@^0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== map-age-cleaner@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== dependencies: p-defer "^1.0.0" map-cache@^0.2.2: version "0.2.2" resolved 
"https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" md5.js@^1.3.4: version "1.3.5" resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== dependencies: hash-base "^3.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= mem@^4.0.0: version "4.3.0" resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== dependencies: map-age-cleaner "^0.1.1" mimic-fn "^2.0.0" p-is-promise "^2.0.0" memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= dependencies: errno "^0.1.3" readable-stream "^2.0.1" memory-fs@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== dependencies: errno "^0.1.3" readable-stream "^2.0.1" 
meow@^3.3.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= dependencies: camelcase-keys "^2.0.0" decamelize "^1.1.2" loud-rejection "^1.0.0" map-obj "^1.0.1" minimist "^1.1.3" normalize-package-data "^2.3.4" object-assign "^4.0.1" read-pkg-up "^1.0.1" redent "^1.0.0" trim-newlines "^1.0.0" merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" braces "^2.3.1" define-property "^2.0.2" extend-shallow "^3.0.2" extglob "^2.0.4" fragment-cache "^0.2.1" kind-of "^6.0.2" nanomatch "^1.2.9" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.2" miller-rabin@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== dependencies: bn.js "^4.0.0" brorand "^1.0.1" mime-db@1.42.0, "mime-db@>= 1.40.0 < 2": version "1.42.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.42.0.tgz#3e252907b4c7adb906597b4b65636272cf9e7bac" integrity sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ== mime-types@^2.1.12, mime-types@~2.1.17, 
mime-types@~2.1.19, mime-types@~2.1.24: version "2.1.25" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.25.tgz#39772d46621f93e2a80a856c53b86a62156a6437" integrity sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg== dependencies: mime-db "1.42.0" mime@1.6.0, mime@^1.4.1: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.1.0, mime@^2.3.1, mime@^2.4.4: version "2.4.4" resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== mimic-fn@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== mini-css-extract-plugin@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz#a3f13372d6fcde912f3ee4cd039665704801e3b9" integrity sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw== dependencies: loader-utils "^1.1.0" normalize-url "^2.0.1" schema-utils "^1.0.0" webpack-sources "^1.1.0" minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimalistic-crypto-utils@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity 
sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= "minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= minipass@^2.6.0, minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" minizlib@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== dependencies: minipass "^2.9.0" mississippi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== dependencies: concat-stream "^1.5.0" duplexify "^3.4.2" end-of-stream "^1.1.0" flush-write-stream "^1.0.0" from2 "^2.1.0" parallel-transform "^1.1.0" pump "^3.0.0" pumpify "^1.3.3" stream-each "^1.1.0" through2 "^2.0.0" mixin-deep@^1.2.0: version "1.3.2" resolved 
"https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" move-concurrently@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= dependencies: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.3" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== ms@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== multicast-dns-service-types@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= multicast-dns@^6.0.1: version "6.2.3" resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" integrity 
sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== dependencies: dns-packet "^1.3.1" thunky "^1.0.2" nan@^2.12.1: version "2.14.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" define-property "^2.0.2" extend-shallow "^3.0.2" fragment-cache "^0.2.1" is-windows "^1.0.2" kind-of "^6.0.2" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" needle@^2.2.1: version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" negotiator@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== neo-async@^2.5.0, neo-async@^2.6.0, neo-async@^2.6.1: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== next-tick@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= nice-try@^1.0.4: version "1.0.5" resolved 
"https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== no-case@^2.2.0: version "2.3.2" resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== dependencies: lower-case "^1.1.1" node-forge@0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579" integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ== node-libs-browser@^2.2.0, node-libs-browser@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== dependencies: assert "^1.1.1" browserify-zlib "^0.2.0" buffer "^4.3.0" console-browserify "^1.1.0" constants-browserify "^1.0.0" crypto-browserify "^3.11.0" domain-browser "^1.1.1" events "^3.0.0" https-browserify "^1.0.0" os-browserify "^0.3.0" path-browserify "0.0.1" process "^0.11.10" punycode "^1.2.4" querystring-es3 "^0.2.0" readable-stream "^2.3.3" stream-browserify "^2.0.1" stream-http "^2.7.2" string_decoder "^1.0.0" timers-browserify "^2.0.4" tty-browserify "0.0.0" url "^0.11.0" util "^0.11.0" vm-browserify "^1.0.1" node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" needle "^2.2.1" nopt "^4.0.1" npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.2.7" rimraf "^2.6.1" semver 
"^5.3.0" tar "^4" node-releases@^1.1.71: version "1.1.72" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe" integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== nopt@3.x: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" integrity sha1-xkZdvwirzU2zWTF/eaxopkayj/k= dependencies: abbrev "1" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: remove-trailing-separator "^1.0.1" normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= normalize-url@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== dependencies: prepend-http "^2.0.0" query-string "^5.0.1" sort-keys "^2.0.0" npm-bundled@^1.0.1: version "1.0.6" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== npm-packlist@^1.1.6: version "1.4.6" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.6.tgz#53ba3ed11f8523079f1457376dd379ee4ea42ff4" integrity sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" gauge "~2.7.3" set-blocking "~2.0.0" nth-check@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== dependencies: boolbase "~1.0.0" null-check@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" integrity sha1-l33/1xdgErnsMNKjnbXPcqBDnt0= null-loader@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/null-loader/-/null-loader-1.0.0.tgz#90e85798e50e9dd1d568495a44e74829dec26744" integrity sha512-mYLDjDVTkjTlFoidxRhzO75rdcwfVXfw5G5zpj8sXnBkHtKJxMk4hTcRR4i5SOhDB6EvcQuYriy6IV23eq6uog== dependencies: loader-utils "^1.2.3" schema-utils "^1.0.0" num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" integrity sha1-8MaapQ78lbhmwYb0AKM3acsvEpE= object-copy@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" object-inspect@^1.6.0: version "1.7.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== object-is@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/object-is/-/object-is-1.0.1.tgz#0aa60ec9989a0b3ed795cf4d06f62cf1ad6539b6" integrity sha1-CqYOyZiaCz7Xlc9NBvYs8a1lObY= object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== dependencies: define-properties "^1.1.2" function-bind "^1.1.1" has-symbols "^1.0.0" object-keys "^1.0.11" object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= dependencies: define-properties "^1.1.2" es-abstract "^1.5.1" object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= dependencies: isobject "^3.0.1" obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== on-finished@~2.3.0: version "2.3.0" resolved 
"https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@1.x, once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" opn@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== dependencies: is-wsl "^1.1.0" optimist@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.6" levn "~0.3.0" prelude-ls "~1.1.2" type-check "~0.3.2" word-wrap "~1.2.3" original@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== dependencies: url-parse "^1.4.3" os-browserify@^0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-locale@^3.0.0, os-locale@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== dependencies: execa "^1.0.0" lcid "^2.0.0" mem "^4.0.0" os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== p-limit@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.1.tgz#aa07a788cc3151c939b5131f63570f0dd2009537" integrity 
sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg== dependencies: p-try "^2.0.0" p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-map@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== p-retry@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== dependencies: retry "^0.12.0" p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@~1.0.5: version "1.0.10" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.10.tgz#4328badb5086a426aa90f541977d4955da5c9732" integrity sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw== parallel-transform@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== dependencies: cyclist "^1.0.1" inherits "^2.0.3" readable-stream "^2.1.5" param-case@2.1.x: version "2.1.1" resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= dependencies: no-case "^2.2.0" 
parse-asn1@^5.0.0: version "5.1.5" resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== dependencies: asn1.js "^4.0.0" browserify-aes "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.0" pbkdf2 "^3.0.3" safe-buffer "^5.1.1" parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse-json@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= dependencies: error-ex "^1.3.1" json-parse-better-errors "^1.0.1" parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= parseqs@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" integrity sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0= dependencies: better-assert "~1.0.0" parseuri@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" integrity sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo= dependencies: better-assert "~1.0.0" parseurl@~1.3.2, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascalcase@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= 
path-browserify@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-is-inside@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= path-key@^2.0.0, path-key@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= path-parse@^1.0.6: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= 
path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify "^2.0.0" pinkie-promise "^2.0.0" pbkdf2@^3.0.3: version "3.0.17" resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" ripemd160 "^2.0.1" safe-buffer "^5.0.1" sha.js "^2.4.8" performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= picomatch@^2.0.4: version "2.1.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.1.1.tgz#ecdfbea7704adb5fe6fb47f9866c4c0e15e905c5" integrity sha512-OYMyqkKzK7blWO/+XZYP6w8hH0LDvkBvdvKukti+7kqYFCiEAk+gI3DWnryapc0Dau05ugGTy0foQ6mqn4AHYA== pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= pkg-dir@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== dependencies: find-up "^3.0.0" portfinder@^1.0.25: version "1.0.25" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.25.tgz#254fd337ffba869f4b9d37edc298059cb4d35eca" integrity sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg== dependencies: async "^2.6.2" debug "^3.1.1" mkdirp "^0.5.1" posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= postcss-load-config@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.0.tgz#c84d692b7bb7b41ddced94ee62e8ab31b417b003" integrity sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q== dependencies: cosmiconfig "^5.0.0" import-cwd "^2.0.0" postcss-loader@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== dependencies: loader-utils "^1.1.0" postcss "^7.0.0" postcss-load-config "^2.0.0" schema-utils "^1.0.0" postcss-modules-extract-imports@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== dependencies: postcss "^7.0.5" postcss-modules-local-by-default@^2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.6.tgz#dd9953f6dd476b5fd1ef2d8830c8929760b56e63" integrity sha512-oLUV5YNkeIBa0yQl7EYnxMgy4N6noxmiwZStaEJUSe2xPMcdNc8WmBQuQCx18H5psYbVxz8zoHk0RAAYZXP9gA== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-value-parser "^3.3.1" postcss-modules-scope@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.1.0.tgz#ad3f5bf7856114f6fcab901b0502e2a2bc39d4eb" integrity sha512-91Rjps0JnmtUB0cujlc8KIKCsJXWjzuxGeT/+Q2i2HXKZ7nBUeF9YQTZZTNvHVoNYj1AthsjnGLtqDUE0Op79A== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-modules-values@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-2.0.0.tgz#479b46dc0c5ca3dc7fa5270851836b9ec7152f64" integrity sha512-Ki7JZa7ff1N3EIMlPnGTZfUMe69FFwiQPnVSXC9mnn3jozCRBYIxiZd44yJOV2AmabOo4qFf8s0dC/+lweG7+w== dependencies: icss-replace-symbols "^1.1.0" postcss "^7.0.6" postcss-selector-parser@^6.0.0: version "6.0.2" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz#934cf799d016c83411859e09dcecade01286ec5c" integrity sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg== dependencies: cssesc "^3.0.0" indexes-of "^1.0.1" uniq "^1.0.1" postcss-value-parser@^3.3.0, postcss-value-parser@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== postcss-value-parser@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz#482282c09a42706d1fc9a069b73f44ec08391dc9" integrity 
sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ== postcss@^7.0.0, postcss@^7.0.14, postcss@^7.0.23, postcss@^7.0.5, postcss@^7.0.6: version "7.0.36" resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.36.tgz#056f8cffa939662a8f5905950c07d5285644dfcb" integrity sha512-BebJSIUMwJHRH0HAQoxN4u1CN86glsrwsW0q7T+/m44eXOUAxSNdHRkNZPYz5vVUbg17hFgOQDE7fZk7li3pZw== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= prepend-http@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= pretty-error@^2.0.2: version "2.1.1" resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= dependencies: renderkid "^2.0.1" utila "~0.4" private@^0.1.6: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= promise-inflight@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= promise@^7.0.1, promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== dependencies: asap "~2.0.3" proxy-addr@~2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== dependencies: forwarded "~0.1.2" ipaddr.js "1.9.0" prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= psl@^1.1.24: version "1.4.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.4.0.tgz#5dd26156cdb69fa1fdb8ab1991667d3f80ced7c2" integrity sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw== public-encrypt@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== dependencies: bn.js "^4.1.0" browserify-rsa "^4.0.0" create-hash "^1.1.0" parse-asn1 "^5.0.0" randombytes "^2.0.1" safe-buffer "^5.1.2" pug-attrs@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-3.0.0.tgz#b10451e0348165e31fad1cc23ebddd9dc7347c41" integrity 
sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA== dependencies: constantinople "^4.0.1" js-stringify "^1.0.2" pug-runtime "^3.0.0" pug-code-gen@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-3.0.2.tgz#ad190f4943133bf186b60b80de483100e132e2ce" integrity sha512-nJMhW16MbiGRiyR4miDTQMRWDgKplnHyeLvioEJYbk1RsPI3FuA3saEP8uwnTb2nTJEKBU90NFVWJBk4OU5qyg== dependencies: constantinople "^4.0.1" doctypes "^1.1.0" js-stringify "^1.0.2" pug-attrs "^3.0.0" pug-error "^2.0.0" pug-runtime "^3.0.0" void-elements "^3.1.0" with "^7.0.0" pug-error@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-2.0.0.tgz#5c62173cb09c34de2a2ce04f17b8adfec74d8ca5" integrity sha512-sjiUsi9M4RAGHktC1drQfCr5C5eriu24Lfbt4s+7SykztEOwVZtbFk1RRq0tzLxcMxMYTBR+zMQaG07J/btayQ== pug-filters@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-4.0.0.tgz#d3e49af5ba8472e9b7a66d980e707ce9d2cc9b5e" integrity sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A== dependencies: constantinople "^4.0.1" jstransformer "1.0.0" pug-error "^2.0.0" pug-walk "^2.0.0" resolve "^1.15.1" pug-lexer@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-5.0.1.tgz#ae44628c5bef9b190b665683b288ca9024b8b0d5" integrity sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w== dependencies: character-parser "^2.2.0" is-expression "^4.0.0" pug-error "^2.0.0" pug-linker@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-4.0.0.tgz#12cbc0594fc5a3e06b9fc59e6f93c146962a7708" integrity sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw== dependencies: pug-error "^2.0.0" pug-walk "^2.0.0" pug-load@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/pug-load/-/pug-load-3.0.0.tgz#9fd9cda52202b08adb11d25681fb9f34bd41b662" integrity sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ== dependencies: object-assign "^4.1.1" pug-walk "^2.0.0" pug-parser@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-6.0.0.tgz#a8fdc035863a95b2c1dc5ebf4ecf80b4e76a1260" integrity sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw== dependencies: pug-error "^2.0.0" token-stream "1.0.0" pug-runtime@^3.0.0, pug-runtime@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-3.0.1.tgz#f636976204723f35a8c5f6fad6acda2a191b83d7" integrity sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg== pug-strip-comments@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz#f94b07fd6b495523330f490a7f554b4ff876303e" integrity sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ== dependencies: pug-error "^2.0.0" pug-walk@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-2.0.0.tgz#417aabc29232bb4499b5b5069a2b2d2a24d5f5fe" integrity sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ== pug@^3.0.1: version "3.0.2" resolved "https://registry.yarnpkg.com/pug/-/pug-3.0.2.tgz#f35c7107343454e43bc27ae0ff76c731b78ea535" integrity sha512-bp0I/hiK1D1vChHh6EfDxtndHji55XP/ZJKwsRqrz6lRia6ZC2OZbdAymlxdVFwd1L70ebrVJw4/eZ79skrIaw== dependencies: pug-code-gen "^3.0.2" pug-filters "^4.0.0" pug-lexer "^5.0.1" pug-linker "^4.0.0" pug-load "^3.0.0" pug-parser "^6.0.0" pug-runtime "^3.0.1" pug-strip-comments "^2.0.0" pump@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" integrity 
sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pumpify@^1.3.3: version "1.5.1" resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" inherits "^2.0.3" pump "^2.0.0" punycode@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= punycode@^1.2.4, punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== qjobs@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" integrity sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg== qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity 
sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== query-string@^5.0.1: version "5.1.1" resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== dependencies: decode-uri-component "^0.2.0" object-assign "^4.1.0" strict-uri-encode "^1.0.0" querystring-es3@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= querystring@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= querystringify@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" randomfill@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== dependencies: randombytes "^2.0.5" safe-buffer "^5.1.0" range-parser@^1.0.3, range-parser@^1.2.0, range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity 
sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== dependencies: bytes "3.1.0" http-errors "1.7.2" iconv-lite "0.4.24" unpipe "1.0.0" raw-loader@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-2.0.0.tgz#e2813d9e1e3f80d1bbade5ad082e809679e20c26" integrity sha512-kZnO5MoIyrojfrPWqrhFNLZemIAX8edMOCp++yC5RKxzFB3m92DqKNhKlU6+FvpOhWtvyh3jOaD7J6/9tpdIKg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" path-type "^1.0.0" "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity 
sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@^3.0.6, readable-stream@^3.1.1: version "3.4.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc" integrity sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" util-deprecate "^1.0.1" readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== dependencies: graceful-fs "^4.1.11" micromatch "^3.1.10" readable-stream "^2.0.2" readdirp@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839" integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ== dependencies: picomatch "^2.0.4" redent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= dependencies: indent-string "^2.1.0" strip-indent "^1.0.1" regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" regenerate@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" 
integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== regenerator-runtime@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.2: version "0.13.3" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5" integrity sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw== regenerator-transform@^0.14.0: version "0.14.1" resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.1.tgz#3b2fce4e1ab7732c08f665dfdb314749c7ddd2fb" integrity sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ== dependencies: private "^0.1.6" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" safe-regex "^1.1.0" regexp.prototype.flags@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.2.0.tgz#6b30724e306a27833eeb171b66ac8890ba37e41c" integrity sha512-ztaw4M1VqgMwl9HlPpOuiYgItcHlunW0He2fE6eNfT6E/CF2FtYi9ofOYe4mKntstYk0Fyh/rDRBdS3AnxjlrA== dependencies: define-properties "^1.1.2" regexpu-core@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6" integrity sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg== dependencies: regenerate "^1.4.0" regenerate-unicode-properties "^8.1.0" 
regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" unicode-match-property-value-ecmascript "^1.1.0" regjsgen@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.1.tgz#48f0bf1a5ea205196929c0d9798b42d1ed98443c" integrity sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg== regjsparser@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" relateurl@0.2.x: version "0.2.7" resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= renderkid@^2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== dependencies: css-select "^1.1.0" dom-converter "^0.2" htmlparser2 "^3.3.0" strip-ansi "^3.0.0" utila "^0.4.0" repeat-element@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved 
"https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" request@^2.83.0: version "2.88.0" resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.0" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign "~0.9.0" performance-now "^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.4.3" tunnel-agent "^0.6.0" uuid "^3.3.2" require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= require-main-filename@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= resolve-cwd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= dependencies: 
resolve-from "^3.0.0" resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= dependencies: expand-tilde "^2.0.0" global-modules "^1.0.0" resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" integrity sha1-six699nWiBvItuZTM17rywoYh0g= resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= resolve@^1.10.0, resolve@^1.3.2, resolve@^1.8.1: version "1.12.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.2.tgz#08b12496d9aa8659c75f534a8f05f0d892fff594" integrity sha512-cAVTI2VLHWYsGOirfeYVVQ7ZDejtQ9fp4YhYckWDEkFfqbVjaT11iM8k6xSAfGFMM+gDpZjMnFssPu8we+mqFw== dependencies: path-parse "^1.0.6" resolve@^1.15.1: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== dependencies: is-core-module "^2.2.0" path-parse "^1.0.6" ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= rfdc@^1.1.4: version "1.1.4" resolved 
"https://registry.yarnpkg.com/rfdc/-/rfdc-1.1.4.tgz#ba72cc1367a0ccd9cf81a870b3b58bd3ad07f8c2" integrity sha512-5C9HXdzK8EAqN7JDif30jqsBzavB7wLpaubisuQIGHWf2gUXSpzy6ArX/+Da8RjFpagWsCn+pIgxTMAmKw9Zug== rimraf@^2.5.4, rimraf@^2.6.0, rimraf@^2.6.1, rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== dependencies: hash-base "^3.0.0" inherits "^2.0.1" run-queue@^1.0.0, run-queue@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= dependencies: aproba "^1.1.1" safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= dependencies: ret "~0.1.10" "safer-buffer@>= 
2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== schema-utils@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf" integrity sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8= dependencies: ajv "^5.0.0" schema-utils@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== dependencies: ajv "^6.1.0" ajv-errors "^1.0.0" ajv-keywords "^3.1.0" select-hose@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= selfsigned@^1.10.7: version "1.10.7" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.7.tgz#da5819fd049d5574f28e88a9bcc6dbc6e6f3906b" integrity sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA== dependencies: node-forge "0.9.0" "semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== semver@^6.3.0: version "6.3.0" resolved 
"https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== dependencies: debug "2.6.9" depd "~1.1.2" destroy "~1.0.4" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" http-errors "~1.7.2" mime "1.6.0" ms "2.1.1" on-finished "~2.3.0" range-parser "~1.2.1" statuses "~1.5.0" serialize-javascript@^1.7.0: version "1.9.1" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-1.9.1.tgz#cfc200aef77b600c47da9bb8149c943e798c2fdb" integrity sha512-0Vb/54WJ6k5v8sSWN09S0ora+Hnr+cX40r9F170nT+mSkaxltoE/7R3OrIdBSUv1OoiobH1QoWQbCnAO+e8J1A== serve-index@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= dependencies: accepts "~1.3.4" batch "0.6.1" debug "2.6.9" escape-html "~1.0.3" http-errors "~1.6.2" mime-types "~2.1.17" parseurl "~1.3.2" serve-static@1.14.1: version "1.14.1" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" send "0.17.1" set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.3" split-string "^3.0.1" setimmediate@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== setprototypeof@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= dependencies: shebang-regex "^1.0.0" shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity 
sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" isobject "^3.0.0" snapdragon-util "^3.0.1" snapdragon-util@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: base "^0.11.1" debug "^2.2.0" define-property "^0.2.5" extend-shallow "^2.0.1" map-cache "^0.2.2" source-map "^0.5.6" source-map-resolve "^0.5.0" use "^3.1.0" socket.io-adapter@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz#2a805e8a14d6372124dd9159ad4502f8cb07f06b" integrity sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs= socket.io-client@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.1.1.tgz#dcb38103436ab4578ddb026638ae2f21b623671f" integrity sha512-jxnFyhAuFxYfjqIgduQlhzqTcOEQSn+OHKVfAxWaNWa7ecP7xSNk2Dx/3UEsDcY7NcFafxvNvKPmmO7HTwTxGQ== dependencies: backo2 "1.0.2" base64-arraybuffer "0.1.5" component-bind "1.0.0" component-emitter "1.2.1" debug "~3.1.0" engine.io-client "~3.2.0" has-binary2 "~1.0.2" has-cors "1.1.0" indexof "0.0.1" object-component "0.0.3" parseqs "0.0.5" parseuri "0.0.5" socket.io-parser "~3.2.0" to-array "0.1.4" socket.io-parser@~3.2.0: version "3.2.0" resolved 
"https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.2.0.tgz#e7c6228b6aa1f814e6148aea325b51aa9499e077" integrity sha512-FYiBx7rc/KORMJlgsXysflWx/RIvtqZbyGLlHZvjfmPTPeuD/I8MaW7cfFrj5tRltICJdgwflhfZ3NVVbVLFQA== dependencies: component-emitter "1.2.1" debug "~3.1.0" isarray "2.0.1" socket.io@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.1.1.tgz#a069c5feabee3e6b214a75b40ce0652e1cfb9980" integrity sha512-rORqq9c+7W0DAK3cleWNSyfv/qKXV99hV4tZe+gGLfBECw3XEhBy7x85F3wypA9688LKjtwO9pX9L33/xQI8yA== dependencies: debug "~3.1.0" engine.io "~3.2.0" has-binary2 "~1.0.2" socket.io-adapter "~1.1.0" socket.io-client "2.1.1" socket.io-parser "~3.2.0" sockjs-client@1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.4.0.tgz#c9f2568e19c8fd8173b4997ea3420e0bb306c7d5" integrity sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g== dependencies: debug "^3.2.5" eventsource "^1.0.7" faye-websocket "~0.11.1" inherits "^2.0.3" json3 "^3.3.2" url-parse "^1.4.3" sockjs@0.3.19: version "0.3.19" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d" integrity sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw== dependencies: faye-websocket "^0.10.0" uuid "^3.0.1" sort-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= dependencies: is-plain-obj "^1.0.0" source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-resolve@^0.5.0: version "0.5.2" resolved 
"https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" urix "^0.1.0" source-map-support@~0.5.12: version "0.5.16" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-url@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= source-map@^0.5.0, source-map@^0.5.1, source-map@^0.5.6, source-map@^0.5.7: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" integrity sha1-2rc/vPwrqBm03gO9b26qSBZLP50= dependencies: amdefine ">=0.0.4" spdx-correct@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: spdx-expression-parse "^3.0.0" 
spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== spdx-expression-parse@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: version "3.0.5" resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== dependencies: debug "^4.1.0" detect-node "^2.0.4" hpack.js "^2.1.6" obuf "^1.1.2" readable-stream "^3.0.6" wbuf "^1.7.3" spdy@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.1.tgz#6f12ed1c5db7ea4f24ebb8b89ba58c87c08257f2" integrity sha512-HeZS3PBdMA+sZSu0qwpCxl3DeALD5ASx8pAX0jZdKXSpPWbQ6SYGnlg3BBmYLx5LtiZrmkAZfErCm2oECBcioA== dependencies: debug "^4.1.0" handle-thing "^2.0.0" http-deceiver "^1.2.7" select-hose "^2.0.0" spdy-transport "^3.0.0" split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow "^3.0.0" 
sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" ssri@^6.0.1: version "6.0.2" resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.2.tgz#157939134f20464e7301ddba3e90ffa8f7728ac5" integrity sha512-cepbSq/neFK7xB6A50KHN0xHDotYzq58wWCa5LeWqnPrHG8GzfEjO/4O8kpmcGW+oaxkvhEJCWgbgNk4/ZV93Q== dependencies: figgy-pudding "^3.5.1" static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== dependencies: inherits "~2.0.1" readable-stream "^2.0.2" stream-each@^1.1.0: version "1.2.3" resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== 
dependencies: end-of-stream "^1.1.0" stream-shift "^1.0.0" stream-http@^2.7.2: version "2.8.3" resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== dependencies: builtin-status-codes "^3.0.0" inherits "^2.0.1" readable-stream "^2.3.6" to-arraybuffer "^1.0.0" xtend "^4.0.0" stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= streamroller@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-1.0.6.tgz#8167d8496ed9f19f05ee4b158d9611321b8cacd9" integrity sha512-3QC47Mhv3/aZNFpDDVO44qQb9gwB9QggMEE0sQmkTAwBVYdBRWISdsywlkfm5II1Q5y/pmrHflti/IgmIzdDBg== dependencies: async "^2.6.2" date-format "^2.0.0" debug "^3.2.6" fs-extra "^7.0.1" lodash "^4.17.14" strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" "string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== dependencies: emoji-regex "^7.0.1" is-fullwidth-code-point "^2.0.0" strip-ansi "^5.1.0" string.prototype.trimleft@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string.prototype.trimright@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58" integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: ansi-regex "^4.1.0" strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= dependencies: get-stdin "^4.0.1" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= style-loader@^0.23.1: version "0.23.1" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.23.1.tgz#cb9154606f3e771ab6c4ab637026a1049174d925" integrity sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" supports-color@6.1.0, supports-color@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== 
dependencies: has-flag "^3.0.0" supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.0: version "3.2.3" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" tapable@^1.0.0, tapable@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tar@^4: version "4.4.19" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3" integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA== dependencies: chownr "^1.1.4" fs-minipass "^1.2.7" minipass "^2.9.0" minizlib "^1.3.3" mkdirp "^0.5.5" safe-buffer "^5.2.1" yallist "^3.1.1" terser-webpack-plugin@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.1.tgz#61b18e40eaee5be97e771cdbb10ed1280888c2b4" integrity sha512-ZXmmfiwtCLfz8WKZyYUuuHf3dMYEjg8NrjHMb0JqHVHVOSkzp3cW2/XG1fP3tRhqEqSzMwzzRQGtAPbs4Cncxg== dependencies: cacache "^12.0.2" find-cache-dir "^2.1.0" is-wsl "^1.1.0" schema-utils "^1.0.0" serialize-javascript "^1.7.0" source-map "^0.6.1" terser "^4.1.2" webpack-sources "^1.4.0" worker-farm "^1.7.0" terser@^4.1.2: version "4.4.0" resolved 
"https://registry.yarnpkg.com/terser/-/terser-4.4.0.tgz#22c46b4817cf4c9565434bfe6ad47336af259ac3" integrity sha512-oDG16n2WKm27JO8h4y/w3iqBGAOSCtq7k8dRmrn4Wf9NouL0b2WpMHGChFGZq4nFAQy1FsNJrVQHfurXOSTmOA== dependencies: commander "^2.20.0" source-map "~0.6.1" source-map-support "~0.5.12" through2@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" xtend "~4.0.1" thunky@^1.0.2: version "1.1.0" resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== timers-browserify@^2.0.4: version "2.0.11" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== dependencies: setimmediate "^1.0.4" tmp@0.0.33, tmp@0.0.x: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" to-array@0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" integrity sha1-F+bBH3PdTz10zaek/zI46a2b+JA= to-arraybuffer@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" 
integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= dependencies: is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" extend-shallow "^3.0.2" regex-not "^1.0.2" safe-regex "^1.1.0" toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== token-stream@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-1.0.0.tgz#cc200eab2613f4166d27ff9afc7ca56d49df6eb4" integrity sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ= toposort@^1.0.0: version "1.0.7" resolved 
"https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= tough-cookie@~2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: psl "^1.1.24" punycode "^1.4.1" trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= tslib@^1.9.0: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" type-is@~1.6.17, 
type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" type@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3" integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow== typedarray@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= uglify-js@3.4.x: version "3.4.10" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== dependencies: commander "~2.19.0" source-map "~0.6.1" uglify-js@^3.1.4: version "3.13.5" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.13.5.tgz#5d71d6dbba64cf441f32929b1efce7365bb4f113" integrity sha512-xtB8yEqIkn7zmOyS2zUNBsYCBRhDkvlNxMMY2smuJ/qA8NCHeQvKCF3i9Z4k8FJH4+PJvZRtMrPynfZ75+CSZw== ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== unicode-canonical-property-names-ecmascript@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== unicode-match-property-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== dependencies: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" unicode-match-property-value-ecmascript@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" set-value "^2.0.1" uniq@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= unique-filename@^1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== dependencies: unique-slug "^2.0.0" unique-slug@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== dependencies: imurmurhash "^0.1.4" universalify@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= dependencies: has-value "^0.3.1" isobject "^3.0.0" upath@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== upper-case@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= uri-js@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" 
resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= url-join@^2.0.2: version "2.0.5" resolved "https://registry.yarnpkg.com/url-join/-/url-join-2.0.5.tgz#5af22f18c052a000a48d7b82c5e9c2e2feeda728" integrity sha1-WvIvGMBSoACkjXuCxenC4v7tpyg= url-parse@^1.4.3: version "1.5.3" resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.3.tgz#71c1303d38fb6639ade183c2992c8cc0686df862" integrity sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" url@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= dependencies: punycode "1.3.2" querystring "0.2.0" use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== useragent@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" integrity sha512-4AoH4pxuSvHCjqLO04sU6U/uE65BYza8l/KKBS0b0hnUPWi+cQ2BpeTEwejCSx9SPV5/U03nniDTrWx5NrmKdw== dependencies: lru-cache "4.1.x" tmp "0.0.x" util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util.promisify@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== dependencies: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" util@0.10.3: 
version "0.10.3" resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= dependencies: inherits "2.0.1" util@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== dependencies: inherits "2.0.3" utila@^0.4.0, utila@~0.4: version "0.4.0" resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.3.tgz#4568f0216e78760ee1dbf3a4d2cf53e224112866" integrity sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ== v8-compile-cache@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= verror@1.10.0: version "1.10.0" resolved 
"https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" vm-browserify@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== void-elements@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= void-elements@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-3.1.0.tgz#614f7fbf8d801f0bb5f0661f5b2f5785750e4f09" integrity sha1-YU9/v42AHwu18GYfWy9XhXUOTwk= watchpack@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== dependencies: chokidar "^2.0.2" graceful-fs "^4.1.2" neo-async "^2.5.0" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== dependencies: minimalistic-assert "^1.0.0" webpack-cli@^3.3.1: version "3.3.10" resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.10.tgz#17b279267e9b4fb549023fae170da8e6e766da13" integrity sha512-u1dgND9+MXaEt74sJR4PR7qkPxXUSQ0RXYq8x1L6Jg1MYVEmGPrH6Ah6C4arD4r0J1P5HKjRqpab36k0eIzPqg== dependencies: chalk "2.4.2" cross-spawn "6.0.5" enhanced-resolve "4.1.0" findup-sync "3.0.0" global-modules "2.0.0" import-local "2.0.0" interpret "1.2.0" loader-utils "1.2.3" supports-color "6.1.0" v8-compile-cache "2.0.3" yargs 
"13.2.4" webpack-dev-middleware@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-2.0.6.tgz#a51692801e8310844ef3e3790e1eacfe52326fd4" integrity sha512-tj5LLD9r4tDuRIDa5Mu9lnY2qBBehAITv6A9irqXhw/HQquZgTx3BCd57zYbU2gMDnncA49ufK2qVQSbaKJwOw== dependencies: loud-rejection "^1.6.0" memory-fs "~0.4.1" mime "^2.1.0" path-is-absolute "^1.0.0" range-parser "^1.0.3" url-join "^2.0.2" webpack-log "^1.0.1" webpack-dev-middleware@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3" integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw== dependencies: memory-fs "^0.4.1" mime "^2.4.4" mkdirp "^0.5.1" range-parser "^1.2.1" webpack-log "^2.0.0" webpack-dev-server@^3.3.1: version "3.9.0" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.9.0.tgz#27c3b5d0f6b6677c4304465ac817623c8b27b89c" integrity sha512-E6uQ4kRrTX9URN9s/lIbqTAztwEPdvzVrcmHE8EQ9YnuT9J8Es5Wrd8n9BKg1a0oZ5EgEke/EQFgUsp18dSTBw== dependencies: ansi-html "0.0.7" bonjour "^3.5.0" chokidar "^2.1.8" compression "^1.7.4" connect-history-api-fallback "^1.6.0" debug "^4.1.1" del "^4.1.1" express "^4.17.1" html-entities "^1.2.1" http-proxy-middleware "0.19.1" import-local "^2.0.0" internal-ip "^4.3.0" ip "^1.1.5" is-absolute-url "^3.0.3" killable "^1.0.1" loglevel "^1.6.4" opn "^5.5.0" p-retry "^3.0.1" portfinder "^1.0.25" schema-utils "^1.0.0" selfsigned "^1.10.7" semver "^6.3.0" serve-index "^1.9.1" sockjs "0.3.19" sockjs-client "1.4.0" spdy "^4.0.1" strip-ansi "^3.0.1" supports-color "^6.1.0" url "^0.11.0" webpack-dev-middleware "^3.7.2" webpack-log "^2.0.0" ws "^6.2.1" yargs "12.0.5" webpack-fix-style-only-entries@^0.2.1: version "0.2.2" resolved 
"https://registry.yarnpkg.com/webpack-fix-style-only-entries/-/webpack-fix-style-only-entries-0.2.2.tgz#60331c608b944ac821a3b6f2ae491a6d79ba40eb" integrity sha512-0wcrLCnISP8htV0NP1mT0e2mHhfjGQdNk82s8BTLVvF7rXuoJuUUzP3aCUXnRqlLgmTBx5WgqPhnczjatl+iSQ== webpack-log@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-1.2.0.tgz#a4b34cda6b22b518dbb0ab32e567962d5c72a43d" integrity sha512-U9AnICnu50HXtiqiDxuli5gLB5PGBo7VvcHx36jRZHwK4vzOYLbImqT4lwWwoMHdQWwEKw736fCHEekokTEKHA== dependencies: chalk "^2.1.0" log-symbols "^2.1.0" loglevelnext "^1.0.1" uuid "^3.1.0" webpack-log@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== dependencies: ansi-colors "^3.0.0" uuid "^3.3.2" webpack-shell-plugin@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/webpack-shell-plugin/-/webpack-shell-plugin-0.5.0.tgz#29b8a1d80ddeae0ddb10e729667f728653c2c742" integrity sha1-Kbih2A3erg3bEOcpZn9yhlPCx0I= webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== dependencies: source-list-map "^2.0.0" source-map "~0.6.1" webpack@^4.30.0: version "4.41.2" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.2.tgz#c34ec76daa3a8468c9b61a50336d8e3303dce74e" integrity sha512-Zhw69edTGfbz9/8JJoyRQ/pq8FYUoY0diOXqW0T6yhgdhCv6wr0hra5DwwWexNRns2Z2+gsnrNcbe9hbGBgk/A== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/wasm-edit" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" acorn "^6.2.1" ajv "^6.10.2" ajv-keywords "^3.4.1" chrome-trace-event "^1.0.2" 
enhanced-resolve "^4.1.0" eslint-scope "^4.0.3" json-parse-better-errors "^1.0.2" loader-runner "^2.4.0" loader-utils "^1.2.3" memory-fs "^0.4.1" micromatch "^3.1.10" mkdirp "^0.5.1" neo-async "^2.6.1" node-libs-browser "^2.2.1" schema-utils "^1.0.0" tapable "^1.1.3" terser-webpack-plugin "^1.4.1" watchpack "^1.6.0" webpack-sources "^1.4.1" websocket-driver@>=0.5.1: version "0.7.3" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.3.tgz#a2d4e0d4f4f116f1e6297eba58b05d430100e9f9" integrity sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg== dependencies: http-parser-js ">=0.4.0 <0.4.11" safe-buffer ">=5.1.0" websocket-extensions ">=0.1.1" websocket-extensions@>=0.1.1: version "0.1.4" resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@^1.1.1, which@^1.2.1, which@^1.2.14, which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" with@^7.0.0: version "7.0.2" resolved "https://registry.yarnpkg.com/with/-/with-7.0.2.tgz#ccee3ad542d25538a7a7a80aad212b9828495bac" integrity 
sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w== dependencies: "@babel/parser" "^7.9.6" "@babel/types" "^7.9.6" assert-never "^1.2.1" babel-walk "3.0.0-canary-5" word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= worker-farm@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== dependencies: errno "~0.1.7" wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== dependencies: ansi-styles "^3.2.0" string-width "^3.0.0" strip-ansi "^5.0.0" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= ws@^6.2.1: version "6.2.1" resolved 
"https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== dependencies: async-limiter "~1.0.0" ws@~3.3.1: version "3.3.3" resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA== dependencies: async-limiter "~1.0.0" safe-buffer "~5.1.0" ultron "~1.1.0" xmlhttprequest-ssl@~1.5.4: version "1.5.5" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e" integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4= xtend@^4.0.0, xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== "y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.2, yallist@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^11.1.1: version "11.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" integrity 
sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs-parser@^13.1.0: version "13.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs@12.0.5: version "12.0.5" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== dependencies: cliui "^4.0.0" decamelize "^1.2.0" find-up "^3.0.0" get-caller-file "^1.0.1" os-locale "^3.0.0" require-directory "^2.1.1" require-main-filename "^1.0.1" set-blocking "^2.0.0" string-width "^2.0.0" which-module "^2.0.0" y18n "^3.2.1 || ^4.0.0" yargs-parser "^11.1.1" yargs@13.2.4: version "13.2.4" resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== dependencies: cliui "^5.0.0" find-up "^3.0.0" get-caller-file "^2.0.1" os-locale "^3.1.0" require-directory "^2.1.1" require-main-filename "^2.0.0" set-blocking "^2.0.0" string-width "^3.0.0" which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^13.1.0" yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk= buildbot-3.4.0/www/guanlecoja-ui/000077500000000000000000000000001413250514000166645ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/LICENSE000066400000000000000000000021071413250514000176710ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2014-2015 The Buildbot developers Permission is hereby granted, 
free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. buildbot-3.4.0/www/guanlecoja-ui/Readme.md000066400000000000000000000160671413250514000204150ustar00rootroot00000000000000# Guanlecoja-ui Note: This package is not maintained for uses outside Buildbot. Implements generic application base for angular.js, ui.router and bootstrap3, with less. To use: * `bower install guanlecoja-ui` * Include `scripts.js` and `styles.css` to your page. * Include boostrap3, and font-awesome css. * Make your app depend on `guanlecoja.ui` angular module. Directives and services are prefixed with `gl` ## Directives ### glPageWithSidebar Implements styles and behaviour for a menu with following features: * Menu appears from left side, when mouse over * Supports 1 level of sub-menus * Menus icon from Font-Awesome * Programmatic declaration, integrated with ui-router. 
Extracts menu from $state * Supports pin for always expanded menu The directive takes no argument and is configured via `glMenuServiceProvider` and `$stateProvider` ### glNotification Implements styles and behaviour for widget centralizing your app's notifications: * Listens for $http errors, and automatically shows them as notification * associated service for other components to broadcast notification The directive takes no argument and is configured via `glNotificationService` ### glTopbar Implements a topbar holding the page's title and breadcrumb as well as optional contextual widgets. It automatically calculate the breadcrumb from $state, with menu group information. The directive takes no argument and is configured via `glBreadcrumbService`, `$stateProvider` and `glMenuServiceProvider` The directive is transcluded. The childrens are included inside bootstrap3's ul.nav.navbar-nav.pull-right. It is made for contextual widgets like glNotification, or an authentication menu (not included). ### glTopbarContextualActions Implements buttons on the topbar for implementing contextual actions. It automatically generate buttons given the configuration. The configuration is reset at each $state change. The directive takes no argument and is configured via `glTopbarContextualActionsService` ## Services ### glMenuServiceProvider Configuration entrypoint for the sidebar menu #### `glMenuServiceProvider.addGroup(group)` Declare a group in the menu. takes on object with the following attributes: * `name`: Name of the menu, identifier for reference in menu items * `caption`: Text of the menu, as shown in the UI * `icon`: Icon name from font-awesome. E.g `bug` will use the `fa-bug` class for the icon * `order`: The menu is reordered according to this key. This allows to declare menus in different modules, without caring about the module load order. #### `glMenuServiceProvider.setDefaultGroup(group)` Declare a group to be the default group. 
Can be used directly after the `addGroup(group)` call. #### `glMenuServiceProvider.setFooter(footer_items)` Declare the menu footer links. The menu contains up to three footer button, that can be used for arbitrary external links. `footer_items` is a list of objects with following attributes: * `caption`: text of the button * `href`: link of the button #### `glMenuServiceProvider.setAppTitle(title)` Specify the application title. The text is shown in either the side menu, and in the topBar #### `$stateProvider.state(state)` Menu items are defined in `$stateProvider.state`'s data. glMenuService scans the list of states to find the menu items. You can use `state.data` for the usage you want, but glMenuService will look at the following attributes: * `group`: name of the group to which append this menu-item * `caption`: text of the menu-item ### glBreadcrumbService Set the breadcrumbs of glTopMenu. In some cases, the automated breadcrumb from glTopMenu is unsuitable. In this case, you can use glBreadcrumbService to override it. #### `glBreadcrumbService.setBreadcrumb(breadcrumb_list)` breadcrumb_list is a list of objects containing following attributes: * `caption`: text of the breadcrumb item * `href`: optional href for the breadcrumb * `sref`: optional sref for the breadcrumb. see ui.router's doc for more information about sref format. Dont put both `sref` and `href` argument, this does not make sense and is not supported. ### glTopbarContextualActionsService Set the contextual actions of glTopMenuContextualActions. You must instantiate the gl-topbar-contextual-actions directive inside a gl-topbar #### `glTopbarContextualActionsService.setContextualActions(action_list)` action_list is a list of objects containing following attributes: * `caption`: text of the button item * `help`: text for the help tooltip * `icon`: optional icon for the button * `action`: function called when the button is clicked ### glNotificationService API for storing notifications. 
glNotification directive uses this service to display the notifications. #### `glNotificationService.notify(notification)` adds a notification to the notification system. notification is an object with following attributes: * `title`: Title of the notification * `msg`: Longer message for the notification * `group`: glNotificationService supports grouping several notification of the same group together. Messages of the same group will be concatenated with carriage return, and share the same title (only first title is kept, other titles are ignored) #### `glNotificationService.error(notification)` shortcut for `notify` with title to be `Error`. #### `glNotificationService.network(notification)` Shortcut for `notify` with title to be `Network Error`, and group to be `Network` #### `glNotificationService.dismiss(id)` Remove a notification from the list. #### `glNotificationService.notifications` The stored list of notifications. ## ChangeLog * 2.0.0: This package is not maintained for use outside Buildbot. Rewrite the package from CoffeeScript to plain JavaScript. * 1.8.0: body is no more height:100%, in order to make full height page, user need to use height: 100vh. left-bar pinned preference is now stored in the browser. * 1.7.0: topbar is now responsive. It will collapse on mobile. * 1.6.3: Adds setDefaultGroup(group) to glMenuServiceProvider. This option allows to expand a menu by default * 1.6.2: Fix width issues of content which push some content off-screen for certain monitor size * 1.6.1: rebuilt with guanlecoja 1.7.2, which populates the BOWERDEPS metadata * 1.6.0: Massive upgrade of dependencies: - jquery 2.1 -> 2.2. - Angular 1.4 -> 1.5. - Lodash -> 4.11. - Drop underscore.string as lodash got most important string utils. 
- angular-ui-bootstrap 0.11 -> 1.3.2: Most important change is that ui-bootstrap directives are now prefixed with "uib-" e.g: in jade: ".dropdown-toggle" -> ".dropdown-toggle(uib-dropdown-toggle)" This makes the markup more complicated, but the old 0.11 documentation is not even available anymore, so its best to upgrade. * 1.5.0: Switch to angularJS 1.4.3 * 1.4.2: fix the sidebar when screen is > 800px * 1.4.1: bump ui-router version to 0.2.13 * 1.4.0: add topbar-contextual-actions directive and associated service * 1.3.1: Fix auto scrollbar on sidebar menu * 1.3.0: Switch to angularJS 1.3.1 * 1.2.3: Initial Release ## Credits Original Design by Elliot Hesp: https://github.com/Ehesp/Responsive-Dashboard buildbot-3.4.0/www/guanlecoja-ui/karma.conf.js000066400000000000000000000003641413250514000212440ustar00rootroot00000000000000const common = require('buildbot-build-common'); module.exports = function karmaConfig (config) { common.createTemplateKarmaConfig(config, { testRoot: 'src/tests.webpack.js', webpack: require('./webpack.config') }); }; buildbot-3.4.0/www/guanlecoja-ui/package.json000066400000000000000000000020561413250514000211550ustar00rootroot00000000000000{ "name": "guanlecoja-ui", "plugin_name": "guanlecoja-ui", "description": "Implements generic application base for angular.js, ui.router and bootstrap3, with less, and coffeescript.", "version": "2.0.0", "license": "MIT", "readmeFilename": "Readme.md", "main": "dist/guanlecoja-ui.js", "style": "dist/styles.css", "scripts": { "build": "rimraf dist && webpack --bail --progress --profile --env dev && webpack --bail --progress --profile --env prod", "build-dev": "rimraf dist && webpack --bail --progress --profile --env dev", "test": "karma start", "test-watch": "karma start --auto-watch --no-single-run" }, "devDependencies": { "angular-mocks": "^1.7.9", "buildbot-build-common": "link:../build_common", "rimraf": "^2.6.3" }, "dependencies": { "@uirouter/angularjs": "^1.0.15", "angular": "^1.7.9", 
"angular-animate": "^1.7.9", "angular-ui-bootstrap": "^2.5.6", "jquery": "^3.4.0", "lodash": "^4.17.11" } } buildbot-3.4.0/www/guanlecoja-ui/postcss.config.js000066400000000000000000000001711413250514000221630ustar00rootroot00000000000000module.exports = { plugins: { autoprefixer: { browsers: ['last 2 versions'] }, }, }; buildbot-3.4.0/www/guanlecoja-ui/src/000077500000000000000000000000001413250514000174535ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/app/000077500000000000000000000000001413250514000202335ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/app/index.jade000066400000000000000000000017021413250514000221670ustar00rootroot00000000000000extends layout.jade block scripts script(src="tests.js") block content gl-page-with-sidebar gl-topbar.noselect gl-topbar-contextual-actions gl-notification li(uib-dropdown) a(uib-dropdown-toggle,href='#') img.avatar(src='img/avatar.jpg') ul.dropdown-menu.dropdown-menu-right(uib-dropdown-menu) li.dropdown-header | Joe Bloggs li.divider li.link a(href='#') | Profile li.link a(href='#') | Menu Item li.link a(href='#') | Menu Item li.divider li.link a(href='#') | Logout ui-view buildbot-3.4.0/www/guanlecoja-ui/src/app/layout.jade000066400000000000000000000014441413250514000224000ustar00rootroot00000000000000doctype html html.no-js(xmlns:ng='http://angularjs.org', xmlns:app='ignored') head meta(charset='utf-8') meta(http-equiv='X-UA-Compatible', content='IE=edge,chrome=1') title guanlecoja ui demo meta(name='description', content='Guanlecoja UI demo') meta(name='viewport', content='initial-scale=1, minimum-scale=1, user-scalable=no, maximum-scale=1, width=device-width') link(rel='stylesheet', href='//maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css') link(rel='stylesheet', href='//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css') link(rel='stylesheet', href='styles.css') body(ng-cloak, ng-app="app") block content block footer script(src="vendors.js") 
script(src="scripts.js") block scripts buildbot-3.4.0/www/guanlecoja-ui/src/img/000077500000000000000000000000001413250514000202275ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/img/avatar.jpg000066400000000000000000000417401413250514000222150ustar00rootroot00000000000000ExifII*Duckyd+http://ns.adobe.com/xap/1.0/ Adobed@@     !1 AQ"2aB#S qRr3bC$Tu6wc4t%Ֆ7W8d5U&1!AQq2a"B4r ?uqa7DOP{cZc*L-w1+B$m&6 R EȜώ+ޑ>>!ņfۯI,cm9"*" _hdu^y^i{kfQկwERc_;.-ZP֞X$B[N_7Gp$;+;}'37דݓƣFVi);7]7I|U?"[(U:qGT 7}7nGlUpqZL*IS]F2%ߥ?+3}llqbC:&dCPSnDxل_V傭50:#YyuytKMK)Xb;;' T-2Qr>T[o/FDwϹ)H S~:WC ɪi c-ڙqj]0F"oY+TʹxIJ\YoU_߈d\1&å-}O+JiLL3Hx#2F EFXBigZN6Kl&IoLgKyffZ֣=LkmϨCy}F' gVBUes1L.qL4i\i\4֐s K[6nrs_q\jk2٘NIS/ǡr#Ip߆qJi1t ?g: 5uwBl#RU)%צ9IhIP m>r{K彊G>saMQTҶg.*j^ڑanZ'%7dyԮk&]n6^Ĝ7 yoGX1dm{uKqV MH[NKmmijmed/f}JBd`G|,׮<q+nQHZ"Sm_=EDzK9)Km&&Z"$Ku% 9E"47vmiy8vr>e=b6d<;&}nuy[; 3~\[m $ Fai!!%ltT*BTCKA+Ga_O/^ZH5n[TQc8e$49j5LH@AM¶[ms܋؝k]2&qJ I7,mTFyPVu9.#9$E z.m3ðmhNIR_?|L1g@|s  7jf*6g$k!-XQB0ˮISudDN'E'#˦󪽇v86bu=G:s+1o!xyZnTgZ6 MY0]ΉpeƼ-* b2L=o*/iJ̎4cC2"33Ћ""30g_YZCbd;$ä/mG 'tz&iٯe"^OWgN0fsK ׌ZfGRw73bɶ<"f:3 4K,;DԪJ}'m+sk\@g=WX֑SRk/Kx)ٳS.'ҕ PLbr޷lеE9oL=K?丕bƥr5EEb Y 'e;5Ui&k5Um믴p30= 7#=c[oO [WX[GmZ j$qOh˴3i_oc@i5SeD, e"KS l0j.Ԑ2.mv-c%4wvSK]nkA|ڹ*Dzm!'7ch|&fdreė_.]}IU@sN˃:5&$ȒSnR֓Je]핇no>ypw5n $$,>dM˱e>Tʫ<Ŭ,o7O}͍a N7xR#u9 &% ^hz{˞#4d}Yo&So zU?v+}E$ط,&ͣ MS1ݽm4( A (Ãtq F;i$iJ"""*RLT\HfFGq ?,ݽPmx i ?I3+d4RLUMelޥ|ߥc;tغĒ+?f`.`L駨яfiGk9I{&.Sj&>@meR z5I҈l2?C$z󮟭3#Ӊp2F^gTgvEBܜ.1KaqI܆aGI<9%-)簪J^F60##"2222###Ԍp22ONմ>2[p 2kcΛT}- Il]?Sq]qJBMYK 4)Hr gmwP7ӇH=Je({R#䉎NkdoElְ)ȔR37$JyFQy)J3fcf"H"=|23a&<E",b"ᩛΧBݮmƢla2IZibGdD\YY]Q[)GzJ͏&@D)eN-ƙ [~,Ω4 2>3[t.llwY"]3m#2^f{ !ş*3ʢIÿNGϒW{e޿qb%&e" 175ܚ(27wrvl GToj42R^\"539pJs|!Zk˚bJL?ԽG҃$[ۘwmݍ-ɰۋ F'>$%bŵ->폓)j8{YTB/+r<7$u71LM\{Nf>$buG@/QsV֧+ySIYagi8S+eyfl-HQar4HAhfj2x!%)DЃBq?ZPJO |@cm)JGO]0n{򺵹jo}-SzM"屇I=+g"Fƞi H&^-uOm69U-ӭe1줔ij;[)(Bzt=ӥ.v O3o>kg&C+\O-Xi\'$89[֮'.7aKgo}kq9#c#^5+Rjf6\\5KSXV\~Ңdۊ魩UuecNT̘3[N%ZV#?{$wUmvml^I.n~ܦV}m C%ʐhGtm#-f[]W^*4 %I[uۭ]LʭZBίZb8Z͠/*%; mcn 
nѲvEF!r2v;CVn|yPl@{U[qL$b<ͿSVB&ey6dm9ݭ2! GeČ"p|~>iʷNa_k)s̷C>(=3\V-LX{0e0f3>""..BEtؤ(4>R?G2/ˢ ll\-3y|iU|N0n_Y)SyRq\4<"(-~yeدCTPkF K$35)[ ?RRYV% ^nT* moKoES޴G1§ ;ր8xgG[&56}l22#LFFLsv"lCQ l$)JH%)IREp""!Kj~y,bi?*-8]9Lfџn-2Y- mkm-Zq8fhQ22ԌJ[@~SG\UH"u.*YI+Ӣ Fi~a'C'NTkv[#q>&bEIp(pi!6D/0K`FRP:-xs5c,Z>Z,3SmTC+3%`vJq3nQ0NkȂ$^u1R*L;hր6)ZT+BiZD*-FdɳexE2)QAxeb] +c\U#􉨽<`7 LH^q{J*ȺAJS?zGo[*|#?є}SIj2YLҤ*ITIROE%I= *IGČJ@M\˻(uU漖Z.<#{~|8Lr<"T>m.itGN hHs+gm<"YS8"sЗ8lM18q?D\L]n`Ǐ@;6ynl.]Z- &F})AR*E^˱4}.$ۭ8Jд%I= ZR1*"\zc_=VN z Y~bl›`m^)#,6izYqE+R~gH5@: K( ώܨSOyF]d|HCI=Ua'(=⿥G[D3$!e{D}b, fggS۱'\DǧI C6D~9rjMII1j6r<DDDDDDEp"""/A5@cl?̌l)7eR3Tt))qQZ^,~zcy%ˆfgp[D/堈H̸4ׁq0/) SiGqh׌$T3L7ONW$IB~P$iY_Iz!*zP#j9KO) V|ݣ&bwy,P-c_*v[:nWTehCMUTrU.*-=8y6rvD|@ʀ_eŶĸ B lQے8#JиGME u!xH"{qd\VȤdOQ4(/N;"|O%#}wyVԋ`sS)'T_S)KR)kYֵJ?Yր/ccwyb^E &%)-L"/Y I ̡f~ޏ$͙Jy mm2ArCHeeZ]D²I{.#] \Nҽ I K!İSfԈZK-]LvvsKGvTO,KlGe+qz-R:S jmgy#[ѻL/ ے;,;Tg#W2\ Өyj%%]_#.2>-[&>n7ɯ'%Aspxܯ!p:r…!Z0(F^a};bkXc{e[=X7װ)~8ݼILGe7CFx9HlApK G|St2c%ksRm|[u.T ڈ85 I;/&iaYa<6Use6OZݟuq&eGڷ211Hm<:II㤘[Ps2#BTdaK`6 یNr&VuvjjFeA}K?_&3 A-j,%*W"Z9SO N;շ?ZOCLۋ dy\0 "SE3Ћ0Uݘo)'2 $FJuqKf!XTVĎH؉+Hb4X4f.Vَ)CL kSԉ2tMlAfYtrL'袤;&}iT+"ZlςRf*S )])ͲeVc+GUQ툸) (ԽդfU5c>LqF:Jx5nP\4ќjQa&"7^Ktb: Z:q޻Λzmmws1Q́CoEZ2jDq'H|zcY-&kc31 k0ʱ3Sh*QjF[Y~Qͮl}[K)~1RqIv2>/eNs8ȅ6kk?7zm׹gI?_%d}i7v"vEjfn,%.cOB_x/o5ˊƺJ#cQ{j|멥S1%JQ^D:kM9PJ##(2[ĤMڥ :0uE17[=!ǹ|}/Q/ 5(-֢I"=53_Lcv`\NQ9`{Xi\JXU&LJ?H4dm{;!o7n+FXƮնlYq\չ 8ەVO;- IJl<H|H߹ĞtdkrЋٌf'%-{o{kԩlʏ-&KJZx'! 
J{}4nd4풉n&e#8Г:^Z3]?B@D8E-_Xgټ^=E"mQdf'ZfU5DY9.]y\(ɹلqY}Tb׃qi^S_3{ΊW*`iZ ot>RҸu;pt@eG =?RG'ZXxǧkk ׻ ͋mTfw2|k"L,׌_6X~MJA:9K:3Ф=&ZCdßw;]fPU}mdwܝqs1Qב|ahH/e \vx۹os ӯGuv쬜e]AuJJR[.g)m{]d %ܡ)$xg򖄓va)qbgߑm}gʰ}K1I=%t#ԾFy%ZcX 7?6/q%8+K2On2lW%RKZ^-GKqZ[Mgyo^>jd(YzKh;/E\ Ȯq[J%FY9^OFq ͸KmEdR3;1c*ݲy,Z̦;diO}gFG4 AptYSuLouO4hrucnKQ'Vf%ZJD̽(O n&mP;n6[KS5Hm--;1ԑƭr}%iO@DyY9> |D==ebˏfGFIʦhqoddOw_3,g9g1q";mITP;Ws\k7Q@D+OO#vB 2y.HuY3[h۔帼WJm\iȼUsbjU.$6 lNhfZB#Gq\F_>?O!|vyPtJf{GjFgg7E-k)Lq1~^?w%E?R?:j,~M/蓇mihImMQhkDeZ0E0+TW=Y_yl-zzy[ZB/oǯk'`\OWgĞ&uF}Wa B4l8T;MPc&Kѩ2.|>*[&$&Fgl-6UÇSDL2ZIkoY®r|'mV7AVʟou:=|D:#5Z%$fd@"/4ѕ7ڙKa{Bu3O/R&jM.#2 36ܻ)˯jJ䥲ٞ ;ve%ؒ:L`smǒJ#]8}-AA-s "dR@.TRW,e;jz Uj25kNI>eeem$l)/+\Oq6˅mJ!$$ͫ~C'u׬McJ/j?Gx)Z*B\XOn{v7_ƖBu[YVۍkIiZFF^!qgKh ;8aޔbԖ̣*v8se"TP'8s;{_Yݷ_^R(I{JQ$QE&1|:OTc}GrtloT.i.rl瑧V F ^Y v5yUZTW]2=&gXgµ吸9"ĮadXݶ7\;|8#fDq 2fhQpQp+?ydgYMJIG h+P-VOiN)&go+\r.YMFWC(6J]TwQ,",2s= })!wXk]jNpǛ䶉dK,r9NIkJ)&ɖ<{6u*qreCa+x:%hrA +)=K&D2JI)&)*#JF^2/{+[剳W'?,؈Υrݗ6[`V5 #W+b/JM"dk-$:席ySJ DzB;O]fͦcU)ȰU,S8dj=ģC=UJ3Q tm4]2".'Tf܌>iyZ)hq&FEininV=WM]QaKQ9o^"RP%/m I3H9~td_^T*AJ{&$iNSI ;8޳gBB6khAW6hj9U'H91s+!K!$LoJO-Fj[Z>&hn[g.ma<˩%.HuPRT4@kOa'%1|C 檱WVUV6("vd0:\TH& 9FMɘv([mG_}Wq % ڗB^5# BQ 2ML ;-D<vTWdP!%%>JR̍JxJ j}$/nzkVplȓ{'1[ yKa9 es7"SZ`E_!NF &X\VWˉnϵ2<8?YEŸDKD+Qȫ1{,"EEOַ #%򤼴 R5cY϶07 79"(=[y%"N9NfF~ٴ6|$]>&|Lω33ff) Qq>lM=w_!y/{-%ZOߌؤ{mͿa*zT:] Mᢨ3ϋgOc!?ߗ2F=dDl7 <uwȠd5qL ÙȞB,)N}f9'Y__b4OwA*N5)=27s8Ӛ)x6F&̄ȏVGH^űtnqAsWmsm7+)od8[k_#%)j]c&r*nʥǢ}]ŷ3L.K[7HScM}U+Jm]3iNd| j2?FzNí>îw[~<[/}).$IQߝA=)QV'0,&Min\>T)qxoI1p~/5x=>L'p2-Q Cw;q/7gprq\]ZԴ⻺|jkj;i.fc]cvVh*,n y";1ՐP*\dɶ˙FEƧ wzF9’F#A1mG!])uQw4tG;rA\F{'^}yƠE3$Cj4.c74É30WOƔ2>P2ԨRQ)P䶗ii3Jڑĭ&FdiQ)ZQq$+R]#rKdw Xuk?--;S[E7D~W^W/.?dz>>0tui5hDmZݬqh]w۳q)JMJ$JQRE""^}q{wʺw\-۝Sd,\m cʴ{TQOib*p@l'Q;ӶSXSdl:Wd= #bzY$Td+,e%ݣ܈؎S\AjdMń]v;JG˄̩Q䠉4DD p:0B҂֮kT0,c-Mȃg]!p4- %_.nSô;{L! 
/-ƠXO߹ᴪ4"\F}D:]p]AҎMzF&TZOC!u$fE],ԌdddeK_=;nۭ~m:o-J6\&2"xpؘEա.!HYs%iRGiQ#i%b2 ;+iټŸnI;STPI3_{k|6^|ψJm]JقU ZXsO+?c򷳉q#d)W/.?dz>>snr]36/ʨVi3#8'= #q&Zf..|^>ϴlI&~C&a}*%H%[Qo-3Q~p+҆Kw7}>g-j}uͽW,6=)%ٌf|t8uX_]4 1[KQA#D%#Gm(BK$CNـsop3vϺlI%(uەB 3.kO  1Al6mNM6y+ b+K٥Zs%yFĂ/~+'A.+,bCq /95ϑvU[ ԋgʢ(q M_Ʊl'p2V.asD(3xPӼϯOL#2lGsi{ĖDH%fT}c[vkͭ MyFHǡ잓ԒRTQOh9gJ)_g͑o=W-W-17(_uqȄspYVÇ,}_1R)[Cm6]qhmBu\Q!m$jqJRDfqF=>7{l;B7z?Ǹ ݨqzHs5LEیh *3/7{ " W ?0ەi ~a2yQeuщʃ[1[IL\o*t\- K?ϊk|D.cS[AMveVU+Sk4'/m_-D L3I>##߷9oˮZ\l () IHżsGxE^A`1t'8@\/q7bz%\Djiod䛆\Tm_wg_onA.~s7)`wNL{Ry&..G㬙Nx|q~񐝯L+Y)3333=L3>32 a;r`ޓ%ͨk3fD-٨Bauα| *&\BYBʚqM䰣'Yqm%EĈ+ٴV^Wdh45{L#Gxƥ|$E7;RcskVZEI=CDdzq#պ #>6s5(_@e5;˂rkԝ<= *<wsz #d5^W/.?dz>>mM iqKJ2bUJ"ƯYF1gK6?>d ϬJzlܕ)3qTCb1;*ǰZr,%<4ɮi[">,ds:σm!J> eKQC;e_DDDDDDDDDDD}܍տm{ZM} @$%*oni+^q$FIpg0ݳbC3BvU6ڴ-d9cw1~HSls(me^buildbot-3.4.0/www/guanlecoja-ui/src/module/000077500000000000000000000000001413250514000207405ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/module/breadcrumb_service/000077500000000000000000000000001413250514000245665ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/module/breadcrumb_service/breadcrumb.service.js000066400000000000000000000010461413250514000306720ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // a simple service to abstract breadcrumb configuration class glBreadcrumb { constructor($rootScope) { this.$rootScope = $rootScope; } setBreadcrumb(breadcrumb) { this.$rootScope.$broadcast("glBreadcrumb", breadcrumb); } } angular.module('guanlecoja.ui') .service('glBreadcrumbService', ['$rootScope', glBreadcrumb]); buildbot-3.4.0/www/guanlecoja-ui/src/module/breadcrumb_service/breadcrumb.service.spec.js000066400000000000000000000013351413250514000316240ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created 
because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('breadcrumbService', function() { beforeEach(angular.mock.module("guanlecoja.ui")); // simple test to make sure the directive loads it('should forward call to setBreadcrumb via $broadcast', inject(function($rootScope, glBreadcrumbService) { let gotBreadcrumb = null; $rootScope.$on("glBreadcrumb", (e, data) => gotBreadcrumb = data); glBreadcrumbService.setBreadcrumb({foo:"bar"}); $rootScope.$digest(); expect(gotBreadcrumb).toEqual({foo:"bar"}); }) ); }); buildbot-3.4.0/www/guanlecoja-ui/src/module/main.module.js000066400000000000000000000012571413250514000235130ustar00rootroot00000000000000import 'angular-animate'; import 'angular-ui-bootstrap'; import 'lodash'; import '@uirouter/angularjs'; if (window.T === undefined){ window.T = {} } angular.module("guanlecoja.ui", ["ui.bootstrap", "ui.router", "ngAnimate"]); require('./breadcrumb_service/breadcrumb.service.js'); require('./menu_service/menu.service.js'); require('./notification_service/httpinterceptor.js'); require('./notification_service/notification.service.js'); require('./notification_widget/notification.directive.js'); require('./page_with_sidebar/page_with_sidebar.directive.js'); require('./topbar-contextual-actions/topbar-contextual-actions.directive.js'); require('./topbar/topbar.directive.js'); buildbot-3.4.0/www/guanlecoja-ui/src/module/menu_service/000077500000000000000000000000001413250514000234245ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/module/menu_service/menu.service.js000066400000000000000000000054661413250514000264000ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS206: Consider reworking classes to avoid initClass * DS207: Consider shorter variations of null checks * Full docs: 
https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class GlMenu { static initClass() { this.prototype.appTitle = "set AppTitle using GlMenuServiceProvider.setAppTitle"; this.prototype.$get = ["$state", function($state) { let group; for (let state of Array.from($state.get().slice(1))) { ({ group } = state.data); if (group == null) { continue; } if (!this.groups.hasOwnProperty(group)) { throw Error(`group ${group} has not been defined with glMenuProvider.group(). has: ${_.keys(this.groups)}`); } this.groups[group].items.push({ caption: state.data.caption || _.capitalize(state.name), sref: state.name }); } for (let name in this.groups) { // if a group has only no item, we juste delete it group = this.groups[name]; if ((group.items.length === 0) && !group.separator) { delete this.groups[name]; // if a group has only one item, then we put the group == the item } else if (group.items.length === 1) { const item = group.items[0]; group.caption = item.caption; group.sref = item.sref; group.items = []; } else { group.sref = "."; } } const groups = _.values(this.groups); groups.sort((a,b) => a.order - b.order); const self = this; return { getGroups() { return groups; }, getDefaultGroup() { return self.defaultGroup; }, getFooter() { return self.footer; }, getAppTitle() { return self.appTitle; } }; } ]; } constructor() { this.groups = {}; this.defaultGroup = null; this.footer = []; } addGroup(group) { group.items = []; if (group.order == null) { group.order = 99; } this.groups[group.name] = group; return this.groups; } setDefaultGroup(group) { return this.defaultGroup = group; } setFooter(footer) { return this.footer = footer; } setAppTitle(title) { return this.appTitle = title; } } GlMenu.initClass(); angular.module('guanlecoja.ui') .provider('glMenuService', [GlMenu]); buildbot-3.4.0/www/guanlecoja-ui/src/module/menu_service/menu.service.spec.js000066400000000000000000000106511413250514000273210ustar00rootroot00000000000000/* * decaffeinate 
suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('menuService', function() { beforeEach(angular.mock.module("guanlecoja.ui", function($stateProvider, glMenuServiceProvider) { let group; const _glMenuServiceProvider = glMenuServiceProvider; const stateProvider = $stateProvider; const groups = []; for (let i of ["cab", "camera", "bug", "calendar", "ban", "archive", "edit"]) { group = { name: i, items: [] }; for (let j of ["cab", "camera", "bug", "calendar", "ban", "archive", "edit"]) { group.items.push({ name: i + j}); if (i === "bug") { break; } } groups.push(group); if (i === "edit") { glMenuServiceProvider.addGroup({ name: group.name}); } else { const groupForProvider = { name: group.name, caption: _.capitalize(group.name), icon: group.name, order: i === "edit" ? undefined : group.name.length }; glMenuServiceProvider.addGroup(groupForProvider); if (i === "cab") { glMenuServiceProvider.setDefaultGroup(groupForProvider); } } } glMenuServiceProvider.setFooter([{ caption: "Github", href: "https://github.com/tardyp/guanlecoja-ui" } ]); glMenuServiceProvider.setAppTitle("Guanlecoja-UI"); for (group of Array.from(groups)) { for (let item of Array.from(group.items)) { const state = { name: item.name, url: `/${item.name}`, data: { group: item.name === "banedit" ? undefined : group.name, caption: item.name === "editedit" ? 
undefined : _.capitalize(item.name) } }; $stateProvider.state(state); } } return null; }) ); it('should generate the menu correctly', inject(function(glMenuService) { const groups = glMenuService.getGroups(); const namedGroups = {}; for (let g of Array.from(groups)) { namedGroups[g.name] = g; } expect(groups.length).toEqual(7); expect(groups[0].items.length).toEqual(7); expect(namedGroups['bug'].items.length).toEqual(0); expect(namedGroups['bug'].caption).toEqual('Bugcab'); }) ); it('should have the default group set', inject(function(glMenuService) { const defaultGroup = glMenuService.getDefaultGroup(); const groups = glMenuService.getGroups(); expect(defaultGroup).toEqual(groups[0]); }) ); // simple test to make sure the directive loads it('should generate error if group is undefined', function() { // configure the menu a little bit more.. with an erronous state angular.mock.module(function($stateProvider, glMenuServiceProvider) { $stateProvider.state({ name: "foo", data: { group: "bar" } }); // not existing group! return null; }); const run = () => inject(function(glMenuService) { let groups; return groups = glMenuService.getGroups(); }) ; expect(run).toThrow(); }); // simple test to make sure the directive loads it('should remove empty groups', function() { // configure the menu a little bit more.. 
with an erronous state angular.mock.module(function(glMenuServiceProvider) { glMenuServiceProvider.addGroup({ name: "foo"}); return null; }); inject(function(glMenuService) { const groups = glMenuService.getGroups(); const namedGroups = {}; for (let g of Array.from(groups)) { namedGroups[g.name] = g; } expect(namedGroups["foo"]).not.toBeDefined(); }); }); }); buildbot-3.4.0/www/guanlecoja-ui/src/module/notification_service/000077500000000000000000000000001413250514000251465ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/module/notification_service/httpinterceptor.js000066400000000000000000000023511413250514000307430ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // I intercept the http errors and put them in the notification service // in order to enable it, please add following code in you config: //class AddInterceptor extends Config // constructor: ($httpProvider) -> // $httpProvider.responseInterceptors.push('glHttpInterceptor') class glHttpInterceptor { constructor(glNotificationService, $q, $timeout) { return function(promise) { const errorHandler = function(res) { let msg; try { msg = `${res.status}:${res.data.error} ` + `when:${res.config.method} ${res.config.url}`; } catch (e) { msg = res.toString(); } $timeout((() => glNotificationService.network(msg)), 100); $q.resolve(null); }; return promise.then(angular.identity, errorHandler); }; } } angular.module('guanlecoja.ui') .factory('glHttpInterceptor', ['glNotificationService', '$q', '$timeout', glHttpInterceptor]); buildbot-3.4.0/www/guanlecoja-ui/src/module/notification_service/httpinterceptor.spec.js000066400000000000000000000024211413250514000316720ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: 
https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('http Interceptor', function() { beforeEach(angular.mock.module("guanlecoja.ui")); it('should intercept errors', inject(function($q, $rootScope, glNotificationService, $timeout, glHttpInterceptor) { const d = $q.defer(); const i = glHttpInterceptor(d.promise); spyOn(glNotificationService, "network").and.returnValue(null); d.reject("oups"); $rootScope.$digest(); $timeout.flush(); expect(glNotificationService.network).toHaveBeenCalledWith("oups"); }) ); it('should intercept http errors', inject(function($q, $rootScope, glNotificationService, $timeout, glHttpInterceptor) { const d = $q.defer(); const i = glHttpInterceptor(d.promise); spyOn(glNotificationService, "network").and.returnValue(null); d.reject({status: "404", data:{error:"not found"}, config:{method:"get",url:"http://foo"}}); $rootScope.$digest(); $timeout.flush(); expect(glNotificationService.network).toHaveBeenCalledWith("404:not found when:get http://foo"); }) ); }); buildbot-3.4.0/www/guanlecoja-ui/src/module/notification_service/notification.service.js000066400000000000000000000033151413250514000316330ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class glNotification { constructor($rootScope, $timeout) { this.$rootScope = $rootScope; this.$timeout = $timeout; this.notifications = []; this.curid = 0; null; } notify(opts) { this.curid += 1; if (opts.title == null) { opts.title = "Info"; } opts.id = this.curid; let id = this.curid; if (opts.group != null) { for (let i in this.notifications) { const n = this.notifications[i]; if (opts.group === n.group) { id = i; n.msg += `\n${opts.msg}`; } } } if (id === this.curid) { this.notifications.push(opts); } return null; } // some shortcuts... 
error(opts) { if (opts.title == null) { opts.title = "Error"; } return this.notify(opts); } network(opts) { if (opts.title == null) { opts.title = "Network issue"; } if (opts.group == null) { opts.group = "Network"; } return this.notify(opts); } dismiss(id) { for (let i in this.notifications) { const n = this.notifications[i]; if (n.id === id) { this.notifications.splice(i, 1); return null; } } return null; } } angular.module('guanlecoja.ui') .service('glNotificationService', ['$rootScope', '$timeout', glNotification]); buildbot-3.4.0/www/guanlecoja-ui/src/module/notification_service/notification.service.spec.js000066400000000000000000000040621413250514000325640ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('notificationService', function() { beforeEach(angular.mock.module("guanlecoja.ui")); it('should add and delete notifications', inject(function(glNotificationService, $timeout) { glNotificationService.notify({msg:"done", title:"finish"}); expect(glNotificationService.notifications).toEqual([ { id : 1, msg : 'done', title : 'finish' } ]); glNotificationService.dismiss(1); expect(glNotificationService.notifications).toEqual([]); glNotificationService.notify({msg:"done", title:"finish", group:"group"}); glNotificationService.notify({msg:"msg2", title:"finish", group:"group"}); expect(glNotificationService.notifications).toEqual([ { id : 2, msg : 'done\nmsg2', title : 'finish', group:"group" } ]); glNotificationService.dismiss(2); expect(glNotificationService.notifications).toEqual([]); glNotificationService.network({msg:"404"}); glNotificationService.network({msg:"404", title:"403"}); glNotificationService.network({msg:"404", group:"Network"}); glNotificationService.dismiss(4); glNotificationService.error({msg:"oups"}); glNotificationService.error({msg:"oups", title:"error"}); 
glNotificationService.dismiss(8); expect(glNotificationService.notifications[0].id).toEqual(7); glNotificationService.dismiss(7); glNotificationService.dismiss(99); expect(glNotificationService.notifications).toEqual([]); glNotificationService.notify({msg:"done1", title:"finish"}); glNotificationService.notify({msg:"done2", title:"finish", group:"group"}); glNotificationService.notify({msg:"done3", title:"finish", group:"group"}); glNotificationService.dismiss(9); glNotificationService.dismiss(10); expect(glNotificationService.notifications).toEqual([]); }) ); }); buildbot-3.4.0/www/guanlecoja-ui/src/module/notification_widget/000077500000000000000000000000001413250514000247715ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/module/notification_widget/notification.directive.js000066400000000000000000000020771413250514000320000ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class GlNotification { constructor() { return { replace: true, transclude: true, restrict: 'E', scope: false, controllerAs: "n", template: require('./notification.tpl.jade'), controller: "_glNotificationController" }; } } class _glNotification { constructor($scope, glNotificationService) { this.$scope = $scope; this.glNotificationService = glNotificationService; this.notifications = this.glNotificationService.notifications; null; } dismiss(id, e) { this.glNotificationService.dismiss(id); e.stopPropagation(); return null; } } angular.module('guanlecoja.ui') .directive('glNotification', [GlNotification]) .controller('_glNotificationController', ['$scope', 'glNotificationService', _glNotification]);buildbot-3.4.0/www/guanlecoja-ui/src/module/notification_widget/notification.less000066400000000000000000000022331413250514000303470ustar00rootroot00000000000000.notifications { .dropdown-menu { @media (max-width: 
600px) { position: fixed; top:50px; height:auto; width:100%; } li > .item { display: block; @media (min-width: 900px) { min-width:800px; max-width:800px; } @media (min-width: 600px) { min-width:500px; max-width:500px; } padding: 3px 20px; clear: both; font-weight: normal; line-height: 14px; .title { font-weight: bold; text-decoration: underline; padding: 8px 5px; } .msg { white-space: pre; line-height: 20px; } // Adjust close link position .close { position: relative; top: -2px; right: -10px; color: inherit; } } } // Open state for the dropdown &.active { // Show the menu > .dropdown-menu { display: block; } } } buildbot-3.4.0/www/guanlecoja-ui/src/module/notification_widget/notification.spec.js000066400000000000000000000025031413250514000307460ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('notification', function() { let scope; beforeEach(angular.mock.module("guanlecoja.ui")); let elmBody = (scope = null); const injected = function($rootScope, $compile) { elmBody = angular.element( '' ); scope = $rootScope; $compile(elmBody)(scope); return scope.$digest(); }; beforeEach((inject(injected))); // simple test to make sure the directive loads it('should load', function() { expect(elmBody).toBeDefined(); // if there is an ul, the sidebar has been created expect(elmBody.find("ul").length).toBeGreaterThan(0); }); // simple test to make sure the directive loads it('should dismiss pass through', inject(function(glNotificationService) { let called = false; const e = {stopPropagation() { return called = true; }}; spyOn(glNotificationService, "dismiss").and.returnValue(null); scope.n.dismiss(2, e); expect(glNotificationService.dismiss).toHaveBeenCalledWith(2); expect(called).toBe(true); }) ); }); 
buildbot-3.4.0/www/guanlecoja-ui/src/module/notification_widget/notification.tpl.jade000066400000000000000000000012171413250514000311030ustar00rootroot00000000000000li.dropdown.notifications(uib-dropdown) a(uib-dropdown-toggle) i.fa.fa-bell-o.fa-lg(ng-class="{'fa-ringing': n.notifications.length > 0 }") ul.uib-dropdown-menu.dropdown-menu.dropdown-menu-right(dropdown-toggle) li.dropdown-header | Notifications li.divider div(ng-repeat="msg in n.notifications") li .item button.close(ng-click="n.dismiss(msg.id, $event)") × .title {{msg.title}}: .msg {{msg.msg}} li.divider li(ng-hide="n.notifications.length>0") .item small.msg all caught up! buildbot-3.4.0/www/guanlecoja-ui/src/module/page_with_sidebar/000077500000000000000000000000001413250514000244005ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/module/page_with_sidebar/page_with_sidebar.directive.js000066400000000000000000000053151413250514000323570ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class GlPageWithSidebar { constructor() { return { replace: true, transclude: true, restrict: 'E', scope: false, controllerAs: "page", template: require('./page_with_sidebar.tpl.jade'), controller: "_glPageWithSidebarController" }; } } class _glPageWithSidebar { constructor($scope, glMenuService, $timeout, $window) { // by default, pin sidebar only if window is wide enough (collapse by default if narrow) this.$scope = $scope; this.$timeout = $timeout; this.$window = $window; this.sidebarPinned = this.$window.innerWidth > 800; // If user has previously pinned or unpinned the sidebar, use the saved value from localStorage const sidebarWasPinned = this.$window.localStorage.sidebarPinned; if ( (sidebarWasPinned === "true") || (sidebarWasPinned === "false") ) { // note -- localstorage only 
stores strings, converts bools to string. this.sidebarPinned = sidebarWasPinned !== "false"; } this.groups = glMenuService.getGroups(); this.footer = glMenuService.getFooter(); this.appTitle = glMenuService.getAppTitle(); this.activeGroup = glMenuService.getDefaultGroup(); this.inSidebar = false; this.sidebarActive = this.sidebarPinned; } toggleSidebarPinned() { this.sidebarPinned=!this.sidebarPinned; this.$window.localStorage.sidebarPinned = this.sidebarPinned.toString(); } toggleGroup(group) { if (this.activeGroup !== group) { this.activeGroup = group; } else { this.activeGroup = null; } } enterSidebar() { this.inSidebar = true; } hideSidebar() { this.sidebarActive = false; this.inSidebar = false; } leaveSidebar() { this.inSidebar = false; if (this.timeout != null) { this.$timeout.cancel(this.timeout); this.timeout = undefined; } this.timeout = this.$timeout((() => { if (!this.inSidebar && !this.sidebarPinned) { this.sidebarActive = false; this.activeGroup = null; } } ), 500); } } angular.module('guanlecoja.ui') .directive('glPageWithSidebar', [GlPageWithSidebar]) .controller('_glPageWithSidebarController', ['$scope', 'glMenuService', '$timeout', '$window', _glPageWithSidebar]); buildbot-3.4.0/www/guanlecoja-ui/src/module/page_with_sidebar/page_with_sidebar.less000066400000000000000000000137171413250514000307410ustar00rootroot00000000000000/* Based on http://ehesp.github.io/Responsive-Dashboard/ by Elliot Hesp */ @gl-sidebar-transition-time: 0.2s; @gl-sidebar-small-threshold: 600px; @gl-sidebar-width: 600px; .fa-45{ transform: rotate(45deg); -webkit-transform: rotate(45deg); } .gl-page-with-sidebar { padding-left: 70px; @media (max-width: @gl-sidebar-small-threshold) { padding-left: 30px; } height: 100%; bottom: 0; /* theme */ .sidebar.sidebar-blue { background: #30426a; ul .sidebar-main, .sidebar-footer, a:hover { background: #273759; } ul .sidebar-title { color: #627cb7; } } &.active:not(.pinned) .sidebar{ transform: translate(180px, 0px); -webkit-transform: 
translate(180px, 0px); } /* add scrollbar if the menu is open */ &.active .sidebar ul { overflow-y: auto; } &:not(.active){ .sidebar.blue { ul.sidebar .sidebar-title.separator { background: #273759; } } ul { .sidebar-title { alpha: 0; } &.separator { display: block; margin: 13px 0; } .sidebar-list a:hover span { border-left: 3px solid #e99d1a; text-indent: 22px; } } .sidebar-footer { display: none; } } &.pinned{ > .content { margin-left: 180px; } .sidebar { margin-left: 0px; left: 0px; } } // use ngAnimate for the sidebar &.active-add, &.active-remove { // ngAnimate wants an animation on the object it watches // even if it is not actually animated transition: all @gl-sidebar-transition-time ease-out 0s; .sidebar { transition: all @gl-sidebar-transition-time ease-out 0s; } } /* sidebar */ .sidebar { z-index:3000; user-select: none; -moz-user-select: none; -webkit-user-select: none; -ms-user-select: none; margin-left: -150px; @media (max-width: @gl-sidebar-small-threshold) { width: 210px; } left: -30px; width: 250px; position: fixed; height: 100%; ul { position: absolute; top: 0; bottom: 45px; padding: 0; margin: 0; list-style: none; text-indent: 20px; overflow-x: hidden; overflow-y: hidden; li a { color: #fff; display: block; float: left; text-decoration: none; width: 250px; @media (max-width: @gl-sidebar-small-threshold) { width: 210px; } } li.sidebar-main { height: 51px; a { font-size: 18px; line-height: 50px; } .menu-icon { float: right; font-size: 18px; padding-right: 28px; line-height: 50px; @media (max-width: @gl-sidebar-small-threshold) { padding-right: 8px; } } } li.sidebar-title { font-size: 12px; height: 35px; line-height: 40px; text-transform: uppercase; } li.sidebar-list { height: 40px; a { text-indent: 25px; font-size: 15px; color: #b2bfdc; line-height: 40px; .fa { margin-left: -34px; } } a:hover { color: #fff; border-left: 3px solid #e99d1a; text-indent: 22px; } .menu-icon { float: right; padding-right: 29px; line-height: 40px; width: 70px; @media 
(max-width: @gl-sidebar-small-threshold) { width: 50px; } } a:hover .menu-icon { text-indent: 25px; } &.subitem { transition: height @gl-sidebar-transition-time ease-out 0s; a { padding-left:20px; transition: line-height @gl-sidebar-transition-time ease-out 0s, color @gl-sidebar-transition-time ease-out 0s, background @gl-sidebar-transition-time ease-out 0s;; } &:not(.active) { height: 0px; a { line-height: 0px; color: rgba(255, 0, 0, 0.0); } } } } } } /* footer */ .sidebar-footer { position: absolute; height: 40px; bottom: 0; width: 100%; padding: 0; margin: 0; text-align: center; div a { color: #b2bfdc; font-size: 12px; line-height: 43px; } div a:hover { color: #ffffff; text-decoration: none; } } /* content */ > .content { position: relative; height: 100%; transition: margin-left @gl-sidebar-transition-time ease-out 0s; padding-left: 0px; margin-left: 0px; right:0px; overflow-y: visible; .container { width: 100% } .container-fluid { width: 100% } } overflow-y: visible; } buildbot-3.4.0/www/guanlecoja-ui/src/module/page_with_sidebar/page_with_sidebar.spec.js000066400000000000000000000067261413250514000313420ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('page with sidebar', function() { let rootScope, scope; beforeEach(angular.mock.module("guanlecoja.ui")); let elmBody = (scope = (rootScope = null)); const injected = function($rootScope, $compile, glMenuService) { rootScope = $rootScope; elmBody = angular.element( '' ); const groups = [{ name: 'g1', items: [{name:'i1', 'sref': ".."}] } , {name: 'g2'} ]; glMenuService.getGroups = () => groups; glMenuService.getDefaultGroup = () => groups[1]; scope = $rootScope; $compile(elmBody)(scope); return scope.$digest(); }; describe('default window', function() { beforeEach(inject(injected)); it('should load', function() { // simple test to make 
sure the directive loads expect(elmBody).toBeDefined(); // if there is an ul, the sidebar has been created expect(elmBody.find("ul").length).toBeGreaterThan(0); }); it('should toggle groups', function() { expect(elmBody).toBeDefined(); const g = scope.page.groups[1]; expect(scope.page.activeGroup).toBe(g); scope.page.toggleGroup(g); expect(scope.page.activeGroup).toBe(null); scope.page.toggleGroup(g); expect(scope.page.activeGroup).toBe(g); }); }); [ [750, "false", "small window, stored sidebarPinned==false"], [750, "true", "small window, stored sidebarPinned==true"], [750, undefined, "small window, stored sidebarPinned==undefined"], [850, "false", "large window, stored sidebarPinned==false"], [850, "true", "large window, stored sidebarPinned==true"], [850, undefined, "large window, stored sidebarPinned==undefined"], ].forEach(([innerWindowWidth, storedSidebarPinned, description]) => { describe(description, function() { beforeEach(function() { mockWindow = { innerWidth: innerWindowWidth, localStorage: { sidebarPinned: storedSidebarPinned }, document: new Document() }; angular.mock.module(function($provide){ $provide.value('$window', mockWindow); }); }); beforeEach(inject(injected)); it('should pin sidebar', inject(function($timeout) { if (storedSidebarPinned === "true" || (storedSidebarPinned === undefined && innerWindowWidth > 800)) { expect(scope.page.sidebarPinned).toBe(true); expect(scope.page.sidebarActive).toBe(true); } else { expect(scope.page.sidebarPinned).toBe(false); expect(scope.page.sidebarActive).toBe(false); } scope.page.sidebarPinned = false; scope.page.leaveSidebar(); $timeout.flush(); expect(scope.page.sidebarActive).toBe(false); scope.page.sidebarPinned = false; })); }); }); }); buildbot-3.4.0/www/guanlecoja-ui/src/module/page_with_sidebar/page_with_sidebar.tpl.jade000066400000000000000000000040671413250514000314720ustar00rootroot00000000000000.gl-page-with-sidebar(ng-class="{'active': page.sidebarActive, 'pinned': page.sidebarPinned}") 
.sidebar.sidebar-blue(ng-mouseenter='page.enterSidebar()',ng-mouseleave='page.leaveSidebar()',ng-click='page.sidebarActive=true') ul li.sidebar-main a(href='javascript:') | {{page.appTitle}} span.menu-icon.fa.fa-bars(ng-hide="page.sidebarActive", ng-click='page.sidebarActive=!page.sidebarActive') span.menu-icon.fa.fa-thumb-tack(ng-show="page.sidebarActive", ng-click="page.toggleSidebarPinned()", ng-class="{'fa-45': !page.sidebarPinned}") li.sidebar-title span NAVIGATION div(ng-repeat="group in page.groups") div(ng-if="group.items.length > 0") li.sidebar-list a(ng-click='page.toggleGroup(group)') i.fa.fa-angle-right |  {{group.caption}} span.menu-icon.fa(ng-class="'fa-' + group.icon") li.sidebar-list.subitem(ng-class="{'active': page.activeGroup==group}", ng-repeat="item in group.items") a(ui-sref='{{item.sref}}', ng-click='page.hideSidebar()') | {{item.caption}} div(ng-if="group.items.length == 0") div(ng-if="group.separator") li.sidebar-title span {{group.caption}} div(ng-if="!group.separator") li.sidebar-separator(ng-if="!$first") li.sidebar-list a(ui-sref='{{group.sref}}', ng-click='page.toggleGroup(group)') | {{group.caption}} span.menu-icon.fa(ng-class="'fa-' + group.icon") .sidebar-footer .col-xs-4(ng-repeat="item in page.footer") a(ng-href='{{item.href}}') | {{item.caption}} .content div(ng-transclude) buildbot-3.4.0/www/guanlecoja-ui/src/module/topbar-contextual-actions/000077500000000000000000000000001413250514000260515ustar00rootroot00000000000000topbar-contextual-actions.directive.js000066400000000000000000000031351413250514000354200ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/module/topbar-contextual-actions/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class GlTopbarContextualActions { 
constructor() { return { replace: true, restrict: 'E', scope: true, template: require('./topbar-contextual-actions.tpl.jade'), controller: "_glTopbarContextualActionsController" }; } } class _glTopbarContextualActions { constructor($scope, $sce) { $scope.$on("$stateChangeStart", (ev, state) => $scope.actions = []); $scope.$on("glSetContextualActions", function(e, data) { for (let item of Array.from(data)) { if (item.extra_class == null) { item.extra_class = ""; } } return $scope.actions = data; }); } } // a simple service to abstract TopbarContextualActions configuration class glTopbarContextualActions { constructor($rootScope) { this.$rootScope = $rootScope; ({}); } setContextualActions(actions) { this.$rootScope.$broadcast("glSetContextualActions", actions); } } angular.module('guanlecoja.ui') .directive('glTopbarContextualActions', [GlTopbarContextualActions]) .controller('_glTopbarContextualActionsController', ['$scope', '$sce', _glTopbarContextualActions]) .service('glTopbarContextualActionsService', ['$rootScope', glTopbarContextualActions]); buildbot-3.4.0/www/guanlecoja-ui/src/module/topbar-contextual-actions/topbar-contextual-actions.less000066400000000000000000000000001413250514000340400ustar00rootroot00000000000000topbar-contextual-actions.spec.js000066400000000000000000000032461413250514000343770ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/module/topbar-contextual-actions/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('topbar-contextual-actions', function() { let scope; beforeEach(angular.mock.module("guanlecoja.ui")); let elmBody = (scope = null); const injected = function($rootScope, $compile) { elmBody = angular.element( '' ); scope = $rootScope.$new(); $compile(elmBody)(scope); return scope.$digest(); }; beforeEach((inject(injected))); // simple test to make sure the directive 
loads it('should load', function() { expect(elmBody).toBeDefined(); // should be empty at init expect(elmBody.find("li").length).toEqual(0); }); // create the buttons via API it('should create buttons', inject(function(glTopbarContextualActionsService) { expect(elmBody).toBeDefined(); let called = 0; glTopbarContextualActionsService.setContextualActions([{ caption:"foo", action() { return called++; } } , { caption:"bar", action() { return called++; } } ]); scope.$digest(); expect(elmBody.find(".form-group").length).toEqual(2); expect(elmBody.find("button").text()).toEqual("foobar"); // make sure action is called on click elmBody.find("button").each(function() { return $(this).click(); }); scope.$digest(); expect(called).toEqual(2); }) ); }); topbar-contextual-actions.tpl.jade000066400000000000000000000005261413250514000345310ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/module/topbar-contextual-actionsform.navbar-form.navbar-left .form-group(ng-repeat="a in actions") button.btn.btn-default(type="button" ng-class="a.extra_class" ng-click="a.action()", title="{{a.help}}") i.fa(ng-if="a.icon", ng-class="'fa-' + a.icon") span(ng-if="a.icon&&a.caption")   | {{::a.caption}} |   buildbot-3.4.0/www/guanlecoja-ui/src/module/topbar/000077500000000000000000000000001413250514000222275ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/module/topbar/topbar.directive.js000066400000000000000000000031721413250514000260340ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class GlTopbar { constructor() { return { replace: true, transclude: true, restrict: 'E', scope: false, controllerAs: "page", template: require('./topbar.tpl.jade'), controller: "_glTopbarController" }; } } class _glTopbar { constructor($scope, glMenuService, 
$location) { let groups = glMenuService.getGroups(); groups = _.zipObject(_.map(groups, g => g.name), groups); $scope.appTitle = glMenuService.getAppTitle(); $scope.$on("$stateChangeStart", function(ev, state) { $scope.breadcrumb = []; if ((state.data != null ? state.data.group : undefined) && ((state.data != null ? state.data.caption : undefined) !== groups[state.data.group].caption)) { $scope.breadcrumb.push({ caption: groups[state.data.group].caption}); } return $scope.breadcrumb.push({ caption: (state.data != null ? state.data.caption : undefined) || _.capitalize(state.name), href: `#${$location.hash()}` }); }); $scope.$on("glBreadcrumb", (e, data) => $scope.breadcrumb = data); } } angular.module('guanlecoja.ui') .directive('glTopbar', [GlTopbar]) .controller('_glTopbarController', ['$scope', 'glMenuService', '$location', _glTopbar]); buildbot-3.4.0/www/guanlecoja-ui/src/module/topbar/topbar.less000066400000000000000000000037651413250514000244210ustar00rootroot00000000000000@gl-sidebar-small-threshold: 600px; .fa.fa-ringing { -webkit-animation-name: 'ring_animation'; -webkit-animation-duration: 2000ms; -webkit-transform-origin:70% 70%; -webkit-animation-iteration-count: infinite; -webkit-animation-timing-function: linear; animation-name: 'ring_animation'; animation-duration: 2000ms; transform-origin:50% 50%; animation-iteration-count: infinite; animation-timing-function: linear; color:red; } /* ring animation for notifications */ @-webkit-keyframes ring_animation { 0% { -webkit-transform: rotate(10deg); } 5% { -webkit-transform: rotate(-10deg); } 10% { -webkit-transform: rotate(10deg); } 15% { -webkit-transform: rotate(-10deg); } 20% { -webkit-transform: rotate(10deg); } 25% { -webkit-transform: rotate(-10deg); } 30% { -webkit-transform: rotate(10deg); } 35% { -webkit-transform: rotate(-10deg); } 40% { -webkit-transform: rotate(10deg); } 45% { -webkit-transform: rotate(-10deg); } 50% { -webkit-transform: rotate(0deg); } 100% { } } @keyframes ring_animation { 0% 
{ transform: rotate(10deg); } 5% { transform: rotate(-10deg); } 10% { transform: rotate(10deg); } 15% { transform: rotate(-10deg); } 20% { transform: rotate(10deg); } 25% { transform: rotate(-10deg); } 30% { transform: rotate(10deg); } 35% { transform: rotate(-10deg); } 40% { transform: rotate(10deg); } 45% { transform: rotate(-10deg); } 50% { transform: rotate(0deg); } 100% { } } .navbar-nav > li { float: left; } .navbar .breadcrumb { margin-top: 6px; font-size: 16px; display: inline-block; } .navbar .navbar-brand { padding-top: 14px; font-size: 21px; } @media (max-width: @gl-sidebar-small-threshold) { .navbar .navbar-brand { display: none; } .navbar .breadcrumb { font-size: 14px; padding-left: 0px; } } img.avatar { background-color: #ccc; border-radius: 50%; height: 40px; margin: -10px; width: 40px; } buildbot-3.4.0/www/guanlecoja-ui/src/module/topbar/topbar.spec.js000066400000000000000000000026001413250514000250030ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('topbar', function() { let scope; beforeEach(angular.mock.module("guanlecoja.ui")); let elmBody = (scope = null); const injected = function($rootScope, $compile) { elmBody = angular.element( '' ); scope = $rootScope; $compile(elmBody)(scope); return scope.$digest(); }; beforeEach((inject(injected))); // simple test to make sure the directive loads it('should load', function() { expect(elmBody).toBeDefined(); // if there is an ul, the sidebar has been created expect(elmBody.find("ul").length).toBeGreaterThan(0); }); // simple test to make sure the directive loads it('should update breadcrumb upon messages', inject(function($location){ $location.hash = () => "bar/"; scope.$broadcast("$stateChangeStart", {name: "foo"}); expect(scope.breadcrumb).toEqual([ { caption : 'Foo', href : '#bar/' } ]); scope.$broadcast("glBreadcrumb", [{ 
caption: "bar", sref: "foo" } ]); expect(scope.breadcrumb).toEqual([ { caption : 'bar', sref : 'foo' } ]); }) ); }); buildbot-3.4.0/www/guanlecoja-ui/src/module/topbar/topbar.tpl.jade000066400000000000000000000015201413250514000251370ustar00rootroot00000000000000nav.navbar.navbar-default .container-fluid .navbar-header button.navbar-toggle.collapsed( type="button", ng-click="collapse=!collapse", ng-init="collapse=1" aria-expanded="false") span.sr-only Toggle navigation span.icon-bar span.icon-bar span.icon-bar a.navbar-brand {{appTitle}} ol.breadcrumb li(ng-repeat="b in breadcrumb") a(ng-if="b.sref", ui-sref="{{b.sref}}") {{b.caption}} a(ng-if="b.href", ng-href="{{b.href}}") {{b.caption}} span(ng-if="b.href == undefined && b.sref == undefined", ng-href="{{b.href}}") {{b.caption}} .navbar-collapse.collapse.pull-right(ng-class='{"in": !collapse}') ul.nav.navbar-nav(ng-transclude) buildbot-3.4.0/www/guanlecoja-ui/src/styles/000077500000000000000000000000001413250514000207765ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/src/styles/styles.less000066400000000000000000000005571413250514000232200ustar00rootroot00000000000000/* Base */ @import "../module/**/*.less"; html { overflow-y: scroll; } .row { margin-left: 0; margin-right: 0; } .row > div { margin-bottom: 15px; } .noselect { -webkit-touch-callout: none; -webkit-user-select: none; -khtml-user-select: none; -moz-user-select: none; -ms-user-select: none; user-select: none; } buildbot-3.4.0/www/guanlecoja-ui/src/tests.webpack.js000066400000000000000000000004421413250514000225660ustar00rootroot00000000000000// This file is an entry point for angular tests // Avoids some weird issues when using webpack + angular. 
import 'angular'; import 'angular-mocks/angular-mocks'; import './module/main.module.js' const context = require.context('./', true, /\.spec.js$/); context.keys().forEach(context); buildbot-3.4.0/www/guanlecoja-ui/test/000077500000000000000000000000001413250514000176435ustar00rootroot00000000000000buildbot-3.4.0/www/guanlecoja-ui/test/main.js000066400000000000000000000113221413250514000211240ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ if (typeof __karma__ === 'undefined' || __karma__ === null) { window.describe = function() {}; } // define sample application logic const m = angular.module("app", ["guanlecoja.ui", "ngSanitize"]); const README = "https://github.com/tardyp/guanlecoja-ui/blob/master/Readme.md"; m.config(function($stateProvider, glMenuServiceProvider, $urlRouterProvider) { let group; $urlRouterProvider.otherwise('/bugcab'); const groups = []; for (let i of ["cab", "camera", "bug", "calendar", "ban", "archive", "edit"]) { group = { name: i, items: [] }; for (let j of ["cab", "camera", "bug", "calendar", "ban", "archive", "edit"]) { group.items.push({ name: i + j}); if (i === "bug") { break; } } groups.push(group); glMenuServiceProvider.addGroup({ name: group.name, caption: _.capitalize(group.name), icon: group.name, order: group.name.length }); } glMenuServiceProvider.setFooter([{ caption: "Github", href: "https://github.com/tardyp/guanlecoja-ui" } , { caption: "Help", href: README } , { caption: "About", href: README } ]); glMenuServiceProvider.setAppTitle("Guanlecoja-UI"); return (() => { const result = []; for (group of Array.from(groups)) { result.push((() => { const result1 = []; for (let item of Array.from(group.items)) { 
const state = { controller: "dummyController", template: `
\
`, name: item.name, url: `/${item.name}`, data: { group: group.name, caption: _.capitalize(item.name) } }; result1.push($stateProvider.state(state)); } return result1; })()); } return result; })(); }); m.controller("dummyController", function($scope, $state, glBreadcrumbService, glNotificationService, glTopbarContextualActionsService) { // You can set different actions given the route glTopbarContextualActionsService.setContextualActions([{ caption: "Download Doc", icon: "download", action() { return document.location = 'Readme.md'; } } , { caption: "View on Github", icon: "github", help: "Go to the github page of guanleoja-ui", action() { return document.location = README; } } , { icon: "google-plus", action() { return document.location = "https://plus.google.com"; } } ]); $scope.stateName = $state.current.name; glNotificationService.notify({msg:`You just transitioned to ${$scope.stateName}!`}, {title:"State transitions", group:"state"}); glBreadcrumbService.setBreadcrumb([ {caption: _.capitalize($state.current.data.group)} , { caption: _.capitalize($state.current.name), sref: $state.current.name } ]); }); // // angular-markdown-directive v0.3.0 // (c) 2013-2014 Brian Ford http://briantford.com // License: MIT m.provider("markdownConverter", function() { let opts = {}; return { config(newOpts) { opts = newOpts; }, $get() { return new Showdown.converter(opts); } }; }).directive("btfMarkdown", ($sanitize, markdownConverter) => ({ restrict: "AE", link(scope, element, attrs) { if (attrs.btfMarkdown) { scope.$watch(attrs.btfMarkdown, function(newVal) { const html = (newVal ? 
$sanitize(markdownConverter.makeHtml(newVal)) : ""); element.html(html); }); } else { const html = $sanitize(markdownConverter.makeHtml(element.text())); element.html(html); } } }) ); buildbot-3.4.0/www/guanlecoja-ui/webpack.config.js000066400000000000000000000014401413250514000221010ustar00rootroot00000000000000'use strict'; const common = require('buildbot-build-common'); const env = require('yargs').argv.env; const pkg = require('./package.json'); var event = process.env.npm_lifecycle_event; var isTest = event === 'test' || event === 'test-watch'; var isProd = env === 'prod'; module.exports = function() { var basename = isProd ? pkg.name + '.min' : pkg.name; return common.createTemplateWebpackConfig({ entry: { [basename]: './src/module/main.module.js', styles: './src/styles/styles.less', }, libraryName: pkg.name, pluginName: pkg.plugin_name, dirname: __dirname, isTest: isTest, isProd: isProd, outputPath: __dirname + '/dist', extractStyles: true, provideJquery: true, }); }(); buildbot-3.4.0/www/guanlecoja-ui/yarn.lock000066400000000000000000011433521413250514000205200ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
# yarn lockfile v1 "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.7.4.tgz#37e864532200cb6b50ee9a4045f5f817840166ab" integrity sha512-+bYbx56j4nYBmpsWtnPUsKW3NdnYxbqyfrP2w9wILBuHzdfIKz9prieZK0DFPyIzkjYVUe4QkusGL07r5pXznQ== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helpers" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" convert-source-map "^1.7.0" debug "^4.1.0" json5 "^2.1.0" lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" "@babel/generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.7.4.tgz#db651e2840ca9aa66f327dcec1dc5f5fa9611369" integrity sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg== dependencies: "@babel/types" "^7.7.4" jsesc "^2.5.1" lodash "^4.17.13" source-map "^0.5.0" "@babel/helper-annotate-as-pure@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.4.tgz#bb3faf1e74b74bd547e867e48f551fa6b098b6ce" integrity sha512-2BQmQgECKzYKFPpiycoF9tlb5HA4lrVyAmLLVK177EcQAqjVLciUb2/R+n1boQ9y5ENV3uz2ZqiNw7QMBBw1Og== dependencies: "@babel/types" "^7.7.4" "@babel/helper-builder-binary-assignment-operator-visitor@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.4.tgz#5f73f2b28580e224b5b9bd03146a4015d6217f5f" integrity 
sha512-Biq/d/WtvfftWZ9Uf39hbPBYDUo986m5Bb4zhkeYDGUllF43D+nUe5M6Vuo6/8JDK/0YX/uBdeoQpyaNhNugZQ== dependencies: "@babel/helper-explode-assignable-expression" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-call-delegate@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.7.4.tgz#621b83e596722b50c0066f9dc37d3232e461b801" integrity sha512-8JH9/B7J7tCYJ2PpWVpw9JhPuEVHztagNVuQAFBVFYluRMlpG7F1CgKEgGeL6KFqcsIa92ZYVj6DSc0XwmN1ZA== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-create-regexp-features-plugin@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.4.tgz#6d5762359fd34f4da1500e4cff9955b5299aaf59" integrity sha512-Mt+jBKaxL0zfOIWrfQpnfYCN7/rS6GKx6CCCfuoqVVd+17R8zNDlzVYmIi9qyb2wOk002NsmSTDymkIygDUH7A== dependencies: "@babel/helper-regex" "^7.4.4" regexpu-core "^4.6.0" "@babel/helper-define-map@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz#2841bf92eb8bd9c906851546fe6b9d45e162f176" integrity sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-explode-assignable-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.4.tgz#fa700878e008d85dc51ba43e9fb835cddfe05c84" integrity sha512-2/SicuFrNSXsZNBxe5UGdLr+HZg+raWBLE9vC98bdYOKX/U6PY0mdGlYUJdtTDPSU0Lw0PNbKKDpwYHJLn2jLg== dependencies: "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-function-name@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz#ab6e041e7135d436d8f0a3eca15de5b67a341a2e" integrity sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ== dependencies: "@babel/helper-get-function-arity" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-get-function-arity@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz#cb46348d2f8808e632f0ab048172130e636005f0" integrity sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA== dependencies: "@babel/types" "^7.7.4" "@babel/helper-hoist-variables@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.4.tgz#612384e3d823fdfaaf9fce31550fe5d4db0f3d12" integrity sha512-wQC4xyvc1Jo/FnLirL6CEgPgPCa8M74tOdjWpRhQYapz5JC7u3NYU1zCVoVAGCE3EaIP9T1A3iW0WLJ+reZlpQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-member-expression-to-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.4.tgz#356438e2569df7321a8326644d4b790d2122cb74" integrity sha512-9KcA1X2E3OjXl/ykfMMInBK+uVdfIVakVe7W7Lg3wfXUNyS3Q1HWLFRwZIjhqiCGbslummPDnmb7vIekS0C1vw== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-imports@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.7.4.tgz#e5a92529f8888bf319a6376abfbd1cebc491ad91" integrity sha512-dGcrX6K9l8258WFjyDLJwuVKxR4XZfU0/vTUgOQYWEnRD8mgr+p4d6fCUMq/ys0h4CCt/S5JhbvtyErjWouAUQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-transforms@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.7.4.tgz#8d7cdb1e1f8ea3d8c38b067345924ac4f8e0879a" 
integrity sha512-ehGBu4mXrhs0FxAqN8tWkzF8GSIGAiEumu4ONZ/hD9M88uHcD+Yu2ttKfOCgwzoesJOJrtQh7trI5YPbRtMmnA== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-simple-access" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-optimise-call-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.4.tgz#034af31370d2995242aa4df402c3b7794b2dcdf2" integrity sha512-VB7gWZ2fDkSuqW6b1AKXkJWO5NyNI3bFL/kK79/30moK57blr6NbH8xcl2XcKCwOmJosftWunZqfO84IGq3ZZg== dependencies: "@babel/types" "^7.7.4" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.4.tgz#c68c2407350d9af0e061ed6726afb4fff16d0234" integrity sha512-Sk4xmtVdM9sA/jCI80f+KS+Md+ZHIpjuqmYPk1M7F/upHou5e4ReYmExAiu6PVe65BhJPZA2CY9x9k4BqE5klw== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-wrap-function" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-replace-supers@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.7.4.tgz#3c881a6a6a7571275a72d82e6107126ec9e2cdd2" integrity sha512-pP0tfgg9hsZWo5ZboYGuBn/bbYT/hdLPVSS4NMmiRJdwWhP0IznPwN9AE1JwyGsjSPLC364I0Qh5p+EPkGPNpg== dependencies: "@babel/helper-member-expression-to-functions" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-simple-access@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.7.4.tgz#a169a0adb1b5f418cfc19f22586b2ebf58a9a294" integrity sha512-zK7THeEXfan7UlWsG2A6CI/L9jVnI5+xxKZOdej39Y0YtDYKx9raHk5F2EtK9K8DHRTihYwg20ADt9S36GR78A== dependencies: "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-split-export-declaration@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz#57292af60443c4a3622cf74040ddc28e68336fd8" integrity sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug== dependencies: "@babel/types" "^7.7.4" "@babel/helper-validator-identifier@^7.12.11": version "7.12.11" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== "@babel/helper-wrap-function@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz#37ab7fed5150e22d9d7266e830072c0cdd8baace" integrity sha512-VsfzZt6wmsocOaVU0OokwrIytHND55yvyT4BPB9AIIgwr8+x7617hetdJTsuGwygN5RC6mxA9EJztTjuwm2ofg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helpers@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.7.4.tgz#62c215b9e6c712dadc15a9a0dcab76c92a940302" integrity sha512-ak5NGZGJ6LV85Q1Zc9gn2n+ayXOizryhjSUBTdu5ih1tlVCJeuQENzc4ItyCVhINVXvIT/ZQ4mheGIsfBkpskg== dependencies: "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/highlight@^7.0.0": version "7.5.0" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" "@babel/parser@^7.6.0", "@babel/parser@^7.9.6": version "7.13.9" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.13.9.tgz#ca34cb95e1c2dd126863a84465ae8ef66114be99" integrity sha512-nEUfRiARCcaVo3ny3ZQjURjHQZUo/JkEw7rLlSZy/psWGnvwXFtPcr6jb7Yb41DVW5LTe6KRq9LGleRNsg1Frw== "@babel/parser@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.7.4.tgz#75ab2d7110c2cf2fa949959afb05fa346d2231bb" integrity sha512-jIwvLO0zCL+O/LmEJQjWA75MQTWwx3c3u2JOTDK5D3/9egrWRRA0/0hk9XXywYnXZVVpzrBYeIQTmhwUaePI9g== "@babel/plugin-proposal-async-generator-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz#0351c5ac0a9e927845fffd5b82af476947b7ce6d" integrity sha512-1ypyZvGRXriY/QP668+s8sFr2mqinhkRDMPSQLNghCQE+GAkFtp+wkHVvg2+Hdki8gwP+NFzJBJ/N1BfzCCDEw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-proposal-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.4.tgz#dde64a7f127691758cbfed6cf70de0fa5879d52d" integrity 
sha512-StH+nGAdO6qDB1l8sZ5UBV8AC3F2VW2I8Vfld73TMKyptMU9DY5YsJAS8U81+vEtxcH3Y/La0wG0btDrhpnhjQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.7.4.tgz#7700a6bfda771d8dc81973249eac416c6b4c697d" integrity sha512-wQvt3akcBTfLU/wYoqm/ws7YOAQKu8EVJEvHip/mzkNtjaclQoCCIqKXFP5/eyfnfbQCDV3OLRIK3mIVyXuZlw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.7.4.tgz#cc57849894a5c774214178c8ab64f6334ec8af71" integrity sha512-rnpnZR3/iWKmiQyJ3LKJpSwLDcX/nSXhdLk4Aq/tXOApIvyu7qoabrige0ylsAJffaUC51WiBu209Q0U+86OWQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.7.4.tgz#ec21e8aeb09ec6711bc0a39ca49520abee1de379" integrity sha512-DyM7U2bnsQerCQ+sejcTNZh8KQEUuC3ufzdnVnSiUv/qoGJp2Z3hanKL18KDhsBT5Wj6a7CMT5mdyCNJsEaA9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.4.tgz#7c239ccaf09470dbe1d453d50057460e84517ebb" integrity sha512-cHgqHgYvffluZk85dJ02vloErm3Y6xtH+2noOBOJ2kXOJH3aVCDnj5eR/lVNlTnYu4hndAPJD3rTFjW3qee0PA== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-async-generators@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.7.4.tgz#331aaf310a10c80c44a66b238b6e49132bd3c889" integrity sha512-Li4+EjSpBgxcsmeEF8IFcfV/+yJGxHXDirDkEoyFjumuwbmfCVHUt0HuowD/iGM7OhIRyXJH9YXxqiH6N815+g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import@^7.2.0", "@babel/plugin-syntax-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.7.4.tgz#29ca3b4415abfe4a5ec381e903862ad1a54c3aec" integrity sha512-jHQW0vbRGvwQNgyVxwDh4yuXu4bH1f5/EICJLAhl1SblLs2CDhrsmCk+v5XLdE9wxtAFRyxx+P//Iw+a5L/tTg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.7.4.tgz#86e63f7d2e22f9e27129ac4e83ea989a382e86cc" integrity sha512-QpGupahTQW1mHRXddMG5srgpHWqRLwJnJZKXTigB9RPFCCGbDGCgBeM/iC82ICXp414WeYx/tD54w7M2qRqTMg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.7.4.tgz#47cf220d19d6d0d7b154304701f468fc1cc6ff46" integrity sha512-mObR+r+KZq0XhRVS2BrBKBpr5jqrqzlPvS9C9vuOf5ilSwzloAl7RPWLrgKdWS6IreaVrjHxTjtyqFiOisaCwg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.7.4.tgz#a3e38f59f4b6233867b4a92dcb0ee05b2c334aa6" integrity sha512-4ZSuzWgFxqHRE31Glu+fEr/MirNZOMYmD/0BhBWyLyOOQz/gTAl7QmWm2hX1QxEIXsr2vkdlwxIzTyiYRC4xcQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-top-level-await@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.4.tgz#bd7d8fa7b9fee793a36e4027fd6dd1aa32f946da" integrity sha512-wdsOw0MvkL1UIgiQ/IFr3ETcfv1xb8RMM0H9wbiDyLaJFyiDg5oZvDLCXosIXmFeIlweML5iOBXAkqddkYNizg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-arrow-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.7.4.tgz#76309bd578addd8aee3b379d809c802305a98a12" integrity sha512-zUXy3e8jBNPiffmqkHRNDdZM2r8DWhCB7HhcoyZjiK1TxYEluLHAvQuYnTT+ARqRpabWqy/NHkO6e3MsYB5YfA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.4.tgz#694cbeae6d613a34ef0292713fa42fb45c4470ba" integrity sha512-zpUTZphp5nHokuy8yLlyafxCJ0rSlFoSHypTUWgpdwoDXWQcseaect7cJ8Ppk6nunOM6+5rPMkod4OYKPR5MUg== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.7.4.tgz#d0d9d5c269c78eaea76227ace214b8d01e4d837b" integrity sha512-kqtQzwtKcpPclHYjLK//3lH8OFsCDuDJBaFhVwf8kqdnF6MN4l618UDlcA7TfRs3FayrHj+svYnSX8MC9zmUyQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-block-scoping@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.7.4.tgz#200aad0dcd6bb80372f94d9e628ea062c58bf224" integrity 
sha512-2VBe9u0G+fDt9B5OV5DQH4KBf5DoiNkwFKOz0TCvBWvdAN2rOykCTkrL+jTLxfCAm76l9Qo5OqL7HBOx2dWggg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" lodash "^4.17.13" "@babel/plugin-transform-classes@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.4.tgz#c92c14be0a1399e15df72667067a8f510c9400ec" integrity sha512-sK1mjWat7K+buWRuImEzjNf68qrKcrddtpQo3swi9j7dUcG6y6R6+Di039QN2bD1dykeswlagupEmpOatFHHUg== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-define-map" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" globals "^11.1.0" "@babel/plugin-transform-computed-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.7.4.tgz#e856c1628d3238ffe12d668eb42559f79a81910d" integrity sha512-bSNsOsZnlpLLyQew35rl4Fma3yKWqK3ImWMSC/Nc+6nGjC9s5NFWAer1YQ899/6s9HxO2zQC1WoFNfkOqRkqRQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-destructuring@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.7.4.tgz#2b713729e5054a1135097b6a67da1b6fe8789267" integrity sha512-4jFMXI1Cu2aXbcXXl8Lr6YubCn6Oc7k9lLsu8v61TZh+1jny2BWmdtvY9zSUlLdGUvcy9DMAWyZEOqjsbeg/wA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-dotall-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.4.tgz#f7ccda61118c5b7a2599a72d5e3210884a021e96" integrity sha512-mk0cH1zyMa/XHeb6LOTXTbG7uIJ8Rrjlzu91pUx/KS3JpcgaTDwMS8kM+ar8SLOvlL2Lofi4CGBAjCo3a2x+lw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" 
"@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-duplicate-keys@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.7.4.tgz#3d21731a42e3f598a73835299dd0169c3b90ac91" integrity sha512-g1y4/G6xGWMD85Tlft5XedGaZBCIVN+/P0bs6eabmcPP9egFleMAo65OOjlhcz1njpwagyY3t0nsQC9oTFegJA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-exponentiation-operator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.7.4.tgz#dd30c0191e3a1ba19bcc7e389bdfddc0729d5db9" integrity sha512-MCqiLfCKm6KEA1dglf6Uqq1ElDIZwFuzz1WH5mTf8k2uQSxEJMbOIEh7IZv7uichr7PMfi5YVSrr1vz+ipp7AQ== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-for-of@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.7.4.tgz#248800e3a5e507b1f103d8b4ca998e77c63932bc" integrity sha512-zZ1fD1B8keYtEcKF+M1TROfeHTKnijcVQm0yO/Yu1f7qoDoxEIc/+GX6Go430Bg84eM/xwPFp0+h4EbZg7epAA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.4.tgz#75a6d3303d50db638ff8b5385d12451c865025b1" integrity sha512-E/x09TvjHNhsULs2IusN+aJNRV5zKwxu1cpirZyRPw+FyyIKEHPXTsadj48bVpc1R5Qq1B5ZkzumuFLytnbT6g== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.7.4.tgz#27fe87d2b5017a2a5a34d1c41a6b9f6a6262643e" integrity 
sha512-X2MSV7LfJFm4aZfxd0yLVFrEXAgPqYoDG53Br/tCKiKYfX0MjVjQeWPIhPHHsCqzwQANq+FLN786fF5rgLS+gw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-member-expression-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.7.4.tgz#aee127f2f3339fc34ce5e3055d7ffbf7aa26f19a" integrity sha512-9VMwMO7i69LHTesL0RdGy93JU6a+qOPuvB4F4d0kR0zyVjJRVJRaoaGjhtki6SzQUu8yen/vxPKN6CWnCUw6bA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-amd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.7.4.tgz#276b3845ca2b228f2995e453adc2e6f54d72fb71" integrity sha512-/542/5LNA18YDtg1F+QHvvUSlxdvjZoD/aldQwkq+E3WCkbEjNSN9zdrOXaSlfg3IfGi22ijzecklF/A7kVZFQ== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-commonjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.4.tgz#bee4386e550446343dd52a571eda47851ff857a3" integrity sha512-k8iVS7Jhc367IcNF53KCwIXtKAH7czev866ThsTgy8CwlXjnKZna2VHwChglzLleYrcHz1eQEIJlGRQxB53nqA== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.7.4" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-systemjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.4.tgz#cd98152339d3e763dfe838b7d4273edaf520bb30" integrity sha512-y2c96hmcsUi6LrMqvmNDPBBiGCiQu0aYqpHatVVu6kD4mFEXKjyNxd/drc18XXAf9dv7UXjrZwBVmTTGaGP8iw== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-umd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.4.tgz#1027c355a118de0aae9fee00ad7813c584d9061f" integrity sha512-u2B8TIi0qZI4j8q4C51ktfO7E3cQ0qnaXFI1/OXITordD40tt17g/sXqgNNCcMTcBFKrUPcGDx+TBJuZxLx7tw== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-named-capturing-groups-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.4.tgz#fb3bcc4ee4198e7385805007373d6b6f42c98220" integrity sha512-jBUkiqLKvUWpv9GLSuHUFYdmHg0ujC1JEYoZUfeOOfNydZXp1sXObgyPatpcwjWgsdBGsagWW0cdJpX/DO2jMw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/plugin-transform-new-target@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.7.4.tgz#4a0753d2d60639437be07b592a9e58ee00720167" integrity sha512-CnPRiNtOG1vRodnsyGX37bHQleHE14B9dnnlgSeEs3ek3fHN1A1SScglTCg1sfbe7sRQ2BUcpgpTpWSfMKz3gg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-object-super@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.7.4.tgz#48488937a2d586c0148451bf51af9d7dda567262" integrity sha512-ho+dAEhC2aRnff2JCA0SAK7V2R62zJd/7dmtoe7MHcso4C2mS+vZjn1Pb1pCVZvJs1mgsvv5+7sT+m3Bysb6eg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/plugin-transform-parameters@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.7.4.tgz#da4555c97f39b51ac089d31c7380f03bca4075ce" integrity 
sha512-VJwhVePWPa0DqE9vcfptaJSzNDKrWU/4FbYCjZERtmqEs05g3UMXnYMZoXja7JAJ7Y7sPZipwm/pGApZt7wHlw== dependencies: "@babel/helper-call-delegate" "^7.7.4" "@babel/helper-get-function-arity" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-property-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.7.4.tgz#2388d6505ef89b266103f450f9167e6bd73f98c2" integrity sha512-MatJhlC4iHsIskWYyawl53KuHrt+kALSADLQQ/HkhTjX954fkxIEh4q5slL4oRAnsm/eDoZ4q0CIZpcqBuxhJQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-regenerator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.4.tgz#d18eac0312a70152d7d914cbed2dc3999601cfc0" integrity sha512-e7MWl5UJvmPEwFJTwkBlPmqixCtr9yAASBqff4ggXTNicZiwbF8Eefzm6NVgfiBp7JdAGItecnctKTgH44q2Jw== dependencies: regenerator-transform "^0.14.0" "@babel/plugin-transform-reserved-words@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.7.4.tgz#6a7cf123ad175bb5c69aec8f6f0770387ed3f1eb" integrity sha512-OrPiUB5s5XvkCO1lS7D8ZtHcswIC57j62acAnJZKqGGnHP+TIc/ljQSrgdX/QyOTdEK5COAhuc820Hi1q2UgLQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.7.4.tgz#51fe458c1c1fa98a8b07934f4ed38b6cd62177a6" integrity sha512-O8kSkS5fP74Ad/8pfsCMGa8sBRdLxYoSReaARRNSz3FbFQj3z/QUvoUmJ28gn9BO93YfnXc3j+Xyaqe8cKDNBQ== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" resolve "^1.8.1" semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.7.4.tgz#74a0a9b2f6d67a684c6fbfd5f0458eb7ba99891e" integrity sha512-q+suddWRfIcnyG5YiDP58sT65AJDZSUhXQDZE3r04AuqD6d/XLaQPPXSBzP2zGerkgBivqtQm9XKGLuHqBID6Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.7.4.tgz#aa673b356fe6b7e70d69b6e33a17fef641008578" integrity sha512-8OSs0FLe5/80cndziPlg4R0K6HcWSM0zyNhHhLsmw/Nc5MaA49cAsnoJ/t/YZf8qkG7fD+UjTRaApVDB526d7Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-sticky-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.7.4.tgz#ffb68c05090c30732076b1285dc1401b404a123c" integrity sha512-Ls2NASyL6qtVe1H1hXts9yuEeONV2TJZmplLONkMPUG158CtmnrzW5Q5teibM5UVOFjG0D3IC5mzXR6pPpUY7A== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.0.0" "@babel/plugin-transform-template-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.7.4.tgz#1eb6411736dd3fe87dbd20cc6668e5121c17d604" integrity sha512-sA+KxLwF3QwGj5abMHkHgshp9+rRz+oY9uoRil4CyLtgEuE/88dpkeWgNk5qKVsJE9iSfly3nvHapdRiIS2wnQ== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-typeof-symbol@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.7.4.tgz#3174626214f2d6de322882e498a38e8371b2140e" integrity sha512-KQPUQ/7mqe2m0B8VecdyaW5XcQYaePyl9R7IsKd+irzj6jvbhoGnRE+M0aNkyAzI07VfUQ9266L5xMARitV3wg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-unicode-regex@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.4.tgz#a3c0f65b117c4c81c5b6484f2a5e7b95346b83ae" integrity sha512-N77UUIV+WCvE+5yHw+oks3m18/umd7y392Zv7mYTpFqHtkpcc+QUz+gLJNTWVlWROIWeLqY0f3OjZxV5TcXnRw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/preset-env@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.7.4.tgz#ccaf309ae8d1ee2409c85a4e2b5e280ceee830f8" integrity sha512-Dg+ciGJjwvC1NIe/DGblMbcGq1HOtKbw8RLl4nIjlfcILKEOkWT/vRqPpumswABEBVudii6dnVwrBtzD7ibm4g== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-proposal-async-generator-functions" "^7.7.4" "@babel/plugin-proposal-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-syntax-top-level-await" "^7.7.4" "@babel/plugin-transform-arrow-functions" "^7.7.4" "@babel/plugin-transform-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions" "^7.7.4" "@babel/plugin-transform-block-scoping" "^7.7.4" "@babel/plugin-transform-classes" "^7.7.4" "@babel/plugin-transform-computed-properties" "^7.7.4" "@babel/plugin-transform-destructuring" "^7.7.4" "@babel/plugin-transform-dotall-regex" "^7.7.4" "@babel/plugin-transform-duplicate-keys" "^7.7.4" "@babel/plugin-transform-exponentiation-operator" "^7.7.4" "@babel/plugin-transform-for-of" "^7.7.4" "@babel/plugin-transform-function-name" "^7.7.4" "@babel/plugin-transform-literals" "^7.7.4" 
"@babel/plugin-transform-member-expression-literals" "^7.7.4" "@babel/plugin-transform-modules-amd" "^7.7.4" "@babel/plugin-transform-modules-commonjs" "^7.7.4" "@babel/plugin-transform-modules-systemjs" "^7.7.4" "@babel/plugin-transform-modules-umd" "^7.7.4" "@babel/plugin-transform-named-capturing-groups-regex" "^7.7.4" "@babel/plugin-transform-new-target" "^7.7.4" "@babel/plugin-transform-object-super" "^7.7.4" "@babel/plugin-transform-parameters" "^7.7.4" "@babel/plugin-transform-property-literals" "^7.7.4" "@babel/plugin-transform-regenerator" "^7.7.4" "@babel/plugin-transform-reserved-words" "^7.7.4" "@babel/plugin-transform-shorthand-properties" "^7.7.4" "@babel/plugin-transform-spread" "^7.7.4" "@babel/plugin-transform-sticky-regex" "^7.7.4" "@babel/plugin-transform-template-literals" "^7.7.4" "@babel/plugin-transform-typeof-symbol" "^7.7.4" "@babel/plugin-transform-unicode-regex" "^7.7.4" "@babel/types" "^7.7.4" browserslist "^4.6.0" core-js-compat "^3.1.1" invariant "^2.2.2" js-levenshtein "^1.1.3" semver "^5.5.0" "@babel/runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.7.4.tgz#b23a856751e4bf099262f867767889c0e3fe175b" integrity sha512-r24eVUUr0QqNZa+qrImUk8fn5SPhHq+IfYvIoIMg0do3GdK9sMdiLKP3GYVVaxpPKORgm8KRKaNTEhAjgIpLMw== dependencies: regenerator-runtime "^0.13.2" "@babel/template@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.7.4.tgz#428a7d9eecffe27deac0a98e23bf8e3675d2a77b" integrity sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw== dependencies: "@babel/code-frame" "^7.0.0" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" "@babel/traverse@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.7.4.tgz#9c1e7c60fb679fe4fcfaa42500833333c2058558" integrity sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw== dependencies: 
"@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.13" "@babel/types@^7.6.1", "@babel/types@^7.9.6": version "7.13.0" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.13.0.tgz#74424d2816f0171b4100f0ab34e9a374efdf7f80" integrity sha512-hE+HE8rnG1Z6Wzo+MhaKE5lM5eMx71T4EHJgku2E3xIfaULhDcxiiRxUYgwX8qwP1BBSlag+TdGOt6JAidIZTA== dependencies: "@babel/helper-validator-identifier" "^7.12.11" lodash "^4.17.19" to-fast-properties "^2.0.0" "@babel/types@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.7.4.tgz#516570d539e44ddf308c07569c258ff94fde9193" integrity sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA== dependencies: esutils "^2.0.2" lodash "^4.17.13" to-fast-properties "^2.0.0" "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== "@types/glob@^7.1.1": version "7.1.1" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== dependencies: "@types/events" "*" "@types/minimatch" "*" "@types/node" "*" "@types/minimatch@*": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== "@types/node@*": version "12.12.12" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.12.tgz#529bc3e73dbb35dd9e90b0a1c83606a9d3264bdb" integrity 
sha512-MGuvYJrPU0HUwqF7LqvIj50RZUX23Z+m583KBygKYUZLlZ88n6w28XRNJRJgsHukLEnLz6w6SvxZoLgbr5wLqQ== "@uirouter/angularjs@^1.0.15": version "1.0.23" resolved "https://registry.yarnpkg.com/@uirouter/angularjs/-/angularjs-1.0.23.tgz#aeec0f96b0c42187c5044ef244ba6ccb75a5d835" integrity sha512-r4hLSw7R3mwXGC5Sq7yxNlBK1sSzQUm/1MzigwwYRHoMO5uKcBPUhxFYx5U7kufP2Xl1165KeZvRsLCh0/Z1ng== dependencies: "@uirouter/core" "6.0.1" "@uirouter/core@6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/@uirouter/core/-/core-6.0.1.tgz#93b02a5d178a7ab7313f34b7b3f019a000d23396" integrity sha512-mHCutiHtDDRKYmrJ92XPKDoSb2bgqaDyUpHdF4hUE+riwgkCvGdBjL8u+VDTcV3slBAk6B0LBIOIajjWkkObbQ== "@webassemblyjs/ast@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== dependencies: "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@webassemblyjs/floating-point-hex-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== "@webassemblyjs/helper-api-error@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== "@webassemblyjs/helper-buffer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" integrity 
sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== "@webassemblyjs/helper-code-frame@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== dependencies: "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/helper-fsm@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" integrity sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== "@webassemblyjs/helper-module-context@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== dependencies: "@webassemblyjs/ast" "1.8.5" mamacro "^0.0.3" "@webassemblyjs/helper-wasm-bytecode@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== "@webassemblyjs/helper-wasm-section@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/ieee754@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" integrity sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== "@webassemblyjs/wasm-edit@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" integrity sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/helper-wasm-section" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-opt" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/wasm-gen@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wasm-opt@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" integrity sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wasm-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" integrity sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wast-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/floating-point-hex-parser" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-code-frame" "1.8.5" "@webassemblyjs/helper-fsm" "1.8.5" "@xtuc/long" "4.2.2" "@webassemblyjs/wast-printer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" integrity 
sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== abbrev@1.0.x: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU= accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== dependencies: mime-types "~2.1.24" negotiator "0.6.2" acorn@^6.2.1: version "6.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.3.0.tgz#0087509119ffa4fc0a0041d1e93a417e68cb856e" integrity sha512-/czfa8BwS88b9gWQVhc8eknunSA2DoJpJyTQkhheIf5E48u1N0R4q/YxxsAeqRrmK9TQ/uYfgLDfZo91UlANIA== acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" integrity sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8= ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" integrity 
sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: version "3.4.1" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" integrity sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== ajv@^5.0.0: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= dependencies: co "^4.6.0" fast-deep-equal "^1.0.0" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.3.0" ajv@^6.1.0, ajv@^6.10.2, ajv@^6.5.5: version "6.10.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== dependencies: fast-deep-equal "^2.0.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" integrity sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU= angular-animate@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-animate/-/angular-animate-1.7.9.tgz#a397f82434c1e7ed5b7a298fa70fc3de989a6785" integrity sha512-fV+AISy/HTzurQH2ngsJg+lLIvfu0ahc1h4AYKauaXVw97rZc2k4iUA1bMstiEyClsdayQX568kjQc1NK+oYhw== angular-mocks@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-mocks/-/angular-mocks-1.7.9.tgz#0a3b7e28b9a493b4e3010ed2b0f69a68e9b4f79b" integrity sha512-LQRqqiV3sZ7NTHBnNmLT0bXtE5e81t97+hkJ56oU0k3dqKv1s6F+nBWRlOVzqHWPGFOiPS8ZJVdrS8DFzHyNIA== angular-ui-bootstrap@^2.5.6: version "2.5.6" resolved "https://registry.yarnpkg.com/angular-ui-bootstrap/-/angular-ui-bootstrap-2.5.6.tgz#23937322ec641a6fbee16498cc32452aa199e7c5" integrity 
sha512-yzcHpPMLQl0232nDzm5P4iAFTFQ9dMw0QgFLuKYbDj9M0xJ62z0oudYD/Lvh1pWfRsukiytP4Xj6BHOSrSXP8A== angular@^1.7.9: version "1.8.0" resolved "https://registry.yarnpkg.com/angular/-/angular-1.8.0.tgz#b1ec179887869215cab6dfd0df2e42caa65b1b51" integrity sha512-VdaMx+Qk0Skla7B5gw77a8hzlcOakwF8mjlW13DpIWIDlfqwAbSSLfd8N/qZnzEmQF4jC4iofInd3gE7vL8ZZg== ansi-colors@^3.0.0: version "3.2.4" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== ansi-html@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-regex@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.0, ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" anymatch@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== dependencies: micromatch "^3.1.4" normalize-path "^2.1.1" anymatch@~3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" aproba@^1.0.3, aproba@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" arr-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-union@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= array-flatten@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= array-flatten@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= array-unique@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= arraybuffer.slice@~0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675" integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog== asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= asn1.js@^4.0.0: version "4.10.1" resolved 
"https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== dependencies: bn.js "^4.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-never@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/assert-never/-/assert-never-1.2.1.tgz#11f0e363bf146205fb08193b5c7b90f4d1cf44fe" integrity sha512-TaTivMB6pYI1kXwrFlEhLeGfOqoDNdTxjCdwRfFFkEA30Eu+k48W34nlok2EYWJfFFzqaEmichdNM7th6M5HNw== assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assert@^1.1.1: version "1.5.0" resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== dependencies: object-assign "^4.1.1" util "0.10.3" assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= async-each@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async-limiter@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" integrity 
sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== async@1.x: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= async@^2.0.0, async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== dependencies: lodash "^4.17.14" asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= atob@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== autoprefixer@^9.5.1: version "9.7.2" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.7.2.tgz#26cf729fbb709323b40171a874304884dcceffed" integrity sha512-LCAfcdej1182uVvPOZnytbq61AhnOZ/4JelDaJGDeNwewyU1AMaNthcHsyz1NRjTmd2FkurMckLWfkHg3Z//KA== dependencies: browserslist "^4.7.3" caniuse-lite "^1.0.30001010" chalk "^2.4.2" normalize-range "^0.1.2" num2fraction "^1.2.2" postcss "^7.0.23" postcss-value-parser "^4.0.2" aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.26.0: version "6.26.0" resolved 
"https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" js-tokens "^3.0.2" babel-generator@^6.18.0: version "6.26.1" resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== dependencies: babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" detect-indent "^4.0.0" jsesc "^1.3.0" lodash "^4.17.4" source-map "^0.5.7" trim-right "^1.0.1" babel-loader@^8.0.5: version "8.0.6" resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb" integrity sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw== dependencies: find-cache-dir "^2.0.0" loader-utils "^1.0.2" mkdirp "^0.5.1" pify "^4.0.1" babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= dependencies: babel-runtime "^6.22.0" babel-plugin-dynamic-import-node@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== dependencies: object.assign "^4.1.0" babel-runtime@^6.0.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= dependencies: core-js "^2.4.0" regenerator-runtime "^0.11.0" babel-template@^6.16.0: version "6.26.0" 
resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= dependencies: babel-runtime "^6.26.0" babel-traverse "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" lodash "^4.17.4" babel-traverse@^6.18.0, babel-traverse@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= dependencies: babel-code-frame "^6.26.0" babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" debug "^2.6.8" globals "^9.18.0" invariant "^2.2.2" lodash "^4.17.4" babel-types@^6.18.0, babel-types@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: babel-runtime "^6.26.0" esutils "^2.0.2" lodash "^4.17.4" to-fast-properties "^1.0.3" babel-walk@3.0.0-canary-5: version "3.0.0-canary-5" resolved "https://registry.yarnpkg.com/babel-walk/-/babel-walk-3.0.0-canary-5.tgz#f66ecd7298357aee44955f235a6ef54219104b11" integrity sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw== dependencies: "@babel/types" "^7.9.6" babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= 
base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" integrity sha1-c5JncZI7Whl0etZmqlzUv5xunOg= base64-js@^1.0.2: version "1.3.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== base64id@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY= base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" class-utils "^0.3.5" component-emitter "^1.2.1" define-property "^1.0.0" isobject "^3.0.1" mixin-deep "^1.2.0" pascalcase "^0.1.1" batch@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" better-assert@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" integrity sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI= dependencies: callsite "1.0.0" big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== big.js@^5.2.2: version "5.2.2" resolved 
"https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^1.0.0: version "1.13.1" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binary-extensions@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c" integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow== blob@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.5.tgz#d680eeef25f8cd91ad533f5b01eed48e64caf683" integrity sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig== bluebird@^3.3.0, bluebird@^3.5.5: version "3.7.1" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.1.tgz#df70e302b471d7473489acf26a93d63b53f874de" integrity sha512-DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg== bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== body-parser@1.19.0, body-parser@^1.16.1: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== dependencies: bytes "3.1.0" content-type "~1.0.4" debug "2.6.9" depd "~1.1.2" http-errors "1.7.2" iconv-lite "0.4.24" on-finished "~2.3.0" qs "6.7.0" raw-body "2.4.0" 
type-is "~1.6.17" bonjour@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= dependencies: array-flatten "^2.1.0" deep-equal "^1.0.1" dns-equal "^1.0.0" dns-txt "^2.0.2" multicast-dns "^6.0.1" multicast-dns-service-types "^1.1.0" boolbase@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" braces@^2.3.1, braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" repeat-element "^1.1.2" snapdragon "^0.8.1" snapdragon-node "^2.0.1" split-string "^3.0.2" to-regex "^3.0.1" braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" brorand@^1.0.1, brorand@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" resolved 
"https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== dependencies: buffer-xor "^1.0.3" cipher-base "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.3" inherits "^2.0.1" safe-buffer "^5.0.1" browserify-cipher@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== dependencies: browserify-aes "^1.0.4" browserify-des "^1.0.0" evp_bytestokey "^1.0.0" browserify-des@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== dependencies: cipher-base "^1.0.1" des.js "^1.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" browserify-rsa@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= dependencies: bn.js "^4.1.0" randombytes "^2.0.1" browserify-sign@^4.0.0: version "4.0.4" resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= dependencies: bn.js "^4.1.1" browserify-rsa "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.2" elliptic "^6.0.0" inherits "^2.0.1" parse-asn1 "^5.0.0" browserify-zlib@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== dependencies: pako "~1.0.5" 
browserslist@^4.6.0, browserslist@^4.7.3: version "4.16.6" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== dependencies: caniuse-lite "^1.0.30001219" colorette "^1.2.2" electron-to-chromium "^1.3.723" escalade "^3.1.1" node-releases "^1.1.71" buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== buffer-alloc@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== dependencies: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" buffer-fill@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== buffer-indexof@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= buffer@^4.3.0: version "4.9.2" resolved 
"https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" isarray "^1.0.0" "buildbot-build-common@link:../build_common": version "1.0.0" dependencies: "@babel/core" "^7.4.3" "@babel/plugin-syntax-dynamic-import" "^7.2.0" "@babel/plugin-transform-runtime" "^7.4.3" "@babel/preset-env" "^7.4.3" "@babel/runtime" "^7.4.3" autoprefixer "^9.5.1" babel-loader "^8.0.5" css-loader "^2.1.1" file-loader "^3.0.1" html-webpack-plugin "^3.2.0" import-glob-loader "^1.1.0" istanbul-instrumenter-loader "^3.0.1" jasmine-core "^3.4.0" karma "^4.1.0" karma-chrome-launcher "^2.2.0" karma-coverage "^1.1.2" karma-jasmine "^2.0.1" karma-sourcemap-loader "^0.3.7" karma-spec-reporter "^0.0.32" karma-webpack "^3.0.5" less "^3.9.0" less-loader "^5.0.0" mini-css-extract-plugin "^0.6.0" node-libs-browser "^2.2.0" null-loader "^1.0.0" postcss-loader "^3.0.0" pug "^3.0.1" raw-loader "^2.0.0" style-loader "^0.23.1" webpack "^4.30.0" webpack-cli "^3.3.1" webpack-dev-server "^3.3.1" webpack-fix-style-only-entries "^0.2.1" webpack-shell-plugin "^0.5.0" builtin-status-codes@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= bytes@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== cacache@^12.0.2: version "12.0.3" resolved 
"https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== dependencies: bluebird "^3.5.5" chownr "^1.1.1" figgy-pudding "^3.5.1" glob "^7.1.4" graceful-fs "^4.1.15" infer-owner "^1.0.3" lru-cache "^5.1.1" mississippi "^3.0.0" mkdirp "^0.5.1" move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.3" ssri "^6.0.1" unique-filename "^1.1.1" y18n "^4.0.0" cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" component-emitter "^1.2.1" get-value "^2.0.6" has-value "^1.0.0" isobject "^3.0.1" set-value "^2.0.0" to-object-path "^0.3.0" union-value "^1.0.0" unset-value "^1.0.0" caller-callsite@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= dependencies: callsites "^2.0.0" caller-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= dependencies: caller-callsite "^2.0.0" callsite@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" integrity sha1-KAOY5dZkvXQDi28JBRU+borxvCA= callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= camel-case@3.0.x: version "3.0.0" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" integrity 
sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= dependencies: no-case "^2.2.0" upper-case "^1.1.1" camelcase-keys@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= dependencies: camelcase "^2.0.0" map-obj "^1.0.0" camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= camelcase@^5.0.0, camelcase@^5.2.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== caniuse-lite@^1.0.30001010, caniuse-lite@^1.0.30001219: version "1.0.30001228" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz#bfdc5942cd3326fa51ee0b42fbef4da9d492a7fa" integrity sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A== caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" 
character-parser@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" integrity sha1-x84o821LzZdE5f/CxfzeHHMmH8A= dependencies: is-regex "^1.0.3" chokidar@^2.0.2, chokidar@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== dependencies: anymatch "^2.0.0" async-each "^1.0.1" braces "^2.3.2" glob-parent "^3.1.0" inherits "^2.0.3" is-binary-path "^1.0.0" is-glob "^4.0.0" normalize-path "^3.0.0" path-is-absolute "^1.0.0" readdirp "^2.2.1" upath "^1.1.1" optionalDependencies: fsevents "^1.2.7" chokidar@^3.0.0: version "3.3.0" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6" integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A== dependencies: anymatch "~3.1.1" braces "~3.0.2" glob-parent "~5.1.0" is-binary-path "~2.1.0" is-glob "~4.0.1" normalize-path "~3.0.0" readdirp "~3.2.0" optionalDependencies: fsevents "~2.1.1" chownr@^1.1.1, chownr@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== chrome-trace-event@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== dependencies: tslib "^1.9.0" cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" integrity 
sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" define-property "^0.2.5" isobject "^3.0.0" static-extend "^0.1.1" clean-css@4.2.x: version "4.2.1" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" integrity sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== dependencies: source-map "~0.6.0" cliui@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== dependencies: string-width "^2.1.1" strip-ansi "^4.0.0" wrap-ansi "^2.0.0" cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== dependencies: string-width "^3.1.0" strip-ansi "^5.2.0" wrap-ansi "^5.1.0" clone@^2.1.1, clone@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= 
collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit "^1.0.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colorette@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== colors@^1.1.0, colors@^1.1.2: version "1.4.0" resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@2.17.x: version "2.17.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== commander@^2.20.0: version "2.20.3" resolved 
"https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@~2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= component-bind@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E= component-emitter@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== component-inherit@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" integrity sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM= compressible@~2.0.16: version "2.0.17" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.17.tgz#6e8c108a16ad58384a977f3a482ca20bff2f38c1" integrity sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw== dependencies: mime-db ">= 1.40.0 < 2" compression@^1.7.4: version "1.7.4" resolved 
"https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" bytes "3.0.0" compressible "~2.0.16" debug "2.6.9" on-headers "~1.0.2" safe-buffer "5.1.2" vary "~1.1.2" concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= concat-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" inherits "^2.0.3" readable-stream "^2.2.2" typedarray "^0.0.6" connect-history-api-fallback@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== connect@^3.6.0: version "3.7.0" resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== dependencies: debug "2.6.9" finalhandler "1.1.2" parseurl "~1.3.3" utils-merge "1.0.1" console-browserify@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= constantinople@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-4.0.1.tgz#0def113fa0e4dc8de83331a5cf79c8b325213151" integrity sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw== dependencies: "@babel/parser" "^7.6.0" "@babel/types" "^7.6.1" constants-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= content-disposition@0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== dependencies: safe-buffer "5.1.2" content-type@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.5.0, convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== dependencies: safe-buffer "~5.1.1" cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= cookie@0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" 
integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= cookie@0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== copy-concurrently@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== dependencies: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.0" copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= core-js-compat@^3.1.1: version "3.4.2" resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.4.2.tgz#652fa7c54652b7f6586a893e37001df55ea2ac37" integrity sha512-W0Aj+LM3EAxxjD0Kp2o4be8UlnxIZHNupBv2znqrheR4aY2nOn91794k/xoSp+SxqqriiZpTsSwBtZr60cbkwQ== dependencies: browserslist "^4.7.3" semver "^6.3.0" core-js@^2.4.0: version "2.6.10" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.10.tgz#8a5b8391f8cc7013da703411ce5b585706300d7f" integrity sha512-I39t74+4t+zau64EN1fE5v2W31Adtc/REhzWN+gWRRXg6WH5qAsZm62DHpQ1+Yhe4047T55jvzz7MUqF/dBBlA== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cosmiconfig@^5.0.0: version "5.2.1" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== dependencies: import-fresh "^2.0.0" is-directory 
"^0.3.1" js-yaml "^3.13.1" parse-json "^4.0.0" create-ecdh@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== dependencies: bn.js "^4.1.0" elliptic "^6.0.0" create-hash@^1.1.0, create-hash@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== dependencies: cipher-base "^1.0.1" inherits "^2.0.1" md5.js "^1.3.4" ripemd160 "^2.0.1" sha.js "^2.4.0" create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: version "1.1.7" resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== dependencies: cipher-base "^1.0.3" create-hash "^1.1.0" inherits "^2.0.1" ripemd160 "^2.0.0" safe-buffer "^5.0.1" sha.js "^2.4.8" cross-spawn@6.0.5, cross-spawn@^6.0.0: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== dependencies: nice-try "^1.0.4" path-key "^2.0.1" semver "^5.5.0" shebang-command "^1.2.0" which "^1.2.9" crypto-browserify@^3.11.0: version "3.12.0" resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== dependencies: browserify-cipher "^1.0.0" browserify-sign "^4.0.0" create-ecdh "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.0" diffie-hellman "^5.0.0" inherits "^2.0.1" 
pbkdf2 "^3.0.3" public-encrypt "^4.0.0" randombytes "^2.0.0" randomfill "^1.0.3" css-loader@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-2.1.1.tgz#d8254f72e412bb2238bb44dd674ffbef497333ea" integrity sha512-OcKJU/lt232vl1P9EEDamhoO9iKY3tIjY5GU+XDLblAykTdgs6Ux9P1hTHve8nFKy5KPpOXOsVI/hIwi3841+w== dependencies: camelcase "^5.2.0" icss-utils "^4.1.0" loader-utils "^1.2.3" normalize-path "^3.0.0" postcss "^7.0.14" postcss-modules-extract-imports "^2.0.0" postcss-modules-local-by-default "^2.0.6" postcss-modules-scope "^2.1.0" postcss-modules-values "^2.0.0" postcss-value-parser "^3.3.0" schema-utils "^1.0.0" css-select@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= dependencies: boolbase "~1.0.0" css-what "2.1" domutils "1.5.1" nth-check "~1.0.1" css-what@2.1: version "2.1.3" resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== cssesc@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== currently-unhandled@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= dependencies: array-find-index "^1.0.1" custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha1-XQKkaFCt8bSjF5RqOSj8y1v9BCU= cyclist@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= d@1, d@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" type "^1.0.1" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" date-format@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/date-format/-/date-format-2.1.0.tgz#31d5b5ea211cf5fd764cd38baf9d033df7e125cf" integrity sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA== dateformat@^1.0.6: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" integrity sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk= dependencies: get-stdin "^4.0.1" meow "^3.3.0" debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^3.1.1, debug@^3.2.5, debug@^3.2.6: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity 
sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" debug@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== dependencies: ms "2.0.0" decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= decode-uri-component@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= deep-equal@^1.0.1: version "1.1.1" resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== dependencies: is-arguments "^1.0.4" is-date-object "^1.0.1" is-regex "^1.0.4" object-is "^1.0.1" object-keys "^1.1.1" regexp.prototype.flags "^1.2.0" deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= default-gateway@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== dependencies: execa "^1.0.0" ip-regex "^2.1.0" 
define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" define-property@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" isobject "^3.0.1" del@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== dependencies: "@types/glob" "^7.1.1" globby "^6.1.0" is-path-cwd "^2.0.0" is-path-in-cwd "^2.0.0" p-map "^2.0.0" pify "^4.0.1" rimraf "^2.6.3" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@~1.1.2: version "1.1.2" resolved 
"https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= des.js@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== dependencies: inherits "^2.0.1" minimalistic-assert "^1.0.0" destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= detect-indent@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= dependencies: repeating "^2.0.0" detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= detect-node@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw= diffie-hellman@^5.0.0: version "5.0.3" resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== dependencies: bn.js "^4.1.0" miller-rabin 
"^4.0.0" randombytes "^2.0.0" dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= dns-packet@^1.3.1: version "1.3.4" resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.4.tgz#e3455065824a2507ba886c55a89963bb107dec6f" integrity sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA== dependencies: ip "^1.1.0" safe-buffer "^5.0.1" dns-txt@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= dependencies: buffer-indexof "^1.0.0" doctypes@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" integrity sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk= dom-converter@^0.2: version "0.2.0" resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" dom-serialize@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" integrity sha1-ViromZ9Evl6jB29UGdzVnrQ6yVs= dependencies: custom-event "~1.0.0" ent "~2.2.0" extend "^3.0.0" void-elements "^2.0.0" dom-serializer@0: version "0.2.2" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== dependencies: domelementtype "^2.0.1" entities "^2.0.0" domain-browser@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" 
integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== domelementtype@1, domelementtype@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== domelementtype@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d" integrity sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ== domhandler@^2.3.0: version "2.4.2" resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== dependencies: domelementtype "1" domutils@1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= dependencies: dom-serializer "0" domelementtype "1" domutils@^1.5.1: version "1.7.0" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== dependencies: dom-serializer "0" domelementtype "1" duplexify@^3.4.2, duplexify@^3.6.0: version "3.7.1" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" inherits "^2.0.1" readable-stream "^2.0.0" stream-shift "^1.0.0" ecc-jsbn@~0.1.1: version "0.1.2" resolved 
"https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.3.723: version "1.3.738" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.738.tgz#aec24b091c82acbfabbdcce08076a703941d17ca" integrity sha512-vCMf4gDOpEylPSLPLSwAEsz+R3ShP02Y3cAKMZvTqule3XcPp7tgc/0ESI7IS6ZeyBlGClE50N53fIOkcIVnpw== elliptic@^6.0.0: version "6.5.4" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== dependencies: bn.js "^4.11.9" brorand "^1.1.0" hash.js "^1.0.0" hmac-drbg "^1.0.1" inherits "^2.0.4" minimalistic-assert "^1.0.1" minimalistic-crypto-utils "^1.0.1" emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== emojis-list@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity 
sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" engine.io-client@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.2.1.tgz#6f54c0475de487158a1a7c77d10178708b6add36" integrity sha512-y5AbkytWeM4jQr7m/koQLc5AxpRKC1hEVUb/s1FUAWEJq5AzJJ4NLvzuKPuxtDi5Mq755WuDvZ6Iv2rXj4PTzw== dependencies: component-emitter "1.2.1" component-inherit "0.0.3" debug "~3.1.0" engine.io-parser "~2.1.1" has-cors "1.1.0" indexof "0.0.1" parseqs "0.0.5" parseuri "0.0.5" ws "~3.3.1" xmlhttprequest-ssl "~1.5.4" yeast "0.1.2" engine.io-parser@~2.1.0, engine.io-parser@~2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6" integrity sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA== dependencies: after "0.8.2" arraybuffer.slice "~0.0.7" base64-arraybuffer "0.1.5" blob "0.0.5" has-binary2 "~1.0.2" engine.io@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.2.1.tgz#b60281c35484a70ee0351ea0ebff83ec8c9522a2" integrity sha512-+VlKzHzMhaU+GsCIg4AoXF1UdDFjHHwMmMKqMJNDNLlUlejz58FCy4LBqB2YVJskHGYl06BatYWKP2TVdVXE5w== dependencies: accepts "~1.3.4" base64id "1.0.0" cookie "0.3.1" debug "~3.1.0" engine.io-parser "~2.1.0" ws "~3.3.1" enhanced-resolve@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" tapable "^1.0.0" enhanced-resolve@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" integrity 
sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== dependencies: graceful-fs "^4.1.2" memory-fs "^0.5.0" tapable "^1.0.0" ent@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= entities@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4" integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw== errno@^0.1.1, errno@^0.1.3, errno@~0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== dependencies: prr "~1.0.1" error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es-abstract@^1.5.1: version "1.16.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.16.0.tgz#d3a26dc9c3283ac9750dca569586e976d9dcc06d" integrity sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg== dependencies: es-to-primitive "^1.2.0" function-bind "^1.1.1" has "^1.0.3" has-symbols "^1.0.0" is-callable "^1.1.4" is-regex "^1.0.4" object-inspect "^1.6.0" object-keys "^1.1.1" string.prototype.trimleft "^2.1.0" string.prototype.trimright "^2.1.0" es-to-primitive@^1.2.0: version "1.2.1" resolved 
"https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50: version "0.10.53" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== dependencies: es6-iterator "~2.0.3" es6-symbol "~3.1.3" next-tick "~1.0.0" es6-iterator@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= dependencies: d "1" es5-ext "^0.10.35" es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== dependencies: d "^1.0.1" ext "^1.1.2" escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@1.8.x: version "1.8.1" resolved 
"https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" integrity sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg= dependencies: esprima "^2.7.1" estraverse "^1.9.1" esutils "^2.0.2" optionator "^0.8.1" optionalDependencies: source-map "~0.2.0" eslint-scope@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== dependencies: esrecurse "^4.1.0" estraverse "^4.1.1" esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esrecurse@^4.1.0: version "4.2.1" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== dependencies: estraverse "^4.1.0" estraverse@^1.9.1: version "1.9.3" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= estraverse@^4.1.0, estraverse@^4.1.1: version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity 
sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" integrity sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== eventsource@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== dependencies: original "^1.0.0" evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== dependencies: md5.js "^1.3.4" safe-buffer "^5.1.1" execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== dependencies: cross-spawn "^6.0.0" get-stream "^4.0.0" is-stream "^1.1.0" npm-run-path "^2.0.0" p-finally "^1.0.0" signal-exit "^3.0.0" strip-eof "^1.0.0" expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity 
sha1-t3c14xXOMPa27/D4OwQVGiJEliI= dependencies: debug "^2.3.3" define-property "^0.2.5" extend-shallow "^2.0.1" posix-character-classes "^0.1.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= dependencies: homedir-polyfill "^1.0.1" express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== dependencies: accepts "~1.3.7" array-flatten "1.1.1" body-parser "1.19.0" content-disposition "0.5.3" content-type "~1.0.4" cookie "0.4.0" cookie-signature "1.0.6" debug "2.6.9" depd "~1.1.2" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" finalhandler "~1.1.2" fresh "0.5.2" merge-descriptors "1.0.1" methods "~1.1.2" on-finished "~2.3.0" parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.5" qs "6.7.0" range-parser "~1.2.1" safe-buffer "5.1.2" send "0.17.1" serve-static "1.14.1" setprototypeof "1.1.1" statuses "~1.5.0" type-is "~1.6.18" utils-merge "1.0.1" vary "~1.1.2" ext@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.2.0.tgz#8dd8d2dd21bcced3045be09621fa0cbf73908ba4" integrity sha512-0ccUQK/9e3NreLFg6K6np8aPyRgwycx+oFGtfx1dSp7Wj00Ozw9r05FgBRlzjf2XBM7LAzwgLyDscRrtSU91hA== dependencies: type "^2.0.0" extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" 
integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= dependencies: assign-symbols "^1.0.0" is-extendable "^1.0.1" extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== dependencies: array-unique "^0.3.2" define-property "^1.0.0" expand-brackets "^2.1.4" extend-shallow "^2.0.1" fragment-cache "^0.2.1" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= fast-deep-equal@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= fast-levenshtein@~2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= faye-websocket@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" integrity sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= dependencies: websocket-driver ">=0.5.1" faye-websocket@~0.11.1: version "0.11.3" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.3.tgz#5c0e9a8968e8912c286639fde977a8b209f2508e" integrity sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== dependencies: websocket-driver ">=0.5.1" figgy-pudding@^3.5.1: version "3.5.2" resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== file-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-3.0.1.tgz#f8e0ba0b599918b51adfe45d66d1e771ad560faa" integrity sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw== dependencies: loader-utils "^1.0.2" schema-utils "^1.0.0" fill-range@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= dependencies: extend-shallow "^2.0.1" is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range "^2.1.0" fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" finalhandler@1.1.2, finalhandler@~1.1.2: version "1.1.2" resolved 
"https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" on-finished "~2.3.0" parseurl "~1.3.3" statuses "~1.5.0" unpipe "~1.0.0" find-cache-dir@^2.0.0, find-cache-dir@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== dependencies: commondir "^1.0.1" make-dir "^2.0.0" pkg-dir "^3.0.0" find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= dependencies: path-exists "^2.0.0" pinkie-promise "^2.0.0" find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" findup-sync@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== dependencies: detect-file "^1.0.0" is-glob "^4.0.0" micromatch "^3.0.4" resolve-dir "^1.0.1" flatted@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.1.tgz#69e57caa8f0eacbc281d2e2cb458d46fdb449e08" integrity sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg== flush-write-stream@^1.0.0: version "1.1.1" resolved 
"https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== dependencies: inherits "^2.0.3" readable-stream "^2.3.6" follow-redirects@^1.0.0: version "1.13.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.0.tgz#b42e8d93a2a7eea5ed88633676d6597bc8e384db" integrity sha512-aq6gF1BEKje4a9i9+5jimNFIpq4Q1WiwBToeRK5NvZBd/TRsmW8BsJfOEGkr76TbOyPVD3OVDN910EcUNtRYEA== for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" forwarded@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= dependencies: map-cache "^0.2.2" fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= from2@^2.1.0: version "2.3.0" resolved 
"https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= dependencies: inherits "^2.0.1" readable-stream "^2.0.0" fs-access@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" integrity sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o= dependencies: null-check "^1.0.0" fs-extra@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== dependencies: graceful-fs "^4.1.2" jsonfile "^4.0.0" universalify "^0.1.0" fs-minipass@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== dependencies: minipass "^2.6.0" fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= dependencies: graceful-fs "^4.1.2" iferr "^0.1.5" imurmurhash "^0.1.4" readable-stream "1 || 2" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.2.7: version "1.2.9" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: nan "^2.12.1" node-pre-gyp "^0.12.0" fsevents@~2.1.1: version "2.1.2" resolved 
"https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805" integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA== function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" has-unicode "^2.0.0" object-assign "^4.1.0" signal-exit "^3.0.0" string-width "^1.0.1" strip-ansi "^3.0.1" wide-align "^1.1.0" get-caller-file@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-caller-file@^2.0.1: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-stdin@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= get-stream@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== dependencies: pump "^3.0.0" get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-parent@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= dependencies: is-glob "^3.1.0" path-dirname "^1.0.0" glob-parent@~5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.0.tgz#5f4c1d1e748d30cd73ad2944b3577a81b081e8c2" integrity sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw== dependencies: is-glob "^4.0.1" glob@^5.0.13, glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" integrity sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E= dependencies: inflight "^1.0.4" inherits "2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^7.0.3, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" global-modules@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== dependencies: global-prefix "^3.0.0" global-modules@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" is-windows "^1.0.1" resolve-dir "^1.0.0" global-prefix@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= dependencies: expand-tilde "^2.0.2" homedir-polyfill "^1.0.1" ini "^1.3.4" is-windows "^1.0.1" which "^1.2.14" global-prefix@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== dependencies: ini "^1.3.5" kind-of "^6.0.2" which "^1.3.1" globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== globby@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" glob "^7.0.3" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: version "4.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity 
sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== handle-thing@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754" integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ== handlebars@^4.0.1: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== dependencies: minimist "^1.2.5" neo-async "^2.6.0" source-map "^0.6.1" wordwrap "^1.0.0" optionalDependencies: uglify-js "^3.1.4" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.0: version "5.1.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: ajv "^6.5.5" har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-binary2@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/has-binary2/-/has-binary2-1.0.3.tgz#7776ac627f3ea77250cfc332dab7ddf5e4f5d11d" integrity sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw== dependencies: isarray "2.0.1" has-cors@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk= has-flag@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-symbols@^1.0.0, has-symbols@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= has-value@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= dependencies: get-value "^2.0.3" has-values "^0.1.4" isobject "^2.0.0" has-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: get-value "^2.0.6" has-values "^1.0.0" isobject "^3.0.0" has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= has-values@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= dependencies: is-number "^3.0.0" kind-of "^4.0.0" has@^1.0.1, has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity 
sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hash-base@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== dependencies: inherits "^2.0.3" minimalistic-assert "^1.0.1" he@1.2.x: version "1.2.0" resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== hmac-drbg@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= dependencies: hash.js "^1.0.3" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.1" homedir-polyfill@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== hpack.js@^2.1.6: version "2.1.6" resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= 
dependencies: inherits "^2.0.1" obuf "^1.0.0" readable-stream "^2.0.1" wbuf "^1.1.0" html-entities@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8= html-minifier@^3.2.3: version "3.5.21" resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== dependencies: camel-case "3.0.x" clean-css "4.2.x" commander "2.17.x" he "1.2.x" param-case "2.1.x" relateurl "0.2.x" uglify-js "3.4.x" html-webpack-plugin@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= dependencies: html-minifier "^3.2.3" loader-utils "^0.2.16" lodash "^4.17.3" pretty-error "^2.0.2" tapable "^1.0.0" toposort "^1.0.0" util.promisify "1.0.0" htmlparser2@^3.3.0: version "3.10.1" resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== dependencies: domelementtype "^1.3.1" domhandler "^2.3.0" domutils "^1.5.1" entities "^1.1.1" inherits "^2.0.1" readable-stream "^3.1.1" http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= http-errors@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.1" 
statuses ">= 1.5.0 < 2" toidentifier "1.0.0" http-errors@~1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.0" statuses ">= 1.4.0 < 2" http-errors@~1.7.2: version "1.7.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== dependencies: depd "~1.1.2" inherits "2.0.4" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" "http-parser-js@>=0.4.0 <0.4.11": version "0.4.10" resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4" integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q= http-proxy-middleware@0.19.1: version "0.19.1" resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== dependencies: http-proxy "^1.17.0" is-glob "^4.0.0" lodash "^4.17.11" micromatch "^3.1.10" http-proxy@^1.13.0, http-proxy@^1.17.0: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" requires-port "^1.0.0" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-browserify@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= iconv-lite@0.4.24, iconv-lite@^0.4.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" icss-replace-symbols@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= icss-utils@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== dependencies: postcss "^7.0.14" ieee754@^1.1.4: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== iferr@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= ignore-walk@^3.0.1: version "3.0.3" resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== dependencies: minimatch "^3.0.4" image-size@~0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= import-cwd@^2.0.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= dependencies: import-from "^2.1.0" import-fresh@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= dependencies: caller-path "^2.0.0" resolve-from "^3.0.0" import-from@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" integrity sha1-M1238qev/VOqpHHUuAId7ja387E= dependencies: resolve-from "^3.0.0" import-glob-loader@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/import-glob-loader/-/import-glob-loader-1.1.0.tgz#98d84c0f661c8ba9f821d9ddb7c6b6dc8e97eca2" integrity sha1-mNhMD2Yci6n4Idndt8a23I6X7KI= dependencies: glob "^5.0.13" loader-utils "^0.2.10" import-local@2.0.0, import-local@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== dependencies: pkg-dir "^3.0.0" resolve-cwd "^2.0.0" imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= dependencies: repeating "^2.0.0" indexes-of@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= indexof@0.0.1: version "0.0.1" resolved 
"https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= infer-owner@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: version "1.3.7" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84" integrity sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ== internal-ip@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" integrity sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== dependencies: default-gateway "^4.2.0" ipaddr.js "^1.9.0" interpret@1.2.0: version "1.2.0" resolved 
"https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" invert-kv@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= ip@^1.1.0, ip@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= ipaddr.js@1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== ipaddr.js@^1.9.0: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== is-absolute-url@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== is-accessor-descriptor@^0.1.6: version "0.1.6" resolved 
"https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arguments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.0.4.tgz#3faf966c7cba0ff437fb31f6250082fcf0448cf3" integrity sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA== is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-callable@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity 
sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== is-core-module@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== dependencies: has "^1.0.3" is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== dependencies: kind-of "^6.0.0" is-date-object@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" is-data-descriptor "^1.0.0" kind-of "^6.0.2" is-directory@^0.3.1: version "0.3.1" resolved 
"https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= is-expression@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-4.0.0.tgz#c33155962abf21d0afd2552514d67d2ec16fd2ab" integrity sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A== dependencies: acorn "^7.1.1" object-assign "^4.1.1" is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= is-extendable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^2.1.0, is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" integrity sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= is-glob@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-cwd@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== is-path-in-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== dependencies: is-path-inside "^2.1.0" is-path-inside@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== dependencies: path-is-inside "^1.0.2" is-plain-obj@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" 
integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= is-regex@^1.0.3, is-regex@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= dependencies: has "^1.0.1" is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-symbol@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== dependencies: has-symbols "^1.0.1" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== is-wsl@^1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isarray@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.1.tgz#a37d94ed9cda2d59865c9f76fe596ee1f338741e" integrity sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4= isbinaryfile@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" integrity sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== dependencies: buffer-alloc "^1.2.0" isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul-instrumenter-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/istanbul-instrumenter-loader/-/istanbul-instrumenter-loader-3.0.1.tgz#9957bd59252b373fae5c52b7b5188e6fde2a0949" integrity sha512-a5SPObZgS0jB/ixaKSMdn6n/gXSrK2S6q/UfRJBT3e6gQmVjwZROTODQsYW5ZNwOu78hG62Y3fWlebaVOL0C+w== dependencies: convert-source-map "^1.5.0" 
istanbul-lib-instrument "^1.7.3" loader-utils "^1.1.0" schema-utils "^0.3.0" istanbul-lib-coverage@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0" integrity sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ== istanbul-lib-instrument@^1.7.3: version "1.10.2" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca" integrity sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A== dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" babylon "^6.18.0" istanbul-lib-coverage "^1.2.1" semver "^5.3.0" istanbul@^0.4.0: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" integrity sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs= dependencies: abbrev "1.0.x" async "1.x" escodegen "1.8.x" esprima "2.7.x" glob "^5.0.15" handlebars "^4.0.1" js-yaml "3.x" mkdirp "0.5.x" nopt "3.x" once "1.x" resolve "1.1.x" supports-color "^3.1.0" which "^1.1.1" wordwrap "^1.0.0" jasmine-core@^3.3, jasmine-core@^3.4.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-3.5.0.tgz#132c23e645af96d85c8bca13c8758b18429fc1e4" integrity sha512-nCeAiw37MIMA9w9IXso7bRaLl+c/ef3wnxsoSAlYrzS+Ot0zTG6nU8G/cIfGkqpkjX2wNaIW9RFG0TwIFnG6bA== jquery@^3.4.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.5.0.tgz#9980b97d9e4194611c36530e7dc46a58d7340fc9" integrity sha512-Xb7SVYMvygPxbFMpTFQiHh1J7HClEaThguL15N/Gg37Lri/qKyhRGZYzHRyLH8Stq3Aow0LsHO2O2ci86fCrNQ== js-levenshtein@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" integrity 
sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-stringify@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" integrity sha1-Fzb939lyTyijaCrcYjCufk6Weds= "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@3.x, js-yaml@^3.13.1: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== json-schema-traverse@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= json3@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== json5@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== dependencies: minimist "^1.2.0" json5@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" 
integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== dependencies: minimist "^1.2.0" jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= optionalDependencies: graceful-fs "^4.1.6" jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" jstransformer@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" integrity sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM= dependencies: is-promise "^2.0.0" promise "^7.0.1" karma-chrome-launcher@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" integrity sha512-uf/ZVpAabDBPvdPdveyk1EPgbnloPvFFGgmRhYLTDH7gEB4nZdSBk8yTU47w1g/drLSx5uMOkjKk7IWKfWg/+w== dependencies: fs-access "^1.0.0" which "^1.2.1" karma-coverage@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/karma-coverage/-/karma-coverage-1.1.2.tgz#cc09dceb589a83101aca5fe70c287645ef387689" integrity sha512-eQawj4Cl3z/CjxslYy9ariU4uDh7cCNFZHNWXWRpl0pNeblY/4wHR7M7boTYXWrn9bY0z2pZmr11eKje/S/hIw== dependencies: dateformat "^1.0.6" istanbul "^0.4.0" lodash "^4.17.0" minimatch "^3.0.0" source-map "^0.5.1" karma-jasmine@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-2.0.1.tgz#26e3e31f2faf272dd80ebb0e1898914cc3a19763" integrity sha512-iuC0hmr9b+SNn1DaUD2QEYtUxkS1J+bSJSn7ejdEexs7P8EYvA1CWkEdrDQ+8jVH3AgWlCNwjYsT1chjcNW9lA== dependencies: jasmine-core "^3.3" karma-sourcemap-loader@^0.3.7: version "0.3.7" resolved 
"https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8" integrity sha1-kTIsd/jxPUb+0GKwQuEAnUxFBdg= dependencies: graceful-fs "^4.1.2" karma-spec-reporter@^0.0.32: version "0.0.32" resolved "https://registry.yarnpkg.com/karma-spec-reporter/-/karma-spec-reporter-0.0.32.tgz#2e9c7207ea726771260259f82becb543209e440a" integrity sha1-LpxyB+pyZ3EmAln4K+y1QyCeRAo= dependencies: colors "^1.1.2" karma-webpack@^3.0.5: version "3.0.5" resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-3.0.5.tgz#1ff1e3a690fb73ae95ee95f9ab58f341cfc7b40f" integrity sha512-nRudGJWstvVuA6Tbju9tyGUfXTtI1UXMXoRHVmM2/78D0q6s/Ye2IC157PKNDC15PWFGR0mVIRtWLAdcfsRJoA== dependencies: async "^2.0.0" babel-runtime "^6.0.0" loader-utils "^1.0.0" lodash "^4.0.0" source-map "^0.5.6" webpack-dev-middleware "^2.0.6" karma@^4.1.0: version "4.4.1" resolved "https://registry.yarnpkg.com/karma/-/karma-4.4.1.tgz#6d9aaab037a31136dc074002620ee11e8c2e32ab" integrity sha512-L5SIaXEYqzrh6b1wqYC42tNsFMx2PWuxky84pK9coK09MvmL7mxii3G3bZBh/0rvD27lqDd0le9jyhzvwif73A== dependencies: bluebird "^3.3.0" body-parser "^1.16.1" braces "^3.0.2" chokidar "^3.0.0" colors "^1.1.0" connect "^3.6.0" di "^0.0.1" dom-serialize "^2.2.0" flatted "^2.0.0" glob "^7.1.1" graceful-fs "^4.1.2" http-proxy "^1.13.0" isbinaryfile "^3.0.0" lodash "^4.17.14" log4js "^4.0.0" mime "^2.3.1" minimatch "^3.0.2" optimist "^0.6.1" qjobs "^1.1.4" range-parser "^1.2.0" rimraf "^2.6.0" safe-buffer "^5.0.1" socket.io "2.1.1" source-map "^0.6.1" tmp "0.0.33" useragent "2.3.0" killable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved 
"https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== lcid@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== dependencies: invert-kv "^2.0.0" less-loader@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-5.0.0.tgz#498dde3a6c6c4f887458ee9ed3f086a12ad1b466" integrity sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg== dependencies: clone "^2.1.1" loader-utils "^1.1.0" pify "^4.0.1" less@^3.9.0: version "3.10.3" resolved "https://registry.yarnpkg.com/less/-/less-3.10.3.tgz#417a0975d5eeecc52cff4bcfa3c09d35781e6792" integrity sha512-vz32vqfgmoxF1h3K4J+yKCtajH0PWmjkIFgbs5d78E/c/e+UQTnI+lWK+1eQRE95PXM2mC3rJlLSSP9VQHnaow== dependencies: clone "^2.1.2" optionalDependencies: errno "^0.1.1" graceful-fs "^4.1.2" image-size "~0.5.0" mime "^1.4.1" mkdirp "^0.5.0" promise "^7.1.1" request "^2.83.0" source-map "~0.6.0" levn@~0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" pify "^2.0.0" pinkie-promise "^2.0.0" strip-bom "^2.0.0" loader-runner@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== loader-utils@1.2.3, loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== dependencies: big.js "^5.2.2" emojis-list "^2.0.0" json5 "^1.0.1" loader-utils@^0.2.10, loader-utils@^0.2.16: version "0.2.17" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= dependencies: big.js "^3.1.3" emojis-list "^2.0.0" json5 "^0.5.0" object-assign "^4.0.1" locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" lodash@^4.0.0, lodash@^4.17.0, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.3, lodash@^4.17.4: version "4.17.21" resolved 
"https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-symbols@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== dependencies: chalk "^2.0.1" log4js@^4.0.0: version "4.5.1" resolved "https://registry.yarnpkg.com/log4js/-/log4js-4.5.1.tgz#e543625e97d9e6f3e6e7c9fc196dd6ab2cae30b5" integrity sha512-EEEgFcE9bLgaYUKuozyFfytQM2wDHtXn4tAN41pkaxpNjAykv11GVdeI4tHtmPWW4Xrgh9R/2d7XYghDVjbKKw== dependencies: date-format "^2.0.0" debug "^4.1.1" flatted "^2.0.0" rfdc "^1.1.4" streamroller "^1.0.6" loglevel@^1.6.4: version "1.6.6" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.6.tgz#0ee6300cc058db6b3551fa1c4bf73b83bb771312" integrity sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ== loglevelnext@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/loglevelnext/-/loglevelnext-1.0.5.tgz#36fc4f5996d6640f539ff203ba819641680d75a2" integrity sha512-V/73qkPuJmx4BcBF19xPBr+0ZRVBhc4POxvZTZdMeXpJ4NItXSJ/MSwuFT0kQJlCbXvdlZoQQ/418bS1y9Jh6A== dependencies: es6-symbol "^3.1.1" object.assign "^4.1.0" loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" loud-rejection@^1.0.0, loud-rejection@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= dependencies: currently-unhandled "^0.4.1" signal-exit 
"^3.0.0" lower-case@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= lru-cache@4.1.x: version "4.1.5" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" make-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== dependencies: pify "^4.0.1" semver "^5.6.0" mamacro@^0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== map-age-cleaner@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== dependencies: p-defer "^1.0.0" map-cache@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity 
sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" md5.js@^1.3.4: version "1.3.5" resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== dependencies: hash-base "^3.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= mem@^4.0.0: version "4.3.0" resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== dependencies: map-age-cleaner "^0.1.1" mimic-fn "^2.0.0" p-is-promise "^2.0.0" memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= dependencies: errno "^0.1.3" readable-stream "^2.0.1" memory-fs@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== dependencies: errno "^0.1.3" readable-stream "^2.0.1" meow@^3.3.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= dependencies: camelcase-keys "^2.0.0" decamelize "^1.1.2" loud-rejection "^1.0.0" map-obj "^1.0.1" minimist "^1.1.3" normalize-package-data "^2.3.4" 
object-assign "^4.0.1" read-pkg-up "^1.0.1" redent "^1.0.0" trim-newlines "^1.0.0" merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" braces "^2.3.1" define-property "^2.0.2" extend-shallow "^3.0.2" extglob "^2.0.4" fragment-cache "^0.2.1" kind-of "^6.0.2" nanomatch "^1.2.9" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.2" miller-rabin@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== dependencies: bn.js "^4.0.0" brorand "^1.0.1" mime-db@1.42.0, "mime-db@>= 1.40.0 < 2": version "1.42.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.42.0.tgz#3e252907b4c7adb906597b4b65636272cf9e7bac" integrity sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ== mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.19, mime-types@~2.1.24: version "2.1.25" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.25.tgz#39772d46621f93e2a80a856c53b86a62156a6437" integrity sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg== dependencies: mime-db "1.42.0" mime@1.6.0, 
mime@^1.4.1: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.1.0, mime@^2.3.1, mime@^2.4.4: version "2.4.4" resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== mimic-fn@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== mini-css-extract-plugin@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz#a3f13372d6fcde912f3ee4cd039665704801e3b9" integrity sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw== dependencies: loader-utils "^1.1.0" normalize-url "^2.0.1" schema-utils "^1.0.0" webpack-sources "^1.1.0" minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimalistic-crypto-utils@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= "minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity 
sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= minipass@^2.6.0, minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" minizlib@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== dependencies: minipass "^2.9.0" mississippi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== dependencies: concat-stream "^1.5.0" duplexify "^3.4.2" end-of-stream "^1.1.0" flush-write-stream "^1.0.0" from2 "^2.1.0" parallel-transform "^1.1.0" pump "^3.0.0" pumpify "^1.3.3" stream-each "^1.1.0" through2 "^2.0.0" mixin-deep@^1.2.0: version "1.3.2" resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" 
mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" move-concurrently@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= dependencies: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.3" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== ms@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== multicast-dns-service-types@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= multicast-dns@^6.0.1: version "6.2.3" resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" integrity sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== dependencies: dns-packet "^1.3.1" thunky "^1.0.2" nan@^2.12.1: version "2.14.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" 
integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" define-property "^2.0.2" extend-shallow "^3.0.2" fragment-cache "^0.2.1" is-windows "^1.0.2" kind-of "^6.0.2" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" needle@^2.2.1: version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" negotiator@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== neo-async@^2.5.0, neo-async@^2.6.0, neo-async@^2.6.1: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== next-tick@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== no-case@^2.2.0: version "2.3.2" resolved 
"https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== dependencies: lower-case "^1.1.1" node-forge@0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579" integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ== node-libs-browser@^2.2.0, node-libs-browser@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== dependencies: assert "^1.1.1" browserify-zlib "^0.2.0" buffer "^4.3.0" console-browserify "^1.1.0" constants-browserify "^1.0.0" crypto-browserify "^3.11.0" domain-browser "^1.1.1" events "^3.0.0" https-browserify "^1.0.0" os-browserify "^0.3.0" path-browserify "0.0.1" process "^0.11.10" punycode "^1.2.4" querystring-es3 "^0.2.0" readable-stream "^2.3.3" stream-browserify "^2.0.1" stream-http "^2.7.2" string_decoder "^1.0.0" timers-browserify "^2.0.4" tty-browserify "0.0.0" url "^0.11.0" util "^0.11.0" vm-browserify "^1.0.1" node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" needle "^2.2.1" nopt "^4.0.1" npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.2.7" rimraf "^2.6.1" semver "^5.3.0" tar "^4" node-releases@^1.1.71: version "1.1.72" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe" integrity 
sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== nopt@3.x: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" integrity sha1-xkZdvwirzU2zWTF/eaxopkayj/k= dependencies: abbrev "1" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: remove-trailing-separator "^1.0.1" normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= normalize-url@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== dependencies: 
prepend-http "^2.0.0" query-string "^5.0.1" sort-keys "^2.0.0" npm-bundled@^1.0.1: version "1.0.6" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== npm-packlist@^1.1.6: version "1.4.6" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.6.tgz#53ba3ed11f8523079f1457376dd379ee4ea42ff4" integrity sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" gauge "~2.7.3" set-blocking "~2.0.0" nth-check@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== dependencies: boolbase "~1.0.0" null-check@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" integrity sha1-l33/1xdgErnsMNKjnbXPcqBDnt0= null-loader@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-1.0.0.tgz#90e85798e50e9dd1d568495a44e74829dec26744" integrity sha512-mYLDjDVTkjTlFoidxRhzO75rdcwfVXfw5G5zpj8sXnBkHtKJxMk4hTcRR4i5SOhDB6EvcQuYriy6IV23eq6uog== dependencies: loader-utils "^1.2.3" 
schema-utils "^1.0.0" num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" integrity sha1-8MaapQ78lbhmwYb0AKM3acsvEpE= object-copy@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" object-inspect@^1.6.0: version "1.7.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== object-is@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.0.1.tgz#0aa60ec9989a0b3ed795cf4d06f62cf1ad6539b6" integrity sha1-CqYOyZiaCz7Xlc9NBvYs8a1lObY= object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== dependencies: define-properties "^1.1.2" function-bind "^1.1.1" has-symbols "^1.0.0" object-keys "^1.0.11" object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= dependencies: define-properties "^1.1.2" es-abstract "^1.5.1" object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= dependencies: isobject "^3.0.1" obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== on-finished@~2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" 
integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@1.x, once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" opn@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== dependencies: is-wsl "^1.1.0" optimist@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.6" levn "~0.3.0" prelude-ls "~1.1.2" type-check "~0.3.2" word-wrap "~1.2.3" original@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== dependencies: url-parse "^1.4.3" os-browserify@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-locale@^3.0.0, os-locale@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== dependencies: execa "^1.0.0" lcid "^2.0.0" mem "^4.0.0" os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== p-limit@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.1.tgz#aa07a788cc3151c939b5131f63570f0dd2009537" integrity sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg== dependencies: p-try "^2.0.0" p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-map@^2.0.0: version "2.1.0" 
resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== p-retry@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== dependencies: retry "^0.12.0" p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@~1.0.5: version "1.0.10" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.10.tgz#4328badb5086a426aa90f541977d4955da5c9732" integrity sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw== parallel-transform@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== dependencies: cyclist "^1.0.1" inherits "^2.0.3" readable-stream "^2.1.5" param-case@2.1.x: version "2.1.1" resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= dependencies: no-case "^2.2.0" parse-asn1@^5.0.0: version "5.1.5" resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== dependencies: asn1.js "^4.0.0" browserify-aes "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.0" pbkdf2 "^3.0.3" safe-buffer "^5.1.1" parse-json@^2.2.0: version "2.2.0" resolved 
"https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse-json@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= dependencies: error-ex "^1.3.1" json-parse-better-errors "^1.0.1" parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= parseqs@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" integrity sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0= dependencies: better-assert "~1.0.0" parseuri@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" integrity sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo= dependencies: better-assert "~1.0.0" parseurl@~1.3.2, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascalcase@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= path-browserify@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity 
sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-is-inside@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= path-key@^2.0.0, path-key@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= path-parse@^1.0.6: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify "^2.0.0" pinkie-promise "^2.0.0" pbkdf2@^3.0.3: version "3.0.17" resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" integrity 
sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" ripemd160 "^2.0.1" safe-buffer "^5.0.1" sha.js "^2.4.8" performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= picomatch@^2.0.4: version "2.1.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.1.1.tgz#ecdfbea7704adb5fe6fb47f9866c4c0e15e905c5" integrity sha512-OYMyqkKzK7blWO/+XZYP6w8hH0LDvkBvdvKukti+7kqYFCiEAk+gI3DWnryapc0Dau05ugGTy0foQ6mqn4AHYA== pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= pkg-dir@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== dependencies: find-up "^3.0.0" portfinder@^1.0.25: version "1.0.25" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.25.tgz#254fd337ffba869f4b9d37edc298059cb4d35eca" integrity 
sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg== dependencies: async "^2.6.2" debug "^3.1.1" mkdirp "^0.5.1" posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= postcss-load-config@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.0.tgz#c84d692b7bb7b41ddced94ee62e8ab31b417b003" integrity sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q== dependencies: cosmiconfig "^5.0.0" import-cwd "^2.0.0" postcss-loader@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== dependencies: loader-utils "^1.1.0" postcss "^7.0.0" postcss-load-config "^2.0.0" schema-utils "^1.0.0" postcss-modules-extract-imports@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== dependencies: postcss "^7.0.5" postcss-modules-local-by-default@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.6.tgz#dd9953f6dd476b5fd1ef2d8830c8929760b56e63" integrity sha512-oLUV5YNkeIBa0yQl7EYnxMgy4N6noxmiwZStaEJUSe2xPMcdNc8WmBQuQCx18H5psYbVxz8zoHk0RAAYZXP9gA== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-value-parser "^3.3.1" postcss-modules-scope@^2.1.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.1.0.tgz#ad3f5bf7856114f6fcab901b0502e2a2bc39d4eb" integrity sha512-91Rjps0JnmtUB0cujlc8KIKCsJXWjzuxGeT/+Q2i2HXKZ7nBUeF9YQTZZTNvHVoNYj1AthsjnGLtqDUE0Op79A== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-modules-values@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-2.0.0.tgz#479b46dc0c5ca3dc7fa5270851836b9ec7152f64" integrity sha512-Ki7JZa7ff1N3EIMlPnGTZfUMe69FFwiQPnVSXC9mnn3jozCRBYIxiZd44yJOV2AmabOo4qFf8s0dC/+lweG7+w== dependencies: icss-replace-symbols "^1.1.0" postcss "^7.0.6" postcss-selector-parser@^6.0.0: version "6.0.2" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz#934cf799d016c83411859e09dcecade01286ec5c" integrity sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg== dependencies: cssesc "^3.0.0" indexes-of "^1.0.1" uniq "^1.0.1" postcss-value-parser@^3.3.0, postcss-value-parser@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== postcss-value-parser@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz#482282c09a42706d1fc9a069b73f44ec08391dc9" integrity sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ== postcss@^7.0.0, postcss@^7.0.14, postcss@^7.0.23, postcss@^7.0.5, postcss@^7.0.6: version "7.0.36" resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.36.tgz#056f8cffa939662a8f5905950c07d5285644dfcb" integrity sha512-BebJSIUMwJHRH0HAQoxN4u1CN86glsrwsW0q7T+/m44eXOUAxSNdHRkNZPYz5vVUbg17hFgOQDE7fZk7li3pZw== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" 
prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= prepend-http@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= pretty-error@^2.0.2: version "2.1.1" resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= dependencies: renderkid "^2.0.1" utila "~0.4" private@^0.1.6: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= promise-inflight@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= promise@^7.0.1, promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== dependencies: asap "~2.0.3" proxy-addr@~2.0.5: version "2.0.5" resolved 
"https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== dependencies: forwarded "~0.1.2" ipaddr.js "1.9.0" prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= psl@^1.1.24: version "1.4.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.4.0.tgz#5dd26156cdb69fa1fdb8ab1991667d3f80ced7c2" integrity sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw== public-encrypt@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== dependencies: bn.js "^4.1.0" browserify-rsa "^4.0.0" create-hash "^1.1.0" parse-asn1 "^5.0.0" randombytes "^2.0.1" safe-buffer "^5.1.2" pug-attrs@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-3.0.0.tgz#b10451e0348165e31fad1cc23ebddd9dc7347c41" integrity sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA== dependencies: constantinople "^4.0.1" js-stringify "^1.0.2" pug-runtime "^3.0.0" pug-code-gen@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-3.0.2.tgz#ad190f4943133bf186b60b80de483100e132e2ce" integrity sha512-nJMhW16MbiGRiyR4miDTQMRWDgKplnHyeLvioEJYbk1RsPI3FuA3saEP8uwnTb2nTJEKBU90NFVWJBk4OU5qyg== dependencies: constantinople "^4.0.1" doctypes "^1.1.0" js-stringify "^1.0.2" pug-attrs "^3.0.0" pug-error "^2.0.0" pug-runtime "^3.0.0" 
void-elements "^3.1.0" with "^7.0.0" pug-error@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-2.0.0.tgz#5c62173cb09c34de2a2ce04f17b8adfec74d8ca5" integrity sha512-sjiUsi9M4RAGHktC1drQfCr5C5eriu24Lfbt4s+7SykztEOwVZtbFk1RRq0tzLxcMxMYTBR+zMQaG07J/btayQ== pug-filters@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-4.0.0.tgz#d3e49af5ba8472e9b7a66d980e707ce9d2cc9b5e" integrity sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A== dependencies: constantinople "^4.0.1" jstransformer "1.0.0" pug-error "^2.0.0" pug-walk "^2.0.0" resolve "^1.15.1" pug-lexer@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-5.0.1.tgz#ae44628c5bef9b190b665683b288ca9024b8b0d5" integrity sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w== dependencies: character-parser "^2.2.0" is-expression "^4.0.0" pug-error "^2.0.0" pug-linker@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-4.0.0.tgz#12cbc0594fc5a3e06b9fc59e6f93c146962a7708" integrity sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw== dependencies: pug-error "^2.0.0" pug-walk "^2.0.0" pug-load@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-load/-/pug-load-3.0.0.tgz#9fd9cda52202b08adb11d25681fb9f34bd41b662" integrity sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ== dependencies: object-assign "^4.1.1" pug-walk "^2.0.0" pug-parser@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-6.0.0.tgz#a8fdc035863a95b2c1dc5ebf4ecf80b4e76a1260" integrity sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw== dependencies: pug-error "^2.0.0" token-stream "1.0.0" pug-runtime@^3.0.0, pug-runtime@^3.0.1: version "3.0.1" resolved 
"https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-3.0.1.tgz#f636976204723f35a8c5f6fad6acda2a191b83d7" integrity sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg== pug-strip-comments@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz#f94b07fd6b495523330f490a7f554b4ff876303e" integrity sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ== dependencies: pug-error "^2.0.0" pug-walk@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-2.0.0.tgz#417aabc29232bb4499b5b5069a2b2d2a24d5f5fe" integrity sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ== pug@^3.0.1: version "3.0.2" resolved "https://registry.yarnpkg.com/pug/-/pug-3.0.2.tgz#f35c7107343454e43bc27ae0ff76c731b78ea535" integrity sha512-bp0I/hiK1D1vChHh6EfDxtndHji55XP/ZJKwsRqrz6lRia6ZC2OZbdAymlxdVFwd1L70ebrVJw4/eZ79skrIaw== dependencies: pug-code-gen "^3.0.2" pug-filters "^4.0.0" pug-lexer "^5.0.1" pug-linker "^4.0.0" pug-load "^3.0.0" pug-parser "^6.0.0" pug-runtime "^3.0.1" pug-strip-comments "^2.0.0" pump@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pumpify@^1.3.3: version "1.5.1" resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" integrity 
sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" inherits "^2.0.3" pump "^2.0.0" punycode@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= punycode@^1.2.4, punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== qjobs@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" integrity sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg== qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== query-string@^5.0.1: version "5.1.1" resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== dependencies: decode-uri-component "^0.2.0" object-assign "^4.1.0" strict-uri-encode "^1.0.0" querystring-es3@^0.2.0: version "0.2.1" resolved 
"https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= querystring@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= querystringify@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" randomfill@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== dependencies: randombytes "^2.0.5" safe-buffer "^5.1.0" range-parser@^1.0.3, range-parser@^1.2.0, range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== dependencies: bytes "3.1.0" http-errors "1.7.2" iconv-lite "0.4.24" unpipe "1.0.0" raw-loader@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/raw-loader/-/raw-loader-2.0.0.tgz#e2813d9e1e3f80d1bbade5ad082e809679e20c26" integrity sha512-kZnO5MoIyrojfrPWqrhFNLZemIAX8edMOCp++yC5RKxzFB3m92DqKNhKlU6+FvpOhWtvyh3jOaD7J6/9tpdIKg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" path-type "^1.0.0" "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@^3.0.6, readable-stream@^3.1.1: version "3.4.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc" integrity 
sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" util-deprecate "^1.0.1" readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== dependencies: graceful-fs "^4.1.11" micromatch "^3.1.10" readable-stream "^2.0.2" readdirp@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839" integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ== dependencies: picomatch "^2.0.4" redent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= dependencies: indent-string "^2.1.0" strip-indent "^1.0.1" regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" regenerate@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== regenerator-runtime@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.2: version "0.13.3" resolved 
"https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5" integrity sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw== regenerator-transform@^0.14.0: version "0.14.1" resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.1.tgz#3b2fce4e1ab7732c08f665dfdb314749c7ddd2fb" integrity sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ== dependencies: private "^0.1.6" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" safe-regex "^1.1.0" regexp.prototype.flags@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.2.0.tgz#6b30724e306a27833eeb171b66ac8890ba37e41c" integrity sha512-ztaw4M1VqgMwl9HlPpOuiYgItcHlunW0He2fE6eNfT6E/CF2FtYi9ofOYe4mKntstYk0Fyh/rDRBdS3AnxjlrA== dependencies: define-properties "^1.1.2" regexpu-core@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6" integrity sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg== dependencies: regenerate "^1.4.0" regenerate-unicode-properties "^8.1.0" regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" unicode-match-property-value-ecmascript "^1.1.0" regjsgen@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.1.tgz#48f0bf1a5ea205196929c0d9798b42d1ed98443c" integrity sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg== regjsparser@^0.6.0: version "0.6.0" resolved 
"https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" relateurl@0.2.x: version "0.2.7" resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= renderkid@^2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== dependencies: css-select "^1.1.0" dom-converter "^0.2" htmlparser2 "^3.3.0" strip-ansi "^3.0.0" utila "^0.4.0" repeat-element@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" request@^2.83.0: version "2.88.0" resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: 
aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.0" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign "~0.9.0" performance-now "^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.4.3" tunnel-agent "^0.6.0" uuid "^3.3.2" require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= require-main-filename@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= resolve-cwd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= dependencies: resolve-from "^3.0.0" resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= dependencies: expand-tilde "^2.0.0" global-modules "^1.0.0" resolve-from@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" integrity sha1-six699nWiBvItuZTM17rywoYh0g= resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= resolve@^1.10.0, resolve@^1.3.2, resolve@^1.8.1: version "1.12.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.2.tgz#08b12496d9aa8659c75f534a8f05f0d892fff594" integrity sha512-cAVTI2VLHWYsGOirfeYVVQ7ZDejtQ9fp4YhYckWDEkFfqbVjaT11iM8k6xSAfGFMM+gDpZjMnFssPu8we+mqFw== dependencies: path-parse "^1.0.6" resolve@^1.15.1: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== dependencies: is-core-module "^2.2.0" path-parse "^1.0.6" ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= rfdc@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.1.4.tgz#ba72cc1367a0ccd9cf81a870b3b58bd3ad07f8c2" integrity sha512-5C9HXdzK8EAqN7JDif30jqsBzavB7wLpaubisuQIGHWf2gUXSpzy6ArX/+Da8RjFpagWsCn+pIgxTMAmKw9Zug== rimraf@^2.5.4, rimraf@^2.6.0, rimraf@^2.6.1, rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" 
integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== dependencies: hash-base "^3.0.0" inherits "^2.0.1" run-queue@^1.0.0, run-queue@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= dependencies: aproba "^1.1.1" safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= dependencies: ret "~0.1.10" "safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@^1.2.4: version "1.2.4" resolved 
"https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== schema-utils@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf" integrity sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8= dependencies: ajv "^5.0.0" schema-utils@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== dependencies: ajv "^6.1.0" ajv-errors "^1.0.0" ajv-keywords "^3.1.0" select-hose@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= selfsigned@^1.10.7: version "1.10.7" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.7.tgz#da5819fd049d5574f28e88a9bcc6dbc6e6f3906b" integrity sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA== dependencies: node-forge "0.9.0" "semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" integrity 
sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== dependencies: debug "2.6.9" depd "~1.1.2" destroy "~1.0.4" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" http-errors "~1.7.2" mime "1.6.0" ms "2.1.1" on-finished "~2.3.0" range-parser "~1.2.1" statuses "~1.5.0" serialize-javascript@^1.7.0: version "1.9.1" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-1.9.1.tgz#cfc200aef77b600c47da9bb8149c943e798c2fdb" integrity sha512-0Vb/54WJ6k5v8sSWN09S0ora+Hnr+cX40r9F170nT+mSkaxltoE/7R3OrIdBSUv1OoiobH1QoWQbCnAO+e8J1A== serve-index@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= dependencies: accepts "~1.3.4" batch "0.6.1" debug "2.6.9" escape-html "~1.0.3" http-errors "~1.6.2" mime-types "~2.1.17" parseurl "~1.3.2" serve-static@1.14.1: version "1.14.1" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" send "0.17.1" set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.3" split-string "^3.0.1" setimmediate@^1.0.4: version "1.0.5" resolved 
"https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== setprototypeof@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= dependencies: shebang-regex "^1.0.0" shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" isobject "^3.0.0" snapdragon-util 
"^3.0.1" snapdragon-util@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: base "^0.11.1" debug "^2.2.0" define-property "^0.2.5" extend-shallow "^2.0.1" map-cache "^0.2.2" source-map "^0.5.6" source-map-resolve "^0.5.0" use "^3.1.0" socket.io-adapter@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz#2a805e8a14d6372124dd9159ad4502f8cb07f06b" integrity sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs= socket.io-client@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.1.1.tgz#dcb38103436ab4578ddb026638ae2f21b623671f" integrity sha512-jxnFyhAuFxYfjqIgduQlhzqTcOEQSn+OHKVfAxWaNWa7ecP7xSNk2Dx/3UEsDcY7NcFafxvNvKPmmO7HTwTxGQ== dependencies: backo2 "1.0.2" base64-arraybuffer "0.1.5" component-bind "1.0.0" component-emitter "1.2.1" debug "~3.1.0" engine.io-client "~3.2.0" has-binary2 "~1.0.2" has-cors "1.1.0" indexof "0.0.1" object-component "0.0.3" parseqs "0.0.5" parseuri "0.0.5" socket.io-parser "~3.2.0" to-array "0.1.4" socket.io-parser@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.2.0.tgz#e7c6228b6aa1f814e6148aea325b51aa9499e077" integrity sha512-FYiBx7rc/KORMJlgsXysflWx/RIvtqZbyGLlHZvjfmPTPeuD/I8MaW7cfFrj5tRltICJdgwflhfZ3NVVbVLFQA== dependencies: component-emitter "1.2.1" debug "~3.1.0" isarray "2.0.1" socket.io@2.1.1: version "2.1.1" resolved 
"https://registry.yarnpkg.com/socket.io/-/socket.io-2.1.1.tgz#a069c5feabee3e6b214a75b40ce0652e1cfb9980" integrity sha512-rORqq9c+7W0DAK3cleWNSyfv/qKXV99hV4tZe+gGLfBECw3XEhBy7x85F3wypA9688LKjtwO9pX9L33/xQI8yA== dependencies: debug "~3.1.0" engine.io "~3.2.0" has-binary2 "~1.0.2" socket.io-adapter "~1.1.0" socket.io-client "2.1.1" socket.io-parser "~3.2.0" sockjs-client@1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.4.0.tgz#c9f2568e19c8fd8173b4997ea3420e0bb306c7d5" integrity sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g== dependencies: debug "^3.2.5" eventsource "^1.0.7" faye-websocket "~0.11.1" inherits "^2.0.3" json3 "^3.3.2" url-parse "^1.4.3" sockjs@0.3.19: version "0.3.19" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d" integrity sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw== dependencies: faye-websocket "^0.10.0" uuid "^3.0.1" sort-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= dependencies: is-plain-obj "^1.0.0" source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-resolve@^0.5.0: version "0.5.2" resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" urix "^0.1.0" source-map-support@~0.5.12: version "0.5.16" resolved 
"https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-url@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= source-map@^0.5.0, source-map@^0.5.1, source-map@^0.5.6, source-map@^0.5.7: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" integrity sha1-2rc/vPwrqBm03gO9b26qSBZLP50= dependencies: amdefine ">=0.0.4" spdx-correct@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: spdx-expression-parse "^3.0.0" spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== spdx-expression-parse@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: version "3.0.5" resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== dependencies: debug "^4.1.0" detect-node "^2.0.4" hpack.js "^2.1.6" obuf "^1.1.2" readable-stream "^3.0.6" wbuf "^1.7.3" spdy@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.1.tgz#6f12ed1c5db7ea4f24ebb8b89ba58c87c08257f2" integrity sha512-HeZS3PBdMA+sZSu0qwpCxl3DeALD5ASx8pAX0jZdKXSpPWbQ6SYGnlg3BBmYLx5LtiZrmkAZfErCm2oECBcioA== dependencies: debug "^4.1.0" handle-thing "^2.0.0" http-deceiver "^1.2.7" select-hose "^2.0.0" spdy-transport "^3.0.0" split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow "^3.0.0" sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity 
sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" ssri@^6.0.1: version "6.0.2" resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.2.tgz#157939134f20464e7301ddba3e90ffa8f7728ac5" integrity sha512-cepbSq/neFK7xB6A50KHN0xHDotYzq58wWCa5LeWqnPrHG8GzfEjO/4O8kpmcGW+oaxkvhEJCWgbgNk4/ZV93Q== dependencies: figgy-pudding "^3.5.1" static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== dependencies: inherits "~2.0.1" readable-stream "^2.0.2" stream-each@^1.1.0: version "1.2.3" resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== dependencies: end-of-stream "^1.1.0" stream-shift "^1.0.0" stream-http@^2.7.2: version "2.8.3" resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== dependencies: builtin-status-codes 
"^3.0.0" inherits "^2.0.1" readable-stream "^2.3.6" to-arraybuffer "^1.0.0" xtend "^4.0.0" stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= streamroller@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-1.0.6.tgz#8167d8496ed9f19f05ee4b158d9611321b8cacd9" integrity sha512-3QC47Mhv3/aZNFpDDVO44qQb9gwB9QggMEE0sQmkTAwBVYdBRWISdsywlkfm5II1Q5y/pmrHflti/IgmIzdDBg== dependencies: async "^2.6.2" date-format "^2.0.0" debug "^3.2.6" fs-extra "^7.0.1" lodash "^4.17.14" strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" "string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== dependencies: emoji-regex "^7.0.1" is-fullwidth-code-point "^2.0.0" strip-ansi "^5.1.0" string.prototype.trimleft@^2.1.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string.prototype.trimright@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58" integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" integrity 
sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: ansi-regex "^4.1.0" strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= dependencies: get-stdin "^4.0.1" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= style-loader@^0.23.1: version "0.23.1" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.23.1.tgz#cb9154606f3e771ab6c4ab637026a1049174d925" integrity sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" supports-color@6.1.0, supports-color@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== dependencies: has-flag "^3.0.0" supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.0: version "3.2.3" resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" tapable@^1.0.0, tapable@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tar@^4: version "4.4.19" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3" integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA== dependencies: chownr "^1.1.4" fs-minipass "^1.2.7" minipass "^2.9.0" minizlib "^1.3.3" mkdirp "^0.5.5" safe-buffer "^5.2.1" yallist "^3.1.1" terser-webpack-plugin@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.1.tgz#61b18e40eaee5be97e771cdbb10ed1280888c2b4" integrity sha512-ZXmmfiwtCLfz8WKZyYUuuHf3dMYEjg8NrjHMb0JqHVHVOSkzp3cW2/XG1fP3tRhqEqSzMwzzRQGtAPbs4Cncxg== dependencies: cacache "^12.0.2" find-cache-dir "^2.1.0" is-wsl "^1.1.0" schema-utils "^1.0.0" serialize-javascript "^1.7.0" source-map "^0.6.1" terser "^4.1.2" webpack-sources "^1.4.0" worker-farm "^1.7.0" terser@^4.1.2: version "4.4.0" resolved "https://registry.yarnpkg.com/terser/-/terser-4.4.0.tgz#22c46b4817cf4c9565434bfe6ad47336af259ac3" integrity sha512-oDG16n2WKm27JO8h4y/w3iqBGAOSCtq7k8dRmrn4Wf9NouL0b2WpMHGChFGZq4nFAQy1FsNJrVQHfurXOSTmOA== dependencies: commander "^2.20.0" source-map "~0.6.1" source-map-support "~0.5.12" through2@^2.0.0: version "2.0.5" resolved 
"https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" xtend "~4.0.1" thunky@^1.0.2: version "1.1.0" resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== timers-browserify@^2.0.4: version "2.0.11" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== dependencies: setimmediate "^1.0.4" tmp@0.0.33, tmp@0.0.x: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" to-array@0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" integrity sha1-F+bBH3PdTz10zaek/zI46a2b+JA= to-arraybuffer@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= dependencies: is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" extend-shallow "^3.0.2" regex-not "^1.0.2" safe-regex "^1.1.0" toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== token-stream@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-1.0.0.tgz#cc200eab2613f4166d27ff9afc7ca56d49df6eb4" integrity sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ= toposort@^1.0.0: version "1.0.7" resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= tough-cookie@~2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" integrity 
sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: psl "^1.1.24" punycode "^1.4.1" trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= tslib@^1.9.0: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" type-is@~1.6.17, type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" type@^1.0.1: 
version "1.2.0" resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3" integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow== typedarray@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= uglify-js@3.4.x: version "3.4.10" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== dependencies: commander "~2.19.0" source-map "~0.6.1" uglify-js@^3.1.4: version "3.13.5" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.13.5.tgz#5d71d6dbba64cf441f32929b1efce7365bb4f113" integrity sha512-xtB8yEqIkn7zmOyS2zUNBsYCBRhDkvlNxMMY2smuJ/qA8NCHeQvKCF3i9Z4k8FJH4+PJvZRtMrPynfZ75+CSZw== ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== unicode-canonical-property-names-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== unicode-match-property-ecmascript@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== dependencies: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" unicode-match-property-value-ecmascript@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" set-value "^2.0.1" uniq@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= unique-filename@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== dependencies: unique-slug "^2.0.0" unique-slug@^2.0.0: version "2.0.2" resolved 
"https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== dependencies: imurmurhash "^0.1.4" universalify@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= dependencies: has-value "^0.3.1" isobject "^3.0.0" upath@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== upper-case@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= uri-js@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= url-join@^2.0.2: version "2.0.5" resolved "https://registry.yarnpkg.com/url-join/-/url-join-2.0.5.tgz#5af22f18c052a000a48d7b82c5e9c2e2feeda728" integrity 
sha1-WvIvGMBSoACkjXuCxenC4v7tpyg= url-parse@^1.4.3: version "1.5.3" resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.3.tgz#71c1303d38fb6639ade183c2992c8cc0686df862" integrity sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" url@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= dependencies: punycode "1.3.2" querystring "0.2.0" use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== useragent@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" integrity sha512-4AoH4pxuSvHCjqLO04sU6U/uE65BYza8l/KKBS0b0hnUPWi+cQ2BpeTEwejCSx9SPV5/U03nniDTrWx5NrmKdw== dependencies: lru-cache "4.1.x" tmp "0.0.x" util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util.promisify@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== dependencies: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" util@0.10.3: version "0.10.3" resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= dependencies: inherits "2.0.1" util@^0.11.0: version "0.11.1" resolved 
"https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== dependencies: inherits "2.0.3" utila@^0.4.0, utila@~0.4: version "0.4.0" resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.3.tgz#4568f0216e78760ee1dbf3a4d2cf53e224112866" integrity sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ== v8-compile-cache@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" vm-browserify@^1.0.1: version 
"1.1.2" resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== void-elements@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= void-elements@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-3.1.0.tgz#614f7fbf8d801f0bb5f0661f5b2f5785750e4f09" integrity sha1-YU9/v42AHwu18GYfWy9XhXUOTwk= watchpack@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== dependencies: chokidar "^2.0.2" graceful-fs "^4.1.2" neo-async "^2.5.0" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== dependencies: minimalistic-assert "^1.0.0" webpack-cli@^3.3.1: version "3.3.10" resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.10.tgz#17b279267e9b4fb549023fae170da8e6e766da13" integrity sha512-u1dgND9+MXaEt74sJR4PR7qkPxXUSQ0RXYq8x1L6Jg1MYVEmGPrH6Ah6C4arD4r0J1P5HKjRqpab36k0eIzPqg== dependencies: chalk "2.4.2" cross-spawn "6.0.5" enhanced-resolve "4.1.0" findup-sync "3.0.0" global-modules "2.0.0" import-local "2.0.0" interpret "1.2.0" loader-utils "1.2.3" supports-color "6.1.0" v8-compile-cache "2.0.3" yargs "13.2.4" webpack-dev-middleware@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-2.0.6.tgz#a51692801e8310844ef3e3790e1eacfe52326fd4" integrity 
sha512-tj5LLD9r4tDuRIDa5Mu9lnY2qBBehAITv6A9irqXhw/HQquZgTx3BCd57zYbU2gMDnncA49ufK2qVQSbaKJwOw== dependencies: loud-rejection "^1.6.0" memory-fs "~0.4.1" mime "^2.1.0" path-is-absolute "^1.0.0" range-parser "^1.0.3" url-join "^2.0.2" webpack-log "^1.0.1" webpack-dev-middleware@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3" integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw== dependencies: memory-fs "^0.4.1" mime "^2.4.4" mkdirp "^0.5.1" range-parser "^1.2.1" webpack-log "^2.0.0" webpack-dev-server@^3.3.1: version "3.9.0" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.9.0.tgz#27c3b5d0f6b6677c4304465ac817623c8b27b89c" integrity sha512-E6uQ4kRrTX9URN9s/lIbqTAztwEPdvzVrcmHE8EQ9YnuT9J8Es5Wrd8n9BKg1a0oZ5EgEke/EQFgUsp18dSTBw== dependencies: ansi-html "0.0.7" bonjour "^3.5.0" chokidar "^2.1.8" compression "^1.7.4" connect-history-api-fallback "^1.6.0" debug "^4.1.1" del "^4.1.1" express "^4.17.1" html-entities "^1.2.1" http-proxy-middleware "0.19.1" import-local "^2.0.0" internal-ip "^4.3.0" ip "^1.1.5" is-absolute-url "^3.0.3" killable "^1.0.1" loglevel "^1.6.4" opn "^5.5.0" p-retry "^3.0.1" portfinder "^1.0.25" schema-utils "^1.0.0" selfsigned "^1.10.7" semver "^6.3.0" serve-index "^1.9.1" sockjs "0.3.19" sockjs-client "1.4.0" spdy "^4.0.1" strip-ansi "^3.0.1" supports-color "^6.1.0" url "^0.11.0" webpack-dev-middleware "^3.7.2" webpack-log "^2.0.0" ws "^6.2.1" yargs "12.0.5" webpack-fix-style-only-entries@^0.2.1: version "0.2.2" resolved "https://registry.yarnpkg.com/webpack-fix-style-only-entries/-/webpack-fix-style-only-entries-0.2.2.tgz#60331c608b944ac821a3b6f2ae491a6d79ba40eb" integrity sha512-0wcrLCnISP8htV0NP1mT0e2mHhfjGQdNk82s8BTLVvF7rXuoJuUUzP3aCUXnRqlLgmTBx5WgqPhnczjatl+iSQ== webpack-log@^1.0.1: version "1.2.0" resolved 
"https://registry.yarnpkg.com/webpack-log/-/webpack-log-1.2.0.tgz#a4b34cda6b22b518dbb0ab32e567962d5c72a43d" integrity sha512-U9AnICnu50HXtiqiDxuli5gLB5PGBo7VvcHx36jRZHwK4vzOYLbImqT4lwWwoMHdQWwEKw736fCHEekokTEKHA== dependencies: chalk "^2.1.0" log-symbols "^2.1.0" loglevelnext "^1.0.1" uuid "^3.1.0" webpack-log@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== dependencies: ansi-colors "^3.0.0" uuid "^3.3.2" webpack-shell-plugin@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/webpack-shell-plugin/-/webpack-shell-plugin-0.5.0.tgz#29b8a1d80ddeae0ddb10e729667f728653c2c742" integrity sha1-Kbih2A3erg3bEOcpZn9yhlPCx0I= webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== dependencies: source-list-map "^2.0.0" source-map "~0.6.1" webpack@^4.30.0: version "4.41.2" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.2.tgz#c34ec76daa3a8468c9b61a50336d8e3303dce74e" integrity sha512-Zhw69edTGfbz9/8JJoyRQ/pq8FYUoY0diOXqW0T6yhgdhCv6wr0hra5DwwWexNRns2Z2+gsnrNcbe9hbGBgk/A== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/wasm-edit" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" acorn "^6.2.1" ajv "^6.10.2" ajv-keywords "^3.4.1" chrome-trace-event "^1.0.2" enhanced-resolve "^4.1.0" eslint-scope "^4.0.3" json-parse-better-errors "^1.0.2" loader-runner "^2.4.0" loader-utils "^1.2.3" memory-fs "^0.4.1" micromatch "^3.1.10" mkdirp "^0.5.1" neo-async "^2.6.1" node-libs-browser "^2.2.1" schema-utils "^1.0.0" tapable "^1.1.3" terser-webpack-plugin "^1.4.1" 
watchpack "^1.6.0" webpack-sources "^1.4.1" websocket-driver@>=0.5.1: version "0.7.3" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.3.tgz#a2d4e0d4f4f116f1e6297eba58b05d430100e9f9" integrity sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg== dependencies: http-parser-js ">=0.4.0 <0.4.11" safe-buffer ">=5.1.0" websocket-extensions ">=0.1.1" websocket-extensions@>=0.1.1: version "0.1.4" resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@^1.1.1, which@^1.2.1, which@^1.2.14, which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" with@^7.0.0: version "7.0.2" resolved "https://registry.yarnpkg.com/with/-/with-7.0.2.tgz#ccee3ad542d25538a7a7a80aad212b9828495bac" integrity sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w== dependencies: "@babel/parser" "^7.9.6" "@babel/types" "^7.9.6" assert-never "^1.2.1" babel-walk "3.0.0-canary-5" word-wrap@~1.2.3: version "1.2.3" resolved 
"https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= worker-farm@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== dependencies: errno "~0.1.7" wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== dependencies: ansi-styles "^3.2.0" string-width "^3.0.0" strip-ansi "^5.0.0" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= ws@^6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== dependencies: async-limiter "~1.0.0" ws@~3.3.1: version "3.3.3" resolved 
"https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA== dependencies: async-limiter "~1.0.0" safe-buffer "~5.1.0" ultron "~1.1.0" xmlhttprequest-ssl@~1.5.4: version "1.5.5" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e" integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4= xtend@^4.0.0, xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== "y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.2, yallist@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^11.1.1: version "11.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" integrity sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs-parser@^13.1.0: version "13.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" integrity 
sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs@12.0.5: version "12.0.5" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== dependencies: cliui "^4.0.0" decamelize "^1.2.0" find-up "^3.0.0" get-caller-file "^1.0.1" os-locale "^3.0.0" require-directory "^2.1.1" require-main-filename "^1.0.1" set-blocking "^2.0.0" string-width "^2.0.0" which-module "^2.0.0" y18n "^3.2.1 || ^4.0.0" yargs-parser "^11.1.1" yargs@13.2.4: version "13.2.4" resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== dependencies: cliui "^5.0.0" find-up "^3.0.0" get-caller-file "^2.0.1" os-locale "^3.1.0" require-directory "^2.1.1" require-main-filename "^2.0.0" set-blocking "^2.0.0" string-width "^3.0.0" which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^13.1.0" yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk= buildbot-3.4.0/www/nestedexample/000077500000000000000000000000001413250514000167775ustar00rootroot00000000000000buildbot-3.4.0/www/nestedexample/README000066400000000000000000000054711413250514000176660ustar00rootroot00000000000000This plugin permits to create two linked UI inputs that can be integrated in the force dialog: - pizza: a text field where the name of the pizza (lower or uppercase) can be written, - ingredients: a select input where the ingredients to make the pizza described in the input above can be selected. This input is automatically populated via a custom webservice provided by the plugin. 
The force dialog is the UI element that is displayed when one clicks on the FORCE button associated to a ForceScheduler. More precisely the code is composed of two parts: - python: * buildbot_nestedexample/__init__.py: definition of NestedExample, child of buildbot.schedulers.forcesched.NestedParameter. The two UI elements are embedded in the fields attribute, and linked to the coffee/jade code by means of its type="nestedexample". * buildbot_nestedexample/api.py: define the "getIngredients" endpoint that returns the ingredients necessary to make one pizza. - coffee: * plugin boilerplate: guanlecoja/config.coffee gulpfile.js package.json setup.py * ui logic: src/module/nestedexamplefield.directive.coffee src/module/nestedexamplefield.tpl.jade Note that the name of the files match, as they must, the following naming convention: field.directive.coffee field.tpl.jade Regarding the coffee code, the only non standard angular code, is the one that permits: * ...directive.coffee: to extract the embedded elements from the scope, and * ...tpl.jade: to communicate to the two basefield-s where the embedded elements are. This permits to benefit from the error displaying features provided by basefield. Please have a look at the commentaries in the code for more details. To activate that plugin in one buildbot instance, one should: - add that UI element in the ForceScheduler like, ... from buildbot_nestedexample import NestedExample from buildbot.schedulers.forcesched import ForceScheduler ForceScheduler(codebases=[CodebaseParameter(codebase="", branch=FixedParameter(name="branch", default=""), revision=FixedParameter(name="revision", default=""), repository=FixedParameter(name="repository", default=""), project=FixedParameter(name="project", default=""))], reason=StringParameter(name="reason", default=""), properties=[NestedExample(required=True, default="", size=80)]) - and activate the plugin in the buildbot configuration, c['www'] = dict(... 
plugins=dict(nestedexample={})) buildbot-3.4.0/www/nestedexample/buildbot_nestedexample/000077500000000000000000000000001413250514000235215ustar00rootroot00000000000000buildbot-3.4.0/www/nestedexample/buildbot_nestedexample/__init__.py000066400000000000000000000055671413250514000256470ustar00rootroot00000000000000from twisted.internet import defer from buildbot.schedulers.forcesched import ChoiceStringParameter from buildbot.schedulers.forcesched import NestedParameter from buildbot.schedulers.forcesched import StringParameter from buildbot.schedulers.forcesched import ValidationError from buildbot.www.plugin import Application from .api import Api class NestedExample(NestedParameter): """UI zone""" type = "nestedexample" PIZZA = "pizza" INGREDIENTS = "ingredients" def __init__(self, **kw): pizzaInput = StringParameter(label="type the name of your pizza", name=self.PIZZA, required=True) ingredientsInput = ChoiceStringParameter(name=self.INGREDIENTS, label="ingredients necessary to make the pizza", multiple=True, strict=False, default="", choices=[]) self.params = {self.PIZZA: pizzaInput, self.INGREDIENTS: ingredientsInput} self.allIngredients = set(sum([ingr for ingr in Api.pizzaIngredients.values()], [])) fields = self.params.values() super(NestedExample, self).__init__(self.type, label='', fields=fields, **kw) def createNestedPropertyName(self, propertyName): return "{}_{}".format(self.type, propertyName) @defer.inlineCallbacks def validateProperties(self, collector, properties): # we implement the check between the input and # the ingredients if properties[self.INGREDIENTS] not in self.allIngredients or\ not properties[self.PIZZA]: # we trigger a specific error message in PIZZA only def f(): return defer.fail(ValidationError('Invalid pizza')) nestedProp = self.createNestedPropertyName(self.PIZZA) yield collector.collectValidationErrors(nestedProp, f) @defer.inlineCallbacks def updateFromKwargs(self, kwargs, properties, collector, **kw): yield 
super(NestedExample, self).updateFromKwargs(kwargs, properties, collector, **kw) # the properties we have are in the form # {nestedexample: {input: , # ingredients: }} # we just flatten the dict to have # - input, and # - ingredients # in properties for prop, val in properties.pop(self.type).items(): properties[prop] = val yield self.validateProperties(collector, properties) # create the interface for the setuptools entry point ep = Application(__name__, "Buildbot nested parameter example") api = Api(ep) ep.resource.putChild("api", api.app.resource()) buildbot-3.4.0/www/nestedexample/buildbot_nestedexample/api.py000066400000000000000000000014721413250514000246500ustar00rootroot00000000000000 import json from klein import Klein from twisted.internet import defer class Api: app = Klein() pizzaIngredients = {'margherita': ['tomato', 'ham', 'cheese'], 'regina': ['tomato', 'ham', 'cheese', 'mushrooms']} def __init__(self, ep): self.ep = ep @app.route("/getIngredients", methods=['GET']) def getIngredients(self, request): pizzaArgument = request.args.get('pizza') if pizzaArgument is None: return defer.succeed(json.dumps("invalid request")) pizza = pizzaArgument[0].lower() res = self.pizzaIngredients.get(pizza, ["only {} are supported " "for now".format(self.pizzaIngredients.keys())]) return defer.succeed(json.dumps(res)) buildbot-3.4.0/www/nestedexample/guanlecoja/000077500000000000000000000000001413250514000211075ustar00rootroot00000000000000buildbot-3.4.0/www/nestedexample/guanlecoja/config.js000066400000000000000000000037251413250514000227210ustar00rootroot00000000000000/* *///########################################################################################### // // This module contains all configuration for the build process // /* *///########################################################################################### const ANGULAR_TAG = "~1.5.3"; const config = { /* 
*///####################################################################################### // Name of the plugin /* *///####################################################################################### name: 'nestedexample', /* *///####################################################################################### // Directories /* *///####################################################################################### dir: { // The build folder is where the app resides once it's completely built build: 'buildbot_nestedexample/static' }, /* *///####################################################################################### // Bower dependencies configuration /* *///####################################################################################### bower: { testdeps: { jquery: { version: '2.1.1', files: 'dist/jquery.js' }, angular: { version: ANGULAR_TAG, files: 'angular.js' }, lodash: { version: "~2.4.1", files: 'dist/lodash.js' }, "angular-mocks": { version: ANGULAR_TAG, files: "angular-mocks.js" } } }, buildtasks: ['scripts', 'styles', 'fonts', 'imgs', 'index', 'tests', 'generatedfixtures', 'fixtures'], karma: { // we put tests first, so that we have angular, and fake app defined files: ["tests.js", "scripts.js", 'fixtures.js', "mode-python.js"] } }; module.exports = config; buildbot-3.4.0/www/nestedexample/gulpfile.js000066400000000000000000000000471413250514000211450ustar00rootroot00000000000000require("guanlecoja")(require("gulp")) buildbot-3.4.0/www/nestedexample/package.json000066400000000000000000000003131413250514000212620ustar00rootroot00000000000000{ "name": "buildbot-nestedexample", "engines": { "node": ">=0.10.0", "npm": ">=1.4.0" }, "dependencies": { "guanlecoja": "~1.1.0", "gulp": "3.9.0" } } 
buildbot-3.4.0/www/nestedexample/setup.cfg000066400000000000000000000000001413250514000206060ustar00rootroot00000000000000buildbot-3.4.0/www/nestedexample/setup.py000066400000000000000000000016321413250514000205130ustar00rootroot00000000000000#!/usr/bin/env python try: from buildbot_pkg import setup_www_plugin except ImportError: import sys print('Please install buildbot_pkg module in order to install that ' 'package, or use the pre-build .whl modules available on pypi', file=sys.stderr) sys.exit(1) setup_www_plugin( name='buildbot-nestedexample', description='"An example of a custom nested parameter"', author=u'Ion Alberdi', author_email=u'ialberdi@intel.com', url='http://buildbot.net/', version='0.0.1', packages=['buildbot_nestedexample'], install_requires=[ 'klein' ], package_data={ '': [ 'VERSION', 'static/*' ] }, entry_points=""" [buildbot.www] nestedexample = buildbot_nestedexample:ep """, classifiers=[ 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)' ], ) buildbot-3.4.0/www/nestedexample/src/000077500000000000000000000000001413250514000175665ustar00rootroot00000000000000buildbot-3.4.0/www/nestedexample/src/module/000077500000000000000000000000001413250514000210535ustar00rootroot00000000000000buildbot-3.4.0/www/nestedexample/src/module/nestedexamplefield.directive.js000066400000000000000000000057071413250514000272410ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class Nestedexamplefield { constructor() { return { replace: false, restrict: 'E', scope: false, templateUrl: "nestedexample/views/nestedexamplefield.html", controller: '_nestedexamplefieldController' }; } } class _nestedexamplefield { constructor($scope, $http) { // boilerplate to extract our two embedded // UI elements // the name of the embedded UI elements 
// are prefixed by the type of the root // element, "nestedexample" in our case. // This method permits to compute that // prefixed name. const createNestedName = name => `nestedexample_${name}`; // utility method to find the embedded // field from the scope const findNestedElement = function(name) { const nameInNestedField = createNestedName(name); let res = undefined; $scope.field.fields.forEach(function(v, i) { if (v.fullName === nameInNestedField) { res = v; } }); return res; }; // we put our two embedded fields in the scope $scope.pizza = findNestedElement('pizza'); $scope.ingredients = findNestedElement('ingredients'); // function that will be called each time a change // event happens in the pizza input. const ingredientsUrl = pizza => `nestedexample/api/getIngredients?pizza=${pizza}`; const updateValues = function(pizza) { if (pizza === "") { $scope.ingredients.choices = []; $scope.ingredients.value = ""; return; } $http.get(ingredientsUrl(pizza)).then(function(r) { if (r.status === 200) { if (r.data.error != null) { $scope.ingredients.choices = [r.data.error]; } else { $scope.ingredients.choices = r.data; } } else { const error = `unexpected error got ${r.status}`; $scope.ingredients.choices = [error]; } if ($scope.ingredients.choices.length > 0) { $scope.ingredients.value = $scope.ingredients.choices[0]; } else { $scope.ingredients.value = ""; } }); }; $scope.getIngredients = () => updateValues($scope.pizza.value); } } angular.module('nestedexample', ['common']) .directive('nestedexamplefield', [Nestedexamplefield]) .controller('_nestedexamplefieldController', ['$scope', '$http', _nestedexamplefield]); buildbot-3.4.0/www/nestedexample/src/module/nestedexamplefield.tpl.jade000066400000000000000000000021071413250514000263400ustar00rootroot00000000000000div //- basefield is an angular-js directive provided by buildbot-nine. 
//- (ng-init="field=pizza") (ng-init="field=ingredients") //- are necessary to benefit //- from the error reporting mechanism provided by basefield //- in our embedded UI elements. They just say that the field //- element to process is not the root element (here nestedexample) but //- each of the embedded elements. basefield(ng-init="field=pizza") label.control-label.col-sm-2(for="{{pizza.name}}") | {{pizza.label}} .col-sm-10 input.form-control(type='text', name="{{pizza.name}}", ng-model="pizza.value", ng-change="getIngredients()") basefield(ng-init="field=ingredients") label.control-label.col-sm-2(for="{{ingredients.name}}") | {{ingredients.label}} .col-sm-10 select.form-control(name="{{ingredients.name}}", ng-model="ingredients.value", ng-options="v for v in ingredients.choices") buildbot-3.4.0/www/nestedexample/src/styles/000077500000000000000000000000001413250514000211115ustar00rootroot00000000000000buildbot-3.4.0/www/nestedexample/src/styles/styles.less000066400000000000000000000000001413250514000233120ustar00rootroot00000000000000buildbot-3.4.0/www/nestedexample/yarn.lock000066400000000000000000012152541413250514000206340ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
# yarn lockfile v1 "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.7.4.tgz#37e864532200cb6b50ee9a4045f5f817840166ab" integrity sha512-+bYbx56j4nYBmpsWtnPUsKW3NdnYxbqyfrP2w9wILBuHzdfIKz9prieZK0DFPyIzkjYVUe4QkusGL07r5pXznQ== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helpers" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" convert-source-map "^1.7.0" debug "^4.1.0" json5 "^2.1.0" lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" "@babel/generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.7.4.tgz#db651e2840ca9aa66f327dcec1dc5f5fa9611369" integrity sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg== dependencies: "@babel/types" "^7.7.4" jsesc "^2.5.1" lodash "^4.17.13" source-map "^0.5.0" "@babel/helper-annotate-as-pure@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.4.tgz#bb3faf1e74b74bd547e867e48f551fa6b098b6ce" integrity sha512-2BQmQgECKzYKFPpiycoF9tlb5HA4lrVyAmLLVK177EcQAqjVLciUb2/R+n1boQ9y5ENV3uz2ZqiNw7QMBBw1Og== dependencies: "@babel/types" "^7.7.4" "@babel/helper-builder-binary-assignment-operator-visitor@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.4.tgz#5f73f2b28580e224b5b9bd03146a4015d6217f5f" integrity 
sha512-Biq/d/WtvfftWZ9Uf39hbPBYDUo986m5Bb4zhkeYDGUllF43D+nUe5M6Vuo6/8JDK/0YX/uBdeoQpyaNhNugZQ== dependencies: "@babel/helper-explode-assignable-expression" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-call-delegate@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.7.4.tgz#621b83e596722b50c0066f9dc37d3232e461b801" integrity sha512-8JH9/B7J7tCYJ2PpWVpw9JhPuEVHztagNVuQAFBVFYluRMlpG7F1CgKEgGeL6KFqcsIa92ZYVj6DSc0XwmN1ZA== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-create-regexp-features-plugin@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.4.tgz#6d5762359fd34f4da1500e4cff9955b5299aaf59" integrity sha512-Mt+jBKaxL0zfOIWrfQpnfYCN7/rS6GKx6CCCfuoqVVd+17R8zNDlzVYmIi9qyb2wOk002NsmSTDymkIygDUH7A== dependencies: "@babel/helper-regex" "^7.4.4" regexpu-core "^4.6.0" "@babel/helper-define-map@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz#2841bf92eb8bd9c906851546fe6b9d45e162f176" integrity sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-explode-assignable-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.4.tgz#fa700878e008d85dc51ba43e9fb835cddfe05c84" integrity sha512-2/SicuFrNSXsZNBxe5UGdLr+HZg+raWBLE9vC98bdYOKX/U6PY0mdGlYUJdtTDPSU0Lw0PNbKKDpwYHJLn2jLg== dependencies: "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-function-name@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz#ab6e041e7135d436d8f0a3eca15de5b67a341a2e" integrity sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ== dependencies: "@babel/helper-get-function-arity" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-get-function-arity@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz#cb46348d2f8808e632f0ab048172130e636005f0" integrity sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA== dependencies: "@babel/types" "^7.7.4" "@babel/helper-hoist-variables@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.4.tgz#612384e3d823fdfaaf9fce31550fe5d4db0f3d12" integrity sha512-wQC4xyvc1Jo/FnLirL6CEgPgPCa8M74tOdjWpRhQYapz5JC7u3NYU1zCVoVAGCE3EaIP9T1A3iW0WLJ+reZlpQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-member-expression-to-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.4.tgz#356438e2569df7321a8326644d4b790d2122cb74" integrity sha512-9KcA1X2E3OjXl/ykfMMInBK+uVdfIVakVe7W7Lg3wfXUNyS3Q1HWLFRwZIjhqiCGbslummPDnmb7vIekS0C1vw== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-imports@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.7.4.tgz#e5a92529f8888bf319a6376abfbd1cebc491ad91" integrity sha512-dGcrX6K9l8258WFjyDLJwuVKxR4XZfU0/vTUgOQYWEnRD8mgr+p4d6fCUMq/ys0h4CCt/S5JhbvtyErjWouAUQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-transforms@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.7.4.tgz#8d7cdb1e1f8ea3d8c38b067345924ac4f8e0879a" 
integrity sha512-ehGBu4mXrhs0FxAqN8tWkzF8GSIGAiEumu4ONZ/hD9M88uHcD+Yu2ttKfOCgwzoesJOJrtQh7trI5YPbRtMmnA== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-simple-access" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-optimise-call-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.4.tgz#034af31370d2995242aa4df402c3b7794b2dcdf2" integrity sha512-VB7gWZ2fDkSuqW6b1AKXkJWO5NyNI3bFL/kK79/30moK57blr6NbH8xcl2XcKCwOmJosftWunZqfO84IGq3ZZg== dependencies: "@babel/types" "^7.7.4" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.4.tgz#c68c2407350d9af0e061ed6726afb4fff16d0234" integrity sha512-Sk4xmtVdM9sA/jCI80f+KS+Md+ZHIpjuqmYPk1M7F/upHou5e4ReYmExAiu6PVe65BhJPZA2CY9x9k4BqE5klw== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-wrap-function" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-replace-supers@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.7.4.tgz#3c881a6a6a7571275a72d82e6107126ec9e2cdd2" integrity sha512-pP0tfgg9hsZWo5ZboYGuBn/bbYT/hdLPVSS4NMmiRJdwWhP0IznPwN9AE1JwyGsjSPLC364I0Qh5p+EPkGPNpg== dependencies: "@babel/helper-member-expression-to-functions" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-simple-access@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.7.4.tgz#a169a0adb1b5f418cfc19f22586b2ebf58a9a294" integrity sha512-zK7THeEXfan7UlWsG2A6CI/L9jVnI5+xxKZOdej39Y0YtDYKx9raHk5F2EtK9K8DHRTihYwg20ADt9S36GR78A== dependencies: "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-split-export-declaration@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz#57292af60443c4a3622cf74040ddc28e68336fd8" integrity sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug== dependencies: "@babel/types" "^7.7.4" "@babel/helper-wrap-function@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz#37ab7fed5150e22d9d7266e830072c0cdd8baace" integrity sha512-VsfzZt6wmsocOaVU0OokwrIytHND55yvyT4BPB9AIIgwr8+x7617hetdJTsuGwygN5RC6mxA9EJztTjuwm2ofg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helpers@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.7.4.tgz#62c215b9e6c712dadc15a9a0dcab76c92a940302" integrity sha512-ak5NGZGJ6LV85Q1Zc9gn2n+ayXOizryhjSUBTdu5ih1tlVCJeuQENzc4ItyCVhINVXvIT/ZQ4mheGIsfBkpskg== dependencies: "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/highlight@^7.0.0": version "7.5.0" resolved 
"https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" "@babel/parser@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.7.4.tgz#75ab2d7110c2cf2fa949959afb05fa346d2231bb" integrity sha512-jIwvLO0zCL+O/LmEJQjWA75MQTWwx3c3u2JOTDK5D3/9egrWRRA0/0hk9XXywYnXZVVpzrBYeIQTmhwUaePI9g== "@babel/plugin-proposal-async-generator-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz#0351c5ac0a9e927845fffd5b82af476947b7ce6d" integrity sha512-1ypyZvGRXriY/QP668+s8sFr2mqinhkRDMPSQLNghCQE+GAkFtp+wkHVvg2+Hdki8gwP+NFzJBJ/N1BfzCCDEw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-proposal-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.4.tgz#dde64a7f127691758cbfed6cf70de0fa5879d52d" integrity sha512-StH+nGAdO6qDB1l8sZ5UBV8AC3F2VW2I8Vfld73TMKyptMU9DY5YsJAS8U81+vEtxcH3Y/La0wG0btDrhpnhjQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.7.4.tgz#7700a6bfda771d8dc81973249eac416c6b4c697d" integrity sha512-wQvt3akcBTfLU/wYoqm/ws7YOAQKu8EVJEvHip/mzkNtjaclQoCCIqKXFP5/eyfnfbQCDV3OLRIK3mIVyXuZlw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.7.4.tgz#cc57849894a5c774214178c8ab64f6334ec8af71" integrity sha512-rnpnZR3/iWKmiQyJ3LKJpSwLDcX/nSXhdLk4Aq/tXOApIvyu7qoabrige0ylsAJffaUC51WiBu209Q0U+86OWQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.7.4.tgz#ec21e8aeb09ec6711bc0a39ca49520abee1de379" integrity sha512-DyM7U2bnsQerCQ+sejcTNZh8KQEUuC3ufzdnVnSiUv/qoGJp2Z3hanKL18KDhsBT5Wj6a7CMT5mdyCNJsEaA9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.4.tgz#7c239ccaf09470dbe1d453d50057460e84517ebb" integrity sha512-cHgqHgYvffluZk85dJ02vloErm3Y6xtH+2noOBOJ2kXOJH3aVCDnj5eR/lVNlTnYu4hndAPJD3rTFjW3qee0PA== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-async-generators@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.7.4.tgz#331aaf310a10c80c44a66b238b6e49132bd3c889" integrity sha512-Li4+EjSpBgxcsmeEF8IFcfV/+yJGxHXDirDkEoyFjumuwbmfCVHUt0HuowD/iGM7OhIRyXJH9YXxqiH6N815+g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.7.4.tgz#29ca3b4415abfe4a5ec381e903862ad1a54c3aec" integrity 
sha512-jHQW0vbRGvwQNgyVxwDh4yuXu4bH1f5/EICJLAhl1SblLs2CDhrsmCk+v5XLdE9wxtAFRyxx+P//Iw+a5L/tTg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.7.4.tgz#86e63f7d2e22f9e27129ac4e83ea989a382e86cc" integrity sha512-QpGupahTQW1mHRXddMG5srgpHWqRLwJnJZKXTigB9RPFCCGbDGCgBeM/iC82ICXp414WeYx/tD54w7M2qRqTMg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.7.4.tgz#47cf220d19d6d0d7b154304701f468fc1cc6ff46" integrity sha512-mObR+r+KZq0XhRVS2BrBKBpr5jqrqzlPvS9C9vuOf5ilSwzloAl7RPWLrgKdWS6IreaVrjHxTjtyqFiOisaCwg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.7.4.tgz#a3e38f59f4b6233867b4a92dcb0ee05b2c334aa6" integrity sha512-4ZSuzWgFxqHRE31Glu+fEr/MirNZOMYmD/0BhBWyLyOOQz/gTAl7QmWm2hX1QxEIXsr2vkdlwxIzTyiYRC4xcQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-top-level-await@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.4.tgz#bd7d8fa7b9fee793a36e4027fd6dd1aa32f946da" integrity sha512-wdsOw0MvkL1UIgiQ/IFr3ETcfv1xb8RMM0H9wbiDyLaJFyiDg5oZvDLCXosIXmFeIlweML5iOBXAkqddkYNizg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-arrow-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.7.4.tgz#76309bd578addd8aee3b379d809c802305a98a12" integrity 
sha512-zUXy3e8jBNPiffmqkHRNDdZM2r8DWhCB7HhcoyZjiK1TxYEluLHAvQuYnTT+ARqRpabWqy/NHkO6e3MsYB5YfA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.4.tgz#694cbeae6d613a34ef0292713fa42fb45c4470ba" integrity sha512-zpUTZphp5nHokuy8yLlyafxCJ0rSlFoSHypTUWgpdwoDXWQcseaect7cJ8Ppk6nunOM6+5rPMkod4OYKPR5MUg== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.7.4.tgz#d0d9d5c269c78eaea76227ace214b8d01e4d837b" integrity sha512-kqtQzwtKcpPclHYjLK//3lH8OFsCDuDJBaFhVwf8kqdnF6MN4l618UDlcA7TfRs3FayrHj+svYnSX8MC9zmUyQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-block-scoping@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.7.4.tgz#200aad0dcd6bb80372f94d9e628ea062c58bf224" integrity sha512-2VBe9u0G+fDt9B5OV5DQH4KBf5DoiNkwFKOz0TCvBWvdAN2rOykCTkrL+jTLxfCAm76l9Qo5OqL7HBOx2dWggg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" lodash "^4.17.13" "@babel/plugin-transform-classes@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.4.tgz#c92c14be0a1399e15df72667067a8f510c9400ec" integrity sha512-sK1mjWat7K+buWRuImEzjNf68qrKcrddtpQo3swi9j7dUcG6y6R6+Di039QN2bD1dykeswlagupEmpOatFHHUg== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-define-map" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/helper-replace-supers" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" globals "^11.1.0" "@babel/plugin-transform-computed-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.7.4.tgz#e856c1628d3238ffe12d668eb42559f79a81910d" integrity sha512-bSNsOsZnlpLLyQew35rl4Fma3yKWqK3ImWMSC/Nc+6nGjC9s5NFWAer1YQ899/6s9HxO2zQC1WoFNfkOqRkqRQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-destructuring@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.7.4.tgz#2b713729e5054a1135097b6a67da1b6fe8789267" integrity sha512-4jFMXI1Cu2aXbcXXl8Lr6YubCn6Oc7k9lLsu8v61TZh+1jny2BWmdtvY9zSUlLdGUvcy9DMAWyZEOqjsbeg/wA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-dotall-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.4.tgz#f7ccda61118c5b7a2599a72d5e3210884a021e96" integrity sha512-mk0cH1zyMa/XHeb6LOTXTbG7uIJ8Rrjlzu91pUx/KS3JpcgaTDwMS8kM+ar8SLOvlL2Lofi4CGBAjCo3a2x+lw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-duplicate-keys@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.7.4.tgz#3d21731a42e3f598a73835299dd0169c3b90ac91" integrity sha512-g1y4/G6xGWMD85Tlft5XedGaZBCIVN+/P0bs6eabmcPP9egFleMAo65OOjlhcz1njpwagyY3t0nsQC9oTFegJA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-exponentiation-operator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.7.4.tgz#dd30c0191e3a1ba19bcc7e389bdfddc0729d5db9" integrity 
sha512-MCqiLfCKm6KEA1dglf6Uqq1ElDIZwFuzz1WH5mTf8k2uQSxEJMbOIEh7IZv7uichr7PMfi5YVSrr1vz+ipp7AQ== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-for-of@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.7.4.tgz#248800e3a5e507b1f103d8b4ca998e77c63932bc" integrity sha512-zZ1fD1B8keYtEcKF+M1TROfeHTKnijcVQm0yO/Yu1f7qoDoxEIc/+GX6Go430Bg84eM/xwPFp0+h4EbZg7epAA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.4.tgz#75a6d3303d50db638ff8b5385d12451c865025b1" integrity sha512-E/x09TvjHNhsULs2IusN+aJNRV5zKwxu1cpirZyRPw+FyyIKEHPXTsadj48bVpc1R5Qq1B5ZkzumuFLytnbT6g== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.7.4.tgz#27fe87d2b5017a2a5a34d1c41a6b9f6a6262643e" integrity sha512-X2MSV7LfJFm4aZfxd0yLVFrEXAgPqYoDG53Br/tCKiKYfX0MjVjQeWPIhPHHsCqzwQANq+FLN786fF5rgLS+gw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-member-expression-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.7.4.tgz#aee127f2f3339fc34ce5e3055d7ffbf7aa26f19a" integrity sha512-9VMwMO7i69LHTesL0RdGy93JU6a+qOPuvB4F4d0kR0zyVjJRVJRaoaGjhtki6SzQUu8yen/vxPKN6CWnCUw6bA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-amd@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.7.4.tgz#276b3845ca2b228f2995e453adc2e6f54d72fb71" integrity sha512-/542/5LNA18YDtg1F+QHvvUSlxdvjZoD/aldQwkq+E3WCkbEjNSN9zdrOXaSlfg3IfGi22ijzecklF/A7kVZFQ== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-commonjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.4.tgz#bee4386e550446343dd52a571eda47851ff857a3" integrity sha512-k8iVS7Jhc367IcNF53KCwIXtKAH7czev866ThsTgy8CwlXjnKZna2VHwChglzLleYrcHz1eQEIJlGRQxB53nqA== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.7.4" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-systemjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.4.tgz#cd98152339d3e763dfe838b7d4273edaf520bb30" integrity sha512-y2c96hmcsUi6LrMqvmNDPBBiGCiQu0aYqpHatVVu6kD4mFEXKjyNxd/drc18XXAf9dv7UXjrZwBVmTTGaGP8iw== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-umd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.4.tgz#1027c355a118de0aae9fee00ad7813c584d9061f" integrity sha512-u2B8TIi0qZI4j8q4C51ktfO7E3cQ0qnaXFI1/OXITordD40tt17g/sXqgNNCcMTcBFKrUPcGDx+TBJuZxLx7tw== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-named-capturing-groups-regex@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.4.tgz#fb3bcc4ee4198e7385805007373d6b6f42c98220" integrity sha512-jBUkiqLKvUWpv9GLSuHUFYdmHg0ujC1JEYoZUfeOOfNydZXp1sXObgyPatpcwjWgsdBGsagWW0cdJpX/DO2jMw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/plugin-transform-new-target@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.7.4.tgz#4a0753d2d60639437be07b592a9e58ee00720167" integrity sha512-CnPRiNtOG1vRodnsyGX37bHQleHE14B9dnnlgSeEs3ek3fHN1A1SScglTCg1sfbe7sRQ2BUcpgpTpWSfMKz3gg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-object-super@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.7.4.tgz#48488937a2d586c0148451bf51af9d7dda567262" integrity sha512-ho+dAEhC2aRnff2JCA0SAK7V2R62zJd/7dmtoe7MHcso4C2mS+vZjn1Pb1pCVZvJs1mgsvv5+7sT+m3Bysb6eg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/plugin-transform-parameters@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.7.4.tgz#da4555c97f39b51ac089d31c7380f03bca4075ce" integrity sha512-VJwhVePWPa0DqE9vcfptaJSzNDKrWU/4FbYCjZERtmqEs05g3UMXnYMZoXja7JAJ7Y7sPZipwm/pGApZt7wHlw== dependencies: "@babel/helper-call-delegate" "^7.7.4" "@babel/helper-get-function-arity" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-property-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.7.4.tgz#2388d6505ef89b266103f450f9167e6bd73f98c2" integrity sha512-MatJhlC4iHsIskWYyawl53KuHrt+kALSADLQQ/HkhTjX954fkxIEh4q5slL4oRAnsm/eDoZ4q0CIZpcqBuxhJQ== dependencies: "@babel/helper-plugin-utils" 
"^7.0.0" "@babel/plugin-transform-regenerator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.4.tgz#d18eac0312a70152d7d914cbed2dc3999601cfc0" integrity sha512-e7MWl5UJvmPEwFJTwkBlPmqixCtr9yAASBqff4ggXTNicZiwbF8Eefzm6NVgfiBp7JdAGItecnctKTgH44q2Jw== dependencies: regenerator-transform "^0.14.0" "@babel/plugin-transform-reserved-words@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.7.4.tgz#6a7cf123ad175bb5c69aec8f6f0770387ed3f1eb" integrity sha512-OrPiUB5s5XvkCO1lS7D8ZtHcswIC57j62acAnJZKqGGnHP+TIc/ljQSrgdX/QyOTdEK5COAhuc820Hi1q2UgLQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.7.4.tgz#51fe458c1c1fa98a8b07934f4ed38b6cd62177a6" integrity sha512-O8kSkS5fP74Ad/8pfsCMGa8sBRdLxYoSReaARRNSz3FbFQj3z/QUvoUmJ28gn9BO93YfnXc3j+Xyaqe8cKDNBQ== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" resolve "^1.8.1" semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.7.4.tgz#74a0a9b2f6d67a684c6fbfd5f0458eb7ba99891e" integrity sha512-q+suddWRfIcnyG5YiDP58sT65AJDZSUhXQDZE3r04AuqD6d/XLaQPPXSBzP2zGerkgBivqtQm9XKGLuHqBID6Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.7.4.tgz#aa673b356fe6b7e70d69b6e33a17fef641008578" integrity sha512-8OSs0FLe5/80cndziPlg4R0K6HcWSM0zyNhHhLsmw/Nc5MaA49cAsnoJ/t/YZf8qkG7fD+UjTRaApVDB526d7Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-transform-sticky-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.7.4.tgz#ffb68c05090c30732076b1285dc1401b404a123c" integrity sha512-Ls2NASyL6qtVe1H1hXts9yuEeONV2TJZmplLONkMPUG158CtmnrzW5Q5teibM5UVOFjG0D3IC5mzXR6pPpUY7A== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.0.0" "@babel/plugin-transform-template-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.7.4.tgz#1eb6411736dd3fe87dbd20cc6668e5121c17d604" integrity sha512-sA+KxLwF3QwGj5abMHkHgshp9+rRz+oY9uoRil4CyLtgEuE/88dpkeWgNk5qKVsJE9iSfly3nvHapdRiIS2wnQ== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-typeof-symbol@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.7.4.tgz#3174626214f2d6de322882e498a38e8371b2140e" integrity sha512-KQPUQ/7mqe2m0B8VecdyaW5XcQYaePyl9R7IsKd+irzj6jvbhoGnRE+M0aNkyAzI07VfUQ9266L5xMARitV3wg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-unicode-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.4.tgz#a3c0f65b117c4c81c5b6484f2a5e7b95346b83ae" integrity sha512-N77UUIV+WCvE+5yHw+oks3m18/umd7y392Zv7mYTpFqHtkpcc+QUz+gLJNTWVlWROIWeLqY0f3OjZxV5TcXnRw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/preset-env@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.7.4.tgz#ccaf309ae8d1ee2409c85a4e2b5e280ceee830f8" integrity sha512-Dg+ciGJjwvC1NIe/DGblMbcGq1HOtKbw8RLl4nIjlfcILKEOkWT/vRqPpumswABEBVudii6dnVwrBtzD7ibm4g== dependencies: "@babel/helper-module-imports" "^7.7.4" 
"@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-proposal-async-generator-functions" "^7.7.4" "@babel/plugin-proposal-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-syntax-top-level-await" "^7.7.4" "@babel/plugin-transform-arrow-functions" "^7.7.4" "@babel/plugin-transform-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions" "^7.7.4" "@babel/plugin-transform-block-scoping" "^7.7.4" "@babel/plugin-transform-classes" "^7.7.4" "@babel/plugin-transform-computed-properties" "^7.7.4" "@babel/plugin-transform-destructuring" "^7.7.4" "@babel/plugin-transform-dotall-regex" "^7.7.4" "@babel/plugin-transform-duplicate-keys" "^7.7.4" "@babel/plugin-transform-exponentiation-operator" "^7.7.4" "@babel/plugin-transform-for-of" "^7.7.4" "@babel/plugin-transform-function-name" "^7.7.4" "@babel/plugin-transform-literals" "^7.7.4" "@babel/plugin-transform-member-expression-literals" "^7.7.4" "@babel/plugin-transform-modules-amd" "^7.7.4" "@babel/plugin-transform-modules-commonjs" "^7.7.4" "@babel/plugin-transform-modules-systemjs" "^7.7.4" "@babel/plugin-transform-modules-umd" "^7.7.4" "@babel/plugin-transform-named-capturing-groups-regex" "^7.7.4" "@babel/plugin-transform-new-target" "^7.7.4" "@babel/plugin-transform-object-super" "^7.7.4" "@babel/plugin-transform-parameters" "^7.7.4" "@babel/plugin-transform-property-literals" "^7.7.4" "@babel/plugin-transform-regenerator" "^7.7.4" "@babel/plugin-transform-reserved-words" "^7.7.4" "@babel/plugin-transform-shorthand-properties" "^7.7.4" 
"@babel/plugin-transform-spread" "^7.7.4" "@babel/plugin-transform-sticky-regex" "^7.7.4" "@babel/plugin-transform-template-literals" "^7.7.4" "@babel/plugin-transform-typeof-symbol" "^7.7.4" "@babel/plugin-transform-unicode-regex" "^7.7.4" "@babel/types" "^7.7.4" browserslist "^4.6.0" core-js-compat "^3.1.1" invariant "^2.2.2" js-levenshtein "^1.1.3" semver "^5.5.0" "@babel/runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.7.4.tgz#b23a856751e4bf099262f867767889c0e3fe175b" integrity sha512-r24eVUUr0QqNZa+qrImUk8fn5SPhHq+IfYvIoIMg0do3GdK9sMdiLKP3GYVVaxpPKORgm8KRKaNTEhAjgIpLMw== dependencies: regenerator-runtime "^0.13.2" "@babel/template@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.7.4.tgz#428a7d9eecffe27deac0a98e23bf8e3675d2a77b" integrity sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw== dependencies: "@babel/code-frame" "^7.0.0" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" "@babel/traverse@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.7.4.tgz#9c1e7c60fb679fe4fcfaa42500833333c2058558" integrity sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.13" "@babel/types@^7.2.0", "@babel/types@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.7.4.tgz#516570d539e44ddf308c07569c258ff94fde9193" integrity sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA== dependencies: esutils "^2.0.2" lodash "^4.17.13" to-fast-properties "^2.0.0" "@types/babel-types@*", "@types/babel-types@^7.0.0": version "7.0.9" resolved 
"https://registry.yarnpkg.com/@types/babel-types/-/babel-types-7.0.9.tgz#01d7b86949f455402a94c788883fe4ba574cad41" integrity sha512-qZLoYeXSTgQuK1h7QQS16hqLGdmqtRmN8w/rl3Au/l5x/zkHx+a4VHrHyBsi1I1vtK2oBHxSzKIu0R5p6spdOA== "@types/babylon@^6.16.2": version "6.16.5" resolved "https://registry.yarnpkg.com/@types/babylon/-/babylon-6.16.5.tgz#1c5641db69eb8cdf378edd25b4be7754beeb48b4" integrity sha512-xH2e58elpj1X4ynnKp9qSnWlsRTIs6n3tgLGNfwAGHwePw0mulHQllV34n0T25uYSu1k0hRKkWXF890B1yS47w== dependencies: "@types/babel-types" "*" "@types/color-name@^1.1.1": version "1.1.1" resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0" integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ== Base64@~0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/Base64/-/Base64-0.2.1.tgz#ba3a4230708e186705065e66babdd4c35cf60028" integrity sha1-ujpCMHCOGGcFBl5mur3Uw1z2ACg= JSONStream@~0.6.4: version "0.6.4" resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-0.6.4.tgz#4b2c8063f8f512787b2375f7ee9db69208fa2dcb" integrity sha1-SyyAY/j1Enh7I3X37p22kgj6Lcs= dependencies: jsonparse "0.0.5" through "~2.2.7" JSONStream@~0.7.1: version "0.7.4" resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-0.7.4.tgz#734290e41511eea7c2cfe151fbf9a563a97b9786" integrity sha1-c0KQ5BUR7qfCz+FR+/mlY6l7l4Y= dependencies: jsonparse "0.0.5" through ">=2.2.7 <3" abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== abbrev@1.0.x, abbrev@~1.0.4: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU= accepts@1.3.3: version "1.3.3" resolved 
"https://registry.yarnpkg.com/accepts/-/accepts-1.3.3.tgz#c3ca7434938648c3e0d9c1e328dd68b622c284ca" integrity sha1-w8p0NJOGSMPg2cHjKN1otiLChMo= dependencies: mime-types "~2.1.11" negotiator "0.6.1" acorn-globals@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-3.1.0.tgz#fd8270f71fbb4996b004fa880ee5d46573a731bf" integrity sha1-/YJw9x+7SZawBPqIDuXUZXOnMb8= dependencies: acorn "^4.0.4" acorn@^2.7.0: version "2.7.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-2.7.0.tgz#ab6e7d9d886aaca8b085bc3312b79a198433f0e7" integrity sha1-q259nYhqrKiwhbwzEreaGYQz8Oc= acorn@^3.1.0: version "3.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" integrity sha1-ReN/s56No/JbruP/U2niu18iAXo= acorn@^4.0.3, acorn@^4.0.4, acorn@~4.0.2: version "4.0.13" resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787" integrity sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c= acorn@~2.6.4: version "2.6.4" resolved "https://registry.yarnpkg.com/acorn/-/acorn-2.6.4.tgz#eb1f45b4a43fa31d03701a5ec46f3b52673e90ee" integrity sha1-6x9FtKQ/ox0DcBpexG87Umc+kO4= after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" integrity sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8= ajv@^6.12.3: version "6.12.4" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.4.tgz#0614facc4522127fa713445c6bfd3ebd376e2234" integrity sha512-eienB2c9qVQs2KWexhkrdMLVDoIQCz5KSeLxwg9Lzk4DOfBtIK9PQwwufcsn1jjGuf9WZmqPMbGxOzfcuphJCQ== dependencies: fast-deep-equal "^3.1.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" align-text@^0.1.1, align-text@^0.1.3: version "0.1.4" resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" integrity sha1-DNkKVhCT810KmSVsIrcGlDP60Rc= dependencies: kind-of "^3.0.2" longest "^1.0.1" repeat-string "^1.5.2" 
alter@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/alter/-/alter-0.2.0.tgz#c7588808617572034aae62480af26b1d4d1cb3cd" integrity sha1-x1iICGF1cgNKrmJICvJrHU0cs80= dependencies: stable "~0.1.3" amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" integrity sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU= ansi-colors@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-1.1.0.tgz#6374b4dd5d4718ff3ce27a671a3b1cad077132a9" integrity sha512-SFKX67auSNoVR38N3L+nvsPjOE0bybKTYbkf5tRvushrAPQ9V75huw0ZxBkKVeRU9kqH3d6HA4xTckbwZ4ixmA== dependencies: ansi-wrap "^0.1.0" ansi-gray@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-gray/-/ansi-gray-0.1.1.tgz#2962cf54ec9792c48510a3deb524436861ef7251" integrity sha1-KWLPVOyXksSFEKPetSRDaGHvclE= dependencies: ansi-wrap "0.1.0" ansi-regex@^0.2.0, ansi-regex@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-0.2.1.tgz#0d8e946967a3d8143f93e24e298525fc1b2235f9" integrity sha1-DY6UaWej2BQ/k+JOKYUl/BsiNfk= ansi-regex@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-1.1.1.tgz#41c847194646375e6a1a5d10c3ca054ef9fc980d" integrity sha1-QchHGUZGN15qGl0Qw8oFTvn8mA0= ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-regex@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== ansi-styles@^1.1.0: version "1.1.0" 
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-1.1.0.tgz#eaecbf66cd706882760b2f4691582b8f55d7a7de" integrity sha1-6uy/Zs1waIJ2Cy9GkVgrj1XXp94= ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.0, ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" ansi-styles@^4.1.0: version "4.2.0" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.2.0.tgz#5681f0dcf7ae5880a7841d8831c4724ed9cc0172" integrity sha512-7kFQgnEaMdRtwf6uSfUnVr9gSGC7faurn+J/Mv90/W+iTtN0405/nLdopfMWwchyxhbGYl6TC4Sccn9TUkGAgg== dependencies: "@types/color-name" "^1.1.1" color-convert "^2.0.1" ansi-styles@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-1.0.0.tgz#cb102df1c56f5123eab8b67cd7b98027a0279178" integrity sha1-yxAt8cVvUSPquLZ817mAJ6AnkXg= ansi-wrap@0.1.0, ansi-wrap@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/ansi-wrap/-/ansi-wrap-0.1.0.tgz#a82250ddb0015e9a27ca82e82ea603bbfa45efaf" integrity sha1-qCJQ3bABXponyoLoLqYDu/pF768= ansicolors@~0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/ansicolors/-/ansicolors-0.2.1.tgz#be089599097b74a5c9c4a84a0cdbcdb62bd87aef" integrity sha1-vgiVmQl7dKXJxKhKDNvNtivYeu8= anymatch@^1.3.0: version "1.3.2" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.2.tgz#553dcb8f91e3c889845dfdba34c77721b90b9d7a" integrity sha512-0XNayC8lTHQ2OI8aljNCN3sSx6hsr/1+rlcDAotXJR7C1oZZHCNsfpbKwMjRA3Uqb5tF1Rae2oloTr4xpq+WjA== dependencies: micromatch "^2.1.5" normalize-path "^2.0.0" aproba@^1.0.3: version "1.2.0" resolved 
"https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== archy@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/archy/-/archy-1.0.0.tgz#f9c8c13757cc1dd7bc379ac77b2c62a5c2868c40" integrity sha1-+cjBN1fMHde8N5rHeyxipcKGjEA= archy@~0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/archy/-/archy-0.0.2.tgz#910f43bf66141fc335564597abc189df44b3d35e" integrity sha1-kQ9Dv2YUH8M1VkWXq8GJ30Sz014= are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" "argparse@~ 0.1.11": version "0.1.16" resolved "https://registry.yarnpkg.com/argparse/-/argparse-0.1.16.tgz#cfd01e0fbba3d6caed049fbd758d40f65196f57c" integrity sha1-z9AeD7uj1srtBJ+9dY1A9lGW9Xw= dependencies: underscore "~1.7.0" underscore.string "~2.4.0" arr-diff@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf" integrity sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8= dependencies: arr-flatten "^1.0.1" arr-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= arr-flatten@^1.0.1, arr-flatten@^1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-union@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= array-differ@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-1.0.0.tgz#eff52e3758249d33be402b8bb8e564bb2b5d4031" integrity sha1-7/UuN1gknTO+QCuLuOVkuytdQDE= array-each@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/array-each/-/array-each-1.0.1.tgz#a794af0c05ab1752846ee753a1f211a05ba0c44f" integrity sha1-p5SvDAWrF1KEbudTofIRoFugxE8= array-filter@~0.0.0: version "0.0.1" resolved "https://registry.yarnpkg.com/array-filter/-/array-filter-0.0.1.tgz#7da8cf2e26628ed732803581fd21f67cacd2eeec" integrity sha1-fajPLiZijtcygDWB/SH2fKzS7uw= array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= array-map@~0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/array-map/-/array-map-0.0.0.tgz#88a2bab73d1cf7bcd5c1b118a003f66f665fa662" integrity sha1-iKK6tz0c97zVwbEYoAP2b2ZfpmI= array-reduce@~0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/array-reduce/-/array-reduce-0.0.0.tgz#173899d3ffd1c7d9383e4479525dbe278cab5f2b" integrity sha1-FziZ0//Rx9k4PkR5Ul2+J4yrXys= array-slice@^0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-0.2.3.tgz#dd3cfb80ed7973a75117cdac69b0b99ec86186f5" integrity sha1-3Tz7gO15c6dRF82sabC5nshhhvU= array-slice@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-1.1.0.tgz#e368ea15f89bc7069f7ffb89aec3a6c7d4ac22d4" integrity 
sha512-B1qMD3RBP7O8o0H2KbrXDyB0IccejMF15+87Lvlor12ONPRHP6gTjXMNkt/d3ZuOGbAe66hFmaCfECI24Ufp6w== array-uniq@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= array-unique@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53" integrity sha1-odl8yvy8JiXMcPrc6zalDFiwGlM= array-unique@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= arraybuffer.slice@0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.6.tgz#f33b2159f0532a3f3107a272c0ccfbd1ad2979ca" integrity sha1-8zshWfBTKj8xB6JywMz70a0peco= asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= asn1@0.1.11: version "0.1.11" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.1.11.tgz#559be18376d08a4ec4dbe80877d27818639b2df7" integrity sha1-VZvhg3bQik7E2+gId9J4GGObLfc= asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assert-plus@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.1.5.tgz#ee74009413002d84cec7219c6ac811812e723160" integrity sha1-7nQAlBMALYTOxyGcasgRgS5yMWA= assert@~1.1.0: version "1.1.2" resolved 
"https://registry.yarnpkg.com/assert/-/assert-1.1.2.tgz#adaa04c46bb58c6dd1f294da3eb26e6228eb6e44" integrity sha1-raoExGu1jG3R8pTaPrJuYijrbkQ= dependencies: util "0.10.3" assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= astw@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/astw/-/astw-2.2.0.tgz#7bd41784d32493987aeb239b6b4e1c57a873b917" integrity sha1-e9QXhNMkk5h66yOba04cV6hzuRc= dependencies: acorn "^4.0.3" async-each@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async-foreach@^0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/async-foreach/-/async-foreach-0.1.3.tgz#36121f845c0578172de419a97dbeb1d16ec34542" integrity sha1-NhIfhFwFeBct5Bmpfb6x0W7DRUI= async@1.x: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= async@~0.2.6, async@~0.2.8, async@~0.2.9: version "0.2.10" resolved "https://registry.yarnpkg.com/async/-/async-0.2.10.tgz#b6bbe0b0674b9d719708ca38de8c237cb526c3d1" integrity sha1-trvgsGdLnXGXCMo43owjfLUmw9E= async@~0.8.0: version "0.8.0" resolved "https://registry.yarnpkg.com/async/-/async-0.8.0.tgz#ee65ec77298c2ff1456bc4418a052d0f06435112" integrity sha1-7mXsdymML/FFa8RBigUtDwZDURI= async@~0.9.0: version "0.9.2" resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d" integrity sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0= asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= atob@^2.1.1: version "2.1.2" 
resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== aws-sign2@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.5.0.tgz#c57103f7a17fc037f02d7c2e64b602ea223f7d63" integrity sha1-xXED96F/wDfwLXwuZLYC6iI/fWM= aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws-sign@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/aws-sign/-/aws-sign-0.3.0.tgz#3d81ca69b474b1e16518728b51c24ff0bbedc6e9" integrity sha1-PYHKabR0seFlGHKLUcJP8Lvtxuk= aws4@^1.8.0: version "1.10.1" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.10.1.tgz#e1e82e4f3e999e2cfd61b161280d16a111f86428" integrity sha512-zg7Hz2k5lI8kb7U32998pRRFin7zJlkfezGJjUc2heaD4Pw2wObakCDVzkKztTm/Ln7eiVvYsjqak0Ed4LkMDA== babel-plugin-angularjs-annotate@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/babel-plugin-angularjs-annotate/-/babel-plugin-angularjs-annotate-0.10.0.tgz#4213b3aaae494a087aad0b8237c5d0716d22ca76" integrity sha512-NPE7FOAxcLPCUR/kNkrhHIjoScR3RyIlRH3yRn79j8EZWtpILVnCOdA9yKfsOmRh6BHnLHKl8ZAThc+YDd/QwQ== dependencies: "@babel/code-frame" "^7.0.0" "@babel/types" "^7.2.0" simple-is "~0.2.0" babel-plugin-dynamic-import-node@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== dependencies: object.assign "^4.1.0" babel-runtime@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= 
dependencies: core-js "^2.4.0" regenerator-runtime "^0.11.0" babel-types@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: babel-runtime "^6.26.0" esutils "^2.0.2" lodash "^4.17.4" to-fast-properties "^1.0.3" babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" integrity sha1-c5JncZI7Whl0etZmqlzUv5xunOg= base64-js@~0.0.4: version "0.0.8" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-0.0.8.tgz#1101e9544f4a76b1bc3b26d452ca96d7a35e7978" integrity sha1-EQHpVE9KdrG8OybUUsqW16NeeXg= base64id@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY= base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" class-utils "^0.3.5" component-emitter "^1.2.1" define-property "^1.0.0" isobject "^3.0.1" mixin-deep "^1.2.0" pascalcase "^0.1.1" 
batch@^0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/batch/-/batch-0.5.3.tgz#3f3414f380321743bfc1042f9a83ff1d5824d464" integrity sha1-PzQU84AyF0O/wQQvmoP/HVgk1GQ= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" beeper@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/beeper/-/beeper-1.1.1.tgz#e6d5ea8c5dad001304a70b22638447f69cb2f809" integrity sha1-5tXqjF2tABMEpwsiY4RH9pyy+Ak= better-assert@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" integrity sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI= dependencies: callsite "1.0.0" binary-extensions@^1.0.0: version "1.13.1" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binary@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/binary/-/binary-0.3.0.tgz#9f60553bc5ce8c3386f3b553cff47462adecaa79" integrity sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk= dependencies: buffers "~0.1.1" chainsaw "~0.1.0" binaryextensions@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/binaryextensions/-/binaryextensions-1.0.1.tgz#1e637488b35b58bda5f4774bf96a5212a8c90755" integrity sha1-HmN0iLNbWL2l9HdL+WpSEqjJB1U= bl@~0.9.0: version "0.9.5" resolved "https://registry.yarnpkg.com/bl/-/bl-0.9.5.tgz#c06b797af085ea00bc527afc8efcf11de2232054" integrity sha1-wGt5evCF6gC8Unr8jvzxHeIjIFQ= dependencies: readable-stream "~1.0.26" blob@0.0.4: version "0.0.4" resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.4.tgz#bcf13052ca54463f30f9fc7e95b9a47630a94921" integrity sha1-vPEwUspURj8w+fx+lbmkdjCpSSE= block-stream@*: version "0.0.9" resolved 
"https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a" integrity sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo= dependencies: inherits "~2.0.0" bluebird@^2.9.27: version "2.11.0" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-2.11.0.tgz#534b9033c022c9579c56ba3b3e5a5caafbb650e1" integrity sha1-U0uQM8AiyVecVro7Plpcqvu2UOE= body-parser@^1.12.4: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== dependencies: bytes "3.1.0" content-type "~1.0.4" debug "2.6.9" depd "~1.1.2" http-errors "1.7.2" iconv-lite "0.4.24" on-finished "~2.3.0" qs "6.7.0" raw-body "2.4.0" type-is "~1.6.17" body-parser@~1.14.0: version "1.14.2" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.14.2.tgz#1015cb1fe2c443858259581db53332f8d0cf50f9" integrity sha1-EBXLH+LEQ4WCWVgdtTMy+NDPUPk= dependencies: bytes "2.2.0" content-type "~1.0.1" debug "~2.2.0" depd "~1.1.0" http-errors "~1.3.1" iconv-lite "0.4.13" on-finished "~2.3.0" qs "5.2.0" raw-body "~2.1.5" type-is "~1.6.10" boom@0.4.x: version "0.4.2" resolved "https://registry.yarnpkg.com/boom/-/boom-0.4.2.tgz#7a636e9ded4efcefb19cef4947a3c67dfaee911b" integrity sha1-emNune1O/O+xnO9JR6PGffrukRs= dependencies: hoek "0.9.x" bower-config@~0.5.0, bower-config@~0.5.2: version "0.5.3" resolved "https://registry.yarnpkg.com/bower-config/-/bower-config-0.5.3.tgz#98fc5b41a87870ef9cbb9297635cf81f5505fdb1" integrity sha1-mPxbQah4cO+cu5KXY1z4H1UF/bE= dependencies: graceful-fs "~2.0.0" mout "~0.9.0" optimist "~0.6.0" osenv "0.0.3" bower-endpoint-parser@~0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/bower-endpoint-parser/-/bower-endpoint-parser-0.2.2.tgz#00b565adbfab6f2d35addde977e97962acbcb3f6" integrity sha1-ALVlrb+rby01rd3pd+l5Yqy8s/Y= bower-json@~0.4.0: version "0.4.0" resolved 
"https://registry.yarnpkg.com/bower-json/-/bower-json-0.4.0.tgz#a99c3ccf416ef0590ed0ded252c760f1c6d93766" integrity sha1-qZw8z0Fu8FkO0N7SUsdg8cbZN2Y= dependencies: deep-extend "~0.2.5" graceful-fs "~2.0.0" intersect "~0.0.3" bower-logger@^0.2.2, bower-logger@~0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/bower-logger/-/bower-logger-0.2.2.tgz#39be07e979b2fc8e03a94634205ed9422373d381" integrity sha1-Ob4H6Xmy/I4DqUY0IF7ZQiNz04E= bower-registry-client@~0.2.0: version "0.2.4" resolved "https://registry.yarnpkg.com/bower-registry-client/-/bower-registry-client-0.2.4.tgz#269fc7e898b627fb939d1144a593254d7fbbeebc" integrity sha1-Jp/H6Ji2J/uTnRFEpZMlTX+77rw= dependencies: async "~0.2.8" bower-config "~0.5.0" graceful-fs "~2.0.0" lru-cache "~2.3.0" mkdirp "~0.3.5" request "~2.51.0" request-replay "~0.2.0" rimraf "~2.2.0" bower@1.3.8: version "1.3.8" resolved "https://registry.yarnpkg.com/bower/-/bower-1.3.8.tgz#afa3338a8a88a6e084c38112ea4a15998cbee3e6" integrity sha1-r6MzioqIpuCEw4ES6koVmYy+4+Y= dependencies: abbrev "~1.0.4" archy "~0.0.2" bower-config "~0.5.2" bower-endpoint-parser "~0.2.2" bower-json "~0.4.0" bower-logger "~0.2.2" bower-registry-client "~0.2.0" cardinal "~0.4.0" chalk "~0.4.0" chmodr "~0.1.0" decompress-zip "~0.0.6" fstream "~0.1.22" fstream-ignore "~0.0.6" glob "~4.0.2" graceful-fs "~3.0.1" handlebars "~1.3.0" inquirer "~0.5.1" insight "~0.3.0" is-root "~0.1.0" junk "~0.3.0" lockfile "~0.4.2" lru-cache "~2.5.0" mkdirp "~0.5.0" mout "~0.9.1" nopt "~3.0.0" opn "~0.1.1" osenv "~0.1.0" p-throttler "~0.0.1" promptly "~0.2.0" q "~1.0.1" request "~2.36.0" request-progress "~0.3.0" retry "~0.6.0" rimraf "~2.2.0" semver "~2.3.0" shell-quote "~1.4.1" stringify-object "~0.2.0" tar "~0.1.17" tmp "0.0.23" update-notifier "~0.2.0" which "~1.0.5" brace-expansion@^1.0.0, brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity 
sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" braces@^0.1.2: version "0.1.5" resolved "https://registry.yarnpkg.com/braces/-/braces-0.1.5.tgz#c085711085291d8b75fdd74eab0f8597280711e6" integrity sha1-wIVxEIUpHYt1/ddOqw+FlygHEeY= dependencies: expand-range "^0.1.0" braces@^1.8.2: version "1.8.5" resolved "https://registry.yarnpkg.com/braces/-/braces-1.8.5.tgz#ba77962e12dff969d6b76711e914b737857bf6a7" integrity sha1-uneWLhLf+WnWt2cR6RS3N4V79qc= dependencies: expand-range "^1.8.1" preserve "^0.2.0" repeat-element "^1.1.2" braces@^2.3.1: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" repeat-element "^1.1.2" snapdragon "^0.8.1" snapdragon-node "^2.0.1" split-string "^3.0.2" to-regex "^3.0.1" browser-pack@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/browser-pack/-/browser-pack-2.0.1.tgz#5d1c527f56c582677411c4db2a128648ff6bf150" integrity sha1-XRxSf1bFgmd0EcTbKhKGSP9r8VA= dependencies: JSONStream "~0.6.4" combine-source-map "~0.3.0" through "~2.3.4" browser-resolve@~1.2.1, browser-resolve@~1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-1.2.4.tgz#59ae7820a82955ecd32f5fb7c468ac21c4723806" integrity sha1-Wa54IKgpVezTL1+3xGisIcRyOAY= dependencies: resolve "0.6.3" browserify-shim@~2.0.10: version "2.0.10" resolved "https://registry.yarnpkg.com/browserify-shim/-/browserify-shim-2.0.10.tgz#74a0ed5b9b784a5a287906513a896d31f54a84b8" integrity sha1-dKDtW5t4SlooeQZROoltMfVKhLg= dependencies: through "~2.3.4" browserify-zlib@~0.1.2: version "0.1.4" resolved 
"https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.1.4.tgz#bb35f8a519f600e0fa6b8485241c979d0141fb2d" integrity sha1-uzX4pRn2AOD6a4SFJByXnQFB+y0= dependencies: pako "~0.2.0" browserify@3.x: version "3.46.1" resolved "https://registry.yarnpkg.com/browserify/-/browserify-3.46.1.tgz#2c2e4a7f2f408178e78c223b5b57b37c2185ad8e" integrity sha1-LC5Kfy9AgXjnjCI7W1ezfCGFrY4= dependencies: JSONStream "~0.7.1" assert "~1.1.0" browser-pack "~2.0.0" browser-resolve "~1.2.1" browserify-zlib "~0.1.2" buffer "~2.1.4" builtins "~0.0.3" commondir "0.0.1" concat-stream "~1.4.1" console-browserify "~1.0.1" constants-browserify "~0.0.1" crypto-browserify "~1.0.9" deep-equal "~0.1.0" defined "~0.0.0" deps-sort "~0.1.1" derequire "~0.8.0" domain-browser "~1.1.0" duplexer "~0.1.1" events "~1.0.0" glob "~3.2.8" http-browserify "~1.3.1" https-browserify "~0.0.0" inherits "~2.0.1" insert-module-globals "~6.0.0" module-deps "~2.0.0" os-browserify "~0.1.1" parents "~0.0.1" path-browserify "~0.0.0" process "^0.7.0" punycode "~1.2.3" querystring-es3 "0.2.0" resolve "~0.6.1" shallow-copy "0.0.1" shell-quote "~0.0.1" stream-browserify "~0.1.0" stream-combiner "~0.0.2" string_decoder "~0.0.0" subarg "0.0.1" syntax-error "~1.1.0" through2 "~0.4.1" timers-browserify "~1.0.1" tty-browserify "~0.0.0" umd "~2.0.0" url "~0.10.1" util "~0.10.1" vm-browserify "~0.0.1" xtend "^3.0.0" browserslist@^4.6.0, browserslist@^4.7.3: version "4.16.6" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== dependencies: caniuse-lite "^1.0.30001219" colorette "^1.2.2" electron-to-chromium "^1.3.723" escalade "^3.1.1" node-releases "^1.1.71" buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" integrity 
sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== buffer-alloc@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== dependencies: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" buffer-fill@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== buffer@~2.1.4: version "2.1.13" resolved "https://registry.yarnpkg.com/buffer/-/buffer-2.1.13.tgz#c88838ebf79f30b8b4a707788470bea8a62c2355" integrity sha1-yIg46/efMLi0pwd4hHC+qKYsI1U= dependencies: base64-js "~0.0.4" ieee754 "~1.1.1" buffers@~0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb" integrity sha1-skV5w77U1tOWru5tmorn9Ugqt7s= bufferstreams@~0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/bufferstreams/-/bufferstreams-0.0.2.tgz#7ce8dff968bbac00b9e90158a2c41456f740abdd" integrity sha1-fOjf+Wi7rAC56QFYosQUVvdAq90= dependencies: readable-stream "^1.0.26-2" builtins@~0.0.3: version "0.0.7" resolved "https://registry.yarnpkg.com/builtins/-/builtins-0.0.7.tgz#355219cd6cf18dbe7c01cc7fd2dce765cfdc549a" integrity sha1-NVIZzWzxjb58Acx/0tznZc/cVJo= bytes@2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-2.2.0.tgz#fd35464a403f6f9117c2de3609ecff9cae000588" integrity sha1-/TVGSkA/b5EXwt42Cez/nK4ABYg= bytes@2.4.0: version "2.4.0" resolved 
"https://registry.yarnpkg.com/bytes/-/bytes-2.4.0.tgz#7d97196f9d5baf7f6935e25985549edd2a6c2339" integrity sha1-fZcZb51br39pNeJZhVSe3SpsIzk= bytes@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" component-emitter "^1.2.1" get-value "^2.0.6" has-value "^1.0.0" isobject "^3.0.1" set-value "^2.0.0" to-object-path "^0.3.0" union-value "^1.0.0" unset-value "^1.0.0" callsite@1.0.0, callsite@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" integrity sha1-KAOY5dZkvXQDi28JBRU+borxvCA= camelcase-keys@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= dependencies: camelcase "^2.0.0" map-obj "^1.0.0" camelcase@^1.0.2: version "1.2.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" integrity sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk= camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= camelcase@^5.0.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== caniuse-lite@^1.0.30001219: version "1.0.30001228" resolved 
"https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz#bfdc5942cd3326fa51ee0b42fbef4da9d492a7fa" integrity sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A== cardinal@~0.4.0: version "0.4.4" resolved "https://registry.yarnpkg.com/cardinal/-/cardinal-0.4.4.tgz#ca5bb68a5b511b90fe93b9acea49bdee5c32bfe2" integrity sha1-ylu2iltRG5D+k7ms6km97lwyv+I= dependencies: ansicolors "~0.2.1" redeyed "~0.4.0" caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= caseless@~0.8.0: version "0.8.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.8.0.tgz#5bca2881d41437f54b2407ebe34888c7b9ad4f7d" integrity sha1-W8oogdQUN/VLJAfr40iIx7mtT30= center-align@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" integrity sha1-qg0yYptu6XIgBBHL1EYckHvCt60= dependencies: align-text "^0.1.3" lazy-cache "^1.0.3" chainsaw@~0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/chainsaw/-/chainsaw-0.1.0.tgz#5eab50b28afe58074d0d58291388828b5e5fbc98" integrity sha1-XqtQsor+WAdNDVgpE4iCi15fvJg= dependencies: traverse ">=0.3.0 <0.4" chalk@*: version "3.0.0" resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== dependencies: ansi-styles "^4.1.0" supports-color "^7.1.0" chalk@^0.5.0, chalk@^0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.5.1.tgz#663b3a648b68b55d04690d49167aa837858f2174" integrity sha1-Zjs6ZItotV0EaQ1JFnqoN4WPIXQ= dependencies: ansi-styles "^1.1.0" escape-string-regexp "^1.0.0" has-ansi "^0.1.0" strip-ansi "^0.3.0" supports-color "^0.2.0" chalk@^1.0.0, chalk@^1.1.1: version "1.1.3" resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" chalk@^2.0.0: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" chalk@~0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.4.0.tgz#5199a3ddcd0c1efe23bc08c1b027b06176e0c64f" integrity sha1-UZmj3c0MHv4jvAjBsCewYXbgxk8= dependencies: ansi-styles "~1.0.0" has-color "~0.1.0" strip-ansi "~0.1.0" character-parser@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" integrity sha1-x84o821LzZdE5f/CxfzeHHMmH8A= dependencies: is-regex "^1.0.3" chmodr@~0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/chmodr/-/chmodr-0.1.2.tgz#0dd8041c915087575bec383b47827bb7576a4fd6" integrity sha1-DdgEHJFQh1db7Dg7R4J7t1dqT9Y= chokidar@^1.4.1: version "1.7.0" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468" integrity sha1-eY5ol3gVHIB2tLNg5e3SjNortGg= dependencies: anymatch "^1.3.0" async-each "^1.0.0" glob-parent "^2.0.0" inherits "^2.0.1" is-binary-path "^1.0.0" is-glob "^2.0.0" path-is-absolute "^1.0.0" readdirp "^2.0.0" optionalDependencies: fsevents "^1.0.0" chownr@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.3.tgz#42d837d5239688d55f303003a508230fa6727142" integrity sha512-i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw== class-utils@^0.3.5: version "0.3.6" resolved 
"https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" define-property "^0.2.5" isobject "^3.0.0" static-extend "^0.1.1" clean-css@2.2.x: version "2.2.23" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-2.2.23.tgz#0590b5478b516c4903edc2d89bd3fdbdd286328c" integrity sha1-BZC1R4tRbEkD7cLYm9P9vdKGMow= dependencies: commander "2.2.x" clean-css@^4.0.4, clean-css@^4.1.11: version "4.2.1" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" integrity sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== dependencies: source-map "~0.6.0" cli-color@~0.2.2: version "0.2.3" resolved "https://registry.yarnpkg.com/cli-color/-/cli-color-0.2.3.tgz#0a25ceae5a6a1602be7f77d28563c36700274e88" integrity sha1-CiXOrlpqFgK+f3fShWPDZwAnTog= dependencies: es5-ext "~0.9.2" memoizee "~0.2.5" cli-color@~0.3.2: version "0.3.3" resolved "https://registry.yarnpkg.com/cli-color/-/cli-color-0.3.3.tgz#12d5bdd158ff8a0b0db401198913c03df069f6f5" integrity sha1-EtW90Vj/igsNtAEZiRPAPfBp9vU= dependencies: d "~0.1.1" es5-ext "~0.10.6" memoizee "~0.3.8" timers-ext "0.1" cliui@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" integrity sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE= dependencies: center-align "^0.1.1" right-align "^0.1.1" wordwrap "0.0.2" cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== dependencies: string-width "^3.1.0" strip-ansi "^5.2.0" wrap-ansi "^5.1.0" clone-buffer@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/clone-buffer/-/clone-buffer-1.0.0.tgz#e3e25b207ac4e701af721e2cb5a16792cac3dc58" integrity sha1-4+JbIHrE5wGvch4staFnksrD3Fg= clone-stats@^0.0.1, clone-stats@~0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/clone-stats/-/clone-stats-0.0.1.tgz#b88f94a82cf38b8791d58046ea4029ad88ca99d1" integrity sha1-uI+UqCzzi4eR1YBG6kAprYjKmdE= clone-stats@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/clone-stats/-/clone-stats-1.0.0.tgz#b3782dff8bb5474e18b9b6bf0fdfe782f8777680" integrity sha1-s3gt/4u1R04Yuba/D9/ngvh3doA= clone@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/clone/-/clone-0.2.0.tgz#c6126a90ad4f72dbf5acdb243cc37724fe93fc1f" integrity sha1-xhJqkK1Pctv1rNskPMN3JP6T/B8= clone@^1.0.0, clone@^1.0.2: version "1.0.4" resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4= clone@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= cloneable-readable@^1.0.0: version "1.1.3" resolved "https://registry.yarnpkg.com/cloneable-readable/-/cloneable-readable-1.1.3.tgz#120a00cb053bfb63a222e709f9683ea2e11d8cec" integrity sha512-2EF8zTQOxYq70Y4XKtorQupqF0m49MBz2/yf5Bj+MHjvpG3Hy7sImifnqD6UA+TKYxeSV+u6qqQPawN5UvnpKQ== dependencies: inherits "^2.0.1" process-nextick-args "^2.0.0" readable-stream "^2.3.5" code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= coffee-script@^1.7.1, coffee-script@~1: version "1.12.7" resolved "https://registry.yarnpkg.com/coffee-script/-/coffee-script-1.12.7.tgz#c05dae0cb79591d05b3070a8433a98c9a89ccc53" integrity sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw== coffee-script@~1.10.0: 
version "1.10.0" resolved "https://registry.yarnpkg.com/coffee-script/-/coffee-script-1.10.0.tgz#12938bcf9be1948fa006f92e0c4c9e81705108c0" integrity sha1-EpOLz5vhlI+gBvkuDEyegXBRCMA= coffeescript@^1.10.0: version "1.12.7" resolved "https://registry.yarnpkg.com/coffeescript/-/coffeescript-1.12.7.tgz#e57ee4c4867cf7f606bfc4a0f2d550c0981ddd27" integrity sha512-pLXHFxQMPklVoEekowk8b3erNynC+DVJzChxS/LCBBgR6/8AJkHivkm//zbowcfc7BTCAjryuhx6gPqPRfsFoA== collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit "^1.0.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-convert@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== dependencies: color-name "~1.1.4" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= color-name@~1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== color-support@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" integrity 
sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== colorette@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== colors@^1.1.0: version "1.4.0" resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== combine-source-map@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/combine-source-map/-/combine-source-map-0.3.0.tgz#d9e74f593d9cd43807312cb5d846d451efaa9eb7" integrity sha1-2edPWT2c1DgHMSy12EbUUe+qnrc= dependencies: convert-source-map "~0.3.0" inline-source-map "~0.3.0" source-map "~0.1.31" combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" combined-stream@~0.0.4, combined-stream@~0.0.5: version "0.0.7" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-0.0.7.tgz#0137e657baa5a7541c57ac37ac5fc07d73b4dc1f" integrity sha1-ATfmV7qlp1QcV6w3rF/AfXO03B8= dependencies: delayed-stream "0.0.5" commander@2.2.x: version "2.2.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.2.0.tgz#175ad4b9317f3ff615f201c1e57224f55a3e91df" integrity sha1-F1rUuTF/P/YV8gHB5XIk9Vo+kd8= commander@^2.19.0, commander@~2.20.3: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commondir@0.0.1: version 
"0.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-0.0.1.tgz#89f00fdcd51b519c578733fec563e6a6da7f5be2" integrity sha1-ifAP3NUbUZxXhzP+xWPmptp/W+I= component-bind@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E= component-emitter@1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.1.2.tgz#296594f2753daa63996d2af08d15a95116c9aec3" integrity sha1-KWWU8nU9qmOZbSrwjRWpURbJrsM= component-emitter@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== component-inherit@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" integrity sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM= concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= concat-stream@~1.4.1, concat-stream@~1.4.5: version "1.4.11" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.4.11.tgz#1dc9f666f2621da9c618b1e7f8f3b2ff70b5f76f" integrity sha512-X3JMh8+4je3U1cQpG87+f9lXHDrqcb2MVLg9L7o8b1UZ0DzhRrUpdn65ttzu10PpJPPI3MQNkis+oha6TSA9Mw== dependencies: inherits "~2.0.1" readable-stream "~1.1.9" typedarray "~0.0.5" concat-with-sourcemaps@*, concat-with-sourcemaps@^1.0.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/concat-with-sourcemaps/-/concat-with-sourcemaps-1.1.0.tgz#d4ea93f05ae25790951b99e7b3b09e3908a4082e" integrity sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg== dependencies: source-map "^0.6.1" configstore@^0.3.1: version "0.3.2" resolved "https://registry.yarnpkg.com/configstore/-/configstore-0.3.2.tgz#25e4c16c3768abf75c5a65bc61761f495055b459" integrity sha1-JeTBbDdoq/dcWmW8YXYfSVBVtFk= dependencies: graceful-fs "^3.0.1" js-yaml "^3.1.0" mkdirp "^0.5.0" object-assign "^2.0.0" osenv "^0.1.0" user-home "^1.0.0" uuid "^2.0.1" xdg-basedir "^1.0.0" configstore@~0.2.1: version "0.2.3" resolved "https://registry.yarnpkg.com/configstore/-/configstore-0.2.3.tgz#b1bdc4ad823a25423dc15d220fcc1ae1d7efab02" integrity sha1-sb3ErYI6JUI9wV0iD8wa4dfvqwI= dependencies: graceful-fs "~2.0.1" js-yaml "~3.0.1" mkdirp "~0.3.5" object-assign "~0.1.1" osenv "0.0.3" uuid "~1.4.1" connect@^3.3.5: version "3.7.0" resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== dependencies: debug "2.6.9" finalhandler "1.1.2" parseurl "~1.3.3" utils-merge "1.0.1" console-browserify@~1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.0.3.tgz#d3898d2c3a93102f364197f8874b4f92b5286a8e" integrity sha1-04mNLDqTEC82QZf4h0tPkrUoao4= console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= constantinople@^3.0.1, constantinople@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-3.1.2.tgz#d45ed724f57d3d10500017a7d3a889c1381ae647" integrity 
sha512-yePcBqEFhLOqSBtwYOGGS1exHo/s1xjekXiinh4itpNQGCu4KA1euPh1fg07N2wMITZXQkBz75Ntdt1ctGZouw== dependencies: "@types/babel-types" "^7.0.0" "@types/babylon" "^6.16.2" babel-types "^6.26.0" babylon "^6.18.0" constants-browserify@~0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-0.0.1.tgz#92577db527ba6c4cf0a4568d84bc031f441e21f2" integrity sha1-kld9tSe6bEzwpFaNhLwDH0QeIfI= content-type@~1.0.1, content-type@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^0.4.0: version "0.4.1" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-0.4.1.tgz#f919a0099fe31f80fc5a1d0eb303161b394070c7" integrity sha1-+RmgCZ/jH4D8Wh0OswMWGzlAcMc= convert-source-map@^1.1.1, convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== dependencies: safe-buffer "~5.1.1" convert-source-map@~0.3.0: version "0.3.5" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-0.3.5.tgz#f1d802950af7dd2631a1febe0596550c86ab3190" integrity sha1-8dgClQr33SYxof6+BZZVDIarMZA= convert-source-map@~1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.1.3.tgz#4829c877e9fe49b3161f3bf3673888e204699860" integrity sha1-SCnId+n+SbMWHzvzZziI4gRpmGA= cookie-jar@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/cookie-jar/-/cookie-jar-0.3.0.tgz#bc9a27d4e2b97e186cd57c9e2063cb99fa68cccc" integrity sha1-vJon1OK5fhhs1XyeIGPLmfpozMw= cookie@0.3.1: version "0.3.1" resolved 
"https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= core-js-compat@^3.1.1: version "3.4.2" resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.4.2.tgz#652fa7c54652b7f6586a893e37001df55ea2ac37" integrity sha512-W0Aj+LM3EAxxjD0Kp2o4be8UlnxIZHNupBv2znqrheR4aY2nOn91794k/xoSp+SxqqriiZpTsSwBtZr60cbkwQ== dependencies: browserslist "^4.7.3" semver "^6.3.0" core-js@^2.1.0: version "2.6.10" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.10.tgz#8a5b8391f8cc7013da703411ce5b585706300d7f" integrity sha512-I39t74+4t+zau64EN1fE5v2W31Adtc/REhzWN+gWRRXg6WH5qAsZm62DHpQ1+Yhe4047T55jvzz7MUqF/dBBlA== core-js@^2.4.0: version "2.6.12" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.12.tgz#d9333dfa7b065e347cc5682219d6f690859cc2ec" integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cross-spawn@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-3.0.1.tgz#1256037ecb9f0c5f79e3d6ef135e30770184b982" integrity sha1-ElYDfsufDF9549bvE14wdwGEuYI= dependencies: lru-cache "^4.0.1" which "^1.2.9" cryptiles@0.2.x: version "0.2.2" resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-0.2.2.tgz#ed91ff1f17ad13d3748288594f8a48a0d26f325c" integrity sha1-7ZH/HxetE9N0gohZT4pIoNJvMlw= dependencies: boom "0.4.x" crypto-browserify@~1.0.9: version "1.0.9" resolved 
"https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-1.0.9.tgz#cc5449685dfb85eb11c9828acc7cb87ab5bbfcc0" integrity sha1-zFRJaF37hesRyYKKzHy4erW7/MA= ctype@0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/ctype/-/ctype-0.5.3.tgz#82c18c2461f74114ef16c135224ad0b9144ca12f" integrity sha1-gsGMJGH3QRTvFsE1IkrQuRRMoS8= currently-unhandled@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= dependencies: array-find-index "^1.0.1" custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha1-XQKkaFCt8bSjF5RqOSj8y1v9BCU= d@1, d@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" type "^1.0.1" d@~0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/d/-/d-0.1.1.tgz#da184c535d18d8ee7ba2aa229b914009fae11309" integrity sha1-2hhMU10Y2O57oqoim5FACfrhEwk= dependencies: es5-ext "~0.10.2" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" dateformat@^1.0.11, dateformat@^1.0.6, dateformat@^1.0.7-1.2.3: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" integrity sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk= dependencies: get-stdin "^4.0.1" meow "^3.3.0" dateformat@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-2.2.0.tgz#4065e2013cf9fb916ddfd82efb506ad4c6769062" integrity sha1-QGXiATz5+5Ft39gu+1Bq1MZ2kGI= 
debug@2.2.0, debug@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/debug/-/debug-2.2.0.tgz#f87057e995b1a1f6ae6a4960664137bc56f039da" integrity sha1-+HBX6ZWxofauaklgZkE3vFbwOdo= dependencies: ms "0.7.1" debug@2.3.3: version "2.3.3" resolved "https://registry.yarnpkg.com/debug/-/debug-2.3.3.tgz#40c453e67e6e13c901ddec317af8986cda9eff8c" integrity sha1-QMRT5n5uE8kB3ewxeviYbNqe/4w= dependencies: ms "0.7.2" debug@2.6.9, debug@^2.1.0, debug@^2.2.0, debug@^2.3.3: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^3.2.6: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" debug@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" decamelize@^1.0.0, decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= decode-uri-component@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= decompress-zip@~0.0.6: version "0.0.8" resolved "https://registry.yarnpkg.com/decompress-zip/-/decompress-zip-0.0.8.tgz#4a265b22c7b209d7b24fa66f2b2dfbced59044f3" integrity sha1-SiZbIseyCdeyT6ZvKy37ztWQRPM= dependencies: binary "~0.3.0" graceful-fs "~3.0.0" mkpath "~0.1.0" nopt 
"~2.2.0" q "~1.0.0" readable-stream "~1.1.8" touch "0.0.2" deep-equal@~0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-0.1.2.tgz#b246c2b80a570a47c11be1d9bd1070ec878b87ce" integrity sha1-skbCuApXCkfBG+HZvRBw7IeLh84= deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-extend@~0.2.5: version "0.2.11" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.2.11.tgz#7a16ba69729132340506170494bc83f7076fe08f" integrity sha1-eha6aXKRMjQFBhcElLyD9wdv4I8= deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= defaults@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" integrity sha1-xlYFHpgX2f8I7YgUd/P+QBnz730= dependencies: clone "^1.0.2" define-properties@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" define-property@^2.0.2: version "2.0.2" resolved 
"https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" isobject "^3.0.1" defined@~0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/defined/-/defined-0.0.0.tgz#f35eea7d705e933baf13b2f03b3f83d921403b3e" integrity sha1-817qfXBekzuvE7LwOz+D2SFAOz4= delayed-stream@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-0.0.5.tgz#d4b1f43a93e8296dfe02694f4680bc37a313c73f" integrity sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8= delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@~1.1.0, depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= deprecated@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/deprecated/-/deprecated-0.0.1.tgz#f9c9af5464afa1e7a971458a8bdef2aa94d5bb19" integrity sha1-+cmvVGSvoeepcUWKi97yqpTVuxk= deps-sort@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/deps-sort/-/deps-sort-0.1.2.tgz#daa2fb614a17c9637d801e2f55339ae370f3611a" integrity sha1-2qL7YUoXyWN9gB4vVTOa43DzYRo= dependencies: JSONStream "~0.6.4" minimist "~0.0.1" through "~2.3.4" derequire@~0.8.0: version "0.8.0" resolved "https://registry.yarnpkg.com/derequire/-/derequire-0.8.0.tgz#c1f7f1da2cede44adede047378f03f444e9c4c0d" integrity sha1-wffx2izt5Ere3gRzePA/RE6cTA0= dependencies: esprima-fb "^3001.1.0-dev-harmony-fb" esrefactor "~0.1.0" estraverse "~1.5.0" 
destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= detective@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/detective/-/detective-3.1.0.tgz#77782444ab752b88ca1be2e9d0a0395f1da25eed" integrity sha1-d3gkRKt1K4jKG+Lp0KA5Xx2iXu0= dependencies: escodegen "~1.1.0" esprima-fb "3001.1.0-dev-harmony-fb" di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw= doctypes@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" integrity sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk= dom-serialize@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" integrity sha1-ViromZ9Evl6jB29UGdzVnrQ6yVs= dependencies: custom-event "~1.0.0" ent "~2.2.0" extend "^3.0.0" void-elements "^2.0.0" domain-browser@~1.1.0: version "1.1.7" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.1.7.tgz#867aa4b093faa05f1de08c06f4d7b21fdf8698bc" integrity sha1-hnqksJP6oF8d4IwG9NeyH9+GmLw= duplexer2@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.0.2.tgz#c614dcf67e2fb14995a91711e5a617e8a60a31db" integrity sha1-xhTc9n4vsUmVqRcR5aYX6KYKMds= dependencies: readable-stream "~1.1.9" duplexer@^0.1.1, duplexer@~0.1.1: version "0.1.1" resolved 
"https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.1.tgz#ace6ff808c1ce66b57d1ebf97977acb02334cfc1" integrity sha1-rOb/gIwc5mtX0ev5eXessCM0z8E= duplexify@^3.2.0, duplexify@^3.5.0: version "3.7.1" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" inherits "^2.0.1" readable-stream "^2.0.0" stream-shift "^1.0.0" ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.3.723: version "1.3.738" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.738.tgz#aec24b091c82acbfabbdcce08076a703941d17ca" integrity sha512-vCMf4gDOpEylPSLPLSwAEsz+R3ShP02Y3cAKMZvTqule3XcPp7tgc/0ESI7IS6ZeyBlGClE50N53fIOkcIVnpw== emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= end-of-stream@^1.0.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" 
end-of-stream@~0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-0.1.5.tgz#8e177206c3c80837d85632e8b9359dfe8b2f6eaf" integrity sha1-jhdyBsPICDfYVjLouTWd/osvbq8= dependencies: once "~1.3.0" engine.io-client@~1.8.4: version "1.8.5" resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-1.8.5.tgz#fe7fb60cb0dcf2fa2859489329cb5968dedeb11f" integrity sha512-AYTgHyeVUPitsseqjoedjhYJapNVoSPShbZ+tEUX9/73jgZ/Z3sUlJf9oYgdEBBdVhupUpUqSxH0kBCXlQnmZg== dependencies: component-emitter "1.2.1" component-inherit "0.0.3" debug "2.3.3" engine.io-parser "1.3.2" has-cors "1.1.0" indexof "0.0.1" parsejson "0.0.3" parseqs "0.0.5" parseuri "0.0.5" ws "~1.1.5" xmlhttprequest-ssl "1.5.3" yeast "0.1.2" engine.io-parser@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-1.3.2.tgz#937b079f0007d0893ec56d46cb220b8cb435220a" integrity sha1-k3sHnwAH0Ik+xW1GyyILjLQ1Igo= dependencies: after "0.8.2" arraybuffer.slice "0.0.6" base64-arraybuffer "0.1.5" blob "0.0.4" has-binary "0.1.7" wtf-8 "1.0.0" engine.io@~1.8.4: version "1.8.5" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-1.8.5.tgz#4ebe5e75c6dc123dee4afdce6e5fdced21eb93f6" integrity sha512-j1DWIcktw4hRwrv6nWx++5nFH2X64x16MAG2P0Lmi5Dvdfi3I+Jhc7JKJIdAmDJa+5aZ/imHV7dWRPy2Cqjh3A== dependencies: accepts "1.3.3" base64id "1.0.0" cookie "0.3.1" debug "2.3.3" engine.io-parser "1.3.2" ws "~1.1.5" ent@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= error-ex@^1.2.0: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es5-ext@^0.10.35, es5-ext@^0.10.50, es5-ext@~0.10.11, es5-ext@~0.10.14, es5-ext@~0.10.2, 
es5-ext@~0.10.46, es5-ext@~0.10.5, es5-ext@~0.10.6: version "0.10.53" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== dependencies: es6-iterator "~2.0.3" es6-symbol "~3.1.3" next-tick "~1.0.0" es5-ext@~0.9.2: version "0.9.2" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.9.2.tgz#d2e309d1f223b0718648835acf5b8823a8061f8a" integrity sha1-0uMJ0fIjsHGGSINaz1uII6gGH4o= es6-iterator@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-0.1.3.tgz#d6f58b8c4fc413c249b4baa19768f8e4d7c8944e" integrity sha1-1vWLjE/EE8JJtLqhl2j45NfIlE4= dependencies: d "~0.1.1" es5-ext "~0.10.5" es6-symbol "~2.0.1" es6-iterator@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= dependencies: d "1" es5-ext "^0.10.35" es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== dependencies: d "^1.0.1" ext "^1.1.2" es6-symbol@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-2.0.1.tgz#761b5c67cfd4f1d18afb234f691d678682cb3bf3" integrity sha1-dhtcZ8/U8dGK+yNPaR1nhoLLO/M= dependencies: d "~0.1.1" es5-ext "~0.10.5" es6-weak-map@~0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-0.1.4.tgz#706cef9e99aa236ba7766c239c8b9e286ea7d228" integrity sha1-cGzvnpmqI2undmwjnIueKG6n0ig= dependencies: d "~0.1.1" es5-ext "~0.10.6" es6-iterator "~0.1.3" es6-symbol "~2.0.1" escalade@^3.1.1: version "3.1.1" resolved 
"https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@^1.0.0, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.3, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@1.8.x: version "1.8.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" integrity sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg= dependencies: esprima "^2.7.1" estraverse "^1.9.1" esutils "^2.0.2" optionator "^0.8.1" optionalDependencies: source-map "~0.2.0" escodegen@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.1.0.tgz#c663923f6e20aad48d0c0fa49f31c6d4f49360cf" integrity sha1-xmOSP24gqtSNDA+knzHG1PSTYM8= dependencies: esprima "~1.0.4" estraverse "~1.5.0" esutils "~1.0.0" optionalDependencies: source-map "~0.1.30" escope@~0.0.13: version "0.0.16" resolved "https://registry.yarnpkg.com/escope/-/escope-0.0.16.tgz#418c7a0afca721dafe659193fd986283e746538f" integrity sha1-QYx6CvynIdr+ZZGT/Zhig+dGU48= dependencies: estraverse ">= 0.0.2" esprima-fb@3001.1.0-dev-harmony-fb, esprima-fb@^3001.1.0-dev-harmony-fb: version "3001.1.0-dev-harmony-fb" resolved "https://registry.yarnpkg.com/esprima-fb/-/esprima-fb-3001.0001.0000-dev-harmony-fb.tgz#b77d37abcd38ea0b77426bb8bc2922ce6b426411" integrity sha1-t303q8046gt3Qmu4vCkizmtCZBE= esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved 
"https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== "esprima@~ 1.0.2", esprima@~1.0.2, esprima@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/esprima/-/esprima-1.0.4.tgz#9f557e08fc3b4d26ece9dd34f8fbf476b62585ad" integrity sha1-n1V+CPw7TSbs6d00+Pv0drYlha0= esrefactor@~0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/esrefactor/-/esrefactor-0.1.0.tgz#d142795a282339ab81e936b5b7a21b11bf197b13" integrity sha1-0UJ5WigjOauB6Ta1t6IbEb8ZexM= dependencies: escope "~0.0.13" esprima "~1.0.2" estraverse "~0.0.4" "estraverse@>= 0.0.2": version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== estraverse@^1.9.1: version "1.9.3" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= estraverse@~0.0.4: version "0.0.4" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-0.0.4.tgz#01a0932dfee574684a598af5a67c3bf9b6428db2" integrity sha1-AaCTLf7ldGhKWYr1pnw7+bZCjbI= estraverse@~1.5.0: version "1.5.1" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.5.1.tgz#867a3e8e58a9f84618afb6c2ddbcd916b7cbaf71" integrity sha1-hno+jlip+EYYr7bC3bzZFrfLr3E= esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== esutils@~1.0.0: version "1.0.0" 
resolved "https://registry.yarnpkg.com/esutils/-/esutils-1.0.0.tgz#8151d358e20c8acc7fb745e7472c0025fe496570" integrity sha1-gVHTWOIMisx/t0XnRywAJf5JZXA= etag@~1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/etag/-/etag-1.7.0.tgz#03d30b5f67dd6e632d2945d30d6652731a34d5d8" integrity sha1-A9MLX2fdbmMtKUXTDWZScxo01dg= event-emitter@~0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.2.2.tgz#c81e3724eb55407c5a0d5ee3299411f700f54291" integrity sha1-yB43JOtVQHxaDV7jKZQR9wD1QpE= dependencies: es5-ext "~0.9.2" event-emitter@~0.3.4: version "0.3.5" resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" integrity sha1-34xp7vFkeSPHFXuc6DhAYQsCzDk= dependencies: d "1" es5-ext "~0.10.14" event-stream@*: version "4.0.1" resolved "https://registry.yarnpkg.com/event-stream/-/event-stream-4.0.1.tgz#4092808ec995d0dd75ea4580c1df6a74db2cde65" integrity sha512-qACXdu/9VHPBzcyhdOWR5/IahhGMf0roTeZJfzz077GwylcDd90yOHLouhmv7GJ5XzPi6ekaQWd8AvPP2nOvpA== dependencies: duplexer "^0.1.1" from "^0.1.7" map-stream "0.0.7" pause-stream "^0.0.11" split "^1.0.1" stream-combiner "^0.2.2" through "^2.3.8" event-stream@3.3.2: version "3.3.2" resolved "https://registry.yarnpkg.com/event-stream/-/event-stream-3.3.2.tgz#3cc310feb1f28d2f62b2a085d736a9ef566378b8" integrity sha1-PMMQ/rHyjS9isqCF1zap71ZjeLg= dependencies: duplexer "~0.1.1" from "~0" map-stream "~0.1.0" pause-stream "0.0.11" split "0.3" stream-combiner "~0.0.4" through "~2.3.1" event-stream@^3.1.5, event-stream@^3.1.7: version "3.3.5" resolved "https://registry.yarnpkg.com/event-stream/-/event-stream-3.3.5.tgz#e5dd8989543630d94c6cf4d657120341fa31636b" integrity sha512-vyibDcu5JL20Me1fP734QBH/kenBGLZap2n0+XXM7mvuUPzJ20Ydqj1aKcIeMdri1p+PU+4yAKugjN8KCVst+g== dependencies: duplexer "^0.1.1" from "^0.1.7" map-stream "0.0.7" pause-stream "^0.0.11" split "^1.0.1" stream-combiner "^0.2.2" through "^2.3.8" eventemitter3@^4.0.0: 
version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/events/-/events-1.0.2.tgz#75849dcfe93d10fb057c30055afdbd51d06a8e24" integrity sha1-dYSdz+k9EPsFfDAFWv29UdBqjiQ= expand-braces@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/expand-braces/-/expand-braces-0.1.2.tgz#488b1d1d2451cb3d3a6b192cfc030f44c5855fea" integrity sha1-SIsdHSRRyz06axks/AMPRMWFX+o= dependencies: array-slice "^0.2.3" array-unique "^0.2.1" braces "^0.1.2" expand-brackets@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b" integrity sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s= dependencies: is-posix-bracket "^0.1.0" expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= dependencies: debug "^2.3.3" define-property "^0.2.5" extend-shallow "^2.0.1" posix-character-classes "^0.1.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" expand-range@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-0.1.1.tgz#4cb8eda0993ca56fa4f41fc42f3cbb4ccadff044" integrity sha1-TLjtoJk8pW+k9B/ELzy7TMrf8EQ= dependencies: is-number "^0.1.1" repeat-string "^0.2.2" expand-range@^1.8.1: version "1.8.2" resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337" integrity sha1-opnv/TNf4nIeuujiV+x5ZE/IUzc= dependencies: fill-range "^2.1.0" expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" 
integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= dependencies: homedir-polyfill "^1.0.1" ext@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.2.0.tgz#8dd8d2dd21bcced3045be09621fa0cbf73908ba4" integrity sha512-0ccUQK/9e3NreLFg6K6np8aPyRgwycx+oFGtfx1dSp7Wj00Ozw9r05FgBRlzjf2XBM7LAzwgLyDscRrtSU91hA== dependencies: type "^2.0.0" extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= dependencies: assign-symbols "^1.0.0" is-extendable "^1.0.1" extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^0.3.1: version "0.3.2" resolved "https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1" integrity sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE= dependencies: is-extglob "^1.0.0" extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== dependencies: array-unique "^0.3.2" define-property "^1.0.0" expand-brackets "^2.1.4" extend-shallow "^2.0.1" fragment-cache "^0.2.1" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= 
extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fancy-log@^1.1.0: version "1.3.3" resolved "https://registry.yarnpkg.com/fancy-log/-/fancy-log-1.3.3.tgz#dbc19154f558690150a23953a0adbd035be45fc7" integrity sha512-k9oEhlyc0FrVh25qYuSELjr8oxsCoc4/LEZfg2iJJrfEk/tZL9bCoJE47gqAvI2m/AUjluCS4+3I0eTx8n3AEw== dependencies: ansi-gray "^0.1.1" color-support "^1.1.3" parse-node-version "^1.0.0" time-stamp "^1.0.0" fast-deep-equal@^3.1.1: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== fast-json-stable-stringify@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== fast-levenshtein@~2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= faye-websocket@~0.7.2: version "0.7.3" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.7.3.tgz#cc4074c7f4a4dfd03af54dd65c354b135132ce11" integrity sha1-zEB0x/Sk39A69U3WXDVLE1EyzhE= dependencies: websocket-driver ">=0.3.6" filename-regex@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.1.tgz#c1c4b9bee3e09725ddb106b75c1e301fe2f18b26" integrity sha1-wcS5vuPglyXdsQa3XB4wH+LxiyY= fill-range@^2.1.0: version "2.2.4" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.4.tgz#eb1e773abb056dcd8df2bfdf6af59b8b3a936565" integrity 
sha512-cnrcCbj01+j2gTG921VZPnHbjmdAf8oQV/iGeV2kZxGSyfYjjTyY79ErsK1WJWMpw6DaApEX72binqJE+/d+5Q== dependencies: is-number "^2.1.0" isobject "^2.0.0" randomatic "^3.0.0" repeat-element "^1.1.2" repeat-string "^1.5.2" fill-range@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= dependencies: extend-shallow "^2.0.1" is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range "^2.1.0" finalhandler@1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" on-finished "~2.3.0" parseurl "~1.3.3" statuses "~1.5.0" unpipe "~1.0.0" find-index@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/find-index/-/find-index-0.1.1.tgz#675d358b2ca3892d795a1ab47232f8b6e2e0dde4" integrity sha1-Z101iyyjiS15Whq0cjL4tuLg3eQ= find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= dependencies: path-exists "^2.0.0" pinkie-promise "^2.0.0" find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" findup-sync@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-2.0.0.tgz#9326b1488c22d1a6088650a86901b2d9a90a2cbc" integrity sha1-kyaxSIwi0aYIhlCoaQGy2akKLLw= dependencies: detect-file "^1.0.0" is-glob "^3.1.0" micromatch "^3.0.4" resolve-dir "^1.0.1" fined@^1.0.1: version "1.2.0" resolved 
"https://registry.yarnpkg.com/fined/-/fined-1.2.0.tgz#d00beccf1aa2b475d16d423b0238b713a2c4a37b" integrity sha512-ZYDqPLGxDkDhDZBjZBb+oD1+j0rA4E0pXY50eplAAOPg2N/gUBSSk5IM1/QhPfyVo19lJ+CvXpqfvk+b2p/8Ng== dependencies: expand-tilde "^2.0.2" is-plain-object "^2.0.3" object.defaults "^1.1.0" object.pick "^1.2.0" parse-filepath "^1.0.1" first-chunk-stream@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/first-chunk-stream/-/first-chunk-stream-1.0.0.tgz#59bfb50cd905f60d7c394cd3d9acaab4e6ad934e" integrity sha1-Wb+1DNkF9g18OUzT2ayqtOatk04= fixtures2js@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/fixtures2js/-/fixtures2js-0.0.0.tgz#602f7964c5b2963fe73224c8e4a3607d75c1b34f" integrity sha1-YC95ZMWylj/nMiTI5KNgfXXBs08= dependencies: lodash "^2.4.1" minimatch "^0.2.14" flagged-respawn@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/flagged-respawn/-/flagged-respawn-1.0.1.tgz#e7de6f1279ddd9ca9aac8a5971d618606b3aab41" integrity sha512-lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q== follow-redirects@^1.0.0: version "1.13.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.0.tgz#b42e8d93a2a7eea5ed88633676d6597bc8e384db" integrity sha512-aq6gF1BEKje4a9i9+5jimNFIpq4Q1WiwBToeRK5NvZBd/TRsmW8BsJfOEGkr76TbOyPVD3OVDN910EcUNtRYEA== for-in@^1.0.1, for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= for-own@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.5.tgz#5265c681a4f294dabbf17c9509b6763aa84510ce" integrity sha1-UmXGgaTylNq78XyVCbZ2OqhFEM4= dependencies: for-in "^1.0.1" for-own@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b" integrity sha1-xjMy9BXO3EsE2/5wz4NklMU8tEs= dependencies: for-in "^1.0.1" forever-agent@~0.5.0: 
version "0.5.2" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.5.2.tgz#6d0e09c4921f94a27f63d3b49c5feff1ea4c5130" integrity sha1-bQ4JxJIflKJ/Y9O0nF/v8epMUTA= forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= fork-stream@^0.0.4: version "0.0.4" resolved "https://registry.yarnpkg.com/fork-stream/-/fork-stream-0.0.4.tgz#db849fce77f6708a5f8f386ae533a0907b54ae70" integrity sha1-24Sfznf2cIpfjzhq5TOgkHtUrnA= form-data@~0.1.0: version "0.1.4" resolved "https://registry.yarnpkg.com/form-data/-/form-data-0.1.4.tgz#91abd788aba9702b1aabfa8bc01031a2ac9e3b12" integrity sha1-kavXiKupcCsaq/qLwBAxoqyeOxI= dependencies: async "~0.9.0" combined-stream "~0.0.4" mime "~1.2.11" form-data@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/form-data/-/form-data-0.2.0.tgz#26f8bc26da6440e299cbdcfb69035c4f77a6e466" integrity sha1-Jvi8JtpkQOKZy9z7aQNcT3em5GY= dependencies: async "~0.9.0" combined-stream "~0.0.4" mime-types "~2.0.3" form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= dependencies: map-cache "^0.2.2" fresh@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.3.0.tgz#651f838e22424e7566de161d8358caa199f83d4f" integrity sha1-ZR+DjiJCTnVm3hYdg1jKoZn4PU8= from@^0.1.7, from@~0: version "0.1.7" resolved "https://registry.yarnpkg.com/from/-/from-0.1.7.tgz#83c60afc58b9c56997007ed1a768b3ab303a44fe" 
integrity sha1-g8YK/Fi5xWmXAH7Rp2izqzA6RP4= fs-access@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" integrity sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o= dependencies: null-check "^1.0.0" fs-minipass@^1.2.5: version "1.2.7" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== dependencies: minipass "^2.6.0" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.0.0: version "1.2.9" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: nan "^2.12.1" node-pre-gyp "^0.12.0" fstream-ignore@~0.0.6: version "0.0.10" resolved "https://registry.yarnpkg.com/fstream-ignore/-/fstream-ignore-0.0.10.tgz#b10f8f522cc55415f80b41f7d3a32e6cba254e8c" integrity sha1-sQ+PUizFVBX4C0H306MubLolTow= dependencies: fstream "~0.1.17" inherits "2" minimatch "^0.3.0" fstream@^1.0.0, fstream@^1.0.12: version "1.0.12" resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.12.tgz#4e8ba8ee2d48be4f7d0de505455548eae5932045" integrity sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg== dependencies: graceful-fs "^4.1.2" inherits "~2.0.0" mkdirp ">=0.5 0" rimraf "2" fstream@~0.1.17, fstream@~0.1.22, fstream@~0.1.28: version "0.1.31" resolved "https://registry.yarnpkg.com/fstream/-/fstream-0.1.31.tgz#7337f058fbbbbefa8c9f561a28cab0849202c988" integrity sha1-czfwWPu7vvqMn1YaKMqwhJICyYg= dependencies: graceful-fs "~3.0.2" inherits "~2.0.0" mkdirp "0.5" rimraf "2" function-bind@^1.1.1: 
version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" has-unicode "^2.0.0" object-assign "^4.1.0" signal-exit "^3.0.0" string-width "^1.0.1" strip-ansi "^3.0.1" wide-align "^1.1.0" gaze@^0.5.1: version "0.5.2" resolved "https://registry.yarnpkg.com/gaze/-/gaze-0.5.2.tgz#40b709537d24d1d45767db5a908689dfe69ac44f" integrity sha1-QLcJU30k0dRXZ9takIaJ3+aaxE8= dependencies: globule "~0.1.0" gaze@^1.0.0: version "1.1.3" resolved "https://registry.yarnpkg.com/gaze/-/gaze-1.1.3.tgz#c441733e13b927ac8c0ff0b4c3b033f28812924a" integrity sha512-BRdNm8hbWzFzWHERTrejLqwHDfS4GibPoq5wjTPIoJHoBtKGPg3xAFfxmM+9ztbXelxcf2hwQcaz1PtmFeue8g== dependencies: globule "^1.0.0" get-caller-file@^2.0.1: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-stdin@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-base@^0.3.0: 
version "0.3.0" resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4" integrity sha1-27Fk9iIbHAscz4Kuoyi0l98Oo8Q= dependencies: glob-parent "^2.0.0" is-glob "^2.0.0" glob-parent@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28" integrity sha1-gTg9ctsFT8zPUzbaqQLxgvbtuyg= dependencies: is-glob "^2.0.0" glob-stream@^3.1.5: version "3.1.18" resolved "https://registry.yarnpkg.com/glob-stream/-/glob-stream-3.1.18.tgz#9170a5f12b790306fdfe598f313f8f7954fd143b" integrity sha1-kXCl8St5Awb9/lmPMT+PeVT9FDs= dependencies: glob "^4.3.1" glob2base "^0.0.12" minimatch "^2.0.1" ordered-read-streams "^0.1.0" through2 "^0.6.1" unique-stream "^1.0.0" glob-watcher@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/glob-watcher/-/glob-watcher-0.0.6.tgz#b95b4a8df74b39c83298b0c05c978b4d9a3b710b" integrity sha1-uVtKjfdLOcgymLDAXJeLTZo7cQs= dependencies: gaze "^0.5.1" glob2base@^0.0.12: version "0.0.12" resolved "https://registry.yarnpkg.com/glob2base/-/glob2base-0.0.12.tgz#9d419b3e28f12e83a362164a277055922c9c0d56" integrity sha1-nUGbPijxLoOjYhZKJ3BVkiycDVY= dependencies: find-index "^0.1.1" glob@^4.3.1: version "4.5.3" resolved "https://registry.yarnpkg.com/glob/-/glob-4.5.3.tgz#c6cb73d3226c1efef04de3c56d012f03377ee15f" integrity sha1-xstz0yJsHv7wTePFbQEvAzd+4V8= dependencies: inflight "^1.0.4" inherits "2" minimatch "^2.0.1" once "^1.3.0" glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" integrity sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E= dependencies: inflight "^1.0.4" inherits "2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^6.0.1: version "6.0.4" resolved "https://registry.yarnpkg.com/glob/-/glob-6.0.4.tgz#0f08860f6a155127b2fadd4f9ce24b1aab6e4d22" integrity sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI= dependencies: inflight "^1.0.4" inherits 
"2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^7.0.0, glob@^7.0.3, glob@^7.1.2, glob@^7.1.3, glob@~7.1.1: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" glob@~3.1.21: version "3.1.21" resolved "https://registry.yarnpkg.com/glob/-/glob-3.1.21.tgz#d29e0a055dea5138f4d07ed40e8982e83c2066cd" integrity sha1-0p4KBV3qUTj00H7UDomC6DwgZs0= dependencies: graceful-fs "~1.2.0" inherits "1" minimatch "~0.2.11" glob@~3.2.8: version "3.2.11" resolved "https://registry.yarnpkg.com/glob/-/glob-3.2.11.tgz#4a973f635b9190f715d10987d5c00fd2815ebe3d" integrity sha1-Spc/Y1uRkPcV0QmH1cAP0oFevj0= dependencies: inherits "2" minimatch "0.3" glob@~4.0.2: version "4.0.6" resolved "https://registry.yarnpkg.com/glob/-/glob-4.0.6.tgz#695c50bdd4e2fb5c5d370b091f388d3707e291a7" integrity sha1-aVxQvdTi+1xdNwsJHziNNwfikac= dependencies: graceful-fs "^3.0.2" inherits "2" minimatch "^1.0.0" once "^1.3.0" global-modules@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" is-windows "^1.0.1" resolve-dir "^1.0.0" global-prefix@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= dependencies: expand-tilde "^2.0.2" homedir-polyfill "^1.0.1" ini "^1.3.4" is-windows "^1.0.1" which "^1.2.14" globals@^11.1.0: version "11.12.0" resolved 
"https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globule@^1.0.0: version "1.3.2" resolved "https://registry.yarnpkg.com/globule/-/globule-1.3.2.tgz#d8bdd9e9e4eef8f96e245999a5dee7eb5d8529c4" integrity sha512-7IDTQTIu2xzXkT+6mlluidnWo+BypnbSoEVVQCGfzqnl5Ik8d3e1d4wycb8Rj9tWW+Z39uPWsdlquqiqPCd/pA== dependencies: glob "~7.1.1" lodash "~4.17.10" minimatch "~3.0.2" globule@~0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/globule/-/globule-0.1.0.tgz#d9c8edde1da79d125a151b79533b978676346ae5" integrity sha1-2cjt3h2nnRJaFRt5UzuXhnY0auU= dependencies: glob "~3.1.21" lodash "~1.0.1" minimatch "~0.2.11" glogg@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/glogg/-/glogg-1.0.2.tgz#2d7dd702beda22eb3bffadf880696da6d846313f" integrity sha512-5mwUoSuBk44Y4EshyiqcH95ZntbDdTQqA3QYSrxmzj28Ai0vXBGMH1ApSANH14j2sIRtqCEyg6PfsuP7ElOEDA== dependencies: sparkles "^1.0.0" got@^3.2.0: version "3.3.1" resolved "https://registry.yarnpkg.com/got/-/got-3.3.1.tgz#e5d0ed4af55fc3eef4d56007769d98192bcb2eca" integrity sha1-5dDtSvVfw+701WAHdp2YGSvLLso= dependencies: duplexify "^3.2.0" infinity-agent "^2.0.0" is-redirect "^1.0.0" is-stream "^1.0.0" lowercase-keys "^1.0.0" nested-error-stacks "^1.0.0" object-assign "^3.0.0" prepend-http "^1.0.0" read-all-stream "^3.0.0" timed-out "^2.0.0" graceful-fs@^3.0.0, graceful-fs@^3.0.1, graceful-fs@^3.0.2, graceful-fs@~3.0.0, graceful-fs@~3.0.1, graceful-fs@~3.0.2: version "3.0.12" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-3.0.12.tgz#0034947ce9ed695ec8ab0b854bc919e82b1ffaef" integrity sha512-J55gaCS4iTTJfTXIxSVw3EMQckcqkpdRv3IR7gu6sq0+tbC363Zx6KH/SEwXASK9JRbhyZmVjJEVJIOxYsB3Qg== dependencies: natives "^1.1.3" graceful-fs@^4.1.11: version "4.2.3" resolved 
"https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== graceful-fs@^4.1.2: version "4.2.4" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== graceful-fs@~1.2.0: version "1.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-1.2.3.tgz#15a4806a57547cb2d2dbf27f42e89a8c3451b364" integrity sha1-FaSAaldUfLLS2/J/QuiajDRRs2Q= graceful-fs@~2.0.0, graceful-fs@~2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-2.0.3.tgz#7cd2cdb228a4a3f36e95efa6cc142de7d1a136d0" integrity sha1-fNLNsiiko/Nule+mzBQt59GhNtA= guanlecoja@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/guanlecoja/-/guanlecoja-1.1.0.tgz#39b9dc5a873d86a8607f04405c254a3cff667efa" integrity sha512-G2ZN3F2/kFFEZ4uTx3WsJITBD1EUOoQRMYvvi2FiaO/RqPhJNMPQB6faV2CY8X4UvNprjNqAZVO0eGSG1vtOOQ== dependencies: "@babel/core" "^7.4.3" "@babel/plugin-transform-runtime" "^7.4.3" "@babel/preset-env" "^7.4.3" "@babel/runtime" "^7.4.3" babel-plugin-angularjs-annotate "^0.10.0" coffee-script "~1.10.0" gulp "~3.9.0" gulp-angular-templatecache "^1.7.0" gulp-babel "^8.0.0" gulp-bower-deps "~0.3.2" gulp-browserify "^0.5.0" gulp-cached "~1.1.0" gulp-clean-css "^2.0.13" gulp-coffee "~2.3.1" gulp-concat "~2.6.0" gulp-fixtures2js "0.0.1" gulp-footer "1.x" gulp-header "1.x" gulp-help "~1.6.1" gulp-if "~2.0.1" gulp-less "~1.3.2" gulp-livereload "~3.8.0" gulp-ng-annotate "~1.1.0" gulp-ng-classify "~4.0.0" gulp-pug "^3.1.0" gulp-remember "~0.3.0" gulp-rename "~1.2.0" gulp-replace "^0.5.4" gulp-sass "^3.1.0" gulp-sourcemaps "~1.6.0" gulp-uglify-es "^1.0.4" gulp-util "~3.0.6" gulp-wrap "0.8.0" jasmine-core "^2.4.1" karma "~0.13.9" karma-chrome-launcher 
"~2.1.0" karma-coffee-preprocessor "~0.3.0" karma-coverage "^0.5.1" karma-jasmine "~0.3.6" karma-sourcemap-loader "^0.3.5" lazypipe "^1.0.1" lodash "^3.10.0" mime "^1.2.11" minimist "~1.2.0" rimraf "~2.4.3" run-sequence "~1.1.2" serve-static "~1.10.0" gulp-angular-templatecache@^1.7.0: version "1.9.1" resolved "https://registry.yarnpkg.com/gulp-angular-templatecache/-/gulp-angular-templatecache-1.9.1.tgz#60f631e97aaaa765d942e37042e4bfbb1a5df244" integrity sha1-YPYx6Xqqp2XZQuNwQuS/uxpd8kQ= dependencies: event-stream "3.3.2" gulp-concat "2.6.0" gulp-footer "1.0.5" gulp-header "1.8.2" gulp-util "3.0.7" js-string-escape "~1.0.0" gulp-babel@^8.0.0: version "8.0.0" resolved "https://registry.yarnpkg.com/gulp-babel/-/gulp-babel-8.0.0.tgz#e0da96f4f2ec4a88dd3a3030f476e38ab2126d87" integrity sha512-oomaIqDXxFkg7lbpBou/gnUkX51/Y/M2ZfSjL2hdqXTAlSWZcgZtd2o0cOH0r/eE8LWD0+Q/PsLsr2DKOoqToQ== dependencies: plugin-error "^1.0.1" replace-ext "^1.0.0" through2 "^2.0.0" vinyl-sourcemaps-apply "^0.2.0" gulp-bower-deps@~0.3.2: version "0.3.3" resolved "https://registry.yarnpkg.com/gulp-bower-deps/-/gulp-bower-deps-0.3.3.tgz#2109fe3fbee35069f67312bb977e591d890296cd" integrity sha512-/1b9KokVEAlQk3YmXTE7QQ6vLGO8BsPpywvoXbSS2mjSYnd0KKkjkKmkpKHt/yx30rijl19nhFjeN36WRNfi4Q== dependencies: bower "1.3.8" bower-logger "^0.2.2" gulp-util "~3.0.0" mout "^0.9.1" q "^1.0.1" through2 "~0.5.1" gulp-browserify@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/gulp-browserify/-/gulp-browserify-0.5.1.tgz#820108ac2554a954adb8be17d23958b0c04be083" integrity sha1-ggEIrCVUqVStuL4X0jlYsMBL4IM= dependencies: browserify "3.x" browserify-shim "~2.0.10" gulp-util "~2.2.5" readable-stream "~1.1.10" through2 "~0.4.0" gulp-cached@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/gulp-cached/-/gulp-cached-1.1.1.tgz#fe7cd4f87f37601e6073cfedee5c2bdaf8b6acce" integrity sha1-/nzU+H83YB5gc8/t7lwr2vi2rM4= dependencies: lodash.defaults "^4.2.0" through2 "^2.0.1" gulp-clean-css@^2.0.13: version 
"2.4.0" resolved "https://registry.yarnpkg.com/gulp-clean-css/-/gulp-clean-css-2.4.0.tgz#2ae48109fe83ccc967ff5ad53c044949a4863b36" integrity sha1-KuSBCf6DzMln/1rVPARJSaSGOzY= dependencies: clean-css "^4.0.4" gulp-util "^3.0.8" object-assign "^4.1.1" through2 "^2.0.3" vinyl-sourcemaps-apply "^0.2.1" gulp-coffee@~2.3.1: version "2.3.5" resolved "https://registry.yarnpkg.com/gulp-coffee/-/gulp-coffee-2.3.5.tgz#8c64e9ac884e1bab4e20b66ac7c386a816859041" integrity sha512-PbgPGZVyYFnBTYtfYkVN6jcK8Qsuh3BxycPzvu8y5lZroCw3/x1m25KeyEDX110KsVLDmJxoULjscR21VEN4wA== dependencies: coffeescript "^1.10.0" gulp-util "^3.0.2" merge "^1.2.0" through2 "^2.0.1" vinyl-sourcemaps-apply "^0.2.1" gulp-concat@2.6.0: version "2.6.0" resolved "https://registry.yarnpkg.com/gulp-concat/-/gulp-concat-2.6.0.tgz#585cfb115411f348773131140566b6a81c69cb91" integrity sha1-WFz7EVQR80h3MTEUBWa2qBxpy5E= dependencies: concat-with-sourcemaps "^1.0.0" gulp-util "^3.0.1" through2 "^0.6.3" gulp-concat@~2.6.0: version "2.6.1" resolved "https://registry.yarnpkg.com/gulp-concat/-/gulp-concat-2.6.1.tgz#633d16c95d88504628ad02665663cee5a4793353" integrity sha1-Yz0WyV2IUEYorQJmVmPO5aR5M1M= dependencies: concat-with-sourcemaps "^1.0.0" through2 "^2.0.0" vinyl "^2.0.0" gulp-fixtures2js@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/gulp-fixtures2js/-/gulp-fixtures2js-0.0.1.tgz#4a3cbb4404bdfab92f7d2b6e09de2bfd0ae78506" integrity sha1-Sjy7RAS9+rkvfStuCd4r/QrnhQY= dependencies: fixtures2js "0.0.0" gulp-util "^2.2.14" lodash "^2.4.1" through "^2.3.4" gulp-footer@1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/gulp-footer/-/gulp-footer-1.0.5.tgz#e84ca777e266be7bbc2d45d2df0e7eba8dfa3e54" integrity sha1-6Eynd+Jmvnu8LUXS3w5+uo36PlQ= dependencies: event-stream "*" gulp-util "*" lodash.assign "*" gulp-footer@1.x: version "1.1.2" resolved "https://registry.yarnpkg.com/gulp-footer/-/gulp-footer-1.1.2.tgz#7fe28324ec67e3d618d31e0f5ea2ee5b454f6877" integrity 
sha512-G6Z8DNNeIhq1KU++7kZnbuwbvCubkUMOVADOt+0qTHSIqjy2OPo1W4bu4n1aE9JGZncuRTvVQrYecGx2uazlpg== dependencies: event-stream "*" lodash._reescape "^3.0.0" lodash._reevaluate "^3.0.0" lodash._reinterpolate "^3.0.0" lodash.template "^3.6.2" gulp-header@1.8.2: version "1.8.2" resolved "https://registry.yarnpkg.com/gulp-header/-/gulp-header-1.8.2.tgz#3ab222f53719d2d03d81d9134252fe7d52425aa4" integrity sha1-OrIi9TcZ0tA9gdkTQlL+fVJCWqQ= dependencies: concat-with-sourcemaps "*" gulp-util "*" object-assign "*" through2 "^2.0.0" gulp-header@1.x: version "1.8.12" resolved "https://registry.yarnpkg.com/gulp-header/-/gulp-header-1.8.12.tgz#ad306be0066599127281c4f8786660e705080a84" integrity sha512-lh9HLdb53sC7XIZOYzTXM4lFuXElv3EVkSDhsd7DoJBj7hm+Ni7D3qYbb+Rr8DuM8nRanBvkVO9d7askreXGnQ== dependencies: concat-with-sourcemaps "*" lodash.template "^4.4.0" through2 "^2.0.0" gulp-help@~1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/gulp-help/-/gulp-help-1.6.1.tgz#261db186e18397fef3f6a2c22e9c315bfa88ae0c" integrity sha1-Jh2xhuGDl/7z9qLCLpwxW/qIrgw= dependencies: chalk "^1.0.0" object-assign "^3.0.0" gulp-if@~2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/gulp-if/-/gulp-if-2.0.2.tgz#a497b7e7573005041caa2bc8b7dda3c80444d629" integrity sha1-pJe351cwBQQcqivIt92jyARE1ik= dependencies: gulp-match "^1.0.3" ternary-stream "^2.0.1" through2 "^2.0.1" gulp-less@~1.3.2: version "1.3.9" resolved "https://registry.yarnpkg.com/gulp-less/-/gulp-less-1.3.9.tgz#e129750f236693ead5b522af311cc33eeff1910e" integrity sha1-4Sl1DyNmk+rVtSKvMRzDPu/xkQ4= dependencies: convert-source-map "^0.4.0" gulp-util "^3.0.0" less "^1.7.4" lodash.defaults "^2.4.1" through2 "^0.5.1" vinyl-sourcemaps-apply "^0.1.1" gulp-livereload@~3.8.0: version "3.8.1" resolved "https://registry.yarnpkg.com/gulp-livereload/-/gulp-livereload-3.8.1.tgz#00f744b2d749d3e9e3746589c8a44acac779b50f" integrity sha1-APdEstdJ0+njdGWJyKRKysd5tQ8= dependencies: chalk "^0.5.1" debug "^2.1.0" event-stream "^3.1.7" gulp-util 
"^3.0.2" lodash.assign "^3.0.0" mini-lr "^0.1.8" gulp-match@^1.0.3: version "1.1.0" resolved "https://registry.yarnpkg.com/gulp-match/-/gulp-match-1.1.0.tgz#552b7080fc006ee752c90563f9fec9d61aafdf4f" integrity sha512-DlyVxa1Gj24DitY2OjEsS+X6tDpretuxD6wTfhXE/Rw2hweqc1f6D/XtsJmoiCwLWfXgR87W9ozEityPCVzGtQ== dependencies: minimatch "^3.0.3" gulp-ng-annotate@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/gulp-ng-annotate/-/gulp-ng-annotate-1.1.0.tgz#5e801be073d8f6d343c4ff65b71717b90fac13ce" integrity sha1-XoAb4HPY9tNDxP9ltxcXuQ+sE84= dependencies: bufferstreams "~0.0.2" gulp-util "^3.0.6" merge "^1.1.3" ng-annotate "^1.0.0" through2 "^0.4.1" vinyl-sourcemaps-apply "^0.1.1" gulp-ng-classify@~4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/gulp-ng-classify/-/gulp-ng-classify-4.0.1.tgz#3019161127c35c317ea08dc2a65ea35b592dbfd7" integrity sha1-MBkWESfDXDF+oI3Cpl6jW1ktv9c= dependencies: coffee-script "^1.7.1" event-stream "^3.1.5" ng-classify "^4.0.0" gulp-pug@^3.1.0: version "3.3.0" resolved "https://registry.yarnpkg.com/gulp-pug/-/gulp-pug-3.3.0.tgz#46982c1439c094c360542ed8ba5c882d3bb711cf" integrity sha1-RpgsFDnAlMNgVC7YulyILTu3Ec8= dependencies: gulp-util "^3.0.2" object-assign "^4.1.0" pug ">=2.0.0-alpha <3" through2 "^2.0.0" gulp-remember@~0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/gulp-remember/-/gulp-remember-0.3.1.tgz#5776b6f64c5a1c5c4d4555406723ec8e2b0407e7" integrity sha1-V3a29kxaHFxNRVVAZyPsjisEB+c= dependencies: gulp-util "^3.0.1" through2 "^0.5.0" gulp-rename@~1.2.0: version "1.2.3" resolved "https://registry.yarnpkg.com/gulp-rename/-/gulp-rename-1.2.3.tgz#37b75298e9d3e6c0fe9ac4eac13ce3be5434646b" integrity sha512-CmdPM0BjJ105QCX1fk+j7NGhiN/1rCl9HLGss+KllBS/tdYadpjTxqdKyh/5fNV+M3yjT1MFz5z93bXdrTyzAw== gulp-replace@^0.5.4: version "0.5.4" resolved "https://registry.yarnpkg.com/gulp-replace/-/gulp-replace-0.5.4.tgz#69a67914bbd13c562bff14f504a403796aa0daa9" integrity sha1-aaZ5FLvRPFYr/xT1BKQDeWqg2qk= dependencies: 
istextorbinary "1.0.2" readable-stream "^2.0.1" replacestream "^4.0.0" gulp-sass@^3.1.0: version "3.2.1" resolved "https://registry.yarnpkg.com/gulp-sass/-/gulp-sass-3.2.1.tgz#2e3688a96fd8be1c0c01340750c191b2e79fab94" integrity sha512-UATbRpSDsyXCnpYSPBUEvdvtSEzksJs7/oQ0CujIpzKqKrO6vlnYwhX2UTsGrf4rNLwqlSSaM271It0uHYvJ3Q== dependencies: gulp-util "^3.0" lodash.clonedeep "^4.3.2" node-sass "^4.8.3" through2 "^2.0.0" vinyl-sourcemaps-apply "^0.2.0" gulp-sourcemaps@~1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/gulp-sourcemaps/-/gulp-sourcemaps-1.6.0.tgz#b86ff349d801ceb56e1d9e7dc7bbcb4b7dee600c" integrity sha1-uG/zSdgBzrVuHZ59x7vLS33uYAw= dependencies: convert-source-map "^1.1.1" graceful-fs "^4.1.2" strip-bom "^2.0.0" through2 "^2.0.0" vinyl "^1.0.0" gulp-uglify-es@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/gulp-uglify-es/-/gulp-uglify-es-1.0.4.tgz#59ee0d5ea98c1e09c6eaa58c8b018a6ad33f48d4" integrity sha512-UMRufZsBmQizCYpftutaiVoLswpbzFEfY90EJLU4YlTgculeHnanb794s88TMd5tpCZVC638sAX6JrLVYTP/Wg== dependencies: o-stream "^0.2.2" plugin-error "^1.0.1" terser "^3.7.5" vinyl "^2.1.0" vinyl-sourcemaps-apply "^0.2.1" gulp-util@*, gulp-util@^3.0, gulp-util@^3.0.0, gulp-util@^3.0.1, gulp-util@^3.0.2, gulp-util@^3.0.6, gulp-util@^3.0.8, gulp-util@~3.0.0, gulp-util@~3.0.1, gulp-util@~3.0.6: version "3.0.8" resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-3.0.8.tgz#0054e1e744502e27c04c187c3ecc505dd54bbb4f" integrity sha1-AFTh50RQLifATBh8PsxQXdVLu08= dependencies: array-differ "^1.0.0" array-uniq "^1.0.2" beeper "^1.0.0" chalk "^1.0.0" dateformat "^2.0.0" fancy-log "^1.1.0" gulplog "^1.0.0" has-gulplog "^0.1.0" lodash._reescape "^3.0.0" lodash._reevaluate "^3.0.0" lodash._reinterpolate "^3.0.0" lodash.template "^3.0.0" minimist "^1.1.0" multipipe "^0.1.2" object-assign "^3.0.0" replace-ext "0.0.1" through2 "^2.0.0" vinyl "^0.5.0" gulp-util@3.0.7: version "3.0.7" resolved 
"https://registry.yarnpkg.com/gulp-util/-/gulp-util-3.0.7.tgz#78925c4b8f8b49005ac01a011c557e6218941cbb" integrity sha1-eJJcS4+LSQBawBoBHFV+YhiUHLs= dependencies: array-differ "^1.0.0" array-uniq "^1.0.2" beeper "^1.0.0" chalk "^1.0.0" dateformat "^1.0.11" fancy-log "^1.1.0" gulplog "^1.0.0" has-gulplog "^0.1.0" lodash._reescape "^3.0.0" lodash._reevaluate "^3.0.0" lodash._reinterpolate "^3.0.0" lodash.template "^3.0.0" minimist "^1.1.0" multipipe "^0.1.2" object-assign "^3.0.0" replace-ext "0.0.1" through2 "^2.0.0" vinyl "^0.5.0" gulp-util@^2.2.14, gulp-util@~2.2.5: version "2.2.20" resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-2.2.20.tgz#d7146e5728910bd8f047a6b0b1e549bc22dbd64c" integrity sha1-1xRuVyiRC9jwR6awseVJvCLb1kw= dependencies: chalk "^0.5.0" dateformat "^1.0.7-1.2.3" lodash._reinterpolate "^2.4.1" lodash.template "^2.4.1" minimist "^0.2.0" multipipe "^0.1.0" through2 "^0.5.0" vinyl "^0.2.1" gulp-wrap@0.8.0: version "0.8.0" resolved "https://registry.yarnpkg.com/gulp-wrap/-/gulp-wrap-0.8.0.tgz#c41ce89a374947788b78c9e67f33bbe838c69b86" integrity sha1-xBzomjdJR3iLeMnmfzO76DjGm4Y= dependencies: event-stream "*" gulp-util "~3.0.1" js-yaml "^3.2.3" lodash.template "~2.4.1" node.extend "~1.1.2" gulp@3.9.0: version "3.9.0" resolved "https://registry.yarnpkg.com/gulp/-/gulp-3.9.0.tgz#cf1fba4cb558bb8c6ae6c9613f583ae2620d214a" integrity sha1-zx+6TLVYu4xq5slhP1g64mINIUo= dependencies: archy "^1.0.0" chalk "^1.0.0" deprecated "^0.0.1" gulp-util "^3.0.0" interpret "^0.6.2" liftoff "^2.1.0" minimist "^1.1.0" orchestrator "^0.3.0" pretty-hrtime "^1.0.0" semver "^4.1.0" tildify "^1.0.0" v8flags "^2.0.2" vinyl-fs "^0.3.0" gulp@~3.9.0: version "3.9.1" resolved "https://registry.yarnpkg.com/gulp/-/gulp-3.9.1.tgz#571ce45928dd40af6514fc4011866016c13845b4" integrity sha1-VxzkWSjdQK9lFPxAEYZgFsE4RbQ= dependencies: archy "^1.0.0" chalk "^1.0.0" deprecated "^0.0.1" gulp-util "^3.0.0" interpret "^1.0.0" liftoff "^2.1.0" minimist "^1.1.0" orchestrator "^0.3.0" 
pretty-hrtime "^1.0.0" semver "^4.1.0" tildify "^1.0.0" v8flags "^2.0.2" vinyl-fs "^0.3.0" gulplog@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/gulplog/-/gulplog-1.0.0.tgz#e28c4d45d05ecbbed818363ce8f9c5926229ffe5" integrity sha1-4oxNRdBey77YGDY86PnFkmIp/+U= dependencies: glogg "^1.0.0" handlebars@^4.0.1: version "4.5.3" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.5.3.tgz#5cf75bd8714f7605713511a56be7c349becb0482" integrity sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA== dependencies: neo-async "^2.6.0" optimist "^0.6.1" source-map "^0.6.1" optionalDependencies: uglify-js "^3.1.4" handlebars@~1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-1.3.0.tgz#9e9b130a93e389491322d975cf3ec1818c37ce34" integrity sha1-npsTCpPjiUkTItl1zz7BgYw3zjQ= dependencies: optimist "~0.3" optionalDependencies: uglify-js "~2.3" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.3: version "5.1.5" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== dependencies: ajv "^6.12.3" har-schema "^2.0.0" has-ansi@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-0.1.0.tgz#84f265aae8c0e6a88a12d7022894b7568894c62e" integrity sha1-hPJlqujA5qiKEtcCKJS3VoiUxi4= dependencies: ansi-regex "^0.2.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-binary@0.1.7: version "0.1.7" resolved 
"https://registry.yarnpkg.com/has-binary/-/has-binary-0.1.7.tgz#68e61eb16210c9545a0a5cce06a873912fe1e68c" integrity sha1-aOYesWIQyVRaClzOBqhzkS/h5ow= dependencies: isarray "0.0.1" has-color@~0.1.0: version "0.1.7" resolved "https://registry.yarnpkg.com/has-color/-/has-color-0.1.7.tgz#67144a5260c34fc3cca677d041daf52fe7b78b2f" integrity sha1-ZxRKUmDDT8PMpnfQQdr1L+e3iy8= has-cors@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk= has-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-flag@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== has-gulplog@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/has-gulplog/-/has-gulplog-0.1.0.tgz#6414c82913697da51590397dafb12f22967811ce" integrity sha1-ZBTIKRNpfaUVkDl9r7EvIpZ4Ec4= dependencies: sparkles "^1.0.0" has-symbols@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= has-value@^0.3.1: version "0.3.1" resolved 
"https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= dependencies: get-value "^2.0.3" has-values "^0.1.4" isobject "^2.0.0" has-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: get-value "^2.0.6" has-values "^1.0.0" isobject "^3.0.0" has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= has-values@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= dependencies: is-number "^3.0.0" kind-of "^4.0.0" has@^1.0.1, has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hawk@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/hawk/-/hawk-1.1.1.tgz#87cd491f9b46e4e2aeaca335416766885d2d1ed9" integrity sha1-h81JH5tG5OKurKM1QWdmiF0tHtk= dependencies: boom "0.4.x" cryptiles "0.2.x" hoek "0.9.x" sntp "0.2.x" hawk@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/hawk/-/hawk-1.0.0.tgz#b90bb169807285411da7ffcb8dd2598502d3b52d" integrity sha1-uQuxaYByhUEdp//LjdJZhQLTtS0= dependencies: boom "0.4.x" cryptiles "0.2.x" hoek "0.9.x" sntp "0.2.x" hoek@0.9.x: version "0.9.1" resolved "https://registry.yarnpkg.com/hoek/-/hoek-0.9.1.tgz#3d322462badf07716ea7eb85baf88079cddce505" integrity sha1-PTIkYrrfB3Fup+uFuviAec3c5QU= homedir-polyfill@^1.0.1: version "1.0.3" resolved 
"https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== http-browserify@~1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/http-browserify/-/http-browserify-1.3.2.tgz#b562c34479349a690d7a6597df495aefa8c604f5" integrity sha1-tWLDRHk0mmkNemWX30la76jGBPU= dependencies: Base64 "~0.2.0" inherits "~2.0.1" http-errors@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" http-errors@~1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.3.1.tgz#197e22cdebd4198585e8694ef6786197b91ed942" integrity sha1-GX4izevUGYWF6GlO9nhhl7ke2UI= dependencies: inherits "~2.0.1" statuses "1" "http-parser-js@>=0.4.0 <0.4.11": version "0.4.10" resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4" integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q= http-proxy@^1.13.0: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" requires-port "^1.0.0" http-signature@~0.10.0: version 
"0.10.1" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-0.10.1.tgz#4fbdac132559aa8323121e540779c0a012b27e66" integrity sha1-T72sEyVZqoMjEh5UB3nAoBKyfmY= dependencies: asn1 "0.1.11" assert-plus "^0.1.5" ctype "0.5.3" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-browserify@~0.0.0: version "0.0.1" resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-0.0.1.tgz#3f91365cabe60b77ed0ebba24b454e3e09d95a82" integrity sha1-P5E2XKvmC3ftDruiS0VOPgnZWoI= iconv-lite@0.4.13: version "0.4.13" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.13.tgz#1f88aba4ab0b1508e8312acc39345f36e992e2f2" integrity sha1-H4irpKsLFQjoMSrMOTRfNumS4vI= iconv-lite@0.4.24, iconv-lite@^0.4.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" ieee754@~1.1.1: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== ignore-walk@^3.0.1: version "3.0.3" resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== dependencies: minimatch "^3.0.4" in-publish@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/in-publish/-/in-publish-2.0.1.tgz#948b1a535c8030561cea522f73f78f4be357e00c" integrity 
sha512-oDM0kUSNFC31ShNxHKUyfZKy8ZeXZBWMjMdZHKLOk13uvT27VTL/QzRGfRUcevJhpkZAvlhPYuXkF7eNWrtyxQ== indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= dependencies: repeating "^2.0.0" indexof@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= infinity-agent@^2.0.0: version "2.0.3" resolved "https://registry.yarnpkg.com/infinity-agent/-/infinity-agent-2.0.3.tgz#45e0e2ff7a9eb030b27d62b74b3744b7a7ac4216" integrity sha1-ReDi/3qesDCyfWK3SzdEt6esQhY= inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@1: version "1.0.2" resolved "https://registry.yarnpkg.com/inherits/-/inherits-1.0.2.tgz#ca4309dadee6b54cc0b8d247e8d7c7a0975bdc9b" integrity sha1-ykMJ2t7mtUzAuNJH6NfHoJdb3Js= inherits@2, inherits@^2.0.1, inherits@~2.0.0, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.4, ini@~1.3.0: version "1.3.7" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84" integrity 
sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ== inline-source-map@~0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/inline-source-map/-/inline-source-map-0.3.1.tgz#a528b514e689fce90db3089e870d92f527acb5eb" integrity sha1-pSi1FOaJ/OkNswiehw2S9Sestes= dependencies: source-map "~0.3.0" inquirer@~0.4.0: version "0.4.1" resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-0.4.1.tgz#6cf74eb1a347f97a1a207bea8ad1c987d0ff4b81" integrity sha1-bPdOsaNH+XoaIHvqitHJh9D/S4E= dependencies: async "~0.2.8" cli-color "~0.2.2" lodash "~2.4.1" mute-stream "0.0.4" readline2 "~0.1.0" through "~2.3.4" inquirer@~0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-0.5.1.tgz#e9f2cd1ee172c7a32e054b78a03d4ddb0d7707f1" integrity sha1-6fLNHuFyx6MuBUt4oD1N2w13B/E= dependencies: async "~0.8.0" chalk "~0.4.0" cli-color "~0.3.2" lodash "~2.4.1" mute-stream "0.0.4" readline2 "~0.1.0" through "~2.3.4" insert-module-globals@~6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/insert-module-globals/-/insert-module-globals-6.0.0.tgz#ee8aeb9dee16819e33aa14588a558824af0c15dc" integrity sha1-7orrne4WgZ4zqhRYilWIJK8MFdw= dependencies: JSONStream "~0.7.1" concat-stream "~1.4.1" lexical-scope "~1.1.0" process "~0.6.0" through "~2.3.4" xtend "^3.0.0" insight@~0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/insight/-/insight-0.3.1.tgz#1a14f32c06115c0850338c38a253d707b611d448" integrity sha1-GhTzLAYRXAhQM4w4olPXB7YR1Eg= dependencies: async "~0.2.9" chalk "~0.4.0" configstore "~0.2.1" inquirer "~0.4.0" lodash.debounce "~2.4.1" object-assign "~0.1.2" request "~2.27.0" interpret@^0.6.2: version "0.6.6" resolved "https://registry.yarnpkg.com/interpret/-/interpret-0.6.6.tgz#fecd7a18e7ce5ca6abfb953e1f86213a49f1625b" integrity sha1-/s16GOfOXKar+5U+H4YhOknxYls= interpret@^1.0.0: version "1.2.0" resolved 
"https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== intersect@~0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/intersect/-/intersect-0.0.3.tgz#c1a4a5e5eac6ede4af7504cc07e0ada7bc9f4920" integrity sha1-waSl5erG7eSvdQTMB+Ctp7yfSSA= invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= is-absolute@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-absolute/-/is-absolute-1.0.0.tgz#395e1ae84b11f26ad1795e73c17378e48a301576" integrity sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA== dependencies: is-relative "^1.0.0" is-windows "^1.0.1" is-accessor-descriptor@^0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: 
version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== dependencies: kind-of "^6.0.0" is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" is-data-descriptor "^1.0.0" kind-of "^6.0.2" is-dotfile@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1" integrity sha1-pqLzL/0t+wT1yiXs0Pa4PPeYoeE= is-equal-shallow@^0.1.3: version 
"0.1.3" resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534" integrity sha1-IjgJj8Ih3gvPpdnqxMRdY4qhxTQ= dependencies: is-primitive "^2.0.0" is-expression@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-3.0.0.tgz#39acaa6be7fd1f3471dc42c7416e61c24317ac9f" integrity sha1-Oayqa+f9HzRx3ELHQW5hwkMXrJ8= dependencies: acorn "~4.0.2" object-assign "^4.0.1" is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= is-extendable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" integrity sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA= is-extglob@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-finite@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.1.0.tgz#904135c77fb42c0641d6aa1bcdbc4daa8da082f3" integrity sha512-cdyMtqX/BOqqNBBiKlIVkytNHm49MtMlYyn1zxzvJKWmFMlGzm+ry5BBfYyeY9YmNKbRSo/o7OX9w9ale0wg3w== is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= is-glob@^2.0.0, is-glob@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" integrity sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM= dependencies: is-extglob "^1.0.0" is-glob@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-npm@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-1.0.0.tgz#f2fb63a65e4905b406c86072765a1a4dc793b9f4" integrity sha1-8vtjpl5JBbQGyGBydloaTceTufQ= is-number@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-number/-/is-number-0.1.1.tgz#69a7af116963d47206ec9bd9b48a14216f1e3806" integrity sha1-aaevEWlj1HIG7JvZtIoUIW8eOAY= is-number@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" integrity sha1-Afy7s5NGOlSPL0ZszhbezknbkI8= dependencies: kind-of "^3.0.2" is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" is-number@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff" integrity sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ== is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== 
dependencies: isobject "^3.0.1" is-posix-bracket@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" integrity sha1-MzTceXdDaOkvAW5vvAqI9c1ua8Q= is-primitive@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575" integrity sha1-IHurkWOEmcB7Kt8kCkGochADRXU= is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= is-redirect@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24" integrity sha1-HQPd7VO9jbDzDCbk+V02/HyH3CQ= is-regex@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= dependencies: has "^1.0.1" is-relative@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-relative/-/is-relative-1.0.0.tgz#a1bb6935ce8c5dba1e8b9754b9b2dcc020e2260d" integrity sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA== dependencies: is-unc-path "^1.0.0" is-root@~0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/is-root/-/is-root-0.1.0.tgz#825e394ab593df2d73c5d0092fce507270b53dcb" integrity sha1-gl45SrWT3y1zxdAJL85QcnC1Pcs= is-stream@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-unc-path@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/is-unc-path/-/is-unc-path-1.0.0.tgz#d731e8898ed090a12c352ad2eaed5095ad322c9d" integrity sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ== dependencies: unc-path-regex "^0.1.2" is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== is@^3.2.1: version "3.3.0" resolved "https://registry.yarnpkg.com/is/-/is-3.3.0.tgz#61cff6dd3c4193db94a3d62582072b44e5645d79" integrity sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg== isarray@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= isarray@1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isbinaryfile@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" integrity sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== dependencies: buffer-alloc "^1.2.0" isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= 
dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul@^0.4.0: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" integrity sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs= dependencies: abbrev "1.0.x" async "1.x" escodegen "1.8.x" esprima "2.7.x" glob "^5.0.15" handlebars "^4.0.1" js-yaml "3.x" mkdirp "0.5.x" nopt "3.x" once "1.x" resolve "1.1.x" supports-color "^3.1.0" which "^1.1.1" wordwrap "^1.0.0" istextorbinary@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/istextorbinary/-/istextorbinary-1.0.2.tgz#ace19354d1a9a0173efeb1084ce0f87b0ad7decf" integrity sha1-rOGTVNGpoBc+/rEITOD4ewrX3s8= dependencies: binaryextensions "~1.0.0" textextensions "~1.0.0" jasmine-core@^2.4.1: version "2.99.1" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-2.99.1.tgz#e6400df1e6b56e130b61c4bcd093daa7f6e8ca15" integrity sha1-5kAN8ea1bhMLYcS80JPap/boyhU= js-base64@^2.1.8: version "2.6.4" resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.6.4.tgz#f4e686c5de1ea1f867dbcad3d46d969428df98c4" integrity sha512-pZe//GGmwJndub7ZghVHz7vjb2LgC1m8B07Au3eYqeqv9emhESByMXxaEgkUkEqJe87oBbSniGYoQNIBklc7IQ== js-levenshtein@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-string-escape@~1.0.0: version "1.0.1" resolved 
"https://registry.yarnpkg.com/js-string-escape/-/js-string-escape-1.0.1.tgz#e2625badbc0d67c7533e9edc1068c587ae4137ef" integrity sha1-4mJbrbwNZ8dTPp7cEGjFh65BN+8= js-stringify@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" integrity sha1-Fzb939lyTyijaCrcYjCufk6Weds= "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-yaml@3.x, js-yaml@^3.1.0, js-yaml@^3.2.3: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" js-yaml@~3.0.1: version "3.0.2" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.0.2.tgz#9937865f8e897a5e894e73c2c5cf2e89b32eb771" integrity sha1-mTeGX46Jel6JTnPCxc8uibMut3E= dependencies: argparse "~ 0.1.11" esprima "~ 1.0.2" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity 
sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.0, json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= json3@3.3.2: version "3.3.2" resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1" integrity sha1-PAQ0dD35Pi9cQq7nsZvLSDV19OE= json5@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== dependencies: minimist "^1.2.0" jsonify@~0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" integrity sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM= jsonparse@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-0.0.5.tgz#330542ad3f0a654665b778f3eb2d9a9fa507ac64" integrity sha1-MwVCrT8KZUZlt3jz6y2an6UHrGQ= jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" jstransformer@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" integrity sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM= dependencies: is-promise "^2.0.0" promise "^7.0.1" junk@~0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/junk/-/junk-0.3.0.tgz#6c89c636f6e99898d8efbfc50430db40be71e10c" integrity sha1-bInGNvbpmJjY77/FBDDbQL5x4Qw= karma-chrome-launcher@~2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.1.1.tgz#216879c68ac04d8d5140e99619ba04b59afd46cf" integrity sha1-IWh5xorATY1RQOmWGboEtZr9Rs8= dependencies: fs-access "^1.0.0" which "^1.2.1" karma-coffee-preprocessor@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/karma-coffee-preprocessor/-/karma-coffee-preprocessor-0.3.0.tgz#a4d8dc2b145bfef458a203d308b63bc03c9b4259" integrity sha1-pNjcKxRb/vRYogPTCLY7wDybQlk= dependencies: coffee-script "~1" karma-coverage@^0.5.1: version "0.5.5" resolved "https://registry.yarnpkg.com/karma-coverage/-/karma-coverage-0.5.5.tgz#b0d58b1025d59d5c6620263186f1d58f5d5348c5" integrity sha1-sNWLECXVnVxmICYxhvHVj11TSMU= dependencies: dateformat "^1.0.6" istanbul "^0.4.0" minimatch "^3.0.0" source-map "^0.5.1" karma-jasmine@~0.3.6: version "0.3.8" resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-0.3.8.tgz#5b6457791ad9b89aa173f079e3ebe1b8c805236c" integrity sha1-W2RXeRrZuJqhc/B54+vhuMgFI2w= karma-sourcemap-loader@^0.3.5: version "0.3.7" resolved "https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8" integrity sha1-kTIsd/jxPUb+0GKwQuEAnUxFBdg= dependencies: graceful-fs "^4.1.2" karma@~0.13.9: version "0.13.22" resolved "https://registry.yarnpkg.com/karma/-/karma-0.13.22.tgz#07750b1bd063d7e7e7b91bcd2e6354d8f2aa8744" integrity sha1-B3ULG9Bj1+fnuRvNLmNU2PKqh0Q= dependencies: batch "^0.5.3" bluebird "^2.9.27" body-parser "^1.12.4" chokidar "^1.4.1" colors "^1.1.0" connect "^3.3.5" core-js "^2.1.0" di "^0.0.1" dom-serialize "^2.2.0" expand-braces "^0.1.1" glob "^7.0.0" graceful-fs "^4.1.2" http-proxy "^1.13.0" isbinaryfile "^3.0.0" lodash "^3.8.0" log4js "^0.6.31" mime "^1.3.4" minimatch "^3.0.0" optimist "^0.6.1" rimraf 
"^2.3.3" socket.io "^1.4.5" source-map "^0.5.3" useragent "^2.1.6" kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== latest-version@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-1.0.1.tgz#72cfc46e3e8d1be651e1ebb54ea9f6ea96f374bb" integrity sha1-cs/Ebj6NG+ZR4eu1Tqn26pbzdLs= dependencies: package-json "^1.0.0" lazy-cache@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" integrity sha1-odePw6UEdMuAhF07O24dpJpEbo4= lazypipe@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/lazypipe/-/lazypipe-1.0.2.tgz#b66f64ed7fd8b04869f1f1bcb795dbbaa80e418c" integrity sha512-CrU+NYdFHW8ElaeXCWz5IbmetiYVYq1fOCmpdAeZ8L+khbv1e7EnshyjlKqkO+pJbVPrsJQnHbVxEiLujG6qhQ== dependencies: stream-combiner "*" less@^1.7.4: version "1.7.5" resolved "https://registry.yarnpkg.com/less/-/less-1.7.5.tgz#4f220cf7288a27eaca739df6e4808a2d4c0d5756" integrity sha1-TyIM9yiKJ+rKc5325ICKLUwNV1Y= optionalDependencies: clean-css "2.2.x" graceful-fs 
"~3.0.2" mime "~1.2.11" mkdirp "~0.5.0" request "~2.40.0" source-map "0.1.x" levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" lexical-scope@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/lexical-scope/-/lexical-scope-1.1.1.tgz#debac1067435f1359d90fcfd9e94bcb2ee47b2bf" integrity sha1-3rrBBnQ18TWdkPz9npS8su5Hsr8= dependencies: astw "^2.0.0" liftoff@^2.1.0: version "2.5.0" resolved "https://registry.yarnpkg.com/liftoff/-/liftoff-2.5.0.tgz#2009291bb31cea861bbf10a7c15a28caf75c31ec" integrity sha1-IAkpG7Mc6oYbvxCnwVooyvdcMew= dependencies: extend "^3.0.0" findup-sync "^2.0.0" fined "^1.0.1" flagged-respawn "^1.0.0" is-plain-object "^2.0.4" object.map "^1.0.0" rechoir "^0.6.2" resolve "^1.1.7" livereload-js@^2.2.0: version "2.4.0" resolved "https://registry.yarnpkg.com/livereload-js/-/livereload-js-2.4.0.tgz#447c31cf1ea9ab52fc20db615c5ddf678f78009c" integrity sha512-XPQH8Z2GDP/Hwz2PCDrh2mth4yFejwA1OZ/81Ti3LgKyhDcEjsSsqFWZojHG0va/duGd+WyosY7eXLDoOyqcPw== load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" pify "^2.0.0" pinkie-promise "^2.0.0" strip-bom "^2.0.0" locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" lockfile@~0.4.2: version "0.4.3" resolved "https://registry.yarnpkg.com/lockfile/-/lockfile-0.4.3.tgz#79b965ee9b32d9dd24b59cf81205e6dcb6d3b224" integrity sha1-ebll7psy2d0ktZz4EgXm3LbTsiQ= 
lodash._baseassign@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz#8c38a099500f215ad09e59f1722fd0c52bfe0a4e" integrity sha1-jDigmVAPIVrQnlnxci/QxSv+Ck4= dependencies: lodash._basecopy "^3.0.0" lodash.keys "^3.0.0" lodash._basecopy@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz#8da0e6a876cf344c0ad8a54882111dd3c5c7ca36" integrity sha1-jaDmqHbPNEwK2KVIghEd08XHyjY= lodash._basetostring@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/lodash._basetostring/-/lodash._basetostring-3.0.1.tgz#d1861d877f824a52f669832dcaf3ee15566a07d5" integrity sha1-0YYdh3+CSlL2aYMtyvPuFVZqB9U= lodash._basevalues@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/lodash._basevalues/-/lodash._basevalues-3.0.0.tgz#5b775762802bde3d3297503e26300820fdf661b7" integrity sha1-W3dXYoAr3j0yl1A+JjAIIP32Ybc= lodash._bindcallback@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/lodash._bindcallback/-/lodash._bindcallback-3.0.1.tgz#e531c27644cf8b57a99e17ed95b35c748789392e" integrity sha1-5THCdkTPi1epnhftlbNcdIeJOS4= lodash._createassigner@^3.0.0: version "3.1.1" resolved "https://registry.yarnpkg.com/lodash._createassigner/-/lodash._createassigner-3.1.1.tgz#838a5bae2fdaca63ac22dee8e19fa4e6d6970b11" integrity sha1-g4pbri/aymOsIt7o4Z+k5taXCxE= dependencies: lodash._bindcallback "^3.0.0" lodash._isiterateecall "^3.0.0" lodash.restparam "^3.0.0" lodash._escapehtmlchar@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash._escapehtmlchar/-/lodash._escapehtmlchar-2.4.1.tgz#df67c3bb6b7e8e1e831ab48bfa0795b92afe899d" integrity sha1-32fDu2t+jh6DGrSL+geVuSr+iZ0= dependencies: lodash._htmlescapes "~2.4.1" lodash._escapestringchar@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash._escapestringchar/-/lodash._escapestringchar-2.4.1.tgz#ecfe22618a2ade50bfeea43937e51df66f0edb72" integrity sha1-7P4iYYoq3lC/7qQ5N+Ud9m8O23I= 
lodash._getnative@^3.0.0: version "3.9.1" resolved "https://registry.yarnpkg.com/lodash._getnative/-/lodash._getnative-3.9.1.tgz#570bc7dede46d61cdcde687d65d3eecbaa3aaff5" integrity sha1-VwvH3t5G1hzc3mh9ZdPuy6o6r/U= lodash._htmlescapes@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash._htmlescapes/-/lodash._htmlescapes-2.4.1.tgz#32d14bf0844b6de6f8b62a051b4f67c228b624cb" integrity sha1-MtFL8IRLbeb4tioFG09nwii2JMs= lodash._isiterateecall@^3.0.0: version "3.0.9" resolved "https://registry.yarnpkg.com/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz#5203ad7ba425fae842460e696db9cf3e6aac057c" integrity sha1-UgOte6Ql+uhCRg5pbbnPPmqsBXw= lodash._isnative@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash._isnative/-/lodash._isnative-2.4.1.tgz#3ea6404b784a7be836c7b57580e1cdf79b14832c" integrity sha1-PqZAS3hKe+g2x7V1gOHN95sUgyw= lodash._objecttypes@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash._objecttypes/-/lodash._objecttypes-2.4.1.tgz#7c0b7f69d98a1f76529f890b0cdb1b4dfec11c11" integrity sha1-fAt/admKH3ZSn4kLDNsbTf7BHBE= lodash._reescape@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/lodash._reescape/-/lodash._reescape-3.0.0.tgz#2b1d6f5dfe07c8a355753e5f27fac7f1cde1616a" integrity sha1-Kx1vXf4HyKNVdT5fJ/rH8c3hYWo= lodash._reevaluate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/lodash._reevaluate/-/lodash._reevaluate-3.0.0.tgz#58bc74c40664953ae0b124d806996daca431e2ed" integrity sha1-WLx0xAZklTrgsSTYBpltrKQx4u0= lodash._reinterpolate@^2.4.1, lodash._reinterpolate@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-2.4.1.tgz#4f1227aa5a8711fc632f5b07a1f4607aab8b3222" integrity sha1-TxInqlqHEfxjL1sHofRgequLMiI= lodash._reinterpolate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d" integrity 
sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0= lodash._reunescapedhtml@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash._reunescapedhtml/-/lodash._reunescapedhtml-2.4.1.tgz#747c4fc40103eb3bb8a0976e571f7a2659e93ba7" integrity sha1-dHxPxAED6zu4oJduVx96JlnpO6c= dependencies: lodash._htmlescapes "~2.4.1" lodash.keys "~2.4.1" lodash._root@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/lodash._root/-/lodash._root-3.0.1.tgz#fba1c4524c19ee9a5f8136b4609f017cf4ded692" integrity sha1-+6HEUkwZ7ppfgTa0YJ8BfPTe1pI= lodash._shimkeys@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash._shimkeys/-/lodash._shimkeys-2.4.1.tgz#6e9cc9666ff081f0b5a6c978b83e242e6949d203" integrity sha1-bpzJZm/wgfC1psl4uD4kLmlJ0gM= dependencies: lodash._objecttypes "~2.4.1" lodash.assign@*: version "4.2.0" resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7" integrity sha1-DZnzzNem0mHRm9rrkkUAXShYCOc= lodash.assign@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-3.2.0.tgz#3ce9f0234b4b2223e296b8fa0ac1fee8ebca64fa" integrity sha1-POnwI0tLIiPilrj6CsH+6OvKZPo= dependencies: lodash._baseassign "^3.0.0" lodash._createassigner "^3.0.0" lodash.keys "^3.0.0" lodash.clonedeep@^4.3.2: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8= lodash.debounce@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-2.4.1.tgz#d8cead246ec4b926e8b85678fc396bfeba8cc6fc" integrity sha1-2M6tJG7EuSbouFZ4/Dlr/rqMxvw= dependencies: lodash.isfunction "~2.4.1" lodash.isobject "~2.4.1" lodash.now "~2.4.1" lodash.defaults@^2.4.1, lodash.defaults@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-2.4.1.tgz#a7e8885f05e68851144b6e12a8f3678026bc4c54" integrity 
sha1-p+iIXwXmiFEUS24SqPNngCa8TFQ= dependencies: lodash._objecttypes "~2.4.1" lodash.keys "~2.4.1" lodash.defaults@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c" integrity sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw= lodash.escape@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/lodash.escape/-/lodash.escape-3.2.0.tgz#995ee0dc18c1b48cc92effae71a10aab5b487698" integrity sha1-mV7g3BjBtIzJLv+ucaEKq1tIdpg= dependencies: lodash._root "^3.0.0" lodash.escape@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash.escape/-/lodash.escape-2.4.1.tgz#2ce12c5e084db0a57dda5e5d1eeeb9f5d175a3b4" integrity sha1-LOEsXghNsKV92l5dHu659dF1o7Q= dependencies: lodash._escapehtmlchar "~2.4.1" lodash._reunescapedhtml "~2.4.1" lodash.keys "~2.4.1" lodash.isarguments@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a" integrity sha1-L1c9hcaiQon/AGY7SRwdM4/zRYo= lodash.isarray@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/lodash.isarray/-/lodash.isarray-3.0.4.tgz#79e4eb88c36a8122af86f844aa9bcd851b5fbb55" integrity sha1-eeTriMNqgSKvhvhEqpvNhRtfu1U= lodash.isfunction@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash.isfunction/-/lodash.isfunction-2.4.1.tgz#2cfd575c73e498ab57e319b77fa02adef13a94d1" integrity sha1-LP1XXHPkmKtX4xm3f6Aq3vE6lNE= lodash.isobject@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash.isobject/-/lodash.isobject-2.4.1.tgz#5a2e47fe69953f1ee631a7eba1fe64d2d06558f5" integrity sha1-Wi5H/mmVPx7mMafrof5k0tBlWPU= dependencies: lodash._objecttypes "~2.4.1" lodash.keys@^3.0.0: version "3.1.2" resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-3.1.2.tgz#4dbc0472b156be50a0b286855d1bd0b0c656098a" integrity sha1-TbwEcrFWvlCgsoaFXRvQsMZWCYo= dependencies: lodash._getnative "^3.0.0" lodash.isarguments 
"^3.0.0" lodash.isarray "^3.0.0" lodash.keys@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-2.4.1.tgz#48dea46df8ff7632b10d706b8acb26591e2b3727" integrity sha1-SN6kbfj/djKxDXBrissmWR4rNyc= dependencies: lodash._isnative "~2.4.1" lodash._shimkeys "~2.4.1" lodash.isobject "~2.4.1" lodash.now@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash.now/-/lodash.now-2.4.1.tgz#6872156500525185faf96785bb7fe7fe15b562c6" integrity sha1-aHIVZQBSUYX6+WeFu3/n/hW1YsY= dependencies: lodash._isnative "~2.4.1" lodash.restparam@^3.0.0: version "3.6.1" resolved "https://registry.yarnpkg.com/lodash.restparam/-/lodash.restparam-3.6.1.tgz#936a4e309ef330a7645ed4145986c85ae5b20805" integrity sha1-k2pOMJ7zMKdkXtQUWYbIWuWyCAU= lodash.template@^2.4.1, lodash.template@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-2.4.1.tgz#9e611007edf629129a974ab3c48b817b3e1cf20d" integrity sha1-nmEQB+32KRKal0qzxIuBez4c8g0= dependencies: lodash._escapestringchar "~2.4.1" lodash._reinterpolate "~2.4.1" lodash.defaults "~2.4.1" lodash.escape "~2.4.1" lodash.keys "~2.4.1" lodash.templatesettings "~2.4.1" lodash.values "~2.4.1" lodash.template@^3.0.0, lodash.template@^3.6.2: version "3.6.2" resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-3.6.2.tgz#f8cdecc6169a255be9098ae8b0c53d378931d14f" integrity sha1-+M3sxhaaJVvpCYrosMU9N4kx0U8= dependencies: lodash._basecopy "^3.0.0" lodash._basetostring "^3.0.0" lodash._basevalues "^3.0.0" lodash._isiterateecall "^3.0.0" lodash._reinterpolate "^3.0.0" lodash.escape "^3.0.0" lodash.keys "^3.0.0" lodash.restparam "^3.0.0" lodash.templatesettings "^3.0.0" lodash.template@^4.4.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-4.5.0.tgz#f976195cf3f347d0d5f52483569fe8031ccce8ab" integrity sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A== dependencies: 
lodash._reinterpolate "^3.0.0" lodash.templatesettings "^4.0.0" lodash.templatesettings@^3.0.0: version "3.1.1" resolved "https://registry.yarnpkg.com/lodash.templatesettings/-/lodash.templatesettings-3.1.1.tgz#fb307844753b66b9f1afa54e262c745307dba8e5" integrity sha1-+zB4RHU7Zrnxr6VOJix0UwfbqOU= dependencies: lodash._reinterpolate "^3.0.0" lodash.escape "^3.0.0" lodash.templatesettings@^4.0.0: version "4.2.0" resolved "https://registry.yarnpkg.com/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz#e481310f049d3cf6d47e912ad09313b154f0fb33" integrity sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ== dependencies: lodash._reinterpolate "^3.0.0" lodash.templatesettings@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash.templatesettings/-/lodash.templatesettings-2.4.1.tgz#ea76c75d11eb86d4dbe89a83893bb861929ac699" integrity sha1-6nbHXRHrhtTb6JqDiTu4YZKaxpk= dependencies: lodash._reinterpolate "~2.4.1" lodash.escape "~2.4.1" lodash.values@~2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/lodash.values/-/lodash.values-2.4.1.tgz#abf514436b3cb705001627978cbcf30b1280eea4" integrity sha1-q/UUQ2s8twUAFieXjLzzCxKA7qQ= dependencies: lodash.keys "~2.4.1" lodash@^2.4.1, lodash@~2.4.1: version "2.4.2" resolved "https://registry.yarnpkg.com/lodash/-/lodash-2.4.2.tgz#fadd834b9683073da179b3eae6d9c0d15053f73e" integrity sha1-+t2DS5aDBz2hebPq5tnA0VBT9z4= lodash@^3.10.0, lodash@^3.8.0: version "3.10.1" resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6" integrity sha1-W/Rejkm6QYnhfUgnid/RW9FAt7Y= lodash@^4.0.0, lodash@^4.17.13, lodash@^4.17.15, lodash@~4.17.10: version "4.17.20" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.20.tgz#b44a9b6297bcb698f1c51a3545a2b3b368d59c52" integrity sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA== lodash@^4.17.4: version "4.17.21" resolved 
"https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== lodash@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/lodash/-/lodash-1.0.2.tgz#8f57560c83b59fc270bd3d561b690043430e2551" integrity sha1-j1dWDIO1n8JwvT1WG2kAQ0MOJVE= log4js@^0.6.31: version "0.6.38" resolved "https://registry.yarnpkg.com/log4js/-/log4js-0.6.38.tgz#2c494116695d6fb25480943d3fc872e662a522fd" integrity sha1-LElBFmldb7JUgJQ9P8hy5mKlIv0= dependencies: readable-stream "~1.0.2" semver "~4.3.3" longest@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" integrity sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc= loophole@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/loophole/-/loophole-1.1.0.tgz#37949fea453b6256acc725c320ce0c5a7f70a2bd" integrity sha1-N5Sf6kU7YlasxyXDIM4MWn9wor0= loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" loud-rejection@^1.0.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= dependencies: currently-unhandled "^0.4.1" signal-exit "^3.0.0" lowercase-keys@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== lru-cache@2: version "2.7.3" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.7.3.tgz#6d4524e8b955f95d4f5b58851ce21dd72fb4e952" integrity 
sha1-bUUk6LlV+V1PW1iFHOId1y+06VI= lru-cache@4.1.x, lru-cache@^4.0.1: version "4.1.5" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" lru-cache@~2.3.0: version "2.3.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.3.1.tgz#b3adf6b3d856e954e2c390e6cef22081245a53d6" integrity sha1-s632s9hW6VTiw5DmzvIggSRaU9Y= lru-cache@~2.5.0: version "2.5.2" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.5.2.tgz#1fddad938aae1263ce138680be1b3f591c0ab41c" integrity sha1-H92tk4quEmPOE4aAvhs/WRwKtBw= lru-queue@0.1: version "0.1.0" resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" integrity sha1-Jzi9nw089PhEkMVzbEhpmsYyzaM= dependencies: es5-ext "~0.10.2" make-iterator@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/make-iterator/-/make-iterator-1.0.1.tgz#29b33f312aa8f547c4a5e490f56afcec99133ad6" integrity sha512-pxiuXh0iVEq7VM7KMIhs5gxsfxCux2URptUQaXo4iZZJxBAzTPOLE2BumO5dbfVYq/hBJFBR/a1mFDmOx5AGmw== dependencies: kind-of "^6.0.2" map-cache@^0.2.0, map-cache@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= map-stream@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/map-stream/-/map-stream-0.0.7.tgz#8a1f07896d82b10926bd3744a2420009f88974a8" integrity sha1-ih8HiW2CsQkmvTdEokIACfiJdKg= map-stream@~0.1.0: version "0.1.0" resolved 
"https://registry.yarnpkg.com/map-stream/-/map-stream-0.1.0.tgz#e56aa94c4c8055a16404a0674b78f215f7c8e194" integrity sha1-5WqpTEyAVaFkBKBnS3jyFffI4ZQ= map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" math-random@^1.0.1: version "1.0.4" resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.4.tgz#5dd6943c938548267016d4e34f057583080c514c" integrity sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A== media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= memoizee@~0.2.5: version "0.2.6" resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.2.6.tgz#bb45a7ad02530082f1612671dab35219cd2e0741" integrity sha1-u0WnrQJTAILxYSZx2rNSGc0uB0E= dependencies: es5-ext "~0.9.2" event-emitter "~0.2.2" next-tick "0.1.x" memoizee@~0.3.8: version "0.3.10" resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.3.10.tgz#4eca0d8aed39ec9d017f4c5c2f2f6432f42e5c8f" integrity sha1-TsoNiu057J0Bf0xcLy9kMvQuXI8= dependencies: d "~0.1.1" es5-ext "~0.10.11" es6-weak-map "~0.1.4" event-emitter "~0.3.4" lru-queue "0.1" next-tick "~0.2.2" timers-ext "0.1" meow@^3.3.0, meow@^3.7.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= dependencies: camelcase-keys "^2.0.0" decamelize "^1.1.2" loud-rejection "^1.0.0" map-obj "^1.0.1" minimist "^1.1.3" normalize-package-data "^2.3.4" object-assign "^4.0.1" read-pkg-up "^1.0.1" redent "^1.0.0" trim-newlines "^1.0.0" merge-stream@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-1.0.1.tgz#4041202d508a342ba00174008df0c251b8c135e1" 
integrity sha1-QEEgLVCKNCugAXQAjfDCUbjBNeE= dependencies: readable-stream "^2.0.1" merge@^1.1.3, merge@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/merge/-/merge-1.2.1.tgz#38bebf80c3220a8a487b6fcfb3941bb11720c145" integrity sha512-VjFo4P5Whtj4vsLzsYBu5ayHhoHJ0UqNm7ibvShmbmoz7tGi0vXaoJbGdB+GmDMLUdg8DpQXEIeVDAe8MaABvQ== micromatch@^2.1.5: version "2.3.11" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565" integrity sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU= dependencies: arr-diff "^2.0.0" array-unique "^0.2.1" braces "^1.8.2" expand-brackets "^0.1.4" extglob "^0.3.1" filename-regex "^2.0.0" is-extglob "^1.0.0" is-glob "^2.0.1" kind-of "^3.0.2" normalize-path "^2.0.1" object.omit "^2.0.0" parse-glob "^3.0.4" regex-cache "^0.4.2" micromatch@^3.0.4, micromatch@^3.1.10: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" braces "^2.3.1" define-property "^2.0.2" extend-shallow "^3.0.2" extglob "^2.0.4" fragment-cache "^0.2.1" kind-of "^6.0.2" nanomatch "^1.2.9" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.2" mime-db@1.42.0: version "1.42.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.42.0.tgz#3e252907b4c7adb906597b4b65636272cf9e7bac" integrity sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ== mime-db@1.44.0: version "1.44.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.44.0.tgz#fa11c5eb0aca1334b4233cb4d52f10c5a6272f92" integrity sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg== mime-db@~1.12.0: version "1.12.0" resolved 
"https://registry.yarnpkg.com/mime-db/-/mime-db-1.12.0.tgz#3d0c63180f458eb10d325aaa37d7c58ae312e9d7" integrity sha1-PQxjGA9FjrENMlqqN9fFiuMS6dc= mime-types@^2.1.12, mime-types@~2.1.19: version "2.1.27" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.27.tgz#47949f98e279ea53119f5722e0f34e529bec009f" integrity sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w== dependencies: mime-db "1.44.0" mime-types@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-1.0.2.tgz#995ae1392ab8affcbfcb2641dd054e943c0d5dce" integrity sha1-mVrhOSq4r/y/yyZB3QVOlDwNXc4= mime-types@~2.0.3: version "2.0.14" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.0.14.tgz#310e159db23e077f8bb22b748dabfa4957140aa6" integrity sha1-MQ4VnbI+B3+Lsit0jav6SVcUCqY= dependencies: mime-db "~1.12.0" mime-types@~2.1.11, mime-types@~2.1.24: version "2.1.25" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.25.tgz#39772d46621f93e2a80a856c53b86a62156a6437" integrity sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg== dependencies: mime-db "1.42.0" mime@1.3.4: version "1.3.4" resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.4.tgz#115f9e3b6b3daf2959983cb38f149a2d40eb5d53" integrity sha1-EV+eO2s9rylZmDyzjxSaLUDrXVM= mime@^1.2.11, mime@^1.3.4: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@~1.2.11, mime@~1.2.9: version "1.2.11" resolved "https://registry.yarnpkg.com/mime/-/mime-1.2.11.tgz#58203eed86e3a5ef17aed2b7d9ebd47f0a60dd10" integrity sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA= mini-lr@^0.1.8: version "0.1.9" resolved "https://registry.yarnpkg.com/mini-lr/-/mini-lr-0.1.9.tgz#02199d27347953d1fd1d6dbded4261f187b2d0f6" integrity sha1-AhmdJzR5U9H9HW297UJh8Yey0PY= 
dependencies: body-parser "~1.14.0" debug "^2.2.0" faye-websocket "~0.7.2" livereload-js "^2.2.0" parseurl "~1.3.0" qs "~2.2.3" minimatch@0.3, minimatch@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-0.3.0.tgz#275d8edaac4f1bb3326472089e7949c8394699dd" integrity sha1-J12O2qxPG7MyZHIInnlJyDlGmd0= dependencies: lru-cache "2" sigmund "~1.0.0" "minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.3, minimatch@^3.0.4, minimatch@~3.0.2: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimatch@^0.2.14, minimatch@~0.2.11: version "0.2.14" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-0.2.14.tgz#c74e780574f63c6f9a090e90efbe6ef53a6a756a" integrity sha1-x054BXT2PG+aCQ6Q775u9TpqdWo= dependencies: lru-cache "2" sigmund "~1.0.0" minimatch@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-1.0.0.tgz#e0dd2120b49e1b724ce8d714c520822a9438576d" integrity sha1-4N0hILSeG3JM6NcUxSCCKpQ4V20= dependencies: lru-cache "2" sigmund "~1.0.0" minimatch@^2.0.1: version "2.0.10" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-2.0.10.tgz#8d087c39c6b38c001b97fca7ce6d0e1e80afbac7" integrity sha1-jQh8OcazjAAbl/ynzm0OHoCvusc= dependencies: brace-expansion "^1.0.0" minimist@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.2.0.tgz#4dffe525dae2b864c66c2e23c6271d7afdecefce" integrity sha1-Tf/lJdriuGTGbC4jxicdev3s784= minimist@^1.1.0, minimist@^1.2.0, minimist@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= minimist@^1.1.3, minimist@^1.2.5: version "1.2.5" resolved 
"https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== minimist@~0.0.1, minimist@~0.0.7, minimist@~0.0.9: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= minipass@^2.6.0, minipass@^2.8.6, minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" minizlib@^1.2.1: version "1.3.3" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== dependencies: minipass "^2.9.0" mixin-deep@^1.2.0: version "1.3.2" resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" mkdirp@0.5, mkdirp@0.5.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" mkdirp@~0.3.5: version "0.3.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.3.5.tgz#de3e5f8961c88c787ee1368df849ac4413eca8d7" integrity sha1-3j5fiWHIjHh+4TaN+EmsRBPsqNc= mkpath@~0.1.0: version "0.1.0" resolved 
"https://registry.yarnpkg.com/mkpath/-/mkpath-0.1.0.tgz#7554a6f8d871834cc97b5462b122c4c124d6de91" integrity sha1-dVSm+Nhxg0zJe1RisSLEwSTW3pE= module-deps@~2.0.0: version "2.0.6" resolved "https://registry.yarnpkg.com/module-deps/-/module-deps-2.0.6.tgz#b999321c73ac33580f00712c0f3075fdca42563f" integrity sha1-uZkyHHOsM1gPAHEsDzB1/cpCVj8= dependencies: JSONStream "~0.7.1" browser-resolve "~1.2.4" concat-stream "~1.4.5" detective "~3.1.0" duplexer2 "0.0.2" inherits "~2.0.1" minimist "~0.0.9" parents "0.0.2" readable-stream "^1.0.27-1" resolve "~0.6.3" stream-combiner "~0.1.0" through2 "~0.4.1" mout@^0.9.1, mout@~0.9.0, mout@~0.9.1: version "0.9.1" resolved "https://registry.yarnpkg.com/mout/-/mout-0.9.1.tgz#84f0f3fd6acc7317f63de2affdcc0cee009b0477" integrity sha1-hPDz/WrMcxf2PeKv/cwM7gCbBHc= ms@0.7.1: version "0.7.1" resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.1.tgz#9cd13c03adbff25b65effde7ce864ee952017098" integrity sha1-nNE8A62/8ltl7/3nzoZO6VIBcJg= ms@0.7.2: version "0.7.2" resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765" integrity sha1-riXPJRKziFodldfwN4aNhDESR2U= ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== multipipe@^0.1.0, multipipe@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/multipipe/-/multipipe-0.1.2.tgz#2a8f2ddf70eed564dff2d57f1e1a137d9f05078b" integrity sha1-Ko8t33Du1WTf8tV/HhoTfZ8FB4s= dependencies: duplexer2 "0.0.2" mute-stream@0.0.4: version "0.0.4" resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.4.tgz#a9219960a6d5d5d046597aee51252c6655f7177e" integrity sha1-qSGZYKbV1dBGWXruUSUsZlX3F34= mute-stream@~0.0.4: 
version "0.0.8" resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== nan@^2.12.1, nan@^2.13.2: version "2.14.1" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.1.tgz#d7be34dfa3105b91494c3147089315eff8874b01" integrity sha512-isWHgVjnFjh2x2yuJ/tj3JbwoHu3UC2dX5G/88Cm24yB6YopVgxvBObDY7n5xW6ExmFhJpSEQqFPvq9zaXc8Jw== nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" define-property "^2.0.2" extend-shallow "^3.0.2" fragment-cache "^0.2.1" is-windows "^1.0.2" kind-of "^6.0.2" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" natives@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/natives/-/natives-1.1.6.tgz#a603b4a498ab77173612b9ea1acdec4d980f00bb" integrity sha512-6+TDFewD4yxY14ptjKaS63GVdtKiES1pTPyxn9Jb0rBqPMZ7VcCiooEhPNsr+mqHtMGxa/5c/HhcC4uPEUw/nA== needle@^2.2.1: version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" negotiator@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9" integrity sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk= neo-async@^2.6.0: version "2.6.1" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.1.tgz#ac27ada66167fa8849a6addd837f6b189ad2081c" integrity sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw== 
nested-error-stacks@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/nested-error-stacks/-/nested-error-stacks-1.0.2.tgz#19f619591519f096769a5ba9a86e6eeec823c3cf" integrity sha1-GfYZWRUZ8JZ2mlupqG5u7sgjw88= dependencies: inherits "~2.0.1" next-tick@0.1.x: version "0.1.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-0.1.0.tgz#1912cce8eb9b697d640fbba94f8f00dec3b94259" integrity sha1-GRLM6OubaX1kD7upT48A3sO5Qlk= next-tick@1, next-tick@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= next-tick@~0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-0.2.2.tgz#75da4a927ee5887e39065880065b7336413b310d" integrity sha1-ddpKkn7liH45BliABltzNkE7MQ0= ng-annotate@^1.0.0: version "1.2.2" resolved "https://registry.yarnpkg.com/ng-annotate/-/ng-annotate-1.2.2.tgz#dc3fc51ba0b2f8b385dbe047f4da06f580a1fd61" integrity sha1-3D/FG6Cy+LOF2+BH9NoG9YCh/WE= dependencies: acorn "~2.6.4" alter "~0.2.0" convert-source-map "~1.1.2" optimist "~0.6.1" ordered-ast-traverse "~1.1.1" simple-fmt "~0.1.0" simple-is "~0.2.0" source-map "~0.5.3" stable "~0.1.5" stringmap "~0.2.2" stringset "~0.2.1" tryor "~0.1.2" ng-classify@^4.0.0: version "4.1.1" resolved "https://registry.yarnpkg.com/ng-classify/-/ng-classify-4.1.1.tgz#6688b924c3c9b5f94da5f8fbc0d55176f3dda122" integrity sha1-Zoi5JMPJtflNpfj7wNVRdvPdoSI= dependencies: coffee-script "^1.7.1" loophole "^1.0.0" node.extend "^1.0.10" node-gyp@^3.8.0: version "3.8.0" resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-3.8.0.tgz#540304261c330e80d0d5edce253a68cb3964218c" integrity sha512-3g8lYefrRRzvGeSowdJKAKyks8oUpLEd/DyPV4eMhVlhJ0aNaZqIrNUIPuEWWTAoPqyFkfGrM67MC69baqn6vA== dependencies: fstream "^1.0.0" glob "^7.0.3" graceful-fs "^4.1.2" mkdirp "^0.5.0" nopt "2 || 3" npmlog "0 || 1 || 2 || 3 || 4" osenv "0" request "^2.87.0" rimraf "2" semver "~5.3.0" tar 
"^2.0.0" which "1" node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" needle "^2.2.1" nopt "^4.0.1" npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.2.7" rimraf "^2.6.1" semver "^5.3.0" tar "^4" node-releases@^1.1.71: version "1.1.72" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe" integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== node-sass@^4.8.3: version "4.14.1" resolved "https://registry.yarnpkg.com/node-sass/-/node-sass-4.14.1.tgz#99c87ec2efb7047ed638fb4c9db7f3a42e2217b5" integrity sha512-sjCuOlvGyCJS40R8BscF5vhVlQjNN069NtQ1gSxyK1u9iqvn6tf7O1R4GNowVZfiZUCRt5MmMs1xd+4V/7Yr0g== dependencies: async-foreach "^0.1.3" chalk "^1.1.1" cross-spawn "^3.0.0" gaze "^1.0.0" get-stdin "^4.0.1" glob "^7.0.3" in-publish "^2.0.0" lodash "^4.17.15" meow "^3.7.0" mkdirp "^0.5.1" nan "^2.13.2" node-gyp "^3.8.0" npmlog "^4.0.0" request "^2.88.0" sass-graph "2.2.5" stdout-stream "^1.4.0" "true-case-path" "^1.0.2" node-uuid@~1.4.0: version "1.4.8" resolved "https://registry.yarnpkg.com/node-uuid/-/node-uuid-1.4.8.tgz#b040eb0923968afabf8d32fb1f17f1167fdab907" integrity sha1-sEDrCSOWivq/jTL7HxfxFn/auQc= node.extend@^1.0.10, node.extend@~1.1.2: version "1.1.8" resolved "https://registry.yarnpkg.com/node.extend/-/node.extend-1.1.8.tgz#0aab3e63789f4e6d68b42bc00073ad1881243cf0" integrity sha512-L/dvEBwyg3UowwqOUTyDsGBU6kjBQOpOhshio9V3i3BMPv5YUb9+mWNN8MK0IbWqT0AqaTSONZf0aTuMMahWgA== dependencies: has "^1.0.3" is "^3.2.1" "nopt@2 || 3", nopt@3.x, nopt@~3.0.0: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" integrity 
sha1-xkZdvwirzU2zWTF/eaxopkayj/k= dependencies: abbrev "1" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" nopt@~1.0.10: version "1.0.10" resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee" integrity sha1-bd0hvSoxQXuScn3Vhfim83YI6+4= dependencies: abbrev "1" nopt@~2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-2.2.1.tgz#2aa09b7d1768487b3b89a9c5aa52335bff0baea7" integrity sha1-KqCbfRdoSHs7ianFqlIzW/8Lrqc= dependencies: abbrev "1" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" normalize-path@^2.0.0, normalize-path@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: remove-trailing-separator "^1.0.1" npm-bundled@^1.0.1: version "1.0.6" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== npm-packlist@^1.1.6: version "1.4.6" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.6.tgz#53ba3ed11f8523079f1457376dd379ee4ea42ff4" integrity sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" "npmlog@0 || 1 || 2 
|| 3 || 4", npmlog@^4.0.0, npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" gauge "~2.7.3" set-blocking "~2.0.0" null-check@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" integrity sha1-l33/1xdgErnsMNKjnbXPcqBDnt0= number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= o-stream@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/o-stream/-/o-stream-0.2.2.tgz#7fe03af870b8f9537af33b312b381b3034ab410f" integrity sha512-V3j76KU3g/Gyl8rpdi2z72rn5zguMvTCQgAXfBe3pxEefKqXmOUOD7mvx/mNjykdxGqDVfpSoo8r+WdrkWg/1Q== oauth-sign@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.3.0.tgz#cb540f93bb2b22a7d5941691a288d60e8ea9386e" integrity sha1-y1QPk7srIqfVlBaRoojWDo6pOG4= oauth-sign@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.5.0.tgz#d767f5169325620eab2e087ef0c472e773db6461" integrity sha1-12f1FpMlYg6rLgh+8MRy53PbZGE= oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@*, object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-assign@4.1.0: version "4.1.0" resolved 
"https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.0.tgz#7a3b3d0e98063d43f4c03f2e8ae6cd51a86883a0" integrity sha1-ejs9DpgGPUP0wD8uiubNUahog6A= object-assign@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-2.1.1.tgz#43c36e5d569ff8e4816c4efa8be02d26967c18aa" integrity sha1-Q8NuXVaf+OSBbE76i+AtJpZ8GKo= object-assign@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-3.0.0.tgz#9bedd5ca0897949bca47e7ff408062d549f587f2" integrity sha1-m+3VygiXlJvKR+f/QIBi1Un1h/I= object-assign@~0.1.1, object-assign@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-0.1.2.tgz#036992f073aff7b2db83d06b3fb3155a5ccac37f" integrity sha1-A2mS8HOv97Lbg9BrP7MVWlzKw38= object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" integrity sha1-8MaapQ78lbhmwYb0AKM3acsvEpE= object-copy@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" object-keys@^1.0.11, object-keys@^1.0.12: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-keys@~0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-0.4.0.tgz#28a6aae7428dd2c3a92f3d95f21335dd204e0336" integrity sha1-KKaq50KN0sOpLz2V8hM13SBOAzY= object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" 
object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== dependencies: define-properties "^1.1.2" function-bind "^1.1.1" has-symbols "^1.0.0" object-keys "^1.0.11" object.defaults@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/object.defaults/-/object.defaults-1.1.0.tgz#3a7f868334b407dea06da16d88d5cd29e435fecf" integrity sha1-On+GgzS0B96gbaFtiNXNKeQ1/s8= dependencies: array-each "^1.0.1" array-slice "^1.0.0" for-own "^1.0.0" isobject "^3.0.0" object.map@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object.map/-/object.map-1.0.1.tgz#cf83e59dc8fcc0ad5f4250e1f78b3b81bd801d37" integrity sha1-z4Plncj8wK1fQlDh94s7gb2AHTc= dependencies: for-own "^1.0.0" make-iterator "^1.0.0" object.omit@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa" integrity sha1-Gpx0SCnznbuFjHbKNXmuKlTr0fo= dependencies: for-own "^0.1.4" is-extendable "^0.1.1" object.pick@^1.2.0, object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= dependencies: isobject "^3.0.1" on-finished@~2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" once@1.x, once@^1.3.0, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" once@~1.3.0: version "1.3.3" resolved 
"https://registry.yarnpkg.com/once/-/once-1.3.3.tgz#b2e261557ce4c314ec8304f3fa82663e4297ca20" integrity sha1-suJhVXzkwxTsgwTz+oJmPkKXyiA= dependencies: wrappy "1" opn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/opn/-/opn-0.1.2.tgz#c527832cfd964d52096b524d0035ecaece51db4f" integrity sha1-xSeDLP2WTVIJa1JNADXsrs5R208= optimist@^0.6.1, optimist@~0.6.0, optimist@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" optimist@~0.3, optimist@~0.3.5: version "0.3.7" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.3.7.tgz#c90941ad59e4273328923074d2cf2e7cbc6ec0d9" integrity sha1-yQlBrVnkJzMokjB00s8ufLxuwNk= dependencies: wordwrap "~0.0.2" optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.6" levn "~0.3.0" prelude-ls "~1.1.2" type-check "~0.3.2" word-wrap "~1.2.3" options@>=0.0.5: version "0.0.6" resolved "https://registry.yarnpkg.com/options/-/options-0.0.6.tgz#ec22d312806bb53e731773e7cdaefcf1c643128f" integrity sha1-7CLTEoBrtT5zF3Pnza788cZDEo8= orchestrator@^0.3.0: version "0.3.8" resolved "https://registry.yarnpkg.com/orchestrator/-/orchestrator-0.3.8.tgz#14e7e9e2764f7315fbac184e506c7aa6df94ad7e" integrity sha1-FOfp4nZPcxX7rBhOUGx6pt+UrX4= dependencies: end-of-stream "~0.1.5" sequencify "~0.0.7" stream-consume "~0.1.0" ordered-ast-traverse@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ordered-ast-traverse/-/ordered-ast-traverse-1.1.1.tgz#6843a170bc0eee8b520cc8ddc1ddd3aa30fa057c" integrity sha1-aEOhcLwO7otSDMjdwd3TqjD6BXw= dependencies: ordered-esprima-props "~1.1.0" ordered-esprima-props@~1.1.0: 
version "1.1.0" resolved "https://registry.yarnpkg.com/ordered-esprima-props/-/ordered-esprima-props-1.1.0.tgz#a9827086df5f010aa60e9bd02b6e0335cea2ffcb" integrity sha1-qYJwht9fAQqmDpvQK24DNc6i/8s= ordered-read-streams@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/ordered-read-streams/-/ordered-read-streams-0.1.0.tgz#fd565a9af8eb4473ba69b6ed8a34352cb552f126" integrity sha1-/VZamvjrRHO6abbtijQ1LLVS8SY= os-browserify@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.1.2.tgz#49ca0293e0b19590a5f5de10c7f265a617d8fe54" integrity sha1-ScoCk+CxlZCl9d4Qx/JlphfY/lQ= os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@0, osenv@^0.1.0, osenv@^0.1.4, osenv@~0.1.0: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" osenv@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.0.3.tgz#cd6ad8ddb290915ad9e22765576025d411f29cb6" integrity sha1-zWrY3bKQkVrZ4idlV2Al1BHynLY= p-limit@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== dependencies: p-try "^2.0.0" p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" integrity 
sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-throttler@~0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/p-throttler/-/p-throttler-0.0.1.tgz#c341e3589ec843852a035e6f88e6c1e96150029b" integrity sha1-w0HjWJ7IQ4UqA15viObB6WFQAps= dependencies: q "~0.9.2" p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== package-json@^1.0.0: version "1.2.0" resolved "https://registry.yarnpkg.com/package-json/-/package-json-1.2.0.tgz#c8ecac094227cdf76a316874ed05e27cc939a0e0" integrity sha1-yOysCUInzfdqMWh07QXifMk5oOA= dependencies: got "^3.2.0" registry-url "^3.0.0" pako@~0.2.0: version "0.2.9" resolved "https://registry.yarnpkg.com/pako/-/pako-0.2.9.tgz#f3f7522f4ef782348da8161bad9ecfd51bf83a75" integrity sha1-8/dSL073gjSNqBYbrZ7P1Rv4OnU= parents@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/parents/-/parents-0.0.2.tgz#67147826e497d40759aaf5ba4c99659b6034d302" integrity sha1-ZxR4JuSX1AdZqvW6TJllm2A00wI= parents@~0.0.1: version "0.0.3" resolved "https://registry.yarnpkg.com/parents/-/parents-0.0.3.tgz#fa212f024d9fa6318dbb6b4ce676c8be493b9c43" integrity sha1-+iEvAk2fpjGNu2tM5nbIvkk7nEM= dependencies: path-platform "^0.0.1" parse-filepath@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/parse-filepath/-/parse-filepath-1.0.2.tgz#a632127f53aaf3d15876f5872f3ffac763d6c891" integrity sha1-pjISf1Oq89FYdvWHLz/6x2PWyJE= dependencies: is-absolute "^1.0.0" map-cache "^0.2.0" path-root "^0.1.1" parse-glob@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" integrity sha1-ssN2z7EfNVE7rdFz7wu246OIORw= dependencies: glob-base "^0.3.0" is-dotfile "^1.0.0" is-extglob "^1.0.0" is-glob "^2.0.0" 
parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse-node-version@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/parse-node-version/-/parse-node-version-1.0.1.tgz#e2b5dbede00e7fa9bc363607f53327e8b073189b" integrity sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA== parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= parsejson@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/parsejson/-/parsejson-0.0.3.tgz#ab7e3759f209ece99437973f7d0f1f64ae0e64ab" integrity sha1-q343WfIJ7OmUN5c/fQ8fZK4OZKs= dependencies: better-assert "~1.0.0" parseqs@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" integrity sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0= dependencies: better-assert "~1.0.0" parseuri@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" integrity sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo= dependencies: better-assert "~1.0.0" parseurl@~1.3.0, parseurl@~1.3.1, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascalcase@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= path-browserify@~0.0.0: version "0.0.1" resolved 
"https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-parse@^1.0.6: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-platform@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-platform/-/path-platform-0.0.1.tgz#b5585d7c3c463d89aa0060d86611cf1afd617e2a" integrity sha1-tVhdfDxGPYmqAGDYZhHPGv1hfio= path-root-regex@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/path-root-regex/-/path-root-regex-0.1.2.tgz#bfccdc8df5b12dc52c8b43ec38d18d72c04ba96d" integrity sha1-v8zcjfWxLcUsi0PsONGNcsBLqW0= path-root@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/path-root/-/path-root-0.1.1.tgz#9a4a6814cac1c0cd73360a95f32083c8ea4745b7" integrity sha1-mkpoFMrBwM1zNgqV8yCDyOpHRbc= dependencies: path-root-regex "^0.1.0" path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify 
"^2.0.0" pinkie-promise "^2.0.0" pause-stream@0.0.11, pause-stream@^0.0.11: version "0.0.11" resolved "https://registry.yarnpkg.com/pause-stream/-/pause-stream-0.0.11.tgz#fe5a34b0cbce12b5aa6a2b403ee2e73b602f1445" integrity sha1-/lo0sMvOErWqaitAPuLnO2AvFEU= dependencies: through "~2.3" performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= plugin-error@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/plugin-error/-/plugin-error-1.0.1.tgz#77016bd8919d0ac377fdcdd0322328953ca5781c" integrity sha512-L1zP0dk7vGweZME2i+EeakvUNqSrdiI3F91TwEoYiGrAfUXmVv6fJIq4g82PAXxNsWOp0J7ZqQy/3Szz0ajTxA== dependencies: ansi-colors "^1.0.1" arr-diff "^4.0.0" arr-union "^3.1.0" extend-shallow "^3.0.2" posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= prepend-http@^1.0.0: version "1.0.4" resolved 
"https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw= preserve@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" integrity sha1-gV7R9uvGWSb4ZbMQwHE7yzMVzks= pretty-hrtime@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz#b7e3ea42435a4c9b2759d99e0f201eb195802ee1" integrity sha1-t+PqQkNaTJsnWdmeDyAesZWALuE= private@^0.1.6: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== process-nextick-args@^2.0.0, process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process@^0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/process/-/process-0.7.0.tgz#c52208161a34adf3812344ae85d3e6150469389d" integrity sha1-xSIIFho0rfOBI0SuhdPmFQRpOJ0= process@~0.5.1: version "0.5.2" resolved "https://registry.yarnpkg.com/process/-/process-0.5.2.tgz#1638d8a8e34c2f440a91db95ab9aeb677fc185cf" integrity sha1-FjjYqONML0QKkduVq5rrZ3/Bhc8= process@~0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/process/-/process-0.6.0.tgz#7dd9be80ffaaedd4cb628f1827f1cbab6dc0918f" integrity sha1-fdm+gP+q7dTLYo8YJ/HLq23AkY8= promise@^7.0.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== dependencies: asap "~2.0.3" promptly@~0.2.0: version "0.2.1" resolved 
"https://registry.yarnpkg.com/promptly/-/promptly-0.2.1.tgz#6444e7ca4dbd9899e7eeb5ec3922827ebdc22b3b" integrity sha1-ZETnyk29mJnn7rXsOSKCfr3CKzs= dependencies: read "~1.0.4" pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= psl@^1.1.28: version "1.8.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== pug-attrs@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-2.0.4.tgz#b2f44c439e4eb4ad5d4ef25cac20d18ad28cc336" integrity sha512-TaZ4Z2TWUPDJcV3wjU3RtUXMrd3kM4Wzjbe3EWnSsZPsJ3LDI0F3yCnf2/W7PPFF+edUFQ0HgDL1IoxSz5K8EQ== dependencies: constantinople "^3.0.1" js-stringify "^1.0.1" pug-runtime "^2.0.5" pug-code-gen@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-2.0.3.tgz#122eb9ada9b5bf601705fe15aaa0a7d26bc134ab" integrity sha512-r9sezXdDuZJfW9J91TN/2LFbiqDhmltTFmGpHTsGdrNGp3p4SxAjjXEfnuK2e4ywYsRIVP0NeLbSAMHUcaX1EA== dependencies: constantinople "^3.1.2" doctypes "^1.1.0" js-stringify "^1.0.1" pug-attrs "^2.0.4" pug-error "^1.3.3" pug-runtime "^2.0.5" void-elements "^2.0.1" with "^5.0.0" pug-error@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-1.3.3.tgz#f342fb008752d58034c185de03602dd9ffe15fa6" integrity sha512-qE3YhESP2mRAWMFJgKdtT5D7ckThRScXRwkfo+Erqga7dyJdY3ZquspprMCj/9sJ2ijm5hXFWQE/A3l4poMWiQ== pug-filters@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-3.1.1.tgz#ab2cc82db9eeccf578bda89130e252a0db026aa7" integrity sha512-lFfjNyGEyVWC4BwX0WyvkoWLapI5xHSM3xZJFUhx4JM4XyyRdO8Aucc6pCygnqV2uSgJFaJWW3Ft1wCWSoQkQg== dependencies: clean-css "^4.1.11" constantinople "^3.0.1" jstransformer "1.0.0" pug-error "^1.3.3" pug-walk "^1.1.8" 
resolve "^1.1.6" uglify-js "^2.6.1" pug-lexer@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-4.1.0.tgz#531cde48c7c0b1fcbbc2b85485c8665e31489cfd" integrity sha512-i55yzEBtjm0mlplW4LoANq7k3S8gDdfC6+LThGEvsK4FuobcKfDAwt6V4jKPH9RtiE3a2Akfg5UpafZ1OksaPA== dependencies: character-parser "^2.1.1" is-expression "^3.0.0" pug-error "^1.3.3" pug-linker@^3.0.6: version "3.0.6" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-3.0.6.tgz#f5bf218b0efd65ce6670f7afc51658d0f82989fb" integrity sha512-bagfuHttfQOpANGy1Y6NJ+0mNb7dD2MswFG2ZKj22s8g0wVsojpRlqveEQHmgXXcfROB2RT6oqbPYr9EN2ZWzg== dependencies: pug-error "^1.3.3" pug-walk "^1.1.8" pug-load@^2.0.12: version "2.0.12" resolved "https://registry.yarnpkg.com/pug-load/-/pug-load-2.0.12.tgz#d38c85eb85f6e2f704dea14dcca94144d35d3e7b" integrity sha512-UqpgGpyyXRYgJs/X60sE6SIf8UBsmcHYKNaOccyVLEuT6OPBIMo6xMPhoJnqtB3Q3BbO4Z3Bjz5qDsUWh4rXsg== dependencies: object-assign "^4.1.0" pug-walk "^1.1.8" pug-parser@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-5.0.1.tgz#03e7ada48b6840bd3822f867d7d90f842d0ffdc9" integrity sha512-nGHqK+w07p5/PsPIyzkTQfzlYfuqoiGjaoqHv1LjOv2ZLXmGX1O+4Vcvps+P4LhxZ3drYSljjq4b+Naid126wA== dependencies: pug-error "^1.3.3" token-stream "0.0.1" pug-runtime@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-2.0.5.tgz#6da7976c36bf22f68e733c359240d8ae7a32953a" integrity sha512-P+rXKn9un4fQY77wtpcuFyvFaBww7/91f3jHa154qU26qFAnOe6SW1CbIDcxiG5lLK9HazYrMCCuDvNgDQNptw== pug-strip-comments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-1.0.4.tgz#cc1b6de1f6e8f5931cf02ec66cdffd3f50eaf8a8" integrity sha512-i5j/9CS4yFhSxHp5iKPHwigaig/VV9g+FgReLJWWHEHbvKsbqL0oP/K5ubuLco6Wu3Kan5p7u7qk8A4oLLh6vw== dependencies: pug-error "^1.3.3" pug-walk@^1.1.8: version "1.1.8" resolved 
"https://registry.yarnpkg.com/pug-walk/-/pug-walk-1.1.8.tgz#b408f67f27912f8c21da2f45b7230c4bd2a5ea7a" integrity sha512-GMu3M5nUL3fju4/egXwZO0XLi6fW/K3T3VTgFQ14GxNi8btlxgT5qZL//JwZFm/2Fa64J/PNS8AZeys3wiMkVA== "pug@>=2.0.0-alpha <3": version "2.0.4" resolved "https://registry.yarnpkg.com/pug/-/pug-2.0.4.tgz#ee7682ec0a60494b38d48a88f05f3b0ac931377d" integrity sha512-XhoaDlvi6NIzL49nu094R2NA6P37ijtgMDuWE+ofekDChvfKnzFal60bhSdiy8y2PBO6fmz3oMEIcfpBVRUdvw== dependencies: pug-code-gen "^2.0.2" pug-filters "^3.1.1" pug-lexer "^4.1.0" pug-linker "^3.0.6" pug-load "^2.0.12" pug-parser "^5.0.1" pug-runtime "^2.0.5" pug-strip-comments "^1.0.4" punycode@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= punycode@^2.1.0, punycode@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== punycode@~1.2.3: version "1.2.4" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.2.4.tgz#54008ac972aec74175def9cba6df7fa9d3918740" integrity sha1-VACKyXKux0F13vnLpt9/qdORh0A= q@^1.0.1: version "1.5.1" resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= q@~0.9.2: version "0.9.7" resolved "https://registry.yarnpkg.com/q/-/q-0.9.7.tgz#4de2e6cb3b29088c9e4cbc03bf9d42fb96ce2f75" integrity sha1-TeLmyzspCIyeTLwDv51C+5bOL3U= q@~1.0.0, q@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/q/-/q-1.0.1.tgz#11872aeedee89268110b10a718448ffb10112a14" integrity sha1-EYcq7t7okmgRCxCnGESP+xARKhQ= qs@5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/qs/-/qs-5.2.0.tgz#a9f31142af468cb72b25b30136ba2456834916be" integrity sha1-qfMRQq9GjLcrJbMBNrokVoNJFr4= qs@6.7.0: version "6.7.0" resolved 
"https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@~0.6.0: version "0.6.6" resolved "https://registry.yarnpkg.com/qs/-/qs-0.6.6.tgz#6e015098ff51968b8a3c819001d5f2c89bc4b107" integrity sha1-bgFQmP9RlouKPIGQAdXyyJvEsQc= qs@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/qs/-/qs-1.0.2.tgz#50a93e2b5af6691c31bcea5dae78ee6ea1903768" integrity sha1-UKk+K1r2aRwxvOpdrnjubqGQN2g= qs@~2.2.3: version "2.2.5" resolved "https://registry.yarnpkg.com/qs/-/qs-2.2.5.tgz#1088abaf9dcc0ae5ae45b709e6c6b5888b23923c" integrity sha1-EIirr53MCuWuRbcJ5sa1iIsjkjw= qs@~2.3.1: version "2.3.3" resolved "https://registry.yarnpkg.com/qs/-/qs-2.3.3.tgz#e9e85adbe75da0bbe4c8e0476a086290f863b404" integrity sha1-6eha2+ddoLvkyOBHaghikPhjtAQ= qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== querystring-es3@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.0.tgz#c365a08a69c443accfeb3a9deab35e3f0abaa476" integrity sha1-w2WgimnEQ6zP6zqd6rNePwq6pHY= querystring@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= randomatic@^3.0.0: version "3.1.1" resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.1.1.tgz#b776efc59375984e36c537b2f51a1f0aff0da1ed" integrity sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw== dependencies: is-number "^4.0.0" kind-of "^6.0.0" math-random "^1.0.1" range-parser@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.0.3.tgz#6872823535c692e2c2a0103826afd82c2e0ff175" 
integrity sha1-aHKCNTXGkuLCoBA4Jq/YLC4P8XU= raw-body@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== dependencies: bytes "3.1.0" http-errors "1.7.2" iconv-lite "0.4.24" unpipe "1.0.0" raw-body@~2.1.5: version "2.1.7" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.1.7.tgz#adfeace2e4fb3098058014d08c072dcc59758774" integrity sha1-rf6s4uT7MJgFgBTQjActzFl1h3Q= dependencies: bytes "2.4.0" iconv-lite "0.4.13" unpipe "1.0.0" rc@^1.0.1, rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" read-all-stream@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/read-all-stream/-/read-all-stream-3.1.0.tgz#35c3e177f2078ef789ee4bfafa4373074eaef4fa" integrity sha1-NcPhd/IHjveJ7kv6+kNzB06u9Po= dependencies: pinkie-promise "^2.0.0" readable-stream "^2.0.0" read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" path-type "^1.0.0" read@~1.0.4: version "1.0.7" resolved "https://registry.yarnpkg.com/read/-/read-1.0.7.tgz#b3da19bd052431a97671d44a42634adf710b40c4" integrity sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ= dependencies: mute-stream "~0.0.4" "readable-stream@>=1.0.33-1 
<1.1.0-0", readable-stream@~1.0.17, readable-stream@~1.0.2, readable-stream@~1.0.26: version "1.0.34" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" integrity sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw= dependencies: core-util-is "~1.0.0" inherits "~2.0.1" isarray "0.0.1" string_decoder "~0.10.x" readable-stream@^1.0.26-2, readable-stream@^1.0.27-1, readable-stream@~1.1.10, readable-stream@~1.1.8, readable-stream@~1.1.9: version "1.1.14" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" integrity sha1-fPTFTvZI44EwhMY23SB54WbAgdk= dependencies: core-util-is "~1.0.0" inherits "~2.0.1" isarray "0.0.1" string_decoder "~0.10.x" readable-stream@^2.0.0, readable-stream@^2.0.2, readable-stream@^2.3.5, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@^2.0.1, readable-stream@^2.0.6: version "2.3.7" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readdirp@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity 
sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== dependencies: graceful-fs "^4.1.11" micromatch "^3.1.10" readable-stream "^2.0.2" readline2@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/readline2/-/readline2-0.1.1.tgz#99443ba6e83b830ef3051bfd7dc241a82728d568" integrity sha1-mUQ7pug7gw7zBRv9fcJBqCco1Wg= dependencies: mute-stream "0.0.4" strip-ansi "^2.0.1" rechoir@^0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" integrity sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q= dependencies: resolve "^1.1.6" redent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= dependencies: indent-string "^2.1.0" strip-indent "^1.0.1" redeyed@~0.4.0: version "0.4.4" resolved "https://registry.yarnpkg.com/redeyed/-/redeyed-0.4.4.tgz#37e990a6f2b21b2a11c2e6a48fd4135698cba97f" integrity sha1-N+mQpvKyGyoRwuakj9QTVpjLqX8= dependencies: esprima "~1.0.4" regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" regenerate@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== regenerator-runtime@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== 
regenerator-runtime@^0.13.2: version "0.13.3" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5" integrity sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw== regenerator-transform@^0.14.0: version "0.14.1" resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.1.tgz#3b2fce4e1ab7732c08f665dfdb314749c7ddd2fb" integrity sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ== dependencies: private "^0.1.6" regex-cache@^0.4.2: version "0.4.4" resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.4.tgz#75bdc58a2a1496cec48a12835bc54c8d562336dd" integrity sha512-nVIZwtCjkC9YgvWkpM55B5rBhBYRZhAaJbgcFYXXsHnbZ9UZI9nnVWYZpBlCqv9ho2eZryPnWrZGsOdPwVWXWQ== dependencies: is-equal-shallow "^0.1.3" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" safe-regex "^1.1.0" regexpu-core@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6" integrity sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg== dependencies: regenerate "^1.4.0" regenerate-unicode-properties "^8.1.0" regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" unicode-match-property-value-ecmascript "^1.1.0" registry-url@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-3.1.0.tgz#3d4ef870f73dde1d77f0cf9a381432444e174942" integrity sha1-PU74cPc93h138M+aOBQyRE4XSUI= dependencies: rc "^1.0.1" regjsgen@^0.5.0: version "0.5.1" resolved 
"https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.1.tgz#48f0bf1a5ea205196929c0d9798b42d1ed98443c" integrity sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg== regjsparser@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= repeat-element@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-0.2.2.tgz#c7a8d3236068362059a7e4651fc6884e8b1fb4ae" integrity sha1-x6jTI2BoNiBZp+RlH8aITosftK4= repeat-string@^1.5.2, repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" replace-ext@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-0.0.1.tgz#29bbd92078a739f0bcce2b4ee41e837953522924" integrity sha1-KbvZIHinOfC8zitO5B6DeVNSKSQ= replace-ext@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.0.tgz#de63128373fcbf7c3ccfa4de5a480c45a67958eb" integrity sha1-3mMSg3P8v3w8z6TeWkgMRaZ5WOs= replacestream@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/replacestream/-/replacestream-4.0.3.tgz#3ee5798092be364b1cdb1484308492cb3dff2f36" integrity sha512-AC0FiLS352pBBiZhd4VXB1Ab/lh0lEgpP+GGvZqbQh8a5cmXVoTe5EX/YeTFArnp4SRGTHh1qCHu9lGs1qG8sA== dependencies: escape-string-regexp "^1.0.3" object-assign "^4.0.1" readable-stream "^2.0.2" request-progress@~0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/request-progress/-/request-progress-0.3.1.tgz#0721c105d8a96ac6b2ce8b2c89ae2d5ecfcf6b3a" integrity sha1-ByHBBdipasayzossia4tXs/Pazo= dependencies: throttleit "~0.0.2" request-replay@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/request-replay/-/request-replay-0.2.0.tgz#9b693a5d118b39f5c596ead5ed91a26444057f60" integrity sha1-m2k6XRGLOfXFlurV7ZGiZEQFf2A= dependencies: retry "~0.6.0" request@^2.87.0, request@^2.88.0: version "2.88.2" resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== dependencies: aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.3" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign "~0.9.0" performance-now "^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.5.0" tunnel-agent "^0.6.0" uuid "^3.3.2" request@~2.27.0: version "2.27.0" resolved "https://registry.yarnpkg.com/request/-/request-2.27.0.tgz#dfb1a224dd3a5a9bade4337012503d710e538668" integrity sha1-37GiJN06Wput5DNwElA9cQ5Thmg= dependencies: aws-sign "~0.3.0" cookie-jar "~0.3.0" forever-agent "~0.5.0" form-data "~0.1.0" hawk "~1.0.0" http-signature 
"~0.10.0" json-stringify-safe "~5.0.0" mime "~1.2.9" node-uuid "~1.4.0" oauth-sign "~0.3.0" qs "~0.6.0" tunnel-agent "~0.3.0" request@~2.36.0: version "2.36.0" resolved "https://registry.yarnpkg.com/request/-/request-2.36.0.tgz#28c6c04262c7b9ffdd21b9255374517ee6d943f5" integrity sha1-KMbAQmLHuf/dIbklU3RRfubZQ/U= dependencies: forever-agent "~0.5.0" json-stringify-safe "~5.0.0" mime "~1.2.9" node-uuid "~1.4.0" qs "~0.6.0" optionalDependencies: aws-sign2 "~0.5.0" form-data "~0.1.0" hawk "~1.0.0" http-signature "~0.10.0" oauth-sign "~0.3.0" tough-cookie ">=0.12.0" tunnel-agent "~0.4.0" request@~2.40.0: version "2.40.0" resolved "https://registry.yarnpkg.com/request/-/request-2.40.0.tgz#4dd670f696f1e6e842e66b4b5e839301ab9beb67" integrity sha1-TdZw9pbx5uhC5mtLXoOTAaub62c= dependencies: forever-agent "~0.5.0" json-stringify-safe "~5.0.0" mime-types "~1.0.1" node-uuid "~1.4.0" qs "~1.0.0" optionalDependencies: aws-sign2 "~0.5.0" form-data "~0.1.0" hawk "1.1.1" http-signature "~0.10.0" oauth-sign "~0.3.0" stringstream "~0.0.4" tough-cookie ">=0.12.0" tunnel-agent "~0.4.0" request@~2.51.0: version "2.51.0" resolved "https://registry.yarnpkg.com/request/-/request-2.51.0.tgz#35d00bbecc012e55f907b1bd9e0dbd577bfef26e" integrity sha1-NdALvswBLlX5B7G9ng29V3v+8m4= dependencies: aws-sign2 "~0.5.0" bl "~0.9.0" caseless "~0.8.0" combined-stream "~0.0.5" forever-agent "~0.5.0" form-data "~0.2.0" hawk "1.1.1" http-signature "~0.10.0" json-stringify-safe "~5.0.0" mime-types "~1.0.1" node-uuid "~1.4.0" oauth-sign "~0.5.0" qs "~2.3.1" stringstream "~0.0.4" tough-cookie ">=0.12.0" tunnel-agent "~0.4.0" require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= dependencies: expand-tilde "^2.0.0" global-modules "^1.0.0" resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@0.6.3, resolve@~0.6.1, resolve@~0.6.3: version "0.6.3" resolved "https://registry.yarnpkg.com/resolve/-/resolve-0.6.3.tgz#dd957982e7e736debdf53b58a4dd91754575dd46" integrity sha1-3ZV5gufnNt699TtYpN2RdUV13UY= resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= resolve@^1.1.6, resolve@^1.1.7, resolve@^1.3.2, resolve@^1.8.1: version "1.12.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.2.tgz#08b12496d9aa8659c75f534a8f05f0d892fff594" integrity sha512-cAVTI2VLHWYsGOirfeYVVQ7ZDejtQ9fp4YhYckWDEkFfqbVjaT11iM8k6xSAfGFMM+gDpZjMnFssPu8we+mqFw== dependencies: path-parse "^1.0.6" resolve@^1.10.0: version "1.17.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.17.0.tgz#b25941b54968231cc2d1bb76a79cb7f2c0bf8444" integrity sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w== dependencies: path-parse "^1.0.6" resolve@~0.3.0: version "0.3.1" resolved 
"https://registry.yarnpkg.com/resolve/-/resolve-0.3.1.tgz#34c63447c664c70598d1c9b126fc43b2a24310a4" integrity sha1-NMY0R8ZkxwWY0cmxJvxDsqJDEKQ= ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== retry@~0.6.0: version "0.6.1" resolved "https://registry.yarnpkg.com/retry/-/retry-0.6.1.tgz#fdc90eed943fde11b893554b8cc63d0e899ba918" integrity sha1-/ckO7ZQ/3hG4k1VLjMY9DombqRg= rfile@~1.0, rfile@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/rfile/-/rfile-1.0.0.tgz#59708cf90ca1e74c54c3cfc5c36fdb9810435261" integrity sha1-WXCM+Qyh50xUw8/Fw2/bmBBDUmE= dependencies: callsite "~1.0.0" resolve "~0.3.0" right-align@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" integrity sha1-YTObci/mo1FWiSENJOFMlhSGE+8= dependencies: align-text "^0.1.1" rimraf@2, rimraf@^2.3.3, rimraf@^2.6.1: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" rimraf@~2.2.0: version "2.2.8" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.2.8.tgz#e439be2aaee327321952730f99a8929e4fc50582" integrity sha1-5Dm+Kq7jJzIZUnMPmaiSnk/FBYI= rimraf@~2.4.3: version "2.4.5" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.4.5.tgz#ee710ce5d93a8fdb856fb5ea8ff0e2d75934b2da" integrity sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto= dependencies: glob "^6.0.1" ruglify@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/ruglify/-/ruglify-1.0.0.tgz#dc8930e2a9544a274301cc9972574c0d0986b675" integrity sha1-3Ikw4qlUSidDAcyZcldMDQmGtnU= dependencies: rfile "~1.0" uglify-js "~2.2" run-sequence@~1.1.2: version "1.1.5" resolved 
"https://registry.yarnpkg.com/run-sequence/-/run-sequence-1.1.5.tgz#556bd47eb47877349e36c9c582748897db7be4f7" integrity sha1-VWvUfrR4dzSeNsnFgnSIl9t75Pc= dependencies: chalk "*" gulp-util "*" safe-buffer@>=5.1.0: version "5.2.0" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg== safe-buffer@^5.0.1, safe-buffer@^5.1.2: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= dependencies: ret "~0.1.10" "safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sass-graph@2.2.5: version "2.2.5" resolved "https://registry.yarnpkg.com/sass-graph/-/sass-graph-2.2.5.tgz#a981c87446b8319d96dce0671e487879bd24c2e8" integrity sha512-VFWDAHOe6mRuT4mZRd4eKE+d8Uedrk6Xnh7Sh9b4NGufQLQjOrvf/MQoOdx+0s92L89FeyUUNfU597j/3uNpag== dependencies: glob "^7.0.0" lodash "^4.0.0" scss-tokenizer "^0.2.3" yargs "^13.3.2" sax@^1.2.4: version "1.2.4" resolved 
"https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== scss-tokenizer@^0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/scss-tokenizer/-/scss-tokenizer-0.2.3.tgz#8eb06db9a9723333824d3f5530641149847ce5d1" integrity sha1-jrBtualyMzOCTT9VMGQRSYR85dE= dependencies: js-base64 "^2.1.8" source-map "^0.4.2" semver-diff@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36" integrity sha1-S7uEN8jTfksM8aaP1ybsbWRdbTY= dependencies: semver "^5.0.3" "semver@2 || 3 || 4 || 5", semver@^5.0.3, semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== semver@^4.1.0, semver@~4.3.3: version "4.3.6" resolved "https://registry.yarnpkg.com/semver/-/semver-4.3.6.tgz#300bc6e0e86374f7ba61068b5b1ecd57fc6532da" integrity sha1-MAvG4OhjdPe6YQaLWx7NV/xlMto= semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== semver@~2.3.0: version "2.3.2" resolved "https://registry.yarnpkg.com/semver/-/semver-2.3.2.tgz#b9848f25d6cf36333073ec9ef8856d42f1233e52" integrity sha1-uYSPJdbPNjMwc+ye+IVtQvEjPlI= semver@~5.3.0: version "5.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" integrity sha1-myzl094C0XxgEq0yaqa00M9U+U8= send@0.13.2: version "0.13.2" resolved "https://registry.yarnpkg.com/send/-/send-0.13.2.tgz#765e7607c8055452bba6f0b052595350986036de" integrity sha1-dl52B8gFVFK7pvCwUllTUJhgNt4= 
dependencies: debug "~2.2.0" depd "~1.1.0" destroy "~1.0.4" escape-html "~1.0.3" etag "~1.7.0" fresh "0.3.0" http-errors "~1.3.1" mime "1.3.4" ms "0.7.1" on-finished "~2.3.0" range-parser "~1.0.3" statuses "~1.2.1" sequencify@~0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/sequencify/-/sequencify-0.0.7.tgz#90cff19d02e07027fd767f5ead3e7b95d1e7380c" integrity sha1-kM/xnQLgcCf9dn9erT57ldHnOAw= serve-static@~1.10.0: version "1.10.3" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.10.3.tgz#ce5a6ecd3101fed5ec09827dac22a9c29bfb0535" integrity sha1-zlpuzTEB/tXsCYJ9rCKpwpv7BTU= dependencies: escape-html "~1.0.3" parseurl "~1.3.1" send "0.13.2" set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.3" split-string "^3.0.1" setprototypeof@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== shallow-copy@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/shallow-copy/-/shallow-copy-0.0.1.tgz#415f42702d73d810330292cc5ee86eae1a11a170" integrity sha1-QV9CcC1z2BAzApLMXuhurhoRoXA= shell-quote@~0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-0.0.1.tgz#1a41196f3c0333c482323593d6886ecf153dd986" integrity sha1-GkEZbzwDM8SCMjWT1ohuzxU92YY= shell-quote@~1.4.1: version "1.4.3" resolved 
"https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.4.3.tgz#952c44e0b1ed9013ef53958179cc643e8777466b" integrity sha1-lSxE4LHtkBPvU5WBecxkPod3Rms= dependencies: array-filter "~0.0.0" array-map "~0.0.0" array-reduce "~0.0.0" jsonify "~0.0.0" sigmund@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/sigmund/-/sigmund-1.0.1.tgz#3ff21f198cad2175f9f3b781853fd94d0d19b590" integrity sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA= signal-exit@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== simple-fmt@~0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/simple-fmt/-/simple-fmt-0.1.0.tgz#191bf566a59e6530482cb25ab53b4a8dc85c3a6b" integrity sha1-GRv1ZqWeZTBILLJatTtKjchcOms= simple-is@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/simple-is/-/simple-is-0.2.0.tgz#2abb75aade39deb5cc815ce10e6191164850baf0" integrity sha1-Krt1qt453rXMgVzhDmGRFkhQuvA= snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" isobject "^3.0.0" snapdragon-util "^3.0.1" snapdragon-util@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: 
base "^0.11.1" debug "^2.2.0" define-property "^0.2.5" extend-shallow "^2.0.1" map-cache "^0.2.2" source-map "^0.5.6" source-map-resolve "^0.5.0" use "^3.1.0" sntp@0.2.x: version "0.2.4" resolved "https://registry.yarnpkg.com/sntp/-/sntp-0.2.4.tgz#fb885f18b0f3aad189f824862536bceeec750900" integrity sha1-+4hfGLDzqtGJ+CSGJTa87ux1CQA= dependencies: hoek "0.9.x" socket.io-adapter@0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-0.5.0.tgz#cb6d4bb8bec81e1078b99677f9ced0046066bb8b" integrity sha1-y21LuL7IHhB4uZZ3+c7QBGBmu4s= dependencies: debug "2.3.3" socket.io-parser "2.3.1" socket.io-client@1.7.4: version "1.7.4" resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-1.7.4.tgz#ec9f820356ed99ef6d357f0756d648717bdd4281" integrity sha1-7J+CA1btme9tNX8HVtZIcXvdQoE= dependencies: backo2 "1.0.2" component-bind "1.0.0" component-emitter "1.2.1" debug "2.3.3" engine.io-client "~1.8.4" has-binary "0.1.7" indexof "0.0.1" object-component "0.0.3" parseuri "0.0.5" socket.io-parser "2.3.1" to-array "0.1.4" socket.io-parser@2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-2.3.1.tgz#dd532025103ce429697326befd64005fcfe5b4a0" integrity sha1-3VMgJRA85Clpcya+/WQAX8/ltKA= dependencies: component-emitter "1.1.2" debug "2.2.0" isarray "0.0.1" json3 "3.3.2" socket.io@^1.4.5: version "1.7.4" resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-1.7.4.tgz#2f7ecedc3391bf2d5c73e291fe233e6e34d4dd00" integrity sha1-L37O3DORvy1cc+KR/iM+bjTU3QA= dependencies: debug "2.3.3" engine.io "~1.8.4" has-binary "0.1.7" object-assign "4.1.0" socket.io-adapter "0.5.0" socket.io-client "1.7.4" socket.io-parser "2.3.1" source-map-resolve@^0.5.0: version "0.5.2" resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" integrity 
sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" urix "^0.1.0" source-map-support@~0.5.10: version "0.5.16" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-url@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= source-map@0.1.34: version "0.1.34" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.1.34.tgz#a7cfe89aec7b1682c3b198d0acfb47d7d090566b" integrity sha1-p8/omux7FoLDsZjQrPtH19CQVms= dependencies: amdefine ">=0.0.4" source-map@0.1.x, source-map@^0.1.39, source-map@~0.1.30, source-map@~0.1.31, source-map@~0.1.7: version "0.1.43" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.1.43.tgz#c24bc146ca517c1471f5dacbe2571b2b7f9e3346" integrity sha1-wkvBRspRfBRx9drL4lcbK3+eM0Y= dependencies: amdefine ">=0.0.4" source-map@^0.4.2: version "0.4.4" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" integrity sha1-66T12pwNyZneaAMti092FzZSA2s= dependencies: amdefine ">=0.0.4" source-map@^0.5.0, source-map@^0.5.1, source-map@^0.5.3, source-map@^0.5.6, source-map@~0.5.1, source-map@~0.5.3: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" resolved 
"https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" integrity sha1-2rc/vPwrqBm03gO9b26qSBZLP50= dependencies: amdefine ">=0.0.4" source-map@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.3.0.tgz#8586fb9a5a005e5b501e21cd18b6f21b457ad1f9" integrity sha1-hYb7mloAXltQHiHNGLbyG0V60fk= dependencies: amdefine ">=0.0.4" sparkles@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/sparkles/-/sparkles-1.0.1.tgz#008db65edce6c50eec0c5e228e1945061dd0437c" integrity sha512-dSO0DDYUahUt/0/pD/Is3VIm5TGJjludZ0HVymmhYF6eNA53PVLhnUk0znSYbH8IYBuJdCE+1luR22jNLMaQdw== spdx-correct@^3.0.0: version "3.1.1" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== dependencies: spdx-expression-parse "^3.0.0" spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: version "2.3.0" resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== spdx-expression-parse@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: version "3.0.5" resolved 
"https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow "^3.0.0" split@0.3: version "0.3.3" resolved "https://registry.yarnpkg.com/split/-/split-0.3.3.tgz#cd0eea5e63a211dfff7eb0f091c4133e2d0dd28f" integrity sha1-zQ7qXmOiEd//frDwkcQTPi0N0o8= dependencies: through "2" split@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/split/-/split-1.0.1.tgz#605bd9be303aa59fb35f9229fbea0ddec9ea07d9" integrity sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg== dependencies: through "2" sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" stable@~0.1.3, stable@~0.1.5: version "0.1.8" resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== static-extend@^0.1.1: version "0.1.2" resolved 
"https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" statuses@1, "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= statuses@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.2.1.tgz#dded45cc18256d51ed40aec142489d5c61026d28" integrity sha1-3e1FzBglbVHtQK7BQkidXGECbSg= stdout-stream@^1.4.0: version "1.4.1" resolved "https://registry.yarnpkg.com/stdout-stream/-/stdout-stream-1.4.1.tgz#5ac174cdd5cd726104aa0c0b2bd83815d8d535de" integrity sha512-j4emi03KXqJWcIeF8eIXkjMFN1Cmb8gUlDYGeBALLPo5qdyTfA9bOtl8m33lRoC+vFMkP3gl0WsDr6+gzxbbTA== dependencies: readable-stream "^2.0.1" stream-browserify@~0.1.0: version "0.1.3" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-0.1.3.tgz#95cf1b369772e27adaf46352265152689c6c4be9" integrity sha1-lc8bNpdy4nra9GNSJlFSaJxsS+k= dependencies: inherits "~2.0.1" process "~0.5.1" stream-combiner@*, stream-combiner@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/stream-combiner/-/stream-combiner-0.2.2.tgz#aec8cbac177b56b6f4fa479ced8c1912cee52858" integrity sha1-rsjLrBd7Vrb0+kec7YwZEs7lKFg= dependencies: duplexer "~0.1.1" through "~2.3.4" stream-combiner@~0.0.2, stream-combiner@~0.0.4: version "0.0.4" resolved "https://registry.yarnpkg.com/stream-combiner/-/stream-combiner-0.0.4.tgz#4d5e433c185261dde623ca3f44c586bcf5c4ad14" integrity sha1-TV5DPBhSYd3mI8o/RMWGvPXErRQ= dependencies: duplexer "~0.1.1" stream-combiner@~0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/stream-combiner/-/stream-combiner-0.1.0.tgz#0dc389a3c203f8f4d56368f95dde52eb9269b5be" integrity sha1-DcOJo8ID+PTVY2j5Xd5S65Jptb4= dependencies: duplexer "~0.1.1" through "~2.3.4" 
stream-consume@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/stream-consume/-/stream-consume-0.1.1.tgz#d3bdb598c2bd0ae82b8cac7ac50b1107a7996c48" integrity sha512-tNa3hzgkjEP7XbCkbRXe1jpg+ievoa0O4SCFlMOYEscGSS4JJsckGL8swUyAa/ApGU3Ae4t6Honor4HhL+tRyg== stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= string-length@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/string-length/-/string-length-1.0.1.tgz#56970fb1c38558e9e70b728bf3de269ac45adfac" integrity sha1-VpcPscOFWOnnC3KL894mmsRa36w= dependencies: strip-ansi "^3.0.0" string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" "string-width@^1.0.2 || 2": version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== dependencies: emoji-regex "^7.0.1" is-fullwidth-code-point "^2.0.0" strip-ansi "^5.1.0" string_decoder@~0.0.0: version "0.0.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.0.1.tgz#f5472d0a8d1650ec823752d24e6fd627b39bf141" integrity sha1-9UctCo0WUOyCN1LSTm/WJ7Ob8UE= string_decoder@~0.10.x: version "0.10.31" resolved 
"https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" stringify-object@~0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-0.2.1.tgz#b58be50b3ff5f371038c545d4332656bfded5620" integrity sha1-tYvlCz/183EDjFRdQzJla/3tViA= stringmap@~0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/stringmap/-/stringmap-0.2.2.tgz#556c137b258f942b8776f5b2ef582aa069d7d1b1" integrity sha1-VWwTeyWPlCuHdvWy71gqoGnX0bE= stringset@~0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/stringset/-/stringset-0.2.1.tgz#ef259c4e349344377fcd1c913dd2e848c9c042b5" integrity sha1-7yWcTjSTRDd/zRyRPdLoSMnAQrU= stringstream@~0.0.4: version "0.0.6" resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.6.tgz#7880225b0d4ad10e30927d167a1d6f2fd3b33a72" integrity sha512-87GEBAkegbBcweToUrdzf3eLhWNg06FJTebl4BVJz/JgWy8CvEr9dRtX5qWphiynMSQlxxi+QqN0z5T32SLlhA== strip-ansi@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.3.0.tgz#25f48ea22ca79187f3174a4db8759347bb126220" integrity sha1-JfSOoiynkYfzF0pNuHWTR7sSYiA= dependencies: ansi-regex "^0.2.1" strip-ansi@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-2.0.1.tgz#df62c1aa94ed2f114e1d0f21fd1d50482b79a60e" integrity sha1-32LBqpTtLxFOHQ8h/R1QSCt5pg4= dependencies: ansi-regex "^1.0.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= 
dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: ansi-regex "^4.1.0" strip-ansi@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.1.1.tgz#39e8a98d044d150660abe4a6808acf70bb7bc991" integrity sha1-OeipjQRNFQZgq+SmgIrPcLt7yZE= strip-bom@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-1.0.0.tgz#85b8862f3844b5a6d5ec8467a93598173a36f794" integrity sha1-hbiGLzhEtabV7IRnqTWYFzo295Q= dependencies: first-chunk-stream "^1.0.0" is-utf8 "^0.2.0" strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= dependencies: get-stdin "^4.0.1" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= subarg@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/subarg/-/subarg-0.0.1.tgz#3d56b07dacfbc45bbb63f7672b43b63e46368e3a" integrity sha1-PVawfaz7xFu7Y/dnK0O2PkY2jjo= dependencies: minimist "~0.0.7" supports-color@^0.2.0: version "0.2.0" resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-0.2.0.tgz#d92de2694eb3f67323973d7ae3d8b55b4c22190a" integrity sha1-2S3iaU6z9nMjlz1649i1W0wiGQo= supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.0: version "3.2.3" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" supports-color@^7.1.0: version "7.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.1.0.tgz#68e32591df73e25ad1c4b49108a2ec507962bfd1" integrity sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g== dependencies: has-flag "^4.0.0" syntax-error@~1.1.0: version "1.1.6" resolved "https://registry.yarnpkg.com/syntax-error/-/syntax-error-1.1.6.tgz#b4549706d386cc1c1dc7c2423f18579b6cade710" integrity sha1-tFSXBtOGzBwdx8JCPxhXm2yt5xA= dependencies: acorn "^2.7.0" tar@^2.0.0: version "2.2.2" resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.2.tgz#0ca8848562c7299b8b446ff6a4d60cdbb23edc40" integrity sha512-FCEhQ/4rE1zYv9rYXJw/msRqsnmlje5jHP6huWeBZ704jUTy02c5AZyWujpMR1ax6mVw9NyJMfuK2CMDWVIfgA== dependencies: block-stream "*" fstream "^1.0.12" inherits "2" tar@^4: version "4.4.13" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.13.tgz#43b364bc52888d555298637b10d60790254ab525" integrity sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA== dependencies: chownr "^1.1.1" 
fs-minipass "^1.2.5" minipass "^2.8.6" minizlib "^1.2.1" mkdirp "^0.5.0" safe-buffer "^5.1.2" yallist "^3.0.3" tar@~0.1.17: version "0.1.20" resolved "https://registry.yarnpkg.com/tar/-/tar-0.1.20.tgz#42940bae5b5f22c74483699126f9f3f27449cb13" integrity sha1-QpQLrltfIsdEg2mRJvnz8nRJyxM= dependencies: block-stream "*" fstream "~0.1.28" inherits "2" ternary-stream@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ternary-stream/-/ternary-stream-2.1.1.tgz#4ad64b98668d796a085af2c493885a435a8a8bfc" integrity sha512-j6ei9hxSoyGlqTmoMjOm+QNvUKDOIY6bNl4Uh1lhBvl6yjPW2iLqxDUYyfDPZknQ4KdRziFl+ec99iT4l7g0cw== dependencies: duplexify "^3.5.0" fork-stream "^0.0.4" merge-stream "^1.0.0" through2 "^2.0.1" terser@^3.7.5: version "3.17.0" resolved "https://registry.yarnpkg.com/terser/-/terser-3.17.0.tgz#f88ffbeda0deb5637f9d24b0da66f4e15ab10cb2" integrity sha512-/FQzzPJmCpjAH9Xvk2paiWrFq+5M6aVOf+2KRbwhByISDX/EujxsK+BAvrhb6H+2rtrLCHK9N01wO014vrIwVQ== dependencies: commander "^2.19.0" source-map "~0.6.1" source-map-support "~0.5.10" textextensions@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/textextensions/-/textextensions-1.0.2.tgz#65486393ee1f2bb039a60cbba05b0b68bd9501d2" integrity sha1-ZUhjk+4fK7A5pgy7oFsLaL2VAdI= throttleit@~0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/throttleit/-/throttleit-0.0.2.tgz#cfedf88e60c00dd9697b61fdd2a8343a9b680eaf" integrity sha1-z+34jmDADdlpe2H90qg0OptoDq8= through2@^0.4.1, through2@~0.4.0, through2@~0.4.1: version "0.4.2" resolved "https://registry.yarnpkg.com/through2/-/through2-0.4.2.tgz#dbf5866031151ec8352bb6c4db64a2292a840b9b" integrity sha1-2/WGYDEVHsg1K7bE22SiKSqEC5s= dependencies: readable-stream "~1.0.17" xtend "~2.1.1" through2@^0.5.0, through2@^0.5.1, through2@~0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/through2/-/through2-0.5.1.tgz#dfdd012eb9c700e2323fd334f38ac622ab372da7" integrity sha1-390BLrnHAOIyP9M084rGIqs3Lac= dependencies: readable-stream "~1.0.17" xtend "~3.0.0" 
through2@^0.6.1, through2@^0.6.3: version "0.6.5" resolved "https://registry.yarnpkg.com/through2/-/through2-0.6.5.tgz#41ab9c67b29d57209071410e1d7a7a968cd3ad48" integrity sha1-QaucZ7KdVyCQcUEOHXp6lozTrUg= dependencies: readable-stream ">=1.0.33-1 <1.1.0-0" xtend ">=4.0.0 <4.1.0-0" through2@^2.0.0, through2@^2.0.1, through2@^2.0.3: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" xtend "~4.0.1" through@2, "through@>=2.2.7 <3", through@^2.3.4, through@^2.3.8, through@~2.3, through@~2.3.1, through@~2.3.4: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= through@~2.2.7: version "2.2.7" resolved "https://registry.yarnpkg.com/through/-/through-2.2.7.tgz#6e8e21200191d4eb6a99f6f010df46aa1c6eb2bd" integrity sha1-bo4hIAGR1OtqmfbwEN9Gqhxusr0= tildify@^1.0.0: version "1.2.0" resolved "https://registry.yarnpkg.com/tildify/-/tildify-1.2.0.tgz#dcec03f55dca9b7aa3e5b04f21817eb56e63588a" integrity sha1-3OwD9V3Km3qj5bBPIYF+tW5jWIo= dependencies: os-homedir "^1.0.0" time-stamp@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-1.1.0.tgz#764a5a11af50561921b133f3b44e618687e0f5c3" integrity sha1-dkpaEa9QVhkhsTPztE5hhofg9cM= timed-out@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-2.0.0.tgz#f38b0ae81d3747d628001f41dafc652ace671c0a" integrity sha1-84sK6B03R9YoAB9B2vxlKs5nHAo= timers-browserify@~1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-1.0.3.tgz#ffba70c9c12eed916fd67318e629ac6f32295551" integrity sha1-/7pwycEu7ZFv1nMY5imsbzIpVVE= dependencies: process "~0.5.1" timers-ext@0.1: version "0.1.7" resolved 
"https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.7.tgz#6f57ad8578e07a3fb9f91d9387d65647555e25c6" integrity sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ== dependencies: es5-ext "~0.10.46" next-tick "1" tmp@0.0.23: version "0.0.23" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.23.tgz#de874aa5e974a85f0a32cdfdbd74663cb3bd9c74" integrity sha1-3odKpel0qF8KMs39vXRmPLO9nHQ= tmp@0.0.x: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" to-array@0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" integrity sha1-F+bBH3PdTz10zaek/zI46a2b+JA= to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= dependencies: is-number "^3.0.0" repeat-string "^1.6.1" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity 
sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" extend-shallow "^3.0.2" regex-not "^1.0.2" safe-regex "^1.1.0" toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== token-stream@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-0.0.1.tgz#ceeefc717a76c4316f126d0b9dbaa55d7e7df01a" integrity sha1-zu78cXp2xDFvEm0LnbqlXX598Bo= touch@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/touch/-/touch-0.0.2.tgz#a65a777795e5cbbe1299499bdc42281ffb21b5f4" integrity sha1-plp3d5Xly74SmUmb3EIoH/shtfQ= dependencies: nopt "~1.0.10" tough-cookie@>=0.12.0: version "3.0.1" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-3.0.1.tgz#9df4f57e739c26930a018184887f4adb7dca73b2" integrity sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg== dependencies: ip-regex "^2.1.0" psl "^1.1.28" punycode "^2.1.1" tough-cookie@~2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== dependencies: psl "^1.1.28" punycode "^2.1.1" "traverse@>=0.3.0 <0.4": version "0.3.9" resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.3.9.tgz#717b8f220cc0bb7b44e40514c22b2e8bbc70d8b9" integrity sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk= trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= "true-case-path@^1.0.2": version "1.0.3" resolved 
"https://registry.yarnpkg.com/true-case-path/-/true-case-path-1.0.3.tgz#f813b5a8c86b40da59606722b144e3225799f47d" integrity sha512-m6s2OdQe5wgpFMC+pAJ+q9djG82O2jcHPOI6RNg1yy9rCYR+WD6Nbpl32fDpfC56nirdRy+opFa/Vk7HYhqaew== dependencies: glob "^7.1.2" tryor@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/tryor/-/tryor-0.1.2.tgz#8145e4ca7caff40acde3ccf946e8b8bb75b4172b" integrity sha1-gUXkynyv9ArN48z5Rui4u3W0Fys= tty-browserify@~0.0.0: version "0.0.1" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.1.tgz#3f05251ee17904dfd0677546670db9651682b811" integrity sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw== tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tunnel-agent@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.3.0.tgz#ad681b68f5321ad2827c4cfb1b7d5df2cfe942ee" integrity sha1-rWgbaPUyGtKCfEz7G31d8s/pQu4= tunnel-agent@~0.4.0: version "0.4.3" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.4.3.tgz#6373db76909fe570e08d73583365ed828a74eeeb" integrity sha1-Y3PbdpCf5XDgjXNYM2Xtgop07us= tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" type-is@~1.6.10, type-is@~1.6.17: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity 
sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" type@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3" integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow== typedarray@~0.0.5: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= uglify-js@^2.6.1: version "2.8.29" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" integrity sha1-KcVzMUgFe7Th913zW3qcty5qWd0= dependencies: source-map "~0.5.1" yargs "~3.10.0" optionalDependencies: uglify-to-browserify "~1.0.0" uglify-js@^3.1.4: version "3.6.9" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.6.9.tgz#85d353edb6ddfb62a9d798f36e91792249320611" integrity sha512-pcnnhaoG6RtrvHJ1dFncAe8Od6Nuy30oaJ82ts6//sGSXOP5UjBMEthiProjXmMNHOfd93sqlkztifFMcb+4yw== dependencies: commander "~2.20.3" source-map "~0.6.1" uglify-js@~2.2: version "2.2.5" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.2.5.tgz#a6e02a70d839792b9780488b7b8b184c095c99c7" integrity sha1-puAqcNg5eSuXgEiLe4sYTAlcmcc= dependencies: optimist "~0.3.5" source-map "~0.1.7" uglify-js@~2.3: version "2.3.6" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.3.6.tgz#fa0984770b428b7a9b2a8058f46355d14fef211a" integrity sha1-+gmEdwtCi3qbKoBY9GNV0U/vIRo= dependencies: async "~0.2.6" optimist "~0.3.5" source-map "~0.1.7" uglify-js@~2.4.0: version "2.4.24" resolved 
"https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.4.24.tgz#fad5755c1e1577658bb06ff9ab6e548c95bebd6e" integrity sha1-+tV1XB4Vd2WLsG/5q25UjJW+vW4= dependencies: async "~0.2.6" source-map "0.1.34" uglify-to-browserify "~1.0.0" yargs "~3.5.4" uglify-to-browserify@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" integrity sha1-bgkk1r2mta/jSeOabWMoUKD4grc= ultron@1.0.x: version "1.0.2" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.0.2.tgz#ace116ab557cd197386a4e88f4685378c8b2e4fa" integrity sha1-rOEWq1V80Zc4ak6I9GhTeMiy5Po= umd@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/umd/-/umd-2.0.0.tgz#749683b0d514728ae0e1b6195f5774afc0ad4f8f" integrity sha1-dJaDsNUUcorg4bYZX1d0r8CtT48= dependencies: rfile "~1.0.0" ruglify "~1.0.0" through "~2.3.4" uglify-js "~2.4.0" unc-path-regex@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa" integrity sha1-5z3T17DXxe2G+6xrCufYxqadUPo= underscore.string@~2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-2.4.0.tgz#8cdd8fbac4e2d2ea1e7e2e8097c42f442280f85b" integrity sha1-jN2PusTi0uoefi6Al8QvRCKA+Fs= underscore@~1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.7.0.tgz#6bbaf0877500d36be34ecaa584e0db9fef035209" integrity sha1-a7rwh3UA02vjTsqlhODbn+8DUgk= unicode-canonical-property-names-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== unicode-match-property-ecmascript@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== dependencies: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" unicode-match-property-value-ecmascript@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" set-value "^2.0.1" unique-stream@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unique-stream/-/unique-stream-1.0.0.tgz#d59a4a75427447d9aa6c91e70263f8d26a4b104b" integrity sha1-1ZpKdUJ0R9mqbJHnAmP40mpLEEs= unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= 
dependencies: has-value "^0.3.1" isobject "^3.0.0" update-notifier@~0.2.0: version "0.2.2" resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-0.2.2.tgz#e69b3a784b4e686a2acd98f5e66944591996e187" integrity sha1-5ps6eEtOaGoqzZj15mlEWRmW4Yc= dependencies: chalk "^0.5.1" configstore "^0.3.1" is-npm "^1.0.0" latest-version "^1.0.0" semver-diff "^2.0.0" string-length "^1.0.0" uri-js@^4.2.2: version "4.4.0" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.0.tgz#aa714261de793e8a82347a7bcc9ce74e86f28602" integrity sha512-B0yRTzYdUCCn9n+F4+Gh4yIDtMQcaJsmYBDsTSG8g/OejKBodLQ2IHfN3bM7jUsRXndopT7OIXWdYqc1fjmV6g== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= url@~0.10.1: version "0.10.3" resolved "https://registry.yarnpkg.com/url/-/url-0.10.3.tgz#021e4d9c7705f21bbf37d03ceb58767402774c64" integrity sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ= dependencies: punycode "1.3.2" querystring "0.2.0" use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== user-home@^1.0.0, user-home@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/user-home/-/user-home-1.1.1.tgz#2b5be23a32b63a7c9deb8d0f28d485724a3df190" integrity sha1-K1viOjK2Onyd640PKNSFcko98ZA= useragent@^2.1.6: version "2.3.0" resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" integrity sha512-4AoH4pxuSvHCjqLO04sU6U/uE65BYza8l/KKBS0b0hnUPWi+cQ2BpeTEwejCSx9SPV5/U03nniDTrWx5NrmKdw== dependencies: lru-cache "4.1.x" tmp "0.0.x" util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity 
sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util@0.10.3: version "0.10.3" resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= dependencies: inherits "2.0.1" util@~0.10.1: version "0.10.4" resolved "https://registry.yarnpkg.com/util/-/util-0.10.4.tgz#3aa0125bfe668a4672de58857d3ace27ecb76901" integrity sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A== dependencies: inherits "2.0.3" utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= uuid@^2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/uuid/-/uuid-2.0.3.tgz#67e2e863797215530dff318e5bf9dcebfd47b21a" integrity sha1-Z+LoY3lyFVMN/zGOW/nc6/1Hsho= uuid@^3.3.2: version "3.4.0" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== uuid@~1.4.1: version "1.4.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-1.4.2.tgz#453019f686966a6df83cdc5244e7c990ecc332fc" integrity sha1-RTAZ9oaWam34PNxSROfJkOzDMvw= v8flags@^2.0.2: version "2.1.1" resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-2.1.1.tgz#aab1a1fa30d45f88dd321148875ac02c0b55e5b4" integrity sha1-qrGh+jDUX4jdMhFIh1rALAtV5bQ= dependencies: user-home "^1.1.1" validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" verror@1.10.0: version "1.10.0" resolved 
"https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" vinyl-fs@^0.3.0: version "0.3.14" resolved "https://registry.yarnpkg.com/vinyl-fs/-/vinyl-fs-0.3.14.tgz#9a6851ce1cac1c1cea5fe86c0931d620c2cfa9e6" integrity sha1-mmhRzhysHBzqX+hsCTHWIMLPqeY= dependencies: defaults "^1.0.0" glob-stream "^3.1.5" glob-watcher "^0.0.6" graceful-fs "^3.0.0" mkdirp "^0.5.0" strip-bom "^1.0.0" through2 "^0.6.1" vinyl "^0.4.0" vinyl-sourcemaps-apply@^0.1.1: version "0.1.4" resolved "https://registry.yarnpkg.com/vinyl-sourcemaps-apply/-/vinyl-sourcemaps-apply-0.1.4.tgz#c5fcbd43e2f238423c2dc98bddd6f79b72bc345b" integrity sha1-xfy9Q+LyOEI8LcmL3db3m3K8NFs= dependencies: source-map "^0.1.39" vinyl-sourcemaps-apply@^0.2.0, vinyl-sourcemaps-apply@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/vinyl-sourcemaps-apply/-/vinyl-sourcemaps-apply-0.2.1.tgz#ab6549d61d172c2b1b87be5c508d239c8ef87705" integrity sha1-q2VJ1h0XLCsbh75cUI0jnI74dwU= dependencies: source-map "^0.5.1" vinyl@^0.2.1: version "0.2.3" resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-0.2.3.tgz#bca938209582ec5a49ad538a00fa1f125e513252" integrity sha1-vKk4IJWC7FpJrVOKAPofEl5RMlI= dependencies: clone-stats "~0.0.1" vinyl@^0.4.0: version "0.4.6" resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-0.4.6.tgz#2f356c87a550a255461f36bbeb2a5ba8bf784847" integrity sha1-LzVsh6VQolVGHza76ypbqL94SEc= dependencies: clone "^0.2.0" clone-stats "^0.0.1" vinyl@^0.5.0: version "0.5.3" resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-0.5.3.tgz#b0455b38fc5e0cf30d4325132e461970c2091cde" integrity sha1-sEVbOPxeDPMNQyUTLkYZcMIJHN4= dependencies: clone "^1.0.0" clone-stats "^0.0.1" replace-ext "0.0.1" vinyl@^1.0.0: version "1.2.0" resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-1.2.0.tgz#5c88036cf565e5df05558bfc911f8656df218884" integrity 
sha1-XIgDbPVl5d8FVYv8kR+GVt8hiIQ= dependencies: clone "^1.0.0" clone-stats "^0.0.1" replace-ext "0.0.1" vinyl@^2.0.0, vinyl@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-2.2.0.tgz#d85b07da96e458d25b2ffe19fece9f2caa13ed86" integrity sha512-MBH+yP0kC/GQ5GwBqrTPTzEfiiLjta7hTtvQtbxBgTeSXsmKQRQecjibMbxIXzVT3Y9KJK+drOz1/k+vsu8Nkg== dependencies: clone "^2.1.1" clone-buffer "^1.0.0" clone-stats "^1.0.0" cloneable-readable "^1.0.0" remove-trailing-separator "^1.0.1" replace-ext "^1.0.0" vm-browserify@~0.0.1: version "0.0.4" resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-0.0.4.tgz#5d7ea45bbef9e4a6ff65f95438e0a87c357d5a73" integrity sha1-XX6kW7755Kb/ZflUOOCofDV9WnM= dependencies: indexof "0.0.1" void-elements@^2.0.0, void-elements@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= websocket-driver@>=0.3.6: version "0.7.3" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.3.tgz#a2d4e0d4f4f116f1e6297eba58b05d430100e9f9" integrity sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg== dependencies: http-parser-js ">=0.4.0 <0.4.11" safe-buffer ">=5.1.0" websocket-extensions ">=0.1.1" websocket-extensions@>=0.1.1: version "0.1.4" resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@1, which@^1.1.1, which@^1.2.1, which@^1.2.14, which@^1.2.9: version "1.3.1" resolved 
"https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" which@~1.0.5: version "1.0.9" resolved "https://registry.yarnpkg.com/which/-/which-1.0.9.tgz#460c1da0f810103d0321a9b633af9e575e64486f" integrity sha1-RgwdoPgQED0DIam2M6+eV15kSG8= wide-align@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" window-size@0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" integrity sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0= with@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/with/-/with-5.1.1.tgz#fa4daa92daf32c4ea94ed453c81f04686b575dfe" integrity sha1-+k2qktrzLE6pTtRTyB8EaGtXXf4= dependencies: acorn "^3.1.0" acorn-globals "^3.0.0" word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== wordwrap@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" integrity sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8= wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= wrap-ansi@^5.1.0: version "5.1.0" resolved 
"https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== dependencies: ansi-styles "^3.2.0" string-width "^3.0.0" strip-ansi "^5.0.0" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= ws@~1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/ws/-/ws-1.1.5.tgz#cbd9e6e75e09fc5d2c90015f21f0c40875e0dd51" integrity sha512-o3KqipXNUdS7wpQzBHSe180lBGO60SoK0yVo3CYJgb2MkobuWuBX6dhkYP5ORCLd55y+SaflMOV5fqAB53ux4w== dependencies: options ">=0.0.5" ultron "1.0.x" wtf-8@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wtf-8/-/wtf-8-1.0.0.tgz#392d8ba2d0f1c34d1ee2d630f15d0efb68e1048a" integrity sha1-OS2LotDxw00e4tYw8V0O+2jhBIo= xdg-basedir@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-1.0.1.tgz#14ff8f63a4fdbcb05d5b6eea22b36f3033b9f04e" integrity sha1-FP+PY6T9vLBdW27qIrNvMDO58E4= dependencies: user-home "^1.0.0" xmlhttprequest-ssl@1.5.3: version "1.5.3" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.3.tgz#185a888c04eca46c3e4070d99f7b49de3528992d" integrity sha1-GFqIjATspGw+QHDZn3tJ3jUomS0= "xtend@>=4.0.0 <4.1.0-0", xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== xtend@^3.0.0, xtend@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/xtend/-/xtend-3.0.0.tgz#5cce7407baf642cba7becda568111c493f59665a" integrity sha1-XM50B7r2Qsunvs2laBEcST9ZZlo= xtend@~2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-2.1.2.tgz#6efecc2a4dad8e6962c4901b337ce7ba87b5d28b" integrity 
sha1-bv7MKk2tjmlixJAbM3znuoe10os= dependencies: object-keys "~0.4.0" y18n@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4" integrity sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ== yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.3: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^13.1.2: version "13.1.2" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs@^13.3.2: version "13.3.2" resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd" integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw== dependencies: cliui "^5.0.0" find-up "^3.0.0" get-caller-file "^2.0.1" require-directory "^2.1.1" require-main-filename "^2.0.0" set-blocking "^2.0.0" string-width "^3.0.0" which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^13.1.2" yargs@~3.10.0: version "3.10.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" integrity sha1-9+572FfdfB0tOMDnTvvWgdFDH9E= dependencies: camelcase "^1.0.2" cliui "^2.1.0" decamelize "^1.0.0" window-size "0.1.0" yargs@~3.5.4: version "3.5.4" resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.5.4.tgz#d8aff8f665e94c34bd259bdebd1bfaf0ddd35361" integrity sha1-2K/49mXpTDS9JZvevRv68N3TU2E= 
dependencies: camelcase "^1.0.2" decamelize "^1.0.0" window-size "0.1.0" wordwrap "0.0.2" yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk= buildbot-3.4.0/www/utils/000077500000000000000000000000001413250514000153015ustar00rootroot00000000000000buildbot-3.4.0/www/utils/run-ng-classify.js000066400000000000000000000007621413250514000206650ustar00rootroot00000000000000const ngClassify = require('ng-classify') const fs = require('fs') const util = require('util'); const readFile = util.promisify(fs.readFile) const writeFile = util.promisify(fs.writeFile) async function processNgClassify(args) { for (var i in args) { var path = args[i]; console.log(path); data = await readFile(path, 'utf8'); data = ngClassify(data); await writeFile(path, data); } } var args = process.argv.slice(2); processNgClassify(args); buildbot-3.4.0/www/waterfall_view/000077500000000000000000000000001413250514000171545ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/buildbot_waterfall_view/000077500000000000000000000000001413250514000240535ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/buildbot_waterfall_view/__init__.py000066400000000000000000000015361413250514000261710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.www.plugin import Application # create the interface for the setuptools entry point ep = Application(__name__, "Buildbot Waterfall View UI") buildbot-3.4.0/www/waterfall_view/karma.conf.js000066400000000000000000000003641413250514000215340ustar00rootroot00000000000000const common = require('buildbot-build-common'); module.exports = function karmaConfig (config) { common.createTemplateKarmaConfig(config, { testRoot: 'src/tests.webpack.js', webpack: require('./webpack.config') }); }; buildbot-3.4.0/www/waterfall_view/package.json000066400000000000000000000021631413250514000214440ustar00rootroot00000000000000{ "name": "buildbot-waterfall-view", "plugin_name": "waterfall_view", "private": true, "main": "buildbot_waterfall_view/static/scripts.js", "style": "buildbot_waterfall_view/static/styles.css", "scripts": { "build": "rimraf buildbot_waterfall_view/static && webpack --bail --progress --profile --env prod", "build-dev": "rimraf buildbot_waterfall_view/static && webpack --bail --progress --profile --env dev", "dev": "webpack --bail --progress --profile --watch --env dev", "test": "karma start", "test-watch": "karma start --auto-watch --no-single-run" }, "devDependencies": { "angular-mocks": "^1.7.9", "angular-ui-bootstrap": "^2.5.6", "buildbot-build-common": "link:../build_common", "lodash": "^4.17.19", "rimraf": "^2.6.3" }, "dependencies": { "@uirouter/angularjs": "^1.0.15", "angular": "^1.8.0", "angular-animate": "^1.7.9", "buildbot-data-js": "link:../data_module", "d3": "^3.5.17", "guanlecoja-ui": "link:../guanlecoja-ui", "jquery": "^3.5.0" } } buildbot-3.4.0/www/waterfall_view/postcss.config.js000066400000000000000000000001711413250514000224530ustar00rootroot00000000000000module.exports = { plugins: { 
autoprefixer: { browsers: ['last 2 versions'] }, }, }; buildbot-3.4.0/www/waterfall_view/setup.cfg000066400000000000000000000000001413250514000207630ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/setup.py000066400000000000000000000030231413250514000206640ustar00rootroot00000000000000#!/usr/bin/env python # # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members try: from buildbot_pkg import setup_www_plugin except ImportError: import sys print('Please install buildbot_pkg module in order to install that ' 'package, or use the pre-build .whl modules available on pypi', file=sys.stderr) sys.exit(1) setup_www_plugin( name='buildbot-waterfall-view', description='Buildbot Waterfall View plugin', author=u'Pierre Tardy', author_email=u'tardyp@gmail.com', url='http://buildbot.net/', packages=['buildbot_waterfall_view'], package_data={ '': [ 'VERSION', 'static/*' ] }, entry_points=""" [buildbot.www] waterfall_view = buildbot_waterfall_view:ep """, classifiers=[ 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)' ], ) 
buildbot-3.4.0/www/waterfall_view/src/000077500000000000000000000000001413250514000177435ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/src/module/000077500000000000000000000000001413250514000212305ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/src/module/dataProcessor/000077500000000000000000000000001413250514000240415ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/src/module/dataProcessor/dataProcessor.service.js000066400000000000000000000060401413250514000306470ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS205: Consider reworking code to avoid use of IIFEs * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class DataProcessor { constructor() { ({}); } // Returns groups and adds builds to builders getGroups(builders, builds, threshold) { // Sort builds by buildid let builder; builds.sort((a, b) => a.buildid - b.buildid); // Create groups: ignore periods when builders are idle const groups = []; let groupid = -1; const last = {groupid: 0, time: 0}; // Create empty builds array for all the builders for (builder of Array.from(builders)) { builder.builds = []; } for (let build of Array.from(builds)) { builder = builders.get(build.builderid); if ((builder == null) || !builder.builds) { // builder is filtered, so we don't take its build in account continue; } // Group number starts from 0, for the first time the condition is always true if ((build.started_at - last.time) > threshold) { ++groupid; } // Create new object for a group with the minimum time if (groups[groupid] == null) { groups[groupid] = {min: build.started_at}; } // Add maximum time to the group object when the groupid is increased if (last.groupid !== groupid) { groups[last.groupid].max = last.time; } if (!build.complete) { build.complete_at = Math.round(new Date() / 1000); } build.groupid = 
(last.groupid = groupid); builder = builders.get(build.builderid); builder.builds.push(build); if (build.complete_at > last.time) { last.time = build.complete_at; } } // The last group maximum time if (groups[last.groupid]) { groups[last.groupid].max = last.time; } return groups; } // Add the most recent build result to the builder addStatus(builders) { for (let builder of Array.from(builders)) { let latest = null; for (let build of Array.from(builder.builds)) { latest = build; if (build.number > latest.number) { latest = build; } } builder.started_at = latest != null ? latest.started_at : undefined; builder.complete = (latest != null ? latest.complete : undefined) || false; builder.results = latest != null ? latest.results : undefined; } } filterBuilders(builders) { const ret = []; for (let builder of Array.from(builders)) { if (builder.builds != null ? builder.builds.length : undefined) { ret.push(builder); } } return ret; } } angular.module('waterfall_view') .service('dataProcessorService', [DataProcessor]); buildbot-3.4.0/www/waterfall_view/src/module/dataProcessor/dataProcessor.service.spec.js000066400000000000000000000132251413250514000316030ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS205: Consider reworking code to avoid use of IIFEs * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ describe('Data Processor service', function() { let builders, builds; let dataProcessorService = (builds = (builders = null)); const injected = function($injector) { const $rootScope = $injector.get('$rootScope'); dataProcessorService = $injector.get('dataProcessorService'); const Collection = $injector.get('Collection'); const _builders = [{ builderid: 1, name: 'builder1', masterids: [1] } , { builderid: 2, name: 'builder2', masterids: [1] } , { builderid: 3, name: 'builder3', masterids: [1] } , { builderid: 4, name: 
'builder4', masterids: [1] } ]; builders = new Collection("builders", {}); builders.from(_builders); const _builds = [{ buildid: 1, builderid: 1, started_at: 1403059709, complete_at: 1403059772, complete: true, results: 'success' } , { buildid: 2, builderid: 2, buildrequestid: 1, started_at: 1403059802, complete_at: 1403060287, complete: true, results: 'success' } , { buildid: 3, builderid: 2, buildrequestid: 2, started_at: 1403059710, complete_at: 1403060278, complete: true, results: 'failure' } , { buildid: 4, builderid: 3, buildrequestid: 2, started_at: 1403060250, complete_at: 0, complete: false } ]; builds = new Collection("builds", {}); builds.from(_builds); }; beforeEach(inject(injected)); it('should be defined', function() { expect(dataProcessorService).toBeDefined(); // getGroups is a function expect(dataProcessorService.getGroups).toBeDefined(); expect(typeof dataProcessorService.getGroups).toBe('function'); // addStatus is a function expect(dataProcessorService.addStatus).toBeDefined(); expect(typeof dataProcessorService.addStatus).toBe('function'); }); it('should add builds to builders', function() { // Add builds to builders dataProcessorService.getGroups(builders, builds, 0); for (let build of Array.from(builds)) { // A builder should contain its build expect(builders[build.builderid - 1].builds).toContain(build); } // Builders builds length should be equal to all builds length let buildsInBuilders = 0; for (let builder of Array.from(builders)) { buildsInBuilders += builder.builds.length; } expect(buildsInBuilders).toEqual(builds.length); }); it('should create groups', function() { // Create groups with a bigger threshold let threshold = builds[1].started_at - builds[0].complete_at; let groups = dataProcessorService.getGroups(builders, builds, threshold); expect(groups.length).toBe(1); // Create groups with a smaller threshold threshold = builds[1].started_at - builds[0].complete_at; groups = dataProcessorService.getGroups(builders, builds, threshold 
- 1); expect(groups.length).toBe(2); // Add builds to groups, all build have to be in one group let buildsInGroups = 0; for (let build of Array.from(builds)) { for (let group of Array.from(groups)) { if (group.builds == null) { group.builds = []; } if ((build.started_at <= group.min) && (build.complete_at <= group.max)) { group.builds.push(build); buildsInGroups++; } } } expect(buildsInGroups).toEqual(builds.length); // If the time between two builds is less than the threshold, they should be in different groups Array.from(builds).map((build1, i) => { for (let build2 of Array.from(builds.slice(i + 1))) { // If build2 starts earlier than build1, swap them if (build2.buildid < build1.buildid) { [build1, build2] = Array.from([build2, build1]); } if ((build2.started_at - build1.complete_at) > threshold) { expect(build1.groupid).not.toBe(build2.groupid); } } }); }); it('should add complete_at to unfinished builds', function() { const unfinishedBuilds = builds.filter(build => !build.complete); dataProcessorService.getGroups(builders, unfinishedBuilds, 0); for (let build of Array.from(unfinishedBuilds)) { expect(build.complete_at).toBeDefined(); // It should be a correct timestamp expect(build.complete_at.toString().length).toBe(10); } }); it('should add status to builders', function() { // Add builds to builders first dataProcessorService.getGroups(builders, builds, 0); dataProcessorService.addStatus(builders); Array.from(builders).map((builder) => builder.complete ? 
expect(builder.results).toBeDefined() : expect(builder.results).not.toBeDefined()); }); }); buildbot-3.4.0/www/waterfall_view/src/module/main.module.js000066400000000000000000000732471413250514000240130ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ import 'angular-animate'; import '@uirouter/angularjs'; import 'guanlecoja-ui'; import 'buildbot-data-js'; import _ from 'lodash'; var WaterfallController = (function() { let self = undefined; let Cls = class Waterfall { static initClass() { self = null; // Y axis tick values this.prototype.ticks = []; } constructor($rootElement, $scope, $q, $timeout, $window, $log, $uibModal, dataService, d3Service, dataProcessorService, scaleService, bbSettingsService, glTopbarContextualActionsService, $location, $rootScope) { this.zoomPlus = this.zoomPlus.bind(this); this.zoomMinus = this.zoomMinus.bind(this); this.renderNewData = this.renderNewData.bind(this); this.$rootElement = $rootElement; this.$scope = $scope; this.$window = $window; this.$log = $log; this.$uibModal = $uibModal; this.$location = $location; this.$rootScope = $rootScope; this.$scope.tags_filter = this.tags_filter = []; this.dataProcessorService = dataProcessorService; this.bbSettingsService = bbSettingsService; self = this; const actions = [{ caption: "", icon: "search-plus", action: this.zoomPlus } , { caption: "", icon: "search-minus", action: this.zoomMinus } ]; // 'waterfall' class needs to be dynamically added to the body in order // to support waterfall-specific styling of the body. 
(this is a bit // awkward since the body is provided by guanlecoja-ui and is the same // element as you switch between different plugin pages, therefore the // class needs to removed upon exiting the waterfall via the $destroy // event below.) const body = this.$rootElement.find("body"); body.addClass("hundredpercent"); this.$scope.$on("$destroy", ()=> { return body.removeClass("hundredpercent"); }); glTopbarContextualActionsService.setContextualActions(actions); // Clear contextual action buttons on destroy const clearGl = function () { glTopbarContextualActionsService.setContextualActions([]); }; $scope.$on('$destroy', clearGl); // Show the loading spinner this.loading = true; this.dataAccessor = dataService.open().closeOnDestroy(this.$scope); // Get Waterfall settings this.s = this.bbSettingsService.getSettingsGroup('Waterfall'); this.c = { // Margins around the chart margin: { top: 15, right: 20, bottom: 20, left: 70 }, // Gap between groups (px) gap: 30, // Default vertical scaling scaling: this.s.scaling_waterfall.value, // Minimum builder column width (px) minColumnWidth: this.s.min_column_width_waterfall.value, // Y axis time format (new line: ^) timeFormat: '%x^%H:%M', // Lazy load limit limit: this.s.lazy_limit_waterfall.value, // Idle time threshold in unix time stamp (eg. 
300 = 5 min) threshold: this.s.idle_threshold_waterfall.value, // Grey rectangle below buildids buildidBackground: this.s.number_background_waterfall.value }; // Load data (builds and builders) this.all_builders = this.dataAccessor.getBuilders({order: 'name'}); this.$scope.builders = (this.builders = this.all_builders); this.buildLimit = this.c.limit; this.$scope.builds = (this.builds = this.dataAccessor.getBuilds({limit: this.buildLimit, order: '-started_at'})); this.$scope.masters = this.dataAccessor.getMasters(); d3Service.get().then(d3 => { // Create a scale object this.d3 = d3; this.scale = new scaleService(this.d3); // Create groups and add builds to builders this.groups = this.dataProcessorService.getGroups(this.all_builders, this.builds, this.c.threshold); if (this.s.show_builders_without_builds.value) { this.$scope.builders = this.all_builders; } else { this.$scope.builders = (this.builders = this.dataProcessorService.filterBuilders(this.all_builders)); } if (!this.s.show_old_builders.value) { const ret = []; for (let builder of this.$scope.builders) { if (this.hasActiveMaster(builder)) { ret.push(builder); } } this.$scope.builders = this.builders = ret; } // Add builder status to builders this.dataProcessorService.addStatus(this.builders); // Select containers this.waterfall = this.d3.select('.waterfall'); this.container = this.waterfall.select('.svg-container'); this.header = this.waterfall.select('.header-content'); // Append svg elements to the containers this.createElements(); // Render the waterfall this.render(); // Hide the spinner this.loading = false; // Render on resize this.$scope.$watch( () => this.waterfall.style('width') , (n, o) => { if (n !== o) { this.render(); } } , true ); // Update view on data change this.loadingMore = false; this.$scope.masters.onChange = this.renderNewData; this.builds.onChange = (this.all_builders.onChange = this.renderNewData); // Lazy load builds on scroll const containerParent = this.container.node().parentNode; 
const onScroll = () => { if (!this.loadingMore && ((this.getHeight() - containerParent.scrollTop) < 1000)) { this.loadingMore = true; return this.loadMore(); } }; // Bind scroll event listener angular.element(containerParent).bind('scroll', onScroll); const resizeHandler = () => this.render(); const window = angular.element(this.$window); window.bind('resize', resizeHandler); const keyHandler = e => { // + if (e.key === '+') { e.preventDefault(); this.zoomPlus(); } // - if (e.key === '-') { e.preventDefault(); return this.zoomMinus(); } }; window.bind('keypress', keyHandler); this.$scope.$on('$destroy', function() { window.unbind('keypress', keyHandler); return window.unbind('resize', resizeHandler); }); }); $rootScope.$on('$locationChangeSuccess', function() { self.renderNewData(self.$scope.tags_filter); }); } hasActiveMaster(builder) { let active = false; if ((builder.masterids == null)) { return false; } for (let mid of Array.from(builder.masterids)) { const m = this.$scope.masters.get(mid); if ((m != null) && m.active) { active = true; } } if (builder.tags.includes('_virtual_')) { active = true; } return active; } zoomPlus() { this.incrementScaleFactor(); this.render(); } zoomMinus() { this.decrementScaleFactor(); this.render(); } /* * Increment and decrement the scale factor */ incrementScaleFactor() { this.c.scaling *= 1.5; this.s.scaling_waterfall.value *= 1.5; return this.bbSettingsService.save(); } decrementScaleFactor() { this.c.scaling /= 1.5; this.s.scaling_waterfall.value /= 1.5; return this.bbSettingsService.save(); } /* * Load more builds */ loadMore() { if (this.builds.length < this.buildLimit) { // last query returned less build than expected, so we went to the beginning of time // no need to query again return; } this.buildLimit = this.builds.length + this.c.limit; const builds = this.dataAccessor.getBuilds({limit: this.buildLimit, order: '-started_at'}); builds.onChange = builds => { this.builds.close(); // force close the old collection's 
auto-update this.builds = builds; // renders the new data builds.onChange = this.renderNewData; builds.onChange(); }; } /* * Create svg elements for chart and header, append svg groups */ createElements() { // Remove any unwanted elements first this.container.selectAll('*').remove(); this.header.selectAll('*').remove(); this.chart = this.container.append('svg') .append('g') .attr('transform', `translate(${this.c.margin.left}, ${this.c.margin.top})`) .attr('class', 'chart'); const height = this.getHeaderHeight(); this.waterfall.select(".header").style("height", height); return this.header = this.header.append('svg') .append('g') .attr('transform', `translate(${this.c.margin.left}, ${height})`) .attr('class', 'header'); } /* * Get the container width */ getWidth() { return parseInt(this.container.style('width').replace('px', ''), 10); } /* * Set the content width */ setWidth() { if (this.c.minColumnWidth > 0) { const columnWidth = (this.$window.innerWidth - this.c.margin.right - this.c.margin.left) / this.builders.length; const wider = this.c.minColumnWidth <= columnWidth; const width = wider ? 
'100%' : `${(this.builders.length * this.c.minColumnWidth) + this.c.margin.right + this.c.margin.left}px`; this.waterfall.select('.inner-content').style('width', width); return this.waterfall.select('.header-content').style('width', width); } else { return this.$log.error(`Bad column width configuration\n\t min: ${this.c.minColumnWidth}`); } } /* * Get the container height */ getHeight() { return parseInt(this.container.style('height').replace('px', ''), 10); } /* * Set the container height */ setHeight() { let h = -this.c.gap; for (let group of Array.from(this.groups)) { h += ((group.max - group.min) + this.c.gap); } let height = (h * this.c.scaling) + this.c.margin.top + this.c.margin.bottom; if (height < parseInt(this.waterfall.style('height').replace('px', ''), 10)) { this.loadMore(); } this.container.style('height', `${height}px`); height = this.getHeaderHeight(); this.waterfall.select("div.header").style("height", height + "px"); return this.header.attr('transform', `translate(${this.c.margin.left}, ${height})`); } /* * Returns content width */ getInnerWidth() { const width = this.getWidth(); return width - this.c.margin.left - this.c.margin.right; } /* * Returns content height */ getInnerHeight() { const height = this.getHeight(); return height - this.c.margin.top - this.c.margin.bottom; } /* * Returns headers height */ getHeaderHeight() { let max_buildername = 0; for (let builder of Array.from(this.builders)) { max_buildername = Math.max(builder.name.length, max_buildername); } return Math.max(100, max_buildername * 3); } /* * Returns the result string of a builder, build or step */ getResultClassFromThing(b) { let result; if (!b.complete && (b.started_at >= 0)) { result = 'pending'; } else { switch (b.results) { case 0: result = 'success'; break; case 1: result = 'warnings'; break; case 2: result = 'failure'; break; case 3: result = 'skipped'; break; case 4: result = 'exception'; break; case 5: result = 'cancelled'; break; default: result = 'unknown'; } } 
return result; } /* * Draw x axis */ drawXAxis() { const x = this.scale.getX(this.builders, this.getInnerWidth()); const builderName = this.scale.getBuilderName(this.builders); // Remove old axis this.header.select('.axis.x').remove(); // Select axis const axis = this.header.append('g') .attr('class', 'axis x'); // Remove previous elements axis.selectAll('*').remove(); // Top axis shows builder names const xAxis = this.d3.svg.axis() .scale(x) .orient('top') .tickFormat(builderName); const xAxisSelect = axis.call(xAxis); // Add link const link = function(builderid) { const p = self.d3.select(this.parentNode); const a = p.append('a') .attr('xlink:href', `#/builders/${builderid}`); return a.node().appendChild(this); }; // Rotate text xAxisSelect.selectAll('text') .style('text-anchor', 'start') .attr('transform', 'translate(0, -16) rotate(-25)') .attr('dy', '0.75em') .each(link); // Rotate tick lines return xAxisSelect.selectAll('line') .data(this.builders) .attr('transform', 'rotate(90)') .attr('x1', 0) .attr('x2', 0) .attr('y1', x.rangeBand(1) / 2) .attr('y2', -x.rangeBand(1) / 2) .attr('class', self.getResultClassFromThing) .classed('stroke', true); } addTicks(build) { const y = this.scale.getY(this.groups, this.c.gap, this.getInnerHeight()); return this.ticks = this.ticks.concat([y.getCoord(build.complete_at), y.getCoord(build.started_at)]); } removeTicks() { return this.ticks = []; } /* * Draw y axis */ drawYAxis() { let i = this.d3.scale.linear(); const y = this.scale.getY(this.groups, this.c.gap, this.getInnerHeight()); // Remove old axis this.chart.select('.axis.y').remove(); const axis = this.chart.append('g') .attr('class', 'axis y'); // Stay on left on horizontal scrolling axis.attr('transform', `translate(${this.waterfall.node().scrollLeft}, 0)`); this.waterfall.on('scroll', function() { return yAxis.attr('transform', `translate(${this.scrollLeft}, 0)`); }); // White background axis.append('rect') .attr('x', -this.c.margin.left) .attr('y', 
-this.c.margin.top) .attr('width', this.c.margin.left) .attr('height', this.getHeight()) .style('fill', '#fff'); let { ticks } = this; for (let group of Array.from(this.groups)) { ticks = ticks.concat([y.getCoord(group.min), y.getCoord(group.max)]); } // Y axis tick format const tickFormat = coordinate => { const timestamp = y.invert(coordinate); const date = new Date(timestamp * 1000); const format = this.d3.time.format(this.c.timeFormat); return format(date); }; var yAxis = this.d3.svg.axis() .scale(i) .orient('left') .tickValues(ticks) .tickFormat(tickFormat); yAxis = axis.call(yAxis); // Break text on ^ character const lineBreak = function() { const e = self.d3.select(this); const words = e.text().split('^'); e.text(''); for (i = 0; i < words.length; i++) { const word = words[i]; const text = e.append('tspan').text(word); if (i !== 0) { const x = e.attr('x'); text.attr('x', x).attr('dy', i * 10); } } }; yAxis.selectAll('text').each(lineBreak); const dasharray = tick => Array.from(this.ticks).includes(tick) ? 
'2, 5' : '2, 1'; return yAxis.selectAll('.tick') .append('line') .attr('x2', this.getInnerWidth()) .attr('stroke-dasharray', dasharray); } drawBuilds() { const x = this.scale.getX(this.builders, this.getInnerWidth()); const y = this.scale.getY(this.groups, this.c.gap, this.getInnerHeight()); // Remove previous elements this.chart.selectAll('.builder').remove(); // Create builder columns const builders = this.chart.selectAll('.builder') .data(this.builders).enter() .append('g') .attr('class', 'builder') .attr('transform', builder => `translate(${x(builder.builderid)}, 0)`); // Create build group for each build const data = builder => builder.builds; const key = build => build.buildid; const builds = builders.selectAll('.build') .data(data, key).enter() .append('g') .attr('class', 'build') .attr('transform', build => `translate(0, ${y.getCoord(build.complete_at)})`); const max = function(a, b) { if (a > b) { return a; } return b; }; // Draw rectangle for each build const height = build => max(10, Math.abs(y.getCoord(build.started_at) - y.getCoord(build.complete_at))); builds.append('rect') .attr('class', self.getResultClassFromThing) .attr('width', x.rangeBand(1)) .attr('height', height) .classed('fill', true); // Optional: grey rectangle below buildids if (this.c.buildidBackground) { builds.append('rect') .attr('y', -15) .attr('width', x.rangeBand(1)) .attr('height', 15) .style('fill', '#ccc'); } // Draw text over builds builds.append('text') .attr('class', 'id') .attr('x', x.rangeBand(1) / 2) .attr('y', -3) .text(build => build.number); // Add event listeners return builds .on('mouseover', this.mouseOver) .on('mousemove', this.mouseMove) .on('mouseout', this.mouseOut) .on('click', this.click); } /* * Event actions */ mouseOver(build) { const e = self.d3.select(this); const mouse = self.d3.mouse(this); self.addTicks(build); self.drawYAxis(); // Move build and builder to front const p = self.d3.select(this.parentNode); this.parentNode.appendChild(this); 
p.each(function() { return this.parentNode.appendChild(this); }); // Show tooltip on the left or on the right const r = build.builderid < (self.builders.length / 2); // Create tooltip let height = 40; const points = function() { if (r) { return `20,0 0,${height / 2} 20,${height} 170,${height} 170,0`; } else { return `150,0 170,${height / 2} 150,${height} 0,${height} 0,0`; } }; const tooltip = e.append('g') .attr('class', 'svg-tooltip') .attr('transform', `translate(${mouse[0]}, ${mouse[1]})`) .append('g') .attr('class', 'tooltip-content') .attr('transform', `translate(${r ? 5 : -175}, ${- height / 2})`); tooltip.append('polygon') .attr('points', points()); // Load steps build.loadSteps().onChange = function(buildsteps) { // Resize the tooltip height = (buildsteps.length * 15) + 7; tooltip.transition().duration(100) .attr('transform', `translate(${r ? 5 : -175}, ${- height / 2})`) .select('polygon') .attr('points', points()); const duration = function(step) { const d = new Date((step.complete_at - step.started_at) * 1000); if (d > 0) { return `(${d / 1000}s)`; } else { return ''; } }; tooltip.selectAll('.buildstep') .data(buildsteps) .enter().append('g') .attr('class', 'buildstep') // Add text .append('text') .attr('y', (step, i) => 15 * (i + 1)) .attr('x', r ? 30 : 10) .attr('class', self.getResultClassFromThing) .classed('fill', true) .transition().delay(100) // Text format .text((step, i) => `${i + 1}. 
${step.name} ${duration(step)}`); }; } mouseMove(build) { const e = self.d3.select(this); // Move the tooltip to the mouse position const mouse = self.d3.mouse(this); return e.select('.svg-tooltip') .attr('transform', `translate(${mouse[0]}, ${mouse[1]})`); } mouseOut(build) { const e = self.d3.select(this); self.removeTicks(); self.drawYAxis(); // Remove tooltip return e.selectAll('.svg-tooltip').remove(); } click(build) { // Open modal on click let modal; return modal = self.$uibModal.open({ template: require('./modal/modal.tpl.jade'), controller: 'waterfallModalController as modal', windowClass: 'modal-small', resolve: { selectedBuild() { return build; } } }); } toggleTag(tag) { if (!this.$scope.tags_filter.includes(tag)) { this.$scope.tags_filter.push(tag); } else { this.$scope.tags_filter = _.remove(this.$scope.tags_filter, function(currentTag) { return currentTag != tag; }); } this.$location.search("tags", this.$scope.tags_filter); } isTagFiltered(tag) { if (this.$scope.tags_filter.includes(tag)) { return true; } else { return false; } } clearTags() { this.$scope.tags_filter = this.tags_filter = []; this.$location.search("tags", this.$scope.tags_filter); } makeTagBuilders(currentTags, builders) { let tag_builders = []; let anyTagSelected = false; if (typeof currentTags != 'string') { anyTagSelected = true; for (const builder of builders) { let v = currentTags.every(currentTag => builder.tags.includes(currentTag)); if (v) { tag_builders.push(builder); } } } else if (typeof currentTags == 'string') { anyTagSelected = true; for (const builder of builders) { if (builder.tags.includes(currentTags)) { tag_builders.push(builder); } } } return [anyTagSelected, tag_builders]; } renderNewData(currentTags) { currentTags = this.$location.search()['tags']; if (currentTags != null) { if (typeof currentTags == 'string') { this.$scope.tags_filter = this.tags_filter = [currentTags]; } else { this.$scope.tags_filter = this.tags_filter = currentTags; } } this.groups = 
this.dataProcessorService.getGroups(this.all_builders, this.builds, this.c.threshold); if (this.s.show_builders_without_builds.value) { this.$scope.builders = this.all_builders; } else { this.$scope.builders = (this.builders = this.dataProcessorService.filterBuilders(this.all_builders)); } if (!this.s.show_old_builders.value) { const ret = []; for (let builder of this.$scope.builders) { if (this.hasActiveMaster(builder)) { ret.push(builder); } } this.$scope.builders = this.builders = ret; } var all_tags = []; for (let builder of this.builders) { for (let tag of builder.tags) { if (!all_tags.includes(tag)) { all_tags.push(tag); } } } all_tags.sort(); this.$scope.all_tags = this.all_tags = all_tags; this.dataProcessorService.addStatus(this.builders); let anyTagSelected, tag_builders; if (currentTags != null) { [anyTagSelected, tag_builders] = this.makeTagBuilders(currentTags, this.$scope.builders); } if (anyTagSelected) { this.$scope.builders = this.builders = tag_builders; } this.render(); return this.loadingMore = false; } /* * Render the waterfall view */ render() { const containerParent = this.container.node().parentNode; const y = this.scale.getY(this.groups, this.c.gap, this.getInnerHeight()); const time = y.invert(containerParent.scrollTop); // Set the content width this.setWidth(); // Set the height of the container this.setHeight(); // Draw the waterfall this.drawBuilds(); this.drawXAxis(); this.drawYAxis(); } }; Cls.initClass(); return Cls; })(); angular.module('waterfall_view', [ 'ui.router', 'ngAnimate', 'guanlecoja.ui', 'bbData', ]) .controller('waterfallController', ['$rootElement', '$scope', '$q', '$timeout', '$window', '$log', '$uibModal', 'dataService', 'd3Service', 'dataProcessorService', 'scaleService', 'bbSettingsService', 'glTopbarContextualActionsService', '$location', '$rootScope', WaterfallController]) .config(['$locationProvider', function($locationProvider) { $locationProvider.hashPrefix(''); }]); 
require('./dataProcessor/dataProcessor.service.js'); require('./main.module.js'); require('./modal/modal.controller.js'); require('./scale/scale.service.js'); require('./waterfall.config.js'); require('./waterfall.route.js'); buildbot-3.4.0/www/waterfall_view/src/module/main.module.spec.js000066400000000000000000000223251413250514000247330ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * DS205: Consider reworking code to avoid use of IIFEs * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ beforeEach(function() { angular.mock.module(function($provide) { $provide.service('$uibModal', function() { return {open() {}}; }); }); // Mock bbSettingsProvider angular.mock.module(function($provide) { $provide.provider('bbSettingsService', (function() { let group = undefined; const Cls = class { static initClass() { group = {}; } addSettingsGroup(g) { return g.items.map(function(i) { if (i.name === 'lazy_limit_waterfall') { i.default_value = 2; } return group[i.name] = {value: i.default_value}; }); } $get() { return { getSettingsGroup() { return group; }, save() {} }; } }; Cls.initClass(); return Cls; })() ); }); angular.mock.module('waterfall_view'); }); describe('Waterfall view controller', function() { let $document, $state, $timeout, $uibModal, $window, bbSettingsService, dataService, elem, w; let $rootScope = ($state = (elem = (w = ($document = ($window = ($uibModal = ($timeout = (bbSettingsService = ($rootElement = (dataService = null)))))))))); const masters = [{ masterid: 1, name: 'master1', active: true, } ,{ masterid: 2, name: 'master2', active: false, } ]; const builders = [{ builderid: 1, name: 'builder1', masterids: [1], tags: [""], } , { builderid: 2, name: 'builder2', masterids: [1], tags: [""], } , { builderid: 3, name: 'builder3', masterids: [1], tags: [""], } , { builderid: 4, name: 
'builder4', masterids: [1], tags: [""], } , { builderid: 5, name: 'builder5', masterids: [2], tags: [""], } ]; const builds = [{ buildid: 1, builderid: 1, started_at: 1403059709, complete_at: 1403059772, complete: true, results: 'success' } , { buildid: 2, builderid: 2, buildrequestid: 1, started_at: 1403059802, complete_at: 1403060287, complete: true, results: 'success' } , { buildid: 3, builderid: 2, buildrequestid: 2, started_at: 1403059710, complete_at: 1403060278, complete: true, results: 'failure' } , { buildid: 4, builderid: 3, buildrequestid: 2, started_at: 1403060250, complete_at: 0, complete: false } ]; const buildrequests = [{ builderid: 1, buildrequestid: 1, buildsetid: 1 } , { builderid: 1, buildrequestid: 2, buildsetid: 1 } , { builderid: 1, buildrequestid: 3, buildsetid: 2 } ]; const injected = function($injector) { $rootElement = $injector.get('$rootElement'); $rootScope = $injector.get('$rootScope'); let scope = $rootScope.$new(); const $compile = $injector.get('$compile'); const $controller = $injector.get('$controller'); $state = $injector.get('$state'); $document = $injector.get('$document'); $window = $injector.get('$window'); $uibModal = $injector.get('$uibModal'); $timeout = $injector.get('$timeout'); bbSettingsService = $injector.get('bbSettingsService'); dataService = $injector.get('dataService'); dataService.when('masters', masters); dataService.when('builds', {limit: 2}, builds.slice(0, 2)); dataService.when('builders', builders); dataService.when('buildrequests', buildrequests); dataService.when('builds/1/steps', [{buildid: 1}]); mockBody = $compile('')(scope); $rootElement.append(mockBody); elem = $compile('
')(scope); $document.find('body').append(elem); $state.transitionTo('waterfall'); $rootScope.$digest(); elem = elem.children(); const waterfall = elem.children(); scope = waterfall.scope(); w = waterfall.controller(); spyOn(w, 'mouseOver').and.callThrough(); spyOn(w, 'mouseOut').and.callThrough(); spyOn(w, 'mouseMove').and.callThrough(); spyOn(w, 'click').and.callThrough(); spyOn(w, 'loadMore').and.callThrough(); // Data is loaded $timeout.flush(); }; beforeEach(inject(injected)); // make sure we remove the element from the dom afterEach(function() { expect($document.find('svg').length).toEqual(2); elem.remove(); expect($document.find('svg').length).toEqual(0); }); it('should be defined', () => expect(w).toBeDefined()); it('should bind the masters, builds, and builders to scope', function() { const group = bbSettingsService.getSettingsGroup(); const limit = group.lazy_limit_waterfall.value; expect(w.$scope.masters).toBeDefined(); expect(w.$scope.masters.length).not.toBe(0); expect(w.builds).toBeDefined(); expect(w.builds.length).toBe(limit); expect(w.builders).toBeDefined(); expect(w.builders.length).not.toBe(0); }); it('should create svg elements', function() { expect(elem.find('svg').length).toBeGreaterThan(1); expect(elem.find('g').length).toBeGreaterThan(1); }); it('should remove body class called hundredpercent on destroy', function() { expect(w.$rootElement.find('body').hasClass('hundredpercent')).toBeTruthy(); w.$scope.$destroy(); expect(w.$rootElement.find('body').hasClass('hundredpercent')).toBeFalsy(); }); it('should check if builder has active master or not', function() { expect(w.hasActiveMaster(builders[3])).toBeTruthy(); expect(w.hasActiveMaster(builders[4])).toBeFalsy(); }); it('should rerender the waterfall on resize', function() { spyOn(w, 'render').and.callThrough(); expect(w.render).not.toHaveBeenCalled(); angular.element($window).triggerHandler('resize'); expect(w.render).toHaveBeenCalled(); }); it('should rerender the waterfall on builds data 
change', function() { dataService.when('builds', builds); spyOn(w, 'render').and.callThrough(); expect(w.render).not.toHaveBeenCalled(); // force load more w.buildLimit = 0; w.loadMore(); $timeout.flush(); expect(w.render).toHaveBeenCalled(); }); it('should rerender the waterfall on masters data change', function() { spyOn(w, 'render').and.callThrough(); expect(w.render).not.toHaveBeenCalled(); w.$scope.masters.onChange(); expect(w.render).toHaveBeenCalled(); }); it('should rerender the waterfall on builders data change', function() { spyOn(w, 'render').and.callThrough(); expect(w.render).not.toHaveBeenCalled(); w.all_builders.onChange(); expect(w.render).toHaveBeenCalled(); }); it('should lazy load data on scroll', function() { spyOn(w, 'getHeight').and.returnValue(900); const e = d3.select('.inner-content'); const n = e.node(); w.loadMore.calls.reset(); let callCount = w.loadMore.calls.count(); expect(callCount).toBe(0); angular.element(n).triggerHandler('scroll'); callCount = w.loadMore.calls.count(); expect(callCount).toBe(1); }); it('height should be scalable', function() { const height = w.getInnerHeight(); const group = bbSettingsService.getSettingsGroup(); const oldSetting = group.scaling_waterfall.value; w.incrementScaleFactor(); w.render(); const newHeight = w.getInnerHeight(); expect(newHeight).toBe(height * 1.5); const newSetting = group.scaling_waterfall.value; expect(newSetting).toBe(oldSetting * 1.5); }); it('should have string representations of result codes', function() { const testBuild = { complete: false, started_at: 0 }; expect(w.getResultClassFromThing(testBuild)).toBe('pending'); testBuild.complete = true; expect(w.getResultClassFromThing(testBuild)).toBe('unknown'); const results = { 0: 'success', 1: 'warnings', 2: 'failure', 3: 'skipped', 4: 'exception', 5: 'cancelled' }; for (let i = 0; i <= 5; i++) { testBuild.results = i; expect(w.getResultClassFromThing(testBuild)).toBe(results[i]); } }); }); 
buildbot-3.4.0/www/waterfall_view/src/module/modal/000077500000000000000000000000001413250514000223245ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/src/module/modal/modal.controller.js000066400000000000000000000012561413250514000261440ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class WaterfallModal { constructor($scope, $uibModalInstance, selectedBuild) { this.$uibModalInstance = $uibModalInstance; this.selectedBuild = selectedBuild; $scope.$on('$stateChangeStart', () => { return this.close(); }); } close() { return this.$uibModalInstance.close(); } } angular.module('waterfall_view') .controller('waterfallModalController', ['$scope', '$uibModalInstance', 'selectedBuild', WaterfallModal]); buildbot-3.4.0/www/waterfall_view/src/module/modal/modal.controller.spec.js000066400000000000000000000035441413250514000270770ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ beforeEach(() => // Mock modalService angular.mock.module(function($provide) { $provide.service('$uibModalInstance', function() { return {close() {}}; }); }) ); describe('Waterfall modal controller', function() { let $rootScope, $uibModalInstance, scope; let createController = ($rootScope = ($uibModalInstance = (scope = null))); const injected = function($injector) { const $controller = $injector.get('$controller'); $rootScope = $injector.get('$rootScope'); $uibModalInstance = $injector.get('$uibModalInstance'); scope = $rootScope.$new(); createController = () => $controller('waterfallModalController as m', { $scope: scope, selectedBuild: {} }) ; }; beforeEach(inject(injected)); it('should be defined', function() { createController(); const { m } = 
scope; expect(m).toBeDefined(); // close function should be to defined expect(m.close).toBeDefined(); expect(typeof m.close).toBe('function'); }); it('should call close() on stateChangeStart event', function() { createController(); const { m } = scope; spyOn(m, 'close'); $rootScope.$broadcast('$stateChangeStart'); expect(m.close).toHaveBeenCalled(); }); it('should call $uibModalInstance.close on close()', function() { createController(); const { m } = scope; spyOn($uibModalInstance, 'close'); expect($uibModalInstance.close).not.toHaveBeenCalled(); m.close(); expect($uibModalInstance.close).toHaveBeenCalled(); }); }); buildbot-3.4.0/www/waterfall_view/src/module/modal/modal.style.less000066400000000000000000000001311413250514000254420ustar00rootroot00000000000000.modal-small { .modal-dialog { width: 80%; } .fa { cursor: pointer; } } buildbot-3.4.0/www/waterfall_view/src/module/modal/modal.tpl.jade000066400000000000000000000004041413250514000250410ustar00rootroot00000000000000// Show build summary for the selected build in a modal window .modal-header i.fa.fa-times.pull-right(ng-click='modal.close()') h4.modal-title Build summary .modal-body buildsummary(ng-if='modal.selectedBuild' buildid='modal.selectedBuild.buildid')buildbot-3.4.0/www/waterfall_view/src/module/scale/000077500000000000000000000000001413250514000223175ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/src/module/scale/scale.service.js000066400000000000000000000057711413250514000254150ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ class ScaleService { constructor() { let Service; return (Service = class Service { constructor(d3) { this.d3 = d3; } // Returns x scale getX(builders, width) { builders.map(builder => builder.builderid); return this.d3.scale.ordinal() 
.domain(builders.map(builder => builder.builderid)) .rangeRoundBands([0, width], 0.05); // 5% padding } // Returns y scale getY(groups, gap, height) { let group; const H = height; const I = H - ((groups.length - 1) * gap); let T = 0; for (group of Array.from(groups)) { T += (group.max - group.min); } class Y { // date to coordinate getCoord(date) { const periods = []; for (let id = 0; id < groups.length; id++) { group = groups[id]; if (group.min <= date && date <= group.max) { periods.push(date - group.min); let sum = 0; for (let period of Array.from(periods)) { sum += period; } return H - ((I / T) * sum) - (id * gap); } else { periods.push(group.max - group.min); } } return undefined; } // coordinate to date invert(coordinate) { const periods = []; for (let id = 0; id < groups.length; id++) { group = groups[id]; let sum = 0; for (let period of Array.from(periods)) { sum += period; } const date = (((H - coordinate - (id * gap)) * (T / I)) - sum) + group.min; if (group.min <= date && date <= group.max) { return date; } periods.push(group.max - group.min); } return undefined; } } return new Y(); } // Returns an id to name scale getBuilderName(builders) { return this.d3.scale.ordinal() .domain(builders.map(builder => builder.builderid)) .range(builders.map(builder => builder.name)); } }); } } angular.module('waterfall_view') .factory('scaleService', [ScaleService]); buildbot-3.4.0/www/waterfall_view/src/module/scale/scale.service.spec.js000066400000000000000000000070011413250514000263320ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ import * as d3 from 'd3'; describe('Scale service', function() { let builders, scale; const groups = [{ // Y.M.D - h:m:s min: 1325376000, // 2012.01.01 - 0:0:0 max: 1325548800 } // 2012.01.03 - 0:0:0 , { min: 1395104461, // 2014.03.18 - 1:1:1 max: 1396450952 } // 2014.04.02 - 15:2:32 ]; let 
scaleService = (scale = (builders = null)); const injected = function($injector) { const $rootScope = $injector.get('$rootScope'); scaleService = $injector.get('scaleService'); scale = new scaleService(d3); builders = [{ builderid: 1, name: 'builder1' } , { builderid: 2, name: 'builder2' } , { builderid: 3, name: 'builder3' } , { builderid: 4, name: 'builder4' } ]; $rootScope.$digest(); }; beforeEach(inject(injected)); it('should be defined', function() { expect(scaleService).toBeDefined(); expect(scale).toBeDefined(); // getX is a function expect(scale.getX).toBeDefined(); expect(typeof scale.getX).toBe('function'); // getY is a function expect(scale.getY).toBeDefined(); expect(typeof scale.getY).toBe('function'); // getBuilderName is a function expect(scale.getBuilderName).toBeDefined(); expect(typeof scale.getBuilderName).toBe('function'); }); it('should return a builderid to X scale', function() { // Get new scale, range: 100 const idToX = scale.getX(builders, 100); // A build with smaller builderid should come first for (let i = 0; i < builders.length; i += 2) { const builder = builders[i]; const a = idToX(builders[i].builderid); const b = idToX(builders[i+1].builderid) || 100; expect(a).toBeLessThan(b); } // Out of domain expect(idToX(8)).toBeUndefined(); }); it('should return a build length to height scale', function() { // gap: 5, range: 100 let date; const idToY = scale.getY(groups, 5, 100); // Check gap size expect(idToY.getCoord(groups[0].max) - idToY.getCoord(groups[1].min)).toBe(5); // All dates are in domain const dates = [ 1325376000, // 2012.01.01 - 0:0:0 1325386000, // 2012.01.01 - 2:46:40 1396328527 // 2014.04.01 - 5:2:7 ]; for (date of Array.from(dates)) { // date -> coordinate -> date, the starting and the ending date should be equal expect(idToY.invert(idToY.getCoord(date))).toEqual(date); } // Later times have greater Y coordinate expect(idToY.getCoord(date)).toBeGreaterThan(idToY.getCoord(date + 10000)); // Out of domain 
expect(idToY.getCoord(1359731101)).toBeUndefined(); expect(idToY.invert(120)).toBeUndefined(); }); it('should return a builderid to name scale', function() { // Get new scale const idToName = scale.getBuilderName(builders); // The return value should be the name of the builder Array.from(builders).map((builder) => expect(idToName(builder.builderid)).toEqual(builder.name)); }); }); buildbot-3.4.0/www/waterfall_view/src/module/waterfall.config.js000066400000000000000000000032701413250514000250150ustar00rootroot00000000000000class Waterfall { constructor(bbSettingsServiceProvider) { bbSettingsServiceProvider.addSettingsGroup({ name: 'Waterfall', caption: 'Waterfall related settings', items: [{ type: 'integer', name: 'scaling_waterfall', caption: 'Scaling factor', default_value: 1 } , { type: 'integer', name: 'min_column_width_waterfall', caption: 'Minimum column width (px)', default_value: 40 } , { type: 'integer', name: 'lazy_limit_waterfall', caption: 'Lazy loading limit', default_value: 40 } , { type: 'integer', name: 'idle_threshold_waterfall', caption: 'Idle time threshold in unix time stamp (eg. 
300 = 5 min)', default_value: 300 } , { type: 'bool', name: 'number_background_waterfall', caption: 'Build number background', default_value: false } , { type: 'bool', name: 'show_builders_without_builds', caption: 'Show builders without builds', default_value: false } , { type: 'bool', name: 'show_old_builders', caption: 'Show old builders', default_value: false } ]}); } } angular.module('waterfall_view') .config(['bbSettingsServiceProvider', Waterfall]); buildbot-3.4.0/www/waterfall_view/src/module/waterfall.route.js000066400000000000000000000021571413250514000247110ustar00rootroot00000000000000// Register new state class WaterfallState { constructor($stateProvider, glMenuServiceProvider) { // Name of the state const name = 'waterfall'; // Configuration glMenuServiceProvider.addGroup({ name, caption: 'Waterfall View', icon: 'bar-chart-o', order: 5 }); const cfg = { group: name, caption: 'Waterfall View' }; // Register new state const state = { controller: `${name}Controller`, controllerAs: "w", template: require('./waterfall.tpl.jade'), name, url: `/${name}?tags`, data: cfg, reloadOnSearch: false }; $stateProvider.state(state); } } angular.module('waterfall_view') .config(['$stateProvider', 'glMenuServiceProvider', WaterfallState]) .config(['$locationProvider', function($locationProvider) { $locationProvider.hashPrefix(''); }]) .run([ '$rootScope', '$location', function($rootScope, $location) { $rootScope.location = $location } ])buildbot-3.4.0/www/waterfall_view/src/module/waterfall.route.spec.js000066400000000000000000000006161413250514000256400ustar00rootroot00000000000000describe('Waterfall view', function() { let $state = null; const injected = $injector => $state = $injector.get('$state'); beforeEach(inject(injected)); it('should register a new state with the correct configuration', function() { const name = 'waterfall'; const states = $state.get().map(state => state.name); expect(states).toContain(name); }); }); 
buildbot-3.4.0/www/waterfall_view/src/module/waterfall.tpl.jade000066400000000000000000000011321413250514000246310ustar00rootroot00000000000000.waterfall .load-indicator(ng-show='w.loading') .spinner i.fa.fa-circle-o-notch.fa-spin.fa-2x p loading p Tags: span(ng-show="w.all_tags.length > 0" ng-repeat="tag in w.all_tags") span.builder-tag.label.clickable.label-default(ng-click="w.toggleTag(tag)" ng-class="w.isTagFiltered(tag) ? 'label-success': 'label-default'") | {{ tag }} |   span.label.clickable.label-danger(ng-click="w.clearTags()") x .header .header-content .content .inner-content .svg-container buildbot-3.4.0/www/waterfall_view/src/styles/000077500000000000000000000000001413250514000212665ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/src/styles/styles.less000066400000000000000000000073331413250514000235070ustar00rootroot00000000000000 @import "../module/**/*.less"; @navbar-height: 52px; @main-color: #3498db; .waterfall { position: absolute; top: @navbar-height; left: 20px; bottom: 0; right: 0; width: auto; overflow-x: auto; overflow-y: hidden; .load-indicator { position: fixed; top: @navbar-height; left: 20px; padding-right: 20px; width: 100%; height: 100%; z-index: 900; background-color: #ffffff; display: table; .spinner { display: table-cell; vertical-align: middle; text-align: center; p { font-weight: 300; margin-top: 10px; } } } .header { position: relative; z-index: 1; height: 100px; line-height: 100px; margin-bottom: 2px; // @FIXME: add a bit of margin-right in order to be able to see the rotated titles. 
margin-right: -150px; vertical-align: middle; .header-content { height: 100%; overflow: hidden; svg { width: 100%; height: 100%; } .axis { .domain { fill: none; } &.x { .tick { a { text-decoration: none; } text { fill: @main-color; font-size: 12px; } line { stroke-width: 20px; } } } } } } .content { position: relative; height: 100%; .inner-content { position: absolute; top: 0; bottom: 110px; overflow-y: auto; overflow-x: visible; width: 100%; .svg-container { position: relative; width: 100%; height: 16000px; svg { width: 100%; height: 100%; .chart { .builder { .build { cursor: pointer; rect, polygon { stroke: @main-color; stroke-width: 1px; } polygon { fill: #fff; } .buildstep { text { font-size: 12px; } } } text.id { fill: @main-color; font-size: 12px; text-anchor: middle; } circle { stroke: @main-color; stroke-width: 1px; } } } .axis { .domain { fill: none; } &.y { .tick { text { fill: @main-color; font-size: 10px; } line { stroke: @main-color; stroke-width: 1px; } } } } } } } } .fill { &.success { fill: #88DD33; } &.warnings { fill: #FFAA33; } &.failure { fill: #EE8888; } &.skipped { fill: #AADDEE; } &.exception { fill: #CC66CC; } &.retry { fill: #EECCCC; } // ? &.cancelled { fill: #EECCCC; } &.pending { fill: #E7D100; } &.unknown { fill: #EEEEEE; } } .stroke { &.success { stroke: #88DD33; } &.warnings { stroke: #FFAA33; } &.failure { stroke: #EE8888; } &.skipped { stroke: #AADDEE; } &.exception { stroke: #CC66CC; } &.retry { stroke: #EECCCC; } // ? &.cancelled { stroke: #EECCCC; } &.pending { stroke: #E7D100; } &.unknown { stroke: #EEEEEE; } } } .svg-tooltip { pointer-events: none; } buildbot-3.4.0/www/waterfall_view/src/tests.webpack.js000066400000000000000000000005041413250514000230550ustar00rootroot00000000000000// This file is an entry point for angular tests // Avoids some weird issues when using webpack + angular. 
import 'angular'; import 'angular-mocks/angular-mocks'; import './module/main.module.js' import '../test/d3/d3.service.js' const context = require.context('./', true, /\.spec.js$/); context.keys().forEach(context); buildbot-3.4.0/www/waterfall_view/test/000077500000000000000000000000001413250514000201335ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/test/d3/000077500000000000000000000000001413250514000204415ustar00rootroot00000000000000buildbot-3.4.0/www/waterfall_view/test/d3/d3.service.js000066400000000000000000000006171413250514000227500ustar00rootroot00000000000000// fake d3service for tests. // d3Service is supposed to be provided by the main www/base app // and is loading d3 asynchronously on demand import * as d3 from 'd3'; class D3 { constructor($q) { const d = $q.defer(); // Resolve function d.resolve(d3); return {get() { return d.promise; }}; } } angular.module('bbData') .service('d3Service', ['$q', D3]); buildbot-3.4.0/www/waterfall_view/test/main.js000066400000000000000000000000751413250514000214170ustar00rootroot00000000000000// Mocked module dependency angular.module('ngAnimate', []); buildbot-3.4.0/www/waterfall_view/webpack.config.js000066400000000000000000000013741413250514000223770ustar00rootroot00000000000000'use strict'; const common = require('buildbot-build-common'); const env = require('yargs').argv.env; const pkg = require('./package.json'); var event = process.env.npm_lifecycle_event; var isTest = event === 'test' || event === 'test-watch'; var isProd = env === 'prod'; module.exports = function() { return common.createTemplateWebpackConfig({ entry: { scripts: './src/module/main.module.js', styles: './src/styles/styles.less', }, libraryName: pkg.name, pluginName: pkg.plugin_name, dirname: __dirname, isTest: isTest, isProd: isProd, outputPath: __dirname + '/buildbot_waterfall_view/static', extractStyles: true, provideJquery: true, }); }(); 
buildbot-3.4.0/www/waterfall_view/yarn.lock000066400000000000000000011447711413250514000210160ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.7.4.tgz#37e864532200cb6b50ee9a4045f5f817840166ab" integrity sha512-+bYbx56j4nYBmpsWtnPUsKW3NdnYxbqyfrP2w9wILBuHzdfIKz9prieZK0DFPyIzkjYVUe4QkusGL07r5pXznQ== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helpers" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" convert-source-map "^1.7.0" debug "^4.1.0" json5 "^2.1.0" lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" "@babel/generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.7.4.tgz#db651e2840ca9aa66f327dcec1dc5f5fa9611369" integrity sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg== dependencies: "@babel/types" "^7.7.4" jsesc "^2.5.1" lodash "^4.17.13" source-map "^0.5.0" "@babel/helper-annotate-as-pure@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.4.tgz#bb3faf1e74b74bd547e867e48f551fa6b098b6ce" integrity sha512-2BQmQgECKzYKFPpiycoF9tlb5HA4lrVyAmLLVK177EcQAqjVLciUb2/R+n1boQ9y5ENV3uz2ZqiNw7QMBBw1Og== dependencies: "@babel/types" "^7.7.4" "@babel/helper-builder-binary-assignment-operator-visitor@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.4.tgz#5f73f2b28580e224b5b9bd03146a4015d6217f5f" integrity sha512-Biq/d/WtvfftWZ9Uf39hbPBYDUo986m5Bb4zhkeYDGUllF43D+nUe5M6Vuo6/8JDK/0YX/uBdeoQpyaNhNugZQ== dependencies: "@babel/helper-explode-assignable-expression" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-call-delegate@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.7.4.tgz#621b83e596722b50c0066f9dc37d3232e461b801" integrity sha512-8JH9/B7J7tCYJ2PpWVpw9JhPuEVHztagNVuQAFBVFYluRMlpG7F1CgKEgGeL6KFqcsIa92ZYVj6DSc0XwmN1ZA== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-create-regexp-features-plugin@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.4.tgz#6d5762359fd34f4da1500e4cff9955b5299aaf59" integrity sha512-Mt+jBKaxL0zfOIWrfQpnfYCN7/rS6GKx6CCCfuoqVVd+17R8zNDlzVYmIi9qyb2wOk002NsmSTDymkIygDUH7A== dependencies: "@babel/helper-regex" "^7.4.4" regexpu-core "^4.6.0" "@babel/helper-define-map@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz#2841bf92eb8bd9c906851546fe6b9d45e162f176" integrity sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-explode-assignable-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.4.tgz#fa700878e008d85dc51ba43e9fb835cddfe05c84" integrity sha512-2/SicuFrNSXsZNBxe5UGdLr+HZg+raWBLE9vC98bdYOKX/U6PY0mdGlYUJdtTDPSU0Lw0PNbKKDpwYHJLn2jLg== dependencies: "@babel/traverse" "^7.7.4" 
"@babel/types" "^7.7.4" "@babel/helper-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz#ab6e041e7135d436d8f0a3eca15de5b67a341a2e" integrity sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ== dependencies: "@babel/helper-get-function-arity" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-get-function-arity@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz#cb46348d2f8808e632f0ab048172130e636005f0" integrity sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA== dependencies: "@babel/types" "^7.7.4" "@babel/helper-hoist-variables@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.4.tgz#612384e3d823fdfaaf9fce31550fe5d4db0f3d12" integrity sha512-wQC4xyvc1Jo/FnLirL6CEgPgPCa8M74tOdjWpRhQYapz5JC7u3NYU1zCVoVAGCE3EaIP9T1A3iW0WLJ+reZlpQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-member-expression-to-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.4.tgz#356438e2569df7321a8326644d4b790d2122cb74" integrity sha512-9KcA1X2E3OjXl/ykfMMInBK+uVdfIVakVe7W7Lg3wfXUNyS3Q1HWLFRwZIjhqiCGbslummPDnmb7vIekS0C1vw== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-imports@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.7.4.tgz#e5a92529f8888bf319a6376abfbd1cebc491ad91" integrity sha512-dGcrX6K9l8258WFjyDLJwuVKxR4XZfU0/vTUgOQYWEnRD8mgr+p4d6fCUMq/ys0h4CCt/S5JhbvtyErjWouAUQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-transforms@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.7.4.tgz#8d7cdb1e1f8ea3d8c38b067345924ac4f8e0879a" integrity sha512-ehGBu4mXrhs0FxAqN8tWkzF8GSIGAiEumu4ONZ/hD9M88uHcD+Yu2ttKfOCgwzoesJOJrtQh7trI5YPbRtMmnA== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-simple-access" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-optimise-call-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.4.tgz#034af31370d2995242aa4df402c3b7794b2dcdf2" integrity sha512-VB7gWZ2fDkSuqW6b1AKXkJWO5NyNI3bFL/kK79/30moK57blr6NbH8xcl2XcKCwOmJosftWunZqfO84IGq3ZZg== dependencies: "@babel/types" "^7.7.4" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.4.tgz#c68c2407350d9af0e061ed6726afb4fff16d0234" integrity sha512-Sk4xmtVdM9sA/jCI80f+KS+Md+ZHIpjuqmYPk1M7F/upHou5e4ReYmExAiu6PVe65BhJPZA2CY9x9k4BqE5klw== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-wrap-function" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" 
"@babel/helper-replace-supers@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.7.4.tgz#3c881a6a6a7571275a72d82e6107126ec9e2cdd2" integrity sha512-pP0tfgg9hsZWo5ZboYGuBn/bbYT/hdLPVSS4NMmiRJdwWhP0IznPwN9AE1JwyGsjSPLC364I0Qh5p+EPkGPNpg== dependencies: "@babel/helper-member-expression-to-functions" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-simple-access@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.7.4.tgz#a169a0adb1b5f418cfc19f22586b2ebf58a9a294" integrity sha512-zK7THeEXfan7UlWsG2A6CI/L9jVnI5+xxKZOdej39Y0YtDYKx9raHk5F2EtK9K8DHRTihYwg20ADt9S36GR78A== dependencies: "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-split-export-declaration@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz#57292af60443c4a3622cf74040ddc28e68336fd8" integrity sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug== dependencies: "@babel/types" "^7.7.4" "@babel/helper-validator-identifier@^7.12.11": version "7.12.11" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== "@babel/helper-wrap-function@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz#37ab7fed5150e22d9d7266e830072c0cdd8baace" integrity sha512-VsfzZt6wmsocOaVU0OokwrIytHND55yvyT4BPB9AIIgwr8+x7617hetdJTsuGwygN5RC6mxA9EJztTjuwm2ofg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" 
"@babel/helpers@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.7.4.tgz#62c215b9e6c712dadc15a9a0dcab76c92a940302" integrity sha512-ak5NGZGJ6LV85Q1Zc9gn2n+ayXOizryhjSUBTdu5ih1tlVCJeuQENzc4ItyCVhINVXvIT/ZQ4mheGIsfBkpskg== dependencies: "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/highlight@^7.0.0": version "7.5.0" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" "@babel/parser@^7.6.0", "@babel/parser@^7.9.6": version "7.13.9" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.13.9.tgz#ca34cb95e1c2dd126863a84465ae8ef66114be99" integrity sha512-nEUfRiARCcaVo3ny3ZQjURjHQZUo/JkEw7rLlSZy/psWGnvwXFtPcr6jb7Yb41DVW5LTe6KRq9LGleRNsg1Frw== "@babel/parser@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.7.4.tgz#75ab2d7110c2cf2fa949959afb05fa346d2231bb" integrity sha512-jIwvLO0zCL+O/LmEJQjWA75MQTWwx3c3u2JOTDK5D3/9egrWRRA0/0hk9XXywYnXZVVpzrBYeIQTmhwUaePI9g== "@babel/plugin-proposal-async-generator-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz#0351c5ac0a9e927845fffd5b82af476947b7ce6d" integrity sha512-1ypyZvGRXriY/QP668+s8sFr2mqinhkRDMPSQLNghCQE+GAkFtp+wkHVvg2+Hdki8gwP+NFzJBJ/N1BfzCCDEw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-proposal-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.4.tgz#dde64a7f127691758cbfed6cf70de0fa5879d52d" integrity 
sha512-StH+nGAdO6qDB1l8sZ5UBV8AC3F2VW2I8Vfld73TMKyptMU9DY5YsJAS8U81+vEtxcH3Y/La0wG0btDrhpnhjQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.7.4.tgz#7700a6bfda771d8dc81973249eac416c6b4c697d" integrity sha512-wQvt3akcBTfLU/wYoqm/ws7YOAQKu8EVJEvHip/mzkNtjaclQoCCIqKXFP5/eyfnfbQCDV3OLRIK3mIVyXuZlw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.7.4.tgz#cc57849894a5c774214178c8ab64f6334ec8af71" integrity sha512-rnpnZR3/iWKmiQyJ3LKJpSwLDcX/nSXhdLk4Aq/tXOApIvyu7qoabrige0ylsAJffaUC51WiBu209Q0U+86OWQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.7.4.tgz#ec21e8aeb09ec6711bc0a39ca49520abee1de379" integrity sha512-DyM7U2bnsQerCQ+sejcTNZh8KQEUuC3ufzdnVnSiUv/qoGJp2Z3hanKL18KDhsBT5Wj6a7CMT5mdyCNJsEaA9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.4.tgz#7c239ccaf09470dbe1d453d50057460e84517ebb" integrity sha512-cHgqHgYvffluZk85dJ02vloErm3Y6xtH+2noOBOJ2kXOJH3aVCDnj5eR/lVNlTnYu4hndAPJD3rTFjW3qee0PA== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-async-generators@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.7.4.tgz#331aaf310a10c80c44a66b238b6e49132bd3c889" integrity sha512-Li4+EjSpBgxcsmeEF8IFcfV/+yJGxHXDirDkEoyFjumuwbmfCVHUt0HuowD/iGM7OhIRyXJH9YXxqiH6N815+g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import@^7.2.0", "@babel/plugin-syntax-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.7.4.tgz#29ca3b4415abfe4a5ec381e903862ad1a54c3aec" integrity sha512-jHQW0vbRGvwQNgyVxwDh4yuXu4bH1f5/EICJLAhl1SblLs2CDhrsmCk+v5XLdE9wxtAFRyxx+P//Iw+a5L/tTg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.7.4.tgz#86e63f7d2e22f9e27129ac4e83ea989a382e86cc" integrity sha512-QpGupahTQW1mHRXddMG5srgpHWqRLwJnJZKXTigB9RPFCCGbDGCgBeM/iC82ICXp414WeYx/tD54w7M2qRqTMg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.7.4.tgz#47cf220d19d6d0d7b154304701f468fc1cc6ff46" integrity sha512-mObR+r+KZq0XhRVS2BrBKBpr5jqrqzlPvS9C9vuOf5ilSwzloAl7RPWLrgKdWS6IreaVrjHxTjtyqFiOisaCwg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.7.4.tgz#a3e38f59f4b6233867b4a92dcb0ee05b2c334aa6" integrity sha512-4ZSuzWgFxqHRE31Glu+fEr/MirNZOMYmD/0BhBWyLyOOQz/gTAl7QmWm2hX1QxEIXsr2vkdlwxIzTyiYRC4xcQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-top-level-await@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.4.tgz#bd7d8fa7b9fee793a36e4027fd6dd1aa32f946da" integrity sha512-wdsOw0MvkL1UIgiQ/IFr3ETcfv1xb8RMM0H9wbiDyLaJFyiDg5oZvDLCXosIXmFeIlweML5iOBXAkqddkYNizg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-arrow-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.7.4.tgz#76309bd578addd8aee3b379d809c802305a98a12" integrity sha512-zUXy3e8jBNPiffmqkHRNDdZM2r8DWhCB7HhcoyZjiK1TxYEluLHAvQuYnTT+ARqRpabWqy/NHkO6e3MsYB5YfA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.4.tgz#694cbeae6d613a34ef0292713fa42fb45c4470ba" integrity sha512-zpUTZphp5nHokuy8yLlyafxCJ0rSlFoSHypTUWgpdwoDXWQcseaect7cJ8Ppk6nunOM6+5rPMkod4OYKPR5MUg== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.7.4.tgz#d0d9d5c269c78eaea76227ace214b8d01e4d837b" integrity sha512-kqtQzwtKcpPclHYjLK//3lH8OFsCDuDJBaFhVwf8kqdnF6MN4l618UDlcA7TfRs3FayrHj+svYnSX8MC9zmUyQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-block-scoping@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.7.4.tgz#200aad0dcd6bb80372f94d9e628ea062c58bf224" integrity 
sha512-2VBe9u0G+fDt9B5OV5DQH4KBf5DoiNkwFKOz0TCvBWvdAN2rOykCTkrL+jTLxfCAm76l9Qo5OqL7HBOx2dWggg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" lodash "^4.17.13" "@babel/plugin-transform-classes@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.4.tgz#c92c14be0a1399e15df72667067a8f510c9400ec" integrity sha512-sK1mjWat7K+buWRuImEzjNf68qrKcrddtpQo3swi9j7dUcG6y6R6+Di039QN2bD1dykeswlagupEmpOatFHHUg== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-define-map" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" globals "^11.1.0" "@babel/plugin-transform-computed-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.7.4.tgz#e856c1628d3238ffe12d668eb42559f79a81910d" integrity sha512-bSNsOsZnlpLLyQew35rl4Fma3yKWqK3ImWMSC/Nc+6nGjC9s5NFWAer1YQ899/6s9HxO2zQC1WoFNfkOqRkqRQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-destructuring@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.7.4.tgz#2b713729e5054a1135097b6a67da1b6fe8789267" integrity sha512-4jFMXI1Cu2aXbcXXl8Lr6YubCn6Oc7k9lLsu8v61TZh+1jny2BWmdtvY9zSUlLdGUvcy9DMAWyZEOqjsbeg/wA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-dotall-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.4.tgz#f7ccda61118c5b7a2599a72d5e3210884a021e96" integrity sha512-mk0cH1zyMa/XHeb6LOTXTbG7uIJ8Rrjlzu91pUx/KS3JpcgaTDwMS8kM+ar8SLOvlL2Lofi4CGBAjCo3a2x+lw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" 
"@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-duplicate-keys@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.7.4.tgz#3d21731a42e3f598a73835299dd0169c3b90ac91" integrity sha512-g1y4/G6xGWMD85Tlft5XedGaZBCIVN+/P0bs6eabmcPP9egFleMAo65OOjlhcz1njpwagyY3t0nsQC9oTFegJA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-exponentiation-operator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.7.4.tgz#dd30c0191e3a1ba19bcc7e389bdfddc0729d5db9" integrity sha512-MCqiLfCKm6KEA1dglf6Uqq1ElDIZwFuzz1WH5mTf8k2uQSxEJMbOIEh7IZv7uichr7PMfi5YVSrr1vz+ipp7AQ== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-for-of@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.7.4.tgz#248800e3a5e507b1f103d8b4ca998e77c63932bc" integrity sha512-zZ1fD1B8keYtEcKF+M1TROfeHTKnijcVQm0yO/Yu1f7qoDoxEIc/+GX6Go430Bg84eM/xwPFp0+h4EbZg7epAA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.4.tgz#75a6d3303d50db638ff8b5385d12451c865025b1" integrity sha512-E/x09TvjHNhsULs2IusN+aJNRV5zKwxu1cpirZyRPw+FyyIKEHPXTsadj48bVpc1R5Qq1B5ZkzumuFLytnbT6g== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.7.4.tgz#27fe87d2b5017a2a5a34d1c41a6b9f6a6262643e" integrity 
sha512-X2MSV7LfJFm4aZfxd0yLVFrEXAgPqYoDG53Br/tCKiKYfX0MjVjQeWPIhPHHsCqzwQANq+FLN786fF5rgLS+gw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-member-expression-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.7.4.tgz#aee127f2f3339fc34ce5e3055d7ffbf7aa26f19a" integrity sha512-9VMwMO7i69LHTesL0RdGy93JU6a+qOPuvB4F4d0kR0zyVjJRVJRaoaGjhtki6SzQUu8yen/vxPKN6CWnCUw6bA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-amd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.7.4.tgz#276b3845ca2b228f2995e453adc2e6f54d72fb71" integrity sha512-/542/5LNA18YDtg1F+QHvvUSlxdvjZoD/aldQwkq+E3WCkbEjNSN9zdrOXaSlfg3IfGi22ijzecklF/A7kVZFQ== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-commonjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.4.tgz#bee4386e550446343dd52a571eda47851ff857a3" integrity sha512-k8iVS7Jhc367IcNF53KCwIXtKAH7czev866ThsTgy8CwlXjnKZna2VHwChglzLleYrcHz1eQEIJlGRQxB53nqA== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.7.4" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-systemjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.4.tgz#cd98152339d3e763dfe838b7d4273edaf520bb30" integrity sha512-y2c96hmcsUi6LrMqvmNDPBBiGCiQu0aYqpHatVVu6kD4mFEXKjyNxd/drc18XXAf9dv7UXjrZwBVmTTGaGP8iw== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-umd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.4.tgz#1027c355a118de0aae9fee00ad7813c584d9061f" integrity sha512-u2B8TIi0qZI4j8q4C51ktfO7E3cQ0qnaXFI1/OXITordD40tt17g/sXqgNNCcMTcBFKrUPcGDx+TBJuZxLx7tw== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-named-capturing-groups-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.4.tgz#fb3bcc4ee4198e7385805007373d6b6f42c98220" integrity sha512-jBUkiqLKvUWpv9GLSuHUFYdmHg0ujC1JEYoZUfeOOfNydZXp1sXObgyPatpcwjWgsdBGsagWW0cdJpX/DO2jMw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/plugin-transform-new-target@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.7.4.tgz#4a0753d2d60639437be07b592a9e58ee00720167" integrity sha512-CnPRiNtOG1vRodnsyGX37bHQleHE14B9dnnlgSeEs3ek3fHN1A1SScglTCg1sfbe7sRQ2BUcpgpTpWSfMKz3gg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-object-super@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.7.4.tgz#48488937a2d586c0148451bf51af9d7dda567262" integrity sha512-ho+dAEhC2aRnff2JCA0SAK7V2R62zJd/7dmtoe7MHcso4C2mS+vZjn1Pb1pCVZvJs1mgsvv5+7sT+m3Bysb6eg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/plugin-transform-parameters@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.7.4.tgz#da4555c97f39b51ac089d31c7380f03bca4075ce" integrity 
sha512-VJwhVePWPa0DqE9vcfptaJSzNDKrWU/4FbYCjZERtmqEs05g3UMXnYMZoXja7JAJ7Y7sPZipwm/pGApZt7wHlw== dependencies: "@babel/helper-call-delegate" "^7.7.4" "@babel/helper-get-function-arity" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-property-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.7.4.tgz#2388d6505ef89b266103f450f9167e6bd73f98c2" integrity sha512-MatJhlC4iHsIskWYyawl53KuHrt+kALSADLQQ/HkhTjX954fkxIEh4q5slL4oRAnsm/eDoZ4q0CIZpcqBuxhJQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-regenerator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.4.tgz#d18eac0312a70152d7d914cbed2dc3999601cfc0" integrity sha512-e7MWl5UJvmPEwFJTwkBlPmqixCtr9yAASBqff4ggXTNicZiwbF8Eefzm6NVgfiBp7JdAGItecnctKTgH44q2Jw== dependencies: regenerator-transform "^0.14.0" "@babel/plugin-transform-reserved-words@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.7.4.tgz#6a7cf123ad175bb5c69aec8f6f0770387ed3f1eb" integrity sha512-OrPiUB5s5XvkCO1lS7D8ZtHcswIC57j62acAnJZKqGGnHP+TIc/ljQSrgdX/QyOTdEK5COAhuc820Hi1q2UgLQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.7.4.tgz#51fe458c1c1fa98a8b07934f4ed38b6cd62177a6" integrity sha512-O8kSkS5fP74Ad/8pfsCMGa8sBRdLxYoSReaARRNSz3FbFQj3z/QUvoUmJ28gn9BO93YfnXc3j+Xyaqe8cKDNBQ== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" resolve "^1.8.1" semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.7.4.tgz#74a0a9b2f6d67a684c6fbfd5f0458eb7ba99891e" integrity sha512-q+suddWRfIcnyG5YiDP58sT65AJDZSUhXQDZE3r04AuqD6d/XLaQPPXSBzP2zGerkgBivqtQm9XKGLuHqBID6Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.7.4.tgz#aa673b356fe6b7e70d69b6e33a17fef641008578" integrity sha512-8OSs0FLe5/80cndziPlg4R0K6HcWSM0zyNhHhLsmw/Nc5MaA49cAsnoJ/t/YZf8qkG7fD+UjTRaApVDB526d7Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-sticky-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.7.4.tgz#ffb68c05090c30732076b1285dc1401b404a123c" integrity sha512-Ls2NASyL6qtVe1H1hXts9yuEeONV2TJZmplLONkMPUG158CtmnrzW5Q5teibM5UVOFjG0D3IC5mzXR6pPpUY7A== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.0.0" "@babel/plugin-transform-template-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.7.4.tgz#1eb6411736dd3fe87dbd20cc6668e5121c17d604" integrity sha512-sA+KxLwF3QwGj5abMHkHgshp9+rRz+oY9uoRil4CyLtgEuE/88dpkeWgNk5qKVsJE9iSfly3nvHapdRiIS2wnQ== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-typeof-symbol@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.7.4.tgz#3174626214f2d6de322882e498a38e8371b2140e" integrity sha512-KQPUQ/7mqe2m0B8VecdyaW5XcQYaePyl9R7IsKd+irzj6jvbhoGnRE+M0aNkyAzI07VfUQ9266L5xMARitV3wg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-unicode-regex@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.4.tgz#a3c0f65b117c4c81c5b6484f2a5e7b95346b83ae" integrity sha512-N77UUIV+WCvE+5yHw+oks3m18/umd7y392Zv7mYTpFqHtkpcc+QUz+gLJNTWVlWROIWeLqY0f3OjZxV5TcXnRw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/preset-env@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.7.4.tgz#ccaf309ae8d1ee2409c85a4e2b5e280ceee830f8" integrity sha512-Dg+ciGJjwvC1NIe/DGblMbcGq1HOtKbw8RLl4nIjlfcILKEOkWT/vRqPpumswABEBVudii6dnVwrBtzD7ibm4g== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-proposal-async-generator-functions" "^7.7.4" "@babel/plugin-proposal-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-syntax-top-level-await" "^7.7.4" "@babel/plugin-transform-arrow-functions" "^7.7.4" "@babel/plugin-transform-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions" "^7.7.4" "@babel/plugin-transform-block-scoping" "^7.7.4" "@babel/plugin-transform-classes" "^7.7.4" "@babel/plugin-transform-computed-properties" "^7.7.4" "@babel/plugin-transform-destructuring" "^7.7.4" "@babel/plugin-transform-dotall-regex" "^7.7.4" "@babel/plugin-transform-duplicate-keys" "^7.7.4" "@babel/plugin-transform-exponentiation-operator" "^7.7.4" "@babel/plugin-transform-for-of" "^7.7.4" "@babel/plugin-transform-function-name" "^7.7.4" "@babel/plugin-transform-literals" "^7.7.4" 
"@babel/plugin-transform-member-expression-literals" "^7.7.4" "@babel/plugin-transform-modules-amd" "^7.7.4" "@babel/plugin-transform-modules-commonjs" "^7.7.4" "@babel/plugin-transform-modules-systemjs" "^7.7.4" "@babel/plugin-transform-modules-umd" "^7.7.4" "@babel/plugin-transform-named-capturing-groups-regex" "^7.7.4" "@babel/plugin-transform-new-target" "^7.7.4" "@babel/plugin-transform-object-super" "^7.7.4" "@babel/plugin-transform-parameters" "^7.7.4" "@babel/plugin-transform-property-literals" "^7.7.4" "@babel/plugin-transform-regenerator" "^7.7.4" "@babel/plugin-transform-reserved-words" "^7.7.4" "@babel/plugin-transform-shorthand-properties" "^7.7.4" "@babel/plugin-transform-spread" "^7.7.4" "@babel/plugin-transform-sticky-regex" "^7.7.4" "@babel/plugin-transform-template-literals" "^7.7.4" "@babel/plugin-transform-typeof-symbol" "^7.7.4" "@babel/plugin-transform-unicode-regex" "^7.7.4" "@babel/types" "^7.7.4" browserslist "^4.6.0" core-js-compat "^3.1.1" invariant "^2.2.2" js-levenshtein "^1.1.3" semver "^5.5.0" "@babel/runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.7.4.tgz#b23a856751e4bf099262f867767889c0e3fe175b" integrity sha512-r24eVUUr0QqNZa+qrImUk8fn5SPhHq+IfYvIoIMg0do3GdK9sMdiLKP3GYVVaxpPKORgm8KRKaNTEhAjgIpLMw== dependencies: regenerator-runtime "^0.13.2" "@babel/template@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.7.4.tgz#428a7d9eecffe27deac0a98e23bf8e3675d2a77b" integrity sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw== dependencies: "@babel/code-frame" "^7.0.0" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" "@babel/traverse@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.7.4.tgz#9c1e7c60fb679fe4fcfaa42500833333c2058558" integrity sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw== dependencies: 
"@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.13" "@babel/types@^7.6.1", "@babel/types@^7.9.6": version "7.13.0" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.13.0.tgz#74424d2816f0171b4100f0ab34e9a374efdf7f80" integrity sha512-hE+HE8rnG1Z6Wzo+MhaKE5lM5eMx71T4EHJgku2E3xIfaULhDcxiiRxUYgwX8qwP1BBSlag+TdGOt6JAidIZTA== dependencies: "@babel/helper-validator-identifier" "^7.12.11" lodash "^4.17.19" to-fast-properties "^2.0.0" "@babel/types@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.7.4.tgz#516570d539e44ddf308c07569c258ff94fde9193" integrity sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA== dependencies: esutils "^2.0.2" lodash "^4.17.13" to-fast-properties "^2.0.0" "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== "@types/glob@^7.1.1": version "7.1.1" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== dependencies: "@types/events" "*" "@types/minimatch" "*" "@types/node" "*" "@types/minimatch@*": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== "@types/node@*": version "12.12.12" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.12.tgz#529bc3e73dbb35dd9e90b0a1c83606a9d3264bdb" integrity 
sha512-MGuvYJrPU0HUwqF7LqvIj50RZUX23Z+m583KBygKYUZLlZ88n6w28XRNJRJgsHukLEnLz6w6SvxZoLgbr5wLqQ== "@uirouter/angularjs@^1.0.15": version "1.0.23" resolved "https://registry.yarnpkg.com/@uirouter/angularjs/-/angularjs-1.0.23.tgz#aeec0f96b0c42187c5044ef244ba6ccb75a5d835" integrity sha512-r4hLSw7R3mwXGC5Sq7yxNlBK1sSzQUm/1MzigwwYRHoMO5uKcBPUhxFYx5U7kufP2Xl1165KeZvRsLCh0/Z1ng== dependencies: "@uirouter/core" "6.0.1" "@uirouter/core@6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/@uirouter/core/-/core-6.0.1.tgz#93b02a5d178a7ab7313f34b7b3f019a000d23396" integrity sha512-mHCutiHtDDRKYmrJ92XPKDoSb2bgqaDyUpHdF4hUE+riwgkCvGdBjL8u+VDTcV3slBAk6B0LBIOIajjWkkObbQ== "@webassemblyjs/ast@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== dependencies: "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@webassemblyjs/floating-point-hex-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== "@webassemblyjs/helper-api-error@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== "@webassemblyjs/helper-buffer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" integrity 
sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== "@webassemblyjs/helper-code-frame@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== dependencies: "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/helper-fsm@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" integrity sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== "@webassemblyjs/helper-module-context@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== dependencies: "@webassemblyjs/ast" "1.8.5" mamacro "^0.0.3" "@webassemblyjs/helper-wasm-bytecode@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== "@webassemblyjs/helper-wasm-section@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/ieee754@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" integrity sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== "@webassemblyjs/wasm-edit@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" integrity sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/helper-wasm-section" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-opt" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/wasm-gen@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wasm-opt@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" integrity sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wasm-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" integrity sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wast-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/floating-point-hex-parser" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-code-frame" "1.8.5" "@webassemblyjs/helper-fsm" "1.8.5" "@xtuc/long" "4.2.2" "@webassemblyjs/wast-printer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" integrity 
sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== abbrev@1.0.x: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU= accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== dependencies: mime-types "~2.1.24" negotiator "0.6.2" acorn@^6.2.1: version "6.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.3.0.tgz#0087509119ffa4fc0a0041d1e93a417e68cb856e" integrity sha512-/czfa8BwS88b9gWQVhc8eknunSA2DoJpJyTQkhheIf5E48u1N0R4q/YxxsAeqRrmK9TQ/uYfgLDfZo91UlANIA== acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" integrity sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8= ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" integrity 
sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: version "3.4.1" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" integrity sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== ajv@^5.0.0: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= dependencies: co "^4.6.0" fast-deep-equal "^1.0.0" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.3.0" ajv@^6.1.0, ajv@^6.10.2, ajv@^6.5.5: version "6.10.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== dependencies: fast-deep-equal "^2.0.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" integrity sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU= angular-animate@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-animate/-/angular-animate-1.7.9.tgz#a397f82434c1e7ed5b7a298fa70fc3de989a6785" integrity sha512-fV+AISy/HTzurQH2ngsJg+lLIvfu0ahc1h4AYKauaXVw97rZc2k4iUA1bMstiEyClsdayQX568kjQc1NK+oYhw== angular-mocks@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-mocks/-/angular-mocks-1.7.9.tgz#0a3b7e28b9a493b4e3010ed2b0f69a68e9b4f79b" integrity sha512-LQRqqiV3sZ7NTHBnNmLT0bXtE5e81t97+hkJ56oU0k3dqKv1s6F+nBWRlOVzqHWPGFOiPS8ZJVdrS8DFzHyNIA== angular-ui-bootstrap@^2.5.6: version "2.5.6" resolved "https://registry.yarnpkg.com/angular-ui-bootstrap/-/angular-ui-bootstrap-2.5.6.tgz#23937322ec641a6fbee16498cc32452aa199e7c5" integrity 
sha512-yzcHpPMLQl0232nDzm5P4iAFTFQ9dMw0QgFLuKYbDj9M0xJ62z0oudYD/Lvh1pWfRsukiytP4Xj6BHOSrSXP8A== angular@^1.7.9, angular@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/angular/-/angular-1.8.0.tgz#b1ec179887869215cab6dfd0df2e42caa65b1b51" integrity sha512-VdaMx+Qk0Skla7B5gw77a8hzlcOakwF8mjlW13DpIWIDlfqwAbSSLfd8N/qZnzEmQF4jC4iofInd3gE7vL8ZZg== ansi-colors@^3.0.0: version "3.2.4" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== ansi-html@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-regex@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.0, ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" anymatch@^2.0.0: version "2.0.0" 
resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== dependencies: micromatch "^3.1.4" normalize-path "^2.1.1" anymatch@~3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" aproba@^1.0.3, aproba@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" arr-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-union@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= array-flatten@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= array-flatten@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= array-unique@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= arraybuffer.slice@~0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675" integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog== asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= asn1.js@^4.0.0: version "4.10.1" resolved 
"https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== dependencies: bn.js "^4.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-never@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/assert-never/-/assert-never-1.2.1.tgz#11f0e363bf146205fb08193b5c7b90f4d1cf44fe" integrity sha512-TaTivMB6pYI1kXwrFlEhLeGfOqoDNdTxjCdwRfFFkEA30Eu+k48W34nlok2EYWJfFFzqaEmichdNM7th6M5HNw== assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assert@^1.1.1: version "1.5.0" resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== dependencies: object-assign "^4.1.1" util "0.10.3" assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= async-each@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async-limiter@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" integrity 
sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== async@1.x: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= async@^2.0.0, async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== dependencies: lodash "^4.17.14" asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= atob@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== autoprefixer@^9.5.1: version "9.7.2" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.7.2.tgz#26cf729fbb709323b40171a874304884dcceffed" integrity sha512-LCAfcdej1182uVvPOZnytbq61AhnOZ/4JelDaJGDeNwewyU1AMaNthcHsyz1NRjTmd2FkurMckLWfkHg3Z//KA== dependencies: browserslist "^4.7.3" caniuse-lite "^1.0.30001010" chalk "^2.4.2" normalize-range "^0.1.2" num2fraction "^1.2.2" postcss "^7.0.23" postcss-value-parser "^4.0.2" aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.26.0: version "6.26.0" resolved 
"https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" js-tokens "^3.0.2" babel-generator@^6.18.0: version "6.26.1" resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== dependencies: babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" detect-indent "^4.0.0" jsesc "^1.3.0" lodash "^4.17.4" source-map "^0.5.7" trim-right "^1.0.1" babel-loader@^8.0.5: version "8.0.6" resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb" integrity sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw== dependencies: find-cache-dir "^2.0.0" loader-utils "^1.0.2" mkdirp "^0.5.1" pify "^4.0.1" babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= dependencies: babel-runtime "^6.22.0" babel-plugin-dynamic-import-node@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== dependencies: object.assign "^4.1.0" babel-runtime@^6.0.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= dependencies: core-js "^2.4.0" regenerator-runtime "^0.11.0" babel-template@^6.16.0: version "6.26.0" 
resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= dependencies: babel-runtime "^6.26.0" babel-traverse "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" lodash "^4.17.4" babel-traverse@^6.18.0, babel-traverse@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= dependencies: babel-code-frame "^6.26.0" babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" debug "^2.6.8" globals "^9.18.0" invariant "^2.2.2" lodash "^4.17.4" babel-types@^6.18.0, babel-types@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: babel-runtime "^6.26.0" esutils "^2.0.2" lodash "^4.17.4" to-fast-properties "^1.0.3" babel-walk@3.0.0-canary-5: version "3.0.0-canary-5" resolved "https://registry.yarnpkg.com/babel-walk/-/babel-walk-3.0.0-canary-5.tgz#f66ecd7298357aee44955f235a6ef54219104b11" integrity sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw== dependencies: "@babel/types" "^7.9.6" babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= 
base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" integrity sha1-c5JncZI7Whl0etZmqlzUv5xunOg= base64-js@^1.0.2: version "1.3.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== base64id@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY= base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" class-utils "^0.3.5" component-emitter "^1.2.1" define-property "^1.0.0" isobject "^3.0.1" mixin-deep "^1.2.0" pascalcase "^0.1.1" batch@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" better-assert@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" integrity sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI= dependencies: callsite "1.0.0" big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== big.js@^5.2.2: version "5.2.2" resolved 
"https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^1.0.0: version "1.13.1" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binary-extensions@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c" integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow== blob@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.5.tgz#d680eeef25f8cd91ad533f5b01eed48e64caf683" integrity sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig== bluebird@^3.3.0, bluebird@^3.5.5: version "3.7.1" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.1.tgz#df70e302b471d7473489acf26a93d63b53f874de" integrity sha512-DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg== bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== body-parser@1.19.0, body-parser@^1.16.1: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== dependencies: bytes "3.1.0" content-type "~1.0.4" debug "2.6.9" depd "~1.1.2" http-errors "1.7.2" iconv-lite "0.4.24" on-finished "~2.3.0" qs "6.7.0" raw-body "2.4.0" 
type-is "~1.6.17" bonjour@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= dependencies: array-flatten "^2.1.0" deep-equal "^1.0.1" dns-equal "^1.0.0" dns-txt "^2.0.2" multicast-dns "^6.0.1" multicast-dns-service-types "^1.1.0" boolbase@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" braces@^2.3.1, braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" repeat-element "^1.1.2" snapdragon "^0.8.1" snapdragon-node "^2.0.1" split-string "^3.0.2" to-regex "^3.0.1" braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" brorand@^1.0.1, brorand@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" resolved 
"https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== dependencies: buffer-xor "^1.0.3" cipher-base "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.3" inherits "^2.0.1" safe-buffer "^5.0.1" browserify-cipher@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== dependencies: browserify-aes "^1.0.4" browserify-des "^1.0.0" evp_bytestokey "^1.0.0" browserify-des@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== dependencies: cipher-base "^1.0.1" des.js "^1.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" browserify-rsa@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= dependencies: bn.js "^4.1.0" randombytes "^2.0.1" browserify-sign@^4.0.0: version "4.0.4" resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= dependencies: bn.js "^4.1.1" browserify-rsa "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.2" elliptic "^6.0.0" inherits "^2.0.1" parse-asn1 "^5.0.0" browserify-zlib@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== dependencies: pako "~1.0.5" 
browserslist@^4.6.0, browserslist@^4.7.3: version "4.16.6" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== dependencies: caniuse-lite "^1.0.30001219" colorette "^1.2.2" electron-to-chromium "^1.3.723" escalade "^3.1.1" node-releases "^1.1.71" buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== buffer-alloc@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== dependencies: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" buffer-fill@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== buffer-indexof@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= buffer@^4.3.0: version "4.9.2" resolved 
"https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" isarray "^1.0.0" "buildbot-build-common@link:../build_common": version "1.0.0" dependencies: "@babel/core" "^7.4.3" "@babel/plugin-syntax-dynamic-import" "^7.2.0" "@babel/plugin-transform-runtime" "^7.4.3" "@babel/preset-env" "^7.4.3" "@babel/runtime" "^7.4.3" autoprefixer "^9.5.1" babel-loader "^8.0.5" css-loader "^2.1.1" file-loader "^3.0.1" html-webpack-plugin "^3.2.0" import-glob-loader "^1.1.0" istanbul-instrumenter-loader "^3.0.1" jasmine-core "^3.4.0" karma "^4.1.0" karma-chrome-launcher "^2.2.0" karma-coverage "^1.1.2" karma-jasmine "^2.0.1" karma-sourcemap-loader "^0.3.7" karma-spec-reporter "^0.0.32" karma-webpack "^3.0.5" less "^3.9.0" less-loader "^5.0.0" mini-css-extract-plugin "^0.6.0" node-libs-browser "^2.2.0" null-loader "^1.0.0" postcss-loader "^3.0.0" pug "^3.0.1" raw-loader "^2.0.0" style-loader "^0.23.1" webpack "^4.30.0" webpack-cli "^3.3.1" webpack-dev-server "^3.3.1" webpack-fix-style-only-entries "^0.2.1" webpack-shell-plugin "^0.5.0" "buildbot-data-js@link:../data_module": version "3.0.1" dependencies: angular "^1.7.9" builtin-status-codes@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= bytes@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== cacache@^12.0.2: version "12.0.3" resolved 
"https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== dependencies: bluebird "^3.5.5" chownr "^1.1.1" figgy-pudding "^3.5.1" glob "^7.1.4" graceful-fs "^4.1.15" infer-owner "^1.0.3" lru-cache "^5.1.1" mississippi "^3.0.0" mkdirp "^0.5.1" move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.3" ssri "^6.0.1" unique-filename "^1.1.1" y18n "^4.0.0" cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" component-emitter "^1.2.1" get-value "^2.0.6" has-value "^1.0.0" isobject "^3.0.1" set-value "^2.0.0" to-object-path "^0.3.0" union-value "^1.0.0" unset-value "^1.0.0" caller-callsite@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= dependencies: callsites "^2.0.0" caller-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= dependencies: caller-callsite "^2.0.0" callsite@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" integrity sha1-KAOY5dZkvXQDi28JBRU+borxvCA= callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= camel-case@3.0.x: version "3.0.0" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" integrity 
sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= dependencies: no-case "^2.2.0" upper-case "^1.1.1" camelcase-keys@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= dependencies: camelcase "^2.0.0" map-obj "^1.0.0" camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= camelcase@^5.0.0, camelcase@^5.2.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== caniuse-lite@^1.0.30001010, caniuse-lite@^1.0.30001219: version "1.0.30001228" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz#bfdc5942cd3326fa51ee0b42fbef4da9d492a7fa" integrity sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A== caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" 
character-parser@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" integrity sha1-x84o821LzZdE5f/CxfzeHHMmH8A= dependencies: is-regex "^1.0.3" chokidar@^2.0.2, chokidar@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== dependencies: anymatch "^2.0.0" async-each "^1.0.1" braces "^2.3.2" glob-parent "^3.1.0" inherits "^2.0.3" is-binary-path "^1.0.0" is-glob "^4.0.0" normalize-path "^3.0.0" path-is-absolute "^1.0.0" readdirp "^2.2.1" upath "^1.1.1" optionalDependencies: fsevents "^1.2.7" chokidar@^3.0.0: version "3.3.0" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6" integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A== dependencies: anymatch "~3.1.1" braces "~3.0.2" glob-parent "~5.1.0" is-binary-path "~2.1.0" is-glob "~4.0.1" normalize-path "~3.0.0" readdirp "~3.2.0" optionalDependencies: fsevents "~2.1.1" chownr@^1.1.1, chownr@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== chrome-trace-event@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== dependencies: tslib "^1.9.0" cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" integrity 
sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" define-property "^0.2.5" isobject "^3.0.0" static-extend "^0.1.1" clean-css@4.2.x: version "4.2.1" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" integrity sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== dependencies: source-map "~0.6.0" cliui@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== dependencies: string-width "^2.1.1" strip-ansi "^4.0.0" wrap-ansi "^2.0.0" cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== dependencies: string-width "^3.1.0" strip-ansi "^5.2.0" wrap-ansi "^5.1.0" clone@^2.1.1, clone@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= 
collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit "^1.0.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colorette@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== colors@^1.1.0, colors@^1.1.2: version "1.4.0" resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@2.17.x: version "2.17.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== commander@^2.20.0: version "2.20.3" resolved 
"https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@~2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= component-bind@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E= component-emitter@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== component-inherit@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" integrity sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM= compressible@~2.0.16: version "2.0.17" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.17.tgz#6e8c108a16ad58384a977f3a482ca20bff2f38c1" integrity sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw== dependencies: mime-db ">= 1.40.0 < 2" compression@^1.7.4: version "1.7.4" resolved 
"https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" bytes "3.0.0" compressible "~2.0.16" debug "2.6.9" on-headers "~1.0.2" safe-buffer "5.1.2" vary "~1.1.2" concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= concat-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" inherits "^2.0.3" readable-stream "^2.2.2" typedarray "^0.0.6" connect-history-api-fallback@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== connect@^3.6.0: version "3.7.0" resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== dependencies: debug "2.6.9" finalhandler "1.1.2" parseurl "~1.3.3" utils-merge "1.0.1" console-browserify@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= constantinople@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-4.0.1.tgz#0def113fa0e4dc8de83331a5cf79c8b325213151" integrity sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw== dependencies: "@babel/parser" "^7.6.0" "@babel/types" "^7.6.1" constants-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= content-disposition@0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== dependencies: safe-buffer "5.1.2" content-type@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.5.0, convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== dependencies: safe-buffer "~5.1.1" cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= cookie@0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" 
integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= cookie@0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== copy-concurrently@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== dependencies: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.0" copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= core-js-compat@^3.1.1: version "3.4.2" resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.4.2.tgz#652fa7c54652b7f6586a893e37001df55ea2ac37" integrity sha512-W0Aj+LM3EAxxjD0Kp2o4be8UlnxIZHNupBv2znqrheR4aY2nOn91794k/xoSp+SxqqriiZpTsSwBtZr60cbkwQ== dependencies: browserslist "^4.7.3" semver "^6.3.0" core-js@^2.4.0: version "2.6.10" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.10.tgz#8a5b8391f8cc7013da703411ce5b585706300d7f" integrity sha512-I39t74+4t+zau64EN1fE5v2W31Adtc/REhzWN+gWRRXg6WH5qAsZm62DHpQ1+Yhe4047T55jvzz7MUqF/dBBlA== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cosmiconfig@^5.0.0: version "5.2.1" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== dependencies: import-fresh "^2.0.0" is-directory 
"^0.3.1" js-yaml "^3.13.1" parse-json "^4.0.0" create-ecdh@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== dependencies: bn.js "^4.1.0" elliptic "^6.0.0" create-hash@^1.1.0, create-hash@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== dependencies: cipher-base "^1.0.1" inherits "^2.0.1" md5.js "^1.3.4" ripemd160 "^2.0.1" sha.js "^2.4.0" create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: version "1.1.7" resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== dependencies: cipher-base "^1.0.3" create-hash "^1.1.0" inherits "^2.0.1" ripemd160 "^2.0.0" safe-buffer "^5.0.1" sha.js "^2.4.8" cross-spawn@6.0.5, cross-spawn@^6.0.0: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== dependencies: nice-try "^1.0.4" path-key "^2.0.1" semver "^5.5.0" shebang-command "^1.2.0" which "^1.2.9" crypto-browserify@^3.11.0: version "3.12.0" resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== dependencies: browserify-cipher "^1.0.0" browserify-sign "^4.0.0" create-ecdh "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.0" diffie-hellman "^5.0.0" inherits "^2.0.1" 
pbkdf2 "^3.0.3" public-encrypt "^4.0.0" randombytes "^2.0.0" randomfill "^1.0.3" css-loader@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-2.1.1.tgz#d8254f72e412bb2238bb44dd674ffbef497333ea" integrity sha512-OcKJU/lt232vl1P9EEDamhoO9iKY3tIjY5GU+XDLblAykTdgs6Ux9P1hTHve8nFKy5KPpOXOsVI/hIwi3841+w== dependencies: camelcase "^5.2.0" icss-utils "^4.1.0" loader-utils "^1.2.3" normalize-path "^3.0.0" postcss "^7.0.14" postcss-modules-extract-imports "^2.0.0" postcss-modules-local-by-default "^2.0.6" postcss-modules-scope "^2.1.0" postcss-modules-values "^2.0.0" postcss-value-parser "^3.3.0" schema-utils "^1.0.0" css-select@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= dependencies: boolbase "~1.0.0" css-what "2.1" domutils "1.5.1" nth-check "~1.0.1" css-what@2.1: version "2.1.3" resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== cssesc@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== currently-unhandled@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= dependencies: array-find-index "^1.0.1" custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha1-XQKkaFCt8bSjF5RqOSj8y1v9BCU= cyclist@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= d3@^3.5.17: version "3.5.17" resolved "https://registry.yarnpkg.com/d3/-/d3-3.5.17.tgz#bc46748004378b21a360c9fc7cf5231790762fb8" integrity sha1-vEZ0gAQ3iyGjYMn8fPUjF5B2L7g= d@1, d@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" type "^1.0.1" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" date-format@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/date-format/-/date-format-2.1.0.tgz#31d5b5ea211cf5fd764cd38baf9d033df7e125cf" integrity sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA== dateformat@^1.0.6: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" integrity sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk= dependencies: get-stdin "^4.0.1" meow "^3.3.0" debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^3.1.1, debug@^3.2.5, debug@^3.2.6: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved 
"https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" debug@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== dependencies: ms "2.0.0" decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= decode-uri-component@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= deep-equal@^1.0.1: version "1.1.1" resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== dependencies: is-arguments "^1.0.4" is-date-object "^1.0.1" is-regex "^1.0.4" object-is "^1.0.1" object-keys "^1.1.1" regexp.prototype.flags "^1.2.0" deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= default-gateway@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" integrity 
sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== dependencies: execa "^1.0.0" ip-regex "^2.1.0" define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" define-property@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" isobject "^3.0.1" del@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== dependencies: "@types/glob" "^7.1.1" globby "^6.1.0" is-path-cwd "^2.0.0" is-path-in-cwd "^2.0.0" p-map "^2.0.0" pify "^4.0.1" rimraf "^2.6.3" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= des.js@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== dependencies: inherits "^2.0.1" minimalistic-assert "^1.0.0" destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= detect-indent@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= dependencies: repeating "^2.0.0" detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= detect-node@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw= diffie-hellman@^5.0.0: version "5.0.3" resolved 
"https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== dependencies: bn.js "^4.1.0" miller-rabin "^4.0.0" randombytes "^2.0.0" dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= dns-packet@^1.3.1: version "1.3.4" resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.4.tgz#e3455065824a2507ba886c55a89963bb107dec6f" integrity sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA== dependencies: ip "^1.1.0" safe-buffer "^5.0.1" dns-txt@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= dependencies: buffer-indexof "^1.0.0" doctypes@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" integrity sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk= dom-converter@^0.2: version "0.2.0" resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" dom-serialize@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" integrity sha1-ViromZ9Evl6jB29UGdzVnrQ6yVs= dependencies: custom-event "~1.0.0" ent "~2.2.0" extend "^3.0.0" void-elements "^2.0.0" dom-serializer@0: version "0.2.2" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" integrity 
sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== dependencies: domelementtype "^2.0.1" entities "^2.0.0" domain-browser@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== domelementtype@1, domelementtype@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== domelementtype@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d" integrity sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ== domhandler@^2.3.0: version "2.4.2" resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== dependencies: domelementtype "1" domutils@1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= dependencies: dom-serializer "0" domelementtype "1" domutils@^1.5.1: version "1.7.0" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== dependencies: dom-serializer "0" domelementtype "1" duplexify@^3.4.2, duplexify@^3.6.0: version "3.7.1" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity 
sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" inherits "^2.0.1" readable-stream "^2.0.0" stream-shift "^1.0.0" ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.3.723: version "1.3.738" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.738.tgz#aec24b091c82acbfabbdcce08076a703941d17ca" integrity sha512-vCMf4gDOpEylPSLPLSwAEsz+R3ShP02Y3cAKMZvTqule3XcPp7tgc/0ESI7IS6ZeyBlGClE50N53fIOkcIVnpw== elliptic@^6.0.0: version "6.5.4" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== dependencies: bn.js "^4.11.9" brorand "^1.1.0" hash.js "^1.0.0" hmac-drbg "^1.0.1" inherits "^2.0.4" minimalistic-assert "^1.0.1" minimalistic-crypto-utils "^1.0.1" emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== emojis-list@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= end-of-stream@^1.0.0, 
end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" engine.io-client@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.2.1.tgz#6f54c0475de487158a1a7c77d10178708b6add36" integrity sha512-y5AbkytWeM4jQr7m/koQLc5AxpRKC1hEVUb/s1FUAWEJq5AzJJ4NLvzuKPuxtDi5Mq755WuDvZ6Iv2rXj4PTzw== dependencies: component-emitter "1.2.1" component-inherit "0.0.3" debug "~3.1.0" engine.io-parser "~2.1.1" has-cors "1.1.0" indexof "0.0.1" parseqs "0.0.5" parseuri "0.0.5" ws "~3.3.1" xmlhttprequest-ssl "~1.5.4" yeast "0.1.2" engine.io-parser@~2.1.0, engine.io-parser@~2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6" integrity sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA== dependencies: after "0.8.2" arraybuffer.slice "~0.0.7" base64-arraybuffer "0.1.5" blob "0.0.5" has-binary2 "~1.0.2" engine.io@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.2.1.tgz#b60281c35484a70ee0351ea0ebff83ec8c9522a2" integrity sha512-+VlKzHzMhaU+GsCIg4AoXF1UdDFjHHwMmMKqMJNDNLlUlejz58FCy4LBqB2YVJskHGYl06BatYWKP2TVdVXE5w== dependencies: accepts "~1.3.4" base64id "1.0.0" cookie "0.3.1" debug "~3.1.0" engine.io-parser "~2.1.0" ws "~3.3.1" enhanced-resolve@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" tapable "^1.0.0" enhanced-resolve@^4.1.0: version "4.1.1" resolved 
"https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" integrity sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== dependencies: graceful-fs "^4.1.2" memory-fs "^0.5.0" tapable "^1.0.0" ent@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= entities@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4" integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw== errno@^0.1.1, errno@^0.1.3, errno@~0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== dependencies: prr "~1.0.1" error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es-abstract@^1.5.1: version "1.16.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.16.0.tgz#d3a26dc9c3283ac9750dca569586e976d9dcc06d" integrity sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg== dependencies: es-to-primitive "^1.2.0" function-bind "^1.1.1" has "^1.0.3" has-symbols "^1.0.0" is-callable "^1.1.4" is-regex "^1.0.4" object-inspect "^1.6.0" object-keys 
"^1.1.1" string.prototype.trimleft "^2.1.0" string.prototype.trimright "^2.1.0" es-to-primitive@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50: version "0.10.53" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== dependencies: es6-iterator "~2.0.3" es6-symbol "~3.1.3" next-tick "~1.0.0" es6-iterator@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= dependencies: d "1" es5-ext "^0.10.35" es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== dependencies: d "^1.0.1" ext "^1.1.2" escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved 
"https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@1.8.x: version "1.8.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" integrity sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg= dependencies: esprima "^2.7.1" estraverse "^1.9.1" esutils "^2.0.2" optionator "^0.8.1" optionalDependencies: source-map "~0.2.0" eslint-scope@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== dependencies: esrecurse "^4.1.0" estraverse "^4.1.1" esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esrecurse@^4.1.0: version "4.2.1" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== dependencies: estraverse "^4.1.0" estraverse@^1.9.1: version "1.9.3" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= estraverse@^4.1.0, estraverse@^4.1.1: version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity 
sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" integrity sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== eventsource@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== dependencies: original "^1.0.0" evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== dependencies: md5.js "^1.3.4" safe-buffer "^5.1.1" execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== dependencies: cross-spawn "^6.0.0" get-stream "^4.0.0" is-stream "^1.1.0" npm-run-path "^2.0.0" 
p-finally "^1.0.0" signal-exit "^3.0.0" strip-eof "^1.0.0" expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= dependencies: debug "^2.3.3" define-property "^0.2.5" extend-shallow "^2.0.1" posix-character-classes "^0.1.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= dependencies: homedir-polyfill "^1.0.1" express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== dependencies: accepts "~1.3.7" array-flatten "1.1.1" body-parser "1.19.0" content-disposition "0.5.3" content-type "~1.0.4" cookie "0.4.0" cookie-signature "1.0.6" debug "2.6.9" depd "~1.1.2" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" finalhandler "~1.1.2" fresh "0.5.2" merge-descriptors "1.0.1" methods "~1.1.2" on-finished "~2.3.0" parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.5" qs "6.7.0" range-parser "~1.2.1" safe-buffer "5.1.2" send "0.17.1" serve-static "1.14.1" setprototypeof "1.1.1" statuses "~1.5.0" type-is "~1.6.18" utils-merge "1.0.1" vary "~1.1.2" ext@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.2.0.tgz#8dd8d2dd21bcced3045be09621fa0cbf73908ba4" integrity sha512-0ccUQK/9e3NreLFg6K6np8aPyRgwycx+oFGtfx1dSp7Wj00Ozw9r05FgBRlzjf2XBM7LAzwgLyDscRrtSU91hA== dependencies: type "^2.0.0" extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity 
sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= dependencies: assign-symbols "^1.0.0" is-extendable "^1.0.1" extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== dependencies: array-unique "^0.3.2" define-property "^1.0.0" expand-brackets "^2.1.4" extend-shallow "^2.0.1" fragment-cache "^0.2.1" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= fast-deep-equal@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= fast-levenshtein@~2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= faye-websocket@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" integrity sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= dependencies: websocket-driver ">=0.5.1" faye-websocket@~0.11.1: version "0.11.3" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.3.tgz#5c0e9a8968e8912c286639fde977a8b209f2508e" integrity sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== dependencies: websocket-driver ">=0.5.1" figgy-pudding@^3.5.1: version "3.5.2" resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== file-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-3.0.1.tgz#f8e0ba0b599918b51adfe45d66d1e771ad560faa" integrity sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw== dependencies: loader-utils "^1.0.2" schema-utils "^1.0.0" fill-range@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= dependencies: extend-shallow "^2.0.1" is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range "^2.1.0" fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" integrity 
sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" finalhandler@1.1.2, finalhandler@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" on-finished "~2.3.0" parseurl "~1.3.3" statuses "~1.5.0" unpipe "~1.0.0" find-cache-dir@^2.0.0, find-cache-dir@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== dependencies: commondir "^1.0.1" make-dir "^2.0.0" pkg-dir "^3.0.0" find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= dependencies: path-exists "^2.0.0" pinkie-promise "^2.0.0" find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" findup-sync@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== dependencies: detect-file "^1.0.0" is-glob "^4.0.0" micromatch "^3.0.4" resolve-dir "^1.0.1" flatted@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.1.tgz#69e57caa8f0eacbc281d2e2cb458d46fdb449e08" integrity 
sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg== flush-write-stream@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== dependencies: inherits "^2.0.3" readable-stream "^2.3.6" follow-redirects@^1.0.0: version "1.13.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.0.tgz#b42e8d93a2a7eea5ed88633676d6597bc8e384db" integrity sha512-aq6gF1BEKje4a9i9+5jimNFIpq4Q1WiwBToeRK5NvZBd/TRsmW8BsJfOEGkr76TbOyPVD3OVDN910EcUNtRYEA== for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" forwarded@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= dependencies: map-cache "^0.2.2" fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity 
sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= from2@^2.1.0: version "2.3.0" resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= dependencies: inherits "^2.0.1" readable-stream "^2.0.0" fs-access@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" integrity sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o= dependencies: null-check "^1.0.0" fs-extra@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== dependencies: graceful-fs "^4.1.2" jsonfile "^4.0.0" universalify "^0.1.0" fs-minipass@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== dependencies: minipass "^2.6.0" fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= dependencies: graceful-fs "^4.1.2" iferr "^0.1.5" imurmurhash "^0.1.4" readable-stream "1 || 2" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.2.7: version "1.2.9" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: nan "^2.12.1" node-pre-gyp "^0.12.0" fsevents@~2.1.1: version "2.1.2" resolved 
"https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805" integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA== function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" has-unicode "^2.0.0" object-assign "^4.1.0" signal-exit "^3.0.0" string-width "^1.0.1" strip-ansi "^3.0.1" wide-align "^1.1.0" get-caller-file@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-caller-file@^2.0.1: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-stdin@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= get-stream@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== dependencies: pump "^3.0.0" get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-parent@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= dependencies: is-glob "^3.1.0" path-dirname "^1.0.0" glob-parent@~5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.0.tgz#5f4c1d1e748d30cd73ad2944b3577a81b081e8c2" integrity sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw== dependencies: is-glob "^4.0.1" glob@^5.0.13, glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" integrity sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E= dependencies: inflight "^1.0.4" inherits "2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^7.0.3, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" global-modules@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== dependencies: global-prefix "^3.0.0" global-modules@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" is-windows "^1.0.1" resolve-dir "^1.0.0" global-prefix@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= dependencies: expand-tilde "^2.0.2" homedir-polyfill "^1.0.1" ini "^1.3.4" is-windows "^1.0.1" which "^1.2.14" global-prefix@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== dependencies: ini "^1.3.5" kind-of "^6.0.2" which "^1.3.1" globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== globby@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" glob "^7.0.3" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: version "4.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity 
sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== "guanlecoja-ui@link:../guanlecoja-ui": version "2.0.0" dependencies: "@uirouter/angularjs" "^1.0.15" angular "^1.7.9" angular-animate "^1.7.9" angular-ui-bootstrap "^2.5.6" jquery "^3.4.0" lodash "^4.17.11" handle-thing@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754" integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ== handlebars@^4.0.1: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== dependencies: minimist "^1.2.5" neo-async "^2.6.0" source-map "^0.6.1" wordwrap "^1.0.0" optionalDependencies: uglify-js "^3.1.4" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.0: version "5.1.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: ajv "^6.5.5" har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-binary2@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/has-binary2/-/has-binary2-1.0.3.tgz#7776ac627f3ea77250cfc332dab7ddf5e4f5d11d" integrity sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw== dependencies: isarray "2.0.1" has-cors@1.1.0: version "1.1.0" 
resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk= has-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-symbols@^1.0.0, has-symbols@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= has-value@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= dependencies: get-value "^2.0.3" has-values "^0.1.4" isobject "^2.0.0" has-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: get-value "^2.0.6" has-values "^1.0.0" isobject "^3.0.0" has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= has-values@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= dependencies: is-number "^3.0.0" kind-of "^4.0.0" has@^1.0.1, has@^1.0.3: 
version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hash-base@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== dependencies: inherits "^2.0.3" minimalistic-assert "^1.0.1" he@1.2.x: version "1.2.0" resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== hmac-drbg@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= dependencies: hash.js "^1.0.3" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.1" homedir-polyfill@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== hpack.js@^2.1.6: version "2.1.6" resolved 
"https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= dependencies: inherits "^2.0.1" obuf "^1.0.0" readable-stream "^2.0.1" wbuf "^1.1.0" html-entities@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8= html-minifier@^3.2.3: version "3.5.21" resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== dependencies: camel-case "3.0.x" clean-css "4.2.x" commander "2.17.x" he "1.2.x" param-case "2.1.x" relateurl "0.2.x" uglify-js "3.4.x" html-webpack-plugin@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= dependencies: html-minifier "^3.2.3" loader-utils "^0.2.16" lodash "^4.17.3" pretty-error "^2.0.2" tapable "^1.0.0" toposort "^1.0.0" util.promisify "1.0.0" htmlparser2@^3.3.0: version "3.10.1" resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== dependencies: domelementtype "^1.3.1" domhandler "^2.3.0" domutils "^1.5.1" entities "^1.1.1" inherits "^2.0.1" readable-stream "^3.1.1" http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= http-errors@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" integrity 
sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" http-errors@~1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.0" statuses ">= 1.4.0 < 2" http-errors@~1.7.2: version "1.7.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== dependencies: depd "~1.1.2" inherits "2.0.4" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" "http-parser-js@>=0.4.0 <0.4.11": version "0.4.10" resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4" integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q= http-proxy-middleware@0.19.1: version "0.19.1" resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== dependencies: http-proxy "^1.17.0" is-glob "^4.0.0" lodash "^4.17.11" micromatch "^3.1.10" http-proxy@^1.13.0, http-proxy@^1.17.0: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" requires-port "^1.0.0" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" 
integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= iconv-lite@0.4.24, iconv-lite@^0.4.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" icss-replace-symbols@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= icss-utils@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== dependencies: postcss "^7.0.14" ieee754@^1.1.4: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== iferr@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= ignore-walk@^3.0.1: version "3.0.3" resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== dependencies: minimatch "^3.0.4" image-size@~0.5.0: version "0.5.5" resolved 
"https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= import-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= dependencies: import-from "^2.1.0" import-fresh@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= dependencies: caller-path "^2.0.0" resolve-from "^3.0.0" import-from@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" integrity sha1-M1238qev/VOqpHHUuAId7ja387E= dependencies: resolve-from "^3.0.0" import-glob-loader@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/import-glob-loader/-/import-glob-loader-1.1.0.tgz#98d84c0f661c8ba9f821d9ddb7c6b6dc8e97eca2" integrity sha1-mNhMD2Yci6n4Idndt8a23I6X7KI= dependencies: glob "^5.0.13" loader-utils "^0.2.10" import-local@2.0.0, import-local@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== dependencies: pkg-dir "^3.0.0" resolve-cwd "^2.0.0" imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= dependencies: repeating "^2.0.0" indexes-of@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= indexof@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= infer-owner@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: version "1.3.7" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84" integrity sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ== internal-ip@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" integrity 
sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== dependencies: default-gateway "^4.2.0" ipaddr.js "^1.9.0" interpret@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" invert-kv@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= ip@^1.1.0, ip@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= ipaddr.js@1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== ipaddr.js@^1.9.0: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== is-absolute-url@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" integrity 
sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== is-accessor-descriptor@^0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arguments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.0.4.tgz#3faf966c7cba0ff437fb31f6250082fcf0448cf3" integrity sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA== is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-callable@^1.1.4: version "1.1.4" resolved 
"https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== is-core-module@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== dependencies: has "^1.0.3" is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== dependencies: kind-of "^6.0.0" is-date-object@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" is-data-descriptor "^1.0.0" kind-of 
"^6.0.2" is-directory@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= is-expression@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-4.0.0.tgz#c33155962abf21d0afd2552514d67d2ec16fd2ab" integrity sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A== dependencies: acorn "^7.1.1" object-assign "^4.1.1" is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= is-extendable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^2.1.0, is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" integrity sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= 
is-glob@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-cwd@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== is-path-in-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== dependencies: is-path-inside "^2.1.0" is-path-inside@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== dependencies: path-is-inside "^1.0.2" is-plain-obj@^1.0.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= is-regex@^1.0.3, is-regex@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= dependencies: has "^1.0.1" is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-symbol@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== dependencies: has-symbols "^1.0.1" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity 
sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== is-wsl@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isarray@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.1.tgz#a37d94ed9cda2d59865c9f76fe596ee1f338741e" integrity sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4= isbinaryfile@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" integrity sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== dependencies: buffer-alloc "^1.2.0" isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul-instrumenter-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/istanbul-instrumenter-loader/-/istanbul-instrumenter-loader-3.0.1.tgz#9957bd59252b373fae5c52b7b5188e6fde2a0949" integrity 
sha512-a5SPObZgS0jB/ixaKSMdn6n/gXSrK2S6q/UfRJBT3e6gQmVjwZROTODQsYW5ZNwOu78hG62Y3fWlebaVOL0C+w== dependencies: convert-source-map "^1.5.0" istanbul-lib-instrument "^1.7.3" loader-utils "^1.1.0" schema-utils "^0.3.0" istanbul-lib-coverage@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0" integrity sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ== istanbul-lib-instrument@^1.7.3: version "1.10.2" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca" integrity sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A== dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" babylon "^6.18.0" istanbul-lib-coverage "^1.2.1" semver "^5.3.0" istanbul@^0.4.0: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" integrity sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs= dependencies: abbrev "1.0.x" async "1.x" escodegen "1.8.x" esprima "2.7.x" glob "^5.0.15" handlebars "^4.0.1" js-yaml "3.x" mkdirp "0.5.x" nopt "3.x" once "1.x" resolve "1.1.x" supports-color "^3.1.0" which "^1.1.1" wordwrap "^1.0.0" jasmine-core@^3.3, jasmine-core@^3.4.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-3.5.0.tgz#132c23e645af96d85c8bca13c8758b18429fc1e4" integrity sha512-nCeAiw37MIMA9w9IXso7bRaLl+c/ef3wnxsoSAlYrzS+Ot0zTG6nU8G/cIfGkqpkjX2wNaIW9RFG0TwIFnG6bA== jquery@^3.4.0, jquery@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.5.0.tgz#9980b97d9e4194611c36530e7dc46a58d7340fc9" integrity sha512-Xb7SVYMvygPxbFMpTFQiHh1J7HClEaThguL15N/Gg37Lri/qKyhRGZYzHRyLH8Stq3Aow0LsHO2O2ci86fCrNQ== js-levenshtein@^1.1.3: version "1.1.6" resolved 
"https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-stringify@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" integrity sha1-Fzb939lyTyijaCrcYjCufk6Weds= "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@3.x, js-yaml@^3.13.1: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= 
json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== json-schema-traverse@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= json3@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== json5@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== dependencies: minimist "^1.2.0" json5@^2.1.0: version "2.1.1" resolved 
"https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== dependencies: minimist "^1.2.0" jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= optionalDependencies: graceful-fs "^4.1.6" jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" jstransformer@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" integrity sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM= dependencies: is-promise "^2.0.0" promise "^7.0.1" karma-chrome-launcher@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" integrity sha512-uf/ZVpAabDBPvdPdveyk1EPgbnloPvFFGgmRhYLTDH7gEB4nZdSBk8yTU47w1g/drLSx5uMOkjKk7IWKfWg/+w== dependencies: fs-access "^1.0.0" which "^1.2.1" karma-coverage@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/karma-coverage/-/karma-coverage-1.1.2.tgz#cc09dceb589a83101aca5fe70c287645ef387689" integrity sha512-eQawj4Cl3z/CjxslYy9ariU4uDh7cCNFZHNWXWRpl0pNeblY/4wHR7M7boTYXWrn9bY0z2pZmr11eKje/S/hIw== dependencies: dateformat "^1.0.6" istanbul "^0.4.0" lodash "^4.17.0" minimatch "^3.0.0" source-map "^0.5.1" karma-jasmine@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-2.0.1.tgz#26e3e31f2faf272dd80ebb0e1898914cc3a19763" integrity sha512-iuC0hmr9b+SNn1DaUD2QEYtUxkS1J+bSJSn7ejdEexs7P8EYvA1CWkEdrDQ+8jVH3AgWlCNwjYsT1chjcNW9lA== dependencies: jasmine-core 
"^3.3" karma-sourcemap-loader@^0.3.7: version "0.3.7" resolved "https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8" integrity sha1-kTIsd/jxPUb+0GKwQuEAnUxFBdg= dependencies: graceful-fs "^4.1.2" karma-spec-reporter@^0.0.32: version "0.0.32" resolved "https://registry.yarnpkg.com/karma-spec-reporter/-/karma-spec-reporter-0.0.32.tgz#2e9c7207ea726771260259f82becb543209e440a" integrity sha1-LpxyB+pyZ3EmAln4K+y1QyCeRAo= dependencies: colors "^1.1.2" karma-webpack@^3.0.5: version "3.0.5" resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-3.0.5.tgz#1ff1e3a690fb73ae95ee95f9ab58f341cfc7b40f" integrity sha512-nRudGJWstvVuA6Tbju9tyGUfXTtI1UXMXoRHVmM2/78D0q6s/Ye2IC157PKNDC15PWFGR0mVIRtWLAdcfsRJoA== dependencies: async "^2.0.0" babel-runtime "^6.0.0" loader-utils "^1.0.0" lodash "^4.0.0" source-map "^0.5.6" webpack-dev-middleware "^2.0.6" karma@^4.1.0: version "4.4.1" resolved "https://registry.yarnpkg.com/karma/-/karma-4.4.1.tgz#6d9aaab037a31136dc074002620ee11e8c2e32ab" integrity sha512-L5SIaXEYqzrh6b1wqYC42tNsFMx2PWuxky84pK9coK09MvmL7mxii3G3bZBh/0rvD27lqDd0le9jyhzvwif73A== dependencies: bluebird "^3.3.0" body-parser "^1.16.1" braces "^3.0.2" chokidar "^3.0.0" colors "^1.1.0" connect "^3.6.0" di "^0.0.1" dom-serialize "^2.2.0" flatted "^2.0.0" glob "^7.1.1" graceful-fs "^4.1.2" http-proxy "^1.13.0" isbinaryfile "^3.0.0" lodash "^4.17.14" log4js "^4.0.0" mime "^2.3.1" minimatch "^3.0.2" optimist "^0.6.1" qjobs "^1.1.4" range-parser "^1.2.0" rimraf "^2.6.0" safe-buffer "^5.0.1" socket.io "2.1.1" source-map "^0.6.1" tmp "0.0.33" useragent "2.3.0" killable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved 
"https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== lcid@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== dependencies: invert-kv "^2.0.0" less-loader@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-5.0.0.tgz#498dde3a6c6c4f887458ee9ed3f086a12ad1b466" integrity sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg== dependencies: clone "^2.1.1" loader-utils "^1.1.0" pify "^4.0.1" less@^3.9.0: version "3.10.3" resolved "https://registry.yarnpkg.com/less/-/less-3.10.3.tgz#417a0975d5eeecc52cff4bcfa3c09d35781e6792" integrity sha512-vz32vqfgmoxF1h3K4J+yKCtajH0PWmjkIFgbs5d78E/c/e+UQTnI+lWK+1eQRE95PXM2mC3rJlLSSP9VQHnaow== dependencies: clone "^2.1.2" optionalDependencies: errno "^0.1.1" graceful-fs "^4.1.2" image-size "~0.5.0" mime "^1.4.1" mkdirp "^0.5.0" promise "^7.1.1" request "^2.83.0" source-map "~0.6.0" levn@~0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" pify "^2.0.0" pinkie-promise "^2.0.0" strip-bom "^2.0.0" loader-runner@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== loader-utils@1.2.3, loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== dependencies: big.js "^5.2.2" emojis-list "^2.0.0" json5 "^1.0.1" loader-utils@^0.2.10, loader-utils@^0.2.16: version "0.2.17" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= dependencies: big.js "^3.1.3" emojis-list "^2.0.0" json5 "^0.5.0" object-assign "^4.0.1" locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" lodash@^4.0.0, lodash@^4.17.0, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.3, lodash@^4.17.4: version "4.17.19" resolved 
"https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b" integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ== lodash@^4.17.19: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-symbols@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== dependencies: chalk "^2.0.1" log4js@^4.0.0: version "4.5.1" resolved "https://registry.yarnpkg.com/log4js/-/log4js-4.5.1.tgz#e543625e97d9e6f3e6e7c9fc196dd6ab2cae30b5" integrity sha512-EEEgFcE9bLgaYUKuozyFfytQM2wDHtXn4tAN41pkaxpNjAykv11GVdeI4tHtmPWW4Xrgh9R/2d7XYghDVjbKKw== dependencies: date-format "^2.0.0" debug "^4.1.1" flatted "^2.0.0" rfdc "^1.1.4" streamroller "^1.0.6" loglevel@^1.6.4: version "1.6.6" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.6.tgz#0ee6300cc058db6b3551fa1c4bf73b83bb771312" integrity sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ== loglevelnext@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/loglevelnext/-/loglevelnext-1.0.5.tgz#36fc4f5996d6640f539ff203ba819641680d75a2" integrity sha512-V/73qkPuJmx4BcBF19xPBr+0ZRVBhc4POxvZTZdMeXpJ4NItXSJ/MSwuFT0kQJlCbXvdlZoQQ/418bS1y9Jh6A== dependencies: es6-symbol "^3.1.1" object.assign "^4.1.0" loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" loud-rejection@^1.0.0, 
loud-rejection@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= dependencies: currently-unhandled "^0.4.1" signal-exit "^3.0.0" lower-case@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= lru-cache@4.1.x: version "4.1.5" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" make-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== dependencies: pify "^4.0.1" semver "^5.6.0" mamacro@^0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== map-age-cleaner@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== dependencies: p-defer "^1.0.0" map-cache@^0.2.2: version "0.2.2" resolved 
"https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" md5.js@^1.3.4: version "1.3.5" resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== dependencies: hash-base "^3.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= mem@^4.0.0: version "4.3.0" resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== dependencies: map-age-cleaner "^0.1.1" mimic-fn "^2.0.0" p-is-promise "^2.0.0" memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= dependencies: errno "^0.1.3" readable-stream "^2.0.1" memory-fs@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== dependencies: errno "^0.1.3" readable-stream "^2.0.1" 
meow@^3.3.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= dependencies: camelcase-keys "^2.0.0" decamelize "^1.1.2" loud-rejection "^1.0.0" map-obj "^1.0.1" minimist "^1.1.3" normalize-package-data "^2.3.4" object-assign "^4.0.1" read-pkg-up "^1.0.1" redent "^1.0.0" trim-newlines "^1.0.0" merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" braces "^2.3.1" define-property "^2.0.2" extend-shallow "^3.0.2" extglob "^2.0.4" fragment-cache "^0.2.1" kind-of "^6.0.2" nanomatch "^1.2.9" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.2" miller-rabin@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== dependencies: bn.js "^4.0.0" brorand "^1.0.1" mime-db@1.42.0, "mime-db@>= 1.40.0 < 2": version "1.42.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.42.0.tgz#3e252907b4c7adb906597b4b65636272cf9e7bac" integrity sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ== mime-types@^2.1.12, mime-types@~2.1.17, 
mime-types@~2.1.19, mime-types@~2.1.24: version "2.1.25" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.25.tgz#39772d46621f93e2a80a856c53b86a62156a6437" integrity sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg== dependencies: mime-db "1.42.0" mime@1.6.0, mime@^1.4.1: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.1.0, mime@^2.3.1, mime@^2.4.4: version "2.4.4" resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== mimic-fn@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== mini-css-extract-plugin@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz#a3f13372d6fcde912f3ee4cd039665704801e3b9" integrity sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw== dependencies: loader-utils "^1.1.0" normalize-url "^2.0.1" schema-utils "^1.0.0" webpack-sources "^1.1.0" minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimalistic-crypto-utils@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity 
sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= "minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= minipass@^2.6.0, minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" minizlib@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== dependencies: minipass "^2.9.0" mississippi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== dependencies: concat-stream "^1.5.0" duplexify "^3.4.2" end-of-stream "^1.1.0" flush-write-stream "^1.0.0" from2 "^2.1.0" parallel-transform "^1.1.0" pump "^3.0.0" pumpify "^1.3.3" stream-each "^1.1.0" through2 "^2.0.0" mixin-deep@^1.2.0: version "1.3.2" resolved 
"https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" move-concurrently@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= dependencies: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.3" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== ms@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== multicast-dns-service-types@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= multicast-dns@^6.0.1: version "6.2.3" resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" integrity 
sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== dependencies: dns-packet "^1.3.1" thunky "^1.0.2" nan@^2.12.1: version "2.14.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" define-property "^2.0.2" extend-shallow "^3.0.2" fragment-cache "^0.2.1" is-windows "^1.0.2" kind-of "^6.0.2" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" needle@^2.2.1: version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" negotiator@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== neo-async@^2.5.0, neo-async@^2.6.0, neo-async@^2.6.1: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== next-tick@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= nice-try@^1.0.4: version "1.0.5" resolved 
"https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== no-case@^2.2.0: version "2.3.2" resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== dependencies: lower-case "^1.1.1" node-forge@0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579" integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ== node-libs-browser@^2.2.0, node-libs-browser@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== dependencies: assert "^1.1.1" browserify-zlib "^0.2.0" buffer "^4.3.0" console-browserify "^1.1.0" constants-browserify "^1.0.0" crypto-browserify "^3.11.0" domain-browser "^1.1.1" events "^3.0.0" https-browserify "^1.0.0" os-browserify "^0.3.0" path-browserify "0.0.1" process "^0.11.10" punycode "^1.2.4" querystring-es3 "^0.2.0" readable-stream "^2.3.3" stream-browserify "^2.0.1" stream-http "^2.7.2" string_decoder "^1.0.0" timers-browserify "^2.0.4" tty-browserify "0.0.0" url "^0.11.0" util "^0.11.0" vm-browserify "^1.0.1" node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" needle "^2.2.1" nopt "^4.0.1" npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.2.7" rimraf "^2.6.1" semver 
"^5.3.0" tar "^4" node-releases@^1.1.71: version "1.1.72" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe" integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== nopt@3.x: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" integrity sha1-xkZdvwirzU2zWTF/eaxopkayj/k= dependencies: abbrev "1" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: remove-trailing-separator "^1.0.1" normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= normalize-url@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== dependencies: prepend-http "^2.0.0" query-string "^5.0.1" sort-keys "^2.0.0" npm-bundled@^1.0.1: version "1.0.6" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== npm-packlist@^1.1.6: version "1.4.6" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.6.tgz#53ba3ed11f8523079f1457376dd379ee4ea42ff4" integrity sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" gauge "~2.7.3" set-blocking "~2.0.0" nth-check@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== dependencies: boolbase "~1.0.0" null-check@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" integrity sha1-l33/1xdgErnsMNKjnbXPcqBDnt0= null-loader@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/null-loader/-/null-loader-1.0.0.tgz#90e85798e50e9dd1d568495a44e74829dec26744" integrity sha512-mYLDjDVTkjTlFoidxRhzO75rdcwfVXfw5G5zpj8sXnBkHtKJxMk4hTcRR4i5SOhDB6EvcQuYriy6IV23eq6uog== dependencies: loader-utils "^1.2.3" schema-utils "^1.0.0" num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" integrity sha1-8MaapQ78lbhmwYb0AKM3acsvEpE= object-copy@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" object-inspect@^1.6.0: version "1.7.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== object-is@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/object-is/-/object-is-1.0.1.tgz#0aa60ec9989a0b3ed795cf4d06f62cf1ad6539b6" integrity sha1-CqYOyZiaCz7Xlc9NBvYs8a1lObY= object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== dependencies: define-properties "^1.1.2" function-bind "^1.1.1" has-symbols "^1.0.0" object-keys "^1.0.11" object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= dependencies: define-properties "^1.1.2" es-abstract "^1.5.1" object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= dependencies: isobject "^3.0.1" obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== on-finished@~2.3.0: version "2.3.0" resolved 
"https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@1.x, once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" opn@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== dependencies: is-wsl "^1.1.0" optimist@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.6" levn "~0.3.0" prelude-ls "~1.1.2" type-check "~0.3.2" word-wrap "~1.2.3" original@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== dependencies: url-parse "^1.4.3" os-browserify@^0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-locale@^3.0.0, os-locale@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== dependencies: execa "^1.0.0" lcid "^2.0.0" mem "^4.0.0" os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== p-limit@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.1.tgz#aa07a788cc3151c939b5131f63570f0dd2009537" integrity 
sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg== dependencies: p-try "^2.0.0" p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-map@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== p-retry@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== dependencies: retry "^0.12.0" p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@~1.0.5: version "1.0.10" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.10.tgz#4328badb5086a426aa90f541977d4955da5c9732" integrity sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw== parallel-transform@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== dependencies: cyclist "^1.0.1" inherits "^2.0.3" readable-stream "^2.1.5" param-case@2.1.x: version "2.1.1" resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= dependencies: no-case "^2.2.0" 
parse-asn1@^5.0.0: version "5.1.5" resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== dependencies: asn1.js "^4.0.0" browserify-aes "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.0" pbkdf2 "^3.0.3" safe-buffer "^5.1.1" parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse-json@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= dependencies: error-ex "^1.3.1" json-parse-better-errors "^1.0.1" parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= parseqs@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" integrity sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0= dependencies: better-assert "~1.0.0" parseuri@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" integrity sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo= dependencies: better-assert "~1.0.0" parseurl@~1.3.2, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascalcase@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= 
path-browserify@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-is-inside@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= path-key@^2.0.0, path-key@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= path-parse@^1.0.6: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= 
path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify "^2.0.0" pinkie-promise "^2.0.0" pbkdf2@^3.0.3: version "3.0.17" resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" ripemd160 "^2.0.1" safe-buffer "^5.0.1" sha.js "^2.4.8" performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= picomatch@^2.0.4: version "2.1.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.1.1.tgz#ecdfbea7704adb5fe6fb47f9866c4c0e15e905c5" integrity sha512-OYMyqkKzK7blWO/+XZYP6w8hH0LDvkBvdvKukti+7kqYFCiEAk+gI3DWnryapc0Dau05ugGTy0foQ6mqn4AHYA== pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= pkg-dir@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== dependencies: find-up "^3.0.0" portfinder@^1.0.25: version "1.0.25" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.25.tgz#254fd337ffba869f4b9d37edc298059cb4d35eca" integrity sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg== dependencies: async "^2.6.2" debug "^3.1.1" mkdirp "^0.5.1" posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= postcss-load-config@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.0.tgz#c84d692b7bb7b41ddced94ee62e8ab31b417b003" integrity sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q== dependencies: cosmiconfig "^5.0.0" import-cwd "^2.0.0" postcss-loader@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== dependencies: loader-utils "^1.1.0" postcss "^7.0.0" postcss-load-config "^2.0.0" schema-utils "^1.0.0" postcss-modules-extract-imports@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== dependencies: postcss "^7.0.5" postcss-modules-local-by-default@^2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.6.tgz#dd9953f6dd476b5fd1ef2d8830c8929760b56e63" integrity sha512-oLUV5YNkeIBa0yQl7EYnxMgy4N6noxmiwZStaEJUSe2xPMcdNc8WmBQuQCx18H5psYbVxz8zoHk0RAAYZXP9gA== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-value-parser "^3.3.1" postcss-modules-scope@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.1.0.tgz#ad3f5bf7856114f6fcab901b0502e2a2bc39d4eb" integrity sha512-91Rjps0JnmtUB0cujlc8KIKCsJXWjzuxGeT/+Q2i2HXKZ7nBUeF9YQTZZTNvHVoNYj1AthsjnGLtqDUE0Op79A== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-modules-values@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-2.0.0.tgz#479b46dc0c5ca3dc7fa5270851836b9ec7152f64" integrity sha512-Ki7JZa7ff1N3EIMlPnGTZfUMe69FFwiQPnVSXC9mnn3jozCRBYIxiZd44yJOV2AmabOo4qFf8s0dC/+lweG7+w== dependencies: icss-replace-symbols "^1.1.0" postcss "^7.0.6" postcss-selector-parser@^6.0.0: version "6.0.2" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz#934cf799d016c83411859e09dcecade01286ec5c" integrity sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg== dependencies: cssesc "^3.0.0" indexes-of "^1.0.1" uniq "^1.0.1" postcss-value-parser@^3.3.0, postcss-value-parser@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== postcss-value-parser@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz#482282c09a42706d1fc9a069b73f44ec08391dc9" integrity 
sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ== postcss@^7.0.0, postcss@^7.0.14, postcss@^7.0.23, postcss@^7.0.5, postcss@^7.0.6: version "7.0.36" resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.36.tgz#056f8cffa939662a8f5905950c07d5285644dfcb" integrity sha512-BebJSIUMwJHRH0HAQoxN4u1CN86glsrwsW0q7T+/m44eXOUAxSNdHRkNZPYz5vVUbg17hFgOQDE7fZk7li3pZw== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= prepend-http@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= pretty-error@^2.0.2: version "2.1.1" resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= dependencies: renderkid "^2.0.1" utila "~0.4" private@^0.1.6: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= promise-inflight@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= promise@^7.0.1, promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== dependencies: asap "~2.0.3" proxy-addr@~2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== dependencies: forwarded "~0.1.2" ipaddr.js "1.9.0" prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= psl@^1.1.24: version "1.4.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.4.0.tgz#5dd26156cdb69fa1fdb8ab1991667d3f80ced7c2" integrity sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw== public-encrypt@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== dependencies: bn.js "^4.1.0" browserify-rsa "^4.0.0" create-hash "^1.1.0" parse-asn1 "^5.0.0" randombytes "^2.0.1" safe-buffer "^5.1.2" pug-attrs@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-3.0.0.tgz#b10451e0348165e31fad1cc23ebddd9dc7347c41" integrity 
sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA== dependencies: constantinople "^4.0.1" js-stringify "^1.0.2" pug-runtime "^3.0.0" pug-code-gen@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-3.0.2.tgz#ad190f4943133bf186b60b80de483100e132e2ce" integrity sha512-nJMhW16MbiGRiyR4miDTQMRWDgKplnHyeLvioEJYbk1RsPI3FuA3saEP8uwnTb2nTJEKBU90NFVWJBk4OU5qyg== dependencies: constantinople "^4.0.1" doctypes "^1.1.0" js-stringify "^1.0.2" pug-attrs "^3.0.0" pug-error "^2.0.0" pug-runtime "^3.0.0" void-elements "^3.1.0" with "^7.0.0" pug-error@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-2.0.0.tgz#5c62173cb09c34de2a2ce04f17b8adfec74d8ca5" integrity sha512-sjiUsi9M4RAGHktC1drQfCr5C5eriu24Lfbt4s+7SykztEOwVZtbFk1RRq0tzLxcMxMYTBR+zMQaG07J/btayQ== pug-filters@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-4.0.0.tgz#d3e49af5ba8472e9b7a66d980e707ce9d2cc9b5e" integrity sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A== dependencies: constantinople "^4.0.1" jstransformer "1.0.0" pug-error "^2.0.0" pug-walk "^2.0.0" resolve "^1.15.1" pug-lexer@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-5.0.1.tgz#ae44628c5bef9b190b665683b288ca9024b8b0d5" integrity sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w== dependencies: character-parser "^2.2.0" is-expression "^4.0.0" pug-error "^2.0.0" pug-linker@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-4.0.0.tgz#12cbc0594fc5a3e06b9fc59e6f93c146962a7708" integrity sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw== dependencies: pug-error "^2.0.0" pug-walk "^2.0.0" pug-load@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/pug-load/-/pug-load-3.0.0.tgz#9fd9cda52202b08adb11d25681fb9f34bd41b662" integrity sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ== dependencies: object-assign "^4.1.1" pug-walk "^2.0.0" pug-parser@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-6.0.0.tgz#a8fdc035863a95b2c1dc5ebf4ecf80b4e76a1260" integrity sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw== dependencies: pug-error "^2.0.0" token-stream "1.0.0" pug-runtime@^3.0.0, pug-runtime@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-3.0.1.tgz#f636976204723f35a8c5f6fad6acda2a191b83d7" integrity sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg== pug-strip-comments@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz#f94b07fd6b495523330f490a7f554b4ff876303e" integrity sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ== dependencies: pug-error "^2.0.0" pug-walk@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-2.0.0.tgz#417aabc29232bb4499b5b5069a2b2d2a24d5f5fe" integrity sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ== pug@^3.0.1: version "3.0.2" resolved "https://registry.yarnpkg.com/pug/-/pug-3.0.2.tgz#f35c7107343454e43bc27ae0ff76c731b78ea535" integrity sha512-bp0I/hiK1D1vChHh6EfDxtndHji55XP/ZJKwsRqrz6lRia6ZC2OZbdAymlxdVFwd1L70ebrVJw4/eZ79skrIaw== dependencies: pug-code-gen "^3.0.2" pug-filters "^4.0.0" pug-lexer "^5.0.1" pug-linker "^4.0.0" pug-load "^3.0.0" pug-parser "^6.0.0" pug-runtime "^3.0.1" pug-strip-comments "^2.0.0" pump@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" integrity 
sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pumpify@^1.3.3: version "1.5.1" resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" inherits "^2.0.3" pump "^2.0.0" punycode@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= punycode@^1.2.4, punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== qjobs@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" integrity sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg== qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity 
sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== query-string@^5.0.1: version "5.1.1" resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== dependencies: decode-uri-component "^0.2.0" object-assign "^4.1.0" strict-uri-encode "^1.0.0" querystring-es3@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= querystring@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= querystringify@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" randomfill@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== dependencies: randombytes "^2.0.5" safe-buffer "^5.1.0" range-parser@^1.0.3, range-parser@^1.2.0, range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity 
sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== dependencies: bytes "3.1.0" http-errors "1.7.2" iconv-lite "0.4.24" unpipe "1.0.0" raw-loader@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-2.0.0.tgz#e2813d9e1e3f80d1bbade5ad082e809679e20c26" integrity sha512-kZnO5MoIyrojfrPWqrhFNLZemIAX8edMOCp++yC5RKxzFB3m92DqKNhKlU6+FvpOhWtvyh3jOaD7J6/9tpdIKg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" path-type "^1.0.0" "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity 
sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@^3.0.6, readable-stream@^3.1.1: version "3.4.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc" integrity sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" util-deprecate "^1.0.1" readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== dependencies: graceful-fs "^4.1.11" micromatch "^3.1.10" readable-stream "^2.0.2" readdirp@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839" integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ== dependencies: picomatch "^2.0.4" redent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= dependencies: indent-string "^2.1.0" strip-indent "^1.0.1" regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" regenerate@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" 
integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== regenerator-runtime@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.2: version "0.13.3" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5" integrity sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw== regenerator-transform@^0.14.0: version "0.14.1" resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.1.tgz#3b2fce4e1ab7732c08f665dfdb314749c7ddd2fb" integrity sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ== dependencies: private "^0.1.6" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" safe-regex "^1.1.0" regexp.prototype.flags@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.2.0.tgz#6b30724e306a27833eeb171b66ac8890ba37e41c" integrity sha512-ztaw4M1VqgMwl9HlPpOuiYgItcHlunW0He2fE6eNfT6E/CF2FtYi9ofOYe4mKntstYk0Fyh/rDRBdS3AnxjlrA== dependencies: define-properties "^1.1.2" regexpu-core@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6" integrity sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg== dependencies: regenerate "^1.4.0" regenerate-unicode-properties "^8.1.0" 
regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" unicode-match-property-value-ecmascript "^1.1.0" regjsgen@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.1.tgz#48f0bf1a5ea205196929c0d9798b42d1ed98443c" integrity sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg== regjsparser@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" relateurl@0.2.x: version "0.2.7" resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= renderkid@^2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== dependencies: css-select "^1.1.0" dom-converter "^0.2" htmlparser2 "^3.3.0" strip-ansi "^3.0.0" utila "^0.4.0" repeat-element@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved 
"https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" request@^2.83.0: version "2.88.0" resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.0" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign "~0.9.0" performance-now "^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.4.3" tunnel-agent "^0.6.0" uuid "^3.3.2" require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= require-main-filename@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= resolve-cwd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= dependencies: 
resolve-from "^3.0.0" resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= dependencies: expand-tilde "^2.0.0" global-modules "^1.0.0" resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" integrity sha1-six699nWiBvItuZTM17rywoYh0g= resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= resolve@^1.10.0, resolve@^1.3.2, resolve@^1.8.1: version "1.12.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.2.tgz#08b12496d9aa8659c75f534a8f05f0d892fff594" integrity sha512-cAVTI2VLHWYsGOirfeYVVQ7ZDejtQ9fp4YhYckWDEkFfqbVjaT11iM8k6xSAfGFMM+gDpZjMnFssPu8we+mqFw== dependencies: path-parse "^1.0.6" resolve@^1.15.1: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== dependencies: is-core-module "^2.2.0" path-parse "^1.0.6" ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= rfdc@^1.1.4: version "1.1.4" resolved 
"https://registry.yarnpkg.com/rfdc/-/rfdc-1.1.4.tgz#ba72cc1367a0ccd9cf81a870b3b58bd3ad07f8c2" integrity sha512-5C9HXdzK8EAqN7JDif30jqsBzavB7wLpaubisuQIGHWf2gUXSpzy6ArX/+Da8RjFpagWsCn+pIgxTMAmKw9Zug== rimraf@^2.5.4, rimraf@^2.6.0, rimraf@^2.6.1, rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== dependencies: hash-base "^3.0.0" inherits "^2.0.1" run-queue@^1.0.0, run-queue@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= dependencies: aproba "^1.1.1" safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= dependencies: ret "~0.1.10" "safer-buffer@>= 
2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== schema-utils@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf" integrity sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8= dependencies: ajv "^5.0.0" schema-utils@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== dependencies: ajv "^6.1.0" ajv-errors "^1.0.0" ajv-keywords "^3.1.0" select-hose@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= selfsigned@^1.10.7: version "1.10.7" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.7.tgz#da5819fd049d5574f28e88a9bcc6dbc6e6f3906b" integrity sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA== dependencies: node-forge "0.9.0" "semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== semver@^6.3.0: version "6.3.0" resolved 
"https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== dependencies: debug "2.6.9" depd "~1.1.2" destroy "~1.0.4" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" http-errors "~1.7.2" mime "1.6.0" ms "2.1.1" on-finished "~2.3.0" range-parser "~1.2.1" statuses "~1.5.0" serialize-javascript@^1.7.0: version "1.9.1" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-1.9.1.tgz#cfc200aef77b600c47da9bb8149c943e798c2fdb" integrity sha512-0Vb/54WJ6k5v8sSWN09S0ora+Hnr+cX40r9F170nT+mSkaxltoE/7R3OrIdBSUv1OoiobH1QoWQbCnAO+e8J1A== serve-index@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= dependencies: accepts "~1.3.4" batch "0.6.1" debug "2.6.9" escape-html "~1.0.3" http-errors "~1.6.2" mime-types "~2.1.17" parseurl "~1.3.2" serve-static@1.14.1: version "1.14.1" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" send "0.17.1" set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.3" split-string "^3.0.1" setimmediate@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== setprototypeof@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= dependencies: shebang-regex "^1.0.0" shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity 
sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" isobject "^3.0.0" snapdragon-util "^3.0.1" snapdragon-util@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: base "^0.11.1" debug "^2.2.0" define-property "^0.2.5" extend-shallow "^2.0.1" map-cache "^0.2.2" source-map "^0.5.6" source-map-resolve "^0.5.0" use "^3.1.0" socket.io-adapter@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz#2a805e8a14d6372124dd9159ad4502f8cb07f06b" integrity sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs= socket.io-client@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.1.1.tgz#dcb38103436ab4578ddb026638ae2f21b623671f" integrity sha512-jxnFyhAuFxYfjqIgduQlhzqTcOEQSn+OHKVfAxWaNWa7ecP7xSNk2Dx/3UEsDcY7NcFafxvNvKPmmO7HTwTxGQ== dependencies: backo2 "1.0.2" base64-arraybuffer "0.1.5" component-bind "1.0.0" component-emitter "1.2.1" debug "~3.1.0" engine.io-client "~3.2.0" has-binary2 "~1.0.2" has-cors "1.1.0" indexof "0.0.1" object-component "0.0.3" parseqs "0.0.5" parseuri "0.0.5" socket.io-parser "~3.2.0" to-array "0.1.4" socket.io-parser@~3.2.0: version "3.2.0" resolved 
"https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.2.0.tgz#e7c6228b6aa1f814e6148aea325b51aa9499e077" integrity sha512-FYiBx7rc/KORMJlgsXysflWx/RIvtqZbyGLlHZvjfmPTPeuD/I8MaW7cfFrj5tRltICJdgwflhfZ3NVVbVLFQA== dependencies: component-emitter "1.2.1" debug "~3.1.0" isarray "2.0.1" socket.io@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.1.1.tgz#a069c5feabee3e6b214a75b40ce0652e1cfb9980" integrity sha512-rORqq9c+7W0DAK3cleWNSyfv/qKXV99hV4tZe+gGLfBECw3XEhBy7x85F3wypA9688LKjtwO9pX9L33/xQI8yA== dependencies: debug "~3.1.0" engine.io "~3.2.0" has-binary2 "~1.0.2" socket.io-adapter "~1.1.0" socket.io-client "2.1.1" socket.io-parser "~3.2.0" sockjs-client@1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.4.0.tgz#c9f2568e19c8fd8173b4997ea3420e0bb306c7d5" integrity sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g== dependencies: debug "^3.2.5" eventsource "^1.0.7" faye-websocket "~0.11.1" inherits "^2.0.3" json3 "^3.3.2" url-parse "^1.4.3" sockjs@0.3.19: version "0.3.19" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d" integrity sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw== dependencies: faye-websocket "^0.10.0" uuid "^3.0.1" sort-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= dependencies: is-plain-obj "^1.0.0" source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-resolve@^0.5.0: version "0.5.2" resolved 
"https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" urix "^0.1.0" source-map-support@~0.5.12: version "0.5.16" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-url@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= source-map@^0.5.0, source-map@^0.5.1, source-map@^0.5.6, source-map@^0.5.7: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" integrity sha1-2rc/vPwrqBm03gO9b26qSBZLP50= dependencies: amdefine ">=0.0.4" spdx-correct@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: spdx-expression-parse "^3.0.0" 
spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== spdx-expression-parse@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: version "3.0.5" resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== dependencies: debug "^4.1.0" detect-node "^2.0.4" hpack.js "^2.1.6" obuf "^1.1.2" readable-stream "^3.0.6" wbuf "^1.7.3" spdy@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.1.tgz#6f12ed1c5db7ea4f24ebb8b89ba58c87c08257f2" integrity sha512-HeZS3PBdMA+sZSu0qwpCxl3DeALD5ASx8pAX0jZdKXSpPWbQ6SYGnlg3BBmYLx5LtiZrmkAZfErCm2oECBcioA== dependencies: debug "^4.1.0" handle-thing "^2.0.0" http-deceiver "^1.2.7" select-hose "^2.0.0" spdy-transport "^3.0.0" split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow "^3.0.0" 
sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" ssri@^6.0.1: version "6.0.2" resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.2.tgz#157939134f20464e7301ddba3e90ffa8f7728ac5" integrity sha512-cepbSq/neFK7xB6A50KHN0xHDotYzq58wWCa5LeWqnPrHG8GzfEjO/4O8kpmcGW+oaxkvhEJCWgbgNk4/ZV93Q== dependencies: figgy-pudding "^3.5.1" static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== dependencies: inherits "~2.0.1" readable-stream "^2.0.2" stream-each@^1.1.0: version "1.2.3" resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== 
dependencies: end-of-stream "^1.1.0" stream-shift "^1.0.0" stream-http@^2.7.2: version "2.8.3" resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== dependencies: builtin-status-codes "^3.0.0" inherits "^2.0.1" readable-stream "^2.3.6" to-arraybuffer "^1.0.0" xtend "^4.0.0" stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= streamroller@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-1.0.6.tgz#8167d8496ed9f19f05ee4b158d9611321b8cacd9" integrity sha512-3QC47Mhv3/aZNFpDDVO44qQb9gwB9QggMEE0sQmkTAwBVYdBRWISdsywlkfm5II1Q5y/pmrHflti/IgmIzdDBg== dependencies: async "^2.6.2" date-format "^2.0.0" debug "^3.2.6" fs-extra "^7.0.1" lodash "^4.17.14" strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" "string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== dependencies: emoji-regex "^7.0.1" is-fullwidth-code-point "^2.0.0" strip-ansi "^5.1.0" string.prototype.trimleft@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string.prototype.trimright@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58" integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: ansi-regex "^4.1.0" strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= dependencies: get-stdin "^4.0.1" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= style-loader@^0.23.1: version "0.23.1" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.23.1.tgz#cb9154606f3e771ab6c4ab637026a1049174d925" integrity sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" supports-color@6.1.0, supports-color@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== 
dependencies: has-flag "^3.0.0" supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.0: version "3.2.3" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" tapable@^1.0.0, tapable@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tar@^4: version "4.4.19" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3" integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA== dependencies: chownr "^1.1.4" fs-minipass "^1.2.7" minipass "^2.9.0" minizlib "^1.3.3" mkdirp "^0.5.5" safe-buffer "^5.2.1" yallist "^3.1.1" terser-webpack-plugin@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.1.tgz#61b18e40eaee5be97e771cdbb10ed1280888c2b4" integrity sha512-ZXmmfiwtCLfz8WKZyYUuuHf3dMYEjg8NrjHMb0JqHVHVOSkzp3cW2/XG1fP3tRhqEqSzMwzzRQGtAPbs4Cncxg== dependencies: cacache "^12.0.2" find-cache-dir "^2.1.0" is-wsl "^1.1.0" schema-utils "^1.0.0" serialize-javascript "^1.7.0" source-map "^0.6.1" terser "^4.1.2" webpack-sources "^1.4.0" worker-farm "^1.7.0" terser@^4.1.2: version "4.4.0" resolved 
"https://registry.yarnpkg.com/terser/-/terser-4.4.0.tgz#22c46b4817cf4c9565434bfe6ad47336af259ac3" integrity sha512-oDG16n2WKm27JO8h4y/w3iqBGAOSCtq7k8dRmrn4Wf9NouL0b2WpMHGChFGZq4nFAQy1FsNJrVQHfurXOSTmOA== dependencies: commander "^2.20.0" source-map "~0.6.1" source-map-support "~0.5.12" through2@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" xtend "~4.0.1" thunky@^1.0.2: version "1.1.0" resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== timers-browserify@^2.0.4: version "2.0.11" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== dependencies: setimmediate "^1.0.4" tmp@0.0.33, tmp@0.0.x: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" to-array@0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" integrity sha1-F+bBH3PdTz10zaek/zI46a2b+JA= to-arraybuffer@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" 
integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= dependencies: is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" extend-shallow "^3.0.2" regex-not "^1.0.2" safe-regex "^1.1.0" toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== token-stream@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-1.0.0.tgz#cc200eab2613f4166d27ff9afc7ca56d49df6eb4" integrity sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ= toposort@^1.0.0: version "1.0.7" resolved 
"https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= tough-cookie@~2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: psl "^1.1.24" punycode "^1.4.1" trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= tslib@^1.9.0: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" type-is@~1.6.17, 
type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" type@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3" integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow== typedarray@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= uglify-js@3.4.x: version "3.4.10" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== dependencies: commander "~2.19.0" source-map "~0.6.1" uglify-js@^3.1.4: version "3.13.5" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.13.5.tgz#5d71d6dbba64cf441f32929b1efce7365bb4f113" integrity sha512-xtB8yEqIkn7zmOyS2zUNBsYCBRhDkvlNxMMY2smuJ/qA8NCHeQvKCF3i9Z4k8FJH4+PJvZRtMrPynfZ75+CSZw== ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== unicode-canonical-property-names-ecmascript@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== unicode-match-property-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== dependencies: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" unicode-match-property-value-ecmascript@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" set-value "^2.0.1" uniq@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= unique-filename@^1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== dependencies: unique-slug "^2.0.0" unique-slug@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== dependencies: imurmurhash "^0.1.4" universalify@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= dependencies: has-value "^0.3.1" isobject "^3.0.0" upath@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== upper-case@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= uri-js@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" 
resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= url-join@^2.0.2: version "2.0.5" resolved "https://registry.yarnpkg.com/url-join/-/url-join-2.0.5.tgz#5af22f18c052a000a48d7b82c5e9c2e2feeda728" integrity sha1-WvIvGMBSoACkjXuCxenC4v7tpyg= url-parse@^1.4.3: version "1.5.3" resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.3.tgz#71c1303d38fb6639ade183c2992c8cc0686df862" integrity sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" url@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= dependencies: punycode "1.3.2" querystring "0.2.0" use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== useragent@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" integrity sha512-4AoH4pxuSvHCjqLO04sU6U/uE65BYza8l/KKBS0b0hnUPWi+cQ2BpeTEwejCSx9SPV5/U03nniDTrWx5NrmKdw== dependencies: lru-cache "4.1.x" tmp "0.0.x" util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util.promisify@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== dependencies: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" util@0.10.3: 
version "0.10.3" resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= dependencies: inherits "2.0.1" util@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== dependencies: inherits "2.0.3" utila@^0.4.0, utila@~0.4: version "0.4.0" resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.3.tgz#4568f0216e78760ee1dbf3a4d2cf53e224112866" integrity sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ== v8-compile-cache@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= verror@1.10.0: version "1.10.0" resolved 
"https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" vm-browserify@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== void-elements@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= void-elements@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-3.1.0.tgz#614f7fbf8d801f0bb5f0661f5b2f5785750e4f09" integrity sha1-YU9/v42AHwu18GYfWy9XhXUOTwk= watchpack@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== dependencies: chokidar "^2.0.2" graceful-fs "^4.1.2" neo-async "^2.5.0" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== dependencies: minimalistic-assert "^1.0.0" webpack-cli@^3.3.1: version "3.3.10" resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.10.tgz#17b279267e9b4fb549023fae170da8e6e766da13" integrity sha512-u1dgND9+MXaEt74sJR4PR7qkPxXUSQ0RXYq8x1L6Jg1MYVEmGPrH6Ah6C4arD4r0J1P5HKjRqpab36k0eIzPqg== dependencies: chalk "2.4.2" cross-spawn "6.0.5" enhanced-resolve "4.1.0" findup-sync "3.0.0" global-modules "2.0.0" import-local "2.0.0" interpret "1.2.0" loader-utils "1.2.3" supports-color "6.1.0" v8-compile-cache "2.0.3" yargs 
"13.2.4" webpack-dev-middleware@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-2.0.6.tgz#a51692801e8310844ef3e3790e1eacfe52326fd4" integrity sha512-tj5LLD9r4tDuRIDa5Mu9lnY2qBBehAITv6A9irqXhw/HQquZgTx3BCd57zYbU2gMDnncA49ufK2qVQSbaKJwOw== dependencies: loud-rejection "^1.6.0" memory-fs "~0.4.1" mime "^2.1.0" path-is-absolute "^1.0.0" range-parser "^1.0.3" url-join "^2.0.2" webpack-log "^1.0.1" webpack-dev-middleware@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3" integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw== dependencies: memory-fs "^0.4.1" mime "^2.4.4" mkdirp "^0.5.1" range-parser "^1.2.1" webpack-log "^2.0.0" webpack-dev-server@^3.3.1: version "3.9.0" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.9.0.tgz#27c3b5d0f6b6677c4304465ac817623c8b27b89c" integrity sha512-E6uQ4kRrTX9URN9s/lIbqTAztwEPdvzVrcmHE8EQ9YnuT9J8Es5Wrd8n9BKg1a0oZ5EgEke/EQFgUsp18dSTBw== dependencies: ansi-html "0.0.7" bonjour "^3.5.0" chokidar "^2.1.8" compression "^1.7.4" connect-history-api-fallback "^1.6.0" debug "^4.1.1" del "^4.1.1" express "^4.17.1" html-entities "^1.2.1" http-proxy-middleware "0.19.1" import-local "^2.0.0" internal-ip "^4.3.0" ip "^1.1.5" is-absolute-url "^3.0.3" killable "^1.0.1" loglevel "^1.6.4" opn "^5.5.0" p-retry "^3.0.1" portfinder "^1.0.25" schema-utils "^1.0.0" selfsigned "^1.10.7" semver "^6.3.0" serve-index "^1.9.1" sockjs "0.3.19" sockjs-client "1.4.0" spdy "^4.0.1" strip-ansi "^3.0.1" supports-color "^6.1.0" url "^0.11.0" webpack-dev-middleware "^3.7.2" webpack-log "^2.0.0" ws "^6.2.1" yargs "12.0.5" webpack-fix-style-only-entries@^0.2.1: version "0.2.2" resolved 
"https://registry.yarnpkg.com/webpack-fix-style-only-entries/-/webpack-fix-style-only-entries-0.2.2.tgz#60331c608b944ac821a3b6f2ae491a6d79ba40eb" integrity sha512-0wcrLCnISP8htV0NP1mT0e2mHhfjGQdNk82s8BTLVvF7rXuoJuUUzP3aCUXnRqlLgmTBx5WgqPhnczjatl+iSQ== webpack-log@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-1.2.0.tgz#a4b34cda6b22b518dbb0ab32e567962d5c72a43d" integrity sha512-U9AnICnu50HXtiqiDxuli5gLB5PGBo7VvcHx36jRZHwK4vzOYLbImqT4lwWwoMHdQWwEKw736fCHEekokTEKHA== dependencies: chalk "^2.1.0" log-symbols "^2.1.0" loglevelnext "^1.0.1" uuid "^3.1.0" webpack-log@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== dependencies: ansi-colors "^3.0.0" uuid "^3.3.2" webpack-shell-plugin@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/webpack-shell-plugin/-/webpack-shell-plugin-0.5.0.tgz#29b8a1d80ddeae0ddb10e729667f728653c2c742" integrity sha1-Kbih2A3erg3bEOcpZn9yhlPCx0I= webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== dependencies: source-list-map "^2.0.0" source-map "~0.6.1" webpack@^4.30.0: version "4.41.2" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.2.tgz#c34ec76daa3a8468c9b61a50336d8e3303dce74e" integrity sha512-Zhw69edTGfbz9/8JJoyRQ/pq8FYUoY0diOXqW0T6yhgdhCv6wr0hra5DwwWexNRns2Z2+gsnrNcbe9hbGBgk/A== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/wasm-edit" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" acorn "^6.2.1" ajv "^6.10.2" ajv-keywords "^3.4.1" chrome-trace-event "^1.0.2" 
enhanced-resolve "^4.1.0" eslint-scope "^4.0.3" json-parse-better-errors "^1.0.2" loader-runner "^2.4.0" loader-utils "^1.2.3" memory-fs "^0.4.1" micromatch "^3.1.10" mkdirp "^0.5.1" neo-async "^2.6.1" node-libs-browser "^2.2.1" schema-utils "^1.0.0" tapable "^1.1.3" terser-webpack-plugin "^1.4.1" watchpack "^1.6.0" webpack-sources "^1.4.1" websocket-driver@>=0.5.1: version "0.7.3" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.3.tgz#a2d4e0d4f4f116f1e6297eba58b05d430100e9f9" integrity sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg== dependencies: http-parser-js ">=0.4.0 <0.4.11" safe-buffer ">=5.1.0" websocket-extensions ">=0.1.1" websocket-extensions@>=0.1.1: version "0.1.4" resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@^1.1.1, which@^1.2.1, which@^1.2.14, which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" with@^7.0.0: version "7.0.2" resolved "https://registry.yarnpkg.com/with/-/with-7.0.2.tgz#ccee3ad542d25538a7a7a80aad212b9828495bac" integrity 
sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w== dependencies: "@babel/parser" "^7.9.6" "@babel/types" "^7.9.6" assert-never "^1.2.1" babel-walk "3.0.0-canary-5" word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= worker-farm@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== dependencies: errno "~0.1.7" wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== dependencies: ansi-styles "^3.2.0" string-width "^3.0.0" strip-ansi "^5.0.0" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= ws@^6.2.1: version "6.2.1" resolved 
"https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== dependencies: async-limiter "~1.0.0" ws@~3.3.1: version "3.3.3" resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA== dependencies: async-limiter "~1.0.0" safe-buffer "~5.1.0" ultron "~1.1.0" xmlhttprequest-ssl@~1.5.4: version "1.5.5" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e" integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4= xtend@^4.0.0, xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== "y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.2, yallist@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^11.1.1: version "11.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" integrity 
sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs-parser@^13.1.0: version "13.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs@12.0.5: version "12.0.5" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== dependencies: cliui "^4.0.0" decamelize "^1.2.0" find-up "^3.0.0" get-caller-file "^1.0.1" os-locale "^3.0.0" require-directory "^2.1.1" require-main-filename "^1.0.1" set-blocking "^2.0.0" string-width "^2.0.0" which-module "^2.0.0" y18n "^3.2.1 || ^4.0.0" yargs-parser "^11.1.1" yargs@13.2.4: version "13.2.4" resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== dependencies: cliui "^5.0.0" find-up "^3.0.0" get-caller-file "^2.0.1" os-locale "^3.1.0" require-directory "^2.1.1" require-main-filename "^2.0.0" set-blocking "^2.0.0" string-width "^3.0.0" which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^13.1.0" yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk= 
buildbot-3.4.0/www/wsgi_dashboards/000077500000000000000000000000001413250514000173045ustar00rootroot00000000000000buildbot-3.4.0/www/wsgi_dashboards/buildbot_wsgi_dashboards/000077500000000000000000000000001413250514000243335ustar00rootroot00000000000000buildbot-3.4.0/www/wsgi_dashboards/buildbot_wsgi_dashboards/__init__.py000066400000000000000000000031171413250514000264460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import reactor from twisted.internet.threads import blockingCallFromThread from twisted.web.wsgi import WSGIResource from buildbot.util import unicode2bytes from buildbot.www.plugin import Application class WSGIDashboardsApplication(Application): def setConfiguration(self, config): for dashboard in config: dashboard['app'].buildbot_api = self resource = WSGIResource(reactor, reactor.getThreadPool(), dashboard['app']) self.resource.putChild(unicode2bytes(dashboard['name']), resource) def dataGet(self, path, **kwargs): if not isinstance(path, tuple): path = tuple(path.strip("/").split("/")) return blockingCallFromThread(reactor, self.master.data.get, path, **kwargs) # create the interface for the setuptools entry point ep = WSGIDashboardsApplication(__name__, "Buildbot WSGI Dashboard Glue") buildbot-3.4.0/www/wsgi_dashboards/karma.conf.js000066400000000000000000000003641413250514000216640ustar00rootroot00000000000000const common = require('buildbot-build-common'); module.exports = function karmaConfig (config) { common.createTemplateKarmaConfig(config, { testRoot: 'src/tests.webpack.js', webpack: require('./webpack.config') }); }; buildbot-3.4.0/www/wsgi_dashboards/package.json000066400000000000000000000014151413250514000215730ustar00rootroot00000000000000{ "name": "buildbot-wsgi-dashboards", "plugin_name": "wsgi_dashboards", "private": true, "main": "buildbot_wsgi_dashboards/static/scripts.js", "scripts": { "build": "rimraf buildbot_wsgi_dashboards/static && webpack --bail --progress --profile --env prod", "build-dev": "rimraf buildbot_wsgi_dashboards/static && webpack --bail --progress --profile --env dev", "dev": "webpack --bail --progress --profile --watch --env dev", "test": "karma start", "test-watch": "karma start --auto-watch --no-single-run" }, "devDependencies": { "angular-mocks": "^1.7.9", "buildbot-build-common": "link:../build_common", "rimraf": "^2.6.3" }, "dependencies": { "angular": "^1.8.0" } 
} buildbot-3.4.0/www/wsgi_dashboards/postcss.config.js000066400000000000000000000001711413250514000226030ustar00rootroot00000000000000module.exports = { plugins: { autoprefixer: { browsers: ['last 2 versions'] }, }, }; buildbot-3.4.0/www/wsgi_dashboards/setup.cfg000066400000000000000000000000001413250514000211130ustar00rootroot00000000000000buildbot-3.4.0/www/wsgi_dashboards/setup.py000066400000000000000000000031111413250514000210120ustar00rootroot00000000000000#!/usr/bin/env python # # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members try: from buildbot_pkg import setup_www_plugin except ImportError: import sys print('Please install buildbot_pkg module in order to install that ' 'package, or use the pre-build .whl modules available on pypi', file=sys.stderr) sys.exit(1) setup_www_plugin( name='buildbot-wsgi-dashboards', description='Buildbot plugin to integrate flask or bottle dashboards to buildbot UI', author=u'Buildbot maintainers', author_email=u'devel@buildbot.net', url='http://buildbot.net/', packages=['buildbot_wsgi_dashboards'], package_data={ '': [ 'VERSION', 'static/*' ] }, entry_points=""" [buildbot.www] wsgi_dashboards = buildbot_wsgi_dashboards:ep """, classifiers=[ 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)' ], ) buildbot-3.4.0/www/wsgi_dashboards/src/000077500000000000000000000000001413250514000200735ustar00rootroot00000000000000buildbot-3.4.0/www/wsgi_dashboards/src/module/000077500000000000000000000000001413250514000213605ustar00rootroot00000000000000buildbot-3.4.0/www/wsgi_dashboards/src/module/dashboards.route.js000066400000000000000000000030721413250514000251670ustar00rootroot00000000000000/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ // Register new state class WsgiDashboardsState { constructor($stateProvider, glMenuServiceProvider, config) { for (let dashboard of Array.from(config.plugins.wsgi_dashboards)) { // Name of the state const { name } = dashboard; let { caption } = dashboard; if (caption == null) { caption = _.capitalize(name); } if (dashboard.order == null) { dashboard.order = 5; } // Configuration glMenuServiceProvider.addGroup({ name, caption, icon: dashboard.icon, order: dashboard.order }); const cfg = { group: name, caption }; // Register new state const state = { controller: "wsgiDashboardsController", templateUrl: 
`wsgi_dashboards/${name}/index.html`, name, url: `/${name}`, data: cfg }; $stateProvider.state(state); } } } class WsgiDashboardsController { constructor($scope, $state) {} } angular.module('wsgi_dashboards') .config(['$stateProvider', 'glMenuServiceProvider', 'config', WsgiDashboardsState]) .controller('wsgiDashboardsController', ['$scope', '$state', WsgiDashboardsController]); buildbot-3.4.0/www/wsgi_dashboards/src/module/main.module.js000066400000000000000000000002101413250514000241170ustar00rootroot00000000000000 angular.module('wsgi_dashboards', [ 'ui.router', 'buildbot_config', 'guanlecoja.ui' ]); require('./dashboards.route.js'); buildbot-3.4.0/www/wsgi_dashboards/src/module/main.module.spec.js000066400000000000000000000001151413250514000250540ustar00rootroot00000000000000describe('WSGIDashboards', () => it('should have one test', function() {})); buildbot-3.4.0/www/wsgi_dashboards/src/styles/000077500000000000000000000000001413250514000214165ustar00rootroot00000000000000buildbot-3.4.0/www/wsgi_dashboards/src/styles/styles.less000066400000000000000000000000001413250514000236170ustar00rootroot00000000000000buildbot-3.4.0/www/wsgi_dashboards/src/tests.webpack.js000066400000000000000000000004421413250514000232060ustar00rootroot00000000000000// This file is an entry point for angular tests // Avoids some weird issues when using webpack + angular. 
import 'angular'; import 'angular-mocks/angular-mocks'; import './module/main.module.js' const context = require.context('./', true, /\.spec.js$/); context.keys().forEach(context); buildbot-3.4.0/www/wsgi_dashboards/webpack.config.js000066400000000000000000000012231413250514000225200ustar00rootroot00000000000000'use strict'; const common = require('buildbot-build-common'); const env = require('yargs').argv.env; const pkg = require('./package.json'); var event = process.env.npm_lifecycle_event; var isTest = event === 'test' || event === 'test-watch'; var isProd = env === 'prod'; module.exports = function() { return common.createTemplateWebpackConfig({ entry: { scripts: './src/module/main.module.js', }, libraryName: pkg.name, pluginName: pkg.plugin_name, dirname: __dirname, isTest: isTest, isProd: isProd, outputPath: __dirname + '/buildbot_wsgi_dashboards/static', }); }(); buildbot-3.4.0/www/wsgi_dashboards/yarn.lock000066400000000000000000011405401413250514000211340ustar00rootroot00000000000000# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
# yarn lockfile v1 "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.7.4.tgz#37e864532200cb6b50ee9a4045f5f817840166ab" integrity sha512-+bYbx56j4nYBmpsWtnPUsKW3NdnYxbqyfrP2w9wILBuHzdfIKz9prieZK0DFPyIzkjYVUe4QkusGL07r5pXznQ== dependencies: "@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helpers" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" convert-source-map "^1.7.0" debug "^4.1.0" json5 "^2.1.0" lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" "@babel/generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.7.4.tgz#db651e2840ca9aa66f327dcec1dc5f5fa9611369" integrity sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg== dependencies: "@babel/types" "^7.7.4" jsesc "^2.5.1" lodash "^4.17.13" source-map "^0.5.0" "@babel/helper-annotate-as-pure@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.4.tgz#bb3faf1e74b74bd547e867e48f551fa6b098b6ce" integrity sha512-2BQmQgECKzYKFPpiycoF9tlb5HA4lrVyAmLLVK177EcQAqjVLciUb2/R+n1boQ9y5ENV3uz2ZqiNw7QMBBw1Og== dependencies: "@babel/types" "^7.7.4" "@babel/helper-builder-binary-assignment-operator-visitor@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.4.tgz#5f73f2b28580e224b5b9bd03146a4015d6217f5f" integrity 
sha512-Biq/d/WtvfftWZ9Uf39hbPBYDUo986m5Bb4zhkeYDGUllF43D+nUe5M6Vuo6/8JDK/0YX/uBdeoQpyaNhNugZQ== dependencies: "@babel/helper-explode-assignable-expression" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-call-delegate@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.7.4.tgz#621b83e596722b50c0066f9dc37d3232e461b801" integrity sha512-8JH9/B7J7tCYJ2PpWVpw9JhPuEVHztagNVuQAFBVFYluRMlpG7F1CgKEgGeL6KFqcsIa92ZYVj6DSc0XwmN1ZA== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-create-regexp-features-plugin@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.4.tgz#6d5762359fd34f4da1500e4cff9955b5299aaf59" integrity sha512-Mt+jBKaxL0zfOIWrfQpnfYCN7/rS6GKx6CCCfuoqVVd+17R8zNDlzVYmIi9qyb2wOk002NsmSTDymkIygDUH7A== dependencies: "@babel/helper-regex" "^7.4.4" regexpu-core "^4.6.0" "@babel/helper-define-map@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz#2841bf92eb8bd9c906851546fe6b9d45e162f176" integrity sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-explode-assignable-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.4.tgz#fa700878e008d85dc51ba43e9fb835cddfe05c84" integrity sha512-2/SicuFrNSXsZNBxe5UGdLr+HZg+raWBLE9vC98bdYOKX/U6PY0mdGlYUJdtTDPSU0Lw0PNbKKDpwYHJLn2jLg== dependencies: "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-function-name@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz#ab6e041e7135d436d8f0a3eca15de5b67a341a2e" integrity sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ== dependencies: "@babel/helper-get-function-arity" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-get-function-arity@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz#cb46348d2f8808e632f0ab048172130e636005f0" integrity sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA== dependencies: "@babel/types" "^7.7.4" "@babel/helper-hoist-variables@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.4.tgz#612384e3d823fdfaaf9fce31550fe5d4db0f3d12" integrity sha512-wQC4xyvc1Jo/FnLirL6CEgPgPCa8M74tOdjWpRhQYapz5JC7u3NYU1zCVoVAGCE3EaIP9T1A3iW0WLJ+reZlpQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-member-expression-to-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.4.tgz#356438e2569df7321a8326644d4b790d2122cb74" integrity sha512-9KcA1X2E3OjXl/ykfMMInBK+uVdfIVakVe7W7Lg3wfXUNyS3Q1HWLFRwZIjhqiCGbslummPDnmb7vIekS0C1vw== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-imports@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.7.4.tgz#e5a92529f8888bf319a6376abfbd1cebc491ad91" integrity sha512-dGcrX6K9l8258WFjyDLJwuVKxR4XZfU0/vTUgOQYWEnRD8mgr+p4d6fCUMq/ys0h4CCt/S5JhbvtyErjWouAUQ== dependencies: "@babel/types" "^7.7.4" "@babel/helper-module-transforms@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.7.4.tgz#8d7cdb1e1f8ea3d8c38b067345924ac4f8e0879a" 
integrity sha512-ehGBu4mXrhs0FxAqN8tWkzF8GSIGAiEumu4ONZ/hD9M88uHcD+Yu2ttKfOCgwzoesJOJrtQh7trI5YPbRtMmnA== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-simple-access" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" lodash "^4.17.13" "@babel/helper-optimise-call-expression@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.4.tgz#034af31370d2995242aa4df402c3b7794b2dcdf2" integrity sha512-VB7gWZ2fDkSuqW6b1AKXkJWO5NyNI3bFL/kK79/30moK57blr6NbH8xcl2XcKCwOmJosftWunZqfO84IGq3ZZg== dependencies: "@babel/types" "^7.7.4" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": version "7.5.5" resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.4.tgz#c68c2407350d9af0e061ed6726afb4fff16d0234" integrity sha512-Sk4xmtVdM9sA/jCI80f+KS+Md+ZHIpjuqmYPk1M7F/upHou5e4ReYmExAiu6PVe65BhJPZA2CY9x9k4BqE5klw== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-wrap-function" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-replace-supers@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.7.4.tgz#3c881a6a6a7571275a72d82e6107126ec9e2cdd2" integrity sha512-pP0tfgg9hsZWo5ZboYGuBn/bbYT/hdLPVSS4NMmiRJdwWhP0IznPwN9AE1JwyGsjSPLC364I0Qh5p+EPkGPNpg== dependencies: "@babel/helper-member-expression-to-functions" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-simple-access@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.7.4.tgz#a169a0adb1b5f418cfc19f22586b2ebf58a9a294" integrity sha512-zK7THeEXfan7UlWsG2A6CI/L9jVnI5+xxKZOdej39Y0YtDYKx9raHk5F2EtK9K8DHRTihYwg20ADt9S36GR78A== dependencies: "@babel/template" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helper-split-export-declaration@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz#57292af60443c4a3622cf74040ddc28e68336fd8" integrity sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug== dependencies: "@babel/types" "^7.7.4" "@babel/helper-validator-identifier@^7.12.11": version "7.12.11" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== "@babel/helper-wrap-function@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz#37ab7fed5150e22d9d7266e830072c0cdd8baace" integrity sha512-VsfzZt6wmsocOaVU0OokwrIytHND55yvyT4BPB9AIIgwr8+x7617hetdJTsuGwygN5RC6mxA9EJztTjuwm2ofg== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/helpers@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.7.4.tgz#62c215b9e6c712dadc15a9a0dcab76c92a940302" integrity sha512-ak5NGZGJ6LV85Q1Zc9gn2n+ayXOizryhjSUBTdu5ih1tlVCJeuQENzc4ItyCVhINVXvIT/ZQ4mheGIsfBkpskg== dependencies: "@babel/template" "^7.7.4" "@babel/traverse" "^7.7.4" "@babel/types" "^7.7.4" "@babel/highlight@^7.0.0": version "7.5.0" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" "@babel/parser@^7.6.0", "@babel/parser@^7.9.6": version "7.13.9" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.13.9.tgz#ca34cb95e1c2dd126863a84465ae8ef66114be99" integrity sha512-nEUfRiARCcaVo3ny3ZQjURjHQZUo/JkEw7rLlSZy/psWGnvwXFtPcr6jb7Yb41DVW5LTe6KRq9LGleRNsg1Frw== "@babel/parser@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.7.4.tgz#75ab2d7110c2cf2fa949959afb05fa346d2231bb" integrity sha512-jIwvLO0zCL+O/LmEJQjWA75MQTWwx3c3u2JOTDK5D3/9egrWRRA0/0hk9XXywYnXZVVpzrBYeIQTmhwUaePI9g== "@babel/plugin-proposal-async-generator-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz#0351c5ac0a9e927845fffd5b82af476947b7ce6d" integrity sha512-1ypyZvGRXriY/QP668+s8sFr2mqinhkRDMPSQLNghCQE+GAkFtp+wkHVvg2+Hdki8gwP+NFzJBJ/N1BfzCCDEw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-proposal-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.4.tgz#dde64a7f127691758cbfed6cf70de0fa5879d52d" integrity 
sha512-StH+nGAdO6qDB1l8sZ5UBV8AC3F2VW2I8Vfld73TMKyptMU9DY5YsJAS8U81+vEtxcH3Y/La0wG0btDrhpnhjQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.7.4.tgz#7700a6bfda771d8dc81973249eac416c6b4c697d" integrity sha512-wQvt3akcBTfLU/wYoqm/ws7YOAQKu8EVJEvHip/mzkNtjaclQoCCIqKXFP5/eyfnfbQCDV3OLRIK3mIVyXuZlw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.7.4.tgz#cc57849894a5c774214178c8ab64f6334ec8af71" integrity sha512-rnpnZR3/iWKmiQyJ3LKJpSwLDcX/nSXhdLk4Aq/tXOApIvyu7qoabrige0ylsAJffaUC51WiBu209Q0U+86OWQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.7.4.tgz#ec21e8aeb09ec6711bc0a39ca49520abee1de379" integrity sha512-DyM7U2bnsQerCQ+sejcTNZh8KQEUuC3ufzdnVnSiUv/qoGJp2Z3hanKL18KDhsBT5Wj6a7CMT5mdyCNJsEaA9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.4.tgz#7c239ccaf09470dbe1d453d50057460e84517ebb" integrity sha512-cHgqHgYvffluZk85dJ02vloErm3Y6xtH+2noOBOJ2kXOJH3aVCDnj5eR/lVNlTnYu4hndAPJD3rTFjW3qee0PA== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-async-generators@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.7.4.tgz#331aaf310a10c80c44a66b238b6e49132bd3c889" integrity sha512-Li4+EjSpBgxcsmeEF8IFcfV/+yJGxHXDirDkEoyFjumuwbmfCVHUt0HuowD/iGM7OhIRyXJH9YXxqiH6N815+g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-dynamic-import@^7.2.0", "@babel/plugin-syntax-dynamic-import@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.7.4.tgz#29ca3b4415abfe4a5ec381e903862ad1a54c3aec" integrity sha512-jHQW0vbRGvwQNgyVxwDh4yuXu4bH1f5/EICJLAhl1SblLs2CDhrsmCk+v5XLdE9wxtAFRyxx+P//Iw+a5L/tTg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-json-strings@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.7.4.tgz#86e63f7d2e22f9e27129ac4e83ea989a382e86cc" integrity sha512-QpGupahTQW1mHRXddMG5srgpHWqRLwJnJZKXTigB9RPFCCGbDGCgBeM/iC82ICXp414WeYx/tD54w7M2qRqTMg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.7.4.tgz#47cf220d19d6d0d7b154304701f468fc1cc6ff46" integrity sha512-mObR+r+KZq0XhRVS2BrBKBpr5jqrqzlPvS9C9vuOf5ilSwzloAl7RPWLrgKdWS6IreaVrjHxTjtyqFiOisaCwg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-optional-catch-binding@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.7.4.tgz#a3e38f59f4b6233867b4a92dcb0ee05b2c334aa6" integrity sha512-4ZSuzWgFxqHRE31Glu+fEr/MirNZOMYmD/0BhBWyLyOOQz/gTAl7QmWm2hX1QxEIXsr2vkdlwxIzTyiYRC4xcQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-syntax-top-level-await@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.4.tgz#bd7d8fa7b9fee793a36e4027fd6dd1aa32f946da" integrity sha512-wdsOw0MvkL1UIgiQ/IFr3ETcfv1xb8RMM0H9wbiDyLaJFyiDg5oZvDLCXosIXmFeIlweML5iOBXAkqddkYNizg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-arrow-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.7.4.tgz#76309bd578addd8aee3b379d809c802305a98a12" integrity sha512-zUXy3e8jBNPiffmqkHRNDdZM2r8DWhCB7HhcoyZjiK1TxYEluLHAvQuYnTT+ARqRpabWqy/NHkO6e3MsYB5YfA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.4.tgz#694cbeae6d613a34ef0292713fa42fb45c4470ba" integrity sha512-zpUTZphp5nHokuy8yLlyafxCJ0rSlFoSHypTUWgpdwoDXWQcseaect7cJ8Ppk6nunOM6+5rPMkod4OYKPR5MUg== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.7.4.tgz#d0d9d5c269c78eaea76227ace214b8d01e4d837b" integrity sha512-kqtQzwtKcpPclHYjLK//3lH8OFsCDuDJBaFhVwf8kqdnF6MN4l618UDlcA7TfRs3FayrHj+svYnSX8MC9zmUyQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-block-scoping@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.7.4.tgz#200aad0dcd6bb80372f94d9e628ea062c58bf224" integrity 
sha512-2VBe9u0G+fDt9B5OV5DQH4KBf5DoiNkwFKOz0TCvBWvdAN2rOykCTkrL+jTLxfCAm76l9Qo5OqL7HBOx2dWggg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" lodash "^4.17.13" "@babel/plugin-transform-classes@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.4.tgz#c92c14be0a1399e15df72667067a8f510c9400ec" integrity sha512-sK1mjWat7K+buWRuImEzjNf68qrKcrddtpQo3swi9j7dUcG6y6R6+Di039QN2bD1dykeswlagupEmpOatFHHUg== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-define-map" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-optimise-call-expression" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" globals "^11.1.0" "@babel/plugin-transform-computed-properties@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.7.4.tgz#e856c1628d3238ffe12d668eb42559f79a81910d" integrity sha512-bSNsOsZnlpLLyQew35rl4Fma3yKWqK3ImWMSC/Nc+6nGjC9s5NFWAer1YQ899/6s9HxO2zQC1WoFNfkOqRkqRQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-destructuring@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.7.4.tgz#2b713729e5054a1135097b6a67da1b6fe8789267" integrity sha512-4jFMXI1Cu2aXbcXXl8Lr6YubCn6Oc7k9lLsu8v61TZh+1jny2BWmdtvY9zSUlLdGUvcy9DMAWyZEOqjsbeg/wA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-dotall-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.4.tgz#f7ccda61118c5b7a2599a72d5e3210884a021e96" integrity sha512-mk0cH1zyMa/XHeb6LOTXTbG7uIJ8Rrjlzu91pUx/KS3JpcgaTDwMS8kM+ar8SLOvlL2Lofi4CGBAjCo3a2x+lw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" 
"@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-duplicate-keys@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.7.4.tgz#3d21731a42e3f598a73835299dd0169c3b90ac91" integrity sha512-g1y4/G6xGWMD85Tlft5XedGaZBCIVN+/P0bs6eabmcPP9egFleMAo65OOjlhcz1njpwagyY3t0nsQC9oTFegJA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-exponentiation-operator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.7.4.tgz#dd30c0191e3a1ba19bcc7e389bdfddc0729d5db9" integrity sha512-MCqiLfCKm6KEA1dglf6Uqq1ElDIZwFuzz1WH5mTf8k2uQSxEJMbOIEh7IZv7uichr7PMfi5YVSrr1vz+ipp7AQ== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-for-of@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.7.4.tgz#248800e3a5e507b1f103d8b4ca998e77c63932bc" integrity sha512-zZ1fD1B8keYtEcKF+M1TROfeHTKnijcVQm0yO/Yu1f7qoDoxEIc/+GX6Go430Bg84eM/xwPFp0+h4EbZg7epAA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-function-name@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.4.tgz#75a6d3303d50db638ff8b5385d12451c865025b1" integrity sha512-E/x09TvjHNhsULs2IusN+aJNRV5zKwxu1cpirZyRPw+FyyIKEHPXTsadj48bVpc1R5Qq1B5ZkzumuFLytnbT6g== dependencies: "@babel/helper-function-name" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.7.4.tgz#27fe87d2b5017a2a5a34d1c41a6b9f6a6262643e" integrity 
sha512-X2MSV7LfJFm4aZfxd0yLVFrEXAgPqYoDG53Br/tCKiKYfX0MjVjQeWPIhPHHsCqzwQANq+FLN786fF5rgLS+gw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-member-expression-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.7.4.tgz#aee127f2f3339fc34ce5e3055d7ffbf7aa26f19a" integrity sha512-9VMwMO7i69LHTesL0RdGy93JU6a+qOPuvB4F4d0kR0zyVjJRVJRaoaGjhtki6SzQUu8yen/vxPKN6CWnCUw6bA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-amd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.7.4.tgz#276b3845ca2b228f2995e453adc2e6f54d72fb71" integrity sha512-/542/5LNA18YDtg1F+QHvvUSlxdvjZoD/aldQwkq+E3WCkbEjNSN9zdrOXaSlfg3IfGi22ijzecklF/A7kVZFQ== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-commonjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.4.tgz#bee4386e550446343dd52a571eda47851ff857a3" integrity sha512-k8iVS7Jhc367IcNF53KCwIXtKAH7czev866ThsTgy8CwlXjnKZna2VHwChglzLleYrcHz1eQEIJlGRQxB53nqA== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.7.4" babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-systemjs@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.4.tgz#cd98152339d3e763dfe838b7d4273edaf520bb30" integrity sha512-y2c96hmcsUi6LrMqvmNDPBBiGCiQu0aYqpHatVVu6kD4mFEXKjyNxd/drc18XXAf9dv7UXjrZwBVmTTGaGP8iw== dependencies: "@babel/helper-hoist-variables" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" 
babel-plugin-dynamic-import-node "^2.3.0" "@babel/plugin-transform-modules-umd@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.4.tgz#1027c355a118de0aae9fee00ad7813c584d9061f" integrity sha512-u2B8TIi0qZI4j8q4C51ktfO7E3cQ0qnaXFI1/OXITordD40tt17g/sXqgNNCcMTcBFKrUPcGDx+TBJuZxLx7tw== dependencies: "@babel/helper-module-transforms" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-named-capturing-groups-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.4.tgz#fb3bcc4ee4198e7385805007373d6b6f42c98220" integrity sha512-jBUkiqLKvUWpv9GLSuHUFYdmHg0ujC1JEYoZUfeOOfNydZXp1sXObgyPatpcwjWgsdBGsagWW0cdJpX/DO2jMw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/plugin-transform-new-target@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.7.4.tgz#4a0753d2d60639437be07b592a9e58ee00720167" integrity sha512-CnPRiNtOG1vRodnsyGX37bHQleHE14B9dnnlgSeEs3ek3fHN1A1SScglTCg1sfbe7sRQ2BUcpgpTpWSfMKz3gg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-object-super@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.7.4.tgz#48488937a2d586c0148451bf51af9d7dda567262" integrity sha512-ho+dAEhC2aRnff2JCA0SAK7V2R62zJd/7dmtoe7MHcso4C2mS+vZjn1Pb1pCVZvJs1mgsvv5+7sT+m3Bysb6eg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-replace-supers" "^7.7.4" "@babel/plugin-transform-parameters@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.7.4.tgz#da4555c97f39b51ac089d31c7380f03bca4075ce" integrity 
sha512-VJwhVePWPa0DqE9vcfptaJSzNDKrWU/4FbYCjZERtmqEs05g3UMXnYMZoXja7JAJ7Y7sPZipwm/pGApZt7wHlw== dependencies: "@babel/helper-call-delegate" "^7.7.4" "@babel/helper-get-function-arity" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-property-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.7.4.tgz#2388d6505ef89b266103f450f9167e6bd73f98c2" integrity sha512-MatJhlC4iHsIskWYyawl53KuHrt+kALSADLQQ/HkhTjX954fkxIEh4q5slL4oRAnsm/eDoZ4q0CIZpcqBuxhJQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-regenerator@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.4.tgz#d18eac0312a70152d7d914cbed2dc3999601cfc0" integrity sha512-e7MWl5UJvmPEwFJTwkBlPmqixCtr9yAASBqff4ggXTNicZiwbF8Eefzm6NVgfiBp7JdAGItecnctKTgH44q2Jw== dependencies: regenerator-transform "^0.14.0" "@babel/plugin-transform-reserved-words@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.7.4.tgz#6a7cf123ad175bb5c69aec8f6f0770387ed3f1eb" integrity sha512-OrPiUB5s5XvkCO1lS7D8ZtHcswIC57j62acAnJZKqGGnHP+TIc/ljQSrgdX/QyOTdEK5COAhuc820Hi1q2UgLQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.7.4.tgz#51fe458c1c1fa98a8b07934f4ed38b6cd62177a6" integrity sha512-O8kSkS5fP74Ad/8pfsCMGa8sBRdLxYoSReaARRNSz3FbFQj3z/QUvoUmJ28gn9BO93YfnXc3j+Xyaqe8cKDNBQ== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" resolve "^1.8.1" semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.7.4.tgz#74a0a9b2f6d67a684c6fbfd5f0458eb7ba99891e" integrity sha512-q+suddWRfIcnyG5YiDP58sT65AJDZSUhXQDZE3r04AuqD6d/XLaQPPXSBzP2zGerkgBivqtQm9XKGLuHqBID6Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-spread@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.7.4.tgz#aa673b356fe6b7e70d69b6e33a17fef641008578" integrity sha512-8OSs0FLe5/80cndziPlg4R0K6HcWSM0zyNhHhLsmw/Nc5MaA49cAsnoJ/t/YZf8qkG7fD+UjTRaApVDB526d7Q== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-sticky-regex@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.7.4.tgz#ffb68c05090c30732076b1285dc1401b404a123c" integrity sha512-Ls2NASyL6qtVe1H1hXts9yuEeONV2TJZmplLONkMPUG158CtmnrzW5Q5teibM5UVOFjG0D3IC5mzXR6pPpUY7A== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-regex" "^7.0.0" "@babel/plugin-transform-template-literals@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.7.4.tgz#1eb6411736dd3fe87dbd20cc6668e5121c17d604" integrity sha512-sA+KxLwF3QwGj5abMHkHgshp9+rRz+oY9uoRil4CyLtgEuE/88dpkeWgNk5qKVsJE9iSfly3nvHapdRiIS2wnQ== dependencies: "@babel/helper-annotate-as-pure" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-typeof-symbol@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.7.4.tgz#3174626214f2d6de322882e498a38e8371b2140e" integrity sha512-KQPUQ/7mqe2m0B8VecdyaW5XcQYaePyl9R7IsKd+irzj6jvbhoGnRE+M0aNkyAzI07VfUQ9266L5xMARitV3wg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-unicode-regex@^7.7.4": version "7.7.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.4.tgz#a3c0f65b117c4c81c5b6484f2a5e7b95346b83ae" integrity sha512-N77UUIV+WCvE+5yHw+oks3m18/umd7y392Zv7mYTpFqHtkpcc+QUz+gLJNTWVlWROIWeLqY0f3OjZxV5TcXnRw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/preset-env@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.7.4.tgz#ccaf309ae8d1ee2409c85a4e2b5e280ceee830f8" integrity sha512-Dg+ciGJjwvC1NIe/DGblMbcGq1HOtKbw8RLl4nIjlfcILKEOkWT/vRqPpumswABEBVudii6dnVwrBtzD7ibm4g== dependencies: "@babel/helper-module-imports" "^7.7.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-proposal-async-generator-functions" "^7.7.4" "@babel/plugin-proposal-dynamic-import" "^7.7.4" "@babel/plugin-proposal-json-strings" "^7.7.4" "@babel/plugin-proposal-object-rest-spread" "^7.7.4" "@babel/plugin-proposal-optional-catch-binding" "^7.7.4" "@babel/plugin-proposal-unicode-property-regex" "^7.7.4" "@babel/plugin-syntax-async-generators" "^7.7.4" "@babel/plugin-syntax-dynamic-import" "^7.7.4" "@babel/plugin-syntax-json-strings" "^7.7.4" "@babel/plugin-syntax-object-rest-spread" "^7.7.4" "@babel/plugin-syntax-optional-catch-binding" "^7.7.4" "@babel/plugin-syntax-top-level-await" "^7.7.4" "@babel/plugin-transform-arrow-functions" "^7.7.4" "@babel/plugin-transform-async-to-generator" "^7.7.4" "@babel/plugin-transform-block-scoped-functions" "^7.7.4" "@babel/plugin-transform-block-scoping" "^7.7.4" "@babel/plugin-transform-classes" "^7.7.4" "@babel/plugin-transform-computed-properties" "^7.7.4" "@babel/plugin-transform-destructuring" "^7.7.4" "@babel/plugin-transform-dotall-regex" "^7.7.4" "@babel/plugin-transform-duplicate-keys" "^7.7.4" "@babel/plugin-transform-exponentiation-operator" "^7.7.4" "@babel/plugin-transform-for-of" "^7.7.4" "@babel/plugin-transform-function-name" "^7.7.4" "@babel/plugin-transform-literals" "^7.7.4" 
"@babel/plugin-transform-member-expression-literals" "^7.7.4" "@babel/plugin-transform-modules-amd" "^7.7.4" "@babel/plugin-transform-modules-commonjs" "^7.7.4" "@babel/plugin-transform-modules-systemjs" "^7.7.4" "@babel/plugin-transform-modules-umd" "^7.7.4" "@babel/plugin-transform-named-capturing-groups-regex" "^7.7.4" "@babel/plugin-transform-new-target" "^7.7.4" "@babel/plugin-transform-object-super" "^7.7.4" "@babel/plugin-transform-parameters" "^7.7.4" "@babel/plugin-transform-property-literals" "^7.7.4" "@babel/plugin-transform-regenerator" "^7.7.4" "@babel/plugin-transform-reserved-words" "^7.7.4" "@babel/plugin-transform-shorthand-properties" "^7.7.4" "@babel/plugin-transform-spread" "^7.7.4" "@babel/plugin-transform-sticky-regex" "^7.7.4" "@babel/plugin-transform-template-literals" "^7.7.4" "@babel/plugin-transform-typeof-symbol" "^7.7.4" "@babel/plugin-transform-unicode-regex" "^7.7.4" "@babel/types" "^7.7.4" browserslist "^4.6.0" core-js-compat "^3.1.1" invariant "^2.2.2" js-levenshtein "^1.1.3" semver "^5.5.0" "@babel/runtime@^7.4.3": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.7.4.tgz#b23a856751e4bf099262f867767889c0e3fe175b" integrity sha512-r24eVUUr0QqNZa+qrImUk8fn5SPhHq+IfYvIoIMg0do3GdK9sMdiLKP3GYVVaxpPKORgm8KRKaNTEhAjgIpLMw== dependencies: regenerator-runtime "^0.13.2" "@babel/template@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.7.4.tgz#428a7d9eecffe27deac0a98e23bf8e3675d2a77b" integrity sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw== dependencies: "@babel/code-frame" "^7.0.0" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" "@babel/traverse@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.7.4.tgz#9c1e7c60fb679fe4fcfaa42500833333c2058558" integrity sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw== dependencies: 
"@babel/code-frame" "^7.5.5" "@babel/generator" "^7.7.4" "@babel/helper-function-name" "^7.7.4" "@babel/helper-split-export-declaration" "^7.7.4" "@babel/parser" "^7.7.4" "@babel/types" "^7.7.4" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.13" "@babel/types@^7.6.1", "@babel/types@^7.9.6": version "7.13.0" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.13.0.tgz#74424d2816f0171b4100f0ab34e9a374efdf7f80" integrity sha512-hE+HE8rnG1Z6Wzo+MhaKE5lM5eMx71T4EHJgku2E3xIfaULhDcxiiRxUYgwX8qwP1BBSlag+TdGOt6JAidIZTA== dependencies: "@babel/helper-validator-identifier" "^7.12.11" lodash "^4.17.19" to-fast-properties "^2.0.0" "@babel/types@^7.7.4": version "7.7.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.7.4.tgz#516570d539e44ddf308c07569c258ff94fde9193" integrity sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA== dependencies: esutils "^2.0.2" lodash "^4.17.13" to-fast-properties "^2.0.0" "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== "@types/glob@^7.1.1": version "7.1.1" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== dependencies: "@types/events" "*" "@types/minimatch" "*" "@types/node" "*" "@types/minimatch@*": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== "@types/node@*": version "12.12.12" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.12.tgz#529bc3e73dbb35dd9e90b0a1c83606a9d3264bdb" integrity 
sha512-MGuvYJrPU0HUwqF7LqvIj50RZUX23Z+m583KBygKYUZLlZ88n6w28XRNJRJgsHukLEnLz6w6SvxZoLgbr5wLqQ== "@webassemblyjs/ast@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== dependencies: "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@webassemblyjs/floating-point-hex-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== "@webassemblyjs/helper-api-error@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== "@webassemblyjs/helper-buffer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" integrity sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== "@webassemblyjs/helper-code-frame@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== dependencies: "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/helper-fsm@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" integrity 
sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== "@webassemblyjs/helper-module-context@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== dependencies: "@webassemblyjs/ast" "1.8.5" mamacro "^0.0.3" "@webassemblyjs/helper-wasm-bytecode@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== "@webassemblyjs/helper-wasm-section@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/ieee754@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.8.5": version "1.8.5" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" integrity sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== "@webassemblyjs/wasm-edit@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" integrity sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/helper-wasm-section" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-opt" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wast-printer" "1.8.5" "@webassemblyjs/wasm-gen@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wasm-opt@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" integrity sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-buffer" "1.8.5" "@webassemblyjs/wasm-gen" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" "@webassemblyjs/wasm-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" integrity 
sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-wasm-bytecode" "1.8.5" "@webassemblyjs/ieee754" "1.8.5" "@webassemblyjs/leb128" "1.8.5" "@webassemblyjs/utf8" "1.8.5" "@webassemblyjs/wast-parser@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/floating-point-hex-parser" "1.8.5" "@webassemblyjs/helper-api-error" "1.8.5" "@webassemblyjs/helper-code-frame" "1.8.5" "@webassemblyjs/helper-fsm" "1.8.5" "@xtuc/long" "4.2.2" "@webassemblyjs/wast-printer@1.8.5": version "1.8.5" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/wast-parser" "1.8.5" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== 
abbrev@1.0.x: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU= accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== dependencies: mime-types "~2.1.24" negotiator "0.6.2" acorn@^6.2.1: version "6.3.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.3.0.tgz#0087509119ffa4fc0a0041d1e93a417e68cb856e" integrity sha512-/czfa8BwS88b9gWQVhc8eknunSA2DoJpJyTQkhheIf5E48u1N0R4q/YxxsAeqRrmK9TQ/uYfgLDfZo91UlANIA== acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" integrity sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8= ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: version "3.4.1" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" integrity sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== ajv@^5.0.0: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= dependencies: co "^4.6.0" fast-deep-equal "^1.0.0" fast-json-stable-stringify "^2.0.0" json-schema-traverse 
"^0.3.0" ajv@^6.1.0, ajv@^6.10.2, ajv@^6.5.5: version "6.10.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== dependencies: fast-deep-equal "^2.0.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" integrity sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU= angular-mocks@^1.7.9: version "1.7.9" resolved "https://registry.yarnpkg.com/angular-mocks/-/angular-mocks-1.7.9.tgz#0a3b7e28b9a493b4e3010ed2b0f69a68e9b4f79b" integrity sha512-LQRqqiV3sZ7NTHBnNmLT0bXtE5e81t97+hkJ56oU0k3dqKv1s6F+nBWRlOVzqHWPGFOiPS8ZJVdrS8DFzHyNIA== angular@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/angular/-/angular-1.8.0.tgz#b1ec179887869215cab6dfd0df2e42caa65b1b51" integrity sha512-VdaMx+Qk0Skla7B5gw77a8hzlcOakwF8mjlW13DpIWIDlfqwAbSSLfd8N/qZnzEmQF4jC4iofInd3gE7vL8ZZg== ansi-colors@^3.0.0: version "3.2.4" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== ansi-html@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-regex@^4.1.0: version "4.1.0" resolved 
"https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.0, ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" anymatch@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== dependencies: micromatch "^3.1.4" normalize-path "^2.1.1" anymatch@~3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" aproba@^1.0.3, aproba@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: version "1.0.10" resolved 
"https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" arr-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-union@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= array-flatten@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= array-flatten@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= 
array-unique@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= arraybuffer.slice@~0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675" integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog== asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= asn1.js@^4.0.0: version "4.10.1" resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== dependencies: bn.js "^4.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== dependencies: safer-buffer "~2.1.0" assert-never@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/assert-never/-/assert-never-1.2.1.tgz#11f0e363bf146205fb08193b5c7b90f4d1cf44fe" integrity sha512-TaTivMB6pYI1kXwrFlEhLeGfOqoDNdTxjCdwRfFFkEA30Eu+k48W34nlok2EYWJfFFzqaEmichdNM7th6M5HNw== assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assert@^1.1.1: version "1.5.0" resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== dependencies: 
object-assign "^4.1.1" util "0.10.3" assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= async-each@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async-limiter@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== async@1.x: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= async@^2.0.0, async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== dependencies: lodash "^4.17.14" asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= atob@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== autoprefixer@^9.5.1: version "9.7.2" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.7.2.tgz#26cf729fbb709323b40171a874304884dcceffed" integrity sha512-LCAfcdej1182uVvPOZnytbq61AhnOZ/4JelDaJGDeNwewyU1AMaNthcHsyz1NRjTmd2FkurMckLWfkHg3Z//KA== dependencies: browserslist "^4.7.3" caniuse-lite "^1.0.30001010" chalk "^2.4.2" 
normalize-range "^0.1.2" num2fraction "^1.2.2" postcss "^7.0.23" postcss-value-parser "^4.0.2" aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" js-tokens "^3.0.2" babel-generator@^6.18.0: version "6.26.1" resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== dependencies: babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" detect-indent "^4.0.0" jsesc "^1.3.0" lodash "^4.17.4" source-map "^0.5.7" trim-right "^1.0.1" babel-loader@^8.0.5: version "8.0.6" resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb" integrity sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw== dependencies: find-cache-dir "^2.0.0" loader-utils "^1.0.2" mkdirp "^0.5.1" pify "^4.0.1" babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= dependencies: babel-runtime "^6.22.0" babel-plugin-dynamic-import-node@^2.3.0: version "2.3.0" resolved 
"https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== dependencies: object.assign "^4.1.0" babel-runtime@^6.0.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= dependencies: core-js "^2.4.0" regenerator-runtime "^0.11.0" babel-template@^6.16.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= dependencies: babel-runtime "^6.26.0" babel-traverse "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" lodash "^4.17.4" babel-traverse@^6.18.0, babel-traverse@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= dependencies: babel-code-frame "^6.26.0" babel-messages "^6.23.0" babel-runtime "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" debug "^2.6.8" globals "^9.18.0" invariant "^2.2.2" lodash "^4.17.4" babel-types@^6.18.0, babel-types@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: babel-runtime "^6.26.0" esutils "^2.0.2" lodash "^4.17.4" to-fast-properties "^1.0.3" babel-walk@3.0.0-canary-5: version "3.0.0-canary-5" resolved "https://registry.yarnpkg.com/babel-walk/-/babel-walk-3.0.0-canary-5.tgz#f66ecd7298357aee44955f235a6ef54219104b11" integrity sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw== dependencies: "@babel/types" 
"^7.9.6" babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" integrity sha1-c5JncZI7Whl0etZmqlzUv5xunOg= base64-js@^1.0.2: version "1.3.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== base64id@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY= base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" class-utils "^0.3.5" component-emitter "^1.2.1" define-property "^1.0.0" isobject "^3.0.1" mixin-deep "^1.2.0" pascalcase "^0.1.1" batch@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved 
"https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" better-assert@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" integrity sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI= dependencies: callsite "1.0.0" big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== big.js@^5.2.2: version "5.2.2" resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^1.0.0: version "1.13.1" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binary-extensions@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c" integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow== blob@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.5.tgz#d680eeef25f8cd91ad533f5b01eed48e64caf683" integrity sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig== bluebird@^3.3.0, bluebird@^3.5.5: version "3.7.1" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.1.tgz#df70e302b471d7473489acf26a93d63b53f874de" integrity sha512-DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg== bn.js@^4.0.0, bn.js@^4.1.0, 
bn.js@^4.1.1, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== body-parser@1.19.0, body-parser@^1.16.1: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== dependencies: bytes "3.1.0" content-type "~1.0.4" debug "2.6.9" depd "~1.1.2" http-errors "1.7.2" iconv-lite "0.4.24" on-finished "~2.3.0" qs "6.7.0" raw-body "2.4.0" type-is "~1.6.17" bonjour@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= dependencies: array-flatten "^2.1.0" deep-equal "^1.0.1" dns-equal "^1.0.0" dns-txt "^2.0.2" multicast-dns "^6.0.1" multicast-dns-service-types "^1.1.0" boolbase@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" braces@^2.3.1, braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" repeat-element "^1.1.2" snapdragon 
"^0.8.1" snapdragon-node "^2.0.1" split-string "^3.0.2" to-regex "^3.0.1" braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" brorand@^1.0.1, brorand@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== dependencies: buffer-xor "^1.0.3" cipher-base "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.3" inherits "^2.0.1" safe-buffer "^5.0.1" browserify-cipher@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== dependencies: browserify-aes "^1.0.4" browserify-des "^1.0.0" evp_bytestokey "^1.0.0" browserify-des@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== dependencies: cipher-base "^1.0.1" des.js "^1.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" browserify-rsa@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= dependencies: bn.js "^4.1.0" randombytes "^2.0.1" browserify-sign@^4.0.0: 
version "4.0.4" resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= dependencies: bn.js "^4.1.1" browserify-rsa "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.2" elliptic "^6.0.0" inherits "^2.0.1" parse-asn1 "^5.0.0" browserify-zlib@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== dependencies: pako "~1.0.5" browserslist@^4.6.0, browserslist@^4.7.3: version "4.16.6" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== dependencies: caniuse-lite "^1.0.30001219" colorette "^1.2.2" electron-to-chromium "^1.3.723" escalade "^3.1.1" node-releases "^1.1.71" buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== buffer-alloc@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== dependencies: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" buffer-fill@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= buffer-from@^1.0.0: version "1.1.1" resolved 
"https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== buffer-indexof@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= buffer@^4.3.0: version "4.9.2" resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" isarray "^1.0.0" "buildbot-build-common@link:../build_common": version "1.0.0" dependencies: "@babel/core" "^7.4.3" "@babel/plugin-syntax-dynamic-import" "^7.2.0" "@babel/plugin-transform-runtime" "^7.4.3" "@babel/preset-env" "^7.4.3" "@babel/runtime" "^7.4.3" autoprefixer "^9.5.1" babel-loader "^8.0.5" css-loader "^2.1.1" file-loader "^3.0.1" html-webpack-plugin "^3.2.0" import-glob-loader "^1.1.0" istanbul-instrumenter-loader "^3.0.1" jasmine-core "^3.4.0" karma "^4.1.0" karma-chrome-launcher "^2.2.0" karma-coverage "^1.1.2" karma-jasmine "^2.0.1" karma-sourcemap-loader "^0.3.7" karma-spec-reporter "^0.0.32" karma-webpack "^3.0.5" less "^3.9.0" less-loader "^5.0.0" mini-css-extract-plugin "^0.6.0" node-libs-browser "^2.2.0" null-loader "^1.0.0" postcss-loader "^3.0.0" pug "^3.0.1" raw-loader "^2.0.0" style-loader "^0.23.1" webpack "^4.30.0" webpack-cli "^3.3.1" webpack-dev-server "^3.3.1" webpack-fix-style-only-entries "^0.2.1" webpack-shell-plugin "^0.5.0" builtin-status-codes@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= bytes@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== cacache@^12.0.2: version "12.0.3" resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== dependencies: bluebird "^3.5.5" chownr "^1.1.1" figgy-pudding "^3.5.1" glob "^7.1.4" graceful-fs "^4.1.15" infer-owner "^1.0.3" lru-cache "^5.1.1" mississippi "^3.0.0" mkdirp "^0.5.1" move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.3" ssri "^6.0.1" unique-filename "^1.1.1" y18n "^4.0.0" cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" component-emitter "^1.2.1" get-value "^2.0.6" has-value "^1.0.0" isobject "^3.0.1" set-value "^2.0.0" to-object-path "^0.3.0" union-value "^1.0.0" unset-value "^1.0.0" caller-callsite@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= dependencies: callsites "^2.0.0" caller-path@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= dependencies: caller-callsite "^2.0.0" callsite@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" integrity sha1-KAOY5dZkvXQDi28JBRU+borxvCA= callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= camel-case@3.0.x: version "3.0.0" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" integrity sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= dependencies: no-case "^2.2.0" upper-case "^1.1.1" camelcase-keys@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= dependencies: camelcase "^2.0.0" map-obj "^1.0.0" camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= camelcase@^5.0.0, camelcase@^5.2.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== caniuse-lite@^1.0.30001010, caniuse-lite@^1.0.30001219: version "1.0.30001228" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz#bfdc5942cd3326fa51ee0b42fbef4da9d492a7fa" integrity sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A== caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity 
sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" has-ansi "^2.0.0" strip-ansi "^3.0.0" supports-color "^2.0.0" character-parser@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" integrity sha1-x84o821LzZdE5f/CxfzeHHMmH8A= dependencies: is-regex "^1.0.3" chokidar@^2.0.2, chokidar@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== dependencies: anymatch "^2.0.0" async-each "^1.0.1" braces "^2.3.2" glob-parent "^3.1.0" inherits "^2.0.3" is-binary-path "^1.0.0" is-glob "^4.0.0" normalize-path "^3.0.0" path-is-absolute "^1.0.0" readdirp "^2.2.1" upath "^1.1.1" optionalDependencies: fsevents "^1.2.7" chokidar@^3.0.0: version "3.3.0" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6" integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A== dependencies: anymatch "~3.1.1" braces "~3.0.2" glob-parent "~5.1.0" is-binary-path "~2.1.0" is-glob "~4.0.1" normalize-path "~3.0.0" readdirp "~3.2.0" optionalDependencies: fsevents "~2.1.1" chownr@^1.1.1, chownr@^1.1.4: version "1.1.4" 
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== chrome-trace-event@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== dependencies: tslib "^1.9.0" cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" define-property "^0.2.5" isobject "^3.0.0" static-extend "^0.1.1" clean-css@4.2.x: version "4.2.1" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" integrity sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== dependencies: source-map "~0.6.0" cliui@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== dependencies: string-width "^2.1.1" strip-ansi "^4.0.0" wrap-ansi "^2.0.0" cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" integrity 
sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== dependencies: string-width "^3.1.0" strip-ansi "^5.2.0" wrap-ansi "^5.1.0" clone@^2.1.1, clone@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit "^1.0.0" color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colorette@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== colors@^1.1.0, colors@^1.1.2: version "1.4.0" resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" integrity 
sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@2.17.x: version "2.17.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== commander@^2.20.0: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@~2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= component-bind@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E= component-emitter@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity 
sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== component-inherit@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" integrity sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM= compressible@~2.0.16: version "2.0.17" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.17.tgz#6e8c108a16ad58384a977f3a482ca20bff2f38c1" integrity sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw== dependencies: mime-db ">= 1.40.0 < 2" compression@^1.7.4: version "1.7.4" resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" bytes "3.0.0" compressible "~2.0.16" debug "2.6.9" on-headers "~1.0.2" safe-buffer "5.1.2" vary "~1.1.2" concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= concat-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" inherits "^2.0.3" readable-stream "^2.2.2" typedarray "^0.0.6" connect-history-api-fallback@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== connect@^3.6.0: version "3.7.0" resolved 
"https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== dependencies: debug "2.6.9" finalhandler "1.1.2" parseurl "~1.3.3" utils-merge "1.0.1" console-browserify@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= constantinople@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-4.0.1.tgz#0def113fa0e4dc8de83331a5cf79c8b325213151" integrity sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw== dependencies: "@babel/parser" "^7.6.0" "@babel/types" "^7.6.1" constants-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= content-disposition@0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== dependencies: safe-buffer "5.1.2" content-type@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.5.0, 
convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== dependencies: safe-buffer "~5.1.1" cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= cookie@0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= cookie@0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== copy-concurrently@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== dependencies: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.0" copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= core-js-compat@^3.1.1: version "3.4.2" resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.4.2.tgz#652fa7c54652b7f6586a893e37001df55ea2ac37" integrity sha512-W0Aj+LM3EAxxjD0Kp2o4be8UlnxIZHNupBv2znqrheR4aY2nOn91794k/xoSp+SxqqriiZpTsSwBtZr60cbkwQ== dependencies: browserslist "^4.7.3" semver "^6.3.0" core-js@^2.4.0: version "2.6.10" resolved 
"https://registry.yarnpkg.com/core-js/-/core-js-2.6.10.tgz#8a5b8391f8cc7013da703411ce5b585706300d7f" integrity sha512-I39t74+4t+zau64EN1fE5v2W31Adtc/REhzWN+gWRRXg6WH5qAsZm62DHpQ1+Yhe4047T55jvzz7MUqF/dBBlA== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cosmiconfig@^5.0.0: version "5.2.1" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== dependencies: import-fresh "^2.0.0" is-directory "^0.3.1" js-yaml "^3.13.1" parse-json "^4.0.0" create-ecdh@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== dependencies: bn.js "^4.1.0" elliptic "^6.0.0" create-hash@^1.1.0, create-hash@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== dependencies: cipher-base "^1.0.1" inherits "^2.0.1" md5.js "^1.3.4" ripemd160 "^2.0.1" sha.js "^2.4.0" create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: version "1.1.7" resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== dependencies: cipher-base "^1.0.3" create-hash "^1.1.0" inherits "^2.0.1" ripemd160 "^2.0.0" safe-buffer "^5.0.1" sha.js "^2.4.8" cross-spawn@6.0.5, cross-spawn@^6.0.0: version "6.0.5" resolved 
"https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== dependencies: nice-try "^1.0.4" path-key "^2.0.1" semver "^5.5.0" shebang-command "^1.2.0" which "^1.2.9" crypto-browserify@^3.11.0: version "3.12.0" resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== dependencies: browserify-cipher "^1.0.0" browserify-sign "^4.0.0" create-ecdh "^4.0.0" create-hash "^1.1.0" create-hmac "^1.1.0" diffie-hellman "^5.0.0" inherits "^2.0.1" pbkdf2 "^3.0.3" public-encrypt "^4.0.0" randombytes "^2.0.0" randomfill "^1.0.3" css-loader@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-2.1.1.tgz#d8254f72e412bb2238bb44dd674ffbef497333ea" integrity sha512-OcKJU/lt232vl1P9EEDamhoO9iKY3tIjY5GU+XDLblAykTdgs6Ux9P1hTHve8nFKy5KPpOXOsVI/hIwi3841+w== dependencies: camelcase "^5.2.0" icss-utils "^4.1.0" loader-utils "^1.2.3" normalize-path "^3.0.0" postcss "^7.0.14" postcss-modules-extract-imports "^2.0.0" postcss-modules-local-by-default "^2.0.6" postcss-modules-scope "^2.1.0" postcss-modules-values "^2.0.0" postcss-value-parser "^3.3.0" schema-utils "^1.0.0" css-select@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= dependencies: boolbase "~1.0.0" css-what "2.1" domutils "1.5.1" nth-check "~1.0.1" css-what@2.1: version "2.1.3" resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== cssesc@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== currently-unhandled@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= dependencies: array-find-index "^1.0.1" custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha1-XQKkaFCt8bSjF5RqOSj8y1v9BCU= cyclist@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= d@1, d@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" type "^1.0.1" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" date-format@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/date-format/-/date-format-2.1.0.tgz#31d5b5ea211cf5fd764cd38baf9d033df7e125cf" integrity sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA== dateformat@^1.0.6: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" integrity sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk= dependencies: get-stdin "^4.0.1" meow "^3.3.0" debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved 
"https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^3.1.1, debug@^3.2.5, debug@^3.2.6: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" debug@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== dependencies: ms "2.0.0" decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= decode-uri-component@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= deep-equal@^1.0.1: version "1.1.1" resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== dependencies: is-arguments "^1.0.4" is-date-object "^1.0.1" is-regex "^1.0.4" object-is "^1.0.1" object-keys "^1.1.1" regexp.prototype.flags "^1.2.0" deep-extend@^0.6.0: version "0.6.0" resolved 
"https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= default-gateway@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== dependencies: execa "^1.0.0" ip-regex "^2.1.0" define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== dependencies: object-keys "^1.0.12" define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" define-property@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" isobject "^3.0.1" del@^4.1.1: version "4.1.1" resolved 
"https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== dependencies: "@types/glob" "^7.1.1" globby "^6.1.0" is-path-cwd "^2.0.0" is-path-in-cwd "^2.0.0" p-map "^2.0.0" pify "^4.0.1" rimraf "^2.6.3" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= des.js@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== dependencies: inherits "^2.0.1" minimalistic-assert "^1.0.0" destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= detect-indent@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= dependencies: repeating "^2.0.0" detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity 
sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= detect-node@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw= diffie-hellman@^5.0.0: version "5.0.3" resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== dependencies: bn.js "^4.1.0" miller-rabin "^4.0.0" randombytes "^2.0.0" dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= dns-packet@^1.3.1: version "1.3.4" resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.4.tgz#e3455065824a2507ba886c55a89963bb107dec6f" integrity sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA== dependencies: ip "^1.1.0" safe-buffer "^5.0.1" dns-txt@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= dependencies: buffer-indexof "^1.0.0" doctypes@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" integrity sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk= dom-converter@^0.2: version "0.2.0" resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" 
dom-serialize@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" integrity sha1-ViromZ9Evl6jB29UGdzVnrQ6yVs= dependencies: custom-event "~1.0.0" ent "~2.2.0" extend "^3.0.0" void-elements "^2.0.0" dom-serializer@0: version "0.2.2" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== dependencies: domelementtype "^2.0.1" entities "^2.0.0" domain-browser@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== domelementtype@1, domelementtype@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== domelementtype@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d" integrity sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ== domhandler@^2.3.0: version "2.4.2" resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== dependencies: domelementtype "1" domutils@1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= dependencies: dom-serializer "0" domelementtype "1" domutils@^1.5.1: version "1.7.0" 
resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== dependencies: dom-serializer "0" domelementtype "1" duplexify@^3.4.2, duplexify@^3.6.0: version "3.7.1" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" inherits "^2.0.1" readable-stream "^2.0.0" stream-shift "^1.0.0" ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.3.723: version "1.3.738" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.738.tgz#aec24b091c82acbfabbdcce08076a703941d17ca" integrity sha512-vCMf4gDOpEylPSLPLSwAEsz+R3ShP02Y3cAKMZvTqule3XcPp7tgc/0ESI7IS6ZeyBlGClE50N53fIOkcIVnpw== elliptic@^6.0.0: version "6.5.4" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== dependencies: bn.js "^4.11.9" brorand "^1.1.0" hash.js "^1.0.0" hmac-drbg "^1.0.1" inherits "^2.0.4" minimalistic-assert "^1.0.1" minimalistic-crypto-utils "^1.0.1" emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" integrity 
sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== emojis-list@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" engine.io-client@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.2.1.tgz#6f54c0475de487158a1a7c77d10178708b6add36" integrity sha512-y5AbkytWeM4jQr7m/koQLc5AxpRKC1hEVUb/s1FUAWEJq5AzJJ4NLvzuKPuxtDi5Mq755WuDvZ6Iv2rXj4PTzw== dependencies: component-emitter "1.2.1" component-inherit "0.0.3" debug "~3.1.0" engine.io-parser "~2.1.1" has-cors "1.1.0" indexof "0.0.1" parseqs "0.0.5" parseuri "0.0.5" ws "~3.3.1" xmlhttprequest-ssl "~1.5.4" yeast "0.1.2" engine.io-parser@~2.1.0, engine.io-parser@~2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6" integrity sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA== dependencies: after "0.8.2" arraybuffer.slice "~0.0.7" base64-arraybuffer "0.1.5" blob "0.0.5" has-binary2 "~1.0.2" engine.io@~3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.2.1.tgz#b60281c35484a70ee0351ea0ebff83ec8c9522a2" integrity sha512-+VlKzHzMhaU+GsCIg4AoXF1UdDFjHHwMmMKqMJNDNLlUlejz58FCy4LBqB2YVJskHGYl06BatYWKP2TVdVXE5w== dependencies: accepts "~1.3.4" 
base64id "1.0.0" cookie "0.3.1" debug "~3.1.0" engine.io-parser "~2.1.0" ws "~3.3.1" enhanced-resolve@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" tapable "^1.0.0" enhanced-resolve@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" integrity sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== dependencies: graceful-fs "^4.1.2" memory-fs "^0.5.0" tapable "^1.0.0" ent@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= entities@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4" integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw== errno@^0.1.1, errno@^0.1.3, errno@~0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== dependencies: prr "~1.0.1" error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity 
sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es-abstract@^1.5.1: version "1.16.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.16.0.tgz#d3a26dc9c3283ac9750dca569586e976d9dcc06d" integrity sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg== dependencies: es-to-primitive "^1.2.0" function-bind "^1.1.1" has "^1.0.3" has-symbols "^1.0.0" is-callable "^1.1.4" is-regex "^1.0.4" object-inspect "^1.6.0" object-keys "^1.1.1" string.prototype.trimleft "^2.1.0" string.prototype.trimright "^2.1.0" es-to-primitive@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50: version "0.10.53" resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== dependencies: es6-iterator "~2.0.3" es6-symbol "~3.1.3" next-tick "~1.0.0" es6-iterator@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= dependencies: d "1" es5-ext "^0.10.35" es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== dependencies: d "^1.0.1" ext "^1.1.2" escalade@^3.1.1: version "3.1.1" resolved 
"https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@1.8.x: version "1.8.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" integrity sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg= dependencies: esprima "^2.7.1" estraverse "^1.9.1" esutils "^2.0.2" optionator "^0.8.1" optionalDependencies: source-map "~0.2.0" eslint-scope@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== dependencies: esrecurse "^4.1.0" estraverse "^4.1.1" esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esrecurse@^4.1.0: version "4.2.1" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== 
dependencies: estraverse "^4.1.0" estraverse@^1.9.1: version "1.9.3" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= estraverse@^4.1.0, estraverse@^4.1.1: version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" integrity sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== eventsource@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== dependencies: original "^1.0.0" evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" integrity 
sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== dependencies: md5.js "^1.3.4" safe-buffer "^5.1.1" execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== dependencies: cross-spawn "^6.0.0" get-stream "^4.0.0" is-stream "^1.1.0" npm-run-path "^2.0.0" p-finally "^1.0.0" signal-exit "^3.0.0" strip-eof "^1.0.0" expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= dependencies: debug "^2.3.3" define-property "^0.2.5" extend-shallow "^2.0.1" posix-character-classes "^0.1.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= dependencies: homedir-polyfill "^1.0.1" express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== dependencies: accepts "~1.3.7" array-flatten "1.1.1" body-parser "1.19.0" content-disposition "0.5.3" content-type "~1.0.4" cookie "0.4.0" cookie-signature "1.0.6" debug "2.6.9" depd "~1.1.2" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" finalhandler "~1.1.2" fresh "0.5.2" merge-descriptors "1.0.1" methods "~1.1.2" on-finished "~2.3.0" parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.5" qs "6.7.0" range-parser "~1.2.1" safe-buffer "5.1.2" send "0.17.1" serve-static "1.14.1" setprototypeof "1.1.1" statuses "~1.5.0" type-is "~1.6.18" utils-merge 
"1.0.1" vary "~1.1.2" ext@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.2.0.tgz#8dd8d2dd21bcced3045be09621fa0cbf73908ba4" integrity sha512-0ccUQK/9e3NreLFg6K6np8aPyRgwycx+oFGtfx1dSp7Wj00Ozw9r05FgBRlzjf2XBM7LAzwgLyDscRrtSU91hA== dependencies: type "^2.0.0" extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= dependencies: assign-symbols "^1.0.0" is-extendable "^1.0.1" extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== dependencies: array-unique "^0.3.2" define-property "^1.0.0" expand-brackets "^2.1.4" extend-shallow "^2.0.1" fragment-cache "^0.2.1" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^1.0.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= fast-deep-equal@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= fast-levenshtein@~2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= faye-websocket@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" integrity sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= dependencies: websocket-driver ">=0.5.1" faye-websocket@~0.11.1: version "0.11.3" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.3.tgz#5c0e9a8968e8912c286639fde977a8b209f2508e" integrity sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== dependencies: websocket-driver ">=0.5.1" figgy-pudding@^3.5.1: version "3.5.2" resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== file-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-3.0.1.tgz#f8e0ba0b599918b51adfe45d66d1e771ad560faa" integrity sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw== dependencies: loader-utils "^1.0.2" schema-utils "^1.0.0" fill-range@^4.0.0: version 
"4.0.0" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= dependencies: extend-shallow "^2.0.1" is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range "^2.1.0" fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" finalhandler@1.1.2, finalhandler@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" on-finished "~2.3.0" parseurl "~1.3.3" statuses "~1.5.0" unpipe "~1.0.0" find-cache-dir@^2.0.0, find-cache-dir@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== dependencies: commondir "^1.0.1" make-dir "^2.0.0" pkg-dir "^3.0.0" find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= dependencies: path-exists "^2.0.0" pinkie-promise "^2.0.0" find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" findup-sync@3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== dependencies: detect-file "^1.0.0" is-glob "^4.0.0" micromatch "^3.0.4" resolve-dir "^1.0.1" flatted@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.1.tgz#69e57caa8f0eacbc281d2e2cb458d46fdb449e08" integrity sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg== flush-write-stream@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== dependencies: inherits "^2.0.3" readable-stream "^2.3.6" follow-redirects@^1.0.0: version "1.13.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.0.tgz#b42e8d93a2a7eea5ed88633676d6597bc8e384db" integrity sha512-aq6gF1BEKje4a9i9+5jimNFIpq4Q1WiwBToeRK5NvZBd/TRsmW8BsJfOEGkr76TbOyPVD3OVDN910EcUNtRYEA== for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" forwarded@~0.1.2: version "0.1.2" resolved 
"https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= dependencies: map-cache "^0.2.2" fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= from2@^2.1.0: version "2.3.0" resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= dependencies: inherits "^2.0.1" readable-stream "^2.0.0" fs-access@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" integrity sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o= dependencies: null-check "^1.0.0" fs-extra@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== dependencies: graceful-fs "^4.1.2" jsonfile "^4.0.0" universalify "^0.1.0" fs-minipass@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== dependencies: minipass "^2.6.0" fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= dependencies: graceful-fs "^4.1.2" iferr "^0.1.5" imurmurhash "^0.1.4" readable-stream "1 || 2" fs.realpath@^1.0.0: version 
"1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.2.7: version "1.2.9" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: nan "^2.12.1" node-pre-gyp "^0.12.0" fsevents@~2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805" integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA== function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" has-unicode "^2.0.0" object-assign "^4.1.0" signal-exit "^3.0.0" string-width "^1.0.1" strip-ansi "^3.0.1" wide-align "^1.1.0" get-caller-file@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-caller-file@^2.0.1: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-stdin@^4.0.1: version "4.0.1" resolved 
"https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= get-stream@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== dependencies: pump "^3.0.0" get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-parent@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= dependencies: is-glob "^3.1.0" path-dirname "^1.0.0" glob-parent@~5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.0.tgz#5f4c1d1e748d30cd73ad2944b3577a81b081e8c2" integrity sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw== dependencies: is-glob "^4.0.1" glob@^5.0.13, glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" integrity sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E= dependencies: inflight "^1.0.4" inherits "2" minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" glob@^7.0.3, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: 
fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" global-modules@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== dependencies: global-prefix "^3.0.0" global-modules@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" is-windows "^1.0.1" resolve-dir "^1.0.0" global-prefix@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= dependencies: expand-tilde "^2.0.2" homedir-polyfill "^1.0.1" ini "^1.3.4" is-windows "^1.0.1" which "^1.2.14" global-prefix@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== dependencies: ini "^1.3.5" kind-of "^6.0.2" which "^1.3.1" globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== globby@^6.1.0: version "6.1.0" resolved 
"https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" glob "^7.0.3" object-assign "^4.0.1" pify "^2.0.0" pinkie-promise "^2.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: version "4.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== handle-thing@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754" integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ== handlebars@^4.0.1: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== dependencies: minimist "^1.2.5" neo-async "^2.6.0" source-map "^0.6.1" wordwrap "^1.0.0" optionalDependencies: uglify-js "^3.1.4" har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.0: version "5.1.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: ajv "^6.5.5" har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-binary2@~1.0.2: version "1.0.3" resolved 
"https://registry.yarnpkg.com/has-binary2/-/has-binary2-1.0.3.tgz#7776ac627f3ea77250cfc332dab7ddf5e4f5d11d" integrity sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw== dependencies: isarray "2.0.1" has-cors@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk= has-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-symbols@^1.0.0, has-symbols@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= has-value@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= dependencies: get-value "^2.0.3" has-values "^0.1.4" isobject "^2.0.0" has-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: get-value "^2.0.6" has-values "^1.0.0" isobject "^3.0.0" has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity 
sha1-bWHeldkd/Km5oCCJrThL/49it3E= has-values@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= dependencies: is-number "^3.0.0" kind-of "^4.0.0" has@^1.0.1, has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hash-base@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== dependencies: inherits "^2.0.3" minimalistic-assert "^1.0.1" he@1.2.x: version "1.2.0" resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== hmac-drbg@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= dependencies: hash.js "^1.0.3" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.1" homedir-polyfill@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.9" 
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== hpack.js@^2.1.6: version "2.1.6" resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= dependencies: inherits "^2.0.1" obuf "^1.0.0" readable-stream "^2.0.1" wbuf "^1.1.0" html-entities@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8= html-minifier@^3.2.3: version "3.5.21" resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== dependencies: camel-case "3.0.x" clean-css "4.2.x" commander "2.17.x" he "1.2.x" param-case "2.1.x" relateurl "0.2.x" uglify-js "3.4.x" html-webpack-plugin@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= dependencies: html-minifier "^3.2.3" loader-utils "^0.2.16" lodash "^4.17.3" pretty-error "^2.0.2" tapable "^1.0.0" toposort "^1.0.0" util.promisify "1.0.0" htmlparser2@^3.3.0: version "3.10.1" resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== dependencies: domelementtype "^1.3.1" domhandler "^2.3.0" domutils "^1.5.1" entities "^1.1.1" inherits "^2.0.1" readable-stream "^3.1.1" http-deceiver@^1.2.7: version "1.2.7" resolved 
"https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= http-errors@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" http-errors@~1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= dependencies: depd "~1.1.2" inherits "2.0.3" setprototypeof "1.1.0" statuses ">= 1.4.0 < 2" http-errors@~1.7.2: version "1.7.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== dependencies: depd "~1.1.2" inherits "2.0.4" setprototypeof "1.1.1" statuses ">= 1.5.0 < 2" toidentifier "1.0.0" "http-parser-js@>=0.4.0 <0.4.11": version "0.4.10" resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4" integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q= http-proxy-middleware@0.19.1: version "0.19.1" resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== dependencies: http-proxy "^1.17.0" is-glob "^4.0.0" lodash "^4.17.11" micromatch "^3.1.10" http-proxy@^1.13.0, http-proxy@^1.17.0: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity 
sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" requires-port "^1.0.0" http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" https-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= iconv-lite@0.4.24, iconv-lite@^0.4.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" icss-replace-symbols@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= icss-utils@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== dependencies: postcss "^7.0.14" ieee754@^1.1.4: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== iferr@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= ignore-walk@^3.0.1: version "3.0.3" resolved 
"https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== dependencies: minimatch "^3.0.4" image-size@~0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= import-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= dependencies: import-from "^2.1.0" import-fresh@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= dependencies: caller-path "^2.0.0" resolve-from "^3.0.0" import-from@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" integrity sha1-M1238qev/VOqpHHUuAId7ja387E= dependencies: resolve-from "^3.0.0" import-glob-loader@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/import-glob-loader/-/import-glob-loader-1.1.0.tgz#98d84c0f661c8ba9f821d9ddb7c6b6dc8e97eca2" integrity sha1-mNhMD2Yci6n4Idndt8a23I6X7KI= dependencies: glob "^5.0.13" loader-utils "^0.2.10" import-local@2.0.0, import-local@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== dependencies: pkg-dir "^3.0.0" resolve-cwd "^2.0.0" imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= indent-string@^2.1.0: version "2.1.0" 
resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= dependencies: repeating "^2.0.0" indexes-of@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= indexof@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= infer-owner@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: version "1.3.7" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84" integrity 
sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ== internal-ip@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" integrity sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== dependencies: default-gateway "^4.2.0" ipaddr.js "^1.9.0" interpret@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" invert-kv@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= ip@^1.1.0, ip@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= ipaddr.js@1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== ipaddr.js@^1.9.0: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity 
sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== is-absolute-url@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== is-accessor-descriptor@^0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arguments@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.0.4.tgz#3faf966c7cba0ff437fb31f6250082fcf0448cf3" integrity sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA== is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-buffer@^1.1.5: version "1.1.6" 
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-callable@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== is-core-module@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== dependencies: has "^1.0.3" is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== dependencies: kind-of "^6.0.0" is-date-object@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" is-data-descriptor "^1.0.0" kind-of "^6.0.2" is-directory@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= is-expression@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-4.0.0.tgz#c33155962abf21d0afd2552514d67d2ec16fd2ab" integrity sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A== dependencies: acorn "^7.1.1" object-assign "^4.1.1" is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= is-extendable@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^2.1.0, is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" integrity sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity 
sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= is-glob@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-cwd@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== is-path-in-cwd@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== dependencies: is-path-inside "^2.1.0" is-path-inside@^2.1.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== dependencies: path-is-inside "^1.0.2" is-plain-obj@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= is-regex@^1.0.3, is-regex@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= dependencies: has "^1.0.1" is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-symbol@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== dependencies: has-symbols "^1.0.1" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-utf8@^0.2.0: version "0.2.1" resolved 
"https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== is-wsl@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isarray@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.1.tgz#a37d94ed9cda2d59865c9f76fe596ee1f338741e" integrity sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4= isbinaryfile@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" integrity sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== dependencies: buffer-alloc "^1.2.0" isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved 
"https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul-instrumenter-loader@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/istanbul-instrumenter-loader/-/istanbul-instrumenter-loader-3.0.1.tgz#9957bd59252b373fae5c52b7b5188e6fde2a0949" integrity sha512-a5SPObZgS0jB/ixaKSMdn6n/gXSrK2S6q/UfRJBT3e6gQmVjwZROTODQsYW5ZNwOu78hG62Y3fWlebaVOL0C+w== dependencies: convert-source-map "^1.5.0" istanbul-lib-instrument "^1.7.3" loader-utils "^1.1.0" schema-utils "^0.3.0" istanbul-lib-coverage@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0" integrity sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ== istanbul-lib-instrument@^1.7.3: version "1.10.2" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca" integrity sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A== dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" babylon "^6.18.0" istanbul-lib-coverage "^1.2.1" semver "^5.3.0" istanbul@^0.4.0: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" integrity sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs= dependencies: abbrev "1.0.x" async "1.x" escodegen "1.8.x" esprima "2.7.x" glob "^5.0.15" handlebars "^4.0.1" js-yaml "3.x" mkdirp "0.5.x" nopt "3.x" once "1.x" resolve "1.1.x" supports-color "^3.1.0" which "^1.1.1" wordwrap "^1.0.0" jasmine-core@^3.3, jasmine-core@^3.4.0: version "3.5.0" resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-3.5.0.tgz#132c23e645af96d85c8bca13c8758b18429fc1e4" integrity 
sha512-nCeAiw37MIMA9w9IXso7bRaLl+c/ef3wnxsoSAlYrzS+Ot0zTG6nU8G/cIfGkqpkjX2wNaIW9RFG0TwIFnG6bA== js-levenshtein@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-stringify@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" integrity sha1-Fzb939lyTyijaCrcYjCufk6Weds= "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@3.x, js-yaml@^3.13.1: version "3.13.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved 
"https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== json-schema-traverse@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= json3@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== json5@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" integrity 
sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== dependencies: minimist "^1.2.0" json5@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== dependencies: minimist "^1.2.0" jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= optionalDependencies: graceful-fs "^4.1.6" jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" json-schema "0.2.3" verror "1.10.0" jstransformer@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" integrity sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM= dependencies: is-promise "^2.0.0" promise "^7.0.1" karma-chrome-launcher@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" integrity sha512-uf/ZVpAabDBPvdPdveyk1EPgbnloPvFFGgmRhYLTDH7gEB4nZdSBk8yTU47w1g/drLSx5uMOkjKk7IWKfWg/+w== dependencies: fs-access "^1.0.0" which "^1.2.1" karma-coverage@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/karma-coverage/-/karma-coverage-1.1.2.tgz#cc09dceb589a83101aca5fe70c287645ef387689" integrity sha512-eQawj4Cl3z/CjxslYy9ariU4uDh7cCNFZHNWXWRpl0pNeblY/4wHR7M7boTYXWrn9bY0z2pZmr11eKje/S/hIw== dependencies: dateformat "^1.0.6" istanbul "^0.4.0" lodash "^4.17.0" minimatch "^3.0.0" source-map "^0.5.1" karma-jasmine@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-2.0.1.tgz#26e3e31f2faf272dd80ebb0e1898914cc3a19763" integrity sha512-iuC0hmr9b+SNn1DaUD2QEYtUxkS1J+bSJSn7ejdEexs7P8EYvA1CWkEdrDQ+8jVH3AgWlCNwjYsT1chjcNW9lA== dependencies: jasmine-core "^3.3" karma-sourcemap-loader@^0.3.7: version "0.3.7" resolved "https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8" integrity sha1-kTIsd/jxPUb+0GKwQuEAnUxFBdg= dependencies: graceful-fs "^4.1.2" karma-spec-reporter@^0.0.32: version "0.0.32" resolved "https://registry.yarnpkg.com/karma-spec-reporter/-/karma-spec-reporter-0.0.32.tgz#2e9c7207ea726771260259f82becb543209e440a" integrity sha1-LpxyB+pyZ3EmAln4K+y1QyCeRAo= dependencies: colors "^1.1.2" karma-webpack@^3.0.5: version "3.0.5" resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-3.0.5.tgz#1ff1e3a690fb73ae95ee95f9ab58f341cfc7b40f" integrity sha512-nRudGJWstvVuA6Tbju9tyGUfXTtI1UXMXoRHVmM2/78D0q6s/Ye2IC157PKNDC15PWFGR0mVIRtWLAdcfsRJoA== dependencies: async "^2.0.0" babel-runtime "^6.0.0" loader-utils "^1.0.0" lodash "^4.0.0" source-map "^0.5.6" webpack-dev-middleware "^2.0.6" karma@^4.1.0: version "4.4.1" resolved "https://registry.yarnpkg.com/karma/-/karma-4.4.1.tgz#6d9aaab037a31136dc074002620ee11e8c2e32ab" integrity sha512-L5SIaXEYqzrh6b1wqYC42tNsFMx2PWuxky84pK9coK09MvmL7mxii3G3bZBh/0rvD27lqDd0le9jyhzvwif73A== dependencies: bluebird "^3.3.0" body-parser "^1.16.1" braces "^3.0.2" chokidar "^3.0.0" colors "^1.1.0" connect "^3.6.0" di "^0.0.1" dom-serialize "^2.2.0" flatted "^2.0.0" glob "^7.1.1" graceful-fs "^4.1.2" http-proxy "^1.13.0" isbinaryfile "^3.0.0" lodash "^4.17.14" log4js "^4.0.0" mime "^2.3.1" minimatch "^3.0.2" optimist "^0.6.1" qjobs "^1.1.4" range-parser "^1.2.0" rimraf "^2.6.0" safe-buffer "^5.0.1" socket.io "2.1.1" source-map "^0.6.1" tmp "0.0.33" useragent "2.3.0" killable@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== lcid@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== dependencies: invert-kv "^2.0.0" less-loader@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-5.0.0.tgz#498dde3a6c6c4f887458ee9ed3f086a12ad1b466" integrity sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg== dependencies: clone "^2.1.1" loader-utils "^1.1.0" pify "^4.0.1" less@^3.9.0: version "3.10.3" resolved "https://registry.yarnpkg.com/less/-/less-3.10.3.tgz#417a0975d5eeecc52cff4bcfa3c09d35781e6792" integrity 
sha512-vz32vqfgmoxF1h3K4J+yKCtajH0PWmjkIFgbs5d78E/c/e+UQTnI+lWK+1eQRE95PXM2mC3rJlLSSP9VQHnaow== dependencies: clone "^2.1.2" optionalDependencies: errno "^0.1.1" graceful-fs "^4.1.2" image-size "~0.5.0" mime "^1.4.1" mkdirp "^0.5.0" promise "^7.1.1" request "^2.83.0" source-map "~0.6.0" levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" pify "^2.0.0" pinkie-promise "^2.0.0" strip-bom "^2.0.0" loader-runner@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== loader-utils@1.2.3, loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== dependencies: big.js "^5.2.2" emojis-list "^2.0.0" json5 "^1.0.1" loader-utils@^0.2.10, loader-utils@^0.2.16: version "0.2.17" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= dependencies: big.js "^3.1.3" emojis-list "^2.0.0" json5 "^0.5.0" object-assign "^4.0.1" locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" integrity 
sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" lodash@^4.0.0, lodash@^4.17.0, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.3, lodash@^4.17.4: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-symbols@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== dependencies: chalk "^2.0.1" log4js@^4.0.0: version "4.5.1" resolved "https://registry.yarnpkg.com/log4js/-/log4js-4.5.1.tgz#e543625e97d9e6f3e6e7c9fc196dd6ab2cae30b5" integrity sha512-EEEgFcE9bLgaYUKuozyFfytQM2wDHtXn4tAN41pkaxpNjAykv11GVdeI4tHtmPWW4Xrgh9R/2d7XYghDVjbKKw== dependencies: date-format "^2.0.0" debug "^4.1.1" flatted "^2.0.0" rfdc "^1.1.4" streamroller "^1.0.6" loglevel@^1.6.4: version "1.6.6" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.6.tgz#0ee6300cc058db6b3551fa1c4bf73b83bb771312" integrity sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ== loglevelnext@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/loglevelnext/-/loglevelnext-1.0.5.tgz#36fc4f5996d6640f539ff203ba819641680d75a2" integrity sha512-V/73qkPuJmx4BcBF19xPBr+0ZRVBhc4POxvZTZdMeXpJ4NItXSJ/MSwuFT0kQJlCbXvdlZoQQ/418bS1y9Jh6A== dependencies: es6-symbol "^3.1.1" object.assign "^4.1.0" loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: 
js-tokens "^3.0.0 || ^4.0.0" loud-rejection@^1.0.0, loud-rejection@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= dependencies: currently-unhandled "^0.4.1" signal-exit "^3.0.0" lower-case@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= lru-cache@4.1.x: version "4.1.5" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" make-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== dependencies: pify "^4.0.1" semver "^5.6.0" mamacro@^0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== map-age-cleaner@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== dependencies: p-defer "^1.0.0" map-cache@^0.2.2: version "0.2.2" resolved 
"https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" md5.js@^1.3.4: version "1.3.5" resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== dependencies: hash-base "^3.0.0" inherits "^2.0.1" safe-buffer "^5.1.2" media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= mem@^4.0.0: version "4.3.0" resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== dependencies: map-age-cleaner "^0.1.1" mimic-fn "^2.0.0" p-is-promise "^2.0.0" memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= dependencies: errno "^0.1.3" readable-stream "^2.0.1" memory-fs@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== dependencies: errno "^0.1.3" readable-stream "^2.0.1" 
meow@^3.3.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= dependencies: camelcase-keys "^2.0.0" decamelize "^1.1.2" loud-rejection "^1.0.0" map-obj "^1.0.1" minimist "^1.1.3" normalize-package-data "^2.3.4" object-assign "^4.0.1" read-pkg-up "^1.0.1" redent "^1.0.0" trim-newlines "^1.0.0" merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" braces "^2.3.1" define-property "^2.0.2" extend-shallow "^3.0.2" extglob "^2.0.4" fragment-cache "^0.2.1" kind-of "^6.0.2" nanomatch "^1.2.9" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.2" miller-rabin@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== dependencies: bn.js "^4.0.0" brorand "^1.0.1" mime-db@1.42.0, "mime-db@>= 1.40.0 < 2": version "1.42.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.42.0.tgz#3e252907b4c7adb906597b4b65636272cf9e7bac" integrity sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ== mime-types@^2.1.12, mime-types@~2.1.17, 
mime-types@~2.1.19, mime-types@~2.1.24: version "2.1.25" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.25.tgz#39772d46621f93e2a80a856c53b86a62156a6437" integrity sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg== dependencies: mime-db "1.42.0" mime@1.6.0, mime@^1.4.1: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.1.0, mime@^2.3.1, mime@^2.4.4: version "2.4.4" resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== mimic-fn@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== mini-css-extract-plugin@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz#a3f13372d6fcde912f3ee4cd039665704801e3b9" integrity sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw== dependencies: loader-utils "^1.1.0" normalize-url "^2.0.1" schema-utils "^1.0.0" webpack-sources "^1.1.0" minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimalistic-crypto-utils@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity 
sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= "minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= minipass@^2.6.0, minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" minizlib@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== dependencies: minipass "^2.9.0" mississippi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== dependencies: concat-stream "^1.5.0" duplexify "^3.4.2" end-of-stream "^1.1.0" flush-write-stream "^1.0.0" from2 "^2.1.0" parallel-transform "^1.1.0" pump "^3.0.0" pumpify "^1.3.3" stream-each "^1.1.0" through2 "^2.0.0" mixin-deep@^1.2.0: version "1.3.2" resolved 
"https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" move-concurrently@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= dependencies: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.3" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== ms@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== multicast-dns-service-types@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= multicast-dns@^6.0.1: version "6.2.3" resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" integrity 
sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== dependencies: dns-packet "^1.3.1" thunky "^1.0.2" nan@^2.12.1: version "2.14.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" array-unique "^0.3.2" define-property "^2.0.2" extend-shallow "^3.0.2" fragment-cache "^0.2.1" is-windows "^1.0.2" kind-of "^6.0.2" object.pick "^1.3.0" regex-not "^1.0.0" snapdragon "^0.8.1" to-regex "^3.0.1" needle@^2.2.1: version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" negotiator@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== neo-async@^2.5.0, neo-async@^2.6.0, neo-async@^2.6.1: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== next-tick@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= nice-try@^1.0.4: version "1.0.5" resolved 
"https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== no-case@^2.2.0: version "2.3.2" resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== dependencies: lower-case "^1.1.1" node-forge@0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579" integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ== node-libs-browser@^2.2.0, node-libs-browser@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== dependencies: assert "^1.1.1" browserify-zlib "^0.2.0" buffer "^4.3.0" console-browserify "^1.1.0" constants-browserify "^1.0.0" crypto-browserify "^3.11.0" domain-browser "^1.1.1" events "^3.0.0" https-browserify "^1.0.0" os-browserify "^0.3.0" path-browserify "0.0.1" process "^0.11.10" punycode "^1.2.4" querystring-es3 "^0.2.0" readable-stream "^2.3.3" stream-browserify "^2.0.1" stream-http "^2.7.2" string_decoder "^1.0.0" timers-browserify "^2.0.4" tty-browserify "0.0.0" url "^0.11.0" util "^0.11.0" vm-browserify "^1.0.1" node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" needle "^2.2.1" nopt "^4.0.1" npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.2.7" rimraf "^2.6.1" semver 
"^5.3.0" tar "^4" node-releases@^1.1.71: version "1.1.72" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe" integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== nopt@3.x: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" integrity sha1-xkZdvwirzU2zWTF/eaxopkayj/k= dependencies: abbrev "1" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: remove-trailing-separator "^1.0.1" normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= normalize-url@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== dependencies: prepend-http "^2.0.0" query-string "^5.0.1" sort-keys "^2.0.0" npm-bundled@^1.0.1: version "1.0.6" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== npm-packlist@^1.1.6: version "1.4.6" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.6.tgz#53ba3ed11f8523079f1457376dd379ee4ea42ff4" integrity sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" gauge "~2.7.3" set-blocking "~2.0.0" nth-check@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== dependencies: boolbase "~1.0.0" null-check@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" integrity sha1-l33/1xdgErnsMNKjnbXPcqBDnt0= null-loader@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/null-loader/-/null-loader-1.0.0.tgz#90e85798e50e9dd1d568495a44e74829dec26744" integrity sha512-mYLDjDVTkjTlFoidxRhzO75rdcwfVXfw5G5zpj8sXnBkHtKJxMk4hTcRR4i5SOhDB6EvcQuYriy6IV23eq6uog== dependencies: loader-utils "^1.2.3" schema-utils "^1.0.0" num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" integrity sha1-8MaapQ78lbhmwYb0AKM3acsvEpE= object-copy@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" object-inspect@^1.6.0: version "1.7.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== object-is@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/object-is/-/object-is-1.0.1.tgz#0aa60ec9989a0b3ed795cf4d06f62cf1ad6539b6" integrity sha1-CqYOyZiaCz7Xlc9NBvYs8a1lObY= object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== dependencies: define-properties "^1.1.2" function-bind "^1.1.1" has-symbols "^1.0.0" object-keys "^1.0.11" object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= dependencies: define-properties "^1.1.2" es-abstract "^1.5.1" object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= dependencies: isobject "^3.0.1" obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== on-finished@~2.3.0: version "2.3.0" resolved 
"https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@1.x, once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" opn@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== dependencies: is-wsl "^1.1.0" optimist@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.6" levn "~0.3.0" prelude-ls "~1.1.2" type-check "~0.3.2" word-wrap "~1.2.3" original@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== dependencies: url-parse "^1.4.3" os-browserify@^0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-locale@^3.0.0, os-locale@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== dependencies: execa "^1.0.0" lcid "^2.0.0" mem "^4.0.0" os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-is-promise@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== p-limit@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.1.tgz#aa07a788cc3151c939b5131f63570f0dd2009537" integrity 
sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg== dependencies: p-try "^2.0.0" p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-map@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== p-retry@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== dependencies: retry "^0.12.0" p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@~1.0.5: version "1.0.10" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.10.tgz#4328badb5086a426aa90f541977d4955da5c9732" integrity sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw== parallel-transform@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== dependencies: cyclist "^1.0.1" inherits "^2.0.3" readable-stream "^2.1.5" param-case@2.1.x: version "2.1.1" resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= dependencies: no-case "^2.2.0" 
parse-asn1@^5.0.0: version "5.1.5" resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== dependencies: asn1.js "^4.0.0" browserify-aes "^1.0.0" create-hash "^1.1.0" evp_bytestokey "^1.0.0" pbkdf2 "^3.0.3" safe-buffer "^5.1.1" parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse-json@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= dependencies: error-ex "^1.3.1" json-parse-better-errors "^1.0.1" parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= parseqs@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" integrity sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0= dependencies: better-assert "~1.0.0" parseuri@0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" integrity sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo= dependencies: better-assert "~1.0.0" parseurl@~1.3.2, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascalcase@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= 
path-browserify@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-is-inside@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= path-key@^2.0.0, path-key@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= path-parse@^1.0.6: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= 
path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify "^2.0.0" pinkie-promise "^2.0.0" pbkdf2@^3.0.3: version "3.0.17" resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" ripemd160 "^2.0.1" safe-buffer "^5.0.1" sha.js "^2.4.8" performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= picomatch@^2.0.4: version "2.1.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.1.1.tgz#ecdfbea7704adb5fe6fb47f9866c4c0e15e905c5" integrity sha512-OYMyqkKzK7blWO/+XZYP6w8hH0LDvkBvdvKukti+7kqYFCiEAk+gI3DWnryapc0Dau05ugGTy0foQ6mqn4AHYA== pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= pkg-dir@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== dependencies: find-up "^3.0.0" portfinder@^1.0.25: version "1.0.25" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.25.tgz#254fd337ffba869f4b9d37edc298059cb4d35eca" integrity sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg== dependencies: async "^2.6.2" debug "^3.1.1" mkdirp "^0.5.1" posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= postcss-load-config@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.0.tgz#c84d692b7bb7b41ddced94ee62e8ab31b417b003" integrity sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q== dependencies: cosmiconfig "^5.0.0" import-cwd "^2.0.0" postcss-loader@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== dependencies: loader-utils "^1.1.0" postcss "^7.0.0" postcss-load-config "^2.0.0" schema-utils "^1.0.0" postcss-modules-extract-imports@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== dependencies: postcss "^7.0.5" postcss-modules-local-by-default@^2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.6.tgz#dd9953f6dd476b5fd1ef2d8830c8929760b56e63" integrity sha512-oLUV5YNkeIBa0yQl7EYnxMgy4N6noxmiwZStaEJUSe2xPMcdNc8WmBQuQCx18H5psYbVxz8zoHk0RAAYZXP9gA== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-value-parser "^3.3.1" postcss-modules-scope@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.1.0.tgz#ad3f5bf7856114f6fcab901b0502e2a2bc39d4eb" integrity sha512-91Rjps0JnmtUB0cujlc8KIKCsJXWjzuxGeT/+Q2i2HXKZ7nBUeF9YQTZZTNvHVoNYj1AthsjnGLtqDUE0Op79A== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" postcss-modules-values@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-2.0.0.tgz#479b46dc0c5ca3dc7fa5270851836b9ec7152f64" integrity sha512-Ki7JZa7ff1N3EIMlPnGTZfUMe69FFwiQPnVSXC9mnn3jozCRBYIxiZd44yJOV2AmabOo4qFf8s0dC/+lweG7+w== dependencies: icss-replace-symbols "^1.1.0" postcss "^7.0.6" postcss-selector-parser@^6.0.0: version "6.0.2" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz#934cf799d016c83411859e09dcecade01286ec5c" integrity sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg== dependencies: cssesc "^3.0.0" indexes-of "^1.0.1" uniq "^1.0.1" postcss-value-parser@^3.3.0, postcss-value-parser@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== postcss-value-parser@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz#482282c09a42706d1fc9a069b73f44ec08391dc9" integrity 
sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ== postcss@^7.0.0, postcss@^7.0.14, postcss@^7.0.23, postcss@^7.0.5, postcss@^7.0.6: version "7.0.36" resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.36.tgz#056f8cffa939662a8f5905950c07d5285644dfcb" integrity sha512-BebJSIUMwJHRH0HAQoxN4u1CN86glsrwsW0q7T+/m44eXOUAxSNdHRkNZPYz5vVUbg17hFgOQDE7fZk7li3pZw== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= prepend-http@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= pretty-error@^2.0.2: version "2.1.1" resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= dependencies: renderkid "^2.0.1" utila "~0.4" private@^0.1.6: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= promise-inflight@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= promise@^7.0.1, promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== dependencies: asap "~2.0.3" proxy-addr@~2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== dependencies: forwarded "~0.1.2" ipaddr.js "1.9.0" prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= psl@^1.1.24: version "1.4.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.4.0.tgz#5dd26156cdb69fa1fdb8ab1991667d3f80ced7c2" integrity sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw== public-encrypt@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== dependencies: bn.js "^4.1.0" browserify-rsa "^4.0.0" create-hash "^1.1.0" parse-asn1 "^5.0.0" randombytes "^2.0.1" safe-buffer "^5.1.2" pug-attrs@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-3.0.0.tgz#b10451e0348165e31fad1cc23ebddd9dc7347c41" integrity 
sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA== dependencies: constantinople "^4.0.1" js-stringify "^1.0.2" pug-runtime "^3.0.0" pug-code-gen@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-3.0.2.tgz#ad190f4943133bf186b60b80de483100e132e2ce" integrity sha512-nJMhW16MbiGRiyR4miDTQMRWDgKplnHyeLvioEJYbk1RsPI3FuA3saEP8uwnTb2nTJEKBU90NFVWJBk4OU5qyg== dependencies: constantinople "^4.0.1" doctypes "^1.1.0" js-stringify "^1.0.2" pug-attrs "^3.0.0" pug-error "^2.0.0" pug-runtime "^3.0.0" void-elements "^3.1.0" with "^7.0.0" pug-error@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-2.0.0.tgz#5c62173cb09c34de2a2ce04f17b8adfec74d8ca5" integrity sha512-sjiUsi9M4RAGHktC1drQfCr5C5eriu24Lfbt4s+7SykztEOwVZtbFk1RRq0tzLxcMxMYTBR+zMQaG07J/btayQ== pug-filters@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-4.0.0.tgz#d3e49af5ba8472e9b7a66d980e707ce9d2cc9b5e" integrity sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A== dependencies: constantinople "^4.0.1" jstransformer "1.0.0" pug-error "^2.0.0" pug-walk "^2.0.0" resolve "^1.15.1" pug-lexer@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-5.0.1.tgz#ae44628c5bef9b190b665683b288ca9024b8b0d5" integrity sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w== dependencies: character-parser "^2.2.0" is-expression "^4.0.0" pug-error "^2.0.0" pug-linker@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-4.0.0.tgz#12cbc0594fc5a3e06b9fc59e6f93c146962a7708" integrity sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw== dependencies: pug-error "^2.0.0" pug-walk "^2.0.0" pug-load@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/pug-load/-/pug-load-3.0.0.tgz#9fd9cda52202b08adb11d25681fb9f34bd41b662" integrity sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ== dependencies: object-assign "^4.1.1" pug-walk "^2.0.0" pug-parser@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-6.0.0.tgz#a8fdc035863a95b2c1dc5ebf4ecf80b4e76a1260" integrity sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw== dependencies: pug-error "^2.0.0" token-stream "1.0.0" pug-runtime@^3.0.0, pug-runtime@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-3.0.1.tgz#f636976204723f35a8c5f6fad6acda2a191b83d7" integrity sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg== pug-strip-comments@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz#f94b07fd6b495523330f490a7f554b4ff876303e" integrity sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ== dependencies: pug-error "^2.0.0" pug-walk@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-2.0.0.tgz#417aabc29232bb4499b5b5069a2b2d2a24d5f5fe" integrity sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ== pug@^3.0.1: version "3.0.2" resolved "https://registry.yarnpkg.com/pug/-/pug-3.0.2.tgz#f35c7107343454e43bc27ae0ff76c731b78ea535" integrity sha512-bp0I/hiK1D1vChHh6EfDxtndHji55XP/ZJKwsRqrz6lRia6ZC2OZbdAymlxdVFwd1L70ebrVJw4/eZ79skrIaw== dependencies: pug-code-gen "^3.0.2" pug-filters "^4.0.0" pug-lexer "^5.0.1" pug-linker "^4.0.0" pug-load "^3.0.0" pug-parser "^6.0.0" pug-runtime "^3.0.1" pug-strip-comments "^2.0.0" pump@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" integrity 
sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== dependencies: end-of-stream "^1.1.0" once "^1.3.1" pumpify@^1.3.3: version "1.5.1" resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" inherits "^2.0.3" pump "^2.0.0" punycode@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= punycode@^1.2.4, punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== qjobs@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" integrity sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg== qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" integrity 
sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== query-string@^5.0.1: version "5.1.1" resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== dependencies: decode-uri-component "^0.2.0" object-assign "^4.1.0" strict-uri-encode "^1.0.0" querystring-es3@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= querystring@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= querystringify@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" randomfill@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== dependencies: randombytes "^2.0.5" safe-buffer "^5.1.0" range-parser@^1.0.3, range-parser@^1.2.0, range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity 
sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== dependencies: bytes "3.1.0" http-errors "1.7.2" iconv-lite "0.4.24" unpipe "1.0.0" raw-loader@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-2.0.0.tgz#e2813d9e1e3f80d1bbade5ad082e809679e20c26" integrity sha512-kZnO5MoIyrojfrPWqrhFNLZemIAX8edMOCp++yC5RKxzFB3m92DqKNhKlU6+FvpOhWtvyh3jOaD7J6/9tpdIKg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" path-type "^1.0.0" "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity 
sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@^3.0.6, readable-stream@^3.1.1: version "3.4.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc" integrity sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" util-deprecate "^1.0.1" readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== dependencies: graceful-fs "^4.1.11" micromatch "^3.1.10" readable-stream "^2.0.2" readdirp@~3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839" integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ== dependencies: picomatch "^2.0.4" redent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= dependencies: indent-string "^2.1.0" strip-indent "^1.0.1" regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" regenerate@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" 
integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== regenerator-runtime@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.2: version "0.13.3" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5" integrity sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw== regenerator-transform@^0.14.0: version "0.14.1" resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.1.tgz#3b2fce4e1ab7732c08f665dfdb314749c7ddd2fb" integrity sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ== dependencies: private "^0.1.6" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" safe-regex "^1.1.0" regexp.prototype.flags@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.2.0.tgz#6b30724e306a27833eeb171b66ac8890ba37e41c" integrity sha512-ztaw4M1VqgMwl9HlPpOuiYgItcHlunW0He2fE6eNfT6E/CF2FtYi9ofOYe4mKntstYk0Fyh/rDRBdS3AnxjlrA== dependencies: define-properties "^1.1.2" regexpu-core@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6" integrity sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg== dependencies: regenerate "^1.4.0" regenerate-unicode-properties "^8.1.0" 
regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" unicode-match-property-value-ecmascript "^1.1.0" regjsgen@^0.5.0: version "0.5.1" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.1.tgz#48f0bf1a5ea205196929c0d9798b42d1ed98443c" integrity sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg== regjsparser@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" relateurl@0.2.x: version "0.2.7" resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= renderkid@^2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== dependencies: css-select "^1.1.0" dom-converter "^0.2" htmlparser2 "^3.3.0" strip-ansi "^3.0.0" utila "^0.4.0" repeat-element@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved 
"https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" request@^2.83.0: version "2.88.0" resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: aws-sign2 "~0.7.0" aws4 "^1.8.0" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" form-data "~2.3.2" har-validator "~5.1.0" http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "~2.1.19" oauth-sign "~0.9.0" performance-now "^2.1.0" qs "~6.5.2" safe-buffer "^5.1.2" tough-cookie "~2.4.3" tunnel-agent "^0.6.0" uuid "^3.3.2" require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= require-main-filename@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= resolve-cwd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= dependencies: 
resolve-from "^3.0.0" resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= dependencies: expand-tilde "^2.0.0" global-modules "^1.0.0" resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" integrity sha1-six699nWiBvItuZTM17rywoYh0g= resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= resolve@^1.10.0, resolve@^1.3.2, resolve@^1.8.1: version "1.12.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.2.tgz#08b12496d9aa8659c75f534a8f05f0d892fff594" integrity sha512-cAVTI2VLHWYsGOirfeYVVQ7ZDejtQ9fp4YhYckWDEkFfqbVjaT11iM8k6xSAfGFMM+gDpZjMnFssPu8we+mqFw== dependencies: path-parse "^1.0.6" resolve@^1.15.1: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== dependencies: is-core-module "^2.2.0" path-parse "^1.0.6" ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= rfdc@^1.1.4: version "1.1.4" resolved 
"https://registry.yarnpkg.com/rfdc/-/rfdc-1.1.4.tgz#ba72cc1367a0ccd9cf81a870b3b58bd3ad07f8c2" integrity sha512-5C9HXdzK8EAqN7JDif30jqsBzavB7wLpaubisuQIGHWf2gUXSpzy6ArX/+Da8RjFpagWsCn+pIgxTMAmKw9Zug== rimraf@^2.5.4, rimraf@^2.6.0, rimraf@^2.6.1, rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== dependencies: hash-base "^3.0.0" inherits "^2.0.1" run-queue@^1.0.0, run-queue@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= dependencies: aproba "^1.1.1" safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= dependencies: ret "~0.1.10" "safer-buffer@>= 
2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== schema-utils@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf" integrity sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8= dependencies: ajv "^5.0.0" schema-utils@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== dependencies: ajv "^6.1.0" ajv-errors "^1.0.0" ajv-keywords "^3.1.0" select-hose@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= selfsigned@^1.10.7: version "1.10.7" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.7.tgz#da5819fd049d5574f28e88a9bcc6dbc6e6f3906b" integrity sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA== dependencies: node-forge "0.9.0" "semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== semver@^6.3.0: version "6.3.0" resolved 
"https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== dependencies: debug "2.6.9" depd "~1.1.2" destroy "~1.0.4" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" http-errors "~1.7.2" mime "1.6.0" ms "2.1.1" on-finished "~2.3.0" range-parser "~1.2.1" statuses "~1.5.0" serialize-javascript@^1.7.0: version "1.9.1" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-1.9.1.tgz#cfc200aef77b600c47da9bb8149c943e798c2fdb" integrity sha512-0Vb/54WJ6k5v8sSWN09S0ora+Hnr+cX40r9F170nT+mSkaxltoE/7R3OrIdBSUv1OoiobH1QoWQbCnAO+e8J1A== serve-index@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= dependencies: accepts "~1.3.4" batch "0.6.1" debug "2.6.9" escape-html "~1.0.3" http-errors "~1.6.2" mime-types "~2.1.17" parseurl "~1.3.2" serve-static@1.14.1: version "1.14.1" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" send "0.17.1" set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" is-plain-object "^2.0.3" split-string "^3.0.1" setimmediate@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== setprototypeof@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== dependencies: inherits "^2.0.1" safe-buffer "^5.0.1" shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= dependencies: shebang-regex "^1.0.0" shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity 
sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" isobject "^3.0.0" snapdragon-util "^3.0.1" snapdragon-util@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: base "^0.11.1" debug "^2.2.0" define-property "^0.2.5" extend-shallow "^2.0.1" map-cache "^0.2.2" source-map "^0.5.6" source-map-resolve "^0.5.0" use "^3.1.0" socket.io-adapter@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz#2a805e8a14d6372124dd9159ad4502f8cb07f06b" integrity sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs= socket.io-client@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.1.1.tgz#dcb38103436ab4578ddb026638ae2f21b623671f" integrity sha512-jxnFyhAuFxYfjqIgduQlhzqTcOEQSn+OHKVfAxWaNWa7ecP7xSNk2Dx/3UEsDcY7NcFafxvNvKPmmO7HTwTxGQ== dependencies: backo2 "1.0.2" base64-arraybuffer "0.1.5" component-bind "1.0.0" component-emitter "1.2.1" debug "~3.1.0" engine.io-client "~3.2.0" has-binary2 "~1.0.2" has-cors "1.1.0" indexof "0.0.1" object-component "0.0.3" parseqs "0.0.5" parseuri "0.0.5" socket.io-parser "~3.2.0" to-array "0.1.4" socket.io-parser@~3.2.0: version "3.2.0" resolved 
"https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.2.0.tgz#e7c6228b6aa1f814e6148aea325b51aa9499e077" integrity sha512-FYiBx7rc/KORMJlgsXysflWx/RIvtqZbyGLlHZvjfmPTPeuD/I8MaW7cfFrj5tRltICJdgwflhfZ3NVVbVLFQA== dependencies: component-emitter "1.2.1" debug "~3.1.0" isarray "2.0.1" socket.io@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.1.1.tgz#a069c5feabee3e6b214a75b40ce0652e1cfb9980" integrity sha512-rORqq9c+7W0DAK3cleWNSyfv/qKXV99hV4tZe+gGLfBECw3XEhBy7x85F3wypA9688LKjtwO9pX9L33/xQI8yA== dependencies: debug "~3.1.0" engine.io "~3.2.0" has-binary2 "~1.0.2" socket.io-adapter "~1.1.0" socket.io-client "2.1.1" socket.io-parser "~3.2.0" sockjs-client@1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.4.0.tgz#c9f2568e19c8fd8173b4997ea3420e0bb306c7d5" integrity sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g== dependencies: debug "^3.2.5" eventsource "^1.0.7" faye-websocket "~0.11.1" inherits "^2.0.3" json3 "^3.3.2" url-parse "^1.4.3" sockjs@0.3.19: version "0.3.19" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d" integrity sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw== dependencies: faye-websocket "^0.10.0" uuid "^3.0.1" sort-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= dependencies: is-plain-obj "^1.0.0" source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-resolve@^0.5.0: version "0.5.2" resolved 
"https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" urix "^0.1.0" source-map-support@~0.5.12: version "0.5.16" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map-url@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= source-map@^0.5.0, source-map@^0.5.1, source-map@^0.5.6, source-map@^0.5.7: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" integrity sha1-2rc/vPwrqBm03gO9b26qSBZLP50= dependencies: amdefine ">=0.0.4" spdx-correct@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: spdx-expression-parse "^3.0.0" 
spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== spdx-expression-parse@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: version "3.0.5" resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== dependencies: debug "^4.1.0" detect-node "^2.0.4" hpack.js "^2.1.6" obuf "^1.1.2" readable-stream "^3.0.6" wbuf "^1.7.3" spdy@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.1.tgz#6f12ed1c5db7ea4f24ebb8b89ba58c87c08257f2" integrity sha512-HeZS3PBdMA+sZSu0qwpCxl3DeALD5ASx8pAX0jZdKXSpPWbQ6SYGnlg3BBmYLx5LtiZrmkAZfErCm2oECBcioA== dependencies: debug "^4.1.0" handle-thing "^2.0.0" http-deceiver "^1.2.7" select-hose "^2.0.0" spdy-transport "^3.0.0" split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow "^3.0.0" 
sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: version "1.16.1" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" bcrypt-pbkdf "^1.0.0" dashdash "^1.12.0" ecc-jsbn "~0.1.1" getpass "^0.1.1" jsbn "~0.1.0" safer-buffer "^2.0.2" tweetnacl "~0.14.0" ssri@^6.0.1: version "6.0.2" resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.2.tgz#157939134f20464e7301ddba3e90ffa8f7728ac5" integrity sha512-cepbSq/neFK7xB6A50KHN0xHDotYzq58wWCa5LeWqnPrHG8GzfEjO/4O8kpmcGW+oaxkvhEJCWgbgNk4/ZV93Q== dependencies: figgy-pudding "^3.5.1" static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== dependencies: inherits "~2.0.1" readable-stream "^2.0.2" stream-each@^1.1.0: version "1.2.3" resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== 
dependencies: end-of-stream "^1.1.0" stream-shift "^1.0.0" stream-http@^2.7.2: version "2.8.3" resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== dependencies: builtin-status-codes "^3.0.0" inherits "^2.0.1" readable-stream "^2.3.6" to-arraybuffer "^1.0.0" xtend "^4.0.0" stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= streamroller@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-1.0.6.tgz#8167d8496ed9f19f05ee4b158d9611321b8cacd9" integrity sha512-3QC47Mhv3/aZNFpDDVO44qQb9gwB9QggMEE0sQmkTAwBVYdBRWISdsywlkfm5II1Q5y/pmrHflti/IgmIzdDBg== dependencies: async "^2.6.2" date-format "^2.0.0" debug "^3.2.6" fs-extra "^7.0.1" lodash "^4.17.14" strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" "string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== dependencies: emoji-regex "^7.0.1" is-fullwidth-code-point "^2.0.0" strip-ansi "^5.1.0" string.prototype.trimleft@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string.prototype.trimright@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58" integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg== dependencies: define-properties "^1.1.3" function-bind "^1.1.1" string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: ansi-regex "^4.1.0" strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= dependencies: get-stdin "^4.0.1" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= style-loader@^0.23.1: version "0.23.1" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.23.1.tgz#cb9154606f3e771ab6c4ab637026a1049174d925" integrity sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg== dependencies: loader-utils "^1.1.0" schema-utils "^1.0.0" supports-color@6.1.0, supports-color@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== 
dependencies: has-flag "^3.0.0" supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.0: version "3.2.3" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" tapable@^1.0.0, tapable@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tar@^4: version "4.4.19" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3" integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA== dependencies: chownr "^1.1.4" fs-minipass "^1.2.7" minipass "^2.9.0" minizlib "^1.3.3" mkdirp "^0.5.5" safe-buffer "^5.2.1" yallist "^3.1.1" terser-webpack-plugin@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.1.tgz#61b18e40eaee5be97e771cdbb10ed1280888c2b4" integrity sha512-ZXmmfiwtCLfz8WKZyYUuuHf3dMYEjg8NrjHMb0JqHVHVOSkzp3cW2/XG1fP3tRhqEqSzMwzzRQGtAPbs4Cncxg== dependencies: cacache "^12.0.2" find-cache-dir "^2.1.0" is-wsl "^1.1.0" schema-utils "^1.0.0" serialize-javascript "^1.7.0" source-map "^0.6.1" terser "^4.1.2" webpack-sources "^1.4.0" worker-farm "^1.7.0" terser@^4.1.2: version "4.4.0" resolved 
"https://registry.yarnpkg.com/terser/-/terser-4.4.0.tgz#22c46b4817cf4c9565434bfe6ad47336af259ac3" integrity sha512-oDG16n2WKm27JO8h4y/w3iqBGAOSCtq7k8dRmrn4Wf9NouL0b2WpMHGChFGZq4nFAQy1FsNJrVQHfurXOSTmOA== dependencies: commander "^2.20.0" source-map "~0.6.1" source-map-support "~0.5.12" through2@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" xtend "~4.0.1" thunky@^1.0.2: version "1.1.0" resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== timers-browserify@^2.0.4: version "2.0.11" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== dependencies: setimmediate "^1.0.4" tmp@0.0.33, tmp@0.0.x: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" to-array@0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" integrity sha1-F+bBH3PdTz10zaek/zI46a2b+JA= to-arraybuffer@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" 
integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= dependencies: is-number "^3.0.0" repeat-string "^1.6.1" to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" extend-shallow "^3.0.2" regex-not "^1.0.2" safe-regex "^1.1.0" toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== token-stream@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-1.0.0.tgz#cc200eab2613f4166d27ff9afc7ca56d49df6eb4" integrity sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ= toposort@^1.0.0: version "1.0.7" resolved 
"https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= tough-cookie@~2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: psl "^1.1.24" punycode "^1.4.1" trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= tslib@^1.9.0: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" type-is@~1.6.17, 
type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" type@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3" integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow== typedarray@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= uglify-js@3.4.x: version "3.4.10" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== dependencies: commander "~2.19.0" source-map "~0.6.1" uglify-js@^3.1.4: version "3.13.5" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.13.5.tgz#5d71d6dbba64cf441f32929b1efce7365bb4f113" integrity sha512-xtB8yEqIkn7zmOyS2zUNBsYCBRhDkvlNxMMY2smuJ/qA8NCHeQvKCF3i9Z4k8FJH4+PJvZRtMrPynfZ75+CSZw== ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== unicode-canonical-property-names-ecmascript@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== unicode-match-property-ecmascript@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== dependencies: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" unicode-match-property-value-ecmascript@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" set-value "^2.0.1" uniq@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= unique-filename@^1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== dependencies: unique-slug "^2.0.0" unique-slug@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== dependencies: imurmurhash "^0.1.4" universalify@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= dependencies: has-value "^0.3.1" isobject "^3.0.0" upath@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== upper-case@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= uri-js@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" 
resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= url-join@^2.0.2: version "2.0.5" resolved "https://registry.yarnpkg.com/url-join/-/url-join-2.0.5.tgz#5af22f18c052a000a48d7b82c5e9c2e2feeda728" integrity sha1-WvIvGMBSoACkjXuCxenC4v7tpyg= url-parse@^1.4.3: version "1.5.3" resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.3.tgz#71c1303d38fb6639ade183c2992c8cc0686df862" integrity sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" url@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= dependencies: punycode "1.3.2" querystring "0.2.0" use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== useragent@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" integrity sha512-4AoH4pxuSvHCjqLO04sU6U/uE65BYza8l/KKBS0b0hnUPWi+cQ2BpeTEwejCSx9SPV5/U03nniDTrWx5NrmKdw== dependencies: lru-cache "4.1.x" tmp "0.0.x" util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= util.promisify@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== dependencies: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" util@0.10.3: 
version "0.10.3" resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= dependencies: inherits "2.0.1" util@^0.11.0: version "0.11.1" resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== dependencies: inherits "2.0.3" utila@^0.4.0, utila@~0.4: version "0.4.0" resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2: version "3.3.3" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.3.tgz#4568f0216e78760ee1dbf3a4d2cf53e224112866" integrity sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ== v8-compile-cache@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= verror@1.10.0: version "1.10.0" resolved 
"https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" vm-browserify@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== void-elements@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= void-elements@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-3.1.0.tgz#614f7fbf8d801f0bb5f0661f5b2f5785750e4f09" integrity sha1-YU9/v42AHwu18GYfWy9XhXUOTwk= watchpack@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== dependencies: chokidar "^2.0.2" graceful-fs "^4.1.2" neo-async "^2.5.0" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== dependencies: minimalistic-assert "^1.0.0" webpack-cli@^3.3.1: version "3.3.10" resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.10.tgz#17b279267e9b4fb549023fae170da8e6e766da13" integrity sha512-u1dgND9+MXaEt74sJR4PR7qkPxXUSQ0RXYq8x1L6Jg1MYVEmGPrH6Ah6C4arD4r0J1P5HKjRqpab36k0eIzPqg== dependencies: chalk "2.4.2" cross-spawn "6.0.5" enhanced-resolve "4.1.0" findup-sync "3.0.0" global-modules "2.0.0" import-local "2.0.0" interpret "1.2.0" loader-utils "1.2.3" supports-color "6.1.0" v8-compile-cache "2.0.3" yargs 
"13.2.4" webpack-dev-middleware@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-2.0.6.tgz#a51692801e8310844ef3e3790e1eacfe52326fd4" integrity sha512-tj5LLD9r4tDuRIDa5Mu9lnY2qBBehAITv6A9irqXhw/HQquZgTx3BCd57zYbU2gMDnncA49ufK2qVQSbaKJwOw== dependencies: loud-rejection "^1.6.0" memory-fs "~0.4.1" mime "^2.1.0" path-is-absolute "^1.0.0" range-parser "^1.0.3" url-join "^2.0.2" webpack-log "^1.0.1" webpack-dev-middleware@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3" integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw== dependencies: memory-fs "^0.4.1" mime "^2.4.4" mkdirp "^0.5.1" range-parser "^1.2.1" webpack-log "^2.0.0" webpack-dev-server@^3.3.1: version "3.9.0" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.9.0.tgz#27c3b5d0f6b6677c4304465ac817623c8b27b89c" integrity sha512-E6uQ4kRrTX9URN9s/lIbqTAztwEPdvzVrcmHE8EQ9YnuT9J8Es5Wrd8n9BKg1a0oZ5EgEke/EQFgUsp18dSTBw== dependencies: ansi-html "0.0.7" bonjour "^3.5.0" chokidar "^2.1.8" compression "^1.7.4" connect-history-api-fallback "^1.6.0" debug "^4.1.1" del "^4.1.1" express "^4.17.1" html-entities "^1.2.1" http-proxy-middleware "0.19.1" import-local "^2.0.0" internal-ip "^4.3.0" ip "^1.1.5" is-absolute-url "^3.0.3" killable "^1.0.1" loglevel "^1.6.4" opn "^5.5.0" p-retry "^3.0.1" portfinder "^1.0.25" schema-utils "^1.0.0" selfsigned "^1.10.7" semver "^6.3.0" serve-index "^1.9.1" sockjs "0.3.19" sockjs-client "1.4.0" spdy "^4.0.1" strip-ansi "^3.0.1" supports-color "^6.1.0" url "^0.11.0" webpack-dev-middleware "^3.7.2" webpack-log "^2.0.0" ws "^6.2.1" yargs "12.0.5" webpack-fix-style-only-entries@^0.2.1: version "0.2.2" resolved 
"https://registry.yarnpkg.com/webpack-fix-style-only-entries/-/webpack-fix-style-only-entries-0.2.2.tgz#60331c608b944ac821a3b6f2ae491a6d79ba40eb" integrity sha512-0wcrLCnISP8htV0NP1mT0e2mHhfjGQdNk82s8BTLVvF7rXuoJuUUzP3aCUXnRqlLgmTBx5WgqPhnczjatl+iSQ== webpack-log@^1.0.1: version "1.2.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-1.2.0.tgz#a4b34cda6b22b518dbb0ab32e567962d5c72a43d" integrity sha512-U9AnICnu50HXtiqiDxuli5gLB5PGBo7VvcHx36jRZHwK4vzOYLbImqT4lwWwoMHdQWwEKw736fCHEekokTEKHA== dependencies: chalk "^2.1.0" log-symbols "^2.1.0" loglevelnext "^1.0.1" uuid "^3.1.0" webpack-log@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== dependencies: ansi-colors "^3.0.0" uuid "^3.3.2" webpack-shell-plugin@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/webpack-shell-plugin/-/webpack-shell-plugin-0.5.0.tgz#29b8a1d80ddeae0ddb10e729667f728653c2c742" integrity sha1-Kbih2A3erg3bEOcpZn9yhlPCx0I= webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== dependencies: source-list-map "^2.0.0" source-map "~0.6.1" webpack@^4.30.0: version "4.41.2" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.2.tgz#c34ec76daa3a8468c9b61a50336d8e3303dce74e" integrity sha512-Zhw69edTGfbz9/8JJoyRQ/pq8FYUoY0diOXqW0T6yhgdhCv6wr0hra5DwwWexNRns2Z2+gsnrNcbe9hbGBgk/A== dependencies: "@webassemblyjs/ast" "1.8.5" "@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/wasm-edit" "1.8.5" "@webassemblyjs/wasm-parser" "1.8.5" acorn "^6.2.1" ajv "^6.10.2" ajv-keywords "^3.4.1" chrome-trace-event "^1.0.2" 
enhanced-resolve "^4.1.0" eslint-scope "^4.0.3" json-parse-better-errors "^1.0.2" loader-runner "^2.4.0" loader-utils "^1.2.3" memory-fs "^0.4.1" micromatch "^3.1.10" mkdirp "^0.5.1" neo-async "^2.6.1" node-libs-browser "^2.2.1" schema-utils "^1.0.0" tapable "^1.1.3" terser-webpack-plugin "^1.4.1" watchpack "^1.6.0" webpack-sources "^1.4.1" websocket-driver@>=0.5.1: version "0.7.3" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.3.tgz#a2d4e0d4f4f116f1e6297eba58b05d430100e9f9" integrity sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg== dependencies: http-parser-js ">=0.4.0 <0.4.11" safe-buffer ">=5.1.0" websocket-extensions ">=0.1.1" websocket-extensions@>=0.1.1: version "0.1.4" resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@^1.1.1, which@^1.2.1, which@^1.2.14, which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" with@^7.0.0: version "7.0.2" resolved "https://registry.yarnpkg.com/with/-/with-7.0.2.tgz#ccee3ad542d25538a7a7a80aad212b9828495bac" integrity 
sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w== dependencies: "@babel/parser" "^7.9.6" "@babel/types" "^7.9.6" assert-never "^1.2.1" babel-walk "3.0.0-canary-5" word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= worker-farm@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== dependencies: errno "~0.1.7" wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== dependencies: ansi-styles "^3.2.0" string-width "^3.0.0" strip-ansi "^5.0.0" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= ws@^6.2.1: version "6.2.1" resolved 
"https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== dependencies: async-limiter "~1.0.0" ws@~3.3.1: version "3.3.3" resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA== dependencies: async-limiter "~1.0.0" safe-buffer "~5.1.0" ultron "~1.1.0" xmlhttprequest-ssl@~1.5.4: version "1.5.5" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e" integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4= xtend@^4.0.0, xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== "y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.2, yallist@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^11.1.1: version "11.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" integrity 
sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs-parser@^13.1.0: version "13.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" yargs@12.0.5: version "12.0.5" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== dependencies: cliui "^4.0.0" decamelize "^1.2.0" find-up "^3.0.0" get-caller-file "^1.0.1" os-locale "^3.0.0" require-directory "^2.1.1" require-main-filename "^1.0.1" set-blocking "^2.0.0" string-width "^2.0.0" which-module "^2.0.0" y18n "^3.2.1 || ^4.0.0" yargs-parser "^11.1.1" yargs@13.2.4: version "13.2.4" resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== dependencies: cliui "^5.0.0" find-up "^3.0.0" get-caller-file "^2.0.1" os-locale "^3.1.0" require-directory "^2.1.1" require-main-filename "^2.0.0" set-blocking "^2.0.0" string-width "^3.0.0" which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^13.1.0" yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk=